author    Nik Everett <nik9000@gmail.com>  2016-02-01 18:33:09 -0500
committer Nik Everett <nik9000@gmail.com>  2016-02-01 18:33:09 -0500
commit    c1d44780675a9ff43e87a44f62969a0214928988 (patch)
tree      515b696381d64bb693bcabfac8937732c8c431b4 /core
parent    a034e12bfa04044dead41956f14d0d53d7bb7438 (diff)
parent    7b5ed21d0dda532d43af51d3b5cd44ae6f4b82ed (diff)
Merge branch 'master' into feature/reindex
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/java/org/elasticsearch/ElasticsearchException.java | 32
-rw-r--r--  core/src/main/java/org/elasticsearch/ExceptionsHelper.java | 8
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ActionModule.java | 30
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ActionRequest.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/action/DocWriteResponse.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ReplicationResponse.java | 23
-rw-r--r--  core/src/main/java/org/elasticsearch/action/UnavailableShardsException.java | 8
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsGroup.java | 11
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java | 3
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java | 8
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java | 7
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/cluster/validate/template/TransportRenderSearchTemplateAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java | 8
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java | 11
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushRequest.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushRequest.java | 11
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsIndexRequest.java | 1
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequest.java | 11
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java | 8
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java | 10
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java | 8
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java | 7
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java | 7
-rw-r--r--  core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java | 40
-rw-r--r--  core/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java | 10
-rw-r--r--  core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java | 15
-rw-r--r--  core/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/action/get/GetRequest.java | 12
-rw-r--r--  core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java | 12
-rw-r--r--  core/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/index/IndexRequest.java | 32
-rw-r--r--  core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java | 8
-rw-r--r--  core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java | 23
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java | 43
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java | 70
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequestBuilder.java | 35
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java | 71
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/GetPipelineAction.java | 43
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequest.java | 66
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequestBuilder.java | 35
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java | 86
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/GetPipelineTransportAction.java | 69
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java | 225
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java | 125
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java | 44
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java | 79
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java | 36
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java | 71
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResult.java | 98
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentResult.java (renamed from core/src/main/java/org/elasticsearch/cluster/action/shard/NoOpShardStateActionListener.java) | 7
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java | 81
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java | 99
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineAction.java | 43
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java | 165
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequestBuilder.java | 46
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java | 137
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java | 71
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java | 106
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/WritePipelineResponse.java | 70
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/WritePipelineResponseRestListener.java | 41
-rw-r--r--  core/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java | 105
-rw-r--r--  core/src/main/java/org/elasticsearch/action/percolate/PercolateRequest.java | 3
-rw-r--r--  core/src/main/java/org/elasticsearch/action/percolate/PercolateShardResponse.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java | 11
-rw-r--r--  core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/action/search/ClearScrollRequest.java | 11
-rw-r--r--  core/src/main/java/org/elasticsearch/action/search/SearchRequest.java | 12
-rw-r--r--  core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java | 27
-rw-r--r--  core/src/main/java/org/elasticsearch/action/search/SearchScrollRequest.java | 8
-rw-r--r--  core/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryAndFetchAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryAndFetchAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryAndFetchAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/ActionFilter.java | 12
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/ActionFilterChain.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java | 106
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/ChildTaskActionRequest.java | 66
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/ChildTaskRequest.java | 9
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/DefaultShardOperationFailedException.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java | 9
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/IndicesOptions.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/TransportAction.java | 40
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastRequest.java | 5
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardRequest.java | 5
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardResponse.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java | 15
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java | 27
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java | 8
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java | 5
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java | 12
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/nodes/BaseNodeRequest.java | 3
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesRequest.java | 5
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/replication/BasicReplicationRequest.java | 13
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java | 24
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java | 23
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java | 48
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardRequest.java | 9
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java | 31
-rw-r--r--  core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/action/termvectors/dfs/TransportDfsOnlyAction.java | 7
-rw-r--r--  core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java | 10
-rw-r--r--  core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java | 20
-rw-r--r--  core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java | 31
-rw-r--r--  core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/bootstrap/Security.java | 42
-rw-r--r--  core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java | 34
-rw-r--r--  core/src/main/java/org/elasticsearch/client/Client.java | 24
-rw-r--r--  core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java | 72
-rw-r--r--  core/src/main/java/org/elasticsearch/client/FilterClient.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/client/node/NodeClient.java | 5
-rw-r--r--  core/src/main/java/org/elasticsearch/client/node/NodeClientModule.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/client/support/AbstractClient.java | 104
-rw-r--r--  core/src/main/java/org/elasticsearch/client/support/Headers.java | 65
-rw-r--r--  core/src/main/java/org/elasticsearch/client/transport/TransportClient.java | 28
-rw-r--r--  core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java | 23
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/ClusterModule.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/ClusterName.java | 14
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/ClusterState.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java | 40
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java | 66
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java | 20
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/metadata/AliasOrIndex.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java | 51
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java | 10
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java | 8
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java | 27
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java | 430
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java | 56
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java | 40
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java | 22
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java | 71
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java | 7
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java | 25
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java | 13
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java | 41
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java | 26
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java | 14
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java | 49
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java | 27
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java | 12
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateReplicaAllocationCommand.java | 19
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java | 19
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/BasePrimaryAllocationCommand.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java | 34
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java | 27
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/common/ContextAndHeaderHolder.java | 153
-rw-r--r--  core/src/main/java/org/elasticsearch/common/DelegatingHasContextAndHeaders.java | 111
-rw-r--r--  core/src/main/java/org/elasticsearch/common/HasContext.java | 82
-rw-r--r--  core/src/main/java/org/elasticsearch/common/Randomness.java | 12
-rw-r--r--  core/src/main/java/org/elasticsearch/common/blobstore/BlobStore.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/common/cli/HelpPrinter.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/common/cli/Terminal.java | 12
-rw-r--r--  core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/common/http/client/HttpDownloadHelper.java | 488
-rw-r--r--  core/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java | 188
-rw-r--r--  core/src/main/java/org/elasticsearch/common/io/stream/NotSerializableExceptionWrapper.java | 3
-rw-r--r--  core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java | 9
-rw-r--r--  core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/common/lease/Releasable.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/common/lease/Releasables.java | 50
-rw-r--r--  core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java | 18
-rw-r--r--  core/src/main/java/org/elasticsearch/common/logging/Loggers.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/common/logging/log4j/TerminalAppender.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/common/network/NetworkModule.java | 20
-rw-r--r--  core/src/main/java/org/elasticsearch/common/network/NetworkService.java | 97
-rw-r--r--  core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java | 60
-rw-r--r--  core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java | 267
-rw-r--r--  core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java | 13
-rw-r--r--  core/src/main/java/org/elasticsearch/common/settings/Setting.java | 148
-rw-r--r--  core/src/main/java/org/elasticsearch/common/settings/Settings.java | 23
-rw-r--r--  core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java | 35
-rw-r--r--  core/src/main/java/org/elasticsearch/common/transport/PortsRange.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/common/unit/DistanceUnit.java | 23
-rw-r--r--  core/src/main/java/org/elasticsearch/common/unit/TimeValue.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java | 383
-rw-r--r--  core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java | 28
-rw-r--r--  core/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java | 133
-rw-r--r--  core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedEsThreadPoolExecutor.java | 54
-rw-r--r--  core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java | 359
-rw-r--r--  core/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java | 8
-rw-r--r--  core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java | 23
-rw-r--r--  core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java | 15
-rw-r--r--  core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java | 9
-rw-r--r--  core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java | 67
-rw-r--r--  core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java | 45
-rw-r--r--  core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java | 22
-rw-r--r--  core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java | 17
-rw-r--r--  core/src/main/java/org/elasticsearch/env/Environment.java | 65
-rw-r--r--  core/src/main/java/org/elasticsearch/env/NodeEnvironment.java | 151
-rw-r--r--  core/src/main/java/org/elasticsearch/gateway/DanglingIndicesState.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/gateway/Gateway.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java | 16
-rw-r--r--  core/src/main/java/org/elasticsearch/gateway/GatewayService.java | 46
-rw-r--r--  core/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/gateway/MetaStateService.java | 5
-rw-r--r--  core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java | 34
-rw-r--r--  core/src/main/java/org/elasticsearch/gateway/PriorityComparator.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java | 27
-rw-r--r--  core/src/main/java/org/elasticsearch/http/HttpServer.java | 185
-rw-r--r--  core/src/main/java/org/elasticsearch/http/HttpServerAdapter.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java | 53
-rw-r--r--  core/src/main/java/org/elasticsearch/http/netty/HttpRequestHandler.java | 10
-rw-r--r--  core/src/main/java/org/elasticsearch/http/netty/NettyHttpChannel.java | 24
-rw-r--r--  core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java | 101
-rw-r--r--  core/src/main/java/org/elasticsearch/index/Index.java | 45
-rw-r--r--  core/src/main/java/org/elasticsearch/index/IndexModule.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/IndexNotFoundException.java | 9
-rw-r--r--  core/src/main/java/org/elasticsearch/index/IndexService.java | 31
-rw-r--r--  core/src/main/java/org/elasticsearch/index/IndexSettings.java | 48
-rw-r--r--  core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java | 32
-rw-r--r--  core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java | 5
-rw-r--r--  core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java | 7
-rw-r--r--  core/src/main/java/org/elasticsearch/index/cache/query/index/IndexQueryCache.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/cache/query/none/NoneQueryCache.java | 1
-rw-r--r--  core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/engine/Engine.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java | 31
-rw-r--r--  core/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java | 72
-rw-r--r--  core/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/get/ShardGetService.java | 25
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java | 35
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java | 6
-rwxr-xr-x  core/src/main/java/org/elasticsearch/index/mapper/MapperService.java | 162
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java | 13
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java | 11
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java | 8
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java | 33
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java | 86
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java | 10
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java | 209
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java | 9
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java | 12
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/FuzzyQueryParser.java | 5
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java | 1
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java | 5
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java | 3
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/ParentIdQueryBuilder.java | 104
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/ParentIdQueryParser.java | 76
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java | 22
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java | 24
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java | 5
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java | 5
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/TemplateQueryBuilder.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java | 3
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/functionscore/random/RandomScoreFunctionBuilder.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java | 5
-rw-r--r--  core/src/main/java/org/elasticsearch/index/shard/IndexShard.java | 61
-rw-r--r--  core/src/main/java/org/elasticsearch/index/shard/ShardId.java | 30
-rw-r--r--  core/src/main/java/org/elasticsearch/index/shard/ShardPath.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java | 5
-rw-r--r--  core/src/main/java/org/elasticsearch/index/similarity/DFISimilarityProvider.java | 51
-rw-r--r--  core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java | 1
-rw-r--r--  core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/store/IndexStore.java | 45
-rw-r--r--  core/src/main/java/org/elasticsearch/index/store/Store.java | 15
-rw-r--r--  core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java | 15
-rw-r--r--  core/src/main/java/org/elasticsearch/index/translog/Translog.java | 3
-rw-r--r--  core/src/main/java/org/elasticsearch/index/translog/TranslogConfig.java | 7
-rw-r--r--  core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java | 12
-rw-r--r--  core/src/main/java/org/elasticsearch/indices/IndexCreationException.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/indices/IndicesService.java | 61
-rw-r--r--  core/src/main/java/org/elasticsearch/indices/InvalidIndexNameException.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/indices/TypeMissingException.java | 5
-rw-r--r--  core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java | 22
-rw-r--r--  core/src/main/java/org/elasticsearch/indices/cache/query/IndicesQueryCache.java | 30
-rw-r--r--  core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java | 19
-rw-r--r--  core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java | 35
-rw-r--r--  core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java | 15
-rw-r--r--  core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java | 15
-rw-r--r--  core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java | 1
-rw-r--r--  core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java | 96
-rw-r--r--  core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java | 8
-rw-r--r--  core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java | 25
-rw-r--r--  core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java | 15
-rw-r--r--  core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/IngestMetadata.java | 119
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/IngestService.java | 61
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/InternalTemplateService.java | 91
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java | 116
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java | 124
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/PipelineStore.java | 249
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java | 47
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/core/AbstractProcessor.java (renamed from core/src/test/java/org/elasticsearch/plugins/loading/classpath/InClassPathPlugin.java) | 19
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/core/AbstractProcessorFactory.java | 39
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java | 100
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/core/ConfigurationUtils.java | 165
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/core/IngestDocument.java | 573
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java | 137
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/core/PipelineFactoryError.java | 96
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/core/PipelineFactoryResult.java | 43
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/core/Processor.java | 61
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/core/TemplateService.java (renamed from core/src/main/java/org/elasticsearch/common/HasContextAndHeaders.java) | 21
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/core/ValueSource.java | 191
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java | 72
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java | 82
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/processor/ConfigurationPropertyException.java | 53
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java | 145
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/processor/DateFormat.java | 90
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java | 132
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java | 106
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java | 74
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/processor/FailProcessorException.java (renamed from core/src/main/java/org/elasticsearch/common/HasHeaders.java) | 23
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java | 89
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java | 82
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/processor/LowercaseProcessor.java | 58
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java | 73
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java | 86
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java | 81
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java | 82
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/processor/TrimProcessor.java | 56
-rw-r--r--  core/src/main/java/org/elasticsearch/ingest/processor/UppercaseProcessor.java | 58
-rw-r--r--  core/src/main/java/org/elasticsearch/monitor/MonitorService.java | 12
-rw-r--r--  core/src/main/java/org/elasticsearch/monitor/fs/FsService.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/monitor/jvm/JvmGcMonitorService.java (renamed from core/src/main/java/org/elasticsearch/monitor/jvm/JvmMonitorService.java) | 47
-rw-r--r--  core/src/main/java/org/elasticsearch/monitor/jvm/JvmService.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/monitor/os/OsProbe.java | 5
-rw-r--r--  core/src/main/java/org/elasticsearch/monitor/os/OsService.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/monitor/os/OsStats.java | 3
-rw-r--r--  core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/node/Node.java | 159
-rw-r--r--  core/src/main/java/org/elasticsearch/node/NodeModule.java | 51
-rw-r--r--  core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java | 13
-rw-r--r--  core/src/main/java/org/elasticsearch/node/service/NodeService.java | 28
-rw-r--r--  core/src/main/java/org/elasticsearch/percolator/MultiDocumentPercolatorIndex.java | 12
-rw-r--r--  core/src/main/java/org/elasticsearch/percolator/PercolateContext.java | 102
-rw-r--r--  core/src/main/java/org/elasticsearch/percolator/PercolateDocumentParser.java | 36
-rw-r--r--  core/src/main/java/org/elasticsearch/percolator/PercolatorService.java | 23
-rw-r--r--  core/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java | 7
-rw-r--r--  core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java | 401
-rw-r--r--  core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java | 56
-rw-r--r--  core/src/main/java/org/elasticsearch/plugins/PluginCli.java | 124
-rw-r--r--  core/src/main/java/org/elasticsearch/plugins/PluginInfo.java | 118
-rw-r--r--  core/src/main/java/org/elasticsearch/plugins/PluginManager.java | 688
-rw-r--r--  core/src/main/java/org/elasticsearch/plugins/PluginManagerCliParser.java | 256
-rw-r--r--  core/src/main/java/org/elasticsearch/plugins/PluginSecurity.java | 21
-rw-r--r--  core/src/main/java/org/elasticsearch/plugins/PluginsService.java | 52
-rw-r--r--  core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java | 77
-rw-r--r--  core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java | 7
-rw-r--r--  core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java | 23
-rw-r--r--  core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java | 52
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java | 46
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/RestController.java | 40
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/RestRequest.java | 3
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/cluster/health/RestClusterHealthAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/hotthreads/RestNodesHotThreadsAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/delete/RestDeleteRepositoryAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/get/RestGetRepositoriesAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/put/RestPutRepositoryAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/verify/RestVerifyRepositoryAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterUpdateSettingsAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/cluster/shards/RestClusterSearchShardsAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/create/RestCreateSnapshotAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/delete/RestDeleteSnapshotAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/get/RestGetSnapshotsAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/restore/RestRestoreSnapshotAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/status/RestSnapshotsStatusAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/cluster/state/RestClusterStateAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/cluster/stats/RestClusterStatsAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/cluster/tasks/RestPendingClusterTasksAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/RestIndicesAliasesAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/delete/RestIndexDeleteAliasesAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetAliasesAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetIndicesAliasesAction.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/head/RestAliasesExistAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/put/RestIndexPutAliasAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/cache/clear/RestClearIndicesCacheAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/close/RestCloseIndexAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/create/RestCreateIndexAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/delete/RestDeleteIndexAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/indices/RestIndicesExistsAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/types/RestTypesExistsAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestFlushAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/forcemerge/RestForceMergeAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetFieldMappingAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetMappingAction.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/put/RestPutMappingAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/open/RestOpenIndexAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/recovery/RestRecoveryAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/refresh/RestRefreshAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/segments/RestIndicesSegmentsAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestUpdateSettingsAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/shards/RestIndicesShardStoresAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/delete/RestDeleteIndexTemplateAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/get/RestGetIndexTemplateAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/head/RestHeadIndexTemplateAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/put/RestPutIndexTemplateAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/query/RestValidateQueryAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/template/RestRenderSearchTemplateAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java | 7
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/cat/AbstractCatAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java | 18
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/cat/RestSegmentsAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/fieldstats/RestFieldStatsAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/get/RestGetAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/get/RestHeadAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/get/RestMultiGetAction.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java | 5
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java | 47
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java | 47
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java | 55
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java | 52
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/percolate/RestMultiPercolateAction.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/percolate/RestPercolateAction.java | 8
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/script/RestDeleteIndexedScriptAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/script/RestGetIndexedScriptAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java | 8
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/template/RestPutSearchTemplateAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/termvectors/RestMultiTermVectorsAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/termvectors/RestTermVectorsAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/rest/support/RestUtils.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/script/AbstractExecutableScript.java | 5
-rw-r--r--  core/src/main/java/org/elasticsearch/script/ExecutableScript.java | 10
-rw-r--r--  core/src/main/java/org/elasticsearch/script/NativeScriptEngineService.java | 14
-rw-r--r--  core/src/main/java/org/elasticsearch/script/ScriptContext.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/script/ScriptEngineRegistry.java | 93
-rw-r--r--  core/src/main/java/org/elasticsearch/script/ScriptEngineService.java | 7
-rw-r--r--  core/src/main/java/org/elasticsearch/script/ScriptMode.java | 47
-rw-r--r--  core/src/main/java/org/elasticsearch/script/ScriptModes.java | 127
-rw-r--r--  core/src/main/java/org/elasticsearch/script/ScriptModule.java | 39
-rw-r--r--  core/src/main/java/org/elasticsearch/script/ScriptService.java | 95
-rw-r--r--  core/src/main/java/org/elasticsearch/script/ScriptSettings.java | 165
-rw-r--r--  core/src/main/java/org/elasticsearch/search/SearchModule.java | 27
-rw-r--r--  core/src/main/java/org/elasticsearch/search/SearchService.java | 97
-rw-r--r--  core/src/main/java/org/elasticsearch/search/SearchShardTarget.java | 5
-rw-r--r--  core/src/main/java/org/elasticsearch/search/action/SearchServiceTransportAction.java | 17
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java | 9
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramParser.java | 5
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramParser.java | 7
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingParser.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java | 3
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerParser.java | 7
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java | 10
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java | 3
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractTermsParametersParser.java | 8
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/metrics/ValuesSourceMetricsAggregationBuilder.java | 1
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregator.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregatorFactory.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java | 3
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregator.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregator.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorPipelineAggregator.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java | 9
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java | 7
-rw-r--r--  core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java | 77
-rw-r--r--  core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java | 7
-rw-r--r--  core/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java | 12
-rw-r--r--  core/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java | 30
-rw-r--r--  core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsFetchSubPhase.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsParseElement.java | 7
-rw-r--r--  core/src/main/java/org/elasticsearch/search/fetch/parent/ParentFieldSubFetchPhase.java | 88
-rw-r--r--  core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java | 60
-rw-r--r--  core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java | 18
-rw-r--r--  core/src/main/java/org/elasticsearch/search/internal/InternalScrollSearchRequest.java | 1
-rw-r--r--  core/src/main/java/org/elasticsearch/search/internal/SearchContext.java | 22
-rw-r--r--  core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java | 3
-rw-r--r--  core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java | 1
-rw-r--r--  core/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java | 11
-rw-r--r--  core/src/main/java/org/elasticsearch/search/profile/CollectorResult.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/search/query/QueryPhase.java | 12
-rw-r--r--  core/src/main/java/org/elasticsearch/search/query/QuerySearchRequest.java | 1
-rw-r--r--  core/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java | 20
-rw-r--r--  core/src/main/java/org/elasticsearch/search/rescore/QueryRescorerBuilder.java | 242
-rw-r--r--  core/src/main/java/org/elasticsearch/search/rescore/RescoreBuilder.java | 259
-rw-r--r--  core/src/main/java/org/elasticsearch/search/rescore/RescoreParseElement.java | 11
-rw-r--r--  core/src/main/java/org/elasticsearch/search/rescore/Rescorer.java | 7
-rw-r--r--  core/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java | 312
-rw-r--r--  core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java | 81
-rw-r--r--  core/src/main/java/org/elasticsearch/search/sort/SortBuilders.java | 14
-rw-r--r--  core/src/main/java/org/elasticsearch/search/sort/SortOrder.java | 34
-rw-r--r--  core/src/main/java/org/elasticsearch/search/suggest/SuggestContextParser.java | 5
-rw-r--r--  core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java | 7
-rw-r--r--  core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java | 15
-rw-r--r--  core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java | 46
-rw-r--r--  core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java | 39
-rw-r--r--  core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java | 493
-rw-r--r--  core/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java | 10
-rw-r--r--  core/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpoatingScorer.java | 14
-rw-r--r--  core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java | 87
-rw-r--r--  core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java | 526
-rw-r--r--  core/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestParser.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/snapshots/RestoreService.java | 20
-rw-r--r--  core/src/main/java/org/elasticsearch/snapshots/SnapshotShardFailure.java | 44
-rw-r--r--  core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java | 18
-rw-r--r--  core/src/main/java/org/elasticsearch/tasks/ChildTask.java | 57
-rw-r--r--  core/src/main/java/org/elasticsearch/tasks/Task.java | 30
-rw-r--r--  core/src/main/java/org/elasticsearch/tasks/TaskManager.java | 7
-rw-r--r--  core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java | 42
-rw-r--r--  core/src/main/java/org/elasticsearch/transport/Transport.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/transport/TransportMessage.java | 26
-rw-r--r--  core/src/main/java/org/elasticsearch/transport/TransportRequest.java | 12
-rw-r--r--  core/src/main/java/org/elasticsearch/transport/TransportResponse.java | 17
-rw-r--r--  core/src/main/java/org/elasticsearch/transport/TransportService.java | 45
-rw-r--r--  core/src/main/java/org/elasticsearch/transport/TransportSettings.java | 45
-rw-r--r--  core/src/main/java/org/elasticsearch/transport/local/LocalTransport.java | 31
-rw-r--r--  core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java | 13
-rw-r--r--  core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java | 7
-rw-r--r--  core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java | 219
-rw-r--r--  core/src/main/java/org/elasticsearch/tribe/TribeService.java | 100
-rw-r--r--  core/src/main/resources/org/elasticsearch/bootstrap/security.policy | 2
-rw-r--r--  core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy | 2
-rw-r--r--  core/src/main/resources/org/elasticsearch/plugins/plugin-install.help | 17
-rw-r--r--  core/src/test/java/org/elasticsearch/ESExceptionTests.java | 46
-rw-r--r--  core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java | 30
-rw-r--r--  core/src/test/java/org/elasticsearch/action/ListenerActionIT.java | 2
-rw-r--r--  core/src/test/java/org/elasticsearch/action/admin/HotThreadsIT.java | 2
-rw-r--r--  core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/RecordingTaskManagerListener.java | 81
-rw-r--r--  core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java | 258
-rw-r--r--  core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java | 61
-rw-r--r--  core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java | 19
-rw-r--r--  core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java | 4
-rw-r--r--  core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java | 2
-rw-r--r--  core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java | 2
-rw-r--r--  core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java | 11
-rw-r--r--  core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java | 12
-rw-r--r--  core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java | 3
-rw-r--r--  core/src/test/java/org/elasticsearch/action/bulk/BulkShardRequestTests.java | 4
-rw-r--r--  core/src/test/java/org/elasticsearch/action/ingest/BulkRequestModifierTests.java | 165
-rw-r--r--  core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java | 249
-rw-r--r--  core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java | 251
-rw-r--r--  core/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java | 57
-rw-r--r--  core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java | 206
-rw-r--r--  core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java | 181
-rw-r--r--  core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java | 117
-rw-r--r--  core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java | 58
-rw-r--r--  core/src/test/java/org/elasticsearch/action/ingest/WritePipelineResponseTests.java | 61
-rw-r--r--  core/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java | 114
-rw-r--r--  core/src/test/java/org/elasticsearch/action/search/SearchRequestBuilderTests.java | 3
-rw-r--r--  core/src/test/java/org/elasticsearch/action/support/AutoCreateIndexTests.java | 147
-rw-r--r--  core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java | 59
-rw-r--r--  core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java | 15
-rw-r--r--  core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java | 4
-rw-r--r--  core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java | 9
-rw-r--r--  core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java | 8
-rw-r--r--  core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java | 54
-rw-r--r--  core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java | 6
-rw-r--r--  core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java | 69
-rw-r--r--  core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java | 5
-rw-r--r--  core/src/test/java/org/elasticsearch/broadcast/BroadcastActionsIT.java | 2
-rw-r--r--  core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java | 24
-rw-r--r--  core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java | 22
-rw-r--r--  core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java | 3
-rw-r--r--core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java7
-rw-r--r--core/src/test/java/org/elasticsearch/bwcompat/StaticIndexBackwardCompatibilityIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/bwcompat/TransportClientBackwardsCompatibilityIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/bwcompat/UnicastBackwardsCompatibilityIT.java5
-rw-r--r--core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java103
-rw-r--r--core/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java6
-rw-r--r--core/src/test/java/org/elasticsearch/client/node/NodeClientIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java13
-rw-r--r--core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java21
-rw-r--r--core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java3
-rw-r--r--core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java12
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java5
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java6
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java7
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java5
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java10
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java20
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java8
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/SimpleClusterStateIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/SimpleDataNodesIT.java7
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java21
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/UpdateSettingsValidationIT.java7
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/ack/AckIT.java25
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java241
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java43
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java22
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java6
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/allocation/ShardsAllocatorModuleIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/health/RoutingTableGenerator.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java32
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java93
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java3
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetaDataTests.java34
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java11
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java14
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java6
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java16
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java16
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java101
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationPriorityTests.java16
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/CatAllocationTestCase.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java9
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java7
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java16
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java12
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/StartedShardsRoutingTests.java10
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java6
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java75
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java65
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java6
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java54
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java8
-rw-r--r--core/src/test/java/org/elasticsearch/common/blobstore/BlobStoreTests.java139
-rw-r--r--core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreContainerTests.java38
-rw-r--r--core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreTests.java38
-rw-r--r--core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java1
-rw-r--r--core/src/test/java/org/elasticsearch/common/io/FileSystemUtilsTests.java85
-rw-r--r--core/src/test/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerTests.java5
-rw-r--r--core/src/test/java/org/elasticsearch/common/logging/log4j/LoggingConfigurationTests.java28
-rw-r--r--core/src/test/java/org/elasticsearch/common/lucene/ShardCoreKeyMapTests.java8
-rw-r--r--core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/MoreLikeThisQueryTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/common/lucene/store/InputStreamIndexInputTests.java34
-rw-r--r--core/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java34
-rw-r--r--core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java8
-rw-r--r--core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java74
-rw-r--r--core/src/test/java/org/elasticsearch/common/settings/SettingTests.java69
-rw-r--r--core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java101
-rw-r--r--core/src/test/java/org/elasticsearch/common/unit/ByteSizeUnitTests.java54
-rw-r--r--core/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java6
-rw-r--r--core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java21
-rw-r--r--core/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/common/unit/SizeValueTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/common/util/BytesRefHashTests.java9
-rw-r--r--core/src/test/java/org/elasticsearch/common/util/LongHashTests.java9
-rw-r--r--core/src/test/java/org/elasticsearch/common/util/LongObjectHashMapTests.java10
-rw-r--r--core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java297
-rw-r--r--core/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java73
-rw-r--r--core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java16
-rw-r--r--core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java238
-rw-r--r--core/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/deps/joda/SimpleJodaTests.java34
-rw-r--r--core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java15
-rw-r--r--core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java28
-rw-r--r--core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java8
-rw-r--r--core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java19
-rw-r--r--core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java3
-rw-r--r--core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java5
-rw-r--r--core/src/test/java/org/elasticsearch/document/DocumentActionsIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/env/EnvironmentTests.java6
-rw-r--r--core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java138
-rw-r--r--core/src/test/java/org/elasticsearch/fieldstats/FieldStatsIntegrationIT.java64
-rw-r--r--core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java78
-rw-r--r--core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/gateway/DanglingIndicesStateTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java19
-rw-r--r--core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java100
-rw-r--r--core/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java12
-rw-r--r--core/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java6
-rw-r--r--core/src/test/java/org/elasticsearch/gateway/RecoverAfterNodesIT.java17
-rw-r--r--core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java15
-rw-r--r--core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java13
-rw-r--r--core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java4
-rw-r--r--core/src/test/java/org/elasticsearch/get/GetActionIT.java24
-rw-r--r--core/src/test/java/org/elasticsearch/http/netty/HttpPublishPortIT.java7
-rw-r--r--core/src/test/java/org/elasticsearch/http/netty/NettyHttpChannelTests.java11
-rw-r--r--core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java9
-rw-r--r--core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningDisabledIT.java3
-rw-r--r--core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningEnabledIT.java3
-rw-r--r--core/src/test/java/org/elasticsearch/index/IndexModuleTests.java67
-rw-r--r--core/src/test/java/org/elasticsearch/index/IndexServiceTests.java44
-rw-r--r--core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java5
-rw-r--r--core/src/test/java/org/elasticsearch/index/MergePolicySettingsTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/index/VersionTypeTests.java28
-rw-r--r--core/src/test/java/org/elasticsearch/index/analysis/ASCIIFoldingTokenFilterFactoryTests.java5
-rw-r--r--core/src/test/java/org/elasticsearch/index/analysis/AnalysisFactoryTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java23
-rw-r--r--core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java32
-rw-r--r--core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java5
-rw-r--r--core/src/test/java/org/elasticsearch/index/analysis/AnalyzerBackwardsCompatTests.java3
-rw-r--r--core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java10
-rw-r--r--core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java10
-rw-r--r--core/src/test/java/org/elasticsearch/index/analysis/HunspellTokenFilterFactoryTests.java9
-rw-r--r--core/src/test/java/org/elasticsearch/index/analysis/KeepFilterFactoryTests.java5
-rw-r--r--core/src/test/java/org/elasticsearch/index/analysis/KeepTypesFilterFactoryTests.java3
-rw-r--r--core/src/test/java/org/elasticsearch/index/analysis/LimitTokenCountFilterFactoryTests.java9
-rw-r--r--core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java10
-rw-r--r--core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java6
-rw-r--r--core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java5
-rw-r--r--core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java9
-rw-r--r--core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java19
-rw-r--r--core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java25
-rw-r--r--core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java15
-rw-r--r--core/src/test/java/org/elasticsearch/index/codec/CodecTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java150
-rw-r--r--core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java50
-rw-r--r--core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java2
-rw-r--r--core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java4
-rw-r--r--core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java35
-rw-r--r--core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/index/fielddata/DisabledFieldDataFormatTests.java115
-rw-r--r--core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/index/fielddata/FieldDataLoadingIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java28
-rw-r--r--core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java6
-rw-r--r--core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java46
-rw-r--r--core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java24
-rw-r--r--core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java81
-rw-r--r--core/src/test/java/org/elasticsearch/index/mapper/core/CompletionFieldTypeTests.java6
-rw-r--r--core/src/test/java/org/elasticsearch/index/mapper/core/DateFieldTypeTests.java6
-rw-r--r--core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java12
-rw-r--r--core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldTypeTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldTypeTests.java12
-rw-r--r--core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldTypeTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/index/mapper/internal/ParentFieldMapperTests.java24
-rw-r--r--core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java24
-rw-r--r--core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationIT.java16
-rw-r--r--core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java116
-rw-r--r--core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java82
-rw-r--r--core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingOnClusterIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java26
-rw-r--r--core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java14
-rw-r--r--core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java14
-rw-r--r--core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java21
-rw-r--r--core/src/test/java/org/elasticsearch/index/query/ParentIdQueryBuilderTests.java115
-rw-r--r--core/src/test/java/org/elasticsearch/index/query/QueryDSLDocumentationTests.java1
-rw-r--r--core/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java83
-rw-r--r--core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/index/query/plugin/CustomQueryParserIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java3
-rw-r--r--core/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java5
-rw-r--r--core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java10
-rw-r--r--core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java84
-rw-r--r--core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java10
-rw-r--r--core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java27
-rw-r--r--core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java23
-rw-r--r--core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java3
-rw-r--r--core/src/test/java/org/elasticsearch/index/store/ExceptionRetryIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java27
-rw-r--r--core/src/test/java/org/elasticsearch/index/store/StoreTests.java46
-rw-r--r--core/src/test/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java12
-rw-r--r--core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java57
-rw-r--r--core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java6
-rw-r--r--core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java12
-rw-r--r--core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceIT.java21
-rw-r--r--core/src/test/java/org/elasticsearch/indices/cache/query/IndicesQueryCacheTests.java16
-rw-r--r--core/src/test/java/org/elasticsearch/indices/cache/query/IndicesRequestCacheIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java9
-rw-r--r--core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/indices/mapping/DedicatedMasterGetFieldMappingIT.java3
-rw-r--r--core/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java6
-rw-r--r--core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java15
-rw-r--r--core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStateTests.java32
-rw-r--r--core/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java5
-rw-r--r--core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java204
-rw-r--r--core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java39
-rw-r--r--core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java17
-rw-r--r--core/src/test/java/org/elasticsearch/indices/template/IndexTemplateBlocksIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/indices/template/IndexTemplateFilteringIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java61
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java265
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java64
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java366
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java221
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java60
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/core/CompoundProcessorTests.java120
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/core/ConfigurationUtilsTests.java71
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/core/IngestDocumentTests.java999
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java114
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/core/ValueSourceTests.java72
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java87
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorFactoryTests.java95
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorTests.java209
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorFactoryTests.java86
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorTests.java268
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/DateFormatTests.java83
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java190
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorTests.java147
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorFactoryTests.java56
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorTests.java75
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/FailProcessorFactoryTests.java63
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/FailProcessorTests.java43
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorFactoryTests.java86
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorTests.java79
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorFactoryTests.java69
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorTests.java111
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorFactoryTests.java54
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorTests.java (renamed from core/src/main/java/org/elasticsearch/plugins/SitePlugin.java)21
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorFactoryTests.java63
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorTests.java54
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorFactoryTests.java69
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java173
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/SetProcessorFactoryTests.java89
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/SetProcessorTests.java83
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorFactoryTests.java69
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java97
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorFactoryTests.java54
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorTests.java50
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorFactoryTests.java54
-rw-r--r--core/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorTests.java (renamed from core/src/main/java/org/elasticsearch/index/mapper/DocumentTypeListener.java)22
-rw-r--r--core/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceSettingsTests.java113
-rw-r--r--core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java24
-rw-r--r--core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java12
-rw-r--r--core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsDisabledIT.java8
-rw-r--r--core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsEnabledIT.java3
-rw-r--r--core/src/test/java/org/elasticsearch/percolator/ConcurrentPercolatorIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java16
-rw-r--r--core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java16
-rw-r--r--core/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityIT.java3
-rw-r--r--core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java284
-rw-r--r--core/src/test/java/org/elasticsearch/percolator/PercolatorServiceTests.java24
-rw-r--r--core/src/test/java/org/elasticsearch/percolator/RecoveryPercolatorIT.java20
-rw-r--r--core/src/test/java/org/elasticsearch/percolator/TTLPercolatorIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/plugins/PluginCliTests.java (renamed from core/src/test/java/org/elasticsearch/plugins/PluginManagerCliTests.java)15
-rw-r--r--core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java74
-rw-r--r--core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java6
-rw-r--r--core/src/test/java/org/elasticsearch/plugins/SitePluginIT.java131
-rw-r--r--core/src/test/java/org/elasticsearch/plugins/SitePluginRelativePathConfigIT.java88
-rw-r--r--core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderRestAction.java2
-rw-r--r--core/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java8
-rw-r--r--core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/recovery/RelocationIT.java14
-rw-r--r--core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java5
-rw-r--r--core/src/test/java/org/elasticsearch/rest/CorsRegexDefaultIT.java3
-rw-r--r--core/src/test/java/org/elasticsearch/rest/CorsRegexIT.java16
-rw-r--r--core/src/test/java/org/elasticsearch/rest/HeadersAndContextCopyClientTests.java426
-rw-r--r--core/src/test/java/org/elasticsearch/rest/RestControllerTests.java99
-rw-r--r--core/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java5
-rw-r--r--core/src/test/java/org/elasticsearch/rest/RestRequestTests.java107
-rw-r--r--core/src/test/java/org/elasticsearch/routing/AliasResolveRoutingIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/routing/AliasRoutingIT.java84
-rw-r--r--core/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java32
-rw-r--r--core/src/test/java/org/elasticsearch/script/FileScriptTests.java27
-rw-r--r--core/src/test/java/org/elasticsearch/script/NativeScriptTests.java26
-rw-r--r--core/src/test/java/org/elasticsearch/script/ScriptContextTests.java33
-rw-r--r--core/src/test/java/org/elasticsearch/script/ScriptFieldIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/script/ScriptModesTests.java48
-rw-r--r--core/src/test/java/org/elasticsearch/script/ScriptParameterParserTests.java8
-rw-r--r--core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java162
-rw-r--r--core/src/test/java/org/elasticsearch/script/ScriptSettingsTests.java144
-rw-r--r--core/src/test/java/org/elasticsearch/search/SearchWithRejectionsIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/search/StressSearchServiceReaperIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/AggregationsBinaryIT.java8
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/CombiIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java52
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java168
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java16
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java6
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java6
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java48
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/bucket/MissingIT.java6
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java90
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/bucket/ParentIdAggIT.java8
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java182
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java10
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsIT.java90
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java4
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsDocCountErrorIT.java26
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java7
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java25
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java2
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java49
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java44
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java68
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java38
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java38
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java8
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java28
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffIT.java6
-rw-r--r--core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java68
-rw-r--r--core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java1
-rw-r--r--core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java26
-rw-r--r--core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java72
-rw-r--r--core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java294
-rw-r--r--core/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingIT.java14
-rw-r--r--core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreBackwardCompatibilityIT.java2
-rw-r--r--core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java22
-rw-r--r--core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java44
-rw-r--r--core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java10
-rw-r--r--core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java16
-rw-r--r--core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java91
-rw-r--r--core/src/test/java/org/elasticsearch/search/indicesboost/SimpleIndicesBoostSearchIT.java8
-rw-r--r--core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java105
-rw-r--r--core/src/test/java/org/elasticsearch/search/internal/DefaultSearchContextTests.java72
-rw-r--r--core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java3
-rw-r--r--core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java29
-rw-r--r--core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java26
-rw-r--r--core/src/test/java/org/elasticsearch/search/msearch/SimpleMultiSearchIT.java6
-rw-r--r--core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java104
-rw-r--r--core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java24
-rw-r--r--core/src/test/java/org/elasticsearch/search/profile/RandomQueryGenerator.java3
-rw-r--r--core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java30
-rw-r--r--core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java457
-rw-r--r--core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java68
-rw-r--r--core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java218
-rw-r--r--core/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java54
-rw-r--r--core/src/test/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java252
-rw-r--r--core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java332
-rw-r--r--core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java36
-rw-r--r--core/src/test/java/org/elasticsearch/search/sort/SortOrderTests.java52
-rw-r--r--core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java12
-rw-r--r--core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java2
-rw-r--r--core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java323
-rw-r--r--core/src/test/java/org/elasticsearch/search/suggest/phrase/LaplaceModelTests.java49
-rw-r--r--core/src/test/java/org/elasticsearch/search/suggest/phrase/LinearInterpolationModelTests.java69
-rw-r--r--core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java196
-rw-r--r--core/src/test/java/org/elasticsearch/search/suggest/phrase/StupidBackoffModelTests.java49
-rw-r--r--core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java6
-rw-r--r--core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java9
-rw-r--r--core/src/test/java/org/elasticsearch/snapshots/FsBlobStoreRepositoryIT.java37
-rw-r--r--core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java11
-rw-r--r--core/src/test/java/org/elasticsearch/snapshots/mockstore/BlobStoreWrapper.java2
-rw-r--r--core/src/test/java/org/elasticsearch/test/ESBlobStoreContainerTestCase.java115
-rw-r--r--core/src/test/java/org/elasticsearch/test/ESBlobStoreRepositoryIntegTestCase.java197
-rw-r--r--core/src/test/java/org/elasticsearch/test/ESBlobStoreTestCase.java82
-rw-r--r--core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java5
-rw-r--r--core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java2
-rw-r--r--core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java13
-rw-r--r--core/src/test/java/org/elasticsearch/timestamp/SimpleTimestampIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java60
-rw-r--r--core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java379
-rw-r--r--core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java8
-rw-r--r--core/src/test/java/org/elasticsearch/transport/TransportMessageTests.java92
-rw-r--r--core/src/test/java/org/elasticsearch/transport/netty/NettyScheduledPingTests.java19
-rw-r--r--core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java17
-rw-r--r--core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java6
-rw-r--r--core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortTests.java12
-rw-r--r--core/src/test/java/org/elasticsearch/transport/netty/NettyTransportPublishAddressIT.java3
-rw-r--r--core/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java3
-rw-r--r--core/src/test/java/org/elasticsearch/tribe/TribeIT.java30
-rw-r--r--core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java10
-rw-r--r--core/src/test/java/org/elasticsearch/update/UpdateIT.java91
-rw-r--r--core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java5
-rw-r--r--core/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java60
-rw-r--r--core/src/test/java/org/elasticsearch/watcher/ResourceWatcherServiceTests.java2
-rw-r--r--core/src/test/resources/org/elasticsearch/index/mapper/all/mapping.json4
-rw-r--r--core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_boost_omit_positions_on_all.json4
-rw-r--r--core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_offsets_on_all.json4
-rw-r--r--core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_omit_positions_on_all.json4
-rw-r--r--core/src/test/resources/org/elasticsearch/index/mapper/all/noboost-mapping.json4
-rw-r--r--core/src/test/resources/org/elasticsearch/index/mapper/all/store-mapping.json6
-rw-r--r--core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/genericstore/test-mapping.json2
-rw-r--r--core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/test-mapping.json6
-rw-r--r--core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/simple/test-mapping.json4
-rw-r--r--core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json2
-rw-r--r--core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json6
-rw-r--r--core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json8
-rw-r--r--core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping4.json4
-rw-r--r--core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json4
-rw-r--r--core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json6
-rw-r--r--core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json2
-rw-r--r--core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-field-type-no-default-field.json4
-rw-r--r--core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-fields.json14
-rw-r--r--core/src/test/resources/org/elasticsearch/index/mapper/simple/test-mapping.json4
-rw-r--r--core/src/test/resources/org/elasticsearch/plugins/loading/classpath/es-plugin-test.properties19
1092 files changed, 29540 insertions, 12770 deletions
diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchException.java b/core/src/main/java/org/elasticsearch/ElasticsearchException.java
index 4a35bcbcfb..e6dc7deff2 100644
--- a/core/src/main/java/org/elasticsearch/ElasticsearchException.java
+++ b/core/src/main/java/org/elasticsearch/ElasticsearchException.java
@@ -39,6 +39,8 @@ import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_UUID_NA_VALUE;
+
/**
* A base class for all elasticsearch exceptions.
*/
@@ -49,6 +51,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
public static final boolean REST_EXCEPTION_SKIP_STACK_TRACE_DEFAULT = true;
public static final boolean REST_EXCEPTION_SKIP_CAUSE_DEFAULT = false;
private static final String INDEX_HEADER_KEY = "es.index";
+ private static final String INDEX_HEADER_KEY_UUID = "es.index_uuid";
private static final String SHARD_HEADER_KEY = "es.shard";
private static final String RESOURCE_HEADER_TYPE_KEY = "es.resource.type";
private static final String RESOURCE_HEADER_ID_KEY = "es.resource.id";
@@ -70,7 +73,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
* The message can be parameterized using <code>{}</code> as placeholders for the given
* arguments
*
- * @param msg the detail message
+ * @param msg the detail message
* @param args the arguments for the message
*/
public ElasticsearchException(String msg, Object... args) {
@@ -332,7 +335,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
private void xContentHeader(XContentBuilder builder, String key, List<String> values) throws IOException {
if (values != null && values.isEmpty() == false) {
- if(values.size() == 1) {
+ if (values.size() == 1) {
builder.field(key, values.get(0));
} else {
builder.startArray(key);
@@ -374,7 +377,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
if (cause != null && cause instanceof ElasticsearchException) {
return ((ElasticsearchException) cause).guessRootCauses();
}
- return new ElasticsearchException[] {this};
+ return new ElasticsearchException[]{this};
}
/**
@@ -387,7 +390,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
if (ex instanceof ElasticsearchException) {
return ((ElasticsearchException) ex).guessRootCauses();
}
- return new ElasticsearchException[] {new ElasticsearchException(t.getMessage(), t) {
+ return new ElasticsearchException[]{new ElasticsearchException(t.getMessage(), t) {
@Override
protected String getExceptionName() {
return getExceptionName(getCause());
@@ -414,7 +417,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
public String toString() {
StringBuilder builder = new StringBuilder();
if (headers.containsKey(INDEX_HEADER_KEY)) {
- builder.append('[').append(getIndex()).append(']');
+ builder.append(getIndex());
if (headers.containsKey(SHARD_HEADER_KEY)) {
builder.append('[').append(getShardId()).append(']');
}
@@ -435,7 +438,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
final String fileName = in.readOptionalString();
final String methodName = in.readString();
final int lineNumber = in.readVInt();
- stackTrace[i] = new StackTraceElement(declaringClasss,methodName, fileName, lineNumber);
+ stackTrace[i] = new StackTraceElement(declaringClasss, methodName, fileName, lineNumber);
}
throwable.setStackTrace(stackTrace);
@@ -631,10 +634,11 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
CLASS_TO_ELASTICSEARCH_EXCEPTION_HANDLE = Collections.unmodifiableMap(exceptions);
}
- public String getIndex() {
+ public Index getIndex() {
List<String> index = getHeader(INDEX_HEADER_KEY);
if (index != null && index.isEmpty() == false) {
- return index.get(0);
+ List<String> index_uuid = getHeader(INDEX_HEADER_KEY_UUID);
+ return new Index(index.get(0), index_uuid.get(0));
}
return null;
@@ -651,22 +655,28 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
public void setIndex(Index index) {
if (index != null) {
addHeader(INDEX_HEADER_KEY, index.getName());
+ addHeader(INDEX_HEADER_KEY_UUID, index.getUUID());
}
}
public void setIndex(String index) {
if (index != null) {
- addHeader(INDEX_HEADER_KEY, index);
+ setIndex(new Index(index, INDEX_UUID_NA_VALUE));
}
}
public void setShard(ShardId shardId) {
if (shardId != null) {
- addHeader(INDEX_HEADER_KEY, shardId.getIndex());
+ setIndex(shardId.getIndex());
addHeader(SHARD_HEADER_KEY, Integer.toString(shardId.id()));
}
}
+ public void setShard(String index, int shardId) {
+ setIndex(index);
+ addHeader(SHARD_HEADER_KEY, Integer.toString(shardId));
+ }
+
public void setResources(String type, String... id) {
assert type != null;
addHeader(RESOURCE_HEADER_ID_KEY, id);
@@ -691,7 +701,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
final ElasticsearchException[] rootCauses = ElasticsearchException.guessRootCauses(t);
builder.field("root_cause");
builder.startArray();
- for (ElasticsearchException rootCause : rootCauses){
+ for (ElasticsearchException rootCause : rootCauses) {
builder.startObject();
rootCause.toXContent(builder, new ToXContent.DelegatingMapParams(Collections.singletonMap(ElasticsearchException.REST_EXCEPTION_SKIP_CAUSE, "true"), params));
builder.endObject();
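Taken together, the ElasticsearchException hunks above move the exception from a bare index name to a name-plus-UUID pair: getIndex() now returns an Index, setIndex(String) falls back to INDEX_UUID_NA_VALUE, and setShard(ShardId) routes through setIndex(Index) so the es.index_uuid header is always populated alongside es.index. A minimal sketch of what a caller sees after this change, using only methods shown in the hunks (the message and shard values are illustrative):

    import org.elasticsearch.ElasticsearchException;
    import org.elasticsearch.index.Index;

    ElasticsearchException e = new ElasticsearchException("shard failed");
    e.setShard("twitter", 0);          // new String/int overload added in this hunk
    Index index = e.getIndex();        // previously returned String
    String name = index.getName();     // "twitter", from the es.index header
    String uuid = index.getUUID();     // INDEX_UUID_NA_VALUE when set from a bare name

The UUID is only meaningful when the exception was populated from an Index or ShardId carrying a real UUID; the String overload deliberately records the not-available sentinel.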
diff --git a/core/src/main/java/org/elasticsearch/ExceptionsHelper.java b/core/src/main/java/org/elasticsearch/ExceptionsHelper.java
index 555a172c0d..3842ab4e3b 100644
--- a/core/src/main/java/org/elasticsearch/ExceptionsHelper.java
+++ b/core/src/main/java/org/elasticsearch/ExceptionsHelper.java
@@ -26,6 +26,7 @@ import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.index.Index;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
@@ -243,7 +244,12 @@ public final class ExceptionsHelper {
public GroupBy(Throwable t) {
if (t instanceof ElasticsearchException) {
- index = ((ElasticsearchException) t).getIndex();
+ final Index index = ((ElasticsearchException) t).getIndex();
+ if (index != null) {
+ this.index = index.getName();
+ } else {
+ this.index = null;
+ }
} else {
index = null;
}
diff --git a/core/src/main/java/org/elasticsearch/action/ActionModule.java b/core/src/main/java/org/elasticsearch/action/ActionModule.java
index 67f256c6bd..39aa4b7a2b 100644
--- a/core/src/main/java/org/elasticsearch/action/ActionModule.java
+++ b/core/src/main/java/org/elasticsearch/action/ActionModule.java
@@ -149,6 +149,16 @@ import org.elasticsearch.action.indexedscripts.get.GetIndexedScriptAction;
import org.elasticsearch.action.indexedscripts.get.TransportGetIndexedScriptAction;
import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptAction;
import org.elasticsearch.action.indexedscripts.put.TransportPutIndexedScriptAction;
+import org.elasticsearch.action.ingest.IngestActionFilter;
+import org.elasticsearch.action.ingest.IngestProxyActionFilter;
+import org.elasticsearch.action.ingest.DeletePipelineAction;
+import org.elasticsearch.action.ingest.DeletePipelineTransportAction;
+import org.elasticsearch.action.ingest.GetPipelineAction;
+import org.elasticsearch.action.ingest.GetPipelineTransportAction;
+import org.elasticsearch.action.ingest.PutPipelineAction;
+import org.elasticsearch.action.ingest.PutPipelineTransportAction;
+import org.elasticsearch.action.ingest.SimulatePipelineAction;
+import org.elasticsearch.action.ingest.SimulatePipelineTransportAction;
import org.elasticsearch.action.percolate.MultiPercolateAction;
import org.elasticsearch.action.percolate.PercolateAction;
import org.elasticsearch.action.percolate.TransportMultiPercolateAction;
@@ -186,6 +196,8 @@ import org.elasticsearch.action.update.UpdateAction;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.multibindings.MapBinder;
import org.elasticsearch.common.inject.multibindings.Multibinder;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.node.NodeModule;
import java.util.ArrayList;
import java.util.HashMap;
@@ -210,13 +222,13 @@ public class ActionModule extends AbstractModule {
this.transportAction = transportAction;
this.supportTransportActions = supportTransportActions;
}
-
-
}
+ private final boolean ingestEnabled;
private final boolean proxy;
- public ActionModule(boolean proxy) {
+ public ActionModule(boolean ingestEnabled, boolean proxy) {
+ this.ingestEnabled = ingestEnabled;
this.proxy = proxy;
}
@@ -240,6 +252,13 @@ public class ActionModule extends AbstractModule {
@Override
protected void configure() {
+ if (proxy == false) {
+ if (ingestEnabled) {
+ registerFilter(IngestActionFilter.class);
+ } else {
+ registerFilter(IngestProxyActionFilter.class);
+ }
+ }
Multibinder<ActionFilter> actionFilterMultibinder = Multibinder.newSetBinder(binder(), ActionFilter.class);
for (Class<? extends ActionFilter> actionFilter : actionFilters) {
@@ -340,6 +359,11 @@ public class ActionModule extends AbstractModule {
registerAction(FieldStatsAction.INSTANCE, TransportFieldStatsTransportAction.class);
+ registerAction(PutPipelineAction.INSTANCE, PutPipelineTransportAction.class);
+ registerAction(GetPipelineAction.INSTANCE, GetPipelineTransportAction.class);
+ registerAction(DeletePipelineAction.INSTANCE, DeletePipelineTransportAction.class);
+ registerAction(SimulatePipelineAction.INSTANCE, SimulatePipelineTransportAction.class);
+
// register Name -> GenericAction Map that can be injected to instances.
MapBinder<String, GenericAction> actionsBinder
= MapBinder.newMapBinder(binder(), String.class, GenericAction.class);
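The ActionModule hunks register the new pipeline actions and decide, per node, which ingest filter intercepts requests: with ingest enabled the node registers IngestActionFilter and runs pipelines locally, otherwise it registers IngestProxyActionFilter to forward ingest work; the transport client (proxy == true) registers neither. A construction sketch grounded in the conditional above — how the ingestEnabled flag is derived from node settings is not shown in this diff (the NodeModule import hints at it) and is left out here:

    import org.elasticsearch.action.ActionModule;

    // Regular node (proxy == false): the ingest flag picks the filter.
    new ActionModule(true, false);   // registers IngestActionFilter (pipelines run here)
    new ActionModule(false, false);  // registers IngestProxyActionFilter (forwards to ingest nodes)
    // Transport client (proxy == true): neither ingest filter is registered.
    new ActionModule(false, true);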
diff --git a/core/src/main/java/org/elasticsearch/action/ActionRequest.java b/core/src/main/java/org/elasticsearch/action/ActionRequest.java
index 6c522d03f0..7955855bc0 100644
--- a/core/src/main/java/org/elasticsearch/action/ActionRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/ActionRequest.java
@@ -32,10 +32,6 @@ public abstract class ActionRequest<Request extends ActionRequest<Request>> exte
public ActionRequest() {
super();
- }
-
- protected ActionRequest(ActionRequest<?> request) {
- super(request);
// this does not set the listenerThreaded API, if needed, its up to the caller to set it
// since most times, we actually want it to not be threaded...
// this.listenerThreaded = request.listenerThreaded();
diff --git a/core/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java
index 9ad449f1f4..8cbc405daf 100644
--- a/core/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java
+++ b/core/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java
@@ -49,12 +49,6 @@ public abstract class ActionRequestBuilder<Request extends ActionRequest, Respon
return this.request;
}
- @SuppressWarnings("unchecked")
- public final RequestBuilder putHeader(String key, Object value) {
- request.putHeader(key, value);
- return (RequestBuilder) this;
- }
-
public ListenableActionFuture<Response> execute() {
PlainListenableActionFuture<Response> future = new PlainListenableActionFuture<>(threadPool);
execute(future);
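Dropping putHeader from ActionRequestBuilder removes the last per-request header plumbing from the builder API; judging from the diffstat above (ThreadContextTests, AbstractClientHeadersTestCase, ContextAndHeaderTransportIT), request headers now travel via the thread context instead. A sketch under that assumption — the ThreadContext API here is inferred from the accompanying tests rather than from this hunk, the threadPool reference is assumed in scope, and the header name is illustrative:

    import org.elasticsearch.common.util.concurrent.ThreadContext;

    ThreadContext threadContext = threadPool.getThreadContext();
    threadContext.putHeader("X-Opaque-Id", "my-request");
    try (ThreadContext.StoredContext ignored = threadContext.stashContext()) {
        // headers put before the stash are hidden in here and restored on close
    }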
diff --git a/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java b/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java
index c0389c6c19..76093aa9cb 100644
--- a/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java
+++ b/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java
@@ -53,7 +53,7 @@ public abstract class DocWriteResponse extends ReplicationResponse implements St
* The index the document was changed in.
*/
public String getIndex() {
- return this.shardId.getIndex();
+ return this.shardId.getIndexName();
}
@@ -119,7 +119,7 @@ public abstract class DocWriteResponse extends ReplicationResponse implements St
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
ReplicationResponse.ShardInfo shardInfo = getShardInfo();
- builder.field(Fields._INDEX, shardId.getIndex())
+ builder.field(Fields._INDEX, shardId.getIndexName())
.field(Fields._TYPE, type)
.field(Fields._ID, id)
.field(Fields._VERSION, version);
diff --git a/core/src/main/java/org/elasticsearch/action/ReplicationResponse.java b/core/src/main/java/org/elasticsearch/action/ReplicationResponse.java
index 4e358c8d42..8a81446d45 100644
--- a/core/src/main/java/org/elasticsearch/action/ReplicationResponse.java
+++ b/core/src/main/java/org/elasticsearch/action/ReplicationResponse.java
@@ -29,6 +29,7 @@ import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
+import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
@@ -169,15 +170,13 @@ public class ReplicationResponse extends ActionResponse {
public static class Failure implements ShardOperationFailedException, ToXContent {
- private String index;
- private int shardId;
+ private ShardId shardId;
private String nodeId;
private Throwable cause;
private RestStatus status;
private boolean primary;
- public Failure(String index, int shardId, @Nullable String nodeId, Throwable cause, RestStatus status, boolean primary) {
- this.index = index;
+ public Failure(ShardId shardId, @Nullable String nodeId, Throwable cause, RestStatus status, boolean primary) {
this.shardId = shardId;
this.nodeId = nodeId;
this.cause = cause;
@@ -193,7 +192,7 @@ public class ReplicationResponse extends ActionResponse {
*/
@Override
public String index() {
- return index;
+ return shardId.getIndexName();
}
/**
@@ -201,6 +200,10 @@ public class ReplicationResponse extends ActionResponse {
*/
@Override
public int shardId() {
+ return shardId.id();
+ }
+
+ public ShardId fullShardId() {
return shardId;
}
@@ -243,8 +246,7 @@ public class ReplicationResponse extends ActionResponse {
@Override
public void readFrom(StreamInput in) throws IOException {
- index = in.readString();
- shardId = in.readVInt();
+ shardId = ShardId.readShardId(in);
nodeId = in.readOptionalString();
cause = in.readThrowable();
status = RestStatus.readFrom(in);
@@ -253,8 +255,7 @@ public class ReplicationResponse extends ActionResponse {
@Override
public void writeTo(StreamOutput out) throws IOException {
- out.writeString(index);
- out.writeVInt(shardId);
+ shardId.writeTo(out);
out.writeOptionalString(nodeId);
out.writeThrowable(cause);
RestStatus.writeTo(out, status);
@@ -264,8 +265,8 @@ public class ReplicationResponse extends ActionResponse {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
- builder.field(Fields._INDEX, index);
- builder.field(Fields._SHARD, shardId);
+ builder.field(Fields._INDEX, shardId.getIndexName());
+ builder.field(Fields._SHARD, shardId.id());
builder.field(Fields._NODE, nodeId);
builder.field(Fields.REASON);
builder.startObject();
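Note: Failure now carries one ShardId instead of a parallel index string and shard integer, so index() and shardId() become derived views and fullShardId() exposes the whole identifier. A minimal sketch, assuming the ShardId(String, int) convenience constructor is still present on this branch; names and status are illustrative:

    ShardId shardId = new ShardId("logs", 2);
    Failure failure = new Failure(shardId, "node-1",
            new UnavailableShardsException(shardId, "primary shard is not active"),
            RestStatus.SERVICE_UNAVAILABLE, true);

    assert failure.index().equals("logs");   // derived from the ShardId
    assert failure.shardId() == 2;
    assert failure.fullShardId().equals(shardId);
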
diff --git a/core/src/main/java/org/elasticsearch/action/UnavailableShardsException.java b/core/src/main/java/org/elasticsearch/action/UnavailableShardsException.java
index 3534021314..e2cff9244e 100644
--- a/core/src/main/java/org/elasticsearch/action/UnavailableShardsException.java
+++ b/core/src/main/java/org/elasticsearch/action/UnavailableShardsException.java
@@ -36,13 +36,19 @@ public class UnavailableShardsException extends ElasticsearchException {
super(buildMessage(shardId, message), args);
}
+ public UnavailableShardsException(String index, int shardId, String message, Object... args) {
+ super(buildMessage(index, shardId, message), args);
+ }
+
private static String buildMessage(ShardId shardId, String message) {
if (shardId == null) {
return message;
}
- return "[" + shardId.index().name() + "][" + shardId.id() + "] " + message;
+ return buildMessage(shardId.getIndexName(), shardId.id(), message);
}
+ private static String buildMessage(String index, int shardId, String message) {
+ return "[" + index + "][" + shardId + "] " + message;
+ }
+
public UnavailableShardsException(StreamInput in) throws IOException {
super(in);
}
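Note: both entry points now funnel through the same string-based builder, so the rendered prefix is identical either way. For example (illustrative values, and again assuming the ShardId(String, int) constructor), both messages start with "[logs][2]":

    UnavailableShardsException a =
            new UnavailableShardsException(new ShardId("logs", 2), "primary shard is not active");
    UnavailableShardsException b =
            new UnavailableShardsException("logs", 2, "primary shard is not active");
    // a.getMessage() and b.getMessage() share the "[logs][2] ..." prefix
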
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java
index 79adbafa9b..b5c9577aff 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java
@@ -141,7 +141,7 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction<
}
assert waitFor >= 0;
- final ClusterStateObserver observer = new ClusterStateObserver(clusterService, logger);
+ final ClusterStateObserver observer = new ClusterStateObserver(clusterService, logger, threadPool.getThreadContext());
final ClusterState state = observer.observedState();
if (waitFor == 0 || request.timeout().millis() == 0) {
listener.onResponse(getResponse(request, state, waitFor, request.timeout().millis() == 0));
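Note: request headers no longer ride on each request object; they live in the thread's ThreadContext, which is why the observer now takes it explicitly. The pattern, sketched:

    ThreadContext threadContext = threadPool.getThreadContext();
    ClusterStateObserver observer =
            new ClusterStateObserver(clusterService, logger, threadContext);
    // listeners registered on the observer fire with the captured context restored
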
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java
index f26177a0ce..c743a1d2a9 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java
@@ -102,7 +102,7 @@ public class TransportNodesHotThreadsAction extends TransportNodesAction<NodesHo
}
NodeRequest(String nodeId, NodesHotThreadsRequest request) {
- super(request, nodeId);
+ super(nodeId);
this.request = request;
}
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java
index 3062148912..2a763910dd 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java
@@ -96,7 +96,7 @@ public class TransportNodesInfoAction extends TransportNodesAction<NodesInfoRequ
}
NodeInfoRequest(String nodeId, NodesInfoRequest request) {
- super(request, nodeId);
+ super(nodeId);
this.request = request;
}
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java
index 1660a6de4a..8460eb59ba 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java
@@ -96,7 +96,7 @@ public class TransportNodesStatsAction extends TransportNodesAction<NodesStatsRe
}
NodeStatsRequest(String nodeId, NodesStatsRequest request) {
- super(request, nodeId);
+ super(nodeId);
this.request = request;
}
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsGroup.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsGroup.java
index adc9e3c3f2..9f3ccac8f6 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsGroup.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsGroup.java
@@ -25,6 +25,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.index.Index;
import java.io.IOException;
@@ -32,7 +33,7 @@ import java.io.IOException;
*/
public class ClusterSearchShardsGroup implements Streamable, ToXContent {
- private String index;
+ private Index index;
private int shardId;
ShardRouting[] shards;
@@ -40,7 +41,7 @@ public class ClusterSearchShardsGroup implements Streamable, ToXContent {
}
- public ClusterSearchShardsGroup(String index, int shardId, ShardRouting[] shards) {
+ public ClusterSearchShardsGroup(Index index, int shardId, ShardRouting[] shards) {
this.index = index;
this.shardId = shardId;
this.shards = shards;
@@ -53,7 +54,7 @@ public class ClusterSearchShardsGroup implements Streamable, ToXContent {
}
public String getIndex() {
- return index;
+ return index.getName();
}
public int getShardId() {
@@ -66,7 +67,7 @@ public class ClusterSearchShardsGroup implements Streamable, ToXContent {
@Override
public void readFrom(StreamInput in) throws IOException {
- index = in.readString();
+ index = Index.readIndex(in);
shardId = in.readVInt();
shards = new ShardRouting[in.readVInt()];
for (int i = 0; i < shards.length; i++) {
@@ -76,7 +77,7 @@ public class ClusterSearchShardsGroup implements Streamable, ToXContent {
@Override
public void writeTo(StreamOutput out) throws IOException {
- out.writeString(index);
+ index.writeTo(out);
out.writeVInt(shardId);
out.writeVInt(shards.length);
for (ShardRouting shardRouting : shards) {
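Note: the group now serializes a full Index rather than a bare name, while getIndex() keeps returning the string so the REST response shape is unchanged. A minimal round-trip sketch; shardIt and shardRoutings stand in for the values built in the transport action below:

    Index index = shardIt.shardId().getIndex();   // now an Index, not a String
    ClusterSearchShardsGroup group =
            new ClusterSearchShardsGroup(index, shardIt.shardId().getId(), shardRoutings);

    BytesStreamOutput out = new BytesStreamOutput();
    group.writeTo(out);            // writes the Index via Index.writeTo
    // the receiving side rebuilds it with Index.readIndex(in) in readFrom
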
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java
index 9263386aa7..e6a9d98eb1 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java
@@ -33,6 +33,7 @@ import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.Index;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@@ -77,7 +78,7 @@ public class TransportClusterSearchShardsAction extends TransportMasterNodeReadA
ClusterSearchShardsGroup[] groupResponses = new ClusterSearchShardsGroup[groupShardsIterator.size()];
int currentGroup = 0;
for (ShardIterator shardIt : groupShardsIterator) {
- String index = shardIt.shardId().getIndex();
+ Index index = shardIt.shardId().getIndex();
int shardId = shardIt.shardId().getId();
ShardRouting[] shardRoutings = new ShardRouting[shardIt.size()];
int currentShard = 0;
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java
index 41d3f9c359..13c706531b 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java
@@ -45,7 +45,7 @@ import static org.elasticsearch.common.Strings.hasLength;
import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
-import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
/**
* Create snapshot request
@@ -379,14 +379,14 @@ public class CreateSnapshotRequest extends MasterNodeRequest<CreateSnapshotReque
throw new IllegalArgumentException("malformed indices section, should be an array of strings");
}
} else if (name.equals("partial")) {
- partial(nodeBooleanValue(entry.getValue()));
+ partial(lenientNodeBooleanValue(entry.getValue()));
} else if (name.equals("settings")) {
if (!(entry.getValue() instanceof Map)) {
throw new IllegalArgumentException("malformed settings section, should indices an inner object");
}
settings((Map<String, Object>) entry.getValue());
} else if (name.equals("include_global_state")) {
- includeGlobalState = nodeBooleanValue(entry.getValue());
+ includeGlobalState = lenientNodeBooleanValue(entry.getValue());
}
}
indicesOptions(IndicesOptions.fromMap((Map<String, Object>) source, IndicesOptions.lenientExpandOpen()));
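Note: the rename makes the lenient coercion explicit at each call site. As I read the lenient semantics (not verified against the helper's source), only a handful of values coerce to false and everything else is true:

    assert lenientNodeBooleanValue("off") == false;   // "false", "0", "no", "off" read as false
    assert lenientNodeBooleanValue("anything-else");  // any other value leniently reads as true
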
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java
index 0f79ceb0f9..59dad56531 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java
@@ -43,7 +43,7 @@ import static org.elasticsearch.common.Strings.hasLength;
import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
-import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
/**
* Restore snapshot request
@@ -498,16 +498,16 @@ public class RestoreSnapshotRequest extends MasterNodeRequest<RestoreSnapshotReq
throw new IllegalArgumentException("malformed indices section, should be an array of strings");
}
} else if (name.equals("partial")) {
- partial(nodeBooleanValue(entry.getValue()));
+ partial(lenientNodeBooleanValue(entry.getValue()));
} else if (name.equals("settings")) {
if (!(entry.getValue() instanceof Map)) {
throw new IllegalArgumentException("malformed settings section");
}
settings((Map<String, Object>) entry.getValue());
} else if (name.equals("include_global_state")) {
- includeGlobalState = nodeBooleanValue(entry.getValue());
+ includeGlobalState = lenientNodeBooleanValue(entry.getValue());
} else if (name.equals("include_aliases")) {
- includeAliases = nodeBooleanValue(entry.getValue());
+ includeAliases = lenientNodeBooleanValue(entry.getValue());
} else if (name.equals("rename_pattern")) {
if (entry.getValue() instanceof String) {
renamePattern((String) entry.getValue());
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java
index 44874a0227..45c3f89919 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java
@@ -20,7 +20,6 @@
package org.elasticsearch.action.admin.cluster.snapshots.status;
import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.nodes.BaseNodeRequest;
@@ -146,8 +145,8 @@ public class TransportNodesSnapshotsStatus extends TransportNodesAction<Transpor
public Request() {
}
- public Request(ActionRequest<?> request, String[] nodesIds) {
- super(request, nodesIds);
+ public Request(String[] nodesIds) {
+ super(nodesIds);
}
public Request snapshotIds(SnapshotId[] snapshotIds) {
@@ -214,7 +213,7 @@ public class TransportNodesSnapshotsStatus extends TransportNodesAction<Transpor
}
NodeRequest(String nodeId, TransportNodesSnapshotsStatus.Request request) {
- super(request, nodeId);
+ super(nodeId);
snapshotIds = request.snapshotIds;
}
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java
index b5bb259409..fc19dd986d 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java
@@ -110,7 +110,7 @@ public class TransportSnapshotsStatusAction extends TransportMasterNodeAction<Sn
snapshotIds[i] = currentSnapshots.get(i).snapshotId();
}
- TransportNodesSnapshotsStatus.Request nodesRequest = new TransportNodesSnapshotsStatus.Request(request, nodesIds.toArray(new String[nodesIds.size()]))
+ TransportNodesSnapshotsStatus.Request nodesRequest = new TransportNodesSnapshotsStatus.Request(nodesIds.toArray(new String[nodesIds.size()]))
.snapshotIds(snapshotIds).timeout(request.masterNodeTimeout());
transportNodesSnapshotsStatus.execute(nodesRequest, new ActionListener<TransportNodesSnapshotsStatus.NodesSnapshotStatus>() {
@Override
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java
index ff754be2a2..ec7017160c 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java
@@ -66,10 +66,10 @@ public class ClusterStatsIndices implements ToXContent, Streamable {
for (ClusterStatsNodeResponse r : nodeResponses) {
for (org.elasticsearch.action.admin.indices.stats.ShardStats shardStats : r.shardsStats()) {
- ShardStats indexShardStats = countsPerIndex.get(shardStats.getShardRouting().getIndex());
+ ShardStats indexShardStats = countsPerIndex.get(shardStats.getShardRouting().getIndexName());
if (indexShardStats == null) {
indexShardStats = new ShardStats();
- countsPerIndex.put(shardStats.getShardRouting().getIndex(), indexShardStats);
+ countsPerIndex.put(shardStats.getShardRouting().getIndexName(), indexShardStats);
}
indexShardStats.total++;
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java
index 3e4880d0a6..3fc2f4b631 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java
@@ -132,7 +132,7 @@ public class TransportClusterStatsAction extends TransportNodesAction<ClusterSta
}
ClusterStatsNodeRequest(String nodeId, ClusterStatsRequest request) {
- super(request, nodeId);
+ super(nodeId);
this.request = request;
}
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/validate/template/TransportRenderSearchTemplateAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/validate/template/TransportRenderSearchTemplateAction.java
index f2bfb18c43..0b4250ef74 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/validate/template/TransportRenderSearchTemplateAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/validate/template/TransportRenderSearchTemplateAction.java
@@ -57,7 +57,7 @@ public class TransportRenderSearchTemplateAction extends HandledTransportAction<
@Override
protected void doRun() throws Exception {
- ExecutableScript executable = scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, request, Collections.emptyMap());
+ ExecutableScript executable = scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, Collections.emptyMap());
BytesReference processedTemplate = (BytesReference) executable.run();
RenderSearchTemplateResponse response = new RenderSearchTemplateResponse();
response.source(processedTemplate);
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java
index 2308d7be6a..d633c3a09d 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java
@@ -81,7 +81,7 @@ public class TransportClearIndicesCacheAction extends TransportBroadcastByNodeAc
@Override
protected EmptyResult shardOperation(ClearIndicesCacheRequest request, ShardRouting shardRouting) {
- IndexService service = indicesService.indexService(shardRouting.getIndex());
+ IndexService service = indicesService.indexService(shardRouting.getIndexName());
if (service != null) {
IndexShard shard = service.getShardOrNull(shardRouting.id());
boolean clearedAtLeastOne = false;
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java
index ac0d5740d0..d1c753047a 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java
@@ -82,14 +82,6 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
}
/**
- * Constructs a new request to create an index that was triggered by a different request,
- * provided as an argument so that its headers and context can be copied to the new request.
- */
- public CreateIndexRequest(ActionRequest request) {
- super(request);
- }
-
- /**
* Constructs a new request to create an index with the specified name.
*/
public CreateIndexRequest(String index) {
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java
index 0152254d22..7dc55c08fa 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java
@@ -42,17 +42,6 @@ public class FlushRequest extends BroadcastRequest<FlushRequest> {
private boolean force = false;
private boolean waitIfOngoing = false;
- public FlushRequest() {
- }
-
- /**
- * Copy constructor that creates a new flush request that is a copy of the one provided as an argument.
- * The new request will inherit though headers and context from the original request that caused it.
- */
- public FlushRequest(ActionRequest originalRequest) {
- super(originalRequest);
- }
-
/**
* Constructs a new flush request against one or more indices. If nothing is provided, all indices will
* be flushed.
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushRequest.java
index ccf06be8bd..3a9ec89db5 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushRequest.java
@@ -31,7 +31,7 @@ public class ShardFlushRequest extends ReplicationRequest<ShardFlushRequest> {
private FlushRequest request = new FlushRequest();
public ShardFlushRequest(FlushRequest request, ShardId shardId) {
- super(request, shardId);
+ super(shardId);
this.request = request;
}
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushRequest.java
index 59719fe887..2a14d66a76 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushRequest.java
@@ -36,17 +36,6 @@ import java.util.Arrays;
*/
public class SyncedFlushRequest extends BroadcastRequest<SyncedFlushRequest> {
- public SyncedFlushRequest() {
- }
-
- /**
- * Copy constructor that creates a new synced flush request that is a copy of the one provided as an argument.
- * The new request will inherit though headers and context from the original request that caused it.
- */
- public SyncedFlushRequest(ActionRequest originalRequest) {
- super(originalRequest);
- }
-
/**
* Constructs a new synced flush request against one or more indices. If nothing is provided, all indices will
* be sync flushed.
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsIndexRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsIndexRequest.java
index 59844430b3..149cba9a33 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsIndexRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsIndexRequest.java
@@ -42,7 +42,6 @@ public class GetFieldMappingsIndexRequest extends SingleShardRequest<GetFieldMap
}
GetFieldMappingsIndexRequest(GetFieldMappingsRequest other, String index, boolean probablySingleFieldRequest) {
- super(other);
this.probablySingleFieldRequest = probablySingleFieldRequest;
this.includeDefaults = other.includeDefaults();
this.types = other.types();
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java
index e968269261..3d11df97de 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java
@@ -102,7 +102,7 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc
.filter(type -> Regex.simpleMatch(request.types(), type))
.collect(Collectors.toCollection(ArrayList::new));
if (typeIntersection.isEmpty()) {
- throw new TypeMissingException(shardId.index(), request.types());
+ throw new TypeMissingException(shardId.getIndex(), request.types());
}
}
@@ -115,7 +115,7 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc
}
}
- return new GetFieldMappingsResponse(singletonMap(shardId.getIndex(), typeMappings.immutableMap()));
+ return new GetFieldMappingsResponse(singletonMap(shardId.getIndexName(), typeMappings.immutableMap()));
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java
index 9798e189f7..8590fc210a 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java
@@ -75,7 +75,7 @@ public class TransportRecoveryAction extends TransportBroadcastByNodeAction<Reco
if (recoveryState == null) {
continue;
}
- String indexName = recoveryState.getShardId().getIndex();
+ String indexName = recoveryState.getShardId().getIndexName();
if (!shardResponses.containsKey(indexName)) {
shardResponses.put(indexName, new ArrayList<>());
}
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequest.java
index ab9186c987..b5bce3c85c 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequest.java
@@ -33,17 +33,6 @@ import org.elasticsearch.action.support.broadcast.BroadcastRequest;
*/
public class RefreshRequest extends BroadcastRequest<RefreshRequest> {
- public RefreshRequest() {
- }
-
- /**
- * Copy constructor that creates a new refresh request that is a copy of the one provided as an argument.
- * The new request will inherit though headers and context from the original request that caused it.
- */
- public RefreshRequest(ActionRequest originalRequest) {
- super(originalRequest);
- }
-
public RefreshRequest(String... indices) {
super(indices);
}
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java
index aaaf11e453..bd879e0eaa 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java
@@ -54,7 +54,7 @@ public class TransportRefreshAction extends TransportBroadcastReplicationAction<
@Override
protected BasicReplicationRequest newShardRequest(RefreshRequest request, ShardId shardId) {
- return new BasicReplicationRequest(request, shardId);
+ return new BasicReplicationRequest(shardId);
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java
index 979e51f83c..6bcf7d71c3 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java
@@ -62,17 +62,17 @@ public class IndicesSegmentResponse extends BroadcastResponse implements ToXCont
Set<String> indices = new HashSet<>();
for (ShardSegments shard : shards) {
- indices.add(shard.getShardRouting().getIndex());
+ indices.add(shard.getShardRouting().getIndexName());
}
- for (String index : indices) {
+ for (String indexName : indices) {
List<ShardSegments> shards = new ArrayList<>();
for (ShardSegments shard : this.shards) {
- if (shard.getShardRouting().index().equals(index)) {
+ if (shard.getShardRouting().getIndexName().equals(indexName)) {
shards.add(shard);
}
}
- indicesSegments.put(index, new IndexSegments(index, shards.toArray(new ShardSegments[shards.size()])));
+ indicesSegments.put(indexName, new IndexSegments(indexName, shards.toArray(new ShardSegments[shards.size()])));
}
this.indicesSegments = indicesSegments;
return indicesSegments;
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java
index 4a9f2c3b0c..fd45e22a17 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java
@@ -93,7 +93,7 @@ public class TransportIndicesSegmentsAction extends TransportBroadcastByNodeActi
@Override
protected ShardSegments shardOperation(IndicesSegmentsRequest request, ShardRouting shardRouting) {
- IndexService indexService = indicesService.indexServiceSafe(shardRouting.getIndex());
+ IndexService indexService = indicesService.indexServiceSafe(shardRouting.getIndexName());
IndexShard indexShard = indexService.getShard(shardRouting.id());
return new ShardSegments(indexShard.routingEntry(), indexShard.segments(request.verbose()));
}
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java
index 77f8608089..79c49e292a 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java
@@ -166,7 +166,7 @@ public class TransportIndicesShardStoresAction extends TransportMasterNodeReadAc
ImmutableOpenMap.Builder<String, ImmutableOpenIntMap<java.util.List<IndicesShardStoresResponse.StoreStatus>>> indicesStoreStatusesBuilder = ImmutableOpenMap.builder();
java.util.List<IndicesShardStoresResponse.Failure> failureBuilder = new ArrayList<>();
for (Response fetchResponse : fetchResponses) {
- ImmutableOpenIntMap<java.util.List<IndicesShardStoresResponse.StoreStatus>> indexStoreStatuses = indicesStoreStatusesBuilder.get(fetchResponse.shardId.getIndex());
+ ImmutableOpenIntMap<java.util.List<IndicesShardStoresResponse.StoreStatus>> indexStoreStatuses = indicesStoreStatusesBuilder.get(fetchResponse.shardId.getIndexName());
final ImmutableOpenIntMap.Builder<java.util.List<IndicesShardStoresResponse.StoreStatus>> indexShardsBuilder;
if (indexStoreStatuses == null) {
indexShardsBuilder = ImmutableOpenIntMap.builder();
@@ -179,15 +179,15 @@ public class TransportIndicesShardStoresAction extends TransportMasterNodeReadAc
}
for (NodeGatewayStartedShards response : fetchResponse.responses) {
if (shardExistsInNode(response)) {
- IndicesShardStoresResponse.StoreStatus.AllocationStatus allocationStatus = getAllocationStatus(fetchResponse.shardId.getIndex(), fetchResponse.shardId.id(), response.getNode());
+ IndicesShardStoresResponse.StoreStatus.AllocationStatus allocationStatus = getAllocationStatus(fetchResponse.shardId.getIndexName(), fetchResponse.shardId.id(), response.getNode());
storeStatuses.add(new IndicesShardStoresResponse.StoreStatus(response.getNode(), response.version(), response.allocationId(), allocationStatus, response.storeException()));
}
}
CollectionUtil.timSort(storeStatuses);
indexShardsBuilder.put(fetchResponse.shardId.id(), storeStatuses);
- indicesStoreStatusesBuilder.put(fetchResponse.shardId.getIndex(), indexShardsBuilder.build());
+ indicesStoreStatusesBuilder.put(fetchResponse.shardId.getIndexName(), indexShardsBuilder.build());
for (FailedNodeException failure : fetchResponse.failures) {
- failureBuilder.add(new IndicesShardStoresResponse.Failure(failure.nodeId(), fetchResponse.shardId.getIndex(), fetchResponse.shardId.id(), failure.getCause()));
+ failureBuilder.add(new IndicesShardStoresResponse.Failure(failure.nodeId(), fetchResponse.shardId.getIndexName(), fetchResponse.shardId.id(), failure.getCause()));
}
}
listener.onResponse(new IndicesShardStoresResponse(indicesStoreStatusesBuilder.build(), Collections.unmodifiableList(failureBuilder)));
@@ -196,7 +196,7 @@ public class TransportIndicesShardStoresAction extends TransportMasterNodeReadAc
private IndicesShardStoresResponse.StoreStatus.AllocationStatus getAllocationStatus(String index, int shardID, DiscoveryNode node) {
for (ShardRouting shardRouting : routingNodes.node(node.id())) {
ShardId shardId = shardRouting.shardId();
- if (shardId.id() == shardID && shardId.getIndex().equals(index)) {
+ if (shardId.id() == shardID && shardId.getIndexName().equals(index)) {
if (shardRouting.primary()) {
return IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY;
} else if (shardRouting.assignedToNode()) {
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java
index 55ad75eab3..11f91397c5 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java
@@ -89,17 +89,17 @@ public class IndicesStatsResponse extends BroadcastResponse implements ToXConten
Set<String> indices = new HashSet<>();
for (ShardStats shard : shards) {
- indices.add(shard.getShardRouting().getIndex());
+ indices.add(shard.getShardRouting().getIndexName());
}
- for (String index : indices) {
+ for (String indexName : indices) {
List<ShardStats> shards = new ArrayList<>();
for (ShardStats shard : this.shards) {
- if (shard.getShardRouting().index().equals(index)) {
+ if (shard.getShardRouting().getIndexName().equals(indexName)) {
shards.add(shard);
}
}
- indicesStats.put(index, new IndexStats(index, shards.toArray(new ShardStats[shards.size()])));
+ indicesStats.put(indexName, new IndexStats(indexName, shards.toArray(new ShardStats[shards.size()])));
}
this.indicesStats = indicesStats;
return indicesStats;
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java
index 5ace664135..6c3a64e04f 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java
@@ -59,14 +59,14 @@ public class UpgradeStatusResponse extends BroadcastResponse implements ToXConte
indices.add(shard.getIndex());
}
- for (String index : indices) {
+ for (String indexName : indices) {
List<ShardUpgradeStatus> shards = new ArrayList<>();
for (ShardUpgradeStatus shard : this.shards) {
- if (shard.getShardRouting().index().equals(index)) {
+ if (shard.getShardRouting().getIndexName().equals(indexName)) {
shards.add(shard);
}
}
- indicesUpgradeStats.put(index, new IndexUpgradeStatus(index, shards.toArray(new ShardUpgradeStatus[shards.size()])));
+ indicesUpgradeStats.put(indexName, new IndexUpgradeStatus(indexName, shards.toArray(new ShardUpgradeStatus[shards.size()])));
}
this.indicesUpgradeStatus = indicesUpgradeStats;
return indicesUpgradeStats;
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java
index 5e87e91b25..f3cf2da9fd 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java
@@ -41,6 +41,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@@ -75,7 +76,7 @@ public class TransportUpgradeAction extends TransportBroadcastByNodeAction<Upgra
Map<String, Tuple<Version, org.apache.lucene.util.Version>> versions = new HashMap<>();
for (ShardUpgradeResult result : shardUpgradeResults) {
successfulShards++;
- String index = result.getShardId().getIndex();
+ String index = result.getShardId().getIndex().getName();
if (result.primary()) {
Integer count = successfulPrimaryShards.get(index);
successfulPrimaryShards.put(index, count == null ? 1 : count + 1);
@@ -179,7 +180,7 @@ public class TransportUpgradeAction extends TransportBroadcastByNodeAction<Upgra
}
@Override
- protected void doExecute(UpgradeRequest request, final ActionListener<UpgradeResponse> listener) {
+ protected void doExecute(Task task, UpgradeRequest request, final ActionListener<UpgradeResponse> listener) {
ActionListener<UpgradeResponse> settingsUpdateListener = new ActionListener<UpgradeResponse>() {
@Override
public void onResponse(UpgradeResponse upgradeResponse) {
@@ -199,7 +200,7 @@ public class TransportUpgradeAction extends TransportBroadcastByNodeAction<Upgra
listener.onFailure(e);
}
};
- super.doExecute(request, settingsUpdateListener);
+ super.doExecute(task, request, settingsUpdateListener);
}
private void updateSettings(final UpgradeResponse upgradeResponse, final ActionListener<UpgradeResponse> listener) {
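Note: doExecute now receives the Task created for the request, and overrides must keep it on the delegation chain; the same mechanical change repeats in TransportValidateQueryAction and TransportDeleteAction below. The pattern, sketched (wrap(...) stands in for whatever listener wrapping the override does):

    @Override
    protected void doExecute(Task task, UpgradeRequest request, ActionListener<UpgradeResponse> listener) {
        ActionListener<UpgradeResponse> wrapped = wrap(listener);
        super.doExecute(task, request, wrapped);   // never drop the task when delegating
    }
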
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java
index 6d6bbd691b..72cbe37c91 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java
@@ -52,6 +52,7 @@ import org.elasticsearch.search.SearchService;
import org.elasticsearch.search.internal.DefaultSearchContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.ShardSearchLocalRequest;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@@ -89,14 +90,14 @@ public class TransportValidateQueryAction extends TransportBroadcastAction<Valid
}
@Override
- protected void doExecute(ValidateQueryRequest request, ActionListener<ValidateQueryResponse> listener) {
+ protected void doExecute(Task task, ValidateQueryRequest request, ActionListener<ValidateQueryResponse> listener) {
request.nowInMillis = System.currentTimeMillis();
- super.doExecute(request, listener);
+ super.doExecute(task, request, listener);
}
@Override
protected ShardValidateQueryRequest newShardRequest(int numShards, ShardRouting shard, ValidateQueryRequest request) {
- String[] filteringAliases = indexNameExpressionResolver.filteringAliases(clusterService.state(), shard.index(), request.indices());
+ String[] filteringAliases = indexNameExpressionResolver.filteringAliases(clusterService.state(), shard.getIndexName(), request.indices());
return new ShardValidateQueryRequest(shard.shardId(), filteringAliases, request);
}
diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java
index 9a7299ad17..c54b3588c1 100644
--- a/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java
+++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java
@@ -289,11 +289,11 @@ public class BulkProcessor implements Closeable {
}
public BulkProcessor add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType) throws Exception {
- return add(data, defaultIndex, defaultType, null);
+ return add(data, defaultIndex, defaultType, null, null);
}
- public synchronized BulkProcessor add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable Object payload) throws Exception {
- bulkRequest.add(data, defaultIndex, defaultType, null, null, payload, true);
+ public synchronized BulkProcessor add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String defaultPipeline, @Nullable Object payload) throws Exception {
+ bulkRequest.add(data, defaultIndex, defaultType, null, null, defaultPipeline, payload, true);
executeIfNeeded();
return this;
}
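Note: callers that want one pipeline applied to a whole payload pass it as the new defaultPipeline argument. Hypothetical usage; "strip-html" is a made-up pipeline id and ndjsonBody a bulk-format string:

    BytesReference payload = new BytesArray(ndjsonBody);
    bulkProcessor.add(payload, "logs", "event", "strip-html", null);
    // index/create actions in the payload that name no pipeline of their own
    // fall back to "strip-html"
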
diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java
index 0026064489..f008bf9a4e 100644
--- a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java
@@ -28,6 +28,7 @@ import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
@@ -69,14 +70,6 @@ public class BulkRequest extends ActionRequest<BulkRequest> implements Composite
}
/**
- * Creates a bulk request caused by some other request, which is provided as an
- * argument so that its headers and context can be copied to the new request
- */
- public BulkRequest(ActionRequest<?> request) {
- super(request);
- }
-
- /**
* Adds a list of requests to be executed. Either index or delete requests.
*/
public BulkRequest add(ActionRequest<?>... requests) {
@@ -253,17 +246,17 @@ public class BulkRequest extends ActionRequest<BulkRequest> implements Composite
* Adds framed data in binary format
*/
public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType) throws Exception {
- return add(data, defaultIndex, defaultType, null, null, null, true);
+ return add(data, defaultIndex, defaultType, null, null, null, null, true);
}
/**
* Adds framed data in binary format
*/
public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, boolean allowExplicitIndex) throws Exception {
- return add(data, defaultIndex, defaultType, null, null, null, allowExplicitIndex);
+ return add(data, defaultIndex, defaultType, null, null, null, null, allowExplicitIndex);
}
- public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String defaultRouting, @Nullable String[] defaultFields, @Nullable Object payload, boolean allowExplicitIndex) throws Exception {
+ public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String defaultRouting, @Nullable String[] defaultFields, @Nullable String defaultPipeline, @Nullable Object payload, boolean allowExplicitIndex) throws Exception {
XContent xContent = XContentFactory.xContent(data);
int line = 0;
int from = 0;
@@ -304,6 +297,7 @@ public class BulkRequest extends ActionRequest<BulkRequest> implements Composite
long version = Versions.MATCH_ANY;
VersionType versionType = VersionType.INTERNAL;
int retryOnConflict = 0;
+ String pipeline = defaultPipeline;
// at this stage, next token can either be END_OBJECT (and use default index and type, with auto generated id)
// or START_OBJECT which will have another set of parameters
@@ -344,6 +338,8 @@ public class BulkRequest extends ActionRequest<BulkRequest> implements Composite
versionType = VersionType.fromString(parser.text());
} else if ("_retry_on_conflict".equals(currentFieldName) || "_retryOnConflict".equals(currentFieldName)) {
retryOnConflict = parser.intValue();
+ } else if ("pipeline".equals(currentFieldName)) {
+ pipeline = parser.text();
} else if ("fields".equals(currentFieldName)) {
throw new IllegalArgumentException("Action/metadata line [" + line + "] contains a simple value for parameter [fields] while a list is expected");
} else {
@@ -380,15 +376,15 @@ public class BulkRequest extends ActionRequest<BulkRequest> implements Composite
if ("index".equals(action)) {
if (opType == null) {
internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType)
- .source(data.slice(from, nextMarker - from)), payload);
+ .setPipeline(pipeline).source(data.slice(from, nextMarker - from)), payload);
} else {
internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType)
- .create("create".equals(opType))
+ .create("create".equals(opType)).setPipeline(pipeline)
.source(data.slice(from, nextMarker - from)), payload);
}
} else if ("create".equals(action)) {
internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType)
- .create(true)
+ .create(true).setPipeline(pipeline)
.source(data.slice(from, nextMarker - from)), payload);
} else if ("update".equals(action)) {
UpdateRequest updateRequest = new UpdateRequest(index, type, id).routing(routing).parent(parent).retryOnConflict(retryOnConflict)
@@ -479,6 +475,22 @@ public class BulkRequest extends ActionRequest<BulkRequest> implements Composite
return -1;
}
+ /**
+ * @return Whether this bulk request contains an index request with an ingest pipeline set.
+ */
+ public boolean hasIndexRequestsWithPipelines() {
+ for (ActionRequest actionRequest : requests) {
+ if (actionRequest instanceof IndexRequest) {
+ IndexRequest indexRequest = (IndexRequest) actionRequest;
+ if (Strings.hasText(indexRequest.getPipeline())) {
+ return true;
+ }
+ }
+ }
+
+ return false;
+ }
+
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
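Note: on the wire the feature surfaces as a pipeline key in the bulk action metadata; a per-action value overrides the defaultPipeline handed to add(...). An illustrative round trip (index, type and pipeline ids are made up):

    String body =
        "{ \"index\" : { \"_index\" : \"logs\", \"_type\" : \"event\", \"pipeline\" : \"strip-html\" } }\n" +
        "{ \"message\" : \"<b>hello</b>\" }\n" +
        "{ \"index\" : { \"_index\" : \"logs\", \"_type\" : \"event\" } }\n" +  // falls back to the default
        "{ \"message\" : \"plain\" }\n";

    BulkRequest bulkRequest = new BulkRequest();
    bulkRequest.add(new BytesArray(body), "logs", "event", null, null, "default-pipeline", null, true);
    assert bulkRequest.hasIndexRequestsWithPipelines();
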
diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java
index 71dc1b48f6..874789e8d6 100644
--- a/core/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java
@@ -41,7 +41,7 @@ public class BulkShardRequest extends ReplicationRequest<BulkShardRequest> {
}
BulkShardRequest(BulkRequest bulkRequest, ShardId shardId, boolean refresh, BulkItemRequest[] items) {
- super(bulkRequest, shardId);
+ super(shardId);
this.items = items;
this.refresh = refresh;
}
diff --git a/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java
index 7252993427..9cdce240d7 100644
--- a/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java
+++ b/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java
@@ -114,7 +114,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
for (Map.Entry<String, Set<String>> entry : indicesAndTypes.entrySet()) {
final String index = entry.getKey();
if (autoCreateIndex.shouldAutoCreate(index, state)) {
- CreateIndexRequest createIndexRequest = new CreateIndexRequest(bulkRequest);
+ CreateIndexRequest createIndexRequest = new CreateIndexRequest();
createIndexRequest.index(index);
for (String type : entry.getValue()) {
createIndexRequest.mapping(type);
@@ -377,7 +377,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
if (unavailableException == null) {
IndexMetaData indexMetaData = metaData.index(concreteIndex);
if (indexMetaData.getState() == IndexMetaData.State.CLOSE) {
- unavailableException = new IndexClosedException(new Index(metaData.index(request.index()).getIndex()));
+ unavailableException = new IndexClosedException(metaData.index(request.index()).getIndex());
}
}
if (unavailableException != null) {
diff --git a/core/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java b/core/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java
index ba63f33ee1..6c609eb9f3 100644
--- a/core/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java
@@ -92,7 +92,7 @@ public class DeleteRequest extends ReplicationRequest<DeleteRequest> implements
* The new request will inherit the headers and context from the original request that caused it.
*/
public DeleteRequest(DeleteRequest request, ActionRequest originalRequest) {
- super(request, originalRequest);
+ super(request);
this.type = request.type();
this.id = request.id();
this.routing = request.routing();
@@ -102,14 +102,6 @@ public class DeleteRequest extends ReplicationRequest<DeleteRequest> implements
this.versionType = request.versionType();
}
- /**
- * Creates a delete request caused by some other request, which is provided as an
- * argument so that its headers and context can be copied to the new request
- */
- public DeleteRequest(ActionRequest request) {
- super(request);
- }
-
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = super.validate();
diff --git a/core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java b/core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java
index f80b1a2439..3a0e7aeec2 100644
--- a/core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java
+++ b/core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java
@@ -44,6 +44,7 @@ import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.IndexAlreadyExistsException;
import org.elasticsearch.indices.IndicesService;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@@ -69,27 +70,27 @@ public class TransportDeleteAction extends TransportReplicationAction<DeleteRequ
}
@Override
- protected void doExecute(final DeleteRequest request, final ActionListener<DeleteResponse> listener) {
+ protected void doExecute(Task task, final DeleteRequest request, final ActionListener<DeleteResponse> listener) {
ClusterState state = clusterService.state();
if (autoCreateIndex.shouldAutoCreate(request.index(), state)) {
- createIndexAction.execute(new CreateIndexRequest(request).index(request.index()).cause("auto(delete api)").masterNodeTimeout(request.timeout()), new ActionListener<CreateIndexResponse>() {
+ createIndexAction.execute(task, new CreateIndexRequest().index(request.index()).cause("auto(delete api)").masterNodeTimeout(request.timeout()), new ActionListener<CreateIndexResponse>() {
@Override
public void onResponse(CreateIndexResponse result) {
- innerExecute(request, listener);
+ innerExecute(task, request, listener);
}
@Override
public void onFailure(Throwable e) {
if (ExceptionsHelper.unwrapCause(e) instanceof IndexAlreadyExistsException) {
// we have the index, do it
- innerExecute(request, listener);
+ innerExecute(task, request, listener);
} else {
listener.onFailure(e);
}
}
});
} else {
- innerExecute(request, listener);
+ innerExecute(task, request, listener);
}
}
@@ -114,8 +115,8 @@ public class TransportDeleteAction extends TransportReplicationAction<DeleteRequ
request.setShardId(shardId);
}
- private void innerExecute(final DeleteRequest request, final ActionListener<DeleteResponse> listener) {
- super.doExecute(request, listener);
+ private void innerExecute(Task task, final DeleteRequest request, final ActionListener<DeleteResponse> listener) {
+ super.doExecute(task, request, listener);
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java b/core/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java
index 7b6253c0ae..26ae4bef49 100644
--- a/core/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java
+++ b/core/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java
@@ -108,7 +108,7 @@ public class TransportExplainAction extends TransportSingleShardAction<ExplainRe
Term uidTerm = new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(request.type(), request.id()));
Engine.GetResult result = indexShard.get(new Engine.Get(false, uidTerm));
if (!result.exists()) {
- return new ExplainResponse(shardId.getIndex(), request.type(), request.id(), false);
+ return new ExplainResponse(shardId.getIndexName(), request.type(), request.id(), false);
}
SearchContext context = new DefaultSearchContext(
@@ -134,9 +134,9 @@ public class TransportExplainAction extends TransportSingleShardAction<ExplainRe
// because we are working in the same searcher in engineGetResult we can be sure that a
// doc isn't deleted between the initial get and this call.
GetResult getResult = indexShard.getService().get(result, request.id(), request.type(), request.fields(), request.fetchSourceContext(), false);
- return new ExplainResponse(shardId.getIndex(), request.type(), request.id(), true, explanation, getResult);
+ return new ExplainResponse(shardId.getIndexName(), request.type(), request.id(), true, explanation, getResult);
} else {
- return new ExplainResponse(shardId.getIndex(), request.type(), request.id(), true, explanation);
+ return new ExplainResponse(shardId.getIndexName(), request.type(), request.id(), true, explanation);
}
} catch (IOException e) {
throw new ElasticsearchException("Could not explain", e);
diff --git a/core/src/main/java/org/elasticsearch/action/get/GetRequest.java b/core/src/main/java/org/elasticsearch/action/get/GetRequest.java
index c6919e858d..1c83cbeb63 100644
--- a/core/src/main/java/org/elasticsearch/action/get/GetRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/get/GetRequest.java
@@ -19,7 +19,6 @@
package org.elasticsearch.action.get;
-import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.RealtimeRequest;
import org.elasticsearch.action.ValidateActions;
@@ -72,8 +71,7 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
* Copy constructor that creates a new get request that is a copy of the one provided as an argument.
* The new request will inherit the headers and context from the original request that caused it.
*/
- public GetRequest(GetRequest getRequest, ActionRequest originalRequest) {
- super(originalRequest);
+ public GetRequest(GetRequest getRequest) {
this.index = getRequest.index;
this.type = getRequest.type;
this.id = getRequest.id;
@@ -99,14 +97,6 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
}
/**
- * Constructs a new get request starting from the provided request, meaning that it will
- * inherit its headers and context, and against the specified index.
- */
- public GetRequest(ActionRequest request, String index) {
- super(request, index);
- }
-
- /**
* Constructs a new get request against the specified index with the type and id.
*
* @param index The index to get the document from
diff --git a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java
index db3c0f72bf..f67e2b27b5 100644
--- a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java
@@ -266,18 +266,6 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
List<Item> items = new ArrayList<>();
- public MultiGetRequest() {
-
- }
-
- /**
- * Creates a multi get request caused by some other request, which is provided as an
- * argument so that its headers and context can be copied to the new request
- */
- public MultiGetRequest(ActionRequest request) {
- super(request);
- }
-
public List<Item> getItems() {
return this.items;
}
diff --git a/core/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java b/core/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java
index 6715319bc3..9250204858 100644
--- a/core/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java
@@ -45,7 +45,7 @@ public class MultiGetShardRequest extends SingleShardRequest<MultiGetShardReques
}
MultiGetShardRequest(MultiGetRequest multiGetRequest, String index, int shardId) {
- super(multiGetRequest, index);
+ super(index);
this.shardId = shardId;
locations = new IntArrayList();
items = new ArrayList<>();
diff --git a/core/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java b/core/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java
index 52554e0ede..7f5de65c61 100644
--- a/core/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java
+++ b/core/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java
@@ -79,7 +79,7 @@ public class TransportMultiGetAction extends HandledTransportAction<MultiGetRequ
.getShards(clusterState, concreteSingleIndex, item.type(), item.id(), item.routing(), null).shardId();
MultiGetShardRequest shardRequest = shardRequests.get(shardId);
if (shardRequest == null) {
- shardRequest = new MultiGetShardRequest(request, shardId.index().name(), shardId.id());
+ shardRequest = new MultiGetShardRequest(request, shardId.getIndexName(), shardId.id());
shardRequests.put(shardId, shardRequest);
}
shardRequest.add(i, item);
diff --git a/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java
index 2caec81aba..a303aa9307 100644
--- a/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java
@@ -21,7 +21,6 @@ package org.elasticsearch.action.index;
import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.Version;
-import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.DocumentRequest;
import org.elasticsearch.action.RoutingMissingException;
@@ -155,23 +154,17 @@ public class IndexRequest extends ReplicationRequest<IndexRequest> implements Do
private XContentType contentType = Requests.INDEX_CONTENT_TYPE;
- public IndexRequest() {
- }
+ private String pipeline;
- /**
- * Creates an index request caused by some other request, which is provided as an
- * argument so that its headers and context can be copied to the new request
- */
- public IndexRequest(ActionRequest request) {
- super(request);
+ public IndexRequest() {
}
/**
* Copy constructor that creates a new index request that is a copy of the one provided as an argument.
* The new request will inherit the headers and context from the original request that caused it.
*/
- public IndexRequest(IndexRequest indexRequest, ActionRequest originalRequest) {
- super(indexRequest, originalRequest);
+ public IndexRequest(IndexRequest indexRequest) {
+ super(indexRequest);
this.type = indexRequest.type;
this.id = indexRequest.id;
this.routing = indexRequest.routing;
@@ -365,6 +358,21 @@ public class IndexRequest extends ReplicationRequest<IndexRequest> implements Do
}
/**
+ * Sets the ingest pipeline to be executed before indexing the document
+ */
+ public IndexRequest setPipeline(String pipeline) {
+ this.pipeline = pipeline;
+ return this;
+ }
+
+ /**
+ * Returns the ingest pipeline to be executed before indexing the document
+ */
+ public String getPipeline() {
+ return this.pipeline;
+ }
+
+ /**
* The source of the document to index, recopied to a new array if it is unsafe.
*/
public BytesReference source() {
@@ -659,6 +667,7 @@ public class IndexRequest extends ReplicationRequest<IndexRequest> implements Do
refresh = in.readBoolean();
version = in.readLong();
versionType = VersionType.fromValue(in.readByte());
+ pipeline = in.readOptionalString();
}
@Override
@@ -680,6 +689,7 @@ public class IndexRequest extends ReplicationRequest<IndexRequest> implements Do
out.writeBoolean(refresh);
out.writeLong(version);
out.writeByte(versionType.getValue());
+ out.writeOptionalString(pipeline);
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java
index f7134d8484..4116755e4e 100644
--- a/core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java
+++ b/core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java
@@ -278,4 +278,12 @@ public class IndexRequestBuilder extends ReplicationRequestBuilder<IndexRequest,
request.ttl(ttl);
return this;
}
+
+ /**
+ * Sets the ingest pipeline to be executed before indexing the document
+ */
+ public IndexRequestBuilder setPipeline(String pipeline) {
+ request.setPipeline(pipeline);
+ return this;
+ }
}
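
For context, client-side usage of the new pipeline parameter might look like the following sketch; the pipeline id "my-pipeline" and the field values are illustrative assumptions, not part of this commit:

    // Sketch: index a document through a hypothetical ingest pipeline "my-pipeline".
    IndexResponse response = client.prepareIndex("index", "type", "1")
            .setSource("field", "value")      // document body
            .setPipeline("my-pipeline")       // pre-processed on an ingest node before indexing
            .get();
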
diff --git a/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java b/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java
index 620056ded4..33bf3547d0 100644
--- a/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java
+++ b/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java
@@ -48,6 +48,7 @@ import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.indices.IndexAlreadyExistsException;
import org.elasticsearch.indices.IndicesService;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@@ -84,19 +85,19 @@ public class TransportIndexAction extends TransportReplicationAction<IndexReques
}
@Override
- protected void doExecute(final IndexRequest request, final ActionListener<IndexResponse> listener) {
+ protected void doExecute(Task task, final IndexRequest request, final ActionListener<IndexResponse> listener) {
// if we don't have a master, we don't have metadata, that's fine, let it find a master using create index API
ClusterState state = clusterService.state();
if (autoCreateIndex.shouldAutoCreate(request.index(), state)) {
- CreateIndexRequest createIndexRequest = new CreateIndexRequest(request);
+ CreateIndexRequest createIndexRequest = new CreateIndexRequest();
createIndexRequest.index(request.index());
createIndexRequest.mapping(request.type());
createIndexRequest.cause("auto(index api)");
createIndexRequest.masterNodeTimeout(request.timeout());
- createIndexAction.execute(createIndexRequest, new ActionListener<CreateIndexResponse>() {
+ createIndexAction.execute(task, createIndexRequest, new ActionListener<CreateIndexResponse>() {
@Override
public void onResponse(CreateIndexResponse result) {
- innerExecute(request, listener);
+ innerExecute(task, request, listener);
}
@Override
@@ -104,7 +105,7 @@ public class TransportIndexAction extends TransportReplicationAction<IndexReques
if (ExceptionsHelper.unwrapCause(e) instanceof IndexAlreadyExistsException) {
// we have the index, do it
try {
- innerExecute(request, listener);
+ innerExecute(task, request, listener);
} catch (Throwable e1) {
listener.onFailure(e1);
}
@@ -114,7 +115,7 @@ public class TransportIndexAction extends TransportReplicationAction<IndexReques
}
});
} else {
- innerExecute(request, listener);
+ innerExecute(task, request, listener);
}
}
@@ -129,8 +130,8 @@ public class TransportIndexAction extends TransportReplicationAction<IndexReques
request.setShardId(shardId);
}
- private void innerExecute(final IndexRequest request, final ActionListener<IndexResponse> listener) {
- super.doExecute(request, listener);
+ private void innerExecute(Task task, final IndexRequest request, final ActionListener<IndexResponse> listener) {
+ super.doExecute(task, request, listener);
}
@Override
@@ -146,7 +147,7 @@ public class TransportIndexAction extends TransportReplicationAction<IndexReques
MappingMetaData mappingMd = indexMetaData.mappingOrDefault(request.type());
if (mappingMd != null && mappingMd.routing().required()) {
if (request.routing() == null) {
- throw new RoutingMissingException(request.shardId().getIndex(), request.type(), request.id());
+ throw new RoutingMissingException(request.shardId().getIndex().getName(), request.type(), request.id());
}
}
@@ -176,7 +177,7 @@ public class TransportIndexAction extends TransportReplicationAction<IndexReques
*/
public static Engine.Index executeIndexRequestOnReplica(IndexRequest request, IndexShard indexShard) {
final ShardId shardId = indexShard.shardId();
- SourceToParse sourceToParse = SourceToParse.source(SourceToParse.Origin.REPLICA, request.source()).index(shardId.getIndex()).type(request.type()).id(request.id())
+ SourceToParse sourceToParse = SourceToParse.source(SourceToParse.Origin.REPLICA, request.source()).index(shardId.getIndexName()).type(request.type()).id(request.id())
.routing(request.routing()).parent(request.parent()).timestamp(request.timestamp()).ttl(request.ttl());
final Engine.Index operation = indexShard.prepareIndexOnReplica(sourceToParse, request.version(), request.versionType());
@@ -204,7 +205,7 @@ public class TransportIndexAction extends TransportReplicationAction<IndexReques
Mapping update = operation.parsedDoc().dynamicMappingsUpdate();
final ShardId shardId = indexShard.shardId();
if (update != null) {
- final String indexName = shardId.getIndex();
+ final String indexName = shardId.getIndexName();
mappingUpdatedAction.updateMappingOnMasterSynchronously(indexName, request.type(), update);
operation = prepareIndexOperationOnPrimary(request, indexShard);
update = operation.parsedDoc().dynamicMappingsUpdate();
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java
new file mode 100644
index 0000000000..ba1dd5d385
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.Action;
+import org.elasticsearch.client.ElasticsearchClient;
+
+public class DeletePipelineAction extends Action<DeletePipelineRequest, WritePipelineResponse, DeletePipelineRequestBuilder> {
+
+ public static final DeletePipelineAction INSTANCE = new DeletePipelineAction();
+ public static final String NAME = "cluster:admin/ingest/pipeline/delete";
+
+ public DeletePipelineAction() {
+ super(NAME);
+ }
+
+ @Override
+ public DeletePipelineRequestBuilder newRequestBuilder(ElasticsearchClient client) {
+ return new DeletePipelineRequestBuilder(client, this);
+ }
+
+ @Override
+ public WritePipelineResponse newResponse() {
+ return new WritePipelineResponse();
+ }
+}
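
Assuming a pipeline with id "my-pipeline" already exists, a minimal client-side sketch of deleting it through this action (a usage example, not part of the commit):

    // Sketch: delete an ingest pipeline by id via the new action.
    WritePipelineResponse response =
            new DeletePipelineRequestBuilder(client, DeletePipelineAction.INSTANCE, "my-pipeline").get();
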
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java
new file mode 100644
index 0000000000..6e5b9d80c6
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.support.master.AcknowledgedRequest;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+
+import java.io.IOException;
+import java.util.Objects;
+
+import static org.elasticsearch.action.ValidateActions.addValidationError;
+
+public class DeletePipelineRequest extends AcknowledgedRequest<DeletePipelineRequest> {
+
+ private String id;
+
+ public DeletePipelineRequest(String id) {
+ if (id == null) {
+ throw new IllegalArgumentException("id is missing");
+ }
+ this.id = id;
+ }
+
+ DeletePipelineRequest() {
+ }
+
+ public void setId(String id) {
+ this.id = Objects.requireNonNull(id);
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ @Override
+ public ActionRequestValidationException validate() {
+ return null;
+ }
+
+ @Override
+ public void readFrom(StreamInput in) throws IOException {
+ super.readFrom(in);
+ id = in.readString();
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ super.writeTo(out);
+ out.writeString(id);
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequestBuilder.java
new file mode 100644
index 0000000000..fc14e0de2d
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequestBuilder.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.ActionRequestBuilder;
+import org.elasticsearch.client.ElasticsearchClient;
+
+public class DeletePipelineRequestBuilder extends ActionRequestBuilder<DeletePipelineRequest, WritePipelineResponse, DeletePipelineRequestBuilder> {
+
+ public DeletePipelineRequestBuilder(ElasticsearchClient client, DeletePipelineAction action) {
+ super(client, action, new DeletePipelineRequest());
+ }
+
+ public DeletePipelineRequestBuilder(ElasticsearchClient client, DeletePipelineAction action, String id) {
+ super(client, action, new DeletePipelineRequest(id));
+ }
+
+}
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java
new file mode 100644
index 0000000000..6378eb5757
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.master.TransportMasterNodeAction;
+import org.elasticsearch.cluster.ClusterService;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.block.ClusterBlockException;
+import org.elasticsearch.cluster.block.ClusterBlockLevel;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.ingest.PipelineStore;
+import org.elasticsearch.node.service.NodeService;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.TransportService;
+
+public class DeletePipelineTransportAction extends TransportMasterNodeAction<DeletePipelineRequest, WritePipelineResponse> {
+
+ private final PipelineStore pipelineStore;
+ private final ClusterService clusterService;
+
+ @Inject
+ public DeletePipelineTransportAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
+ TransportService transportService, ActionFilters actionFilters,
+ IndexNameExpressionResolver indexNameExpressionResolver, NodeService nodeService) {
+ super(settings, DeletePipelineAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, DeletePipelineRequest::new);
+ this.clusterService = clusterService;
+ this.pipelineStore = nodeService.getIngestService().getPipelineStore();
+ }
+
+ @Override
+ protected String executor() {
+ return ThreadPool.Names.SAME;
+ }
+
+ @Override
+ protected WritePipelineResponse newResponse() {
+ return new WritePipelineResponse();
+ }
+
+ @Override
+ protected void masterOperation(DeletePipelineRequest request, ClusterState state, ActionListener<WritePipelineResponse> listener) throws Exception {
+ pipelineStore.delete(clusterService, request, listener);
+ }
+
+ @Override
+ protected ClusterBlockException checkBlock(DeletePipelineRequest request, ClusterState state) {
+ return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
+ }
+
+}
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineAction.java b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineAction.java
new file mode 100644
index 0000000000..f6bc3d9a77
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineAction.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.Action;
+import org.elasticsearch.client.ElasticsearchClient;
+
+public class GetPipelineAction extends Action<GetPipelineRequest, GetPipelineResponse, GetPipelineRequestBuilder> {
+
+ public static final GetPipelineAction INSTANCE = new GetPipelineAction();
+ public static final String NAME = "cluster:admin/ingest/pipeline/get";
+
+ public GetPipelineAction() {
+ super(NAME);
+ }
+
+ @Override
+ public GetPipelineRequestBuilder newRequestBuilder(ElasticsearchClient client) {
+ return new GetPipelineRequestBuilder(client, this);
+ }
+
+ @Override
+ public GetPipelineResponse newResponse() {
+ return new GetPipelineResponse();
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequest.java b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequest.java
new file mode 100644
index 0000000000..6525c26c6c
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequest.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.support.master.MasterNodeReadRequest;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+
+import java.io.IOException;
+import java.util.Objects;
+
+import static org.elasticsearch.action.ValidateActions.addValidationError;
+
+public class GetPipelineRequest extends MasterNodeReadRequest<GetPipelineRequest> {
+
+ private String[] ids;
+
+ public GetPipelineRequest(String... ids) {
+ if (ids == null || ids.length == 0) {
+ throw new IllegalArgumentException("No ids specified");
+ }
+ this.ids = ids;
+ }
+
+ GetPipelineRequest() {
+ }
+
+ public String[] getIds() {
+ return ids;
+ }
+
+ @Override
+ public ActionRequestValidationException validate() {
+ return null;
+ }
+
+ @Override
+ public void readFrom(StreamInput in) throws IOException {
+ super.readFrom(in);
+ ids = in.readStringArray();
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ super.writeTo(out);
+ out.writeStringArray(ids);
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequestBuilder.java
new file mode 100644
index 0000000000..f96a5ffbe2
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequestBuilder.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder;
+import org.elasticsearch.client.ElasticsearchClient;
+
+public class GetPipelineRequestBuilder extends MasterNodeReadOperationRequestBuilder<GetPipelineRequest, GetPipelineResponse, GetPipelineRequestBuilder> {
+
+ public GetPipelineRequestBuilder(ElasticsearchClient client, GetPipelineAction action) {
+ super(client, action, new GetPipelineRequest());
+ }
+
+ public GetPipelineRequestBuilder(ElasticsearchClient client, GetPipelineAction action, String[] ids) {
+ super(client, action, new GetPipelineRequest(ids));
+ }
+
+}
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java
new file mode 100644
index 0000000000..9f0b229d32
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.StatusToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.ingest.PipelineConfiguration;
+import org.elasticsearch.rest.RestStatus;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+public class GetPipelineResponse extends ActionResponse implements StatusToXContent {
+
+ private List<PipelineConfiguration> pipelines;
+
+ public GetPipelineResponse() {
+ }
+
+ public GetPipelineResponse(List<PipelineConfiguration> pipelines) {
+ this.pipelines = pipelines;
+ }
+
+ public List<PipelineConfiguration> pipelines() {
+ return pipelines;
+ }
+
+ @Override
+ public void readFrom(StreamInput in) throws IOException {
+ super.readFrom(in);
+ int size = in.readVInt();
+ pipelines = new ArrayList<>(size);
+ for (int i = 0; i < size; i++) {
+ pipelines.add(PipelineConfiguration.readPipelineConfiguration(in));
+ }
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ super.writeTo(out);
+ out.writeVInt(pipelines.size());
+ for (PipelineConfiguration pipeline : pipelines) {
+ pipeline.writeTo(out);
+ }
+ }
+
+ public boolean isFound() {
+ return !pipelines.isEmpty();
+ }
+
+ @Override
+ public RestStatus status() {
+ return isFound() ? RestStatus.OK : RestStatus.NOT_FOUND;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startArray("pipelines");
+ for (PipelineConfiguration pipeline : pipelines) {
+ pipeline.toXContent(builder, params);
+ }
+ builder.endArray();
+ return builder;
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineTransportAction.java
new file mode 100644
index 0000000000..e762d0b8d3
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/GetPipelineTransportAction.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
+import org.elasticsearch.cluster.ClusterService;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.block.ClusterBlockException;
+import org.elasticsearch.cluster.block.ClusterBlockLevel;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.ingest.PipelineStore;
+import org.elasticsearch.node.service.NodeService;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.TransportService;
+
+public class GetPipelineTransportAction extends TransportMasterNodeReadAction<GetPipelineRequest, GetPipelineResponse> {
+
+ private final PipelineStore pipelineStore;
+
+ @Inject
+ public GetPipelineTransportAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
+ TransportService transportService, ActionFilters actionFilters,
+ IndexNameExpressionResolver indexNameExpressionResolver, NodeService nodeService) {
+ super(settings, GetPipelineAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, GetPipelineRequest::new);
+ this.pipelineStore = nodeService.getIngestService().getPipelineStore();
+ }
+
+ @Override
+ protected String executor() {
+ return ThreadPool.Names.SAME;
+ }
+
+ @Override
+ protected GetPipelineResponse newResponse() {
+ return new GetPipelineResponse();
+ }
+
+ @Override
+ protected void masterOperation(GetPipelineRequest request, ClusterState state, ActionListener<GetPipelineResponse> listener) throws Exception {
+ listener.onResponse(new GetPipelineResponse(pipelineStore.getPipelines(state, request.getIds())));
+ }
+
+ @Override
+ protected ClusterBlockException checkBlock(GetPipelineRequest request, ClusterState state) {
+ return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ);
+ }
+
+}
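
A hedged usage sketch for the get side, assuming a client and an illustrative pipeline id:

    // Sketch: fetch one or more pipelines and check whether any were found.
    GetPipelineResponse response =
            new GetPipelineRequestBuilder(client, GetPipelineAction.INSTANCE, new String[]{"my-pipeline"}).get();
    if (response.isFound()) {
        for (PipelineConfiguration pipeline : response.pipelines()) {
            // inspect each pipeline's configuration here
        }
    }
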
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java
new file mode 100644
index 0000000000..3ec0af0e84
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java
@@ -0,0 +1,225 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.bulk.BulkAction;
+import org.elasticsearch.action.bulk.BulkItemResponse;
+import org.elasticsearch.action.bulk.BulkRequest;
+import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.index.IndexAction;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.support.ActionFilter;
+import org.elasticsearch.action.support.ActionFilterChain;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.component.AbstractComponent;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.ingest.PipelineExecutionService;
+import org.elasticsearch.node.service.NodeService;
+import org.elasticsearch.tasks.Task;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+
+public final class IngestActionFilter extends AbstractComponent implements ActionFilter {
+
+ private final PipelineExecutionService executionService;
+
+ @Inject
+ public IngestActionFilter(Settings settings, NodeService nodeService) {
+ super(settings);
+ this.executionService = nodeService.getIngestService().getPipelineExecutionService();
+ }
+
+ @Override
+ public <Request extends ActionRequest<Request>, Response extends ActionResponse> void apply(Task task, String action, Request request, ActionListener<Response> listener, ActionFilterChain<Request, Response> chain) {
+ switch (action) {
+ case IndexAction.NAME:
+ IndexRequest indexRequest = (IndexRequest) request;
+ if (Strings.hasText(indexRequest.getPipeline())) {
+ processIndexRequest(task, action, listener, chain, (IndexRequest) request);
+ } else {
+ chain.proceed(task, action, request, listener);
+ }
+ break;
+ case BulkAction.NAME:
+ BulkRequest bulkRequest = (BulkRequest) request;
+ if (bulkRequest.hasIndexRequestsWithPipelines()) {
+ @SuppressWarnings("unchecked")
+ ActionListener<BulkResponse> actionListener = (ActionListener<BulkResponse>) listener;
+ processBulkIndexRequest(task, bulkRequest, action, chain, actionListener);
+ } else {
+ chain.proceed(task, action, request, listener);
+ }
+ break;
+ default:
+ chain.proceed(task, action, request, listener);
+ break;
+ }
+ }
+
+ @Override
+ public <Response extends ActionResponse> void apply(String action, Response response, ActionListener<Response> listener, ActionFilterChain<?, Response> chain) {
+ chain.proceed(action, response, listener);
+ }
+
+ void processIndexRequest(Task task, String action, ActionListener listener, ActionFilterChain chain, IndexRequest indexRequest) {
+
+ executionService.execute(indexRequest, t -> {
+ logger.error("failed to execute pipeline [{}]", t, indexRequest.getPipeline());
+ listener.onFailure(t);
+ }, success -> {
+ // TransportIndexAction uses the IndexRequest and the same action name on the node that receives the
+ // request and on the node that processes the primary action. This could lead to the pipeline being
+ // executed twice for the same index request, hence we set the pipeline to null once it has run.
+ indexRequest.setPipeline(null);
+ chain.proceed(task, action, indexRequest, listener);
+ });
+ }
+
+ void processBulkIndexRequest(Task task, BulkRequest original, String action, ActionFilterChain chain, ActionListener<BulkResponse> listener) {
+ BulkRequestModifier bulkRequestModifier = new BulkRequestModifier(original);
+ executionService.execute(() -> bulkRequestModifier, (indexRequest, throwable) -> {
+ logger.debug("failed to execute pipeline [{}] for document [{}/{}/{}]", indexRequest.getPipeline(), indexRequest.index(), indexRequest.type(), indexRequest.id(), throwable);
+ bulkRequestModifier.markCurrentItemAsFailed(throwable);
+ }, (throwable) -> {
+ if (throwable != null) {
+ logger.error("failed to execute pipeline for a bulk request", throwable);
+ listener.onFailure(throwable);
+ } else {
+ BulkRequest bulkRequest = bulkRequestModifier.getBulkRequest();
+ ActionListener<BulkResponse> actionListener = bulkRequestModifier.wrapActionListenerIfNeeded(listener);
+ if (bulkRequest.requests().isEmpty()) {
+ // at this stage, the transport bulk action can't deal with a bulk request with no requests,
+ // so we stop and send an empty response back to the client.
+ // (this will happen if pre-processing of all items in the bulk failed)
+ actionListener.onResponse(new BulkResponse(new BulkItemResponse[0], 0));
+ } else {
+ chain.proceed(task, action, bulkRequest, actionListener);
+ }
+ }
+ });
+ }
+
+ @Override
+ public int order() {
+ return Integer.MAX_VALUE;
+ }
+
+ final static class BulkRequestModifier implements Iterator<ActionRequest<?>> {
+
+ final BulkRequest bulkRequest;
+ final Set<Integer> failedSlots;
+ final List<BulkItemResponse> itemResponses;
+
+ int currentSlot = -1;
+ int[] originalSlots;
+
+ BulkRequestModifier(BulkRequest bulkRequest) {
+ this.bulkRequest = bulkRequest;
+ this.failedSlots = new HashSet<>();
+ this.itemResponses = new ArrayList<>(bulkRequest.requests().size());
+ }
+
+ @Override
+ public ActionRequest next() {
+ return bulkRequest.requests().get(++currentSlot);
+ }
+
+ @Override
+ public boolean hasNext() {
+ return (currentSlot + 1) < bulkRequest.requests().size();
+ }
+
+ BulkRequest getBulkRequest() {
+ if (itemResponses.isEmpty()) {
+ return bulkRequest;
+ } else {
+ BulkRequest modifiedBulkRequest = new BulkRequest();
+ modifiedBulkRequest.refresh(bulkRequest.refresh());
+ modifiedBulkRequest.consistencyLevel(bulkRequest.consistencyLevel());
+ modifiedBulkRequest.timeout(bulkRequest.timeout());
+
+ int slot = 0;
+ originalSlots = new int[bulkRequest.requests().size() - failedSlots.size()];
+ for (int i = 0; i < bulkRequest.requests().size(); i++) {
+ ActionRequest request = bulkRequest.requests().get(i);
+ if (failedSlots.contains(i) == false) {
+ modifiedBulkRequest.add(request);
+ originalSlots[slot++] = i;
+ }
+ }
+ return modifiedBulkRequest;
+ }
+ }
+
+ ActionListener<BulkResponse> wrapActionListenerIfNeeded(ActionListener<BulkResponse> actionListener) {
+ if (itemResponses.isEmpty()) {
+ return actionListener;
+ } else {
+ return new IngestBulkResponseListener(originalSlots, itemResponses, actionListener);
+ }
+ }
+
+ void markCurrentItemAsFailed(Throwable e) {
+ IndexRequest indexRequest = (IndexRequest) bulkRequest.requests().get(currentSlot);
+ // We hit an error while preprocessing a request, so we:
+ // 1) Remember the request's slot in the bulk, so that when we're done processing all requests we know which ones failed
+ // 2) Add a bulk item failure for this request
+ // 3) Continue with the next request in the bulk.
+ failedSlots.add(currentSlot);
+ BulkItemResponse.Failure failure = new BulkItemResponse.Failure(indexRequest.index(), indexRequest.type(), indexRequest.id(), e);
+ itemResponses.add(new BulkItemResponse(currentSlot, indexRequest.opType().lowercase(), failure));
+ }
+
+ }
+
+ private final static class IngestBulkResponseListener implements ActionListener<BulkResponse> {
+
+ private final int[] originalSlots;
+ private final List<BulkItemResponse> itemResponses;
+ private final ActionListener<BulkResponse> actionListener;
+
+ IngestBulkResponseListener(int[] originalSlots, List<BulkItemResponse> itemResponses, ActionListener<BulkResponse> actionListener) {
+ this.itemResponses = itemResponses;
+ this.actionListener = actionListener;
+ this.originalSlots = originalSlots;
+ }
+
+ @Override
+ public void onResponse(BulkResponse bulkItemResponses) {
+ for (int i = 0; i < bulkItemResponses.getItems().length; i++) {
+ itemResponses.add(originalSlots[i], bulkItemResponses.getItems()[i]);
+ }
+ actionListener.onResponse(new BulkResponse(itemResponses.toArray(new BulkItemResponse[itemResponses.size()]), bulkItemResponses.getTookInMillis()));
+ }
+
+ @Override
+ public void onFailure(Throwable e) {
+ actionListener.onFailure(e);
+ }
+ }
+}
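
To make the slot bookkeeping in BulkRequestModifier concrete, a worked example with invented values: given a bulk of three index requests where pre-processing of slot 1 fails, the trimmed bulk carries the requests from slots 0 and 2, originalSlots becomes [0, 2], and a failure item is recorded for slot 1. When the trimmed bulk's two responses come back, IngestBulkResponseListener re-inserts them at positions 0 and 2, so the client receives three items in the order it sent them.
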
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java
new file mode 100644
index 0000000000..39a4b1fa4e
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestProxyActionFilter.java
@@ -0,0 +1,125 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionListenerResponseHandler;
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.bulk.BulkAction;
+import org.elasticsearch.action.bulk.BulkRequest;
+import org.elasticsearch.action.index.IndexAction;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.support.ActionFilter;
+import org.elasticsearch.action.support.ActionFilterChain;
+import org.elasticsearch.cluster.ClusterService;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.Randomness;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.tasks.Task;
+import org.elasticsearch.transport.TransportResponse;
+import org.elasticsearch.transport.TransportService;
+
+import java.util.concurrent.atomic.AtomicInteger;
+
+public final class IngestProxyActionFilter implements ActionFilter {
+
+ private final ClusterService clusterService;
+ private final TransportService transportService;
+ private final AtomicInteger randomNodeGenerator = new AtomicInteger(Randomness.get().nextInt());
+
+ @Inject
+ public IngestProxyActionFilter(ClusterService clusterService, TransportService transportService) {
+ this.clusterService = clusterService;
+ this.transportService = transportService;
+ }
+
+ @Override
+ public <Request extends ActionRequest<Request>, Response extends ActionResponse> void apply(Task task, String action, Request request, ActionListener<Response> listener, ActionFilterChain<Request, Response> chain) {
+ Action ingestAction;
+ switch (action) {
+ case IndexAction.NAME:
+ ingestAction = IndexAction.INSTANCE;
+ IndexRequest indexRequest = (IndexRequest) request;
+ if (Strings.hasText(indexRequest.getPipeline())) {
+ forwardIngestRequest(ingestAction, request, listener);
+ } else {
+ chain.proceed(task, action, request, listener);
+ }
+ break;
+ case BulkAction.NAME:
+ ingestAction = BulkAction.INSTANCE;
+ BulkRequest bulkRequest = (BulkRequest) request;
+ if (bulkRequest.hasIndexRequestsWithPipelines()) {
+ forwardIngestRequest(ingestAction, request, listener);
+ } else {
+ chain.proceed(task, action, request, listener);
+ }
+ break;
+ default:
+ chain.proceed(task, action, request, listener);
+ break;
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private void forwardIngestRequest(Action<?, ?, ?> action, ActionRequest request, ActionListener<?> listener) {
+ transportService.sendRequest(randomIngestNode(), action.name(), request, new ActionListenerResponseHandler(listener) {
+ @Override
+ public TransportResponse newInstance() {
+ return action.newResponse();
+ }
+
+ });
+ }
+
+ @Override
+ public <Response extends ActionResponse> void apply(String action, Response response, ActionListener<Response> listener, ActionFilterChain<?, Response> chain) {
+ chain.proceed(action, response, listener);
+ }
+
+ @Override
+ public int order() {
+ return Integer.MAX_VALUE;
+ }
+
+ private DiscoveryNode randomIngestNode() {
+ assert clusterService.localNode().isIngestNode() == false;
+ DiscoveryNodes nodes = clusterService.state().getNodes();
+ DiscoveryNode[] ingestNodes = nodes.getIngestNodes().values().toArray(DiscoveryNode.class);
+ if (ingestNodes.length == 0) {
+ throw new IllegalStateException("There are no ingest nodes in this cluster, unable to forward request to an ingest node.");
+ }
+
+ int index = getNodeNumber();
+ return ingestNodes[(index) % ingestNodes.length];
+ }
+
+ private int getNodeNumber() {
+ int index = randomNodeGenerator.incrementAndGet();
+ if (index < 0) {
+ index = 0;
+ randomNodeGenerator.set(0);
+ }
+ return index;
+ }
+}
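
The node choice above is plain round-robin with a guard against int overflow; a standalone sketch of the same idea, with illustrative names:

    // Sketch: round-robin target selection that resets the counter on overflow.
    AtomicInteger counter = new AtomicInteger(new Random().nextInt());
    int next = counter.incrementAndGet();
    if (next < 0) {          // wrapped past Integer.MAX_VALUE
        next = 0;
        counter.set(0);
    }
    int target = next % nodeCount;   // assumes nodeCount > 0, as checked above
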
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java
new file mode 100644
index 0000000000..8f4b4170f5
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.Action;
+import org.elasticsearch.action.index.IndexResponse;
+import org.elasticsearch.client.ElasticsearchClient;
+
+public class PutPipelineAction extends Action<PutPipelineRequest, WritePipelineResponse, PutPipelineRequestBuilder> {
+
+ public static final PutPipelineAction INSTANCE = new PutPipelineAction();
+ public static final String NAME = "cluster:admin/ingest/pipeline/put";
+
+ public PutPipelineAction() {
+ super(NAME);
+ }
+
+ @Override
+ public PutPipelineRequestBuilder newRequestBuilder(ElasticsearchClient client) {
+ return new PutPipelineRequestBuilder(client, this);
+ }
+
+ @Override
+ public WritePipelineResponse newResponse() {
+ return new WritePipelineResponse();
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java
new file mode 100644
index 0000000000..10416146ba
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.support.master.AcknowledgedRequest;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+
+import java.io.IOException;
+import java.util.Objects;
+
+import static org.elasticsearch.action.ValidateActions.addValidationError;
+
+public class PutPipelineRequest extends AcknowledgedRequest<PutPipelineRequest> {
+
+ private String id;
+ private BytesReference source;
+
+ public PutPipelineRequest(String id, BytesReference source) {
+ if (id == null) {
+ throw new IllegalArgumentException("id is missing");
+ }
+ if (source == null) {
+ throw new IllegalArgumentException("source is missing");
+ }
+
+ this.id = id;
+ this.source = source;
+ }
+
+ PutPipelineRequest() {
+ }
+
+ @Override
+ public ActionRequestValidationException validate() {
+ return null;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public BytesReference getSource() {
+ return source;
+ }
+
+ @Override
+ public void readFrom(StreamInput in) throws IOException {
+ super.readFrom(in);
+ id = in.readString();
+ source = in.readBytesReference();
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ super.writeTo(out);
+ out.writeString(id);
+ out.writeBytesReference(source);
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java
new file mode 100644
index 0000000000..bd927115fb
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.ActionRequestBuilder;
+import org.elasticsearch.client.ElasticsearchClient;
+import org.elasticsearch.common.bytes.BytesReference;
+
+public class PutPipelineRequestBuilder extends ActionRequestBuilder<PutPipelineRequest, WritePipelineResponse, PutPipelineRequestBuilder> {
+
+ public PutPipelineRequestBuilder(ElasticsearchClient client, PutPipelineAction action) {
+ super(client, action, new PutPipelineRequest());
+ }
+
+ public PutPipelineRequestBuilder(ElasticsearchClient client, PutPipelineAction action, String id, BytesReference source) {
+ super(client, action, new PutPipelineRequest(id, source));
+ }
+
+}
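
Registering a pipeline through the put action could look like the following sketch; the pipeline body and the set processor are assumptions for illustration and are not defined in this commit:

    // Sketch: store a pipeline whose single processor sets a field.
    BytesReference source = new BytesArray(
            "{\"description\":\"example\",\"processors\":[{\"set\":{\"field\":\"foo\",\"value\":\"bar\"}}]}");
    WritePipelineResponse response =
            new PutPipelineRequestBuilder(client, PutPipelineAction.INSTANCE, "my-pipeline", source).get();
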
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java
new file mode 100644
index 0000000000..31a911207a
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.master.TransportMasterNodeAction;
+import org.elasticsearch.cluster.ClusterService;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.block.ClusterBlockException;
+import org.elasticsearch.cluster.block.ClusterBlockLevel;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.ingest.PipelineStore;
+import org.elasticsearch.node.service.NodeService;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.TransportService;
+
+public class PutPipelineTransportAction extends TransportMasterNodeAction<PutPipelineRequest, WritePipelineResponse> {
+
+ private final PipelineStore pipelineStore;
+ private final ClusterService clusterService;
+
+ @Inject
+ public PutPipelineTransportAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
+ TransportService transportService, ActionFilters actionFilters,
+ IndexNameExpressionResolver indexNameExpressionResolver, NodeService nodeService) {
+ super(settings, PutPipelineAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, PutPipelineRequest::new);
+ this.clusterService = clusterService;
+ this.pipelineStore = nodeService.getIngestService().getPipelineStore();
+ }
+
+ @Override
+ protected String executor() {
+ return ThreadPool.Names.SAME;
+ }
+
+ @Override
+ protected WritePipelineResponse newResponse() {
+ return new WritePipelineResponse();
+ }
+
+ @Override
+ protected void masterOperation(PutPipelineRequest request, ClusterState state, ActionListener<WritePipelineResponse> listener) throws Exception {
+ pipelineStore.put(clusterService, request, listener);
+ }
+
+ @Override
+ protected ClusterBlockException checkBlock(PutPipelineRequest request, ClusterState state) {
+ return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
+ }
+
+}
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResult.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResult.java
new file mode 100644
index 0000000000..036703e98f
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResult.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.ingest.core.IngestDocument;
+
+import java.io.IOException;
+import java.util.Collections;
+
+/**
+ * Holds the end result of what a pipeline did to a sample document provided via the simulate API.
+ */
+public final class SimulateDocumentBaseResult implements SimulateDocumentResult<SimulateDocumentBaseResult> {
+
+ private static final SimulateDocumentBaseResult PROTOTYPE = new SimulateDocumentBaseResult(new WriteableIngestDocument(new IngestDocument(Collections.emptyMap(), Collections.emptyMap())));
+
+ private WriteableIngestDocument ingestDocument;
+ private Exception failure;
+
+ public SimulateDocumentBaseResult(IngestDocument ingestDocument) {
+ this.ingestDocument = new WriteableIngestDocument(ingestDocument);
+ }
+
+ private SimulateDocumentBaseResult(WriteableIngestDocument ingestDocument) {
+ this.ingestDocument = ingestDocument;
+ }
+
+ public SimulateDocumentBaseResult(Exception failure) {
+ this.failure = failure;
+ }
+
+ public IngestDocument getIngestDocument() {
+ if (ingestDocument == null) {
+ return null;
+ }
+ return ingestDocument.getIngestDocument();
+ }
+
+ public Exception getFailure() {
+ return failure;
+ }
+
+ public static SimulateDocumentBaseResult readSimulateDocumentSimpleResult(StreamInput in) throws IOException {
+ return PROTOTYPE.readFrom(in);
+ }
+
+ @Override
+ public SimulateDocumentBaseResult readFrom(StreamInput in) throws IOException {
+ if (in.readBoolean()) {
+ Exception exception = in.readThrowable();
+ return new SimulateDocumentBaseResult(exception);
+ }
+ return new SimulateDocumentBaseResult(new WriteableIngestDocument(in));
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ if (failure == null) {
+ out.writeBoolean(false);
+ ingestDocument.writeTo(out);
+ } else {
+ out.writeBoolean(true);
+ out.writeThrowable(failure);
+ }
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ if (failure == null) {
+ ingestDocument.toXContent(builder, params);
+ } else {
+ ElasticsearchException.renderThrowable(builder, params, failure);
+ }
+ builder.endObject();
+ return builder;
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/NoOpShardStateActionListener.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentResult.java
index ed0a7f56b9..7e7682bc25 100644
--- a/core/src/main/java/org/elasticsearch/cluster/action/shard/NoOpShardStateActionListener.java
+++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentResult.java
@@ -16,8 +16,11 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.cluster.action.shard;
+package org.elasticsearch.action.ingest;

+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.ToXContent;
+
-public class NoOpShardStateActionListener implements ShardStateAction.Listener {
+public interface SimulateDocumentResult<T extends SimulateDocumentResult> extends Writeable<T>, ToXContent {
}
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java
new file mode 100644
index 0000000000..d9d705f101
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Holds the result of what a pipeline did to a sample document via the simulate API. Unlike {@link SimulateDocumentBaseResult},
+ * this result class holds the intermediate result that each processor produced for the sample document.
+ */
+public final class SimulateDocumentVerboseResult implements SimulateDocumentResult<SimulateDocumentVerboseResult> {
+
+ private static final SimulateDocumentVerboseResult PROTOTYPE = new SimulateDocumentVerboseResult(Collections.emptyList());
+
+ private final List<SimulateProcessorResult> processorResults;
+
+ public SimulateDocumentVerboseResult(List<SimulateProcessorResult> processorResults) {
+ this.processorResults = processorResults;
+ }
+
+ public List<SimulateProcessorResult> getProcessorResults() {
+ return processorResults;
+ }
+
+ public static SimulateDocumentVerboseResult readSimulateDocumentVerboseResultFrom(StreamInput in) throws IOException {
+ return PROTOTYPE.readFrom(in);
+ }
+
+ @Override
+ public SimulateDocumentVerboseResult readFrom(StreamInput in) throws IOException {
+ int size = in.readVInt();
+ List<SimulateProcessorResult> processorResults = new ArrayList<>();
+ for (int i = 0; i < size; i++) {
+ processorResults.add(new SimulateProcessorResult(in));
+ }
+ return new SimulateDocumentVerboseResult(processorResults);
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeVInt(processorResults.size());
+ for (SimulateProcessorResult result : processorResults) {
+ result.writeTo(out);
+ }
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.startArray("processor_results");
+ for (SimulateProcessorResult processorResult : processorResults) {
+ processorResult.toXContent(builder, params);
+ }
+ builder.endArray();
+ builder.endObject();
+ return builder;
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java
new file mode 100644
index 0000000000..30efbe1b0f
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionRunnable;
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.core.Pipeline;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.ingest.core.CompoundProcessor;
+import org.elasticsearch.threadpool.ThreadPool;
+
+import java.util.ArrayList;
+import java.util.List;
+
+class SimulateExecutionService {
+
+ private static final String THREAD_POOL_NAME = ThreadPool.Names.MANAGEMENT;
+
+ private final ThreadPool threadPool;
+
+ SimulateExecutionService(ThreadPool threadPool) {
+ this.threadPool = threadPool;
+ }
+
+ void executeVerboseDocument(Processor processor, IngestDocument ingestDocument, List<SimulateProcessorResult> processorResultList) throws Exception {
+ if (processor instanceof CompoundProcessor) {
+ CompoundProcessor cp = (CompoundProcessor) processor;
+ try {
+ for (Processor p : cp.getProcessors()) {
+ executeVerboseDocument(p, ingestDocument, processorResultList);
+ }
+ } catch (Exception e) {
+ for (Processor p : cp.getOnFailureProcessors()) {
+ executeVerboseDocument(p, ingestDocument, processorResultList);
+ }
+ }
+ } else {
+ try {
+ processor.execute(ingestDocument);
+ processorResultList.add(new SimulateProcessorResult(processor.getTag(), new IngestDocument(ingestDocument)));
+ } catch (Exception e) {
+ processorResultList.add(new SimulateProcessorResult(processor.getTag(), e));
+ throw e;
+ }
+ }
+ }
+
+ SimulateDocumentResult executeDocument(Pipeline pipeline, IngestDocument ingestDocument, boolean verbose) {
+ if (verbose) {
+ List<SimulateProcessorResult> processorResultList = new ArrayList<>();
+ IngestDocument currentIngestDocument = new IngestDocument(ingestDocument);
+ CompoundProcessor pipelineProcessor = new CompoundProcessor(pipeline.getProcessors(), pipeline.getOnFailureProcessors());
+ try {
+ executeVerboseDocument(pipelineProcessor, currentIngestDocument, processorResultList);
+ } catch (Exception e) {
+ return new SimulateDocumentBaseResult(e);
+ }
+ return new SimulateDocumentVerboseResult(processorResultList);
+ } else {
+ try {
+ pipeline.execute(ingestDocument);
+ return new SimulateDocumentBaseResult(ingestDocument);
+ } catch (Exception e) {
+ return new SimulateDocumentBaseResult(e);
+ }
+ }
+ }
+
+ public void execute(SimulatePipelineRequest.Parsed request, ActionListener<SimulatePipelineResponse> listener) {
+ threadPool.executor(THREAD_POOL_NAME).execute(new ActionRunnable<SimulatePipelineResponse>(listener) {
+ @Override
+ protected void doRun() throws Exception {
+ List<SimulateDocumentResult> responses = new ArrayList<>();
+ for (IngestDocument ingestDocument : request.getDocuments()) {
+ responses.add(executeDocument(request.getPipeline(), ingestDocument, request.isVerbose()));
+ }
+ listener.onResponse(new SimulatePipelineResponse(request.getPipeline().getId(), request.isVerbose(), responses));
+ }
+ });
+ }
+}
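
A hedged sketch of how a caller inside this package could distinguish the two result shapes produced by executeDocument above: verbose mode yields one SimulateProcessorResult per processor that ran, while non-verbose mode yields a single base result (names below are illustrative):

    SimulateDocumentResult result = executionService.executeDocument(pipeline, document, true);
    if (result instanceof SimulateDocumentVerboseResult) {
        for (SimulateProcessorResult step : ((SimulateDocumentVerboseResult) result).getProcessorResults()) {
            // each step holds either a snapshot of the document after that processor or its failure
        }
    } else {
        SimulateDocumentBaseResult base = (SimulateDocumentBaseResult) result;
        // exactly one of base.getIngestDocument() / base.getFailure() is non-null
    }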
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineAction.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineAction.java
new file mode 100644
index 0000000000..c1d219a419
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineAction.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.Action;
+import org.elasticsearch.client.ElasticsearchClient;
+
+public class SimulatePipelineAction extends Action<SimulatePipelineRequest, SimulatePipelineResponse, SimulatePipelineRequestBuilder> {
+
+ public static final SimulatePipelineAction INSTANCE = new SimulatePipelineAction();
+ public static final String NAME = "cluster:admin/ingest/pipeline/simulate";
+
+ public SimulatePipelineAction() {
+ super(NAME);
+ }
+
+ @Override
+ public SimulatePipelineRequestBuilder newRequestBuilder(ElasticsearchClient client) {
+ return new SimulatePipelineRequestBuilder(client, this);
+ }
+
+ @Override
+ public SimulatePipelineResponse newResponse() {
+ return new SimulatePipelineResponse();
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java
new file mode 100644
index 0000000000..847de99f37
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java
@@ -0,0 +1,165 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.ingest.core.ConfigurationUtils;
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.core.Pipeline;
+import org.elasticsearch.ingest.PipelineStore;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+import static org.elasticsearch.action.ValidateActions.addValidationError;
+import static org.elasticsearch.ingest.core.IngestDocument.MetaData;
+
+public class SimulatePipelineRequest extends ActionRequest<SimulatePipelineRequest> {
+
+ private String id;
+ private boolean verbose;
+ private BytesReference source;
+
+ public SimulatePipelineRequest(BytesReference source) {
+ if (source == null) {
+ throw new IllegalArgumentException("source is missing");
+ }
+ this.source = source;
+ }
+
+ SimulatePipelineRequest() {
+ }
+
+ @Override
+ public ActionRequestValidationException validate() {
+ return null;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public boolean isVerbose() {
+ return verbose;
+ }
+
+ public void setVerbose(boolean verbose) {
+ this.verbose = verbose;
+ }
+
+ public BytesReference getSource() {
+ return source;
+ }
+
+ @Override
+ public void readFrom(StreamInput in) throws IOException {
+ super.readFrom(in);
+ id = in.readString();
+ verbose = in.readBoolean();
+ source = in.readBytesReference();
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ super.writeTo(out);
+ out.writeString(id);
+ out.writeBoolean(verbose);
+ out.writeBytesReference(source);
+ }
+
+ public static final class Fields {
+ static final String PIPELINE = "pipeline";
+ static final String DOCS = "docs";
+ static final String SOURCE = "_source";
+ }
+
+ static class Parsed {
+ private final List<IngestDocument> documents;
+ private final Pipeline pipeline;
+ private final boolean verbose;
+
+ Parsed(Pipeline pipeline, List<IngestDocument> documents, boolean verbose) {
+ this.pipeline = pipeline;
+ this.documents = Collections.unmodifiableList(documents);
+ this.verbose = verbose;
+ }
+
+ public Pipeline getPipeline() {
+ return pipeline;
+ }
+
+ public List<IngestDocument> getDocuments() {
+ return documents;
+ }
+
+ public boolean isVerbose() {
+ return verbose;
+ }
+ }
+
+ private static final Pipeline.Factory PIPELINE_FACTORY = new Pipeline.Factory();
+ static final String SIMULATED_PIPELINE_ID = "_simulate_pipeline";
+
+ static Parsed parseWithPipelineId(String pipelineId, Map<String, Object> config, boolean verbose, PipelineStore pipelineStore) {
+ if (pipelineId == null) {
+ throw new IllegalArgumentException("param [pipeline] is null");
+ }
+ Pipeline pipeline = pipelineStore.get(pipelineId);
+ List<IngestDocument> ingestDocumentList = parseDocs(config);
+ return new Parsed(pipeline, ingestDocumentList, verbose);
+ }
+
+ static Parsed parse(Map<String, Object> config, boolean verbose, PipelineStore pipelineStore) throws Exception {
+ Map<String, Object> pipelineConfig = ConfigurationUtils.readMap(null, null, config, Fields.PIPELINE);
+ Pipeline pipeline = PIPELINE_FACTORY.create(SIMULATED_PIPELINE_ID, pipelineConfig, pipelineStore.getProcessorFactoryRegistry());
+ List<IngestDocument> ingestDocumentList = parseDocs(config);
+ return new Parsed(pipeline, ingestDocumentList, verbose);
+ }
+
+ private static List<IngestDocument> parseDocs(Map<String, Object> config) {
+ List<Map<String, Object>> docs = ConfigurationUtils.readList(null, null, config, Fields.DOCS);
+ List<IngestDocument> ingestDocumentList = new ArrayList<>();
+ for (Map<String, Object> dataMap : docs) {
+ Map<String, Object> document = ConfigurationUtils.readMap(null, null, dataMap, Fields.SOURCE);
+ IngestDocument ingestDocument = new IngestDocument(ConfigurationUtils.readStringProperty(null, null, dataMap, MetaData.INDEX.getFieldName(), "_index"),
+ ConfigurationUtils.readStringProperty(null, null, dataMap, MetaData.TYPE.getFieldName(), "_type"),
+ ConfigurationUtils.readStringProperty(null, null, dataMap, MetaData.ID.getFieldName(), "_id"),
+ ConfigurationUtils.readOptionalStringProperty(null, null, dataMap, MetaData.ROUTING.getFieldName()),
+ ConfigurationUtils.readOptionalStringProperty(null, null, dataMap, MetaData.PARENT.getFieldName()),
+ ConfigurationUtils.readOptionalStringProperty(null, null, dataMap, MetaData.TIMESTAMP.getFieldName()),
+ ConfigurationUtils.readOptionalStringProperty(null, null, dataMap, MetaData.TTL.getFieldName()),
+ document);
+ ingestDocumentList.add(ingestDocument);
+ }
+ return ingestDocumentList;
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequestBuilder.java
new file mode 100644
index 0000000000..4a13fa111e
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequestBuilder.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.ActionRequestBuilder;
+import org.elasticsearch.client.ElasticsearchClient;
+import org.elasticsearch.common.bytes.BytesReference;
+
+public class SimulatePipelineRequestBuilder extends ActionRequestBuilder<SimulatePipelineRequest, SimulatePipelineResponse, SimulatePipelineRequestBuilder> {
+
+ public SimulatePipelineRequestBuilder(ElasticsearchClient client, SimulatePipelineAction action) {
+ super(client, action, new SimulatePipelineRequest());
+ }
+
+ public SimulatePipelineRequestBuilder(ElasticsearchClient client, SimulatePipelineAction action, BytesReference source) {
+ super(client, action, new SimulatePipelineRequest(source));
+ }
+
+ public SimulatePipelineRequestBuilder setId(String id) {
+ request.setId(id);
+ return this;
+ }
+
+ public SimulatePipelineRequestBuilder setVerbose(boolean verbose) {
+ request.setVerbose(verbose);
+ return this;
+ }
+
+}
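
A hedged end-to-end sketch tying the builder above to the body shape that SimulatePipelineRequest.parse expects ("pipeline", "docs", and a per-doc "_source"); every field value is illustrative:

    BytesReference body = new BytesArray(
        "{ \"pipeline\": { \"processors\": [] },"
        + "  \"docs\": [ { \"_index\": \"idx\", \"_type\": \"t\", \"_id\": \"1\","
        + "               \"_source\": { \"field\": \"value\" } } ] }");
    SimulatePipelineResponse response =
        new SimulatePipelineRequestBuilder(client, SimulatePipelineAction.INSTANCE, body)
            .setVerbose(true)
            .get();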
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java
new file mode 100644
index 0000000000..4337d0ee16
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java
@@ -0,0 +1,137 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.StatusToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentBuilderString;
+import org.elasticsearch.ingest.core.PipelineFactoryError;
+import org.elasticsearch.rest.RestStatus;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+public class SimulatePipelineResponse extends ActionResponse implements StatusToXContent {
+ private String pipelineId;
+ private boolean verbose;
+ private List<SimulateDocumentResult> results;
+ private PipelineFactoryError error;
+
+ public SimulatePipelineResponse() {
+
+ }
+
+ public SimulatePipelineResponse(PipelineFactoryError error) {
+ this.error = error;
+ }
+
+ public SimulatePipelineResponse(String pipelineId, boolean verbose, List<SimulateDocumentResult> responses) {
+ this.pipelineId = pipelineId;
+ this.verbose = verbose;
+ this.results = Collections.unmodifiableList(responses);
+ }
+
+ public String getPipelineId() {
+ return pipelineId;
+ }
+
+ public List<SimulateDocumentResult> getResults() {
+ return results;
+ }
+
+ public boolean isVerbose() {
+ return verbose;
+ }
+
+ public boolean isError() {
+ return error != null;
+ }
+
+ @Override
+ public RestStatus status() {
+ if (isError()) {
+ return RestStatus.BAD_REQUEST;
+ }
+ return RestStatus.OK;
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ super.writeTo(out);
+ out.writeBoolean(isError());
+ if (isError()) {
+ error.writeTo(out);
+ } else {
+ out.writeString(pipelineId);
+ out.writeBoolean(verbose);
+ out.writeVInt(results.size());
+ for (SimulateDocumentResult response : results) {
+ response.writeTo(out);
+ }
+ }
+ }
+
+ @Override
+ public void readFrom(StreamInput in) throws IOException {
+ super.readFrom(in);
+ boolean isError = in.readBoolean();
+ if (isError) {
+ error = new PipelineFactoryError();
+ error.readFrom(in);
+ } else {
+ this.pipelineId = in.readString();
+ this.verbose = in.readBoolean();
+ int responsesLength = in.readVInt();
+ results = new ArrayList<>();
+ for (int i = 0; i < responsesLength; i++) {
+ SimulateDocumentResult<?> simulateDocumentResult;
+ if (verbose) {
+ simulateDocumentResult = SimulateDocumentVerboseResult.readSimulateDocumentVerboseResultFrom(in);
+ } else {
+ simulateDocumentResult = SimulateDocumentBaseResult.readSimulateDocumentSimpleResult(in);
+ }
+ results.add(simulateDocumentResult);
+ }
+ }
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ if (isError()) {
+ error.toXContent(builder, params);
+ } else {
+ builder.startArray(Fields.DOCUMENTS);
+ for (SimulateDocumentResult response : results) {
+ response.toXContent(builder, params);
+ }
+ builder.endArray();
+ }
+ return builder;
+ }
+
+ static final class Fields {
+ static final XContentBuilderString DOCUMENTS = new XContentBuilderString("docs");
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java
new file mode 100644
index 0000000000..3d6586315a
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.HandledTransportAction;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.ingest.PipelineStore;
+import org.elasticsearch.ingest.core.PipelineFactoryError;
+import org.elasticsearch.ingest.processor.ConfigurationPropertyException;
+import org.elasticsearch.node.service.NodeService;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.TransportService;
+
+import java.util.Map;
+
+public class SimulatePipelineTransportAction extends HandledTransportAction<SimulatePipelineRequest, SimulatePipelineResponse> {
+
+ private final PipelineStore pipelineStore;
+ private final SimulateExecutionService executionService;
+
+ @Inject
+ public SimulatePipelineTransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, NodeService nodeService) {
+ super(settings, SimulatePipelineAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, SimulatePipelineRequest::new);
+ this.pipelineStore = nodeService.getIngestService().getPipelineStore();
+ this.executionService = new SimulateExecutionService(threadPool);
+ }
+
+ @Override
+ protected void doExecute(SimulatePipelineRequest request, ActionListener<SimulatePipelineResponse> listener) {
+ final Map<String, Object> source = XContentHelper.convertToMap(request.getSource(), false).v2();
+
+ final SimulatePipelineRequest.Parsed simulateRequest;
+ try {
+ if (request.getId() != null) {
+ simulateRequest = SimulatePipelineRequest.parseWithPipelineId(request.getId(), source, request.isVerbose(), pipelineStore);
+ } else {
+ simulateRequest = SimulatePipelineRequest.parse(source, request.isVerbose(), pipelineStore);
+ }
+ } catch (ConfigurationPropertyException e) {
+ listener.onResponse(new SimulatePipelineResponse(new PipelineFactoryError(e)));
+ return;
+ } catch (Exception e) {
+ listener.onFailure(e);
+ return;
+ }
+
+ executionService.execute(simulateRequest, listener);
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java b/core/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java
new file mode 100644
index 0000000000..6a38434d4c
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.ingest.core.IngestDocument;
+
+import java.io.IOException;
+
+public class SimulateProcessorResult implements Writeable<SimulateProcessorResult>, ToXContent {
+ private final String processorTag;
+ private final WriteableIngestDocument ingestDocument;
+ private final Exception failure;
+
+ public SimulateProcessorResult(StreamInput in) throws IOException {
+ this.processorTag = in.readString();
+ if (in.readBoolean()) {
+ this.failure = in.readThrowable();
+ this.ingestDocument = null;
+ } else {
+ this.ingestDocument = new WriteableIngestDocument(in);
+ this.failure = null;
+ }
+ }
+
+ public SimulateProcessorResult(String processorTag, IngestDocument ingestDocument) {
+ this.processorTag = processorTag;
+ this.ingestDocument = new WriteableIngestDocument(ingestDocument);
+ this.failure = null;
+ }
+
+ public SimulateProcessorResult(String processorTag, Exception failure) {
+ this.processorTag = processorTag;
+ this.failure = failure;
+ this.ingestDocument = null;
+ }
+
+ public IngestDocument getIngestDocument() {
+ if (ingestDocument == null) {
+ return null;
+ }
+ return ingestDocument.getIngestDocument();
+ }
+
+ public String getProcessorTag() {
+ return processorTag;
+ }
+
+ public Exception getFailure() {
+ return failure;
+ }
+
+ @Override
+ public SimulateProcessorResult readFrom(StreamInput in) throws IOException {
+ return new SimulateProcessorResult(in);
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeString(processorTag);
+ if (failure == null) {
+ out.writeBoolean(false);
+ ingestDocument.writeTo(out);
+ } else {
+ out.writeBoolean(true);
+ out.writeThrowable(failure);
+ }
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ if (processorTag != null) {
+ builder.field(AbstractProcessorFactory.TAG_KEY, processorTag);
+ }
+ if (failure == null) {
+ ingestDocument.toXContent(builder, params);
+ } else {
+ ElasticsearchException.renderThrowable(builder, params, failure);
+ }
+ builder.endObject();
+ return builder;
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/WritePipelineResponse.java b/core/src/main/java/org/elasticsearch/action/ingest/WritePipelineResponse.java
new file mode 100644
index 0000000000..2df9195039
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/WritePipelineResponse.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.ingest.core.PipelineFactoryError;
+
+import java.io.IOException;
+
+public class WritePipelineResponse extends AcknowledgedResponse {
+ private PipelineFactoryError error;
+
+ WritePipelineResponse() {
+
+ }
+
+ public WritePipelineResponse(boolean acknowledged) {
+ super(acknowledged);
+ if (!isAcknowledged()) {
+ error = new PipelineFactoryError("pipeline write is not acknowledged");
+ }
+ }
+
+ public WritePipelineResponse(PipelineFactoryError error) {
+ super(false);
+ this.error = error;
+ }
+
+ public PipelineFactoryError getError() {
+ return error;
+ }
+
+ @Override
+ public void readFrom(StreamInput in) throws IOException {
+ super.readFrom(in);
+ readAcknowledged(in);
+ if (!isAcknowledged()) {
+ error = new PipelineFactoryError();
+ error.readFrom(in);
+ }
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ super.writeTo(out);
+ writeAcknowledged(out);
+ if (!isAcknowledged()) {
+ error.writeTo(out);
+ }
+ }
+}
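
Note the wire invariant above: the error is only written and read when acknowledged is false. A hedged sketch (the error message is illustrative):

    WritePipelineResponse ok = new WritePipelineResponse(true);
    // serializes as: acknowledged=true, no error block
    WritePipelineResponse failed = new WritePipelineResponse(new PipelineFactoryError("bad processor config"));
    // serializes as: acknowledged=false followed by the error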
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/WritePipelineResponseRestListener.java b/core/src/main/java/org/elasticsearch/action/ingest/WritePipelineResponseRestListener.java
new file mode 100644
index 0000000000..1b2629c3ad
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/WritePipelineResponseRestListener.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.rest.RestChannel;
+import org.elasticsearch.rest.action.support.AcknowledgedRestListener;
+
+import java.io.IOException;
+
+public class WritePipelineResponseRestListener extends AcknowledgedRestListener<WritePipelineResponse> {
+
+ public WritePipelineResponseRestListener(RestChannel channel) {
+ super(channel);
+ }
+
+ @Override
+ protected void addCustomFields(XContentBuilder builder, WritePipelineResponse response) throws IOException {
+ if (!response.isAcknowledged()) {
+ response.getError().toXContent(builder, null);
+ }
+ }
+}
+
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java b/core/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java
new file mode 100644
index 0000000000..342e4bd3a3
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java
@@ -0,0 +1,105 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentBuilderString;
+import org.elasticsearch.ingest.core.IngestDocument;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Objects;
+
+final class WriteableIngestDocument implements Writeable<WriteableIngestDocument>, ToXContent {
+
+ private final IngestDocument ingestDocument;
+
+ WriteableIngestDocument(IngestDocument ingestDocument) {
+ assert ingestDocument != null;
+ this.ingestDocument = ingestDocument;
+ }
+
+ WriteableIngestDocument(StreamInput in) throws IOException {
+ Map<String, Object> sourceAndMetadata = in.readMap();
+ @SuppressWarnings("unchecked")
+ Map<String, String> ingestMetadata = (Map<String, String>) in.readGenericValue();
+ this.ingestDocument = new IngestDocument(sourceAndMetadata, ingestMetadata);
+ }
+
+ IngestDocument getIngestDocument() {
+ return ingestDocument;
+ }
+
+
+ @Override
+ public WriteableIngestDocument readFrom(StreamInput in) throws IOException {
+ return new WriteableIngestDocument(in);
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeMap(ingestDocument.getSourceAndMetadata());
+ out.writeGenericValue(ingestDocument.getIngestMetadata());
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject("doc");
+ Map<IngestDocument.MetaData, String> metadataMap = ingestDocument.extractMetadata();
+ for (Map.Entry<IngestDocument.MetaData, String> metadata : metadataMap.entrySet()) {
+ builder.field(metadata.getKey().getFieldName(), metadata.getValue());
+ }
+ builder.field("_source", ingestDocument.getSourceAndMetadata());
+ builder.startObject("_ingest");
+ for (Map.Entry<String, String> ingestMetadata : ingestDocument.getIngestMetadata().entrySet()) {
+ builder.field(ingestMetadata.getKey(), ingestMetadata.getValue());
+ }
+ builder.endObject();
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ WriteableIngestDocument that = (WriteableIngestDocument) o;
+ return Objects.equals(ingestDocument, that.ingestDocument);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(ingestDocument);
+ }
+
+ @Override
+ public String toString() {
+ return ingestDocument.toString();
+ }
+}
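
For reference, toXContent above renders each document roughly as the sketch below (values illustrative; the "_ingest" block mirrors whatever ingest metadata the document carries, commonly a timestamp):

    "doc" : {
      "_index" : "idx",
      "_type" : "t",
      "_id" : "1",
      "_source" : { ... },
      "_ingest" : { "timestamp" : "..." }
    }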
diff --git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequest.java b/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequest.java
index 47f39cef0b..e69da6bf51 100644
--- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequest.java
@@ -66,7 +66,6 @@ public class PercolateRequest extends BroadcastRequest<PercolateRequest> impleme
}
PercolateRequest(PercolateRequest request, BytesReference docSource) {
- super(request);
this.indices = request.indices();
this.documentType = request.documentType();
this.routing = request.routing();
@@ -274,7 +273,7 @@ public class PercolateRequest extends BroadcastRequest<PercolateRequest> impleme
source = in.readBytesReference();
docSource = in.readBytesReference();
if (in.readBoolean()) {
- getRequest = new GetRequest(null);
+ getRequest = new GetRequest();
getRequest.readFrom(in);
}
onlyCount = in.readBoolean();
diff --git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateShardResponse.java b/core/src/main/java/org/elasticsearch/action/percolate/PercolateShardResponse.java
index 1ca5e24496..e3fc737c2c 100644
--- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateShardResponse.java
+++ b/core/src/main/java/org/elasticsearch/action/percolate/PercolateShardResponse.java
@@ -57,7 +57,7 @@ public class PercolateShardResponse extends BroadcastShardResponse {
}
public PercolateShardResponse(TopDocs topDocs, Map<Integer, String> ids, Map<Integer, Map<String, HighlightField>> hls, PercolateContext context) {
- super(new ShardId(context.shardTarget().getIndex(), context.shardTarget().getShardId()));
+ super(context.indexShard().shardId());
this.topDocs = topDocs;
this.ids = ids;
this.hls = hls;
diff --git a/core/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java b/core/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java
index bf7b9e5307..d86d91c654 100644
--- a/core/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java
+++ b/core/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java
@@ -97,7 +97,7 @@ public class TransportMultiPercolateAction extends HandledTransportAction<MultiP
}
if (!existingDocsRequests.isEmpty()) {
- final MultiGetRequest multiGetRequest = new MultiGetRequest(request);
+ final MultiGetRequest multiGetRequest = new MultiGetRequest();
for (GetRequest getRequest : existingDocsRequests) {
multiGetRequest.add(
new MultiGetRequest.Item(getRequest.index(), getRequest.type(), getRequest.id())
@@ -200,7 +200,7 @@ public class TransportMultiPercolateAction extends HandledTransportAction<MultiP
ShardId shardId = shard.shardId();
TransportShardMultiPercolateAction.Request requests = requestsByShard.get(shardId);
if (requests == null) {
- requestsByShard.put(shardId, requests = new TransportShardMultiPercolateAction.Request(multiPercolateRequest, shardId.getIndex(), shardId.getId(), percolateRequest.preference()));
+ requestsByShard.put(shardId, requests = new TransportShardMultiPercolateAction.Request(shardId.getIndexName(), shardId.getId(), percolateRequest.preference()));
}
logger.trace("Adding shard[{}] percolate request for item[{}]", shardId, slot);
requests.add(new TransportShardMultiPercolateAction.Request.Item(slot, new PercolateShardRequest(shardId, percolateRequest)));
diff --git a/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java b/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java
index fdac839e14..0edce177be 100644
--- a/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java
+++ b/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java
@@ -41,6 +41,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.engine.DocumentMissingException;
import org.elasticsearch.percolator.PercolateException;
import org.elasticsearch.percolator.PercolatorService;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@@ -70,11 +71,11 @@ public class TransportPercolateAction extends TransportBroadcastAction<Percolate
}
@Override
- protected void doExecute(final PercolateRequest request, final ActionListener<PercolateResponse> listener) {
+ protected void doExecute(Task task, final PercolateRequest request, final ActionListener<PercolateResponse> listener) {
request.startTime = System.currentTimeMillis();
if (request.getRequest() != null) {
//create a new get request to make sure it has the same headers and context as the original percolate request
- GetRequest getRequest = new GetRequest(request.getRequest(), request);
+ GetRequest getRequest = new GetRequest(request.getRequest());
getAction.execute(getRequest, new ActionListener<GetResponse>() {
@Override
public void onResponse(GetResponse getResponse) {
@@ -84,7 +85,7 @@ public class TransportPercolateAction extends TransportBroadcastAction<Percolate
}
BytesReference docSource = getResponse.getSourceAsBytesRef();
- TransportPercolateAction.super.doExecute(new PercolateRequest(request, docSource), listener);
+ TransportPercolateAction.super.doExecute(task, new PercolateRequest(request, docSource), listener);
}
@Override
@@ -93,7 +94,7 @@ public class TransportPercolateAction extends TransportBroadcastAction<Percolate
}
});
} else {
- super.doExecute(request, listener);
+ super.doExecute(task, request, listener);
}
}
@@ -150,7 +151,7 @@ public class TransportPercolateAction extends TransportBroadcastAction<Percolate
} else {
PercolatorService.ReduceResult result = null;
try {
- result = percolatorService.reduce(onlyCount, shardResults, request);
+ result = percolatorService.reduce(onlyCount, shardResults);
} catch (IOException e) {
throw new ElasticsearchException("error during reduce phase", e);
}
diff --git a/core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java b/core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java
index c2ae538085..0732d4d406 100644
--- a/core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java
+++ b/core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java
@@ -117,8 +117,8 @@ public class TransportShardMultiPercolateAction extends TransportSingleShardActi
public Request() {
}
- Request(MultiPercolateRequest multiPercolateRequest, String concreteIndex, int shardId, String preference) {
- super(multiPercolateRequest, concreteIndex);
+ Request(String concreteIndex, int shardId, String preference) {
+ super(concreteIndex);
this.shardId = shardId;
this.preference = preference;
this.items = new ArrayList<>();
diff --git a/core/src/main/java/org/elasticsearch/action/search/ClearScrollRequest.java b/core/src/main/java/org/elasticsearch/action/search/ClearScrollRequest.java
index b390b77504..17343e8691 100644
--- a/core/src/main/java/org/elasticsearch/action/search/ClearScrollRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/search/ClearScrollRequest.java
@@ -37,17 +37,6 @@ public class ClearScrollRequest extends ActionRequest<ClearScrollRequest> {
private List<String> scrollIds;
- public ClearScrollRequest() {
- }
-
- /**
- * Creates a clear scroll request caused by some other request, which is provided as an
- * argument so that its headers and context can be copied to the new request
- */
- public ClearScrollRequest(ActionRequest request) {
- super(request);
- }
-
public List<String> getScrollIds() {
return scrollIds;
}
diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java
index 8014e4acb6..10a1ad2efc 100644
--- a/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java
@@ -80,8 +80,7 @@ public class SearchRequest extends ActionRequest<SearchRequest> implements Indic
* Copy constructor that creates a new search request that is a copy of the one provided as an argument.
* The new request will inherit though headers and context from the original request that caused it.
*/
- public SearchRequest(SearchRequest searchRequest, ActionRequest originalRequest) {
- super(originalRequest);
+ public SearchRequest(SearchRequest searchRequest) {
this.searchType = searchRequest.searchType;
this.indices = searchRequest.indices;
this.routing = searchRequest.routing;
@@ -95,15 +94,6 @@ public class SearchRequest extends ActionRequest<SearchRequest> implements Indic
}
/**
- * Constructs a new search request starting from the provided request, meaning that it will
- * inherit its headers and context
- */
- public SearchRequest(ActionRequest request) {
- super(request);
- this.source = new SearchSourceBuilder();
- }
-
- /**
* Constructs a new search request against the indices. No indices provided here means that search
* will run against all indices.
*/
diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java
index 1557c266bd..07d7b2fa3d 100644
--- a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java
+++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java
@@ -344,6 +344,15 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
}
/**
+ * Set the sort values that indicate which docs this request should "search after".
+ *
+ */
+ public SearchRequestBuilder searchAfter(Object[] values) {
+ sourceBuilder().searchAfter(values);
+ return this;
+ }
+
+ /**
* Applies when sorting, and controls if scores will be tracked as well. Defaults to
* <tt>false</tt>.
*/
@@ -391,27 +400,27 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
/**
* Clears all rescorers on the builder and sets the first one. To use multiple rescore windows use
- * {@link #addRescorer(org.elasticsearch.search.rescore.RescoreBuilder.Rescorer, int)}.
+ * {@link #addRescorer(org.elasticsearch.search.rescore.RescoreBuilder, int)}.
*
* @param rescorer rescorer configuration
* @return this for chaining
*/
- public SearchRequestBuilder setRescorer(RescoreBuilder.Rescorer rescorer) {
+ public SearchRequestBuilder setRescorer(RescoreBuilder<?> rescorer) {
sourceBuilder().clearRescorers();
return addRescorer(rescorer);
}
/**
* Clears all rescorers on the builder and sets the first one. To use multiple rescore windows use
- * {@link #addRescorer(org.elasticsearch.search.rescore.RescoreBuilder.Rescorer, int)}.
+ * {@link #addRescorer(org.elasticsearch.search.rescore.RescoreBuilder, int)}.
*
* @param rescorer rescorer configuration
* @param window rescore window
* @return this for chaining
*/
- public SearchRequestBuilder setRescorer(RescoreBuilder.Rescorer rescorer, int window) {
+ public SearchRequestBuilder setRescorer(RescoreBuilder rescorer, int window) {
sourceBuilder().clearRescorers();
- return addRescorer(rescorer, window);
+ return addRescorer(rescorer.windowSize(window));
}
/**
@@ -420,8 +429,8 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
* @param rescorer rescorer configuration
* @return this for chaining
*/
- public SearchRequestBuilder addRescorer(RescoreBuilder.Rescorer rescorer) {
- sourceBuilder().addRescorer(new RescoreBuilder(rescorer));
+ public SearchRequestBuilder addRescorer(RescoreBuilder<?> rescorer) {
+ sourceBuilder().addRescorer(rescorer);
return this;
}
@@ -432,8 +441,8 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
* @param window rescore window
* @return this for chaining
*/
- public SearchRequestBuilder addRescorer(RescoreBuilder.Rescorer rescorer, int window) {
- sourceBuilder().addRescorer(new RescoreBuilder(rescorer).windowSize(window));
+ public SearchRequestBuilder addRescorer(RescoreBuilder<?> rescorer, int window) {
+ sourceBuilder().addRescorer(rescorer.windowSize(window));
return this;
}
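
A hedged sketch combining the new searchAfter(...) method with the reworked rescorer setters, assuming RescoreBuilder keeps its usual queryRescorer(...) factory; index, field, and sort values are illustrative:

    SearchResponse response = client.prepareSearch("idx")
        .setQuery(QueryBuilders.matchQuery("field", "value"))
        .addSort("timestamp", SortOrder.DESC)
        .searchAfter(new Object[] { 1456789012345L })
        .setRescorer(RescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field", "value")), 50)
        .get();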
diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchScrollRequest.java b/core/src/main/java/org/elasticsearch/action/search/SearchScrollRequest.java
index 537d61ac55..c1ff788dae 100644
--- a/core/src/main/java/org/elasticsearch/action/search/SearchScrollRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/search/SearchScrollRequest.java
@@ -46,14 +46,6 @@ public class SearchScrollRequest extends ActionRequest<SearchScrollRequest> {
this.scrollId = scrollId;
}
- /**
- * Creates a scroll request caused by some other request, which is provided as an
- * argument so that its headers and context can be copied to the new request
- */
- public SearchScrollRequest(ActionRequest request) {
- super(request);
- }
-
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java b/core/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java
index fd2b257e53..1849073563 100644
--- a/core/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java
+++ b/core/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java
@@ -59,7 +59,7 @@ public class TransportMultiSearchAction extends HandledTransportAction<MultiSear
final AtomicInteger counter = new AtomicInteger(responses.length());
for (int i = 0; i < responses.length(); i++) {
final int index = i;
- SearchRequest searchRequest = new SearchRequest(request.requests().get(i), request);
+ SearchRequest searchRequest = new SearchRequest(request.requests().get(i));
searchAction.execute(searchRequest, new ActionListener<SearchResponse>() {
@Override
public void onResponse(SearchResponse searchResponse) {
diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryAndFetchAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryAndFetchAction.java
index 7244a1ff58..6d22264815 100644
--- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryAndFetchAction.java
+++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryAndFetchAction.java
@@ -135,7 +135,7 @@ public class TransportSearchDfsQueryAndFetchAction extends TransportSearchTypeAc
public void doRun() throws IOException {
sortedShardList = searchPhaseController.sortDocs(true, queryFetchResults);
final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryFetchResults,
- queryFetchResults, request);
+ queryFetchResults);
String scrollId = null;
if (request.scroll() != null) {
scrollId = TransportSearchHelper.buildScrollId(request.searchType(), firstResults, null);
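
The same mechanical change repeats through the remaining TransportSearch*Action classes below: SearchPhaseController.merge() drops its trailing request argument, consistent with the removal of per-request header/context copying elsewhere in this diff. Sketched with abbreviated parameter names:

    // before: merge(ScoreDoc[] sortedDocs, queryResults, fetchResults, request)
    // after:  merge(ScoreDoc[] sortedDocs, queryResults, fetchResults)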
diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java
index faaf121554..31128cea96 100644
--- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java
+++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java
@@ -211,7 +211,7 @@ public class TransportSearchDfsQueryThenFetchAction extends TransportSearchTypeA
@Override
public void doRun() throws IOException {
final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryResults,
- fetchResults, request);
+ fetchResults);
String scrollId = null;
if (request.scroll() != null) {
scrollId = TransportSearchHelper.buildScrollId(request.searchType(), firstResults, null);
diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryAndFetchAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryAndFetchAction.java
index 3c4f5419f0..0e1e8db551 100644
--- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryAndFetchAction.java
+++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryAndFetchAction.java
@@ -82,7 +82,7 @@ public class TransportSearchQueryAndFetchAction extends TransportSearchTypeActio
boolean useScroll = request.scroll() != null;
sortedShardList = searchPhaseController.sortDocs(useScroll, firstResults);
final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, firstResults,
- firstResults, request);
+ firstResults);
String scrollId = null;
if (request.scroll() != null) {
scrollId = buildScrollId(request.searchType(), firstResults, null);
diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java
index 1d8589e7a9..c63287d995 100644
--- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java
+++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java
@@ -146,7 +146,7 @@ public class TransportSearchQueryThenFetchAction extends TransportSearchTypeActi
@Override
public void doRun() throws IOException {
final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, firstResults,
- fetchResults, request);
+ fetchResults);
String scrollId = null;
if (request.scroll() != null) {
scrollId = TransportSearchHelper.buildScrollId(request.searchType(), firstResults, null);
diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryAndFetchAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryAndFetchAction.java
index 2a953f9b73..b718baaa29 100644
--- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryAndFetchAction.java
+++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryAndFetchAction.java
@@ -193,7 +193,7 @@ public class TransportSearchScrollQueryAndFetchAction extends AbstractComponent
private void innerFinishHim() throws Exception {
ScoreDoc[] sortedShardList = searchPhaseController.sortDocs(true, queryFetchResults);
final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryFetchResults,
- queryFetchResults, request);
+ queryFetchResults);
String scrollId = null;
if (request.scroll() != null) {
scrollId = request.scrollId();
diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java
index 8dd9c139c3..93a28b29aa 100644
--- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java
+++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java
@@ -208,7 +208,7 @@ public class TransportSearchScrollQueryThenFetchAction extends AbstractComponent
IntArrayList docIds = entry.value;
final QuerySearchResult querySearchResult = queryResults.get(entry.index);
ScoreDoc lastEmittedDoc = lastEmittedDocPerShard[entry.index];
- ShardFetchRequest shardFetchRequest = new ShardFetchRequest(request, querySearchResult.id(), docIds, lastEmittedDoc);
+ ShardFetchRequest shardFetchRequest = new ShardFetchRequest(querySearchResult.id(), docIds, lastEmittedDoc);
DiscoveryNode node = nodes.get(querySearchResult.shardTarget().nodeId());
searchService.sendExecuteFetchScroll(node, shardFetchRequest, new ActionListener<FetchSearchResult>() {
@Override
@@ -243,7 +243,7 @@ public class TransportSearchScrollQueryThenFetchAction extends AbstractComponent
}
private void innerFinishHim() {
- InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryResults, fetchResults, request);
+ InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryResults, fetchResults);
String scrollId = null;
if (request.scroll() != null) {
scrollId = request.scrollId();
diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java
index 9d1004ccd5..042534a2e7 100644
--- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java
+++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java
@@ -163,7 +163,7 @@ public abstract class TransportSearchTypeAction extends TransportAction<SearchRe
if (node == null) {
onFirstPhaseResult(shardIndex, shard, null, shardIt, new NoShardAvailableActionException(shardIt.shardId()));
} else {
- String[] filteringAliases = indexNameExpressionResolver.filteringAliases(clusterState, shard.index(), request.indices());
+ String[] filteringAliases = indexNameExpressionResolver.filteringAliases(clusterState, shard.index().getName(), request.indices());
sendExecuteFirstPhase(node, internalSearchRequest(shard, shardsIts.size(), request, filteringAliases, startTime()), new ActionListener<FirstResult>() {
@Override
public void onResponse(FirstResult result) {
diff --git a/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java b/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java
index 6bc62cf83b..0ed9857855 100644
--- a/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java
+++ b/core/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java
@@ -143,7 +143,7 @@ public class TransportSuggestAction extends TransportBroadcastAction<SuggestRequ
throw new IllegalArgumentException("suggest content missing");
}
final SuggestionSearchContext context = suggestPhase.parseElement().parseInternal(parser, indexService.mapperService(),
- indexService.fieldData(), request.shardId().getIndex(), request.shardId().id(), request);
+ indexService.fieldData(), request.shardId().getIndexName(), request.shardId().id());
final Suggest result = suggestPhase.execute(context, searcher.searcher());
return new ShardSuggestResponse(request.shardId(), result);
}
diff --git a/core/src/main/java/org/elasticsearch/action/support/ActionFilter.java b/core/src/main/java/org/elasticsearch/action/support/ActionFilter.java
index 6c08eec323..d753eda4c6 100644
--- a/core/src/main/java/org/elasticsearch/action/support/ActionFilter.java
+++ b/core/src/main/java/org/elasticsearch/action/support/ActionFilter.java
@@ -40,13 +40,15 @@ public interface ActionFilter {
* Enables filtering the execution of an action on the request side, either by sending a response through the
* {@link ActionListener} or by continuing the execution through the given {@link ActionFilterChain chain}
*/
- void apply(Task task, String action, ActionRequest<?> request, ActionListener<?> listener, ActionFilterChain chain);
+ <Request extends ActionRequest<Request>, Response extends ActionResponse> void apply(Task task, String action, Request request,
+ ActionListener<Response> listener, ActionFilterChain<Request, Response> chain);
/**
* Enables filtering the execution of an action on the response side, either by sending a response through the
* {@link ActionListener} or by continuing the execution through the given {@link ActionFilterChain chain}
*/
- void apply(String action, ActionResponse response, ActionListener<?> listener, ActionFilterChain chain);
+ <Response extends ActionResponse> void apply(String action, Response response, ActionListener<Response> listener,
+ ActionFilterChain<?, Response> chain);
/**
* A simple base class for injectable action filters that spares the implementation from handling the
@@ -60,7 +62,8 @@ public interface ActionFilter {
}
@Override
- public final void apply(Task task, String action, ActionRequest<?> request, ActionListener<?> listener, ActionFilterChain chain) {
+ public final <Request extends ActionRequest<Request>, Response extends ActionResponse> void apply(Task task, String action, Request request,
+ ActionListener<Response> listener, ActionFilterChain<Request, Response> chain) {
if (apply(action, request, listener)) {
chain.proceed(task, action, request, listener);
}
@@ -73,7 +76,8 @@ public interface ActionFilter {
protected abstract boolean apply(String action, ActionRequest<?> request, ActionListener<?> listener);
@Override
- public final void apply(String action, ActionResponse response, ActionListener<?> listener, ActionFilterChain chain) {
+ public final <Response extends ActionResponse> void apply(String action, Response response, ActionListener<Response> listener,
+ ActionFilterChain<?, Response> chain) {
if (apply(action, response, listener)) {
chain.proceed(action, response, listener);
}
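
Since ActionFilter is now generic over the request and response types, implementations no longer need raw types or unchecked casts. A compilable sketch of a pass-through filter against the new signatures (imports elided; the class name is illustrative, and order() is an existing ActionFilter member unchanged by this patch):

    public class NoopFilter implements ActionFilter {
        @Override
        public int order() {
            return 0;
        }

        @Override
        public <Request extends ActionRequest<Request>, Response extends ActionResponse> void apply(
                Task task, String action, Request request,
                ActionListener<Response> listener, ActionFilterChain<Request, Response> chain) {
            // No cast needed: request and listener are already correctly typed.
            chain.proceed(task, action, request, listener);
        }

        @Override
        public <Response extends ActionResponse> void apply(
                String action, Response response, ActionListener<Response> listener,
                ActionFilterChain<?, Response> chain) {
            chain.proceed(action, response, listener);
        }
    }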
diff --git a/core/src/main/java/org/elasticsearch/action/support/ActionFilterChain.java b/core/src/main/java/org/elasticsearch/action/support/ActionFilterChain.java
index 9b1ae9b269..54f55e187a 100644
--- a/core/src/main/java/org/elasticsearch/action/support/ActionFilterChain.java
+++ b/core/src/main/java/org/elasticsearch/action/support/ActionFilterChain.java
@@ -27,17 +27,17 @@ import org.elasticsearch.tasks.Task;
/**
 * A filter chain allowing the processing of a transport action request to continue
*/
-public interface ActionFilterChain {
+public interface ActionFilterChain<Request extends ActionRequest<Request>, Response extends ActionResponse> {
/**
* Continue processing the request. Should only be called if a response has not been sent through
* the given {@link ActionListener listener}
*/
- void proceed(Task task, final String action, final ActionRequest request, final ActionListener listener);
+ void proceed(Task task, final String action, final Request request, final ActionListener<Response> listener);
/**
* Continue processing the response. Should only be called if a response has not been sent through
* the given {@link ActionListener listener}
*/
- void proceed(final String action, final ActionResponse response, final ActionListener listener);
+ void proceed(final String action, final Response response, final ActionListener<Response> listener);
}
diff --git a/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java b/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java
index c8f0485245..d557475534 100644
--- a/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java
+++ b/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java
@@ -23,91 +23,109 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.regex.Regex;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.MapperService;
+import java.util.ArrayList;
+import java.util.List;
+
/**
* Encapsulates the logic of whether a new index should be automatically created when
 * a write operation is about to happen in a nonexistent index.
*/
public final class AutoCreateIndex {
- private final boolean needToCheck;
- private final boolean globallyDisabled;
+ public static final Setting<AutoCreate> AUTO_CREATE_INDEX_SETTING = new Setting<>("action.auto_create_index", "true", AutoCreate::new, false, Setting.Scope.CLUSTER);
+
private final boolean dynamicMappingDisabled;
- private final String[] matches;
- private final String[] matches2;
private final IndexNameExpressionResolver resolver;
+ private final AutoCreate autoCreate;
@Inject
public AutoCreateIndex(Settings settings, IndexNameExpressionResolver resolver) {
this.resolver = resolver;
dynamicMappingDisabled = !MapperService.INDEX_MAPPER_DYNAMIC_SETTING.get(settings);
- String value = settings.get("action.auto_create_index");
- if (value == null || Booleans.isExplicitTrue(value)) {
- needToCheck = true;
- globallyDisabled = false;
- matches = null;
- matches2 = null;
- } else if (Booleans.isExplicitFalse(value)) {
- needToCheck = false;
- globallyDisabled = true;
- matches = null;
- matches2 = null;
- } else {
- needToCheck = true;
- globallyDisabled = false;
- matches = Strings.commaDelimitedListToStringArray(value);
- matches2 = new String[matches.length];
- for (int i = 0; i < matches.length; i++) {
- matches2[i] = matches[i].substring(1);
- }
- }
+ this.autoCreate = AUTO_CREATE_INDEX_SETTING.get(settings);
}
/**
* Do we really need to check if an index should be auto created?
*/
public boolean needToCheck() {
- return this.needToCheck;
+ return this.autoCreate.autoCreateIndex;
}
/**
* Should the index be auto created?
*/
public boolean shouldAutoCreate(String index, ClusterState state) {
- if (!needToCheck) {
+ if (autoCreate.autoCreateIndex == false) {
return false;
}
- boolean exists = resolver.hasIndexOrAlias(index, state);
- if (exists) {
+ if (dynamicMappingDisabled) {
return false;
}
- if (globallyDisabled || dynamicMappingDisabled) {
+ if (resolver.hasIndexOrAlias(index, state)) {
return false;
}
// no expressions set, default value of "true"
- if (matches == null) {
+ if (autoCreate.expressions.isEmpty()) {
return true;
}
- for (int i = 0; i < matches.length; i++) {
- char c = matches[i].charAt(0);
- if (c == '-') {
- if (Regex.simpleMatch(matches2[i], index)) {
- return false;
- }
- } else if (c == '+') {
- if (Regex.simpleMatch(matches2[i], index)) {
- return true;
- }
- } else {
- if (Regex.simpleMatch(matches[i], index)) {
- return true;
- }
+ for (Tuple<String, Boolean> expression : autoCreate.expressions) {
+ String indexExpression = expression.v1();
+ boolean include = expression.v2();
+ if (Regex.simpleMatch(indexExpression, index)) {
+ return include;
}
}
return false;
}
+
+ private static class AutoCreate {
+ private final boolean autoCreateIndex;
+ private final List<Tuple<String, Boolean>> expressions;
+
+ private AutoCreate(String value) {
+ boolean autoCreateIndex;
+ List<Tuple<String, Boolean>> expressions = new ArrayList<>();
+ try {
+ autoCreateIndex = Booleans.parseBooleanExact(value);
+ } catch (IllegalArgumentException ex) {
+ try {
+ String[] patterns = Strings.commaDelimitedListToStringArray(value);
+ for (String pattern : patterns) {
+ if (pattern == null || pattern.length() == 0) {
+ throw new IllegalArgumentException("Can't parse [" + value + "] for setting [action.auto_create_index] must be either [true, false, or a comma separated list of index patterns]");
+ }
+ Tuple<String, Boolean> expression;
+ if (pattern.startsWith("-")) {
+ if (pattern.length() == 1) {
+ throw new IllegalArgumentException("Can't parse [" + value + "] for setting [action.auto_create_index]; the expression must contain an index name after [-]");
+ }
+ expression = new Tuple<>(pattern.substring(1), false);
+ } else if (pattern.startsWith("+")) {
+ if (pattern.length() == 1) {
+ throw new IllegalArgumentException("Can't parse [" + value + "] for setting [action.auto_create_index]; the expression must contain an index name after [+]");
+ }
+ expression = new Tuple<>(pattern.substring(1), true);
+ } else {
+ expression = new Tuple<>(pattern, true);
+ }
+ expressions.add(expression);
+ }
+ autoCreateIndex = true;
+ } catch (IllegalArgumentException ex1) {
+ ex1.addSuppressed(ex);
+ throw ex1;
+ }
+ }
+ this.expressions = expressions;
+ this.autoCreateIndex = autoCreateIndex;
+ }
+ }
}
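
The rewritten class parses action.auto_create_index once, when the setting is read, into either a boolean or an ordered list of include/exclude patterns; the first matching expression wins and a bare pattern counts as an include. The resulting semantics, sketched for a hypothetical value of "+aaa*,-bbb*,ccc":

    // shouldAutoCreate("aaa1", state) -> true   (matches +aaa*)
    // shouldAutoCreate("bbb1", state) -> false  (matches -bbb*)
    // shouldAutoCreate("ccc",  state) -> true   (bare pattern = include)
    // shouldAutoCreate("ddd",  state) -> false  (no expression matched)
    // Malformed values such as "-" or an empty pattern now fail fast in the
    // AutoCreate constructor instead of surfacing at index time.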
diff --git a/core/src/main/java/org/elasticsearch/action/support/ChildTaskActionRequest.java b/core/src/main/java/org/elasticsearch/action/support/ChildTaskActionRequest.java
new file mode 100644
index 0000000000..18e885e760
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/support/ChildTaskActionRequest.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.support;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.tasks.Task;
+
+import java.io.IOException;
+
+/**
+ * Base class for action requests that can have associated child tasks
+ */
+public abstract class ChildTaskActionRequest<Request extends ActionRequest<Request>> extends ActionRequest<Request> {
+
+ private String parentTaskNode;
+
+ private long parentTaskId;
+
+ protected ChildTaskActionRequest() {
+
+ }
+
+ public void setParentTask(String parentTaskNode, long parentTaskId) {
+ this.parentTaskNode = parentTaskNode;
+ this.parentTaskId = parentTaskId;
+ }
+
+ @Override
+ public void readFrom(StreamInput in) throws IOException {
+ super.readFrom(in);
+ parentTaskNode = in.readOptionalString();
+ parentTaskId = in.readLong();
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ super.writeTo(out);
+ out.writeOptionalString(parentTaskNode);
+ out.writeLong(parentTaskId);
+ }
+
+ @Override
+ public Task createTask(long id, String type, String action) {
+ return new Task(id, type, action, this::getDescription, parentTaskNode, parentTaskId);
+ }
+
+}
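
The new base class threads a parent task through serialization so that work fanned out from one node can be attributed to the task that spawned it. A usage sketch; childRequest stands for any subclass (as MasterNodeRequest and ReplicationRequest become later in this diff):

    // Coordinating node, before sending the child request onward:
    childRequest.setParentTask(clusterService.localNode().getId(), task.getId());
    // Receiving node: createTask() rebuilds a Task carrying the parent node id
    // and parent task id that readFrom() deserialized.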
diff --git a/core/src/main/java/org/elasticsearch/action/support/ChildTaskRequest.java b/core/src/main/java/org/elasticsearch/action/support/ChildTaskRequest.java
index c231028f0b..59ebb47670 100644
--- a/core/src/main/java/org/elasticsearch/action/support/ChildTaskRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/support/ChildTaskRequest.java
@@ -19,10 +19,8 @@
package org.elasticsearch.action.support;
-import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.tasks.ChildTask;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportRequest;
@@ -38,11 +36,6 @@ public class ChildTaskRequest extends TransportRequest {
private long parentTaskId;
protected ChildTaskRequest() {
-
- }
-
- protected ChildTaskRequest(TransportRequest parentTaskRequest) {
- super(parentTaskRequest);
}
public void setParentTask(String parentTaskNode, long parentTaskId) {
@@ -66,6 +59,6 @@ public class ChildTaskRequest extends TransportRequest {
@Override
public Task createTask(long id, String type, String action) {
- return new ChildTask(id, type, action, this::getDescription, parentTaskNode, parentTaskId);
+ return new Task(id, type, action, this::getDescription, parentTaskNode, parentTaskId);
}
}
diff --git a/core/src/main/java/org/elasticsearch/action/support/DefaultShardOperationFailedException.java b/core/src/main/java/org/elasticsearch/action/support/DefaultShardOperationFailedException.java
index 17d8bcfb99..676949e367 100644
--- a/core/src/main/java/org/elasticsearch/action/support/DefaultShardOperationFailedException.java
+++ b/core/src/main/java/org/elasticsearch/action/support/DefaultShardOperationFailedException.java
@@ -48,7 +48,7 @@ public class DefaultShardOperationFailedException implements ShardOperationFaile
}
public DefaultShardOperationFailedException(ElasticsearchException e) {
- this.index = e.getIndex();
+ this.index = e.getIndex() == null ? null : e.getIndex().getName();
this.shardId = e.getShardId().id();
this.reason = e;
this.status = e.status();
diff --git a/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java b/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java
index a439117fef..337d98fce6 100644
--- a/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java
@@ -44,13 +44,14 @@ public abstract class HandledTransportAction<Request extends ActionRequest<Reque
class TransportHandler implements TransportRequestHandler<Request> {
@Override
- public final void messageReceived(final Request request, final TransportChannel channel, Task task) throws Exception {
- messageReceived(request, channel);
+ public final void messageReceived(Request request, TransportChannel channel) throws Exception {
+ throw new UnsupportedOperationException("the task parameter is required for this operation");
}
@Override
- public final void messageReceived(Request request, TransportChannel channel) throws Exception {
- execute(request, new ActionListener<Response>() {
+ public final void messageReceived(final Request request, final TransportChannel channel, Task task) throws Exception {
+ // We already got the task created on the netty layer - no need to create it again on the transport layer
+ execute(task, request, new ActionListener<Response>() {
@Override
public void onResponse(Response response) {
try {
diff --git a/core/src/main/java/org/elasticsearch/action/support/IndicesOptions.java b/core/src/main/java/org/elasticsearch/action/support/IndicesOptions.java
index 793dbe0840..2bc49f7e9f 100644
--- a/core/src/main/java/org/elasticsearch/action/support/IndicesOptions.java
+++ b/core/src/main/java/org/elasticsearch/action/support/IndicesOptions.java
@@ -26,7 +26,7 @@ import org.elasticsearch.rest.RestRequest;
import java.io.IOException;
import java.util.Map;
-import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringArrayValue;
/**
@@ -195,8 +195,8 @@ public class IndicesOptions {
//note that allowAliasesToMultipleIndices is not exposed, always true (only for internal use)
return fromOptions(
- nodeBooleanValue(ignoreUnavailableString, defaultSettings.ignoreUnavailable()),
- nodeBooleanValue(allowNoIndicesString, defaultSettings.allowNoIndices()),
+ lenientNodeBooleanValue(ignoreUnavailableString, defaultSettings.ignoreUnavailable()),
+ lenientNodeBooleanValue(allowNoIndicesString, defaultSettings.allowNoIndices()),
expandWildcardsOpen,
expandWildcardsClosed,
defaultSettings.allowAliasesToMultipleIndices(),
diff --git a/core/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java b/core/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java
index 2a2bdd7a06..6ed52e1ac4 100644
--- a/core/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java
+++ b/core/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java
@@ -50,7 +50,7 @@ public final class ThreadedActionListener<Response> implements ActionListener<Re
this.threadPool = threadPool;
// Should the action listener be threaded or not by default. Action listeners are automatically threaded for client
// nodes and the transport client, in order to make sure client-side code is not executed on IO threads.
- this.threadedListener = DiscoveryNode.clientNode(settings) || TransportClient.CLIENT_TYPE.equals(settings.get(Client.CLIENT_TYPE_SETTING));
+ this.threadedListener = DiscoveryNode.clientNode(settings) || TransportClient.CLIENT_TYPE.equals(Client.CLIENT_TYPE_SETTING_S.get(settings));
}
public <Response> ActionListener<Response> wrap(ActionListener<Response> listener) {
diff --git a/core/src/main/java/org/elasticsearch/action/support/TransportAction.java b/core/src/main/java/org/elasticsearch/action/support/TransportAction.java
index 0f4b26eb6e..79dbf85db6 100644
--- a/core/src/main/java/org/elasticsearch/action/support/TransportAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/TransportAction.java
@@ -67,6 +67,11 @@ public abstract class TransportAction<Request extends ActionRequest<Request>, Re
return future;
}
+ /**
+ * Use this method when the transport action call should result in the creation of a new task associated with the call.
+ *
+ * This is the typical behavior.
+ */
public final Task execute(Request request, ActionListener<Response> listener) {
/*
* While this version of execute could delegate to the TaskListener
@@ -118,8 +123,10 @@ public abstract class TransportAction<Request extends ActionRequest<Request>, Re
return task;
}
- private final void execute(Task task, Request request, ActionListener<Response> listener) {
-
+ /**
+ * Use this method when the transport action should continue to run in the context of the current task
+ */
+ public final void execute(Task task, Request request, ActionListener<Response> listener) {
ActionRequestValidationException validationException = request.validate();
if (validationException != null) {
listener.onFailure(validationException);
@@ -134,7 +141,7 @@ public abstract class TransportAction<Request extends ActionRequest<Request>, Re
listener.onFailure(t);
}
} else {
- RequestFilterChain requestFilterChain = new RequestFilterChain<>(this, logger);
+ RequestFilterChain<Request, Response> requestFilterChain = new RequestFilterChain<>(this, logger);
requestFilterChain.proceed(task, actionName, request, listener);
}
}
@@ -145,7 +152,8 @@ public abstract class TransportAction<Request extends ActionRequest<Request>, Re
protected abstract void doExecute(Request request, ActionListener<Response> listener);
- private static class RequestFilterChain<Request extends ActionRequest<Request>, Response extends ActionResponse> implements ActionFilterChain {
+ private static class RequestFilterChain<Request extends ActionRequest<Request>, Response extends ActionResponse>
+ implements ActionFilterChain<Request, Response> {
private final TransportAction<Request, Response> action;
private final AtomicInteger index = new AtomicInteger();
@@ -156,14 +164,15 @@ public abstract class TransportAction<Request extends ActionRequest<Request>, Re
this.logger = logger;
}
- @Override @SuppressWarnings("unchecked")
- public void proceed(Task task, String actionName, ActionRequest request, ActionListener listener) {
+ @Override
+ public void proceed(Task task, String actionName, Request request, ActionListener<Response> listener) {
int i = index.getAndIncrement();
try {
if (i < this.action.filters.length) {
this.action.filters[i].apply(task, actionName, request, listener, this);
} else if (i == this.action.filters.length) {
- this.action.doExecute(task, (Request) request, new FilteredActionListener<Response>(actionName, listener, new ResponseFilterChain(this.action.filters, logger)));
+ this.action.doExecute(task, request, new FilteredActionListener<Response>(actionName, listener,
+ new ResponseFilterChain<>(this.action.filters, logger)));
} else {
listener.onFailure(new IllegalStateException("proceed was called too many times"));
}
@@ -174,12 +183,13 @@ public abstract class TransportAction<Request extends ActionRequest<Request>, Re
}
@Override
- public void proceed(String action, ActionResponse response, ActionListener listener) {
+ public void proceed(String action, Response response, ActionListener<Response> listener) {
assert false : "request filter chain should never be called on the response side";
}
}
- private static class ResponseFilterChain implements ActionFilterChain {
+ private static class ResponseFilterChain<Request extends ActionRequest<Request>, Response extends ActionResponse>
+ implements ActionFilterChain<Request, Response> {
private final ActionFilter[] filters;
private final AtomicInteger index;
@@ -192,12 +202,12 @@ public abstract class TransportAction<Request extends ActionRequest<Request>, Re
}
@Override
- public void proceed(Task task, String action, ActionRequest request, ActionListener listener) {
+ public void proceed(Task task, String action, Request request, ActionListener<Response> listener) {
assert false : "response filter chain should never be called on the request side";
}
- @Override @SuppressWarnings("unchecked")
- public void proceed(String action, ActionResponse response, ActionListener listener) {
+ @Override
+ public void proceed(String action, Response response, ActionListener<Response> listener) {
int i = index.decrementAndGet();
try {
if (i >= 0) {
@@ -217,10 +227,10 @@ public abstract class TransportAction<Request extends ActionRequest<Request>, Re
private static class FilteredActionListener<Response extends ActionResponse> implements ActionListener<Response> {
private final String actionName;
- private final ActionListener listener;
- private final ResponseFilterChain chain;
+ private final ActionListener<Response> listener;
+ private final ResponseFilterChain<?, Response> chain;
- private FilteredActionListener(String actionName, ActionListener listener, ResponseFilterChain chain) {
+ private FilteredActionListener(String actionName, ActionListener<Response> listener, ResponseFilterChain<?, Response> chain) {
this.actionName = actionName;
this.listener = listener;
this.chain = chain;
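
TransportAction now exposes two entry points: execute(Request, ActionListener) registers a fresh task for the call, while the newly public execute(Task, Request, ActionListener) continues under an existing task, as the transport handlers above do once the network layer has already created one. Sketch:

    // Typical client-side call: let the action create and register its own task.
    Task task = transportAction.execute(request, listener);

    // Transport layer: a task already exists for the incoming message, so run
    // under it instead of registering a second one.
    transportAction.execute(existingTask, request, listener);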
diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastRequest.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastRequest.java
index 96576d5809..508581050a 100644
--- a/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastRequest.java
@@ -37,11 +37,6 @@ public class BroadcastRequest<Request extends BroadcastRequest<Request>> extends
private IndicesOptions indicesOptions = IndicesOptions.strictExpandOpenAndForbidClosed();
public BroadcastRequest() {
-
- }
-
- protected BroadcastRequest(ActionRequest<?> originalRequest) {
- super(originalRequest);
}
protected BroadcastRequest(String[] indices) {
diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardRequest.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardRequest.java
index 8e22a90b9d..76cb04f71f 100644
--- a/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardRequest.java
@@ -21,18 +21,18 @@ package org.elasticsearch.action.support.broadcast;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.OriginalIndices;
+import org.elasticsearch.action.support.ChildTaskRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.index.shard.ShardId;
-import org.elasticsearch.transport.TransportRequest;
import java.io.IOException;
/**
*
*/
-public abstract class BroadcastShardRequest extends TransportRequest implements IndicesRequest {
+public abstract class BroadcastShardRequest extends ChildTaskRequest implements IndicesRequest {
private ShardId shardId;
@@ -42,7 +42,6 @@ public abstract class BroadcastShardRequest extends TransportRequest implements
}
protected BroadcastShardRequest(ShardId shardId, BroadcastRequest request) {
- super(request);
this.shardId = shardId;
this.originalIndices = new OriginalIndices(request);
}
diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardResponse.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardResponse.java
index bf7d271bb6..ad79285051 100644
--- a/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardResponse.java
+++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardResponse.java
@@ -42,7 +42,7 @@ public abstract class BroadcastShardResponse extends TransportResponse {
}
public String getIndex() {
- return this.shardId.getIndex();
+ return this.shardId.getIndexName();
}
public int getShardId() {
diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java
index be851cfa7e..c36f4cd1a0 100644
--- a/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java
@@ -35,6 +35,7 @@ import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.BaseTransportResponseHandler;
import org.elasticsearch.transport.TransportChannel;
@@ -69,8 +70,13 @@ public abstract class TransportBroadcastAction<Request extends BroadcastRequest<
}
@Override
- protected void doExecute(Request request, ActionListener<Response> listener) {
- new AsyncBroadcastAction(request, listener).start();
+ protected void doExecute(Task task, Request request, ActionListener<Response> listener) {
+ new AsyncBroadcastAction(task, request, listener).start();
+ }
+
+ @Override
+ protected final void doExecute(Request request, ActionListener<Response> listener) {
+ throw new UnsupportedOperationException("the task parameter is required for this operation");
}
protected abstract Response newResponse(Request request, AtomicReferenceArray shardsResponses, ClusterState clusterState);
@@ -93,6 +99,7 @@ public abstract class TransportBroadcastAction<Request extends BroadcastRequest<
protected class AsyncBroadcastAction {
+ private final Task task;
private final Request request;
private final ActionListener<Response> listener;
private final ClusterState clusterState;
@@ -102,7 +109,8 @@ public abstract class TransportBroadcastAction<Request extends BroadcastRequest<
private final AtomicInteger counterOps = new AtomicInteger();
private final AtomicReferenceArray shardsResponses;
- protected AsyncBroadcastAction(Request request, ActionListener<Response> listener) {
+ protected AsyncBroadcastAction(Task task, Request request, ActionListener<Response> listener) {
+ this.task = task;
this.request = request;
this.listener = listener;
@@ -158,6 +166,7 @@ public abstract class TransportBroadcastAction<Request extends BroadcastRequest<
} else {
try {
final ShardRequest shardRequest = newShardRequest(shardIt.size(), shard, request);
+ shardRequest.setParentTask(clusterService.localNode().getId(), task.getId());
DiscoveryNode node = nodes.get(shard.currentNodeId());
if (node == null) {
// no node connected, act as failure
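
Broadcast actions now receive the Task and stamp it onto every per-shard request before fan-out, linking shard-level work back to the broadcast task (TransportBroadcastByNodeAction below does the same, with a null check). The pattern as used above:

    final ShardRequest shardRequest = newShardRequest(shardIt.size(), shard, request);
    // Record which node and task spawned this shard-level request.
    shardRequest.setParentTask(clusterService.localNode().getId(), task.getId());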
diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java
index 613de1aa92..1096f4525c 100644
--- a/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java
@@ -25,6 +25,7 @@ import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.NoShardAvailableActionException;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.ChildTaskRequest;
import org.elasticsearch.action.support.DefaultShardOperationFailedException;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.action.support.IndicesOptions;
@@ -44,6 +45,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.BaseTransportResponseHandler;
import org.elasticsearch.transport.NodeShouldNotConnectException;
@@ -118,7 +120,7 @@ public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRe
FailedNodeException exception = (FailedNodeException) responses.get(i);
totalShards += nodes.get(exception.nodeId()).size();
for (ShardRouting shard : nodes.get(exception.nodeId())) {
- exceptions.add(new DefaultShardOperationFailedException(shard.getIndex(), shard.getId(), exception));
+ exceptions.add(new DefaultShardOperationFailedException(shard.getIndexName(), shard.getId(), exception));
}
} else {
NodeResponse response = (NodeResponse) responses.get(i);
@@ -127,7 +129,7 @@ public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRe
successfulShards += response.getSuccessfulShards();
for (BroadcastShardOperationFailedException throwable : response.getExceptions()) {
if (!TransportActions.isShardNotAvailableException(throwable)) {
- exceptions.add(new DefaultShardOperationFailedException(throwable.getIndex(), throwable.getShardId().getId(), throwable));
+ exceptions.add(new DefaultShardOperationFailedException(throwable.getShardId().getIndexName(), throwable.getShardId().getId(), throwable));
}
}
}
@@ -206,11 +208,17 @@ public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRe
protected abstract ClusterBlockException checkRequestBlock(ClusterState state, Request request, String[] concreteIndices);
@Override
- protected void doExecute(Request request, ActionListener<Response> listener) {
- new AsyncAction(request, listener).start();
+ protected final void doExecute(Request request, ActionListener<Response> listener) {
+ throw new UnsupportedOperationException("the task parameter is required for this operation");
+ }
+
+ @Override
+ protected void doExecute(Task task, Request request, ActionListener<Response> listener) {
+ new AsyncAction(task, request, listener).start();
}
protected class AsyncAction {
+ private final Task task;
private final Request request;
private final ActionListener<Response> listener;
private final ClusterState clusterState;
@@ -220,7 +228,8 @@ public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRe
private final AtomicInteger counter = new AtomicInteger();
private List<NoShardAvailableActionException> unavailableShardExceptions = new ArrayList<>();
- protected AsyncAction(Request request, ActionListener<Response> listener) {
+ protected AsyncAction(Task task, Request request, ActionListener<Response> listener) {
+ this.task = task;
this.request = request;
this.listener = listener;
@@ -290,6 +299,9 @@ public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRe
private void sendNodeRequest(final DiscoveryNode node, List<ShardRouting> shards, final int nodeIndex) {
try {
NodeRequest nodeRequest = new NodeRequest(node.getId(), request, shards);
+ if (task != null) {
+ nodeRequest.setParentTask(clusterService.localNode().id(), task.getId());
+ }
transportService.sendRequest(node, transportNodeBroadcastAction, nodeRequest, new BaseTransportResponseHandler<NodeResponse>() {
@Override
public NodeResponse newInstance() {
@@ -406,7 +418,7 @@ public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRe
}
} catch (Throwable t) {
BroadcastShardOperationFailedException e = new BroadcastShardOperationFailedException(shardRouting.shardId(), "operation " + actionName + " failed", t);
- e.setIndex(shardRouting.getIndex());
+ e.setIndex(shardRouting.getIndexName());
e.setShard(shardRouting.shardId());
shardResults[shardIndex] = e;
if (TransportActions.isShardNotAvailableException(t)) {
@@ -422,7 +434,7 @@ public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRe
}
}
- public class NodeRequest extends TransportRequest implements IndicesRequest {
+ public class NodeRequest extends ChildTaskRequest implements IndicesRequest {
private String nodeId;
private List<ShardRouting> shards;
@@ -433,7 +445,6 @@ public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRe
}
public NodeRequest(String nodeId, Request request, List<ShardRouting> shards) {
- super(request);
this.indicesLevelRequest = request;
this.shards = shards;
this.nodeId = nodeId;
diff --git a/core/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java b/core/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java
index b142d0dae8..5d45b7b11e 100644
--- a/core/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java
@@ -42,10 +42,6 @@ public abstract class AcknowledgedRequest<Request extends MasterNodeRequest<Requ
protected AcknowledgedRequest() {
}
- protected AcknowledgedRequest(ActionRequest<?> request) {
- super(request);
- }
-
/**
* Allows to set the timeout
* @param timeout timeout as a string (e.g. 1s)
diff --git a/core/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java b/core/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java
index d954caba8e..93d34e09ac 100644
--- a/core/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java
@@ -20,6 +20,7 @@
package org.elasticsearch.action.support.master;
import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.support.ChildTaskActionRequest;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.TimeValue;
@@ -29,18 +30,13 @@ import java.io.IOException;
/**
 * A base request for master-based operations.
*/
-public abstract class MasterNodeRequest<Request extends MasterNodeRequest<Request>> extends ActionRequest<Request> {
+public abstract class MasterNodeRequest<Request extends MasterNodeRequest<Request>> extends ChildTaskActionRequest<Request> {
public static final TimeValue DEFAULT_MASTER_NODE_TIMEOUT = TimeValue.timeValueSeconds(30);
protected TimeValue masterNodeTimeout = DEFAULT_MASTER_NODE_TIMEOUT;
protected MasterNodeRequest() {
-
- }
-
- protected MasterNodeRequest(ActionRequest<?> request) {
- super(request);
}
/**
diff --git a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java
index e0c9c9b8b6..3b8c751f93 100644
--- a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java
@@ -113,6 +113,9 @@ public abstract class TransportMasterNodeAction<Request extends MasterNodeReques
AsyncSingleAction(Task task, Request request, ActionListener<Response> listener) {
this.task = task;
this.request = request;
+ if (task != null) {
+ request.setParentTask(clusterService.localNode().getId(), task.getId());
+ }
// TODO do we really need to wrap it in a listener? the handlers should be cheap
if ((listener instanceof ThreadedActionListener) == false) {
listener = new ThreadedActionListener<>(logger, threadPool, ThreadPool.Names.LISTENER, listener);
@@ -121,7 +124,7 @@ public abstract class TransportMasterNodeAction<Request extends MasterNodeReques
}
public void start() {
- this.observer = new ClusterStateObserver(clusterService, request.masterNodeTimeout(), logger);
+ this.observer = new ClusterStateObserver(clusterService, request.masterNodeTimeout(), logger, threadPool.getThreadContext());
doStart();
}
diff --git a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java
index 681b9f0a64..ed08e5bdba 100644
--- a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java
@@ -23,6 +23,7 @@ import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@@ -36,22 +37,19 @@ import java.util.function.Supplier;
public abstract class TransportMasterNodeReadAction<Request extends MasterNodeReadRequest<Request>, Response extends ActionResponse>
extends TransportMasterNodeAction<Request, Response> {
- public static final String FORCE_LOCAL_SETTING = "action.master.force_local";
+ public static final Setting<Boolean> FORCE_LOCAL_SETTING = Setting.boolSetting("action.master.force_local", false, false, Setting.Scope.CLUSTER);
- private Boolean forceLocal;
+ private final boolean forceLocal;
protected TransportMasterNodeReadAction(Settings settings, String actionName, TransportService transportService,
ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters,
IndexNameExpressionResolver indexNameExpressionResolver, Supplier<Request> request) {
super(settings, actionName, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver,request);
- this.forceLocal = settings.getAsBoolean(FORCE_LOCAL_SETTING, null);
+ this.forceLocal = FORCE_LOCAL_SETTING.get(settings);
}
@Override
protected final boolean localExecute(Request request) {
- if (forceLocal != null) {
- return forceLocal;
- }
- return request.local();
+ return forceLocal || request.local();
}
}
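
Note the subtle behavior change here: previously an explicitly configured action.master.force_local: false overrode request.local() and forced execution via the master, whereas now false is simply the default and defers to the request:

    // old: force_local = false, request.local() = true  -> routed to master
    // new: force_local = false, request.local() = true  -> executed locally
    boolean local = forceLocal || request.local();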
diff --git a/core/src/main/java/org/elasticsearch/action/support/nodes/BaseNodeRequest.java b/core/src/main/java/org/elasticsearch/action/support/nodes/BaseNodeRequest.java
index 9371605e8a..9631fe659d 100644
--- a/core/src/main/java/org/elasticsearch/action/support/nodes/BaseNodeRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/support/nodes/BaseNodeRequest.java
@@ -36,8 +36,7 @@ public abstract class BaseNodeRequest extends ChildTaskRequest {
}
- protected BaseNodeRequest(BaseNodesRequest request, String nodeId) {
- super(request);
+ protected BaseNodeRequest(String nodeId) {
this.nodeId = nodeId;
}
diff --git a/core/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesRequest.java b/core/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesRequest.java
index 41a890ee37..5176ae5251 100644
--- a/core/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesRequest.java
@@ -43,11 +43,6 @@ public abstract class BaseNodesRequest<Request extends BaseNodesRequest<Request>
}
- protected BaseNodesRequest(ActionRequest<?> request, String... nodesIds) {
- super(request);
- this.nodesIds = nodesIds;
- }
-
protected BaseNodesRequest(String... nodesIds) {
this.nodesIds = nodesIds;
}
diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/BasicReplicationRequest.java b/core/src/main/java/org/elasticsearch/action/support/replication/BasicReplicationRequest.java
index 3778275d40..274d13bf4c 100644
--- a/core/src/main/java/org/elasticsearch/action/support/replication/BasicReplicationRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/support/replication/BasicReplicationRequest.java
@@ -30,22 +30,13 @@ import org.elasticsearch.index.shard.ShardId;
*/
public class BasicReplicationRequest extends ReplicationRequest<BasicReplicationRequest> {
public BasicReplicationRequest() {
-
- }
-
- /**
- * Creates a new request that inherits headers and context from the request
- * provided as argument.
- */
- public BasicReplicationRequest(ActionRequest<?> request) {
- super(request);
}
/**
* Creates a new request with resolved shard id
*/
- public BasicReplicationRequest(ActionRequest<?> request, ShardId shardId) {
- super(request, shardId);
+ public BasicReplicationRequest(ShardId shardId) {
+ super(shardId);
}
/**
diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java b/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java
index a6c9b8f65a..ed23017410 100644
--- a/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java
@@ -23,6 +23,7 @@ import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.WriteConsistencyLevel;
+import org.elasticsearch.action.support.ChildTaskActionRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
@@ -38,7 +39,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
/**
*
*/
-public abstract class ReplicationRequest<Request extends ReplicationRequest<Request>> extends ActionRequest<Request> implements IndicesRequest {
+public abstract class ReplicationRequest<Request extends ReplicationRequest<Request>> extends ChildTaskActionRequest<Request> implements IndicesRequest {
public static final TimeValue DEFAULT_TIMEOUT = new TimeValue(1, TimeUnit.MINUTES);
@@ -58,35 +59,20 @@ public abstract class ReplicationRequest<Request extends ReplicationRequest<Requ
}
- /**
- * Creates a new request that inherits headers and context from the request provided as argument.
- */
- public ReplicationRequest(ActionRequest<?> request) {
- super(request);
- }
/**
* Creates a new request with resolved shard id
*/
- public ReplicationRequest(ActionRequest<?> request, ShardId shardId) {
- super(request);
- this.index = shardId.getIndex();
+ public ReplicationRequest(ShardId shardId) {
+ this.index = shardId.getIndexName();
this.shardId = shardId;
}
/**
* Copy constructor that creates a new request that is a copy of the one provided as an argument.
- */
- protected ReplicationRequest(Request request) {
- this(request, request);
- }
-
- /**
- * Copy constructor that creates a new request that is a copy of the one provided as an argument.
 * The new request will inherit the headers and context from the original request that caused it.
*/
- protected ReplicationRequest(Request request, ActionRequest<?> originalRequest) {
- super(originalRequest);
+ protected ReplicationRequest(Request request) {
this.timeout = request.timeout();
this.index = request.index();
this.consistencyLevel = request.consistencyLevel();
diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java
index ab88d73d3b..fd649f046e 100644
--- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java
@@ -40,6 +40,7 @@ import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.CountDown;
import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@@ -67,8 +68,14 @@ public abstract class TransportBroadcastReplicationAction<Request extends Broadc
this.clusterService = clusterService;
}
+
+ @Override
+ protected final void doExecute(final Request request, final ActionListener<Response> listener) {
+ throw new UnsupportedOperationException("the task parameter is required for this operation");
+ }
+
@Override
- protected void doExecute(final Request request, final ActionListener<Response> listener) {
+ protected void doExecute(Task task, Request request, ActionListener<Response> listener) {
final ClusterState clusterState = clusterService.state();
List<ShardId> shards = shards(request, clusterState);
final CopyOnWriteArrayList<ShardResponse> shardsResponses = new CopyOnWriteArrayList();
@@ -90,13 +97,13 @@ public abstract class TransportBroadcastReplicationAction<Request extends Broadc
@Override
public void onFailure(Throwable e) {
logger.trace("{}: got failure from {}", actionName, shardId);
- int totalNumCopies = clusterState.getMetaData().index(shardId.index().getName()).getNumberOfReplicas() + 1;
+ int totalNumCopies = clusterState.getMetaData().index(shardId.getIndexName()).getNumberOfReplicas() + 1;
ShardResponse shardResponse = newShardResponse();
ReplicationResponse.ShardInfo.Failure[] failures;
if (TransportActions.isShardNotAvailableException(e)) {
failures = new ReplicationResponse.ShardInfo.Failure[0];
} else {
- ReplicationResponse.ShardInfo.Failure failure = new ReplicationResponse.ShardInfo.Failure(shardId.index().name(), shardId.id(), null, e, ExceptionsHelper.status(e), true);
+ ReplicationResponse.ShardInfo.Failure failure = new ReplicationResponse.ShardInfo.Failure(shardId, null, e, ExceptionsHelper.status(e), true);
failures = new ReplicationResponse.ShardInfo.Failure[totalNumCopies];
Arrays.fill(failures, failure);
}
@@ -107,12 +114,14 @@ public abstract class TransportBroadcastReplicationAction<Request extends Broadc
}
}
};
- shardExecute(request, shardId, shardActionListener);
+ shardExecute(task, request, shardId, shardActionListener);
}
}
- protected void shardExecute(Request request, ShardId shardId, ActionListener<ShardResponse> shardActionListener) {
- replicatedBroadcastShardAction.execute(newShardRequest(request, shardId), shardActionListener);
+ protected void shardExecute(Task task, Request request, ShardId shardId, ActionListener<ShardResponse> shardActionListener) {
+ ShardRequest shardRequest = newShardRequest(request, shardId);
+ shardRequest.setParentTask(clusterService.localNode().getId(), task.getId());
+ replicatedBroadcastShardAction.execute(shardRequest, shardActionListener);
}
/**
@@ -154,7 +163,7 @@ public abstract class TransportBroadcastReplicationAction<Request extends Broadc
shardFailures = new ArrayList<>();
}
for (ReplicationResponse.ShardInfo.Failure failure : shardResponse.getShardInfo().getFailures()) {
- shardFailures.add(new DefaultShardOperationFailedException(new BroadcastShardOperationFailedException(new ShardId(failure.index(), failure.shardId()), failure.getCause())));
+ shardFailures.add(new DefaultShardOperationFailedException(new BroadcastShardOperationFailedException(failure.fullShardId(), failure.getCause())));
}
}
}
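The hunks above establish the task-management idiom this merge threads through every transport action: the task-less doExecute is sealed (final and throwing) so nothing can bypass task registration, the Task-bearing overload becomes the real entry point, and child shard requests are stamped with their parent via setParentTask. A minimal sketch of the idiom in isolation (class and type names are placeholders; only the two signatures and the exception message come from the diff):

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.tasks.Task;

    abstract class TaskAwareAction<Req, Resp> {
        // legacy entry point, sealed off so nothing can skip task registration
        protected final void doExecute(Req request, ActionListener<Resp> listener) {
            throw new UnsupportedOperationException("the task parameter is required for this operation");
        }

        // every execution path must now supply the Task tracking this operation
        protected abstract void doExecute(Task task, Req request, ActionListener<Resp> listener);
    }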
diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java
index b297220180..c40d3fb579 100644
--- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java
@@ -52,6 +52,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.VersionConflictEngineException;
import org.elasticsearch.index.shard.IndexShard;
@@ -60,6 +61,7 @@ import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.node.NodeClosedException;
import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.BaseTransportResponseHandler;
import org.elasticsearch.transport.ConnectTransportException;
@@ -133,8 +135,13 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ
}
@Override
- protected void doExecute(Request request, ActionListener<Response> listener) {
- new ReroutePhase(request, listener).run();
+ protected final void doExecute(Request request, ActionListener<Response> listener) {
+ throw new UnsupportedOperationException("the task parameter is required for this operation");
+ }
+
+ @Override
+ protected void doExecute(Task task, Request request, ActionListener<Response> listener) {
+ new ReroutePhase(task, request, listener).run();
}
protected abstract Response newResponseInstance();
@@ -243,8 +250,8 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ
class OperationTransportHandler implements TransportRequestHandler<Request> {
@Override
- public void messageReceived(final Request request, final TransportChannel channel) throws Exception {
- execute(request, new ActionListener<Response>() {
+ public void messageReceived(final Request request, final TransportChannel channel, Task task) throws Exception {
+ execute(task, request, new ActionListener<Response>() {
@Override
public void onResponse(Response result) {
try {
@@ -264,6 +271,11 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ
}
});
}
+
+ @Override
+ public void messageReceived(Request request, TransportChannel channel) throws Exception {
+ throw new UnsupportedOperationException("the task parameter is required for this operation");
+ }
}
class PrimaryOperationTransportHandler implements TransportRequestHandler<Request> {
@@ -297,7 +309,7 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ
private final TransportChannel channel;
// important: we pass null as a timeout as failing a replica is
// something we want to avoid at all costs
- private final ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger);
+ private final ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger, threadPool.getThreadContext());
AsyncReplicaAction(ReplicaRequest request, TransportChannel channel) {
this.request = request;
@@ -308,9 +320,11 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ
public void onFailure(Throwable t) {
if (t instanceof RetryOnReplicaException) {
logger.trace("Retrying operation on replica, action [{}], request [{}]", t, transportReplicaAction, request);
+ final ThreadContext.StoredContext context = threadPool.getThreadContext().newStoredContext();
observer.waitForNextChange(new ClusterStateObserver.Listener() {
@Override
public void onNewClusterState(ClusterState state) {
+ context.close();
// Forking a thread on local node via transport service so that custom transport services have an
// opportunity to execute custom logic before the replica operation begins
String extraMessage = "action [" + transportReplicaAction + "], request[" + request + "]";
@@ -339,7 +353,7 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ
}
}
private void failReplicaIfNeeded(Throwable t) {
- String index = request.shardId().getIndex();
+ String index = request.shardId().getIndex().getName();
int shardId = request.shardId().id();
logger.trace("failure on replica [{}][{}], action [{}], request [{}]", t, index, shardId, actionName, request);
if (ignoreReplicaException(t) == false) {
@@ -403,10 +417,13 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ
private final ClusterStateObserver observer;
private final AtomicBoolean finished = new AtomicBoolean();
- ReroutePhase(Request request, ActionListener<Response> listener) {
+ ReroutePhase(Task task, Request request, ActionListener<Response> listener) {
this.request = request;
+ if (task != null) {
+ this.request.setParentTask(clusterService.localNode().getId(), task.getId());
+ }
this.listener = listener;
- this.observer = new ClusterStateObserver(clusterService, request.timeout(), logger);
+ this.observer = new ClusterStateObserver(clusterService, request.timeout(), logger, threadPool.getThreadContext());
}
@Override
@@ -432,7 +449,7 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ
resolveRequest(state.metaData(), concreteIndex, request);
assert request.shardId() != null : "request shardId must be set in resolveRequest";
- IndexShardRoutingTable indexShard = state.getRoutingTable().shardRoutingTable(request.shardId().getIndex(), request.shardId().id());
+ IndexShardRoutingTable indexShard = state.getRoutingTable().shardRoutingTable(request.shardId());
final ShardRouting primary = indexShard.primaryShard();
if (primary == null || primary.active() == false) {
logger.trace("primary shard [{}] is not yet active, scheduling a retry: action [{}], request [{}], cluster state version [{}]", request.shardId(), actionName, request, state.version());
@@ -510,9 +527,11 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ
finishAsFailed(failure);
return;
}
+ final ThreadContext.StoredContext context = threadPool.getThreadContext().newStoredContext();
observer.waitForNextChange(new ClusterStateObserver.Listener() {
@Override
public void onNewClusterState(ClusterState state) {
+ context.close();
run();
}
@@ -523,6 +542,7 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ
@Override
public void onTimeout(TimeValue timeout) {
+ context.close();
// Try one more time...
run();
}
@@ -637,7 +657,7 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ
}
final int sizeActive;
final int requiredNumber;
- IndexRoutingTable indexRoutingTable = state.getRoutingTable().index(shardId.getIndex());
+ IndexRoutingTable indexRoutingTable = state.getRoutingTable().index(shardId.getIndexName());
if (indexRoutingTable != null) {
IndexShardRoutingTable shardRoutingTable = indexRoutingTable.shard(shardId.getId());
if (shardRoutingTable != null) {
@@ -702,7 +722,7 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ
}
protected Releasable getIndexShardOperationsCounter(ShardId shardId) {
- IndexService indexService = indicesService.indexServiceSafe(shardId.index().getName());
+ IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
IndexShard indexShard = indexService.getShard(shardId.id());
return new IndexShardReference(indexShard);
}
@@ -876,7 +896,7 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ
onReplicaFailure(nodeId, exp);
} else {
String message = String.format(Locale.ROOT, "failed to perform %s on replica on node %s", transportReplicaAction, node);
- logger.warn("{} {}", exp, shardId, message);
+ logger.warn("[{}] {}", exp, shardId, message);
shardStateAction.shardFailed(
shard,
indexUUID,
@@ -941,9 +961,7 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ
failuresArray = new ReplicationResponse.ShardInfo.Failure[shardReplicaFailures.size()];
for (Map.Entry<String, Throwable> entry : shardReplicaFailures.entrySet()) {
RestStatus restStatus = ExceptionsHelper.status(entry.getValue());
- failuresArray[slot++] = new ReplicationResponse.ShardInfo.Failure(
- shardId.getIndex(), shardId.getId(), entry.getKey(), entry.getValue(), restStatus, false
- );
+ failuresArray[slot++] = new ReplicationResponse.ShardInfo.Failure(shardId, entry.getKey(), entry.getValue(), restStatus, false);
}
} else {
failuresArray = ReplicationResponse.EMPTY;
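The ThreadContext changes in this file all follow one pattern: snapshot the caller's context before parking work on a ClusterStateObserver, then restore it (close() puts the stored context back) on whichever callback fires, so the retried operation runs under the original headers. A condensed sketch of that pattern, assuming only the observer API used above:

    import org.elasticsearch.cluster.ClusterState;
    import org.elasticsearch.cluster.ClusterStateObserver;
    import org.elasticsearch.common.unit.TimeValue;
    import org.elasticsearch.common.util.concurrent.ThreadContext;

    void retryOnNextClusterState(ClusterStateObserver observer, ThreadContext threadContext, Runnable retry) {
        // capture the current context; close() below restores it on the callback thread
        final ThreadContext.StoredContext context = threadContext.newStoredContext();
        observer.waitForNextChange(new ClusterStateObserver.Listener() {
            @Override
            public void onNewClusterState(ClusterState state) {
                context.close();
                retry.run();
            }

            @Override
            public void onClusterServiceClose() {
                // node is shutting down; nothing to restore
            }

            @Override
            public void onTimeout(TimeValue timeout) {
                context.close();
                retry.run(); // try one more time, mirroring ReroutePhase above
            }
        });
    }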
diff --git a/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java b/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java
index 74d9f3c701..97cc6b044a 100644
--- a/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java
@@ -124,7 +124,7 @@ public abstract class TransportInstanceSingleOperationAction<Request extends Ins
}
public void start() {
- this.observer = new ClusterStateObserver(clusterService, request.timeout(), logger);
+ this.observer = new ClusterStateObserver(clusterService, request.timeout(), logger, threadPool.getThreadContext());
doStart();
}
@@ -143,7 +143,7 @@ public abstract class TransportInstanceSingleOperationAction<Request extends Ins
request.concreteIndex(indexNameExpressionResolver.concreteSingleIndex(observer.observedState(), request));
// check if we need to execute, and if not, return
if (!resolveRequest(observer.observedState(), request, listener)) {
- listener.onFailure(new IllegalStateException(LoggerMessageFormat.format("{} request {} could not be resolved", new ShardId(request.index, request.shardId), actionName)));
+ listener.onFailure(new IllegalStateException(LoggerMessageFormat.format("[{}][{}] request {} could not be resolved", request.index, request.shardId, actionName)));
return;
}
blockException = checkRequestBlock(observer.observedState(), request);
@@ -217,7 +217,7 @@ public abstract class TransportInstanceSingleOperationAction<Request extends Ins
Throwable listenFailure = failure;
if (listenFailure == null) {
if (shardIt == null) {
- listenFailure = new UnavailableShardsException(new ShardId(request.concreteIndex(), -1), "Timeout waiting for [{}], request: {}", request.timeout(), actionName);
+ listenFailure = new UnavailableShardsException(request.concreteIndex(), -1, "Timeout waiting for [{}], request: {}", request.timeout(), actionName);
} else {
listenFailure = new UnavailableShardsException(shardIt.shardId(), "[{}] shardIt, [{}] active : Timeout waiting for [{}], request: {}", shardIt.size(), shardIt.sizeActive(), request.timeout(), actionName);
}
diff --git a/core/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardRequest.java b/core/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardRequest.java
index c0bb73ea85..499932fce6 100644
--- a/core/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardRequest.java
@@ -56,15 +56,6 @@ public abstract class SingleShardRequest<Request extends SingleShardRequest<Requ
this.index = index;
}
- protected SingleShardRequest(ActionRequest<?> request) {
- super(request);
- }
-
- protected SingleShardRequest(ActionRequest<?> request, String index) {
- super(request);
- this.index = index;
- }
-
/**
* @return a validation exception if the index property hasn't been set
*/
diff --git a/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java b/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java
index b7498bc8bd..5bb1880110 100644
--- a/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksRequest.java
@@ -26,7 +26,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.tasks.ChildTask;
import org.elasticsearch.tasks.Task;
import java.io.IOException;
@@ -65,15 +64,6 @@ public class BaseTasksRequest<Request extends BaseTasksRequest<Request>> extends
* Get information about tasks from nodes based on the node ids specified.
* If none are passed, information for all nodes will be returned.
*/
- public BaseTasksRequest(ActionRequest<?> request, String... nodesIds) {
- super(request);
- this.nodesIds = nodesIds;
- }
-
- /**
- * Get information about tasks from nodes based on the nodes ids specified.
- * If none are passed, information for all nodes will be returned.
- */
public BaseTasksRequest(String... nodesIds) {
this.nodesIds = nodesIds;
}
@@ -173,20 +163,13 @@ public class BaseTasksRequest<Request extends BaseTasksRequest<Request>> extends
if (actions() != null && actions().length > 0 && Regex.simpleMatch(actions(), task.getAction()) == false) {
return false;
}
- if (parentNode() != null || parentTaskId() != BaseTasksRequest.ALL_TASKS) {
- if (task instanceof ChildTask) {
- if (parentNode() != null) {
- if (parentNode().equals(((ChildTask) task).getParentNode()) == false) {
- return false;
- }
- }
- if (parentTaskId() != BaseTasksRequest.ALL_TASKS) {
- if (parentTaskId() != ((ChildTask) task).getParentId()) {
- return false;
- }
- }
- } else {
- // This is not a child task and we need to match parent node or id
+ if (parentNode() != null) {
+ if (parentNode().equals(task.getParentNode()) == false) {
+ return false;
+ }
+ }
+ if (parentTaskId() != BaseTasksRequest.ALL_TASKS) {
+ if (parentTaskId() != task.getParentId()) {
return false;
}
}
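With ChildTask folded into Task, parentage checks no longer need an instanceof branch; both filters now read straight off the task. The replacement logic above, restated as a small standalone predicate (only getParentNode()/getParentId() and the ALL_TASKS sentinel come from the diff; the method itself is illustrative):

    import org.elasticsearch.tasks.Task;

    boolean matchesParent(Task task, String parentNode, long parentTaskId, long allTasksSentinel) {
        // a null parentNode or the ALL_TASKS sentinel means "don't filter on that dimension"
        if (parentNode != null && parentNode.equals(task.getParentNode()) == false) {
            return false;
        }
        if (parentTaskId != allTasksSentinel && parentTaskId != task.getParentId()) {
            return false;
        }
        return true;
    }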
diff --git a/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java b/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java
index 42be7e4eef..d2ce298565 100644
--- a/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java
@@ -291,7 +291,7 @@ public abstract class TransportTasksAction<
}
protected NodeTaskRequest(TasksRequest tasksRequest) {
- super(tasksRequest);
+ super();
this.tasksRequest = tasksRequest;
}
diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java b/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java
index 5f541b02c7..6356c55499 100644
--- a/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsShardRequest.java
@@ -41,8 +41,8 @@ public class MultiTermVectorsShardRequest extends SingleShardRequest<MultiTermVe
}
- MultiTermVectorsShardRequest(MultiTermVectorsRequest request, String index, int shardId) {
- super(request, index);
+ MultiTermVectorsShardRequest(String index, int shardId) {
+ super(index);
this.shardId = shardId;
locations = new IntArrayList();
requests = new ArrayList<>();
diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java b/core/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java
index 3943d2e6a6..7047ee6904 100644
--- a/core/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java
+++ b/core/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java
@@ -82,7 +82,7 @@ public class TransportMultiTermVectorsAction extends HandledTransportAction<Mult
termVectorsRequest.id(), termVectorsRequest.routing());
MultiTermVectorsShardRequest shardRequest = shardRequests.get(shardId);
if (shardRequest == null) {
- shardRequest = new MultiTermVectorsShardRequest(request, shardId.index().name(), shardId.id());
+ shardRequest = new MultiTermVectorsShardRequest(shardId.getIndexName(), shardId.id());
shardRequest.preference(request.preference);
shardRequests.put(shardId, shardRequest);
}
diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/dfs/TransportDfsOnlyAction.java b/core/src/main/java/org/elasticsearch/action/termvectors/dfs/TransportDfsOnlyAction.java
index 2cb86f653f..647e3cc754 100644
--- a/core/src/main/java/org/elasticsearch/action/termvectors/dfs/TransportDfsOnlyAction.java
+++ b/core/src/main/java/org/elasticsearch/action/termvectors/dfs/TransportDfsOnlyAction.java
@@ -39,6 +39,7 @@ import org.elasticsearch.search.SearchService;
import org.elasticsearch.search.controller.SearchPhaseController;
import org.elasticsearch.search.dfs.AggregatedDfs;
import org.elasticsearch.search.dfs.DfsSearchResult;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@@ -69,14 +70,14 @@ public class TransportDfsOnlyAction extends TransportBroadcastAction<DfsOnlyRequ
}
@Override
- protected void doExecute(DfsOnlyRequest request, ActionListener<DfsOnlyResponse> listener) {
+ protected void doExecute(Task task, DfsOnlyRequest request, ActionListener<DfsOnlyResponse> listener) {
request.nowInMillis = System.currentTimeMillis();
- super.doExecute(request, listener);
+ super.doExecute(task, request, listener);
}
@Override
protected ShardDfsOnlyRequest newShardRequest(int numShards, ShardRouting shard, DfsOnlyRequest request) {
- String[] filteringAliases = indexNameExpressionResolver.filteringAliases(clusterService.state(), shard.index(), request.indices());
+ String[] filteringAliases = indexNameExpressionResolver.filteringAliases(clusterService.state(), shard.index().getName(), request.indices());
return new ShardDfsOnlyRequest(shard, numShards, filteringAliases, request.nowInMillis, request);
}
diff --git a/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java b/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java
index 9ba1f2d1ea..b60403b00b 100644
--- a/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java
+++ b/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java
@@ -113,7 +113,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
protected void doExecute(final UpdateRequest request, final ActionListener<UpdateResponse> listener) {
// if we don't have a master, we don't have metadata, that's fine, let it find a master using create index API
if (autoCreateIndex.shouldAutoCreate(request.index(), clusterService.state())) {
- createIndexAction.execute(new CreateIndexRequest(request).index(request.index()).cause("auto(update api)").masterNodeTimeout(request.timeout()), new ActionListener<CreateIndexResponse>() {
+ createIndexAction.execute(new CreateIndexRequest().index(request.index()).cause("auto(update api)").masterNodeTimeout(request.timeout()), new ActionListener<CreateIndexResponse>() {
@Override
public void onResponse(CreateIndexResponse result) {
innerExecute(request, listener);
@@ -164,12 +164,12 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
}
protected void shardOperation(final UpdateRequest request, final ActionListener<UpdateResponse> listener, final int retryCount) {
- IndexService indexService = indicesService.indexServiceSafe(request.concreteIndex());
- IndexShard indexShard = indexService.getShard(request.shardId());
+ final IndexService indexService = indicesService.indexServiceSafe(request.concreteIndex());
+ final IndexShard indexShard = indexService.getShard(request.shardId());
final UpdateHelper.Result result = updateHelper.prepare(request, indexShard);
switch (result.operation()) {
case UPSERT:
- IndexRequest upsertRequest = new IndexRequest(result.action(), request);
+ IndexRequest upsertRequest = new IndexRequest((IndexRequest) result.action());
// we fetch it from the index request so we don't generate the bytes twice, it's already done in the index request
final BytesReference upsertSourceBytes = upsertRequest.source();
indexAction.execute(upsertRequest, new ActionListener<IndexResponse>() {
@@ -206,7 +206,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
});
break;
case INDEX:
- IndexRequest indexRequest = new IndexRequest(result.action(), request);
+ IndexRequest indexRequest = new IndexRequest((IndexRequest) result.action());
// we fetch it from the index request so we don't generate the bytes twice, it's already done in the index request
final BytesReference indexSourceBytes = indexRequest.source();
indexAction.execute(indexRequest, new ActionListener<IndexResponse>() {
diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java
index d28ba2986e..34bc2b2518 100644
--- a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java
+++ b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java
@@ -44,6 +44,7 @@ import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.script.ExecutableScript;
+import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
@@ -75,16 +76,15 @@ public class UpdateHelper extends AbstractComponent {
final GetResult getResult = indexShard.getService().get(request.type(), request.id(),
new String[]{RoutingFieldMapper.NAME, ParentFieldMapper.NAME, TTLFieldMapper.NAME, TimestampFieldMapper.NAME},
true, request.version(), request.versionType(), FetchSourceContext.FETCH_SOURCE, false);
- return prepare(request, getResult);
+ return prepare(indexShard.shardId(), request, getResult);
}
/**
* Prepares an update request by converting it into an index or delete request or an update response (no action).
*/
@SuppressWarnings("unchecked")
- protected Result prepare(UpdateRequest request, final GetResult getResult) {
+ protected Result prepare(ShardId shardId, UpdateRequest request, final GetResult getResult) {
long getDateNS = System.nanoTime();
- final ShardId shardId = new ShardId(getResult.getIndex(), request.shardId());
if (!getResult.isExists()) {
if (request.upsertRequest() == null && !request.docAsUpsert()) {
throw new DocumentMissingException(shardId, request.type(), request.id());
@@ -99,7 +99,7 @@ public class UpdateHelper extends AbstractComponent {
// Tell the script that this is a create and not an update
ctx.put("op", "create");
ctx.put("_source", upsertDoc);
- ctx = executeScript(request, ctx);
+ ctx = executeScript(request.script, ctx);
// Allow the script to set TTL using ctx._ttl
if (ttl == null) {
ttl = getTTLFromScriptContext(ctx);
@@ -193,7 +193,7 @@ public class UpdateHelper extends AbstractComponent {
ctx.put("_ttl", originalTtl);
ctx.put("_source", sourceAndContent.v2());
- ctx = executeScript(request, ctx);
+ ctx = executeScript(request.script, ctx);
operation = (String) ctx.get("op");
@@ -243,14 +243,14 @@ public class UpdateHelper extends AbstractComponent {
}
}
- private Map<String, Object> executeScript(UpdateRequest request, Map<String, Object> ctx) {
+ private Map<String, Object> executeScript(Script script, Map<String, Object> ctx) {
try {
if (scriptService != null) {
- ExecutableScript script = scriptService.executable(request.script, ScriptContext.Standard.UPDATE, request, Collections.emptyMap());
- script.setNextVar("ctx", ctx);
- script.run();
+ ExecutableScript executableScript = scriptService.executable(script, ScriptContext.Standard.UPDATE, Collections.emptyMap());
+ executableScript.setNextVar("ctx", ctx);
+ executableScript.run();
// we need to unwrap the ctx...
- ctx = (Map<String, Object>) script.unwrap(ctx);
+ ctx = (Map<String, Object>) executableScript.unwrap(ctx);
}
} catch (Exception e) {
throw new IllegalArgumentException("failed to execute script", e);
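Narrowing executeScript to take the Script directly (instead of the whole UpdateRequest) makes the ctx round-trip easy to show in isolation. A sketch under the executable(...) shape used above; everything outside those calls is illustrative:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;
    import org.elasticsearch.script.ExecutableScript;
    import org.elasticsearch.script.Script;
    import org.elasticsearch.script.ScriptContext;
    import org.elasticsearch.script.ScriptService;

    @SuppressWarnings("unchecked")
    Map<String, Object> runUpdateScript(ScriptService scriptService, Script script, Map<String, Object> source) {
        Map<String, Object> ctx = new HashMap<>();
        ctx.put("op", "index");   // the script may flip this to "none" or "delete"
        ctx.put("_source", source);
        ExecutableScript executable = scriptService.executable(script, ScriptContext.Standard.UPDATE, Collections.emptyMap());
        executable.setNextVar("ctx", ctx);
        executable.run();
        // unwrap in case the script engine wrapped the map
        return (Map<String, Object>) executable.unwrap(ctx);
    }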
diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java
index 053f4aeaf3..5aa760dc9f 100644
--- a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java
+++ b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java
@@ -20,7 +20,9 @@
package org.elasticsearch.bootstrap;
import org.apache.lucene.util.Constants;
+import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.StringHelper;
+import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.common.PidFile;
import org.elasticsearch.common.SuppressForbidden;
@@ -40,6 +42,7 @@ import org.elasticsearch.node.Node;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
import java.io.ByteArrayOutputStream;
+import java.io.IOException;
import java.io.PrintStream;
import java.nio.file.Path;
import java.util.Locale;
@@ -114,7 +117,11 @@ final class Bootstrap {
public boolean handle(int code) {
if (CTRL_CLOSE_EVENT == code) {
logger.info("running graceful exit on windows");
- Bootstrap.stop();
+ try {
+ Bootstrap.stop();
+ } catch (IOException e) {
+ throw new ElasticsearchException("failed to stop node", e);
+ }
return true;
}
return false;
@@ -153,8 +160,10 @@ final class Bootstrap {
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
- if (node != null) {
- node.close();
+ try {
+ IOUtils.close(node);
+ } catch (IOException ex) {
+ throw new ElasticsearchException("failed to stop node", ex);
}
}
});
@@ -171,7 +180,7 @@ final class Bootstrap {
// placeholder
Settings nodeSettings = Settings.settingsBuilder()
.put(settings)
- .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true)
+ .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true)
.build();
node = new Node(nodeSettings);
@@ -221,9 +230,9 @@ final class Bootstrap {
keepAliveThread.start();
}
- static void stop() {
+ static void stop() throws IOException {
try {
- Releasables.close(INSTANCE.node);
+ IOUtils.close(INSTANCE.node);
} finally {
INSTANCE.keepAliveLatch.countDown();
}
@@ -288,6 +297,9 @@ final class Bootstrap {
// fail if using broken version
JVMCheck.check();
+ // fail if somebody replaced the lucene jars
+ checkLucene();
+
INSTANCE.setup(true, settings, environment);
INSTANCE.start();
@@ -364,4 +376,11 @@ final class Bootstrap {
private static void exit(int status) {
System.exit(status);
}
+
+ private static void checkLucene() {
+ if (Version.CURRENT.luceneVersion.equals(org.apache.lucene.util.Version.LATEST) == false) {
+ throw new AssertionError("Lucene version mismatch this version of Elasticsearch requires lucene version ["
+ + Version.CURRENT.luceneVersion + "] but the current lucene version is [" + org.apache.lucene.util.Version.LATEST + "]");
+ }
+ }
}
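Node closing now goes through Lucene's IOUtils.close, which attempts every close even when an earlier one throws and then rethrows the first IOException; that is why stop() now declares IOException and both the shutdown hook and the Windows handler wrap it in an ElasticsearchException. A toy illustration of that contract (the two resources are placeholders):

    import java.io.Closeable;
    import java.io.IOException;
    import org.apache.lucene.util.IOUtils;

    void closeAll(Closeable first, Closeable second) throws IOException {
        // both arguments get a close() attempt; the first failure is rethrown afterwards
        IOUtils.close(first, second);
    }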
diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java
index 977939a3d8..107a955696 100644
--- a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java
+++ b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java
@@ -19,6 +19,8 @@
package org.elasticsearch.bootstrap;
+import java.io.IOException;
+
/**
* This class starts elasticsearch.
*/
@@ -48,7 +50,7 @@ public final class Elasticsearch {
*
* NOTE: If this method is renamed and/or moved, make sure to update service.bat!
*/
- static void close(String[] args) {
+ static void close(String[] args) throws IOException {
Bootstrap.stop();
}
-}
\ No newline at end of file
+}
diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Security.java b/core/src/main/java/org/elasticsearch/bootstrap/Security.java
index 43ad73b5de..b9d2bfda24 100644
--- a/core/src/main/java/org/elasticsearch/bootstrap/Security.java
+++ b/core/src/main/java/org/elasticsearch/bootstrap/Security.java
@@ -25,9 +25,9 @@ import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
-import org.elasticsearch.http.netty.NettyHttpServerTransport;
+import org.elasticsearch.http.HttpTransportSettings;
import org.elasticsearch.plugins.PluginInfo;
-import org.elasticsearch.transport.netty.NettyTransport;
+import org.elasticsearch.transport.TransportSettings;
import java.io.FilePermission;
import java.io.IOException;
@@ -241,26 +241,26 @@ final class Security {
*/
static void addFilePermissions(Permissions policy, Environment environment) {
// read-only dirs
- addPath(policy, "path.home", environment.binFile(), "read,readlink");
- addPath(policy, "path.home", environment.libFile(), "read,readlink");
- addPath(policy, "path.home", environment.modulesFile(), "read,readlink");
- addPath(policy, "path.plugins", environment.pluginsFile(), "read,readlink");
- addPath(policy, "path.conf", environment.configFile(), "read,readlink");
- addPath(policy, "path.scripts", environment.scriptsFile(), "read,readlink");
+ addPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.binFile(), "read,readlink");
+ addPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.libFile(), "read,readlink");
+ addPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.modulesFile(), "read,readlink");
+ addPath(policy, Environment.PATH_PLUGINS_SETTING.getKey(), environment.pluginsFile(), "read,readlink");
+ addPath(policy, Environment.PATH_CONF_SETTING.getKey(), environment.configFile(), "read,readlink");
+ addPath(policy, Environment.PATH_SCRIPTS_SETTING.getKey(), environment.scriptsFile(), "read,readlink");
// read-write dirs
addPath(policy, "java.io.tmpdir", environment.tmpFile(), "read,readlink,write,delete");
- addPath(policy, "path.logs", environment.logsFile(), "read,readlink,write,delete");
+ addPath(policy, Environment.PATH_LOGS_SETTING.getKey(), environment.logsFile(), "read,readlink,write,delete");
if (environment.sharedDataFile() != null) {
- addPath(policy, "path.shared_data", environment.sharedDataFile(), "read,readlink,write,delete");
+ addPath(policy, Environment.PATH_SHARED_DATA_SETTING.getKey(), environment.sharedDataFile(), "read,readlink,write,delete");
}
for (Path path : environment.dataFiles()) {
- addPath(policy, "path.data", path, "read,readlink,write,delete");
+ addPath(policy, Environment.PATH_DATA_SETTING.getKey(), path, "read,readlink,write,delete");
}
for (Path path : environment.dataWithClusterFiles()) {
- addPath(policy, "path.data", path, "read,readlink,write,delete");
+ addPath(policy, Environment.PATH_DATA_SETTING.getKey(), path, "read,readlink,write,delete");
}
for (Path path : environment.repoFiles()) {
- addPath(policy, "path.repo", path, "read,readlink,write,delete");
+ addPath(policy, Environment.PATH_REPO_SETTING.getKey(), path, "read,readlink,write,delete");
}
if (environment.pidFile() != null) {
// we just need permission to remove the file if it's elsewhere.
@@ -270,17 +270,15 @@ final class Security {
static void addBindPermissions(Permissions policy, Settings settings) throws IOException {
// http is simple
- String httpRange = settings.get("http.netty.port",
- settings.get("http.port",
- NettyHttpServerTransport.DEFAULT_PORT_RANGE));
+ String httpRange = HttpTransportSettings.SETTING_HTTP_PORT.get(settings).getPortRangeString();
// listen is always called with 'localhost' but use wildcard to be sure, no name service is consulted.
// see SocketPermission implies() code
policy.add(new SocketPermission("*:" + httpRange, "listen,resolve"));
// transport is waaaay overengineered
- Map<String, Settings> profiles = settings.getGroups("transport.profiles", true);
- if (!profiles.containsKey(NettyTransport.DEFAULT_PROFILE)) {
+ Map<String, Settings> profiles = TransportSettings.TRANSPORT_PROFILES_SETTING.get(settings).getAsGroups();
+ if (!profiles.containsKey(TransportSettings.DEFAULT_PROFILE)) {
profiles = new HashMap<>(profiles);
- profiles.put(NettyTransport.DEFAULT_PROFILE, Settings.EMPTY);
+ profiles.put(TransportSettings.DEFAULT_PROFILE, Settings.EMPTY);
}
// loop through all profiles and add permissions for each one, if it's valid.
@@ -288,12 +286,10 @@ final class Security {
for (Map.Entry<String, Settings> entry : profiles.entrySet()) {
Settings profileSettings = entry.getValue();
String name = entry.getKey();
- String transportRange = profileSettings.get("port",
- settings.get("transport.tcp.port",
- NettyTransport.DEFAULT_PORT_RANGE));
+ String transportRange = profileSettings.get("port", TransportSettings.PORT.get(settings));
// a profile is only valid if it's the default profile, or if it has an actual name and specifies a port
- boolean valid = NettyTransport.DEFAULT_PROFILE.equals(name) || (Strings.hasLength(name) && profileSettings.get("port") != null);
+ boolean valid = TransportSettings.DEFAULT_PROFILE.equals(name) || (Strings.hasLength(name) && profileSettings.get("port") != null);
if (valid) {
// listen is always called with 'localhost' but use wildcard to be sure, no name service is consulted.
// see SocketPermission implies() code
diff --git a/core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java b/core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java
index dc050e0b6c..7016718b30 100644
--- a/core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java
+++ b/core/src/main/java/org/elasticsearch/cache/recycler/PageCacheRecycler.java
@@ -22,9 +22,12 @@ package org.elasticsearch.cache.recycler;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.recycler.AbstractRecyclerC;
import org.elasticsearch.common.recycler.Recycler;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.threadpool.ThreadPool;
@@ -38,17 +41,22 @@ import static org.elasticsearch.common.recycler.Recyclers.dequeFactory;
import static org.elasticsearch.common.recycler.Recyclers.none;
/** A recycler of fixed-size pages. */
-public class PageCacheRecycler extends AbstractComponent {
+public class PageCacheRecycler extends AbstractComponent implements Releasable {
- public static final String TYPE = "recycler.page.type";
- public static final String LIMIT_HEAP = "recycler.page.limit.heap";
- public static final String WEIGHT = "recycler.page.weight";
+ public static final Setting<Type> TYPE_SETTING = new Setting<>("cache.recycler.page.type", Type.CONCURRENT.name(), Type::parse, false, Setting.Scope.CLUSTER);
+ public static final Setting<ByteSizeValue> LIMIT_HEAP_SETTING = Setting.byteSizeSetting("cache.recycler.page.limit.heap", "10%", false, Setting.Scope.CLUSTER);
+ public static final Setting<Double> WEIGHT_BYTES_SETTING = Setting.doubleSetting("cache.recycler.page.weight.bytes", 1d, 0d, false, Setting.Scope.CLUSTER);
+ public static final Setting<Double> WEIGHT_LONG_SETTING = Setting.doubleSetting("cache.recycler.page.weight.longs", 1d, 0d, false, Setting.Scope.CLUSTER);
+ public static final Setting<Double> WEIGHT_INT_SETTING = Setting.doubleSetting("cache.recycler.page.weight.ints", 1d, 0d, false, Setting.Scope.CLUSTER);
+ // object pages are less useful to us so we give them a lower weight by default
+ public static final Setting<Double> WEIGHT_OBJECTS_SETTING = Setting.doubleSetting("cache.recycler.page.weight.objects", 0.1d, 0d, false, Setting.Scope.CLUSTER);
private final Recycler<byte[]> bytePage;
private final Recycler<int[]> intPage;
private final Recycler<long[]> longPage;
private final Recycler<Object[]> objectPage;
+ @Override
public void close() {
bytePage.close();
intPage.close();
@@ -71,8 +79,8 @@ public class PageCacheRecycler extends AbstractComponent {
@Inject
public PageCacheRecycler(Settings settings, ThreadPool threadPool) {
super(settings);
- final Type type = Type.parse(settings.get(TYPE));
- final long limit = settings.getAsMemory(LIMIT_HEAP, "10%").bytes();
+ final Type type = TYPE_SETTING.get(settings);
+ final long limit = LIMIT_HEAP_SETTING.get(settings).bytes();
final int availableProcessors = EsExecutors.boundedNumberOfProcessors(settings);
final int searchThreadPoolSize = maximumSearchThreadPoolSize(threadPool, settings);
@@ -89,11 +97,10 @@ public class PageCacheRecycler extends AbstractComponent {
// to direct ByteBuffers or sun.misc.Unsafe on a byte[] but this would have other issues
// that would need to be addressed such as garbage collection of native memory or safety
// of Unsafe writes.
- final double bytesWeight = settings.getAsDouble(WEIGHT + ".bytes", 1d);
- final double intsWeight = settings.getAsDouble(WEIGHT + ".ints", 1d);
- final double longsWeight = settings.getAsDouble(WEIGHT + ".longs", 1d);
- // object pages are less useful to us so we give them a lower weight by default
- final double objectsWeight = settings.getAsDouble(WEIGHT + ".objects", 0.1d);
+ final double bytesWeight = WEIGHT_BYTES_SETTING.get(settings);
+ final double intsWeight = WEIGHT_INT_SETTING.get(settings);
+ final double longsWeight = WEIGHT_LONG_SETTING.get(settings);
+ final double objectsWeight = WEIGHT_OBJECTS_SETTING.get(settings);
final double totalWeight = bytesWeight + intsWeight + longsWeight + objectsWeight;
final int maxPageCount = (int) Math.min(Integer.MAX_VALUE, limit / BigArrays.PAGE_SIZE_IN_BYTES);
@@ -188,7 +195,7 @@ public class PageCacheRecycler extends AbstractComponent {
return recycler;
}
- public static enum Type {
+ public enum Type {
QUEUE {
@Override
<T> Recycler<T> build(Recycler.C<T> c, int limit, int estimatedThreadPoolSize, int availableProcessors) {
@@ -209,9 +216,6 @@ public class PageCacheRecycler extends AbstractComponent {
};
public static Type parse(String type) {
- if (Strings.isNullOrEmpty(type)) {
- return CONCURRENT;
- }
try {
return Type.valueOf(type.toUpperCase(Locale.ROOT));
} catch (IllegalArgumentException e) {
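This file shows the settings migration pattern that recurs throughout the merge: ad-hoc string keys and getAsDouble/getAsMemory lookups become typed Setting constants carrying key, default, parser, dynamic flag and scope, and parse() no longer needs a null/empty fallback because the default lives in the Setting. A minimal sketch using the same factory shape as above (the key name is made up):

    import org.elasticsearch.common.settings.Setting;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.common.unit.ByteSizeValue;

    public class ExampleSettings {
        // typed, validated at parse time, non-dynamic, cluster scope
        public static final Setting<ByteSizeValue> EXAMPLE_LIMIT_SETTING =
                Setting.byteSizeSetting("example.limit.heap", "10%", false, Setting.Scope.CLUSTER);

        static long limitInBytes(Settings settings) {
            // get() applies the default and the parser in one step
            return EXAMPLE_LIMIT_SETTING.get(settings).bytes();
        }
    }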
diff --git a/core/src/main/java/org/elasticsearch/client/Client.java b/core/src/main/java/org/elasticsearch/client/Client.java
index e7461dabfe..f81ba9eb1b 100644
--- a/core/src/main/java/org/elasticsearch/client/Client.java
+++ b/core/src/main/java/org/elasticsearch/client/Client.java
@@ -19,8 +19,12 @@
package org.elasticsearch.client;
+import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestBuilder;
+import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
@@ -80,11 +84,13 @@ import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateRequestBuilder;
import org.elasticsearch.action.update.UpdateResponse;
-import org.elasticsearch.client.support.Headers;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lease.Releasable;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
+import java.util.Map;
+
/**
* A client provides a one stop interface for performing actions/operations against the cluster.
* <p>
@@ -100,7 +106,15 @@ import org.elasticsearch.common.settings.Settings;
*/
public interface Client extends ElasticsearchClient, Releasable {
- String CLIENT_TYPE_SETTING = "client.type";
+ Setting<String> CLIENT_TYPE_SETTING_S = new Setting<>("client.type", "node", (s) -> {
+ switch (s) {
+ case "node":
+ case "transport":
+ return s;
+ default:
+ throw new IllegalArgumentException("Can't parse [client.type], must be one of [node, transport]");
+ }
+ }, false, Setting.Scope.CLUSTER);
/**
* The admin client that can be used to perform administrative operations.
@@ -597,5 +611,9 @@ public interface Client extends ElasticsearchClient, Releasable {
*/
Settings settings();
- Headers headers();
+ /**
+ * Returns a new lightweight Client that applies all given headers to each of the requests
+ * issued from it.
+ */
+ Client filterWithHeader(Map<String, String> headers);
}
diff --git a/core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java b/core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java
index 2cee4341a3..c56fd08706 100644
--- a/core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java
+++ b/core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java
@@ -84,6 +84,18 @@ import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse;
import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateRequest;
import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateRequestBuilder;
import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateResponse;
+import org.elasticsearch.action.ingest.DeletePipelineRequest;
+import org.elasticsearch.action.ingest.DeletePipelineRequestBuilder;
+import org.elasticsearch.action.ingest.GetPipelineRequest;
+import org.elasticsearch.action.ingest.GetPipelineRequestBuilder;
+import org.elasticsearch.action.ingest.GetPipelineResponse;
+import org.elasticsearch.action.ingest.PutPipelineRequest;
+import org.elasticsearch.action.ingest.PutPipelineRequestBuilder;
+import org.elasticsearch.action.ingest.SimulatePipelineRequest;
+import org.elasticsearch.action.ingest.SimulatePipelineRequestBuilder;
+import org.elasticsearch.action.ingest.SimulatePipelineResponse;
+import org.elasticsearch.action.ingest.WritePipelineResponse;
+import org.elasticsearch.common.bytes.BytesReference;
/**
* Administrative actions/operations against the cluster.
@@ -474,4 +486,64 @@ public interface ClusterAdminClient extends ElasticsearchClient {
* Return the rendered search request for a given search template.
*/
RenderSearchTemplateRequestBuilder prepareRenderSearchTemplate();
+
+ /**
+ * Stores an ingest pipeline
+ */
+ void putPipeline(PutPipelineRequest request, ActionListener<WritePipelineResponse> listener);
+
+ /**
+ * Stores an ingest pipeline
+ */
+ ActionFuture<WritePipelineResponse> putPipeline(PutPipelineRequest request);
+
+ /**
+ * Stores an ingest pipeline
+ */
+ PutPipelineRequestBuilder preparePutPipeline(String id, BytesReference source);
+
+ /**
+ * Deletes a stored ingest pipeline
+ */
+ void deletePipeline(DeletePipelineRequest request, ActionListener<WritePipelineResponse> listener);
+
+ /**
+ * Deletes a stored ingest pipeline
+ */
+ ActionFuture<WritePipelineResponse> deletePipeline(DeletePipelineRequest request);
+
+ /**
+ * Deletes a stored ingest pipeline
+ */
+ DeletePipelineRequestBuilder prepareDeletePipeline();
+
+ /**
+ * Returns a stored ingest pipeline
+ */
+ void getPipeline(GetPipelineRequest request, ActionListener<GetPipelineResponse> listener);
+
+ /**
+ * Returns a stored ingest pipeline
+ */
+ ActionFuture<GetPipelineResponse> getPipeline(GetPipelineRequest request);
+
+ /**
+ * Returns a stored ingest pipeline
+ */
+ GetPipelineRequestBuilder prepareGetPipeline(String... ids);
+
+ /**
+ * Simulates an ingest pipeline
+ */
+ void simulatePipeline(SimulatePipelineRequest request, ActionListener<SimulatePipelineResponse> listener);
+
+ /**
+ * Simulates an ingest pipeline
+ */
+ ActionFuture<SimulatePipelineResponse> simulatePipeline(SimulatePipelineRequest request);
+
+ /**
+ * Simulates an ingest pipeline
+ */
+ SimulatePipelineRequestBuilder prepareSimulatePipeline(BytesReference source);
}
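The new ingest surface follows the client's usual triple: a listener variant, an ActionFuture variant, and a request-builder variant. A sketch of storing a pipeline through the builder (the pipeline id and body are made up, and the acknowledged accessor is assumed from the usual acknowledged-response contract):

    import org.elasticsearch.action.ingest.WritePipelineResponse;
    import org.elasticsearch.client.ClusterAdminClient;
    import org.elasticsearch.common.bytes.BytesArray;

    void storePipeline(ClusterAdminClient cluster) {
        // an empty processor chain, just to exercise the round trip
        BytesArray source = new BytesArray("{\"processors\": []}");
        WritePipelineResponse response = cluster.preparePutPipeline("my-pipeline", source).get();
        boolean acknowledged = response.isAcknowledged(); // assumed accessor
    }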
diff --git a/core/src/main/java/org/elasticsearch/client/FilterClient.java b/core/src/main/java/org/elasticsearch/client/FilterClient.java
index 77abceef17..d2ea209a8c 100644
--- a/core/src/main/java/org/elasticsearch/client/FilterClient.java
+++ b/core/src/main/java/org/elasticsearch/client/FilterClient.java
@@ -42,7 +42,7 @@ public abstract class FilterClient extends AbstractClient {
* @see #in()
*/
public FilterClient(Client in) {
- super(in.settings(), in.threadPool(), in.headers());
+ super(in.settings(), in.threadPool());
this.in = in;
}
diff --git a/core/src/main/java/org/elasticsearch/client/node/NodeClient.java b/core/src/main/java/org/elasticsearch/client/node/NodeClient.java
index 4f64f63f8d..3e9bed9e25 100644
--- a/core/src/main/java/org/elasticsearch/client/node/NodeClient.java
+++ b/core/src/main/java/org/elasticsearch/client/node/NodeClient.java
@@ -27,7 +27,6 @@ import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.GenericAction;
import org.elasticsearch.action.support.TransportAction;
import org.elasticsearch.client.support.AbstractClient;
-import org.elasticsearch.client.support.Headers;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
@@ -44,8 +43,8 @@ public class NodeClient extends AbstractClient {
private final Map<GenericAction, TransportAction> actions;
@Inject
- public NodeClient(Settings settings, ThreadPool threadPool, Headers headers, Map<GenericAction, TransportAction> actions) {
- super(settings, threadPool, headers);
+ public NodeClient(Settings settings, ThreadPool threadPool, Map<GenericAction, TransportAction> actions) {
+ super(settings, threadPool);
this.actions = unmodifiableMap(actions);
}
diff --git a/core/src/main/java/org/elasticsearch/client/node/NodeClientModule.java b/core/src/main/java/org/elasticsearch/client/node/NodeClientModule.java
index fb0891da8c..de13488730 100644
--- a/core/src/main/java/org/elasticsearch/client/node/NodeClientModule.java
+++ b/core/src/main/java/org/elasticsearch/client/node/NodeClientModule.java
@@ -20,7 +20,6 @@
package org.elasticsearch.client.node;
import org.elasticsearch.client.Client;
-import org.elasticsearch.client.support.Headers;
import org.elasticsearch.common.inject.AbstractModule;
/**
@@ -30,7 +29,6 @@ public class NodeClientModule extends AbstractModule {
@Override
protected void configure() {
- bind(Headers.class).asEagerSingleton();
bind(Client.class).to(NodeClient.class).asEagerSingleton();
}
}
diff --git a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java
index e5e1bea6cb..ac60df1d67 100644
--- a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java
+++ b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java
@@ -272,6 +272,21 @@ import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptAction;
import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequest;
import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequestBuilder;
import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptResponse;
+import org.elasticsearch.action.ingest.DeletePipelineAction;
+import org.elasticsearch.action.ingest.DeletePipelineRequest;
+import org.elasticsearch.action.ingest.DeletePipelineRequestBuilder;
+import org.elasticsearch.action.ingest.GetPipelineAction;
+import org.elasticsearch.action.ingest.GetPipelineRequest;
+import org.elasticsearch.action.ingest.GetPipelineRequestBuilder;
+import org.elasticsearch.action.ingest.GetPipelineResponse;
+import org.elasticsearch.action.ingest.PutPipelineAction;
+import org.elasticsearch.action.ingest.PutPipelineRequest;
+import org.elasticsearch.action.ingest.PutPipelineRequestBuilder;
+import org.elasticsearch.action.ingest.SimulatePipelineAction;
+import org.elasticsearch.action.ingest.SimulatePipelineRequest;
+import org.elasticsearch.action.ingest.SimulatePipelineRequestBuilder;
+import org.elasticsearch.action.ingest.SimulatePipelineResponse;
+import org.elasticsearch.action.ingest.WritePipelineResponse;
import org.elasticsearch.action.percolate.MultiPercolateAction;
import org.elasticsearch.action.percolate.MultiPercolateRequest;
import org.elasticsearch.action.percolate.MultiPercolateRequestBuilder;
@@ -317,12 +332,17 @@ import org.elasticsearch.client.AdminClient;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.ClusterAdminClient;
import org.elasticsearch.client.ElasticsearchClient;
+import org.elasticsearch.client.FilterClient;
import org.elasticsearch.client.IndicesAdminClient;
import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.threadpool.ThreadPool;
+import java.util.Map;
+
/**
*
*/
@@ -330,24 +350,16 @@ public abstract class AbstractClient extends AbstractComponent implements Client
private final ThreadPool threadPool;
private final Admin admin;
-
- private final Headers headers;
private final ThreadedActionListener.Wrapper threadedWrapper;
- public AbstractClient(Settings settings, ThreadPool threadPool, Headers headers) {
+ public AbstractClient(Settings settings, ThreadPool threadPool) {
super(settings);
this.threadPool = threadPool;
- this.headers = headers;
this.admin = new Admin(this);
this.threadedWrapper = new ThreadedActionListener.Wrapper(logger, settings, threadPool);
}
@Override
- public Headers headers() {
- return this.headers;
- }
-
- @Override
public final Settings settings() {
return this.settings;
}
@@ -382,7 +394,6 @@ public abstract class AbstractClient extends AbstractComponent implements Client
@Override
public final <Request extends ActionRequest<Request>, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void execute(
Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
- headers.applyTo(request);
listener = threadedWrapper.wrap(listener);
doExecute(action, request, listener);
}
@@ -1173,6 +1184,66 @@ public abstract class AbstractClient extends AbstractComponent implements Client
public RenderSearchTemplateRequestBuilder prepareRenderSearchTemplate() {
return new RenderSearchTemplateRequestBuilder(this, RenderSearchTemplateAction.INSTANCE);
}
+
+ @Override
+ public void putPipeline(PutPipelineRequest request, ActionListener<WritePipelineResponse> listener) {
+ execute(PutPipelineAction.INSTANCE, request, listener);
+ }
+
+ @Override
+ public ActionFuture<WritePipelineResponse> putPipeline(PutPipelineRequest request) {
+ return execute(PutPipelineAction.INSTANCE, request);
+ }
+
+ @Override
+ public PutPipelineRequestBuilder preparePutPipeline(String id, BytesReference source) {
+ return new PutPipelineRequestBuilder(this, PutPipelineAction.INSTANCE, id, source);
+ }
+
+ @Override
+ public void deletePipeline(DeletePipelineRequest request, ActionListener<WritePipelineResponse> listener) {
+ execute(DeletePipelineAction.INSTANCE, request, listener);
+ }
+
+ @Override
+ public ActionFuture<WritePipelineResponse> deletePipeline(DeletePipelineRequest request) {
+ return execute(DeletePipelineAction.INSTANCE, request);
+ }
+
+ @Override
+ public DeletePipelineRequestBuilder prepareDeletePipeline() {
+ return new DeletePipelineRequestBuilder(this, DeletePipelineAction.INSTANCE);
+ }
+
+ @Override
+ public void getPipeline(GetPipelineRequest request, ActionListener<GetPipelineResponse> listener) {
+ execute(GetPipelineAction.INSTANCE, request, listener);
+ }
+
+ @Override
+ public ActionFuture<GetPipelineResponse> getPipeline(GetPipelineRequest request) {
+ return execute(GetPipelineAction.INSTANCE, request);
+ }
+
+ @Override
+ public GetPipelineRequestBuilder prepareGetPipeline(String... ids) {
+ return new GetPipelineRequestBuilder(this, GetPipelineAction.INSTANCE, ids);
+ }
+
+ @Override
+ public void simulatePipeline(SimulatePipelineRequest request, ActionListener<SimulatePipelineResponse> listener) {
+ execute(SimulatePipelineAction.INSTANCE, request, listener);
+ }
+
+ @Override
+ public ActionFuture<SimulatePipelineResponse> simulatePipeline(SimulatePipelineRequest request) {
+ return execute(SimulatePipelineAction.INSTANCE, request);
+ }
+
+ @Override
+ public SimulatePipelineRequestBuilder prepareSimulatePipeline(BytesReference source) {
+ return new SimulatePipelineRequestBuilder(this, SimulatePipelineAction.INSTANCE, source);
+ }
}
static class IndicesAdmin implements IndicesAdminClient {
@@ -1681,4 +1752,17 @@ public abstract class AbstractClient extends AbstractComponent implements Client
execute(GetSettingsAction.INSTANCE, request, listener);
}
}
+
+ @Override
+ public Client filterWithHeader(Map<String, String> headers) {
+ return new FilterClient(this) {
+ @Override
+ protected <Request extends ActionRequest<Request>, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void doExecute(Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
+ ThreadContext threadContext = threadPool().getThreadContext();
+ try (ThreadContext.StoredContext ctx = threadContext.stashAndMergeHeaders(headers)) {
+ super.doExecute(action, request, listener);
+ }
+ }
+ };
+ }
}
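filterWithHeader is the replacement for the Headers component deleted below: rather than mutating every outgoing request, the wrapping client stashes the given headers into the ThreadContext around each call. A usage sketch (header name and value are made up):

    import java.util.Collections;
    import org.elasticsearch.client.Client;

    Client withAuthHeader(Client client, String token) {
        // every request issued from the returned client carries the header;
        // the wrapped client itself is left untouched
        return client.filterWithHeader(Collections.singletonMap("Authorization", token));
    }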
diff --git a/core/src/main/java/org/elasticsearch/client/support/Headers.java b/core/src/main/java/org/elasticsearch/client/support/Headers.java
deleted file mode 100644
index f46bd0a1c9..0000000000
--- a/core/src/main/java/org/elasticsearch/client/support/Headers.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.client.support;
-
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.transport.TransportMessage;
-
-/**
- * Client request headers picked up from the client settings. Applied to every
- * request sent by the client (both transport and node clients)
- */
-public class Headers {
-
- public static final String PREFIX = "request.headers";
-
- public static final Headers EMPTY = new Headers(Settings.EMPTY) {
- @Override
- public <M extends TransportMessage<?>> M applyTo(M message) {
- return message;
- }
- };
-
- private final Settings headers;
-
- @Inject
- public Headers(Settings settings) {
- headers = resolveHeaders(settings);
- }
-
- public <M extends TransportMessage<?>> M applyTo(M message) {
- for (String key : headers.names()) {
- if (!message.hasHeader(key)) {
- message.putHeader(key, headers.get(key));
- }
- }
- return message;
- }
-
- public Settings headers() {
- return headers;
- }
-
- static Settings resolveHeaders(Settings settings) {
- Settings headers = settings.getAsSettings(PREFIX);
- return headers != null ? headers : Settings.EMPTY;
- }
-}
diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java
index 3d68e642c4..ecbf3eb961 100644
--- a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java
+++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java
@@ -19,10 +19,6 @@
package org.elasticsearch.client.transport;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-
import org.elasticsearch.Version;
import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
@@ -32,7 +28,6 @@ import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.client.support.AbstractClient;
-import org.elasticsearch.client.support.Headers;
import org.elasticsearch.client.transport.support.TransportProxyClient;
import org.elasticsearch.cluster.ClusterNameModule;
import org.elasticsearch.cluster.node.DiscoveryNode;
@@ -49,6 +44,7 @@ import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.indices.breaker.CircuitBreakerModule;
import org.elasticsearch.monitor.MonitorService;
+import org.elasticsearch.node.Node;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.PluginsModule;
@@ -59,6 +55,10 @@ import org.elasticsearch.threadpool.ThreadPoolModule;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.transport.netty.NettyTransport;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
/**
@@ -110,13 +110,13 @@ public class TransportClient extends AbstractClient {
private PluginsService newPluginService(final Settings settings) {
final Settings.Builder settingsBuilder = settingsBuilder()
- .put(NettyTransport.PING_SCHEDULE, "5s") // enable by default the transport schedule ping interval
- .put( InternalSettingsPreparer.prepareSettings(settings))
- .put("network.server", false)
- .put("node.client", true)
- .put(CLIENT_TYPE_SETTING, CLIENT_TYPE);
+ .put(NettyTransport.PING_SCHEDULE.getKey(), "5s") // enable by default the transport schedule ping interval
+ .put(InternalSettingsPreparer.prepareSettings(settings))
+ .put(NetworkService.NETWORK_SERVER.getKey(), false)
+ .put(Node.NODE_CLIENT_SETTING.getKey(), true)
+ .put(CLIENT_TYPE_SETTING_S.getKey(), CLIENT_TYPE);
return new PluginsService(settingsBuilder.build(), null, null, pluginClasses);
- };
+ }
/**
* Builds a new instance of the transport client.
@@ -140,7 +140,7 @@ public class TransportClient extends AbstractClient {
modules.add(pluginModule);
}
modules.add(new PluginsModule(pluginsService));
- modules.add(new SettingsModule(settings, settingsFilter ));
+ modules.add(new SettingsModule(settings, settingsFilter));
modules.add(new NetworkModule(networkService, settings, true, namedWriteableRegistry));
modules.add(new ClusterNameModule(settings));
modules.add(new ThreadPoolModule(threadPool));
@@ -150,7 +150,7 @@ public class TransportClient extends AbstractClient {
// noop
}
});
- modules.add(new ActionModule(true));
+ modules.add(new ActionModule(false, true));
modules.add(new CircuitBreakerModule(settings));
pluginsService.processModules(modules);
@@ -176,7 +176,7 @@ public class TransportClient extends AbstractClient {
private final TransportProxyClient proxy;
private TransportClient(Injector injector) {
- super(injector.getInstance(Settings.class), injector.getInstance(ThreadPool.class), injector.getInstance(Headers.class));
+ super(injector.getInstance(Settings.class), injector.getInstance(ThreadPool.class));
this.injector = injector;
nodesService = injector.getInstance(TransportClientNodesService.class);
proxy = injector.getInstance(TransportProxyClient.class);
diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java
index 56befbb9b8..f9ee988d81 100644
--- a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java
+++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java
@@ -29,11 +29,11 @@ import org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAction
import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.client.Requests;
-import org.elasticsearch.client.support.Headers;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.TimeValue;
@@ -79,8 +79,6 @@ public class TransportClientNodesService extends AbstractComponent {
private final Version minCompatibilityVersion;
- private final Headers headers;
-
// nodes that are added to be discovered
private volatile List<DiscoveryNode> listedNodes = Collections.emptyList();
@@ -101,19 +99,23 @@ public class TransportClientNodesService extends AbstractComponent {
private volatile boolean closed;
+
+ public static final Setting<TimeValue> CLIENT_TRANSPORT_NODES_SAMPLER_INTERVAL = Setting.positiveTimeSetting("client.transport.nodes_sampler_interval", timeValueSeconds(5), false, Setting.Scope.CLUSTER);
+ public static final Setting<TimeValue> CLIENT_TRANSPORT_PING_TIMEOUT = Setting.positiveTimeSetting("client.transport.ping_timeout", timeValueSeconds(5), false, Setting.Scope.CLUSTER);
+ public static final Setting<Boolean> CLIENT_TRANSPORT_IGNORE_CLUSTER_NAME = Setting.boolSetting("client.transport.ignore_cluster_name", false, false, Setting.Scope.CLUSTER);
+
@Inject
public TransportClientNodesService(Settings settings, ClusterName clusterName, TransportService transportService,
- ThreadPool threadPool, Headers headers, Version version) {
+ ThreadPool threadPool, Version version) {
super(settings);
this.clusterName = clusterName;
this.transportService = transportService;
this.threadPool = threadPool;
this.minCompatibilityVersion = version.minimumCompatibilityVersion();
- this.headers = headers;
- this.nodesSamplerInterval = this.settings.getAsTime("client.transport.nodes_sampler_interval", timeValueSeconds(5));
- this.pingTimeout = this.settings.getAsTime("client.transport.ping_timeout", timeValueSeconds(5)).millis();
- this.ignoreClusterName = this.settings.getAsBoolean("client.transport.ignore_cluster_name", false);
+ this.nodesSamplerInterval = CLIENT_TRANSPORT_NODES_SAMPLER_INTERVAL.get(this.settings);
+ this.pingTimeout = CLIENT_TRANSPORT_PING_TIMEOUT.get(this.settings).millis();
+ this.ignoreClusterName = CLIENT_TRANSPORT_IGNORE_CLUSTER_NAME.get(this.settings);
if (logger.isDebugEnabled()) {
logger.debug("node_sampler_interval[" + nodesSamplerInterval + "]");
@@ -358,7 +360,7 @@ public class TransportClientNodesService extends AbstractComponent {
}
try {
LivenessResponse livenessResponse = transportService.submitRequest(listedNode, TransportLivenessAction.NAME,
- headers.applyTo(new LivenessRequest()),
+ new LivenessRequest(),
TransportRequestOptions.builder().withType(TransportRequestOptions.Type.STATE).withTimeout(pingTimeout).build(),
new FutureTransportResponseHandler<LivenessResponse>() {
@Override
@@ -428,8 +430,7 @@ public class TransportClientNodesService extends AbstractComponent {
return;
}
}
- transportService.sendRequest(listedNode, ClusterStateAction.NAME,
- headers.applyTo(Requests.clusterStateRequest().clear().nodes(true).local(true)),
+ transportService.sendRequest(listedNode, ClusterStateAction.NAME, Requests.clusterStateRequest().clear().nodes(true).local(true),
TransportRequestOptions.builder().withType(TransportRequestOptions.Type.STATE).withTimeout(pingTimeout).build(),
new BaseTransportResponseHandler<ClusterStateResponse>() {
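The three client.transport.* keys become typed Setting constants with defaults and validation baked in, replacing the ad-hoc getAsTime/getAsBoolean lookups. Reading them is uniform (the settings value below is illustrative):

    Settings settings = Settings.builder()
            .put("client.transport.ping_timeout", "10s")
            .build();
    TimeValue pingTimeout = TransportClientNodesService.CLIENT_TRANSPORT_PING_TIMEOUT.get(settings);              // 10s
    TimeValue samplerInterval = TransportClientNodesService.CLIENT_TRANSPORT_NODES_SAMPLER_INTERVAL.get(settings); // default 5s
    boolean ignoreClusterName = TransportClientNodesService.CLIENT_TRANSPORT_IGNORE_CLUSTER_NAME.get(settings);    // default false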
diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java
index 626b020c56..3e668191ff 100644
--- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java
+++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java
@@ -57,6 +57,7 @@ import org.elasticsearch.cluster.service.InternalClusterService;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.ExtensionPoint;
import org.elasticsearch.gateway.GatewayAllocator;
@@ -64,6 +65,7 @@ import org.elasticsearch.gateway.GatewayAllocator;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
+import java.util.function.Function;
/**
* Configures classes and services that affect the entire cluster.
@@ -72,7 +74,7 @@ public class ClusterModule extends AbstractModule {
public static final String EVEN_SHARD_COUNT_ALLOCATOR = "even_shard";
public static final String BALANCED_ALLOCATOR = "balanced"; // default
- public static final String SHARDS_ALLOCATOR_TYPE_KEY = "cluster.routing.allocation.type";
+ public static final Setting<String> SHARDS_ALLOCATOR_TYPE_SETTING = new Setting<>("cluster.routing.allocation.type", BALANCED_ALLOCATOR, Function.identity(), false, Setting.Scope.CLUSTER);
public static final List<Class<? extends AllocationDecider>> DEFAULT_ALLOCATION_DECIDERS =
Collections.unmodifiableList(Arrays.asList(
SameShardAllocationDecider.class,
@@ -121,7 +123,7 @@ public class ClusterModule extends AbstractModule {
@Override
protected void configure() {
// bind ShardsAllocator
- String shardsAllocatorType = shardsAllocators.bindType(binder(), settings, ClusterModule.SHARDS_ALLOCATOR_TYPE_KEY, ClusterModule.BALANCED_ALLOCATOR);
+ String shardsAllocatorType = shardsAllocators.bindType(binder(), settings, ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING.getKey(), ClusterModule.BALANCED_ALLOCATOR);
if (shardsAllocatorType.equals(ClusterModule.EVEN_SHARD_COUNT_ALLOCATOR)) {
final ESLogger logger = Loggers.getLogger(getClass(), settings);
logger.warn("{} allocator has been removed in 2.0 using {} instead", ClusterModule.EVEN_SHARD_COUNT_ALLOCATOR, ClusterModule.BALANCED_ALLOCATOR);
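Same migration here: the allocator-type key is now a Setting<String> whose default is the balanced allocator, so callers no longer supply a fallback at each lookup site. Illustrative:

    String allocator = ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING.get(Settings.EMPTY); // "balanced"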
diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterName.java b/core/src/main/java/org/elasticsearch/cluster/ClusterName.java
index 3a9dd82732..daf3000d71 100644
--- a/core/src/main/java/org/elasticsearch/cluster/ClusterName.java
+++ b/core/src/main/java/org/elasticsearch/cluster/ClusterName.java
@@ -22,6 +22,7 @@ package org.elasticsearch.cluster;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import java.io.IOException;
@@ -31,18 +32,23 @@ import java.io.IOException;
*/
public class ClusterName implements Streamable {
- public static final String SETTING = "cluster.name";
+ public static final Setting<String> CLUSTER_NAME_SETTING = new Setting<>("cluster.name", "elasticsearch", (s) -> {
+ if (s.isEmpty()) {
+ throw new IllegalArgumentException("[cluster.name] must not be empty");
+ }
+ return s;
+ }, false, Setting.Scope.CLUSTER);
- public static final ClusterName DEFAULT = new ClusterName("elasticsearch".intern());
+
+ public static final ClusterName DEFAULT = new ClusterName(CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY).intern());
private String value;
public static ClusterName clusterNameFromSettings(Settings settings) {
- return new ClusterName(settings.get("cluster.name", ClusterName.DEFAULT.value()));
+ return new ClusterName(CLUSTER_NAME_SETTING.get(settings));
}
private ClusterName() {
-
}
public ClusterName(String value) {
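cluster.name now validates at parse time: the Setting's parser rejects an empty value instead of letting it through silently, and the "elasticsearch" default lives in one place. For example (illustrative):

    ClusterName name = ClusterName.clusterNameFromSettings(Settings.EMPTY); // "elasticsearch"
    // throws IllegalArgumentException: [cluster.name] must not be empty
    ClusterName.CLUSTER_NAME_SETTING.get(Settings.builder().put("cluster.name", "").build());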
diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterState.java b/core/src/main/java/org/elasticsearch/cluster/ClusterState.java
index dd8c737b6b..2a4d57724c 100644
--- a/core/src/main/java/org/elasticsearch/cluster/ClusterState.java
+++ b/core/src/main/java/org/elasticsearch/cluster/ClusterState.java
@@ -449,7 +449,7 @@ public class ClusterState implements ToXContent, Diffable<ClusterState> {
builder.startObject("indices");
for (IndexMetaData indexMetaData : metaData()) {
- builder.startObject(indexMetaData.getIndex(), XContentBuilder.FieldCaseConversion.NONE);
+ builder.startObject(indexMetaData.getIndex().getName(), XContentBuilder.FieldCaseConversion.NONE);
builder.field("state", indexMetaData.getState().toString().toLowerCase(Locale.ENGLISH));
@@ -506,7 +506,7 @@ public class ClusterState implements ToXContent, Diffable<ClusterState> {
builder.startObject("routing_table");
builder.startObject("indices");
for (IndexRoutingTable indexRoutingTable : routingTable()) {
- builder.startObject(indexRoutingTable.index(), XContentBuilder.FieldCaseConversion.NONE);
+ builder.startObject(indexRoutingTable.getIndex().getName(), XContentBuilder.FieldCaseConversion.NONE);
builder.startObject("shards");
for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
builder.startArray(Integer.toString(indexShardRoutingTable.shardId().id()));
diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java b/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java
index df85762357..dd30a71168 100644
--- a/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java
+++ b/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java
@@ -23,6 +23,7 @@ import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
import java.util.concurrent.atomic.AtomicReference;
@@ -44,6 +45,7 @@ public class ClusterStateObserver {
};
private final ClusterService clusterService;
+ private final ThreadContext contextHolder;
volatile TimeValue timeOutValue;
@@ -55,8 +57,8 @@ public class ClusterStateObserver {
volatile boolean timedOut;
- public ClusterStateObserver(ClusterService clusterService, ESLogger logger) {
- this(clusterService, new TimeValue(60000), logger);
+ public ClusterStateObserver(ClusterService clusterService, ESLogger logger, ThreadContext contextHolder) {
+ this(clusterService, new TimeValue(60000), logger, contextHolder);
}
/**
@@ -64,7 +66,7 @@ public class ClusterStateObserver {
* will fail any existing or new #waitForNextChange calls. Set to null
* to wait indefinitely
*/
- public ClusterStateObserver(ClusterService clusterService, @Nullable TimeValue timeout, ESLogger logger) {
+ public ClusterStateObserver(ClusterService clusterService, @Nullable TimeValue timeout, ESLogger logger, ThreadContext contextHolder) {
this.clusterService = clusterService;
this.lastObservedState = new AtomicReference<>(new ObservedState(clusterService.state()));
this.timeOutValue = timeout;
@@ -72,6 +74,7 @@ public class ClusterStateObserver {
this.startTimeNS = System.nanoTime();
}
this.logger = logger;
+ this.contextHolder = contextHolder;
}
/** last cluster state observer by this observer. Note that this may not be the current one */
@@ -146,7 +149,7 @@ public class ClusterStateObserver {
listener.onNewClusterState(newState.clusterState);
} else {
logger.trace("observer: sampled state rejected by predicate ({}). adding listener to ClusterService", newState);
- ObservingContext context = new ObservingContext(listener, changePredicate);
+ ObservingContext context = new ObservingContext(new ContextPreservingListener(listener, contextHolder.newStoredContext()), changePredicate);
if (!observingContext.compareAndSet(null, context)) {
throw new ElasticsearchException("already waiting for a cluster state change");
}
@@ -317,4 +320,33 @@ public class ClusterStateObserver {
return "version [" + clusterState.version() + "], status [" + status + "]";
}
}
+
+ private final static class ContextPreservingListener implements Listener {
+ private final Listener delegate;
+ private final ThreadContext.StoredContext tempContext;
+
+
+ private ContextPreservingListener(Listener delegate, ThreadContext.StoredContext storedContext) {
+ this.tempContext = storedContext;
+ this.delegate = delegate;
+ }
+
+ @Override
+ public void onNewClusterState(ClusterState state) {
+ tempContext.restore();
+ delegate.onNewClusterState(state);
+ }
+
+ @Override
+ public void onClusterServiceClose() {
+ tempContext.restore();
+ delegate.onClusterServiceClose();
+ }
+
+ @Override
+ public void onTimeout(TimeValue timeout) {
+ tempContext.restore();
+ delegate.onTimeout(timeout);
+ }
+ }
}
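ClusterStateObserver now captures the caller's ThreadContext when it parks a listener and restores it before any callback runs, so request headers survive the asynchronous wait for a cluster state change. The essence of the wrapper, condensed (names from the diff):

    // on the submitting thread: capture the current context
    ThreadContext.StoredContext stored = contextHolder.newStoredContext();
    Listener wrapped = new ContextPreservingListener(listener, stored);
    // later, on whichever thread delivers the event, the wrapper calls
    // stored.restore() before delegating, so the callback sees the
    // caller's original headers and transients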
diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java b/core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java
index fb22c2ca36..e5d3f06f1e 100644
--- a/core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java
+++ b/core/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java
@@ -120,7 +120,7 @@ public interface ClusterStateTaskExecutor<T> {
}
public boolean isSuccess() {
- return failure != null;
+ return this == SUCCESS;
}
/**
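A straight bug fix: isSuccess() previously returned true exactly when a failure had been recorded. Comparing against the shared success sentinel gives the intended semantics. The pattern, reduced to a self-contained illustration (class and field names here are generic, not the exact inner classes):

    final class Result {
        static final Result SUCCESS = new Result(null);
        final Throwable failure;
        Result(Throwable failure) { this.failure = failure; }
        boolean isSuccess() { return this == SUCCESS; } // was: failure != null (inverted)
    }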
diff --git a/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java b/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java
index 9a112613b1..78eef31633 100644
--- a/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java
+++ b/core/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java
@@ -162,7 +162,7 @@ public class SnapshotsInProgress extends AbstractDiffable<Custom> implements Cus
List<ShardId> waitingShards = waitingIndicesMap.get(entry.key.getIndex());
if (waitingShards == null) {
waitingShards = new ArrayList<>();
- waitingIndicesMap.put(entry.key.getIndex(), waitingShards);
+ waitingIndicesMap.put(entry.key.getIndexName(), waitingShards);
}
waitingShards.add(entry.key);
}
diff --git a/core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java b/core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java
index d4f453530b..012cc66e11 100644
--- a/core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java
+++ b/core/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java
@@ -103,7 +103,7 @@ public class NodeIndexDeletedAction extends AbstractComponent {
// master. If we can't acquire the locks here immediately there might be a shard of this index still holding on to the lock
// due to a "currently canceled recovery" or so. The shard will delete itself BEFORE the lock is released so it's guaranteed to be
// deleted by the time we get the lock
- indicesService.processPendingDeletes(new Index(index), indexSettings, new TimeValue(30, TimeUnit.MINUTES));
+ indicesService.processPendingDeletes(indexSettings.getIndex(), indexSettings, new TimeValue(30, TimeUnit.MINUTES));
transportService.sendRequest(clusterState.nodes().masterNode(),
INDEX_STORE_DELETED_ACTION_NAME, new NodeIndexStoreDeletedMessage(index, nodeId), EmptyTransportResponseHandler.INSTANCE_SAME);
} catch (LockObtainFailedException exc) {
diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java
index 3a837d81a7..4aca9a4e23 100644
--- a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java
+++ b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java
@@ -30,6 +30,7 @@ import org.elasticsearch.cluster.MasterNodeChangePredicate;
import org.elasticsearch.cluster.NotMasterException;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.RoutingService;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
@@ -61,6 +62,8 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
+import java.util.Map;
+import java.util.stream.Collectors;
import static org.elasticsearch.cluster.routing.ShardRouting.readShardRoutingEntry;
@@ -71,13 +74,15 @@ public class ShardStateAction extends AbstractComponent {
private final TransportService transportService;
private final ClusterService clusterService;
+ private final ThreadPool threadPool;
@Inject
public ShardStateAction(Settings settings, ClusterService clusterService, TransportService transportService,
- AllocationService allocationService, RoutingService routingService) {
+ AllocationService allocationService, RoutingService routingService, ThreadPool threadPool) {
super(settings);
this.transportService = transportService;
this.clusterService = clusterService;
+ this.threadPool = threadPool;
transportService.registerRequestHandler(SHARD_STARTED_ACTION_NAME, ShardRoutingEntry::new, ThreadPool.Names.SAME, new ShardStartedTransportHandler(clusterService, new ShardStartedClusterStateTaskExecutor(allocationService, logger), logger));
transportService.registerRequestHandler(SHARD_FAILED_ACTION_NAME, ShardRoutingEntry::new, ThreadPool.Names.SAME, new ShardFailedTransportHandler(clusterService, new ShardFailedClusterStateTaskExecutor(allocationService, routingService, logger), logger));
@@ -89,7 +94,7 @@ public class ShardStateAction extends AbstractComponent {
logger.warn("{} no master known for action [{}] for shard [{}]", shardRoutingEntry.getShardRouting().shardId(), actionName, shardRoutingEntry.getShardRouting());
waitForNewMasterAndRetry(actionName, observer, shardRoutingEntry, listener);
} else {
- logger.debug("{} sending [{}] to [{}] for shard [{}]", shardRoutingEntry.getShardRouting().getId(), actionName, masterNode.getId(), shardRoutingEntry);
+ logger.debug("{} sending [{}] to [{}] for shard [{}]", shardRoutingEntry.getShardRouting().shardId(), actionName, masterNode.getId(), shardRoutingEntry);
transportService.sendRequest(masterNode,
actionName, shardRoutingEntry, new EmptyTransportResponseHandler(ThreadPool.Names.SAME) {
@Override
@@ -121,7 +126,7 @@ public class ShardStateAction extends AbstractComponent {
}
public void shardFailed(final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, Listener listener) {
- ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger);
+ ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger, threadPool.getThreadContext());
ShardRoutingEntry shardRoutingEntry = new ShardRoutingEntry(shardRouting, indexUUID, message, failure);
sendShardAction(SHARD_FAILED_ACTION_NAME, observer, shardRoutingEntry, listener);
}
@@ -144,7 +149,7 @@ public class ShardStateAction extends AbstractComponent {
@Override
public void onClusterServiceClose() {
- logger.warn("{} node closed while execution action [{}] for shard [{}]", shardRoutingEntry.failure, shardRoutingEntry.getShardRouting().getId(), actionName, shardRoutingEntry.getShardRouting());
+ logger.warn("{} node closed while execution action [{}] for shard [{}]", shardRoutingEntry.failure, shardRoutingEntry.getShardRouting().shardId(), actionName, shardRoutingEntry.getShardRouting());
listener.onFailure(new NodeClosedException(clusterService.localNode()));
}
@@ -209,12 +214,12 @@ public class ShardStateAction extends AbstractComponent {
}
}
- private static class ShardFailedClusterStateTaskExecutor implements ClusterStateTaskExecutor<ShardRoutingEntry> {
+ static class ShardFailedClusterStateTaskExecutor implements ClusterStateTaskExecutor<ShardRoutingEntry> {
private final AllocationService allocationService;
private final RoutingService routingService;
private final ESLogger logger;
- public ShardFailedClusterStateTaskExecutor(AllocationService allocationService, RoutingService routingService, ESLogger logger) {
+ ShardFailedClusterStateTaskExecutor(AllocationService allocationService, RoutingService routingService, ESLogger logger) {
this.allocationService = allocationService;
this.routingService = routingService;
this.logger = logger;
@@ -223,23 +228,56 @@ public class ShardStateAction extends AbstractComponent {
@Override
public BatchResult<ShardRoutingEntry> execute(ClusterState currentState, List<ShardRoutingEntry> tasks) throws Exception {
BatchResult.Builder<ShardRoutingEntry> batchResultBuilder = BatchResult.builder();
- List<FailedRerouteAllocation.FailedShard> failedShards = new ArrayList<>(tasks.size());
- for (ShardRoutingEntry task : tasks) {
- failedShards.add(new FailedRerouteAllocation.FailedShard(task.shardRouting, task.message, task.failure));
- }
+
+ // partition tasks into those that correspond to shards
+ // that exist versus do not exist
+ Map<Boolean, List<ShardRoutingEntry>> partition =
+ tasks.stream().collect(Collectors.partitioningBy(task -> shardExists(currentState, task)));
+
+ // tasks that correspond to non-existent shards are marked
+ // as successful
+ batchResultBuilder.successes(partition.get(false));
+
ClusterState maybeUpdatedState = currentState;
+ List<ShardRoutingEntry> tasksToFail = partition.get(true);
try {
- RoutingAllocation.Result result = allocationService.applyFailedShards(currentState, failedShards);
+ List<FailedRerouteAllocation.FailedShard> failedShards =
+ tasksToFail
+ .stream()
+ .map(task -> new FailedRerouteAllocation.FailedShard(task.shardRouting, task.message, task.failure))
+ .collect(Collectors.toList());
+ RoutingAllocation.Result result = applyFailedShards(currentState, failedShards);
if (result.changed()) {
maybeUpdatedState = ClusterState.builder(currentState).routingResult(result).build();
}
- batchResultBuilder.successes(tasks);
+ batchResultBuilder.successes(tasksToFail);
} catch (Throwable t) {
- batchResultBuilder.failures(tasks, t);
+ // failures are communicated back to the requester
+ // cluster state will not be updated in this case
+ batchResultBuilder.failures(tasksToFail, t);
}
+
return batchResultBuilder.build(maybeUpdatedState);
}
+ // visible for testing
+ RoutingAllocation.Result applyFailedShards(ClusterState currentState, List<FailedRerouteAllocation.FailedShard> failedShards) {
+ return allocationService.applyFailedShards(currentState, failedShards);
+ }
+
+ private boolean shardExists(ClusterState currentState, ShardRoutingEntry task) {
+ RoutingNodes.RoutingNodeIterator routingNodeIterator =
+ currentState.getRoutingNodes().routingNodeIter(task.getShardRouting().currentNodeId());
+ if (routingNodeIterator != null) {
+ for (ShardRouting maybe : routingNodeIterator) {
+ if (task.getShardRouting().isSameAllocation(maybe)) {
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
@Override
public void clusterStatePublished(ClusterState newClusterState) {
int numberOfUnassignedShards = newClusterState.getRoutingNodes().unassigned().size();
@@ -254,7 +292,7 @@ public class ShardStateAction extends AbstractComponent {
}
public void shardStarted(final ShardRouting shardRouting, String indexUUID, final String message, Listener listener) {
- ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger);
+ ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger, threadPool.getThreadContext());
ShardRoutingEntry shardRoutingEntry = new ShardRoutingEntry(shardRouting, indexUUID, message, null);
sendShardAction(SHARD_STARTED_ACTION_NAME, observer, shardRoutingEntry, listener);
}
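The shard-failed executor now splits incoming tasks by whether the shard still exists in the current routing table, immediately acking stale tasks instead of feeding them to the allocator. Collectors.partitioningBy always populates both keys, so partition.get(true) and partition.get(false) are safe even when one side is empty. A standalone JDK illustration:

    import java.util.List;
    import java.util.Map;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    Map<Boolean, List<Integer>> partition = Stream.of(1, 2, 3, 4)
            .collect(Collectors.partitioningBy(n -> n % 2 == 0));
    // partition.get(true)  -> [2, 4]
    // partition.get(false) -> [1, 3]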
diff --git a/core/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java b/core/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java
index 0006c7da8c..d48fc3138d 100644
--- a/core/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java
+++ b/core/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java
@@ -304,29 +304,29 @@ public class ClusterBlocks extends AbstractDiffable<ClusterBlocks> {
public Builder addBlocks(IndexMetaData indexMetaData) {
if (indexMetaData.getState() == IndexMetaData.State.CLOSE) {
- addIndexBlock(indexMetaData.getIndex(), MetaDataIndexStateService.INDEX_CLOSED_BLOCK);
+ addIndexBlock(indexMetaData.getIndex().getName(), MetaDataIndexStateService.INDEX_CLOSED_BLOCK);
}
if (IndexMetaData.INDEX_READ_ONLY_SETTING.get(indexMetaData.getSettings())) {
- addIndexBlock(indexMetaData.getIndex(), IndexMetaData.INDEX_READ_ONLY_BLOCK);
+ addIndexBlock(indexMetaData.getIndex().getName(), IndexMetaData.INDEX_READ_ONLY_BLOCK);
}
if (IndexMetaData.INDEX_BLOCKS_READ_SETTING.get(indexMetaData.getSettings())) {
- addIndexBlock(indexMetaData.getIndex(), IndexMetaData.INDEX_READ_BLOCK);
+ addIndexBlock(indexMetaData.getIndex().getName(), IndexMetaData.INDEX_READ_BLOCK);
}
if (IndexMetaData.INDEX_BLOCKS_WRITE_SETTING.get(indexMetaData.getSettings())) {
- addIndexBlock(indexMetaData.getIndex(), IndexMetaData.INDEX_WRITE_BLOCK);
+ addIndexBlock(indexMetaData.getIndex().getName(), IndexMetaData.INDEX_WRITE_BLOCK);
}
if (IndexMetaData.INDEX_BLOCKS_METADATA_SETTING.get(indexMetaData.getSettings())) {
- addIndexBlock(indexMetaData.getIndex(), IndexMetaData.INDEX_METADATA_BLOCK);
+ addIndexBlock(indexMetaData.getIndex().getName(), IndexMetaData.INDEX_METADATA_BLOCK);
}
return this;
}
public Builder updateBlocks(IndexMetaData indexMetaData) {
- removeIndexBlock(indexMetaData.getIndex(), MetaDataIndexStateService.INDEX_CLOSED_BLOCK);
- removeIndexBlock(indexMetaData.getIndex(), IndexMetaData.INDEX_READ_ONLY_BLOCK);
- removeIndexBlock(indexMetaData.getIndex(), IndexMetaData.INDEX_READ_BLOCK);
- removeIndexBlock(indexMetaData.getIndex(), IndexMetaData.INDEX_WRITE_BLOCK);
- removeIndexBlock(indexMetaData.getIndex(), IndexMetaData.INDEX_METADATA_BLOCK);
+ removeIndexBlock(indexMetaData.getIndex().getName(), MetaDataIndexStateService.INDEX_CLOSED_BLOCK);
+ removeIndexBlock(indexMetaData.getIndex().getName(), IndexMetaData.INDEX_READ_ONLY_BLOCK);
+ removeIndexBlock(indexMetaData.getIndex().getName(), IndexMetaData.INDEX_READ_BLOCK);
+ removeIndexBlock(indexMetaData.getIndex().getName(), IndexMetaData.INDEX_WRITE_BLOCK);
+ removeIndexBlock(indexMetaData.getIndex().getName(), IndexMetaData.INDEX_METADATA_BLOCK);
return addBlocks(indexMetaData);
}
diff --git a/core/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java b/core/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java
index 3fd10fd91d..ce1f3adb53 100644
--- a/core/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java
+++ b/core/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java
@@ -67,7 +67,7 @@ public final class ClusterIndexHealth implements Iterable<ClusterShardHealth>, S
}
public ClusterIndexHealth(IndexMetaData indexMetaData, IndexRoutingTable indexRoutingTable) {
- this.index = indexMetaData.getIndex();
+ this.index = indexMetaData.getIndex().getName();
this.numberOfShards = indexMetaData.getNumberOfShards();
this.numberOfReplicas = indexMetaData.getNumberOfReplicas();
this.validationFailures = indexRoutingTable.validate(indexMetaData);
diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/AliasOrIndex.java b/core/src/main/java/org/elasticsearch/cluster/metadata/AliasOrIndex.java
index b8de2ea525..4ad9b7e531 100644
--- a/core/src/main/java/org/elasticsearch/cluster/metadata/AliasOrIndex.java
+++ b/core/src/main/java/org/elasticsearch/cluster/metadata/AliasOrIndex.java
@@ -117,7 +117,7 @@ public interface AliasOrIndex {
@Override
public Tuple<String, AliasMetaData> next() {
IndexMetaData indexMetaData = referenceIndexMetaDatas.get(index++);
- return new Tuple<>(indexMetaData.getIndex(), indexMetaData.getAliases().get(aliasName));
+ return new Tuple<>(indexMetaData.getIndex().getName(), indexMetaData.getAliases().get(aliasName));
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java b/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java
index e5b170b05a..091fde6dec 100644
--- a/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java
+++ b/core/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java
@@ -97,7 +97,7 @@ public class AliasValidator extends AbstractComponent {
assert metaData != null;
if (metaData.hasIndex(alias)) {
- throw new InvalidAliasNameException(new Index(index), alias, "an index exists with the same name as the alias");
+ throw new InvalidAliasNameException(metaData.index(alias).getIndex(), alias, "an index exists with the same name as the alias");
}
}
diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java
index b2772c0b2c..f8822ceb28 100644
--- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java
+++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java
@@ -29,13 +29,11 @@ import org.elasticsearch.cluster.DiffableUtils;
import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.node.DiscoveryNodeFilters;
-import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.collect.ImmutableOpenIntMap;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.collect.MapBuilder;
-import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@@ -47,6 +45,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.index.Index;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.rest.RestStatus;
import org.joda.time.DateTime;
@@ -72,7 +71,7 @@ import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
/**
*
*/
-public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuilder<IndexMetaData>, ToXContent {
+public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuilder<IndexMetaData>, ToXContent {
public interface Custom extends Diffable<Custom>, ToXContent {
@@ -150,6 +149,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuild
throw new IllegalStateException("No state match for [" + state + "]");
}
}
+
public static final String INDEX_SETTING_PREFIX = "index.";
public static final String SETTING_NUMBER_OF_SHARDS = "index.number_of_shards";
public static final Setting<Integer> INDEX_NUMBER_OF_SHARDS_SETTING = Setting.intSetting(SETTING_NUMBER_OF_SHARDS, 5, 1, false, Setting.Scope.INDEX);
@@ -196,15 +196,15 @@ public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuild
public static final Setting<Settings> INDEX_ROUTING_EXCLUDE_GROUP_SETTING = Setting.groupSetting("index.routing.allocation.exclude.", true, Setting.Scope.INDEX);
public static final IndexMetaData PROTO = IndexMetaData.builder("")
- .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
- .numberOfShards(1).numberOfReplicas(0).build();
+ .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
+ .numberOfShards(1).numberOfReplicas(0).build();
public static final String KEY_ACTIVE_ALLOCATIONS = "active_allocations";
private final int numberOfShards;
private final int numberOfReplicas;
- private final String index;
+ private final Index index;
private final long version;
private final State state;
@@ -229,7 +229,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuild
private final Version indexUpgradedVersion;
private final org.apache.lucene.util.Version minimumCompatibleLuceneVersion;
- private IndexMetaData(String index, long version, State state, int numberOfShards, int numberOfReplicas, Settings settings,
+ private IndexMetaData(Index index, long version, State state, int numberOfShards, int numberOfReplicas, Settings settings,
ImmutableOpenMap<String, MappingMetaData> mappings, ImmutableOpenMap<String, AliasMetaData> aliases,
ImmutableOpenMap<String, Custom> customs, ImmutableOpenIntMap<Set<String>> activeAllocationIds,
DiscoveryNodeFilters requireFilters, DiscoveryNodeFilters includeFilters, DiscoveryNodeFilters excludeFilters,
@@ -254,12 +254,12 @@ public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuild
this.minimumCompatibleLuceneVersion = minimumCompatibleLuceneVersion;
}
- public String getIndex() {
+ public Index getIndex() {
return index;
}
public String getIndexUUID() {
- return settings.get(SETTING_INDEX_UUID, INDEX_UUID_NA_VALUE);
+ return index.getUUID();
}
/**
@@ -302,7 +302,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuild
}
public long getCreationDate() {
- return settings.getAsLong(SETTING_CREATION_DATE, -1l);
+ return settings.getAsLong(SETTING_CREATION_DATE, -1L);
}
public State getState() {
@@ -466,7 +466,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuild
private final Diff<ImmutableOpenIntMap<Set<String>>> activeAllocationIds;
public IndexMetaDataDiff(IndexMetaData before, IndexMetaData after) {
- index = after.index;
+ index = after.index.getName();
version = after.version;
state = after.state;
settings = after.settings;
@@ -486,16 +486,16 @@ public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuild
aliases = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), AliasMetaData.PROTO);
customs = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(),
new DiffableUtils.DiffableValueSerializer<String, Custom>() {
- @Override
- public Custom read(StreamInput in, String key) throws IOException {
- return lookupPrototypeSafe(key).readFrom(in);
- }
+ @Override
+ public Custom read(StreamInput in, String key) throws IOException {
+ return lookupPrototypeSafe(key).readFrom(in);
+ }
- @Override
- public Diff<Custom> readDiff(StreamInput in, String key) throws IOException {
- return lookupPrototypeSafe(key).readDiffFrom(in);
- }
- });
+ @Override
+ public Diff<Custom> readDiff(StreamInput in, String key) throws IOException {
+ return lookupPrototypeSafe(key).readDiffFrom(in);
+ }
+ });
activeAllocationIds = DiffableUtils.readImmutableOpenIntMapDiff(in, DiffableUtils.getVIntKeySerializer(),
DiffableUtils.StringSetValueSerializer.getInstance());
}
@@ -559,7 +559,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuild
@Override
public void writeTo(StreamOutput out) throws IOException {
- out.writeString(index);
+ out.writeString(index.getName()); // uuid will come as part of settings
out.writeLong(version);
out.writeByte(state.id());
writeSettingsToStream(settings, out);
@@ -611,7 +611,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuild
}
public Builder(IndexMetaData indexMetaData) {
- this.index = indexMetaData.getIndex();
+ this.index = indexMetaData.getIndex().getName();
this.state = indexMetaData.state;
this.version = indexMetaData.version;
this.settings = indexMetaData.getSettings();
@@ -791,19 +791,20 @@ public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuild
try {
minimumCompatibleLuceneVersion = org.apache.lucene.util.Version.parse(stringLuceneVersion);
} catch (ParseException ex) {
- throw new IllegalStateException("Cannot parse lucene version [" + stringLuceneVersion + "] in the [" + SETTING_VERSION_MINIMUM_COMPATIBLE +"] setting", ex);
+ throw new IllegalStateException("Cannot parse lucene version [" + stringLuceneVersion + "] in the [" + SETTING_VERSION_MINIMUM_COMPATIBLE + "] setting", ex);
}
} else {
minimumCompatibleLuceneVersion = null;
}
- return new IndexMetaData(index, version, state, numberOfShards, numberOfReplicas, tmpSettings, mappings.build(),
+ final String uuid = settings.get(SETTING_INDEX_UUID, INDEX_UUID_NA_VALUE);
+ return new IndexMetaData(new Index(index, uuid), version, state, numberOfShards, numberOfReplicas, tmpSettings, mappings.build(),
tmpAliases.build(), customs.build(), filledActiveAllocationIds.build(), requireFilters, includeFilters, excludeFilters,
indexCreatedVersion, indexUpgradedVersion, minimumCompatibleLuceneVersion);
}
public static void toXContent(IndexMetaData indexMetaData, XContentBuilder builder, ToXContent.Params params) throws IOException {
- builder.startObject(indexMetaData.getIndex(), XContentBuilder.FieldCaseConversion.NONE);
+ builder.startObject(indexMetaData.getIndex().getName(), XContentBuilder.FieldCaseConversion.NONE);
builder.field("version", indexMetaData.getVersion());
builder.field("state", indexMetaData.getState().toString().toLowerCase(Locale.ENGLISH));
diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
index d2f3a47b75..0661f6c436 100644
--- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
+++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
@@ -159,7 +159,7 @@ public class IndexNameExpressionResolver extends AbstractComponent {
String[] indexNames = new String[resolvedIndices.size()];
int i = 0;
for (IndexMetaData indexMetaData : resolvedIndices) {
- indexNames[i++] = indexMetaData.getIndex();
+ indexNames[i++] = indexMetaData.getIndex().getName();
}
throw new IllegalArgumentException("Alias [" + expression + "] has more than one indices associated with it [" + Arrays.toString(indexNames) + "], can't execute a single index op");
}
@@ -167,14 +167,14 @@ public class IndexNameExpressionResolver extends AbstractComponent {
for (IndexMetaData index : resolvedIndices) {
if (index.getState() == IndexMetaData.State.CLOSE) {
if (failClosed) {
- throw new IndexClosedException(new Index(index.getIndex()));
+ throw new IndexClosedException(index.getIndex());
} else {
if (options.forbidClosedIndices() == false) {
- concreteIndices.add(index.getIndex());
+ concreteIndices.add(index.getIndex().getName());
}
}
} else if (index.getState() == IndexMetaData.State.OPEN) {
- concreteIndices.add(index.getIndex());
+ concreteIndices.add(index.getIndex().getName());
} else {
throw new IllegalStateException("index state [" + index.getState() + "] not supported");
}
@@ -640,7 +640,7 @@ public class IndexNameExpressionResolver extends AbstractComponent {
} else {
for (IndexMetaData meta : aliasOrIndex.getIndices()) {
if (excludeState == null || meta.getState() != excludeState) {
- expand.add(meta.getIndex());
+ expand.add(meta.getIndex().getName());
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java
index a26e95c40e..a88f1609b9 100644
--- a/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java
+++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java
@@ -41,7 +41,7 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
-import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
/**
* Mapping configuration for a type.
@@ -237,7 +237,7 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("required")) {
- required = nodeBooleanValue(fieldNode);
+ required = lenientNodeBooleanValue(fieldNode);
}
}
this.routing = new Routing(required);
@@ -254,13 +254,13 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
- enabled = nodeBooleanValue(fieldNode);
+ enabled = lenientNodeBooleanValue(fieldNode);
} else if (fieldName.equals("format")) {
format = fieldNode.toString();
} else if (fieldName.equals("default") && fieldNode != null) {
defaultTimestamp = fieldNode.toString();
} else if (fieldName.equals("ignore_missing")) {
- ignoreMissing = nodeBooleanValue(fieldNode);
+ ignoreMissing = lenientNodeBooleanValue(fieldNode);
}
}
this.timestamp = new Timestamp(enabled, format, defaultTimestamp, ignoreMissing);
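The _routing/_timestamp flags keep the old permissive boolean parsing while nodeBooleanValue becomes strict elsewhere. A minimal stand-in with the assumed lenient semantics (the real helper lives in XContentMapValues; the token list below is the historical lenient behavior, not verified against this commit):

    static boolean lenientBoolean(Object node) {
        if (node instanceof Boolean) {
            return (Boolean) node;
        }
        String s = node.toString();
        // only explicitly false-like tokens map to false; anything else is true
        return !(s.equals("false") || s.equals("off") || s.equals("no") || s.equals("0"));
    }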
diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java
index 002d1a5110..9fb526b740 100644
--- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java
+++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java
@@ -50,10 +50,12 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.discovery.DiscoverySettings;
+import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.store.IndexStoreConfig;
import org.elasticsearch.indices.recovery.RecoverySettings;
import org.elasticsearch.indices.ttl.IndicesTTLService;
+import org.elasticsearch.ingest.IngestMetadata;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
@@ -111,6 +113,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, Fr
static {
// register non plugin custom metadata
registerPrototype(RepositoriesMetaData.TYPE, RepositoriesMetaData.PROTO);
+ registerPrototype(IngestMetadata.TYPE, IngestMetadata.PROTO);
}
/**
@@ -227,7 +230,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, Fr
public boolean equalsAliases(MetaData other) {
for (ObjectCursor<IndexMetaData> cursor : other.indices().values()) {
IndexMetaData otherIndex = cursor.value;
- IndexMetaData thisIndex= indices().get(otherIndex.getIndex());
+ IndexMetaData thisIndex= index(otherIndex.getIndex());
if (thisIndex == null) {
return false;
}
@@ -414,7 +417,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, Fr
String[] indexNames = new String[result.getIndices().size()];
int i = 0;
for (IndexMetaData indexMetaData : result.getIndices()) {
- indexNames[i++] = indexMetaData.getIndex();
+ indexNames[i++] = indexMetaData.getIndex().getName();
}
throw new IllegalArgumentException("Alias [" + aliasOrIndex + "] has more than one index associated with it [" + Arrays.toString(indexNames) + "], can't execute a single index op");
}
@@ -449,6 +452,10 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, Fr
return indices.get(index);
}
+ public IndexMetaData index(Index index) {
+ return index(index.getName());
+ }
+
public ImmutableOpenMap<String, IndexMetaData> indices() {
return this.indices;
}
@@ -813,19 +820,19 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, Fr
// we know its a new one, increment the version and store
indexMetaDataBuilder.version(indexMetaDataBuilder.version() + 1);
IndexMetaData indexMetaData = indexMetaDataBuilder.build();
- indices.put(indexMetaData.getIndex(), indexMetaData);
+ indices.put(indexMetaData.getIndex().getName(), indexMetaData);
return this;
}
public Builder put(IndexMetaData indexMetaData, boolean incrementVersion) {
- if (indices.get(indexMetaData.getIndex()) == indexMetaData) {
+ if (indices.get(indexMetaData.getIndex().getName()) == indexMetaData) {
return this;
}
// if we put a new index metadata, increment its version
if (incrementVersion) {
indexMetaData = IndexMetaData.builder(indexMetaData).version(indexMetaData.getVersion() + 1).build();
}
- indices.put(indexMetaData.getIndex(), indexMetaData);
+ indices.put(indexMetaData.getIndex().getName(), indexMetaData);
return this;
}
@@ -962,7 +969,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, Fr
// do the required operations, the bottleneck isn't resolving expressions into concrete indices.
List<String> allIndicesLst = new ArrayList<>();
for (ObjectCursor<IndexMetaData> cursor : indices.values()) {
- allIndicesLst.add(cursor.value.getIndex());
+ allIndicesLst.add(cursor.value.getIndex().getName());
}
String[] allIndices = allIndicesLst.toArray(new String[allIndicesLst.size()]);
@@ -971,9 +978,9 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, Fr
for (ObjectCursor<IndexMetaData> cursor : indices.values()) {
IndexMetaData indexMetaData = cursor.value;
if (indexMetaData.getState() == IndexMetaData.State.OPEN) {
- allOpenIndicesLst.add(indexMetaData.getIndex());
+ allOpenIndicesLst.add(indexMetaData.getIndex().getName());
} else if (indexMetaData.getState() == IndexMetaData.State.CLOSE) {
- allClosedIndicesLst.add(indexMetaData.getIndex());
+ allClosedIndicesLst.add(indexMetaData.getIndex().getName());
}
}
String[] allOpenIndices = allOpenIndicesLst.toArray(new String[allOpenIndicesLst.size()]);
@@ -983,7 +990,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, Fr
SortedMap<String, AliasOrIndex> aliasAndIndexLookup = new TreeMap<>();
for (ObjectCursor<IndexMetaData> cursor : indices.values()) {
IndexMetaData indexMetaData = cursor.value;
- aliasAndIndexLookup.put(indexMetaData.getIndex(), new AliasOrIndex.Index(indexMetaData));
+ aliasAndIndexLookup.put(indexMetaData.getIndex().getName(), new AliasOrIndex.Index(indexMetaData));
for (ObjectObjectCursor<String, AliasMetaData> aliasCursor : indexMetaData.getAliases()) {
AliasMetaData aliasMetaData = aliasCursor.value;
@@ -996,7 +1003,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, Fr
alias.addIndex(indexMetaData);
} else if (aliasOrIndex instanceof AliasOrIndex.Index) {
AliasOrIndex.Index index = (AliasOrIndex.Index) aliasOrIndex;
- throw new IllegalStateException("index and alias names need to be unique, but alias [" + aliasMetaData.getAlias() + "] and index [" + index.getIndex().getIndex() + "] have the same name");
+ throw new IllegalStateException("index and alias names need to be unique, but alias [" + aliasMetaData.getAlias() + "] and index " + index.getIndex().getIndex() + " have the same name");
} else {
throw new IllegalStateException("unexpected alias [" + aliasMetaData.getAlias() + "][" + aliasOrIndex + "]");
}
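MetaData grows an index(Index) overload so call sites that now hold an Index need not unwrap it by hand. Note it still resolves by name only (it delegates to index(index.getName())), so it can return an IndexMetaData whose UUID differs from the argument's. Illustrative:

    IndexMetaData byName = metaData.index("logs-2016-02");            // existing String lookup (name illustrative)
    IndexMetaData byIndex = metaData.index(indexMetaData.getIndex()); // new overload, delegates to the name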
diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java
index 2344e9af77..fbf3446b2d 100644
--- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java
@@ -137,22 +137,22 @@ public class MetaDataCreateIndexService extends AbstractComponent {
public void validateIndexName(String index, ClusterState state) {
if (state.routingTable().hasIndex(index)) {
- throw new IndexAlreadyExistsException(new Index(index));
+ throw new IndexAlreadyExistsException(state.routingTable().index(index).getIndex());
}
if (state.metaData().hasIndex(index)) {
- throw new IndexAlreadyExistsException(new Index(index));
+ throw new IndexAlreadyExistsException(state.metaData().index(index).getIndex());
}
if (!Strings.validFileName(index)) {
- throw new InvalidIndexNameException(new Index(index), index, "must not contain the following characters " + Strings.INVALID_FILENAME_CHARS);
+ throw new InvalidIndexNameException(index, "must not contain the following characters " + Strings.INVALID_FILENAME_CHARS);
}
if (index.contains("#")) {
- throw new InvalidIndexNameException(new Index(index), index, "must not contain '#'");
+ throw new InvalidIndexNameException(index, "must not contain '#'");
}
if (index.charAt(0) == '_') {
- throw new InvalidIndexNameException(new Index(index), index, "must not start with '_'");
+ throw new InvalidIndexNameException(index, "must not start with '_'");
}
if (!index.toLowerCase(Locale.ROOT).equals(index)) {
- throw new InvalidIndexNameException(new Index(index), index, "must be lowercase");
+ throw new InvalidIndexNameException(index, "must be lowercase");
}
int byteCount = 0;
try {
@@ -162,15 +162,15 @@ public class MetaDataCreateIndexService extends AbstractComponent {
throw new ElasticsearchException("Unable to determine length of index name", e);
}
if (byteCount > MAX_INDEX_NAME_BYTES) {
- throw new InvalidIndexNameException(new Index(index), index,
+ throw new InvalidIndexNameException(index,
"index name is too long, (" + byteCount +
- " > " + MAX_INDEX_NAME_BYTES + ")");
+ " > " + MAX_INDEX_NAME_BYTES + ")");
}
if (state.metaData().hasAlias(index)) {
- throw new InvalidIndexNameException(new Index(index), index, "already exists as alias");
+ throw new InvalidIndexNameException(index, "already exists as alias");
}
if (index.equals(".") || index.equals("..")) {
- throw new InvalidIndexNameException(new Index(index), index, "must not be '.' or '..'");
+ throw new InvalidIndexNameException(index, "must not be '.' or '..'");
}
}
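The validation rules themselves are unchanged; only the exception plumbing is. InvalidIndexNameException now takes the offending name directly rather than a synthetic new Index(name), and the already-exists checks reuse the real Index from the routing table or metadata. Caller-visible behavior, sketched (index names and the service variable are illustrative):

    createIndexService.validateIndexName("logs-2016-02", state); // passes
    createIndexService.validateIndexName("Logs", state);         // InvalidIndexNameException: must be lowercase
    createIndexService.validateIndexName("_internal", state);    // InvalidIndexNameException: must not start with '_'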
@@ -187,242 +187,242 @@ public class MetaDataCreateIndexService extends AbstractComponent {
return new ClusterStateUpdateResponse(acknowledged);
}
- @Override
- public ClusterState execute(ClusterState currentState) throws Exception {
- boolean indexCreated = false;
- String removalReason = null;
- try {
- validate(request, currentState);
-
- for (Alias alias : request.aliases()) {
- aliasValidator.validateAlias(alias, request.index(), currentState.metaData());
- }
+ @Override
+ public ClusterState execute(ClusterState currentState) throws Exception {
+ boolean indexCreated = false;
+ String removalReason = null;
+ try {
+ validate(request, currentState);
- // we only find a template when its an API call (a new index)
- // find templates, highest order are better matching
- List<IndexTemplateMetaData> templates = findTemplates(request, currentState, indexTemplateFilter);
+ for (Alias alias : request.aliases()) {
+ aliasValidator.validateAlias(alias, request.index(), currentState.metaData());
+ }
- Map<String, Custom> customs = new HashMap<>();
+ // we only find a template when its an API call (a new index)
+ // find templates, highest order are better matching
+ List<IndexTemplateMetaData> templates = findTemplates(request, currentState, indexTemplateFilter);
- // add the request mapping
- Map<String, Map<String, Object>> mappings = new HashMap<>();
+ Map<String, Custom> customs = new HashMap<>();
- Map<String, AliasMetaData> templatesAliases = new HashMap<>();
+ // add the request mapping
+ Map<String, Map<String, Object>> mappings = new HashMap<>();
- List<String> templateNames = new ArrayList<>();
+ Map<String, AliasMetaData> templatesAliases = new HashMap<>();
- for (Map.Entry<String, String> entry : request.mappings().entrySet()) {
- mappings.put(entry.getKey(), parseMapping(entry.getValue()));
- }
+ List<String> templateNames = new ArrayList<>();
- for (Map.Entry<String, Custom> entry : request.customs().entrySet()) {
- customs.put(entry.getKey(), entry.getValue());
- }
+ for (Map.Entry<String, String> entry : request.mappings().entrySet()) {
+ mappings.put(entry.getKey(), parseMapping(entry.getValue()));
+ }
- // apply templates, merging the mappings into the request mapping if exists
- for (IndexTemplateMetaData template : templates) {
- templateNames.add(template.getName());
- for (ObjectObjectCursor<String, CompressedXContent> cursor : template.mappings()) {
- if (mappings.containsKey(cursor.key)) {
- XContentHelper.mergeDefaults(mappings.get(cursor.key), parseMapping(cursor.value.string()));
- } else {
- mappings.put(cursor.key, parseMapping(cursor.value.string()));
+ for (Map.Entry<String, Custom> entry : request.customs().entrySet()) {
+ customs.put(entry.getKey(), entry.getValue());
}
- }
- // handle custom
- for (ObjectObjectCursor<String, Custom> cursor : template.customs()) {
- String type = cursor.key;
- IndexMetaData.Custom custom = cursor.value;
- IndexMetaData.Custom existing = customs.get(type);
- if (existing == null) {
- customs.put(type, custom);
- } else {
- IndexMetaData.Custom merged = existing.mergeWith(custom);
- customs.put(type, merged);
+
+ // apply templates, merging the mappings into the request mapping if exists
+ for (IndexTemplateMetaData template : templates) {
+ templateNames.add(template.getName());
+ for (ObjectObjectCursor<String, CompressedXContent> cursor : template.mappings()) {
+ if (mappings.containsKey(cursor.key)) {
+ XContentHelper.mergeDefaults(mappings.get(cursor.key), parseMapping(cursor.value.string()));
+ } else {
+ mappings.put(cursor.key, parseMapping(cursor.value.string()));
+ }
+ }
+ // handle custom
+ for (ObjectObjectCursor<String, Custom> cursor : template.customs()) {
+ String type = cursor.key;
+ IndexMetaData.Custom custom = cursor.value;
+ IndexMetaData.Custom existing = customs.get(type);
+ if (existing == null) {
+ customs.put(type, custom);
+ } else {
+ IndexMetaData.Custom merged = existing.mergeWith(custom);
+ customs.put(type, merged);
+ }
+ }
+ //handle aliases
+ for (ObjectObjectCursor<String, AliasMetaData> cursor : template.aliases()) {
+ AliasMetaData aliasMetaData = cursor.value;
+ //if an alias with the same name came with the create index request itself,
+ // ignore this one taken from the index template
+ if (request.aliases().contains(new Alias(aliasMetaData.alias()))) {
+ continue;
+ }
+ //if an alias with the same name was already processed, ignore this one
+ if (templatesAliases.containsKey(cursor.key)) {
+ continue;
+ }
+
+ //Allow templatesAliases to be templated by replacing a token with the name of the index that we are applying it to
+ if (aliasMetaData.alias().contains("{index}")) {
+ String templatedAlias = aliasMetaData.alias().replace("{index}", request.index());
+ aliasMetaData = AliasMetaData.newAliasMetaData(aliasMetaData, templatedAlias);
+ }
+
+ aliasValidator.validateAliasMetaData(aliasMetaData, request.index(), currentState.metaData());
+ templatesAliases.put(aliasMetaData.alias(), aliasMetaData);
+ }
}
- }
- //handle aliases
- for (ObjectObjectCursor<String, AliasMetaData> cursor : template.aliases()) {
- AliasMetaData aliasMetaData = cursor.value;
- //if an alias with the same name came with the create index request itself,
- // ignore this one taken from the index template
- if (request.aliases().contains(new Alias(aliasMetaData.alias()))) {
- continue;
+
+ Settings.Builder indexSettingsBuilder = settingsBuilder();
+ // apply templates here, in reverse order, since the first ones are better matching
+ for (int i = templates.size() - 1; i >= 0; i--) {
+ indexSettingsBuilder.put(templates.get(i).settings());
}
- //if an alias with the same name was already processed, ignore this one
- if (templatesAliases.containsKey(cursor.key)) {
- continue;
+ // now, put the request settings, so they override templates
+ indexSettingsBuilder.put(request.settings());
+ if (request.index().equals(ScriptService.SCRIPT_INDEX)) {
+ indexSettingsBuilder.put(SETTING_NUMBER_OF_SHARDS, settings.getAsInt(SETTING_NUMBER_OF_SHARDS, 1));
+ } else {
+ if (indexSettingsBuilder.get(SETTING_NUMBER_OF_SHARDS) == null) {
+ indexSettingsBuilder.put(SETTING_NUMBER_OF_SHARDS, settings.getAsInt(SETTING_NUMBER_OF_SHARDS, 5));
+ }
}
-
- //Allow templatesAliases to be templated by replacing a token with the name of the index that we are applying it to
- if (aliasMetaData.alias().contains("{index}")) {
- String templatedAlias = aliasMetaData.alias().replace("{index}", request.index());
- aliasMetaData = AliasMetaData.newAliasMetaData(aliasMetaData, templatedAlias);
+ if (request.index().equals(ScriptService.SCRIPT_INDEX)) {
+ indexSettingsBuilder.put(SETTING_NUMBER_OF_REPLICAS, settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, 0));
+ indexSettingsBuilder.put(SETTING_AUTO_EXPAND_REPLICAS, "0-all");
+ } else {
+ if (indexSettingsBuilder.get(SETTING_NUMBER_OF_REPLICAS) == null) {
+ indexSettingsBuilder.put(SETTING_NUMBER_OF_REPLICAS, settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, 1));
+ }
}
- aliasValidator.validateAliasMetaData(aliasMetaData, request.index(), currentState.metaData());
- templatesAliases.put(aliasMetaData.alias(), aliasMetaData);
- }
- }
-
- Settings.Builder indexSettingsBuilder = settingsBuilder();
- // apply templates here, in reverse order, since the first ones are better matching
- for (int i = templates.size() - 1; i >= 0; i--) {
- indexSettingsBuilder.put(templates.get(i).settings());
- }
- // now, put the request settings, so they override templates
- indexSettingsBuilder.put(request.settings());
- if (request.index().equals(ScriptService.SCRIPT_INDEX)) {
- indexSettingsBuilder.put(SETTING_NUMBER_OF_SHARDS, settings.getAsInt(SETTING_NUMBER_OF_SHARDS, 1));
- } else {
- if (indexSettingsBuilder.get(SETTING_NUMBER_OF_SHARDS) == null) {
- indexSettingsBuilder.put(SETTING_NUMBER_OF_SHARDS, settings.getAsInt(SETTING_NUMBER_OF_SHARDS, 5));
- }
- }
- if (request.index().equals(ScriptService.SCRIPT_INDEX)) {
- indexSettingsBuilder.put(SETTING_NUMBER_OF_REPLICAS, settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, 0));
- indexSettingsBuilder.put(SETTING_AUTO_EXPAND_REPLICAS, "0-all");
- } else {
- if (indexSettingsBuilder.get(SETTING_NUMBER_OF_REPLICAS) == null) {
- indexSettingsBuilder.put(SETTING_NUMBER_OF_REPLICAS, settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, 1));
- }
- }
-
- if (settings.get(SETTING_AUTO_EXPAND_REPLICAS) != null && indexSettingsBuilder.get(SETTING_AUTO_EXPAND_REPLICAS) == null) {
- indexSettingsBuilder.put(SETTING_AUTO_EXPAND_REPLICAS, settings.get(SETTING_AUTO_EXPAND_REPLICAS));
- }
+ if (settings.get(SETTING_AUTO_EXPAND_REPLICAS) != null && indexSettingsBuilder.get(SETTING_AUTO_EXPAND_REPLICAS) == null) {
+ indexSettingsBuilder.put(SETTING_AUTO_EXPAND_REPLICAS, settings.get(SETTING_AUTO_EXPAND_REPLICAS));
+ }
- if (indexSettingsBuilder.get(SETTING_VERSION_CREATED) == null) {
- DiscoveryNodes nodes = currentState.nodes();
- final Version createdVersion = Version.smallest(version, nodes.smallestNonClientNodeVersion());
- indexSettingsBuilder.put(SETTING_VERSION_CREATED, createdVersion);
- }
+ if (indexSettingsBuilder.get(SETTING_VERSION_CREATED) == null) {
+ DiscoveryNodes nodes = currentState.nodes();
+ final Version createdVersion = Version.smallest(version, nodes.smallestNonClientNodeVersion());
+ indexSettingsBuilder.put(SETTING_VERSION_CREATED, createdVersion);
+ }
- if (indexSettingsBuilder.get(SETTING_CREATION_DATE) == null) {
- indexSettingsBuilder.put(SETTING_CREATION_DATE, new DateTime(DateTimeZone.UTC).getMillis());
- }
+ if (indexSettingsBuilder.get(SETTING_CREATION_DATE) == null) {
+ indexSettingsBuilder.put(SETTING_CREATION_DATE, new DateTime(DateTimeZone.UTC).getMillis());
+ }
- indexSettingsBuilder.put(SETTING_INDEX_UUID, Strings.randomBase64UUID());
+ indexSettingsBuilder.put(SETTING_INDEX_UUID, Strings.randomBase64UUID());
+
+ Settings actualIndexSettings = indexSettingsBuilder.build();
+
+ // Set up everything, now locally create the index to see that things are ok, and apply the mappings
+ final IndexMetaData tmpImd = IndexMetaData.builder(request.index()).settings(actualIndexSettings).build();
+ // create the index here (on the master) to validate that it can be created, as well as to add the mappings
+ indicesService.createIndex(nodeServicesProvider, tmpImd, Collections.emptyList());
+ indexCreated = true;
+ // now add the mappings
+ IndexService indexService = indicesService.indexServiceSafe(request.index());
+ MapperService mapperService = indexService.mapperService();
+ // first, add the default mapping
+ if (mappings.containsKey(MapperService.DEFAULT_MAPPING)) {
+ try {
+ mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(XContentFactory.jsonBuilder().map(mappings.get(MapperService.DEFAULT_MAPPING)).string()), MapperService.MergeReason.MAPPING_UPDATE, request.updateAllTypes());
+ } catch (Exception e) {
+ removalReason = "failed on parsing default mapping on index creation";
+ throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, MapperService.DEFAULT_MAPPING, e.getMessage());
+ }
+ }
+ for (Map.Entry<String, Map<String, Object>> entry : mappings.entrySet()) {
+ if (entry.getKey().equals(MapperService.DEFAULT_MAPPING)) {
+ continue;
+ }
+ try {
+ // apply the default here, it's the first time we parse it
+ mapperService.merge(entry.getKey(), new CompressedXContent(XContentFactory.jsonBuilder().map(entry.getValue()).string()), MapperService.MergeReason.MAPPING_UPDATE, request.updateAllTypes());
+ } catch (Exception e) {
+ removalReason = "failed on parsing mappings on index creation";
+ throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, entry.getKey(), e.getMessage());
+ }
+ }
- Settings actualIndexSettings = indexSettingsBuilder.build();
+ QueryShardContext queryShardContext = indexService.getQueryShardContext();
+ for (Alias alias : request.aliases()) {
+ if (Strings.hasLength(alias.filter())) {
+ aliasValidator.validateAliasFilter(alias.name(), alias.filter(), queryShardContext);
+ }
+ }
+ for (AliasMetaData aliasMetaData : templatesAliases.values()) {
+ if (aliasMetaData.filter() != null) {
+ aliasValidator.validateAliasFilter(aliasMetaData.alias(), aliasMetaData.filter().uncompressed(), queryShardContext);
+ }
+ }
- // Set up everything, now locally create the index to see that things are ok, and apply the mappings
- final IndexMetaData tmpImd = IndexMetaData.builder(request.index()).settings(actualIndexSettings).build();
- // create the index here (on the master) to validate that it can be created, as well as to add the mappings
- indicesService.createIndex(nodeServicesProvider, tmpImd, Collections.emptyList());
- indexCreated = true;
- // now add the mappings
- IndexService indexService = indicesService.indexServiceSafe(request.index());
- MapperService mapperService = indexService.mapperService();
- // first, add the default mapping
- if (mappings.containsKey(MapperService.DEFAULT_MAPPING)) {
- try {
- mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(XContentFactory.jsonBuilder().map(mappings.get(MapperService.DEFAULT_MAPPING)).string()), MapperService.MergeReason.MAPPING_UPDATE, request.updateAllTypes());
- } catch (Exception e) {
- removalReason = "failed on parsing default mapping on index creation";
- throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, MapperService.DEFAULT_MAPPING, e.getMessage());
- }
- }
- for (Map.Entry<String, Map<String, Object>> entry : mappings.entrySet()) {
- if (entry.getKey().equals(MapperService.DEFAULT_MAPPING)) {
- continue;
- }
- try {
- // apply the default here, it's the first time we parse it
- mapperService.merge(entry.getKey(), new CompressedXContent(XContentFactory.jsonBuilder().map(entry.getValue()).string()), MapperService.MergeReason.MAPPING_UPDATE, request.updateAllTypes());
- } catch (Exception e) {
- removalReason = "failed on parsing mappings on index creation";
- throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, entry.getKey(), e.getMessage());
- }
- }
+ // now, update the mappings with the actual source
+ Map<String, MappingMetaData> mappingsMetaData = new HashMap<>();
+ for (DocumentMapper mapper : mapperService.docMappers(true)) {
+ MappingMetaData mappingMd = new MappingMetaData(mapper);
+ mappingsMetaData.put(mapper.type(), mappingMd);
+ }
- QueryShardContext queryShardContext = indexService.getQueryShardContext();
- for (Alias alias : request.aliases()) {
- if (Strings.hasLength(alias.filter())) {
- aliasValidator.validateAliasFilter(alias.name(), alias.filter(), queryShardContext);
- }
- }
- for (AliasMetaData aliasMetaData : templatesAliases.values()) {
- if (aliasMetaData.filter() != null) {
- aliasValidator.validateAliasFilter(aliasMetaData.alias(), aliasMetaData.filter().uncompressed(), queryShardContext);
- }
- }
+ final IndexMetaData.Builder indexMetaDataBuilder = IndexMetaData.builder(request.index()).settings(actualIndexSettings);
+ for (MappingMetaData mappingMd : mappingsMetaData.values()) {
+ indexMetaDataBuilder.putMapping(mappingMd);
+ }
- // now, update the mappings with the actual source
- Map<String, MappingMetaData> mappingsMetaData = new HashMap<>();
- for (DocumentMapper mapper : mapperService.docMappers(true)) {
- MappingMetaData mappingMd = new MappingMetaData(mapper);
- mappingsMetaData.put(mapper.type(), mappingMd);
- }
+ for (AliasMetaData aliasMetaData : templatesAliases.values()) {
+ indexMetaDataBuilder.putAlias(aliasMetaData);
+ }
+ for (Alias alias : request.aliases()) {
+ AliasMetaData aliasMetaData = AliasMetaData.builder(alias.name()).filter(alias.filter())
+ .indexRouting(alias.indexRouting()).searchRouting(alias.searchRouting()).build();
+ indexMetaDataBuilder.putAlias(aliasMetaData);
+ }
- final IndexMetaData.Builder indexMetaDataBuilder = IndexMetaData.builder(request.index()).settings(actualIndexSettings);
- for (MappingMetaData mappingMd : mappingsMetaData.values()) {
- indexMetaDataBuilder.putMapping(mappingMd);
- }
+ for (Map.Entry<String, Custom> customEntry : customs.entrySet()) {
+ indexMetaDataBuilder.putCustom(customEntry.getKey(), customEntry.getValue());
+ }
- for (AliasMetaData aliasMetaData : templatesAliases.values()) {
- indexMetaDataBuilder.putAlias(aliasMetaData);
- }
- for (Alias alias : request.aliases()) {
- AliasMetaData aliasMetaData = AliasMetaData.builder(alias.name()).filter(alias.filter())
- .indexRouting(alias.indexRouting()).searchRouting(alias.searchRouting()).build();
- indexMetaDataBuilder.putAlias(aliasMetaData);
- }
+ indexMetaDataBuilder.state(request.state());
- for (Map.Entry<String, Custom> customEntry : customs.entrySet()) {
- indexMetaDataBuilder.putCustom(customEntry.getKey(), customEntry.getValue());
- }
+ final IndexMetaData indexMetaData;
+ try {
+ indexMetaData = indexMetaDataBuilder.build();
+ } catch (Exception e) {
+ removalReason = "failed to build index metadata";
+ throw e;
+ }
- indexMetaDataBuilder.state(request.state());
+ indexService.getIndexEventListener().beforeIndexAddedToCluster(indexMetaData.getIndex(),
+ indexMetaData.getSettings());
- final IndexMetaData indexMetaData;
- try {
- indexMetaData = indexMetaDataBuilder.build();
- } catch (Exception e) {
- removalReason = "failed to build index metadata";
- throw e;
- }
+ MetaData newMetaData = MetaData.builder(currentState.metaData())
+ .put(indexMetaData, false)
+ .build();
- indexService.getIndexEventListener().beforeIndexAddedToCluster(new Index(request.index()),
- indexMetaData.getSettings());
+ String maybeShadowIndicator = IndexMetaData.isIndexUsingShadowReplicas(indexMetaData.getSettings()) ? "s" : "";
+ logger.info("[{}] creating index, cause [{}], templates {}, shards [{}]/[{}{}], mappings {}",
+ request.index(), request.cause(), templateNames, indexMetaData.getNumberOfShards(),
+ indexMetaData.getNumberOfReplicas(), maybeShadowIndicator, mappings.keySet());
- MetaData newMetaData = MetaData.builder(currentState.metaData())
- .put(indexMetaData, false)
- .build();
+ ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks());
+ if (!request.blocks().isEmpty()) {
+ for (ClusterBlock block : request.blocks()) {
+ blocks.addIndexBlock(request.index(), block);
+ }
+ }
+ blocks.updateBlocks(indexMetaData);
- String maybeShadowIndicator = IndexMetaData.isIndexUsingShadowReplicas(indexMetaData.getSettings()) ? "s" : "";
- logger.info("[{}] creating index, cause [{}], templates {}, shards [{}]/[{}{}], mappings {}",
- request.index(), request.cause(), templateNames, indexMetaData.getNumberOfShards(),
- indexMetaData.getNumberOfReplicas(), maybeShadowIndicator, mappings.keySet());
+ ClusterState updatedState = ClusterState.builder(currentState).blocks(blocks).metaData(newMetaData).build();
- ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks());
- if (!request.blocks().isEmpty()) {
- for (ClusterBlock block : request.blocks()) {
- blocks.addIndexBlock(request.index(), block);
+ if (request.state() == State.OPEN) {
+ RoutingTable.Builder routingTableBuilder = RoutingTable.builder(updatedState.routingTable())
+ .addAsNew(updatedState.metaData().index(request.index()));
+ RoutingAllocation.Result routingResult = allocationService.reroute(
+ ClusterState.builder(updatedState).routingTable(routingTableBuilder.build()).build(),
+ "index [" + request.index() + "] created");
+ updatedState = ClusterState.builder(updatedState).routingResult(routingResult).build();
+ }
+ removalReason = "cleaning up after validating index on master";
+ return updatedState;
+ } finally {
+ if (indexCreated) {
+ // Index was already partially created - need to clean up
+ indicesService.removeIndex(request.index(), removalReason != null ? removalReason : "failed to create index");
+ }
}
}
- blocks.updateBlocks(indexMetaData);
-
- ClusterState updatedState = ClusterState.builder(currentState).blocks(blocks).metaData(newMetaData).build();
-
- if (request.state() == State.OPEN) {
- RoutingTable.Builder routingTableBuilder = RoutingTable.builder(updatedState.routingTable())
- .addAsNew(updatedState.metaData().index(request.index()));
- RoutingAllocation.Result routingResult = allocationService.reroute(
- ClusterState.builder(updatedState).routingTable(routingTableBuilder.build()).build(),
- "index [" + request.index() + "] created");
- updatedState = ClusterState.builder(updatedState).routingResult(routingResult).build();
- }
- removalReason = "cleaning up after validating index on master";
- return updatedState;
- } finally {
- if (indexCreated) {
- // Index was already partially created - need to clean up
- indicesService.removeIndex(request.index(), removalReason != null ? removalReason : "failed to create index");
- }
- }
- }
- });
+ });
}
private Map<String, Object> parseMapping(String mappingSource) throws Exception {
@@ -459,7 +459,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
if (validationErrors.isEmpty() == false) {
ValidationException validationException = new ValidationException();
validationException.addValidationErrors(validationErrors);
- throw new IndexCreationException(new Index(indexName), validationException);
+ throw new IndexCreationException(indexName, validationException);
}
}
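The create-index path above layers settings from several sources. As a minimal, self-contained sketch of that precedence (not the actual MetaDataCreateIndexService code; plain maps stand in for Settings.Builder): templates are applied from worst-matching to best-matching, then the request's own settings are applied last so they always win.

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

class SettingsPrecedenceSketch {
    // templatesBestFirst is ordered best-matching first, mirroring findTemplates()
    static Map<String, String> resolve(List<Map<String, String>> templatesBestFirst,
                                       Map<String, String> requestSettings) {
        Map<String, String> merged = new LinkedHashMap<>();
        // iterate in reverse so the best-matching template is applied last and wins
        for (int i = templatesBestFirst.size() - 1; i >= 0; i--) {
            merged.putAll(templatesBestFirst.get(i));
        }
        // request settings override anything a template supplied
        merged.putAll(requestSettings);
        return merged;
    }
}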
diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java
index 1e9f968f7a..fe53f206b1 100644
--- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java
@@ -112,9 +112,9 @@ public class MetaDataIndexAliasesService extends AbstractComponent {
logger.warn("[{}] failed to temporary create in order to apply alias action", e, indexMetaData.getIndex());
continue;
}
- indicesToClose.add(indexMetaData.getIndex());
+ indicesToClose.add(indexMetaData.getIndex().getName());
}
- indices.put(indexMetaData.getIndex(), indexService);
+ indices.put(indexMetaData.getIndex().getName(), indexService);
}
aliasValidator.validateAliasFilter(aliasAction.alias(), filter, indexService.getQueryShardContext());
diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java
index c6ec2a4376..8bbd6f09d7 100644
--- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java
@@ -20,11 +20,15 @@ package org.elasticsearch.cluster.metadata;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.misc.IndexMergeTool;
import org.elasticsearch.Version;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.IndexScopedSettings;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.MergePolicyConfig;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.MapperService;
@@ -32,6 +36,7 @@ import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.mapper.MapperRegistry;
import java.util.Collections;
+import java.util.Map;
import java.util.Set;
import static java.util.Collections.unmodifiableSet;
@@ -48,11 +53,13 @@ import static org.elasticsearch.common.util.set.Sets.newHashSet;
public class MetaDataIndexUpgradeService extends AbstractComponent {
private final MapperRegistry mapperRegistry;
+ private final IndexScopedSettings indexScopedSettings;
@Inject
- public MetaDataIndexUpgradeService(Settings settings, MapperRegistry mapperRegistry) {
+ public MetaDataIndexUpgradeService(Settings settings, MapperRegistry mapperRegistry, IndexScopedSettings indexScopedSettings) {
super(settings);
this.mapperRegistry = mapperRegistry;
+ this.indexScopedSettings = indexScopedSettings;
}
/**
@@ -65,21 +72,25 @@ public class MetaDataIndexUpgradeService extends AbstractComponent {
public IndexMetaData upgradeIndexMetaData(IndexMetaData indexMetaData) {
// Throws an exception if there are too-old segments:
if (isUpgraded(indexMetaData)) {
+ assert indexMetaData == archiveBrokenIndexSettings(indexMetaData) : "all settings must have been upgraded before";
return indexMetaData;
}
checkSupportedVersion(indexMetaData);
IndexMetaData newMetaData = indexMetaData;
+ // we have to run this first, otherwise we try to create IndexSettings
+ // with broken settings and fail in checkMappingsCompatibility
+ newMetaData = archiveBrokenIndexSettings(newMetaData);
+ // only run the check with the upgraded settings!!
checkMappingsCompatibility(newMetaData);
- newMetaData = markAsUpgraded(newMetaData);
- return newMetaData;
+ return markAsUpgraded(newMetaData);
}
/**
* Checks if the index was already opened by this version of Elasticsearch and doesn't require any additional checks.
*/
- private boolean isUpgraded(IndexMetaData indexMetaData) {
- return indexMetaData.getUpgradedVersion().onOrAfter(Version.V_3_0_0);
+ boolean isUpgraded(IndexMetaData indexMetaData) {
+ return indexMetaData.getUpgradedVersion().onOrAfter(Version.CURRENT);
}
/**
@@ -171,4 +182,39 @@ public class MetaDataIndexUpgradeService extends AbstractComponent {
}
}
+ private static final String ARCHIVED_SETTINGS_PREFIX = "archived.";
+
+ IndexMetaData archiveBrokenIndexSettings(IndexMetaData indexMetaData) {
+ Settings settings = indexMetaData.getSettings();
+ Settings.Builder builder = Settings.builder();
+ boolean changed = false;
+ for (Map.Entry<String, String> entry : settings.getAsMap().entrySet()) {
+ try {
+ Setting<?> setting = indexScopedSettigns.get(entry.getKey());
+ if (setting != null) {
+ setting.get(settings);
+ builder.put(entry.getKey(), entry.getValue());
+ } else {
+ if (indexScopedSettings.isPrivateSetting(entry.getKey()) || entry.getKey().startsWith(ARCHIVED_SETTINGS_PREFIX)) {
+ builder.put(entry.getKey(), entry.getValue());
+ } else {
+ changed = true;
+ logger.warn("[{}] found unknown index setting: {} value: {} - archiving", indexMetaData.getIndex(), entry.getKey(), entry.getValue());
+ // we put them back in here such that tools can check from the outside if there are any indices with broken settings. The settings can remain there,
+ // but we want users to be aware that some of their settings are broken and they can research why and what they need to do to replace them.
+ builder.put(ARCHIVED_SETTINGS_PREFIX + entry.getKey(), entry.getValue());
+ }
+ }
+ } catch (IllegalArgumentException ex) {
+ changed = true;
+ logger.warn("[{}] found invalid index setting: {} value: {} - archiving",ex, indexMetaData.getIndex(), entry.getKey(), entry.getValue());
+ // we put them back in here such that tools can check from the outside if there are any indices with broken settings. The setting can remain there
+ // but we want users to be aware that some of their setting sare broken and they can research why and what they need to do to replace them.
+ builder.put(ARCHIVED_SETTINGS_PREFIX + entry.getKey(), entry.getValue());
+ }
+ }
+
+ return changed ? IndexMetaData.builder(indexMetaData).settings(builder.build()).build() : indexMetaData;
+ }
+
}
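To make archiveBrokenIndexSettings concrete, here is a minimal sketch under stated assumptions: a Predicate stands in for the IndexScopedSettings lookup and validation, and private settings are folded into that predicate. Unknown or invalid settings are re-keyed under the archived. prefix rather than silently dropped, so external tooling can still discover them.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.function.Predicate;

class ArchiveSettingsSketch {
    static final String ARCHIVED_SETTINGS_PREFIX = "archived.";

    static Map<String, String> archiveBroken(Map<String, String> settings,
                                             Predicate<String> isKnownAndValid) {
        Map<String, String> upgraded = new LinkedHashMap<>();
        for (Map.Entry<String, String> e : settings.entrySet()) {
            if (isKnownAndValid.test(e.getKey()) || e.getKey().startsWith(ARCHIVED_SETTINGS_PREFIX)) {
                upgraded.put(e.getKey(), e.getValue()); // keep as-is
            } else {
                // keep the broken setting, but under a prefix nothing will try to parse
                upgraded.put(ARCHIVED_SETTINGS_PREFIX + e.getKey(), e.getValue());
            }
        }
        return upgraded;
    }
}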
diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java
index 1d13fc2079..a86d65779b 100644
--- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java
@@ -169,7 +169,7 @@ public class MetaDataMappingService extends AbstractComponent {
private boolean refreshIndexMapping(IndexService indexService, IndexMetaData.Builder builder) {
boolean dirty = false;
- String index = indexService.index().name();
+ String index = indexService.index().getName();
try {
List<String> updatedTypes = new ArrayList<>();
for (DocumentMapper mapper : indexService.mapperService().docMappers(true)) {
diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java
index 8e9dbc6b67..2d7ba4c3c0 100644
--- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java
@@ -117,7 +117,7 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements
nrReplicasChanged.put(numberOfReplicas, new ArrayList<>());
}
- nrReplicasChanged.get(numberOfReplicas).add(indexMetaData.getIndex());
+ nrReplicasChanged.get(numberOfReplicas).add(indexMetaData.getIndex().getName());
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java
index 7dce217287..d8504a210c 100644
--- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java
+++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java
@@ -32,6 +32,7 @@ import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.transport.TransportAddressSerializers;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.node.Node;
import java.io.IOException;
import java.util.Collections;
@@ -46,11 +47,11 @@ import static org.elasticsearch.common.transport.TransportAddressSerializers.add
public class DiscoveryNode implements Streamable, ToXContent {
public static boolean localNode(Settings settings) {
- if (settings.get("node.local") != null) {
- return settings.getAsBoolean("node.local", false);
+ if (Node.NODE_LOCAL_SETTING.exists(settings)) {
+ return Node.NODE_LOCAL_SETTING.get(settings);
}
- if (settings.get("node.mode") != null) {
- String nodeMode = settings.get("node.mode");
+ if (Node.NODE_MODE_SETTING.exists(settings)) {
+ String nodeMode = Node.NODE_MODE_SETTING.get(settings);
if ("local".equals(nodeMode)) {
return true;
} else if ("network".equals(nodeMode)) {
@@ -63,28 +64,29 @@ public class DiscoveryNode implements Streamable, ToXContent {
}
public static boolean nodeRequiresLocalStorage(Settings settings) {
- return !(settings.getAsBoolean("node.client", false) || (!settings.getAsBoolean("node.data", true) && !settings.getAsBoolean("node.master", true)));
+ return (Node.NODE_CLIENT_SETTING.get(settings) || (Node.NODE_DATA_SETTING.get(settings) == false && Node.NODE_MASTER_SETTING.get(settings) == false)) == false;
}
public static boolean clientNode(Settings settings) {
- String client = settings.get("node.client");
- return Booleans.isExplicitTrue(client);
+ return Node.NODE_CLIENT_SETTING.get(settings);
}
public static boolean masterNode(Settings settings) {
- String master = settings.get("node.master");
- if (master == null) {
- return !clientNode(settings);
+ if (Node.NODE_MASTER_SETTING.exists(settings)) {
+ return Node.NODE_MASTER_SETTING.get(settings);
}
- return Booleans.isExplicitTrue(master);
+ return clientNode(settings) == false;
}
public static boolean dataNode(Settings settings) {
- String data = settings.get("node.data");
- if (data == null) {
- return !clientNode(settings);
+ if (Node.NODE_DATA_SETTING.exists(settings)) {
+ return Node.NODE_DATA_SETTING.get(settings);
}
- return Booleans.isExplicitTrue(data);
+ return clientNode(settings) == false;
+ }
+
+ public static boolean ingestNode(Settings settings) {
+ return Node.NODE_INGEST_SETTING.get(settings);
}
public static final List<DiscoveryNode> EMPTY_LIST = Collections.emptyList();
@@ -316,6 +318,14 @@ public class DiscoveryNode implements Streamable, ToXContent {
return masterNode();
}
+ /**
+ * Returns a boolean that tells whether this is an ingest node or not
+ */
+ public boolean isIngestNode() {
+ String ingest = attributes.get("ingest");
+ return ingest == null ? true : Booleans.parseBooleanExact(ingest);
+ }
+
public Version version() {
return this.version;
}
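The DiscoveryNode change above replaces raw string lookups with typed settings but keeps the same fallback rule: an explicit node.master or node.data value wins, and otherwise the role defaults to the inverse of node.client. A sketch of that rule, with a plain map standing in for Settings:

import java.util.Map;

class NodeRolesSketch {
    // explicit value wins; otherwise any non-client node is master eligible
    static boolean masterNode(Map<String, Boolean> settings) {
        Boolean explicit = settings.get("node.master");
        return explicit != null ? explicit : clientNode(settings) == false;
    }

    static boolean clientNode(Map<String, Boolean> settings) {
        Boolean explicit = settings.get("node.client");
        return explicit != null ? explicit : false;
    }
}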
diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java
index d07d3c334a..e24c25dacb 100644
--- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java
+++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java
@@ -52,16 +52,20 @@ public class DiscoveryNodes extends AbstractDiffable<DiscoveryNodes> implements
private final ImmutableOpenMap<String, DiscoveryNode> nodes;
private final ImmutableOpenMap<String, DiscoveryNode> dataNodes;
private final ImmutableOpenMap<String, DiscoveryNode> masterNodes;
+ private final ImmutableOpenMap<String, DiscoveryNode> ingestNodes;
private final String masterNodeId;
private final String localNodeId;
private final Version minNodeVersion;
private final Version minNonClientNodeVersion;
- private DiscoveryNodes(ImmutableOpenMap<String, DiscoveryNode> nodes, ImmutableOpenMap<String, DiscoveryNode> dataNodes, ImmutableOpenMap<String, DiscoveryNode> masterNodes, String masterNodeId, String localNodeId, Version minNodeVersion, Version minNonClientNodeVersion) {
+ private DiscoveryNodes(ImmutableOpenMap<String, DiscoveryNode> nodes, ImmutableOpenMap<String, DiscoveryNode> dataNodes,
+ ImmutableOpenMap<String, DiscoveryNode> masterNodes, ImmutableOpenMap<String, DiscoveryNode> ingestNodes,
+ String masterNodeId, String localNodeId, Version minNodeVersion, Version minNonClientNodeVersion) {
this.nodes = nodes;
this.dataNodes = dataNodes;
this.masterNodes = masterNodes;
+ this.ingestNodes = ingestNodes;
this.masterNodeId = masterNodeId;
this.localNodeId = localNodeId;
this.minNodeVersion = minNodeVersion;
@@ -165,6 +169,13 @@ public class DiscoveryNodes extends AbstractDiffable<DiscoveryNodes> implements
}
/**
+ * @return All the ingest nodes arranged by their ids
+ */
+ public ImmutableOpenMap<String, DiscoveryNode> getIngestNodes() {
+ return ingestNodes;
+ }
+
+ /**
* Get a {@link Map} of the discovered master and data nodes arranged by their ids
*
* @return {@link Map} of the discovered master and data nodes arranged by their ids
@@ -654,6 +665,7 @@ public class DiscoveryNodes extends AbstractDiffable<DiscoveryNodes> implements
public DiscoveryNodes build() {
ImmutableOpenMap.Builder<String, DiscoveryNode> dataNodesBuilder = ImmutableOpenMap.builder();
ImmutableOpenMap.Builder<String, DiscoveryNode> masterNodesBuilder = ImmutableOpenMap.builder();
+ ImmutableOpenMap.Builder<String, DiscoveryNode> ingestNodesBuilder = ImmutableOpenMap.builder();
Version minNodeVersion = Version.CURRENT;
Version minNonClientNodeVersion = Version.CURRENT;
for (ObjectObjectCursor<String, DiscoveryNode> nodeEntry : nodes) {
@@ -665,10 +677,16 @@ public class DiscoveryNodes extends AbstractDiffable<DiscoveryNodes> implements
masterNodesBuilder.put(nodeEntry.key, nodeEntry.value);
minNonClientNodeVersion = Version.smallest(minNonClientNodeVersion, nodeEntry.value.version());
}
+ if (nodeEntry.value.isIngestNode()) {
+ ingestNodesBuilder.put(nodeEntry.key, nodeEntry.value);
+ }
minNodeVersion = Version.smallest(minNodeVersion, nodeEntry.value.version());
}
- return new DiscoveryNodes(nodes.build(), dataNodesBuilder.build(), masterNodesBuilder.build(), masterNodeId, localNodeId, minNodeVersion, minNonClientNodeVersion);
+ return new DiscoveryNodes(
+ nodes.build(), dataNodesBuilder.build(), masterNodesBuilder.build(), ingestNodesBuilder.build(),
+ masterNodeId, localNodeId, minNodeVersion, minNonClientNodeVersion
+ );
}
public static DiscoveryNodes readFrom(StreamInput in, @Nullable DiscoveryNode localNode) throws IOException {
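The DiscoveryNodes builder above now materializes a fourth per-role map in the same single pass over all nodes. A sketch of that bucketing, where NodeView is a hypothetical stand-in for DiscoveryNode:

import java.util.LinkedHashMap;
import java.util.Map;

class IngestBucketSketch {
    static class NodeView {
        final boolean data, master, ingest;
        NodeView(boolean data, boolean master, boolean ingest) {
            this.data = data; this.master = master; this.ingest = ingest;
        }
    }

    // one pass over all nodes, copying each into every role map it belongs to
    static Map<String, NodeView> ingestNodes(Map<String, NodeView> allNodes) {
        Map<String, NodeView> ingest = new LinkedHashMap<>();
        for (Map.Entry<String, NodeView> e : allNodes.entrySet()) {
            if (e.getValue().ingest) {
                ingest.put(e.getKey(), e.getValue());
            }
        }
        return ingest;
    }
}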
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java
index bb186a64a8..2778d28797 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java
@@ -30,12 +30,12 @@ import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.collect.ImmutableOpenIntMap;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
-import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
@@ -58,9 +58,9 @@ import java.util.Set;
*/
public class IndexRoutingTable extends AbstractDiffable<IndexRoutingTable> implements Iterable<IndexShardRoutingTable> {
- public static final IndexRoutingTable PROTO = builder("").build();
+ public static final IndexRoutingTable PROTO = builder(new Index("", "_na_")).build();
- private final String index;
+ private final Index index;
private final ShardShuffler shuffler;
// note, we assume that when the index routing is created, ShardRoutings are created for all possible number of
@@ -69,7 +69,7 @@ public class IndexRoutingTable extends AbstractDiffable<IndexRoutingTable> imple
private final List<ShardRouting> allActiveShards;
- IndexRoutingTable(String index, ImmutableOpenIntMap<IndexShardRoutingTable> shards) {
+ IndexRoutingTable(Index index, ImmutableOpenIntMap<IndexShardRoutingTable> shards) {
this.index = index;
this.shuffler = new RotationShardShuffler(Randomness.get().nextInt());
this.shards = shards;
@@ -90,18 +90,8 @@ public class IndexRoutingTable extends AbstractDiffable<IndexRoutingTable> imple
*
* @return id of the index
*/
- public String index() {
- return this.index;
- }
-
-
- /**
- * Return the index id
- *
- * @return id of the index
- */
- public String getIndex() {
- return index();
+ public Index getIndex() {
+ return index;
}
/**
@@ -118,13 +108,17 @@ public class IndexRoutingTable extends AbstractDiffable<IndexRoutingTable> imple
}
public void validate(RoutingTableValidation validation, MetaData metaData) {
- if (!metaData.hasIndex(index())) {
- validation.addIndexFailure(index(), "Exists in routing does not exists in metadata");
+ if (!metaData.hasIndex(index.getName())) {
+ validation.addIndexFailure(index.getName(), "Exists in routing does not exists in metadata");
+ return;
+ }
+ IndexMetaData indexMetaData = metaData.index(index.getName());
+ if (indexMetaData.getIndexUUID().equals(index.getUUID()) == false) {
+ validation.addIndexFailure(index.getName(), "Exists in routing does not exists in metadata with the same uuid");
return;
}
- IndexMetaData indexMetaData = metaData.index(index());
for (String failure : validate(indexMetaData)) {
- validation.addIndexFailure(index, failure);
+ validation.addIndexFailure(index.getName(), failure);
}
}
@@ -154,7 +148,7 @@ public class IndexRoutingTable extends AbstractDiffable<IndexRoutingTable> imple
+ "] routing table has wrong number of replicas, expected [" + indexMetaData.getNumberOfReplicas() + "], got [" + routingNumberOfReplicas + "]");
}
for (ShardRouting shardRouting : indexShardRoutingTable) {
- if (!shardRouting.index().equals(index())) {
+ if (!shardRouting.index().equals(index)) {
failures.add("shard routing has an index [" + shardRouting.index() + "] that is different than the routing table");
}
}
@@ -332,7 +326,7 @@ public class IndexRoutingTable extends AbstractDiffable<IndexRoutingTable> imple
@Override
public IndexRoutingTable readFrom(StreamInput in) throws IOException {
- String index = in.readString();
+ Index index = Index.readIndex(in);
Builder builder = new Builder(index);
int size = in.readVInt();
@@ -345,23 +339,23 @@ public class IndexRoutingTable extends AbstractDiffable<IndexRoutingTable> imple
@Override
public void writeTo(StreamOutput out) throws IOException {
- out.writeString(index);
+ index.writeTo(out);
out.writeVInt(shards.size());
for (IndexShardRoutingTable indexShard : this) {
IndexShardRoutingTable.Builder.writeToThin(indexShard, out);
}
}
- public static Builder builder(String index) {
+ public static Builder builder(Index index) {
return new Builder(index);
}
public static class Builder {
- private final String index;
+ private final Index index;
private final ImmutableOpenIntMap.Builder<IndexShardRoutingTable> shards = ImmutableOpenIntMap.builder();
- public Builder(String index) {
+ public Builder(Index index) {
this.index = index;
}
@@ -422,11 +416,12 @@ public class IndexRoutingTable extends AbstractDiffable<IndexRoutingTable> imple
* Initializes an index, to be restored from snapshot
*/
private Builder initializeAsRestore(IndexMetaData indexMetaData, RestoreSource restoreSource, IntSet ignoreShards, boolean asNew, UnassignedInfo unassignedInfo) {
+ assert indexMetaData.getIndex().equals(index);
if (!shards.isEmpty()) {
throw new IllegalStateException("trying to initialize an index with fresh shards, but already has shards created");
}
for (int shardId = 0; shardId < indexMetaData.getNumberOfShards(); shardId++) {
- IndexShardRoutingTable.Builder indexShardRoutingBuilder = new IndexShardRoutingTable.Builder(new ShardId(indexMetaData.getIndex(), shardId));
+ IndexShardRoutingTable.Builder indexShardRoutingBuilder = new IndexShardRoutingTable.Builder(new ShardId(index, shardId));
for (int i = 0; i <= indexMetaData.getNumberOfReplicas(); i++) {
if (asNew && ignoreShards.contains(shardId)) {
// This shard wasn't completely snapshotted - restore it as a new shard
@@ -444,11 +439,12 @@ public class IndexRoutingTable extends AbstractDiffable<IndexRoutingTable> imple
* Initializes a new empty index, with an option to control if its from an API or not.
*/
private Builder initializeEmpty(IndexMetaData indexMetaData, UnassignedInfo unassignedInfo) {
+ assert indexMetaData.getIndex().equals(index);
if (!shards.isEmpty()) {
throw new IllegalStateException("trying to initialize an index with fresh shards, but already has shards created");
}
for (int shardId = 0; shardId < indexMetaData.getNumberOfShards(); shardId++) {
- IndexShardRoutingTable.Builder indexShardRoutingBuilder = new IndexShardRoutingTable.Builder(new ShardId(indexMetaData.getIndex(), shardId));
+ IndexShardRoutingTable.Builder indexShardRoutingBuilder = new IndexShardRoutingTable.Builder(new ShardId(index, shardId));
for (int i = 0; i <= indexMetaData.getNumberOfReplicas(); i++) {
indexShardRoutingBuilder.addShard(ShardRouting.newUnassigned(index, shardId, null, i == 0, unassignedInfo));
}
@@ -539,21 +535,18 @@ public class IndexRoutingTable extends AbstractDiffable<IndexRoutingTable> imple
ordered.add(indexShard);
}
- CollectionUtil.timSort(ordered, new Comparator<IndexShardRoutingTable>() {
- @Override
- public int compare(IndexShardRoutingTable o1, IndexShardRoutingTable o2) {
- int v = o1.shardId().index().name().compareTo(
- o2.shardId().index().name());
- if (v == 0) {
- v = Integer.compare(o1.shardId().id(),
- o2.shardId().id());
- }
- return v;
+ CollectionUtil.timSort(ordered, (o1, o2) -> {
+ int v = o1.shardId().getIndex().getName().compareTo(
+ o2.shardId().getIndex().getName());
+ if (v == 0) {
+ v = Integer.compare(o1.shardId().id(),
+ o2.shardId().id());
}
+ return v;
});
for (IndexShardRoutingTable indexShard : ordered) {
- sb.append("----shard_id [").append(indexShard.shardId().index().name()).append("][").append(indexShard.shardId().id()).append("]\n");
+ sb.append("----shard_id [").append(indexShard.shardId().getIndex().getName()).append("][").append(indexShard.shardId().id()).append("]\n");
for (ShardRouting shard : indexShard) {
sb.append("--------").append(shard.shortSummary()).append("\n");
}
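The anonymous Comparator above becomes a lambda; the same index-name-then-shard-id ordering can also be written with Java 8 comparator combinators. A sketch, where ShardKey is a hypothetical stand-in for the values returned by shardId():

import java.util.Comparator;

class ShardOrderingSketch {
    static class ShardKey {
        final String indexName;
        final int id;
        ShardKey(String indexName, int id) { this.indexName = indexName; this.id = id; }
    }

    // equivalent to the lambda above: compare by index name, break ties by shard id
    static final Comparator<ShardKey> BY_INDEX_THEN_ID =
            Comparator.comparing((ShardKey s) -> s.indexName)
                      .thenComparingInt(s -> s.id);
}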
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java
index bcdb7a43fe..d516942845 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java
@@ -26,6 +26,7 @@ import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.util.set.Sets;
+import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import java.io.IOException;
@@ -617,11 +618,11 @@ public class IndexShardRoutingTable implements Iterable<ShardRouting> {
}
public static IndexShardRoutingTable readFrom(StreamInput in) throws IOException {
- String index = in.readString();
+ Index index = Index.readIndex(in);
return readFromThin(in, index);
}
- public static IndexShardRoutingTable readFromThin(StreamInput in, String index) throws IOException {
+ public static IndexShardRoutingTable readFromThin(StreamInput in, Index index) throws IOException {
int iShardId = in.readVInt();
Builder builder = new Builder(new ShardId(index, iShardId));
@@ -635,7 +636,7 @@ public class IndexShardRoutingTable implements Iterable<ShardRouting> {
}
public static void writeTo(IndexShardRoutingTable indexShard, StreamOutput out) throws IOException {
- out.writeString(indexShard.shardId().index().name());
+ out.writeString(indexShard.shardId().getIndex().getName());
writeToThin(indexShard, out);
}
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java b/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java
index 267dae80d5..184db017c1 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java
@@ -29,6 +29,7 @@ import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.math.MathUtils;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardNotFoundException;
@@ -95,13 +96,14 @@ public class OperationRouting extends AbstractComponent {
// we use set here and not list since we might get duplicates
for (String index : concreteIndices) {
final IndexRoutingTable indexRouting = indexRoutingTable(clusterState, index);
+ final IndexMetaData indexMetaData = indexMetaData(clusterState, index);
final Set<String> effectiveRouting = routing.get(index);
if (effectiveRouting != null) {
for (String r : effectiveRouting) {
- int shardId = generateShardId(clusterState, index, null, r);
+ int shardId = generateShardId(indexMetaData, null, r);
IndexShardRoutingTable indexShard = indexRouting.shard(shardId);
if (indexShard == null) {
- throw new ShardNotFoundException(new ShardId(index, shardId));
+ throw new ShardNotFoundException(new ShardId(indexRouting.getIndex(), shardId));
}
// we might get duplicates, but that's ok, they will override one another
set.add(indexShard);
@@ -204,20 +206,25 @@ public class OperationRouting extends AbstractComponent {
return indexRouting;
}
+ protected IndexMetaData indexMetaData(ClusterState clusterState, String index) {
+ IndexMetaData indexMetaData = clusterState.metaData().index(index);
+ if (indexMetaData == null) {
+ throw new IndexNotFoundException(index);
+ }
+ return indexMetaData;
+ }
+
protected IndexShardRoutingTable shards(ClusterState clusterState, String index, String id, String routing) {
- int shardId = generateShardId(clusterState, index, id, routing);
+ int shardId = generateShardId(indexMetaData(clusterState, index), id, routing);
return clusterState.getRoutingTable().shardRoutingTable(index, shardId);
}
public ShardId shardId(ClusterState clusterState, String index, String id, @Nullable String routing) {
- return new ShardId(index, generateShardId(clusterState, index, id, routing));
+ IndexMetaData indexMetaData = indexMetaData(clusterState, index);
+ return new ShardId(indexMetaData.getIndex(), generateShardId(indexMetaData, id, routing));
}
- private int generateShardId(ClusterState clusterState, String index, String id, @Nullable String routing) {
- IndexMetaData indexMetaData = clusterState.metaData().index(index);
- if (indexMetaData == null) {
- throw new IndexNotFoundException(index);
- }
+ private int generateShardId(IndexMetaData indexMetaData, String id, @Nullable String routing) {
final int hash;
if (routing == null) {
hash = Murmur3HashFunction.hash(id);
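After this refactoring, generateShardId needs only the IndexMetaData, since the shard count is all it reads from cluster state. A sketch of the overall computation under stated assumptions: String.hashCode stands in for Murmur3HashFunction, the routing value takes precedence over the document id, and the remainder of the method (not shown in this hunk) is assumed to map the hash onto the shard count with a floor modulus so negative hashes stay in range.

class ShardIdSketch {
    static int generateShardId(int numberOfShards, String id, String routing) {
        // the routing value, when present, is hashed instead of the document id
        String effectiveRouting = routing != null ? routing : id;
        int hash = effectiveRouting.hashCode(); // stand-in for Murmur3HashFunction
        return Math.floorMod(hash, numberOfShards);
    }
}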
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java
index ff6c829342..77ae7b41d9 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java
@@ -87,7 +87,7 @@ public class RoutingNode implements Iterable<ShardRouting> {
// TODO use Set with ShardIds for faster lookup.
for (ShardRouting shardRouting : shards) {
if (shardRouting.isSameShard(shard)) {
- throw new IllegalStateException("Trying to add a shard [" + shard.shardId().index().name() + "][" + shard.shardId().id() + "] to a node [" + nodeId + "] where it already exists");
+ throw new IllegalStateException("Trying to add a shard [" + shard.shardId().getIndex().getName() + "][" + shard.shardId().id() + "] to a node [" + nodeId + "] where it already exists");
}
}
shards.add(shard);
@@ -137,7 +137,7 @@ public class RoutingNode implements Iterable<ShardRouting> {
List<ShardRouting> shards = new ArrayList<>();
for (ShardRouting shardEntry : this) {
- if (!shardEntry.index().equals(index)) {
+ if (!shardEntry.getIndexName().equals(index)) {
continue;
}
for (ShardRoutingState state : states) {
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java
index 3a2567e3f4..6a6373f977 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java
@@ -28,6 +28,7 @@ import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.collect.ImmutableOpenMap;
+import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import java.util.ArrayList;
@@ -169,7 +170,7 @@ public class RoutingNodes implements Iterable<RoutingNode> {
} else if (routing.primary() == false) { // primary without relocationID is initial recovery
ShardRouting primary = findPrimary(routing);
if (primary == null && initializing) {
- primary = routingTable.index(routing.index()).shard(routing.shardId().id()).primary;
+ primary = routingTable.index(routing.index().getName()).shard(routing.shardId().id()).primary;
} else if (primary == null) {
throw new IllegalStateException("replica is initializing but primary is unassigned");
}
@@ -348,7 +349,7 @@ public class RoutingNodes implements Iterable<RoutingNode> {
*/
public boolean allReplicasActive(ShardRouting shardRouting) {
final List<ShardRouting> shards = assignedShards(shardRouting.shardId());
- if (shards.isEmpty() || shards.size() < this.routingTable.index(shardRouting.index()).shard(shardRouting.id()).size()) {
+ if (shards.isEmpty() || shards.size() < this.routingTable.index(shardRouting.index().getName()).shard(shardRouting.id()).size()) {
return false; // if we are empty nothing is active; if we have fewer than the total, at least one is unassigned
}
for (ShardRouting shard : shards) {
@@ -778,7 +779,7 @@ public class RoutingNodes implements Iterable<RoutingNode> {
int inactivePrimaryCount = 0;
int inactiveShardCount = 0;
int relocating = 0;
- Map<String, Integer> indicesAndShards = new HashMap<>();
+ Map<Index, Integer> indicesAndShards = new HashMap<>();
for (RoutingNode node : routingNodes) {
for (ShardRouting shard : node) {
if (!shard.active() && shard.relocatingNodeId() == null) {
@@ -800,10 +801,10 @@ public class RoutingNodes implements Iterable<RoutingNode> {
}
}
// Assert that the active shard routing are identical.
- Set<Map.Entry<String, Integer>> entries = indicesAndShards.entrySet();
+ Set<Map.Entry<Index, Integer>> entries = indicesAndShards.entrySet();
final List<ShardRouting> shards = new ArrayList<>();
- for (Map.Entry<String, Integer> e : entries) {
- String index = e.getKey();
+ for (Map.Entry<Index, Integer> e : entries) {
+ Index index = e.getKey();
for (int i = 0; i < e.getValue(); i++) {
for (RoutingNode routingNode : routingNodes) {
for (ShardRouting shardRouting : routingNode) {
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java
index 4bf196d07d..6d81556eb2 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java
@@ -31,6 +31,7 @@ import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.util.iterable.Iterables;
+import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardNotFoundException;
@@ -88,6 +89,10 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
return indicesRouting.get(index);
}
+ public IndexRoutingTable index(Index index) {
+ return indicesRouting.get(index.getName());
+ }
+
public ImmutableOpenMap<String, IndexRoutingTable> indicesRouting() {
return indicesRouting;
}
@@ -109,7 +114,7 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
}
IndexShardRoutingTable shard = indexRouting.shard(shardId);
if (shard == null) {
- throw new ShardNotFoundException(new ShardId(index, shardId));
+ throw new ShardNotFoundException(new ShardId(indexRouting.getIndex(), shardId));
}
return shard;
}
@@ -121,7 +126,15 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
* @throws ShardNotFoundException if provided shard id is unknown
*/
public IndexShardRoutingTable shardRoutingTable(ShardId shardId) {
- return shardRoutingTable(shardId.getIndex(), shardId.getId());
+ IndexRoutingTable indexRouting = index(shardId.getIndexName());
+ if (indexRouting == null || indexRouting.getIndex().equals(shardId.getIndex()) == false) {
+ throw new IndexNotFoundException(shardId.getIndex());
+ }
+ IndexShardRoutingTable shard = indexRouting.shard(shardId.id());
+ if (shard == null) {
+ throw new ShardNotFoundException(shardId);
+ }
+ return shard;
}
public RoutingTable validateRaiseException(MetaData metaData) throws RoutingValidationException {
@@ -386,7 +399,7 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
public Builder(RoutingTable routingTable) {
version = routingTable.version;
for (IndexRoutingTable indexRoutingTable : routingTable) {
- indicesRouting.put(indexRoutingTable.index(), indexRoutingTable);
+ indicesRouting.put(indexRoutingTable.getIndex().getName(), indexRoutingTable);
}
}
@@ -401,14 +414,14 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
if (shardRoutingEntry.initializing() && shardRoutingEntry.relocatingNodeId() != null)
continue;
- String index = shardRoutingEntry.index();
- IndexRoutingTable.Builder indexBuilder = indexRoutingTableBuilders.get(index);
+ Index index = shardRoutingEntry.index();
+ IndexRoutingTable.Builder indexBuilder = indexRoutingTableBuilders.get(index.getName());
if (indexBuilder == null) {
indexBuilder = new IndexRoutingTable.Builder(index);
- indexRoutingTableBuilders.put(index, indexBuilder);
+ indexRoutingTableBuilders.put(index.getName(), indexBuilder);
}
- IndexShardRoutingTable refData = routingNodes.routingTable().index(shardRoutingEntry.index()).shard(shardRoutingEntry.id());
+ IndexShardRoutingTable refData = routingNodes.routingTable().index(shardRoutingEntry.index().getName()).shard(shardRoutingEntry.id());
indexBuilder.addShard(refData, shardRoutingEntry);
}
}
@@ -416,13 +429,13 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
Iterable<ShardRouting> shardRoutingEntries = Iterables.concat(routingNodes.unassigned(), routingNodes.unassigned().ignored());
for (ShardRouting shardRoutingEntry : shardRoutingEntries) {
- String index = shardRoutingEntry.index();
- IndexRoutingTable.Builder indexBuilder = indexRoutingTableBuilders.get(index);
+ Index index = shardRoutingEntry.index();
+ IndexRoutingTable.Builder indexBuilder = indexRoutingTableBuilders.get(index.getName());
if (indexBuilder == null) {
indexBuilder = new IndexRoutingTable.Builder(index);
- indexRoutingTableBuilders.put(index, indexBuilder);
+ indexRoutingTableBuilders.put(index.getName(), indexBuilder);
}
- IndexShardRoutingTable refData = routingNodes.routingTable().index(shardRoutingEntry.index()).shard(shardRoutingEntry.id());
+ IndexShardRoutingTable refData = routingNodes.routingTable().index(shardRoutingEntry.index().getName()).shard(shardRoutingEntry.id());
indexBuilder.addShard(refData, shardRoutingEntry);
}
@@ -446,7 +459,7 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
continue;
}
int currentNumberOfReplicas = indexRoutingTable.shards().get(0).size() - 1; // remove the required primary
- IndexRoutingTable.Builder builder = new IndexRoutingTable.Builder(index);
+ IndexRoutingTable.Builder builder = new IndexRoutingTable.Builder(indexRoutingTable.getIndex());
// re-add all the shards
for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
builder.addIndexShard(indexShardRoutingTable);
@@ -526,7 +539,7 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
throw new IllegalStateException("once build is called the builder cannot be reused");
}
indexRoutingTable.validate();
- indicesRouting.put(indexRoutingTable.index(), indexRoutingTable);
+ indicesRouting.put(indexRoutingTable.getIndex().getName(), indexRoutingTable);
return this;
}
@@ -567,7 +580,7 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
}
// normalize the versions right before we build it...
for (ObjectCursor<IndexRoutingTable> indexRoutingTable : indicesRouting.values()) {
- indicesRouting.put(indexRoutingTable.value.index(), indexRoutingTable.value.normalizeVersions());
+ indicesRouting.put(indexRoutingTable.value.getIndex().getName(), indexRoutingTable.value.normalizeVersions());
}
RoutingTable table = new RoutingTable(version, indicesRouting.build());
indicesRouting = null;
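The new shardRoutingTable(ShardId) overload above no longer resolves by name alone: an index can be deleted and re-created under the same name, so the routing table entry is only accepted when the full Index, name plus uuid, matches. A minimal sketch of that guard, where IndexKey is a hypothetical stand-in for Index and the exception type is illustrative:

class StaleShardIdSketch {
    static class IndexKey {
        final String name, uuid;
        IndexKey(String name, String uuid) { this.name = name; this.uuid = uuid; }
    }

    // reject a ShardId that refers to a previous incarnation of the index
    static void checkSameIncarnation(IndexKey inRoutingTable, IndexKey requested) {
        if (inRoutingTable == null
                || inRoutingTable.name.equals(requested.name) == false
                || inRoutingTable.uuid.equals(requested.uuid) == false) {
            throw new IllegalArgumentException("index [" + requested.name + "] not found");
        }
    }
}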
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java b/core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java
index 5ffaee0f2f..47509852d9 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java
@@ -27,6 +27,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import java.io.IOException;
@@ -44,7 +45,7 @@ public final class ShardRouting implements Streamable, ToXContent {
*/
public static final long UNAVAILABLE_EXPECTED_SHARD_SIZE = -1;
- private String index;
+ private Index index;
private int shardId;
private String currentNodeId;
private String relocatingNodeId;
@@ -75,7 +76,7 @@ public final class ShardRouting implements Streamable, ToXContent {
* A constructor to internally create shard routing instances, note, the internal flag should only be set to true
* by either this class or tests. Visible for testing.
*/
- ShardRouting(String index, int shardId, String currentNodeId,
+ ShardRouting(Index index, int shardId, String currentNodeId,
String relocatingNodeId, RestoreSource restoreSource, boolean primary, ShardRoutingState state, long version,
UnassignedInfo unassignedInfo, AllocationId allocationId, boolean internal, long expectedShardSize) {
this.index = index;
@@ -105,22 +106,19 @@ public final class ShardRouting implements Streamable, ToXContent {
/**
* Creates a new unassigned shard.
*/
- public static ShardRouting newUnassigned(String index, int shardId, RestoreSource restoreSource, boolean primary, UnassignedInfo unassignedInfo) {
+ public static ShardRouting newUnassigned(Index index, int shardId, RestoreSource restoreSource, boolean primary, UnassignedInfo unassignedInfo) {
return new ShardRouting(index, shardId, null, null, restoreSource, primary, ShardRoutingState.UNASSIGNED, 0, unassignedInfo, null, true, UNAVAILABLE_EXPECTED_SHARD_SIZE);
}
- /**
- * The index name.
- */
- public String index() {
+ public Index index() {
return this.index;
}
/**
* The index name.
*/
- public String getIndex() {
- return index();
+ public String getIndexName() {
+ return index().getName();
}
/**
@@ -302,13 +300,13 @@ public final class ShardRouting implements Streamable, ToXContent {
return entry;
}
- public static ShardRouting readShardRoutingEntry(StreamInput in, String index, int shardId) throws IOException {
+ public static ShardRouting readShardRoutingEntry(StreamInput in, Index index, int shardId) throws IOException {
ShardRouting entry = new ShardRouting();
entry.readFrom(in, index, shardId);
return entry;
}
- public void readFrom(StreamInput in, String index, int shardId) throws IOException {
+ public void readFrom(StreamInput in, Index index, int shardId) throws IOException {
this.index = index;
this.shardId = shardId;
readFromThin(in);
@@ -344,7 +342,7 @@ public final class ShardRouting implements Streamable, ToXContent {
@Override
public void readFrom(StreamInput in) throws IOException {
- readFrom(in, in.readString(), in.readVInt());
+ readFrom(in, Index.readIndex(in), in.readVInt());
}
/**
@@ -398,7 +396,7 @@ public final class ShardRouting implements Streamable, ToXContent {
@Override
public void writeTo(StreamOutput out) throws IOException {
- out.writeString(index);
+ index.writeTo(out);
out.writeVInt(shardId);
writeToThin(out);
}
@@ -720,7 +718,7 @@ public final class ShardRouting implements Streamable, ToXContent {
.field("node", currentNodeId())
.field("relocating_node", relocatingNodeId())
.field("shard", shardId().id())
- .field("index", shardId().index().name())
+ .field("index", shardId().getIndex().getName())
.field("version", version);
if (expectedShardSize != UNAVAILABLE_EXPECTED_SHARD_SIZE) {
builder.field("expected_shard_size_in_bytes", expectedShardSize);
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java b/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java
index 7c446aa13d..714c1e4913 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java
@@ -106,7 +106,7 @@ public class UnassignedInfo implements ToXContent, Writeable<UnassignedInfo> {
private final Reason reason;
private final long unassignedTimeMillis; // used for display and log messages, in milliseconds
private final long unassignedTimeNanos; // in nanoseconds, used to calculate delay for delayed shard allocation
- private volatile long lastComputedLeftDelayNanos = 0l; // how long to delay shard allocation, not serialized (always positive, 0 means no delay)
+ private volatile long lastComputedLeftDelayNanos = 0L; // how long to delay shard allocation, not serialized (always positive, 0 means no delay)
private final String message;
private final Throwable failure;
@@ -217,7 +217,7 @@ public class UnassignedInfo implements ToXContent, Writeable<UnassignedInfo> {
return 0;
}
TimeValue delayTimeout = INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.get(indexSettings, settings);
- return Math.max(0l, delayTimeout.nanos());
+ return Math.max(0L, delayTimeout.nanos());
}
/**
@@ -236,8 +236,8 @@ public class UnassignedInfo implements ToXContent, Writeable<UnassignedInfo> {
public long updateDelay(long nanoTimeNow, Settings settings, Settings indexSettings) {
long delayTimeoutNanos = getAllocationDelayTimeoutSettingNanos(settings, indexSettings);
final long newComputedLeftDelayNanos;
- if (delayTimeoutNanos == 0l) {
- newComputedLeftDelayNanos = 0l;
+ if (delayTimeoutNanos == 0L) {
+ newComputedLeftDelayNanos = 0L;
} else {
assert nanoTimeNow >= unassignedTimeNanos;
newComputedLeftDelayNanos = Math.max(0L, delayTimeoutNanos - (nanoTimeNow - unassignedTimeNanos));
@@ -269,7 +269,7 @@ public class UnassignedInfo implements ToXContent, Writeable<UnassignedInfo> {
long minDelaySetting = Long.MAX_VALUE;
for (ShardRouting shard : state.routingTable().shardsWithState(ShardRoutingState.UNASSIGNED)) {
if (shard.primary() == false) {
- IndexMetaData indexMetaData = state.metaData().index(shard.getIndex());
+ IndexMetaData indexMetaData = state.metaData().index(shard.getIndexName());
boolean delayed = shard.unassignedInfo().getLastComputedLeftDelayNanos() > 0;
long delayTimeoutSetting = shard.unassignedInfo().getAllocationDelayTimeoutSettingNanos(settings, indexMetaData.getSettings());
if (delayed && delayTimeoutSetting > 0 && delayTimeoutSetting < minDelaySetting) {
@@ -277,7 +277,7 @@ public class UnassignedInfo implements ToXContent, Writeable<UnassignedInfo> {
}
}
}
- return minDelaySetting == Long.MAX_VALUE ? 0l : minDelaySetting;
+ return minDelaySetting == Long.MAX_VALUE ? 0L : minDelaySetting;
}
@@ -294,7 +294,7 @@ public class UnassignedInfo implements ToXContent, Writeable<UnassignedInfo> {
}
}
}
- return nextDelay == Long.MAX_VALUE ? 0l : nextDelay;
+ return nextDelay == Long.MAX_VALUE ? 0L : nextDelay;
}
public String shortSummary() {
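The 0l -> 0L changes in UnassignedInfo are purely cosmetic: a lowercase "l" long-literal suffix is easily misread as the digit one. For example:

    long a = 0l;  // reads like "01" in many fonts
    long b = 0L;  // unambiguous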
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java
index 2593759555..99b1974f5d 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java
@@ -131,7 +131,7 @@ public class AllocationService extends AbstractComponent {
for (IndexRoutingTable indexRoutingTable : newRoutingTable) {
final IndexMetaData indexMetaData = currentMetaData.index(indexRoutingTable.getIndex());
if (indexMetaData == null) {
- throw new IllegalStateException("no metadata found for index [" + indexRoutingTable.index() + "]");
+ throw new IllegalStateException("no metadata found for index " + indexRoutingTable.getIndex().getName());
}
IndexMetaData.Builder indexMetaDataBuilder = null;
for (IndexShardRoutingTable shardRoutings : indexRoutingTable) {
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java
index 80f634e13c..574f12265a 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java
@@ -42,7 +42,6 @@ import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.gateway.PriorityComparator;
-import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
@@ -513,7 +512,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards
final ModelNode sourceNode = nodes.get(node.nodeId());
assert sourceNode != null;
final NodeSorter sorter = newNodeSorter();
- sorter.reset(shard.getIndex());
+ sorter.reset(shard.getIndexName());
final ModelNode[] nodes = sorter.modelNodes;
assert sourceNode.containsShard(shard);
/*
@@ -591,24 +590,20 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards
*/
final AllocationDeciders deciders = allocation.deciders();
final PriorityComparator secondaryComparator = PriorityComparator.getAllocationComparator(allocation);
- final Comparator<ShardRouting> comparator = new Comparator<ShardRouting>() {
- @Override
- public int compare(ShardRouting o1,
- ShardRouting o2) {
- if (o1.primary() ^ o2.primary()) {
- return o1.primary() ? -1 : o2.primary() ? 1 : 0;
- }
- final int indexCmp;
- if ((indexCmp = o1.index().compareTo(o2.index())) == 0) {
- return o1.getId() - o2.getId();
- }
- // this comparator is more expensive than all the others up there
- // that's why it's added last even though it could be easier to read
- // if we'd apply it earlier. this comparator will only differentiate across
- // indices all shards of the same index is treated equally.
- final int secondary = secondaryComparator.compare(o1, o2);
- return secondary == 0 ? indexCmp : secondary;
+ final Comparator<ShardRouting> comparator = (o1, o2) -> {
+ if (o1.primary() ^ o2.primary()) {
+ return o1.primary() ? -1 : o2.primary() ? 1 : 0;
+ }
+ final int indexCmp;
+ if ((indexCmp = o1.getIndexName().compareTo(o2.getIndexName())) == 0) {
+ return o1.getId() - o2.getId();
}
+ // this comparator is more expensive than all the others up there
+ // that's why it's added last even though it could be easier to read
+ // if we'd apply it earlier. this comparator will only differentiate across
+ // indices; all shards of the same index are treated equally.
+ final int secondary = secondaryComparator.compare(o1, o2);
+ return secondary == 0 ? indexCmp : secondary;
};
/*
* we use 2 arrays and move replicas to the second array once we allocated an identical
@@ -655,7 +650,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards
}
if (!node.containsShard(shard)) {
// simulate weight if we would add shard to node
- float currentWeight = weight.weightShardAdded(this, node, shard.index());
+ float currentWeight = weight.weightShardAdded(this, node, shard.getIndexName());
/*
* Unless the operation is not providing any gains we
* don't check deciders
@@ -678,8 +673,8 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards
*/
if (currentDecision.type() == decision.type()) {
final int repId = shard.id();
- final int nodeHigh = node.highestPrimary(shard.index());
- final int minNodeHigh = minNode.highestPrimary(shard.index());
+ final int nodeHigh = node.highestPrimary(shard.getIndexName());
+ final int minNodeHigh = minNode.highestPrimary(shard.getIndexName());
if ((((nodeHigh > repId && minNodeHigh > repId) || (nodeHigh < repId && minNodeHigh < repId)) && (nodeHigh < minNodeHigh))
|| (nodeHigh > minNodeHigh && nodeHigh > repId && minNodeHigh < repId)) {
minNode = node;
@@ -855,9 +850,9 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards
}
public void addShard(ShardRouting shard, Decision decision) {
- ModelIndex index = indices.get(shard.index());
+ ModelIndex index = indices.get(shard.getIndexName());
if (index == null) {
- index = new ModelIndex(shard.index());
+ index = new ModelIndex(shard.getIndexName());
indices.put(index.getIndexId(), index);
}
index.addShard(shard, decision);
@@ -865,12 +860,12 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards
}
public Decision removeShard(ShardRouting shard) {
- ModelIndex index = indices.get(shard.index());
+ ModelIndex index = indices.get(shard.getIndexName());
Decision removed = null;
if (index != null) {
removed = index.removeShard(shard);
if (removed != null && index.numShards() == 0) {
- indices.remove(shard.index());
+ indices.remove(shard.getIndexName());
}
}
numShards--;
@@ -890,7 +885,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards
}
public boolean containsShard(ShardRouting shard) {
- ModelIndex index = getIndex(shard.getIndex());
+ ModelIndex index = getIndex(shard.getIndexName());
return index == null ? false : index.containsShard(shard);
}
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java
index 31fc51a897..ed136d67d5 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java
@@ -35,7 +35,6 @@ import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.shard.ShardId;
import java.io.IOException;
import java.util.function.Consumer;
@@ -101,15 +100,15 @@ public abstract class AbstractAllocateAllocationCommand implements AllocationCom
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
- builder.field(INDEX_KEY, shardId().index().name());
- builder.field(SHARD_KEY, shardId().id());
+ builder.field(INDEX_KEY, index());
+ builder.field(SHARD_KEY, shardId());
builder.field(NODE_KEY, node());
return builder;
}
public void writeTo(StreamOutput out) throws IOException {
- out.writeString(shardId.getIndex());
- out.writeVInt(shardId.getId());
+ out.writeString(index);
+ out.writeVInt(shardId);
out.writeString(node);
}
@@ -143,20 +142,32 @@ public abstract class AbstractAllocateAllocationCommand implements AllocationCom
}
}
- protected final ShardId shardId;
+ protected final String index;
+ protected final int shardId;
protected final String node;
- protected AbstractAllocateAllocationCommand(ShardId shardId, String node) {
+ protected AbstractAllocateAllocationCommand(String index, int shardId, String node) {
+ this.index = index;
this.shardId = shardId;
this.node = node;
}
+
+ /**
+ * Get the index name
+ *
+ * @return name of the index
+ */
+ public String index() {
+ return this.index;
+ }
+
/**
* Get the shard id
*
* @return id of the shard
*/
- public ShardId shardId() {
+ public int shardId() {
return this.shardId;
}
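With ShardId gone from the command base class, the index name and shard id now travel as separate fields. A sketch of the reworked accessors, using the replica command changed further below (the literal values are hypothetical):

    AbstractAllocateAllocationCommand cmd =
        new AllocateReplicaAllocationCommand("my-index", 0, "node-1");
    String index = cmd.index();  // "my-index" (new accessor)
    int shard = cmd.shardId();   // 0 -- previously this returned a ShardId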
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java
index f607755bca..c7ddefedc2 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java
@@ -51,8 +51,8 @@ public class AllocateEmptyPrimaryAllocationCommand extends BasePrimaryAllocation
* @param node node id of the node to assign the shard to
* @param acceptDataLoss whether the user agrees to data loss
*/
- public AllocateEmptyPrimaryAllocationCommand(ShardId shardId, String node, boolean acceptDataLoss) {
- super(shardId, node, acceptDataLoss);
+ public AllocateEmptyPrimaryAllocationCommand(String index, int shardId, String node, boolean acceptDataLoss) {
+ super(index, shardId, node, acceptDataLoss);
}
@Override
@@ -70,7 +70,7 @@ public class AllocateEmptyPrimaryAllocationCommand extends BasePrimaryAllocation
@Override
public AllocateEmptyPrimaryAllocationCommand build() {
validate();
- return new AllocateEmptyPrimaryAllocationCommand(new ShardId(index, shard), node, acceptDataLoss);
+ return new AllocateEmptyPrimaryAllocationCommand(index, shard, node, acceptDataLoss);
}
}
@@ -98,17 +98,17 @@ public class AllocateEmptyPrimaryAllocationCommand extends BasePrimaryAllocation
final ShardRouting shardRouting;
try {
- shardRouting = allocation.routingTable().shardRoutingTable(shardId).primaryShard();
+ shardRouting = allocation.routingTable().shardRoutingTable(index, shardId).primaryShard();
} catch (IndexNotFoundException | ShardNotFoundException e) {
return explainOrThrowRejectedCommand(explain, allocation, e);
}
if (shardRouting.unassigned() == false) {
- return explainOrThrowRejectedCommand(explain, allocation, "primary " + shardId + " is already assigned");
+ return explainOrThrowRejectedCommand(explain, allocation, "primary [" + index + "][" + shardId + "] is already assigned");
}
if (shardRouting.unassignedInfo().getReason() != UnassignedInfo.Reason.INDEX_CREATED && acceptDataLoss == false) {
return explainOrThrowRejectedCommand(explain, allocation,
- "allocating an empty primary for " + shardId + " can result in data loss. Please confirm by setting the accept_data_loss parameter to true");
+ "allocating an empty primary for [" + index + "][" + shardId + "] can result in data loss. Please confirm by setting the accept_data_loss parameter to true");
}
initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting,
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateReplicaAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateReplicaAllocationCommand.java
index f9d443a661..616e08b8f3 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateReplicaAllocationCommand.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateReplicaAllocationCommand.java
@@ -47,11 +47,12 @@ public class AllocateReplicaAllocationCommand extends AbstractAllocateAllocation
/**
* Creates a new {@link AllocateReplicaAllocationCommand}
*
- * @param shardId {@link ShardId} of the shard to assign
+ * @param index index of the shard to assign
+ * @param shardId id of the shard to assign
* @param node node id of the node to assign the shard to
*/
- public AllocateReplicaAllocationCommand(ShardId shardId, String node) {
- super(shardId, node);
+ public AllocateReplicaAllocationCommand(String index, int shardId, String node) {
+ super(index, shardId, node);
}
@Override
@@ -69,7 +70,7 @@ public class AllocateReplicaAllocationCommand extends AbstractAllocateAllocation
@Override
public AllocateReplicaAllocationCommand build() {
validate();
- return new AllocateReplicaAllocationCommand(new ShardId(index, shard), node);
+ return new AllocateReplicaAllocationCommand(index, shard, node);
}
}
@@ -96,20 +97,20 @@ public class AllocateReplicaAllocationCommand extends AbstractAllocateAllocation
final ShardRouting primaryShardRouting;
try {
- primaryShardRouting = allocation.routingTable().shardRoutingTable(shardId).primaryShard();
+ primaryShardRouting = allocation.routingTable().shardRoutingTable(index, shardId).primaryShard();
} catch (IndexNotFoundException | ShardNotFoundException e) {
return explainOrThrowRejectedCommand(explain, allocation, e);
}
if (primaryShardRouting.unassigned()) {
return explainOrThrowRejectedCommand(explain, allocation,
- "trying to allocate a replica shard " + shardId + ", while corresponding primary shard is still unassigned");
+ "trying to allocate a replica shard [" + index + "][" + shardId + "], while corresponding primary shard is still unassigned");
}
- List<ShardRouting> replicaShardRoutings = allocation.routingTable().shardRoutingTable(shardId).replicaShardsWithState(ShardRoutingState.UNASSIGNED);
+ List<ShardRouting> replicaShardRoutings = allocation.routingTable().shardRoutingTable(index, shardId).replicaShardsWithState(ShardRoutingState.UNASSIGNED);
ShardRouting shardRouting;
if (replicaShardRoutings.isEmpty()) {
return explainOrThrowRejectedCommand(explain, allocation,
- "all copies of " + shardId +" are already assigned. Use the move allocation command instead");
+ "all copies of [" + index + "][" + shardId + "] are already assigned. Use the move allocation command instead");
} else {
shardRouting = replicaShardRoutings.get(0);
}
@@ -120,7 +121,7 @@ public class AllocateReplicaAllocationCommand extends AbstractAllocateAllocation
if (explain) {
return new RerouteExplanation(this, decision);
}
- throw new IllegalArgumentException("[" + name() + "] allocation of " + shardId + " on node " + discoNode + " is not allowed, reason: " + decision);
+ throw new IllegalArgumentException("[" + name() + "] allocation of [" + index + "][" + shardId + "] on node " + discoNode + " is not allowed, reason: " + decision);
}
initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting);
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java
index 22cedfc6aa..5ccd9e9bb6 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java
@@ -47,12 +47,13 @@ public class AllocateStalePrimaryAllocationCommand extends BasePrimaryAllocation
/**
* Creates a new {@link AllocateStalePrimaryAllocationCommand}
*
- * @param shardId {@link ShardId} of the shard to assign
+ * @param index index of the shard to assign
+ * @param shardId id of the shard to assign
* @param node node id of the node to assign the shard to
* @param acceptDataLoss whether the user agrees to data loss
*/
- public AllocateStalePrimaryAllocationCommand(ShardId shardId, String node, boolean acceptDataLoss) {
- super(shardId, node, acceptDataLoss);
+ public AllocateStalePrimaryAllocationCommand(String index, int shardId, String node, boolean acceptDataLoss) {
+ super(index, shardId, node, acceptDataLoss);
}
@Override
@@ -70,7 +71,7 @@ public class AllocateStalePrimaryAllocationCommand extends BasePrimaryAllocation
@Override
public AllocateStalePrimaryAllocationCommand build() {
validate();
- return new AllocateStalePrimaryAllocationCommand(new ShardId(index, shard), node, acceptDataLoss);
+ return new AllocateStalePrimaryAllocationCommand(index, shard, node, acceptDataLoss);
}
}
@@ -98,23 +99,23 @@ public class AllocateStalePrimaryAllocationCommand extends BasePrimaryAllocation
final ShardRouting shardRouting;
try {
- shardRouting = allocation.routingTable().shardRoutingTable(shardId).primaryShard();
+ shardRouting = allocation.routingTable().shardRoutingTable(index, shardId).primaryShard();
} catch (IndexNotFoundException | ShardNotFoundException e) {
return explainOrThrowRejectedCommand(explain, allocation, e);
}
if (shardRouting.unassigned() == false) {
- return explainOrThrowRejectedCommand(explain, allocation, "primary " + shardId + " is already assigned");
+ return explainOrThrowRejectedCommand(explain, allocation, "primary [" + index + "][" + shardId + "] is already assigned");
}
if (acceptDataLoss == false) {
return explainOrThrowRejectedCommand(explain, allocation,
- "allocating an empty primary for " + shardId + " can result in data loss. Please confirm by setting the accept_data_loss parameter to true");
+ "allocating an empty primary for [" + index + "][" + shardId + "] can result in data loss. Please confirm by setting the accept_data_loss parameter to true");
}
- final IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndex());
+ final IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndexName());
if (shardRouting.allocatedPostIndexCreate(indexMetaData) == false) {
return explainOrThrowRejectedCommand(explain, allocation,
- "trying to allocate an existing primary shard " + shardId + ", while no such shard has ever been active");
+ "trying to allocate an existing primary shard [" + index + "][" + shardId + "], while no such shard has ever been active");
}
initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting);
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/BasePrimaryAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/BasePrimaryAllocationCommand.java
index 35c1711d64..b982952c1f 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/BasePrimaryAllocationCommand.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/BasePrimaryAllocationCommand.java
@@ -44,8 +44,8 @@ public abstract class BasePrimaryAllocationCommand extends AbstractAllocateAlloc
protected final boolean acceptDataLoss;
- protected BasePrimaryAllocationCommand(ShardId shardId, String node, boolean acceptDataLoss) {
- super(shardId, node);
+ protected BasePrimaryAllocationCommand(String index, int shardId, String node, boolean acceptDataLoss) {
+ super(index, shardId, node);
this.acceptDataLoss = acceptDataLoss;
}
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java
index c485cb3eab..32eae8c19d 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java
@@ -33,7 +33,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.shard.ShardId;
import java.io.IOException;
@@ -53,12 +52,13 @@ public class CancelAllocationCommand implements AllocationCommand {
@Override
public CancelAllocationCommand readFrom(StreamInput in) throws IOException {
- return new CancelAllocationCommand(ShardId.readShardId(in), in.readString(), in.readBoolean());
+ return new CancelAllocationCommand(in.readString(), in.readVInt(), in.readString(), in.readBoolean());
}
@Override
public void writeTo(CancelAllocationCommand command, StreamOutput out) throws IOException {
- command.shardId().writeTo(out);
+ out.writeString(command.index());
+ out.writeVInt(command.shardId());
out.writeString(command.node());
out.writeBoolean(command.allowPrimary());
}
@@ -100,7 +100,7 @@ public class CancelAllocationCommand implements AllocationCommand {
if (nodeId == null) {
throw new ElasticsearchParseException("[{}] command missing the node parameter", NAME);
}
- return new CancelAllocationCommand(new ShardId(index, shardId), nodeId, allowPrimary);
+ return new CancelAllocationCommand(index, shardId, nodeId, allowPrimary);
}
@Override
@@ -110,8 +110,8 @@ public class CancelAllocationCommand implements AllocationCommand {
} else {
builder.startObject(objectName);
}
- builder.field("index", command.shardId().index().name());
- builder.field("shard", command.shardId().id());
+ builder.field("index", command.index());
+ builder.field("shard", command.shardId());
builder.field("node", command.node());
builder.field("allow_primary", command.allowPrimary());
builder.endObject();
@@ -119,17 +119,20 @@ public class CancelAllocationCommand implements AllocationCommand {
}
- private final ShardId shardId;
+ private final String index;
+ private final int shardId;
private final String node;
private final boolean allowPrimary;
/**
* Creates a new {@link CancelAllocationCommand}
*
+ * @param index index of the shard which allocation should be canceled
* @param shardId id of the shard which allocation should be canceled
* @param node id of the node that manages the shard which allocation should be canceled
*/
- public CancelAllocationCommand(ShardId shardId, String node, boolean allowPrimary) {
+ public CancelAllocationCommand(String index, int shardId, String node, boolean allowPrimary) {
+ this.index = index;
this.shardId = shardId;
this.node = node;
this.allowPrimary = allowPrimary;
@@ -141,10 +144,18 @@ public class CancelAllocationCommand implements AllocationCommand {
}
/**
+ * Get the index of the shard which allocation should be canceled
+ * @return index of the shard which allocation should be canceled
+ */
+ public String index() {
+ return this.index;
+ }
+
+ /**
* Get the id of the shard which allocation should be canceled
* @return id of the shard which allocation should be canceled
*/
- public ShardId shardId() {
+ public int shardId() {
return this.shardId;
}
@@ -166,7 +177,10 @@ public class CancelAllocationCommand implements AllocationCommand {
boolean found = false;
for (RoutingNodes.RoutingNodeIterator it = allocation.routingNodes().routingNodeIter(discoNode.id()); it.hasNext(); ) {
ShardRouting shardRouting = it.next();
- if (!shardRouting.shardId().equals(shardId)) {
+ if (!shardRouting.shardId().getIndex().getName().equals(index)) {
+ continue;
+ }
+ if (shardRouting.shardId().id() != shardId) {
continue;
}
found = true;
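CancelAllocationCommand follows the same split. A hypothetical construction, with the wire order implied by the writeTo change above:

    CancelAllocationCommand cancel =
        new CancelAllocationCommand("my-index", 0, "node-1", false);
    // serialized as: index (string), shard id (vint), node (string), allowPrimary (boolean)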
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java
index ed535df2f4..75a9400807 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java
@@ -32,7 +32,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.shard.ShardId;
import java.io.IOException;
@@ -49,12 +48,13 @@ public class MoveAllocationCommand implements AllocationCommand {
@Override
public MoveAllocationCommand readFrom(StreamInput in) throws IOException {
- return new MoveAllocationCommand(ShardId.readShardId(in), in.readString(), in.readString());
+ return new MoveAllocationCommand(in.readString(), in.readVInt(), in.readString(), in.readString());
}
@Override
public void writeTo(MoveAllocationCommand command, StreamOutput out) throws IOException {
- command.shardId().writeTo(out);
+ out.writeString(command.index());
+ out.writeVInt(command.shardId());
out.writeString(command.fromNode());
out.writeString(command.toNode());
}
@@ -99,7 +99,7 @@ public class MoveAllocationCommand implements AllocationCommand {
if (toNode == null) {
throw new ElasticsearchParseException("[{}] command missing the to_node parameter", NAME);
}
- return new MoveAllocationCommand(new ShardId(index, shardId), fromNode, toNode);
+ return new MoveAllocationCommand(index, shardId, fromNode, toNode);
}
@Override
@@ -109,19 +109,21 @@ public class MoveAllocationCommand implements AllocationCommand {
} else {
builder.startObject(objectName);
}
- builder.field("index", command.shardId().index().name());
- builder.field("shard", command.shardId().id());
+ builder.field("index", command.index());
+ builder.field("shard", command.shardId());
builder.field("from_node", command.fromNode());
builder.field("to_node", command.toNode());
builder.endObject();
}
}
- private final ShardId shardId;
+ private final String index;
+ private final int shardId;
private final String fromNode;
private final String toNode;
- public MoveAllocationCommand(ShardId shardId, String fromNode, String toNode) {
+ public MoveAllocationCommand(String index, int shardId, String fromNode, String toNode) {
+ this.index = index;
this.shardId = shardId;
this.fromNode = fromNode;
this.toNode = toNode;
@@ -132,7 +134,9 @@ public class MoveAllocationCommand implements AllocationCommand {
return NAME;
}
- public ShardId shardId() {
+ public String index() { return index; }
+
+ public int shardId() {
return this.shardId;
}
@@ -152,7 +156,10 @@ public class MoveAllocationCommand implements AllocationCommand {
boolean found = false;
for (ShardRouting shardRouting : allocation.routingNodes().node(fromDiscoNode.id())) {
- if (!shardRouting.shardId().equals(shardId)) {
+ if (!shardRouting.shardId().getIndexName().equals(index)) {
+ continue;
+ }
+ if (shardRouting.shardId().id() != shardId) {
continue;
}
found = true;
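Likewise for MoveAllocationCommand (values again hypothetical):

    MoveAllocationCommand move =
        new MoveAllocationCommand("my-index", 0, "node-1", "node-2");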
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java
index b1be2a6fce..11fce397b2 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java
@@ -90,7 +90,7 @@ public class ClusterRebalanceAllocationDecider extends AllocationDecider {
logger.warn("[{}] has a wrong value {}, defaulting to 'indices_all_active'", CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING, CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getRaw(settings));
type = ClusterRebalanceType.INDICES_ALL_ACTIVE;
}
- logger.debug("using [{}] with [{}]", CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING, type.toString().toLowerCase(Locale.ROOT));
+ logger.debug("using [{}] with [{}]", CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), type.toString().toLowerCase(Locale.ROOT));
clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING, this::setType);
}
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java
index 23624f050a..c012017976 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java
@@ -330,7 +330,7 @@ public class DiskThresholdDecider extends AllocationDecider {
}
// a flag for whether the primary shard has been previously allocated
- IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndex());
+ IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndexName());
boolean primaryHasBeenAllocated = shardRouting.primary() && shardRouting.allocatedPostIndexCreate(indexMetaData);
// checks for exact byte comparisons
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java
index 3c2e649387..9131355876 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java
@@ -92,7 +92,7 @@ public class EnableAllocationDecider extends AllocationDecider {
return allocation.decision(Decision.YES, NAME, "allocation disabling is ignored");
}
- final IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndex());
+ final IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndexName());
final Allocation enable;
if (INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.exists(indexMetaData.getSettings())) {
enable = INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.get(indexMetaData.getSettings());
diff --git a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java
index 98d98414db..b592eeb146 100644
--- a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java
@@ -190,7 +190,7 @@ public class InternalClusterService extends AbstractLifecycleComponent<ClusterSe
protected void doStart() {
add(localNodeMasterListeners);
this.clusterState = ClusterState.builder(clusterState).blocks(initialBlocks).build();
- this.updateTasksExecutor = EsExecutors.newSinglePrioritizing(UPDATE_THREAD_NAME, daemonThreadFactory(settings, UPDATE_THREAD_NAME));
+ this.updateTasksExecutor = EsExecutors.newSinglePrioritizing(UPDATE_THREAD_NAME, daemonThreadFactory(settings, UPDATE_THREAD_NAME), threadPool.getThreadContext());
this.reconnectToNodes = threadPool.schedule(reconnectInterval, ThreadPool.Names.GENERIC, new ReconnectToNodes());
Map<String, String> nodeAttributes = discoveryNodeService.buildAttributes();
// note, we rely on the fact that its a new id each time we start, see FD and "kill -9" handling
diff --git a/core/src/main/java/org/elasticsearch/common/ContextAndHeaderHolder.java b/core/src/main/java/org/elasticsearch/common/ContextAndHeaderHolder.java
deleted file mode 100644
index 9a3140dba1..0000000000
--- a/core/src/main/java/org/elasticsearch/common/ContextAndHeaderHolder.java
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common;
-
-import com.carrotsearch.hppc.ObjectObjectAssociativeContainer;
-import com.carrotsearch.hppc.ObjectObjectHashMap;
-import org.elasticsearch.common.collect.ImmutableOpenMap;
-
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Set;
-
-/**
- *
- */
-public class ContextAndHeaderHolder implements HasContextAndHeaders {
-
- private ObjectObjectHashMap<Object, Object> context;
- protected Map<String, Object> headers;
-
- @SuppressWarnings("unchecked")
- @Override
- public final synchronized <V> V putInContext(Object key, Object value) {
- if (context == null) {
- context = new ObjectObjectHashMap<>(2);
- }
- return (V) context.put(key, value);
- }
-
- @Override
- public final synchronized void putAllInContext(ObjectObjectAssociativeContainer<Object, Object> map) {
- if (map == null) {
- return;
- }
- if (context == null) {
- context = new ObjectObjectHashMap<>(map);
- } else {
- context.putAll(map);
- }
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public final synchronized <V> V getFromContext(Object key) {
- return context != null ? (V) context.get(key) : null;
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public final synchronized <V> V getFromContext(Object key, V defaultValue) {
- V value = getFromContext(key);
- return value == null ? defaultValue : value;
- }
-
- @Override
- public final synchronized boolean hasInContext(Object key) {
- return context != null && context.containsKey(key);
- }
-
- @Override
- public final synchronized int contextSize() {
- return context != null ? context.size() : 0;
- }
-
- @Override
- public final synchronized boolean isContextEmpty() {
- return context == null || context.isEmpty();
- }
-
- @Override
- public synchronized ImmutableOpenMap<Object, Object> getContext() {
- return context != null ? ImmutableOpenMap.copyOf(context) : ImmutableOpenMap.of();
- }
-
- @Override
- public synchronized void copyContextFrom(HasContext other) {
- if (other == null) {
- return;
- }
-
- synchronized (other) {
- ImmutableOpenMap<Object, Object> otherContext = other.getContext();
- if (otherContext == null) {
- return;
- }
- if (context == null) {
- ObjectObjectHashMap<Object, Object> map = new ObjectObjectHashMap<>(other.getContext().size());
- map.putAll(otherContext);
- this.context = map;
- } else {
- context.putAll(otherContext);
- }
- }
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public final void putHeader(String key, Object value) {
- if (headers == null) {
- headers = new HashMap<>();
- }
- headers.put(key, value);
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public final <V> V getHeader(String key) {
- return headers != null ? (V) headers.get(key) : null;
- }
-
- @Override
- public final boolean hasHeader(String key) {
- return headers != null && headers.containsKey(key);
- }
-
- @Override
- public Set<String> getHeaders() {
- return headers != null ? headers.keySet() : Collections.<String>emptySet();
- }
-
- @Override
- public void copyHeadersFrom(HasHeaders from) {
- if (from != null && from.getHeaders() != null && !from.getHeaders().isEmpty()) {
- for (String headerName : from.getHeaders()) {
- putHeader(headerName, from.getHeader(headerName));
- }
- }
- }
-
- @Override
- public void copyContextAndHeadersFrom(HasContextAndHeaders other) {
- copyContextFrom(other);
- copyHeadersFrom(other);
- }
-}
diff --git a/core/src/main/java/org/elasticsearch/common/DelegatingHasContextAndHeaders.java b/core/src/main/java/org/elasticsearch/common/DelegatingHasContextAndHeaders.java
deleted file mode 100644
index 52d5af5e33..0000000000
--- a/core/src/main/java/org/elasticsearch/common/DelegatingHasContextAndHeaders.java
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common;
-
-import com.carrotsearch.hppc.ObjectObjectAssociativeContainer;
-import org.elasticsearch.common.collect.ImmutableOpenMap;
-
-import java.util.Set;
-
-public class DelegatingHasContextAndHeaders implements HasContextAndHeaders {
-
- private HasContextAndHeaders delegate;
-
- public DelegatingHasContextAndHeaders(HasContextAndHeaders delegate) {
- this.delegate = delegate;
- }
-
- @Override
- public <V> void putHeader(String key, V value) {
- delegate.putHeader(key, value);
- }
-
- @Override
- public void copyContextAndHeadersFrom(HasContextAndHeaders other) {
- delegate.copyContextAndHeadersFrom(other);
- }
-
- @Override
- public <V> V getHeader(String key) {
- return delegate.getHeader(key);
- }
-
- @Override
- public boolean hasHeader(String key) {
- return delegate.hasHeader(key);
- }
-
- @Override
- public <V> V putInContext(Object key, Object value) {
- return delegate.putInContext(key, value);
- }
-
- @Override
- public Set<String> getHeaders() {
- return delegate.getHeaders();
- }
-
- @Override
- public void copyHeadersFrom(HasHeaders from) {
- delegate.copyHeadersFrom(from);
- }
-
- @Override
- public void putAllInContext(ObjectObjectAssociativeContainer<Object, Object> map) {
- delegate.putAllInContext(map);
- }
-
- @Override
- public <V> V getFromContext(Object key) {
- return delegate.getFromContext(key);
- }
-
- @Override
- public <V> V getFromContext(Object key, V defaultValue) {
- return delegate.getFromContext(key, defaultValue);
- }
-
- @Override
- public boolean hasInContext(Object key) {
- return delegate.hasInContext(key);
- }
-
- @Override
- public int contextSize() {
- return delegate.contextSize();
- }
-
- @Override
- public boolean isContextEmpty() {
- return delegate.isContextEmpty();
- }
-
- @Override
- public ImmutableOpenMap<Object, Object> getContext() {
- return delegate.getContext();
- }
-
- @Override
- public void copyContextFrom(HasContext other) {
- delegate.copyContextFrom(other);
- }
-
-
-}
diff --git a/core/src/main/java/org/elasticsearch/common/HasContext.java b/core/src/main/java/org/elasticsearch/common/HasContext.java
deleted file mode 100644
index 6a303e39ba..0000000000
--- a/core/src/main/java/org/elasticsearch/common/HasContext.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common;
-
-import com.carrotsearch.hppc.ObjectObjectAssociativeContainer;
-import org.elasticsearch.common.collect.ImmutableOpenMap;
-
-public interface HasContext {
-
- /**
- * Attaches the given value to the context.
- *
- * @return The previous value that was associated with the given key in the context, or
- * {@code null} if there was none.
- */
- <V> V putInContext(Object key, Object value);
-
- /**
- * Attaches the given values to the context
- */
- void putAllInContext(ObjectObjectAssociativeContainer<Object, Object> map);
-
- /**
- * @return The context value that is associated with the given key
- *
- * @see #putInContext(Object, Object)
- */
- <V> V getFromContext(Object key);
-
- /**
- * @param defaultValue The default value that should be returned for the given key, if no
- * value is currently associated with it.
- *
- * @return The value that is associated with the given key in the context
- *
- * @see #putInContext(Object, Object)
- */
- <V> V getFromContext(Object key, V defaultValue);
-
- /**
- * Checks if the context contains an entry with the given key
- */
- boolean hasInContext(Object key);
-
- /**
- * @return The number of values attached in the context.
- */
- int contextSize();
-
- /**
- * Checks if the context is empty.
- */
- boolean isContextEmpty();
-
- /**
- * @return A safe immutable copy of the current context.
- */
- ImmutableOpenMap<Object, Object> getContext();
-
- /**
- * Copies the context from the given context holder to this context holder. Any shared keys between
- * the two context will be overridden by the given context holder.
- */
- void copyContextFrom(HasContext other);
-}
diff --git a/core/src/main/java/org/elasticsearch/common/Randomness.java b/core/src/main/java/org/elasticsearch/common/Randomness.java
index 7f71afc1c7..154ebf3736 100644
--- a/core/src/main/java/org/elasticsearch/common/Randomness.java
+++ b/core/src/main/java/org/elasticsearch/common/Randomness.java
@@ -19,6 +19,7 @@
package org.elasticsearch.common;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import java.lang.reflect.Method;
@@ -40,7 +41,7 @@ import java.util.concurrent.ThreadLocalRandom;
* setting a reproducible seed. When running the Elasticsearch server
* process, non-reproducible sources of randomness are provided (unless
* a setting is provided for a module that exposes a seed setting (e.g.,
- * DiscoveryService#SETTING_DISCOVERY_SEED)).
+ * DiscoveryService#DISCOVERY_SEED_SETTING)).
*/
public final class Randomness {
private static final Method currentMethod;
@@ -68,13 +69,12 @@ public final class Randomness {
* seed in the settings with the key setting.
*
* @param settings the settings containing the seed
- * @param setting the key to access the seed
+ * @param setting the setting to access the seed
* @return a reproducible source of randomness
*/
- public static Random get(Settings settings, String setting) {
- Long maybeSeed = settings.getAsLong(setting, null);
- if (maybeSeed != null) {
- return new Random(maybeSeed);
+ public static Random get(Settings settings, Setting<Long> setting) {
+ if (setting.exists(settings)) {
+ return new Random(setting.get(settings));
} else {
return get();
}
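Randomness.get now takes a typed Setting&lt;Long&gt; rather than a raw key string, so existence checks and parsing go through the settings infrastructure. A sketch, assuming DiscoveryService#DISCOVERY_SEED_SETTING from the javadoc above is such a setting:

    Random random = Randomness.get(settings, DiscoveryService.DISCOVERY_SEED_SETTING);
    // falls back to a non-reproducible source when no seed is configured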
diff --git a/core/src/main/java/org/elasticsearch/common/blobstore/BlobStore.java b/core/src/main/java/org/elasticsearch/common/blobstore/BlobStore.java
index df3a8c6be8..9275b37915 100644
--- a/core/src/main/java/org/elasticsearch/common/blobstore/BlobStore.java
+++ b/core/src/main/java/org/elasticsearch/common/blobstore/BlobStore.java
@@ -18,16 +18,16 @@
*/
package org.elasticsearch.common.blobstore;
+import java.io.Closeable;
import java.io.IOException;
/**
*
*/
-public interface BlobStore {
+public interface BlobStore extends Closeable {
BlobContainer blobContainer(BlobPath path);
void delete(BlobPath path) throws IOException;
- void close();
}
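Extending Closeable lets BlobStore implementations participate in try-with-resources and makes the explicit close() declaration redundant. A sketch with a hypothetical factory method:

    try (BlobStore store = createBlobStore()) {
        store.delete(BlobPath.cleanPath());
    }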
diff --git a/core/src/main/java/org/elasticsearch/common/cli/HelpPrinter.java b/core/src/main/java/org/elasticsearch/common/cli/HelpPrinter.java
index 4f694e9af3..5a463258eb 100644
--- a/core/src/main/java/org/elasticsearch/common/cli/HelpPrinter.java
+++ b/core/src/main/java/org/elasticsearch/common/cli/HelpPrinter.java
@@ -50,7 +50,7 @@ public class HelpPrinter {
}
});
} catch (IOException ioe) {
- ioe.printStackTrace(terminal.writer());
+ throw new RuntimeException(ioe);
}
terminal.println();
}
diff --git a/core/src/main/java/org/elasticsearch/common/cli/Terminal.java b/core/src/main/java/org/elasticsearch/common/cli/Terminal.java
index 82898b3e45..5e4bc09ad9 100644
--- a/core/src/main/java/org/elasticsearch/common/cli/Terminal.java
+++ b/core/src/main/java/org/elasticsearch/common/cli/Terminal.java
@@ -132,8 +132,6 @@ public abstract class Terminal {
protected abstract void doPrint(String msg, Object... args);
- public abstract PrintWriter writer();
-
private static class ConsoleTerminal extends Terminal {
final Console console = System.console();
@@ -159,11 +157,6 @@ public abstract class Terminal {
}
@Override
- public PrintWriter writer() {
- return console.writer();
- }
-
- @Override
public void printStackTrace(Throwable t) {
t.printStackTrace(console.writer());
}
@@ -199,10 +192,5 @@ public abstract class Terminal {
public void printStackTrace(Throwable t) {
t.printStackTrace(printWriter);
}
-
- @Override
- public PrintWriter writer() {
- return printWriter;
- }
}
}
diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java b/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java
index cec805e7a8..c5c36b5b0c 100644
--- a/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java
+++ b/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java
@@ -179,7 +179,7 @@ public class GeoUtils {
final double width = Math.sqrt((meters*meters)/(ratio*ratio)); // convert to cell width
final long part = Math.round(Math.ceil(EARTH_EQUATOR / width));
final int level = Long.SIZE - Long.numberOfLeadingZeros(part)-1; // (log_2)
- return (part<=(1l<<level)) ?level :(level+1); // adjust level
+ return (part <= (1L << level)) ? level : (level + 1); // adjust level
}
}
diff --git a/core/src/main/java/org/elasticsearch/common/http/client/HttpDownloadHelper.java b/core/src/main/java/org/elasticsearch/common/http/client/HttpDownloadHelper.java
deleted file mode 100644
index b99ef89543..0000000000
--- a/core/src/main/java/org/elasticsearch/common/http/client/HttpDownloadHelper.java
+++ /dev/null
@@ -1,488 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common.http.client;
-
-import org.apache.lucene.util.IOUtils;
-import org.elasticsearch.Build;
-import org.elasticsearch.ElasticsearchCorruptionException;
-import org.elasticsearch.ElasticsearchTimeoutException;
-import org.elasticsearch.Version;
-import org.elasticsearch.common.Base64;
-import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.hash.MessageDigests;
-import org.elasticsearch.common.unit.TimeValue;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.PrintWriter;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.net.URLConnection;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
-import java.nio.file.NoSuchFileException;
-import java.nio.file.Path;
-import java.nio.file.attribute.FileTime;
-import java.util.List;
-
-/**
- *
- */
-public class HttpDownloadHelper {
-
- private boolean useTimestamp = false;
- private boolean skipExisting = false;
-
- public boolean download(URL source, Path dest, @Nullable DownloadProgress progress, TimeValue timeout) throws Exception {
- if (Files.exists(dest) && skipExisting) {
- return true;
- }
-
- //don't do any progress, unless asked
- if (progress == null) {
- progress = new NullProgress();
- }
-
- //set the timestamp to the file date.
- long timestamp = 0;
-
- boolean hasTimestamp = false;
- if (useTimestamp && Files.exists(dest) ) {
- timestamp = Files.getLastModifiedTime(dest).toMillis();
- hasTimestamp = true;
- }
-
- GetThread getThread = new GetThread(source, dest, hasTimestamp, timestamp, progress);
-
- try {
- getThread.setDaemon(true);
- getThread.start();
- getThread.join(timeout.millis());
-
- if (getThread.isAlive()) {
- throw new ElasticsearchTimeoutException("The GET operation took longer than " + timeout + ", stopping it.");
- }
- }
- catch (InterruptedException ie) {
- return false;
- } finally {
- getThread.closeStreams();
- }
-
- return getThread.wasSuccessful();
- }
-
- public interface Checksummer {
- /** Return the hex string for the given byte array */
- String checksum(byte[] filebytes);
- /** Human-readable name for the checksum format */
- String name();
- }
-
- /** Checksummer for SHA1 */
- public static Checksummer SHA1_CHECKSUM = new Checksummer() {
- @Override
- public String checksum(byte[] filebytes) {
- return MessageDigests.toHexString(MessageDigests.sha1().digest(filebytes));
- }
-
- @Override
- public String name() {
- return "SHA1";
- }
- };
-
- /** Checksummer for MD5 */
- public static Checksummer MD5_CHECKSUM = new Checksummer() {
- @Override
- public String checksum(byte[] filebytes) {
- return MessageDigests.toHexString(MessageDigests.md5().digest(filebytes));
- }
-
- @Override
- public String name() {
- return "MD5";
- }
- };
-
- /**
- * Download the given checksum URL to the destination and check the checksum
- * @param checksumURL URL for the checksum file
- * @param originalFile original file to calculate checksum of
- * @param checksumFile destination to download the checksum file to
- * @param hashFunc class used to calculate the checksum of the file
- * @return true if the checksum was validated, false if it did not exist
- * @throws Exception if the checksum failed to match
- */
- public boolean downloadAndVerifyChecksum(URL checksumURL, Path originalFile, Path checksumFile,
- @Nullable DownloadProgress progress,
- TimeValue timeout, Checksummer hashFunc) throws Exception {
- try {
- if (download(checksumURL, checksumFile, progress, timeout)) {
- byte[] fileBytes = Files.readAllBytes(originalFile);
- List<String> checksumLines = Files.readAllLines(checksumFile, StandardCharsets.UTF_8);
- if (checksumLines.size() != 1) {
- throw new ElasticsearchCorruptionException("invalid format for checksum file (" +
- hashFunc.name() + "), expected 1 line, got: " + checksumLines.size());
- }
- String checksumHex = checksumLines.get(0);
- String fileHex = hashFunc.checksum(fileBytes);
- if (fileHex.equals(checksumHex) == false) {
- throw new ElasticsearchCorruptionException("incorrect hash (" + hashFunc.name() +
- "), file hash: [" + fileHex + "], expected: [" + checksumHex + "]");
- }
- return true;
- }
- } catch (FileNotFoundException | NoSuchFileException e) {
- // checksum file doesn't exist
- return false;
- } finally {
- IOUtils.deleteFilesIgnoringExceptions(checksumFile);
- }
- return false;
- }
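A hedged usage sketch for the method above; the URL and paths are hypothetical and the snippet is assumed to run inside a method declared to throw Exception:

    HttpDownloadHelper helper = new HttpDownloadHelper();
    URL checksumUrl = new URL("https://example.org/plugin.zip.sha1");  // hypothetical
    Path pluginZip  = Paths.get("plugin.zip");                         // previously downloaded artifact
    Path tmpSha1    = Paths.get("plugin.zip.sha1");                    // scratch file, deleted in the finally block above
    boolean verified = helper.downloadAndVerifyChecksum(checksumUrl, pluginZip, tmpSha1,
            new HttpDownloadHelper.VerboseProgress(new PrintWriter(System.out)),
            TimeValue.timeValueSeconds(30), HttpDownloadHelper.SHA1_CHECKSUM);
    // false means no checksum file was published; a mismatch throws ElasticsearchCorruptionException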
-
- /**
- * Interface implemented for reporting
- * progress of downloading.
- */
- public interface DownloadProgress {
- /**
- * begin a download
- */
- void beginDownload();
-
- /**
- * tick handler
- */
- void onTick();
-
- /**
- * end a download
- */
- void endDownload();
- }
-
- /**
- * do nothing with progress info
- */
- public static class NullProgress implements DownloadProgress {
-
- /**
- * begin a download
- */
- @Override
- public void beginDownload() {
-
- }
-
- /**
- * tick handler
- */
- @Override
- public void onTick() {
- }
-
- /**
- * end a download
- */
- @Override
- public void endDownload() {
-
- }
- }
-
- /**
- * verbose progress system prints to some output stream
- */
- public static class VerboseProgress implements DownloadProgress {
- private int dots = 0;
- // CheckStyle:VisibilityModifier OFF - bc
- PrintWriter writer;
- // CheckStyle:VisibilityModifier ON
-
- /**
- * Construct a verbose progress reporter.
- *
- * @param writer the output stream.
- */
- public VerboseProgress(PrintWriter writer) {
- this.writer = writer;
- }
-
- /**
- * begin a download
- */
- @Override
- public void beginDownload() {
- writer.print("Downloading ");
- dots = 0;
- }
-
- /**
- * tick handler
- */
- @Override
- public void onTick() {
- writer.print(".");
- if (dots++ > 50) {
- writer.flush();
- dots = 0;
- }
- }
-
- /**
- * end a download
- */
- @Override
- public void endDownload() {
- writer.println("DONE");
- writer.flush();
- }
- }
-
- private class GetThread extends Thread {
-
- private final URL source;
- private final Path dest;
- private final boolean hasTimestamp;
- private final long timestamp;
- private final DownloadProgress progress;
-
- private boolean success = false;
- private IOException ioexception = null;
- private InputStream is = null;
- private OutputStream os = null;
- private URLConnection connection;
- private int redirections = 0;
-
- GetThread(URL source, Path dest, boolean h, long t, DownloadProgress p) {
- this.source = source;
- this.dest = dest;
- hasTimestamp = h;
- timestamp = t;
- progress = p;
- }
-
- @Override
- public void run() {
- try {
- success = get();
- } catch (IOException ioex) {
- ioexception = ioex;
- }
- }
-
- private boolean get() throws IOException {
-
- connection = openConnection(source);
-
- if (connection == null) {
- return false;
- }
-
- boolean downloadSucceeded = downloadFile();
-
- //if (and only if) the use file time option is set, then
- //the saved file now has its timestamp set to that of the
- //downloaded file
- if (downloadSucceeded && useTimestamp) {
- updateTimeStamp();
- }
-
- return downloadSucceeded;
- }
-
-
- private boolean redirectionAllowed(URL aSource, URL aDest) throws IOException {
- // Argh, github does this...
-// if (!(aSource.getProtocol().equals(aDest.getProtocol()) || ("http"
-// .equals(aSource.getProtocol()) && "https".equals(aDest
-// .getProtocol())))) {
-// String message = "Redirection detected from "
-// + aSource.getProtocol() + " to " + aDest.getProtocol()
-// + ". Protocol switch unsafe, not allowed.";
-// throw new IOException(message);
-// }
-
- redirections++;
- if (redirections > 5) {
- String message = "More than " + 5 + " times redirected, giving up";
- throw new IOException(message);
- }
-
-
- return true;
- }
-
- private URLConnection openConnection(URL aSource) throws IOException {
-
- // set up the URL connection
- URLConnection connection = aSource.openConnection();
- // modify the headers
- // NB: things like user authentication could go in here too.
- if (hasTimestamp) {
- connection.setIfModifiedSince(timestamp);
- }
-
- // in case the plugin manager is its own project, this can become an authenticator
-            boolean isSecureProtocol = "https".equalsIgnoreCase(aSource.getProtocol());
-            boolean isAuthInfoSet = !Strings.isNullOrEmpty(aSource.getUserInfo());
-            if (isAuthInfoSet) {
-                if (!isSecureProtocol) {
-                    throw new IOException("Basic auth is only supported for HTTPS!");
- }
- String basicAuth = Base64.encodeBytes(aSource.getUserInfo().getBytes(StandardCharsets.UTF_8));
- connection.setRequestProperty("Authorization", "Basic " + basicAuth);
- }
-
- if (connection instanceof HttpURLConnection) {
- ((HttpURLConnection) connection).setInstanceFollowRedirects(false);
- connection.setUseCaches(true);
- connection.setConnectTimeout(5000);
- }
- connection.setRequestProperty("ES-Version", Version.CURRENT.toString());
- connection.setRequestProperty("ES-Build-Hash", Build.CURRENT.shortHash());
- connection.setRequestProperty("User-Agent", "elasticsearch-plugin-manager");
-
- // connect to the remote site (may take some time)
- connection.connect();
-
- // First check on a 301 / 302 (moved) response (HTTP only)
- if (connection instanceof HttpURLConnection) {
- HttpURLConnection httpConnection = (HttpURLConnection) connection;
- int responseCode = httpConnection.getResponseCode();
- if (responseCode == HttpURLConnection.HTTP_MOVED_PERM ||
- responseCode == HttpURLConnection.HTTP_MOVED_TEMP ||
- responseCode == HttpURLConnection.HTTP_SEE_OTHER) {
- String newLocation = httpConnection.getHeaderField("Location");
- URL newURL = new URL(newLocation);
- if (!redirectionAllowed(aSource, newURL)) {
- return null;
- }
- return openConnection(newURL);
- }
- // next test for a 304 result (HTTP only)
- long lastModified = httpConnection.getLastModified();
- if (responseCode == HttpURLConnection.HTTP_NOT_MODIFIED
- || (lastModified != 0 && hasTimestamp && timestamp >= lastModified)) {
-                    // not modified, so there is nothing to download; return
-                    // null instead so the caller can tell that no download
-                    // happened
- return null;
- }
- // test for 401 result (HTTP only)
- if (responseCode == HttpURLConnection.HTTP_UNAUTHORIZED) {
- String message = "HTTP Authorization failure";
- throw new IOException(message);
- }
- }
-
-            // REVISIT: at this point even non-HTTP connections may
-            // support the if-modified-since behaviour; we just check
-            // the date of the content and skip the write if it is not
-            // newer. Some protocols (FTP) don't include dates, of
-            // course.
- return connection;
- }
-
- private boolean downloadFile() throws FileNotFoundException, IOException {
- IOException lastEx = null;
- for (int i = 0; i < 3; i++) {
-                // this three-attempt trick is to get around quirks in different
-                // Java implementations. Some of them take a few goes to bind
-                // properly; we ignore the first couple of such failures.
- try {
- is = connection.getInputStream();
- break;
- } catch (IOException ex) {
- lastEx = ex;
- }
- }
- if (is == null) {
- throw lastEx;
- }
-
- os = Files.newOutputStream(dest);
- progress.beginDownload();
- boolean finished = false;
- try {
- byte[] buffer = new byte[1024 * 100];
- int length;
- while (!isInterrupted() && (length = is.read(buffer)) >= 0) {
- os.write(buffer, 0, length);
- progress.onTick();
- }
- finished = !isInterrupted();
- } finally {
- if (!finished) {
- // we have started to (over)write dest, but failed.
- // Try to delete the garbage we'd otherwise leave
- // behind.
- IOUtils.closeWhileHandlingException(os, is);
- IOUtils.deleteFilesIgnoringExceptions(dest);
- } else {
- IOUtils.close(os, is);
- }
- }
- progress.endDownload();
- return true;
- }
-
- private void updateTimeStamp() throws IOException {
- long remoteTimestamp = connection.getLastModified();
- if (remoteTimestamp != 0) {
- Files.setLastModifiedTime(dest, FileTime.fromMillis(remoteTimestamp));
- }
- }
-
- /**
- * Has the download completed successfully?
- * <p>
-         * Re-throws any exception caught during execution.</p>
- */
- boolean wasSuccessful() throws IOException {
- if (ioexception != null) {
- throw ioexception;
- }
- return success;
- }
-
- /**
- * Closes streams, interrupts the download, may delete the
- * output file.
- */
- void closeStreams() throws IOException {
- interrupt();
- if (success) {
- IOUtils.close(is, os);
- } else {
- IOUtils.closeWhileHandlingException(is, os);
- if (dest != null && Files.exists(dest)) {
- IOUtils.deleteFilesIgnoringExceptions(dest);
- }
- }
- }
- }
-}
diff --git a/core/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java b/core/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java
index 08761f84ff..ab65f09036 100644
--- a/core/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java
+++ b/core/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java
@@ -53,33 +53,6 @@ public final class FileSystemUtils {
private FileSystemUtils() {} // only static methods
/**
- * Returns <code>true</code> iff a file under the given root has one of the given extensions. This method
- * traverses directories recursively and terminates once any of the extensions is found. This
- * method will not follow any links.
- *
- * @param root the root directory to traverse. Must be a directory
- * @param extensions the file extensions to look for
- * @return <code>true</code> iff a file under the given root has one of the given extensions, otherwise <code>false</code>
- * @throws IOException if an IOException occurs or if the given root path is not a directory.
- */
- public static boolean hasExtensions(Path root, final String... extensions) throws IOException {
- final AtomicBoolean retVal = new AtomicBoolean(false);
- Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
- @Override
- public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
- for (String extension : extensions) {
- if (file.getFileName().toString().endsWith(extension)) {
- retVal.set(true);
- return FileVisitResult.TERMINATE;
- }
- }
- return super.visitFile(file, attrs);
- }
- });
- return retVal.get();
- }
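One possible modern equivalent of the removed helper, sketched with Files.walk (which, like the visitor above, does not follow links by default, and whose anyMatch short-circuits much like FileVisitResult.TERMINATE); imports from java.nio.file, java.util and java.util.stream are assumed:

    static boolean hasExtensions(Path root, String... extensions) throws IOException {
        try (Stream<Path> stream = Files.walk(root)) {
            return stream.filter(Files::isRegularFile)
                         .map(p -> p.getFileName().toString())
                         .anyMatch(name -> Arrays.stream(extensions).anyMatch(name::endsWith));
        }
    }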
-
- /**
* Returns <code>true</code> iff one of the files exists otherwise <code>false</code>
*/
public static boolean exists(Path... files) {
@@ -169,167 +142,6 @@ public final class FileSystemUtils {
}
/**
- * This utility copies the content of a source directory (the directory itself excluded) under
- * a new directory, without overwriting existing files.
- *
- * When a file already exists in the destination dir, the source file is copied into the
- * destination directory with a suffix appended to its name if a suffix is set; if the
- * suffix is not set (null), the source file is ignored.
- * @param source Source directory (for example /tmp/es/src)
- * @param destination Destination directory (for example /tmp/es/dst)
- * @param suffix When not null, files are copied with a suffix appended to the original name (eg: ".new")
- * When null, conflicting files are ignored
- */
- public static void moveFilesWithoutOverwriting(Path source, final Path destination, final String suffix) throws IOException {
-
- // Create destination dir
- Files.createDirectories(destination);
-
- final int configPathRootLevel = source.getNameCount();
-
- // We walk through the file tree from
- Files.walkFileTree(source, new SimpleFileVisitor<Path>() {
- private Path buildPath(Path path) {
- return destination.resolve(path);
- }
-
- @Override
- public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
-            // We are now in dir. Strip the source root so we are left with a path relative to it.
-
-            // If we are not walking the root dir itself, we can move the whole directory over,
-            // provided it does not already exist in the destination
- if (configPathRootLevel != dir.getNameCount()) {
- Path subpath = dir.subpath(configPathRootLevel, dir.getNameCount());
- Path path = buildPath(subpath);
- if (!Files.exists(path)) {
- // We just move the structure to new dir
- // we can't do atomic move here since src / dest might be on different mounts?
- move(dir, path);
- // We just ignore sub files from here
- return FileVisitResult.SKIP_SUBTREE;
- }
- }
-
- return FileVisitResult.CONTINUE;
- }
-
- @Override
- public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
- Path subpath = null;
-
- if (configPathRootLevel != file.getNameCount()) {
- subpath = file.subpath(configPathRootLevel, file.getNameCount());
- }
- Path path = buildPath(subpath);
-
- if (!Files.exists(path)) {
- // We just move the new file to new dir
- move(file, path);
- } else if (suffix != null) {
- if (!isSameFile(file, path)) {
- // If it already exists we try to copy this new version appending suffix to its name
- path = path.resolveSibling(path.getFileName().toString().concat(suffix));
- // We just move the file to new dir but with a new name (appended with suffix)
- Files.move(file, path, StandardCopyOption.REPLACE_EXISTING);
- }
- }
-
- return FileVisitResult.CONTINUE;
- }
-
- /**
- * Compares the content of two paths by comparing them
- */
- private boolean isSameFile(Path first, Path second) throws IOException {
- // do quick file size comparison before hashing
- boolean sameFileSize = Files.size(first) == Files.size(second);
- if (!sameFileSize) {
- return false;
- }
-
- byte[] firstBytes = Files.readAllBytes(first);
- byte[] secondBytes = Files.readAllBytes(second);
- return Arrays.equals(firstBytes, secondBytes);
- }
- });
- }
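The subpath arithmetic above is easy to misread; a small illustration of what the visitor builds (paths hypothetical):

    Path source = Paths.get("/tmp/es/src");                  // getNameCount() == 3: "tmp", "es", "src"
    Path file   = Paths.get("/tmp/es/src/conf/logging.yml"); // getNameCount() == 5
    Path sub    = file.subpath(3, file.getNameCount());      // "conf/logging.yml", relative, root stripped
    Path dest   = Paths.get("/tmp/es/dst").resolve(sub);     // "/tmp/es/dst/conf/logging.yml"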
-
- /**
- * Copy recursively a dir to a new location
- * @param source source dir
- * @param destination destination dir
- */
- public static void copyDirectoryRecursively(Path source, Path destination) throws IOException {
- Files.walkFileTree(source, new TreeCopier(source, destination, false));
- }
-
- /**
- * Move or rename a file to a target file. This method supports moving a file from
- * different filesystems (not supported by Files.move()).
- *
- * @param source source file
- * @param destination destination file
- */
- public static void move(Path source, Path destination) throws IOException {
- try {
- // We can't use atomic move here since source & target can be on different filesystems.
- Files.move(source, destination);
- } catch (DirectoryNotEmptyException e) {
- Files.walkFileTree(source, new TreeCopier(source, destination, true));
- }
- }
-
- // TODO: note that this will fail if source and target are on different NIO.2 filesystems.
-
- static class TreeCopier extends SimpleFileVisitor<Path> {
- private final Path source;
- private final Path target;
- private final boolean delete;
-
- TreeCopier(Path source, Path target, boolean delete) {
- this.source = source;
- this.target = target;
- this.delete = delete;
- }
-
- @Override
- public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) {
- Path newDir = target.resolve(source.relativize(dir));
- try {
- Files.copy(dir, newDir);
- } catch (FileAlreadyExistsException x) {
- // We ignore this
- } catch (IOException x) {
- return SKIP_SUBTREE;
- }
- return CONTINUE;
- }
-
- @Override
- public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
- if (delete) {
- IOUtils.rm(dir);
- }
- return CONTINUE;
- }
-
- @Override
- public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
- Path newFile = target.resolve(source.relativize(file));
- try {
- Files.copy(file, newFile);
- if (delete) {
- Files.deleteIfExists(file);
- }
- } catch (IOException x) {
- // We ignore this
- }
- return CONTINUE;
- }
- }
-
- /**
* Returns an array of all files in the given directory matching.
*/
public static Path[] files(Path from, DirectoryStream.Filter<Path> filter) throws IOException {
diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/NotSerializableExceptionWrapper.java b/core/src/main/java/org/elasticsearch/common/io/stream/NotSerializableExceptionWrapper.java
index 8252fb6d97..ec83546f57 100644
--- a/core/src/main/java/org/elasticsearch/common/io/stream/NotSerializableExceptionWrapper.java
+++ b/core/src/main/java/org/elasticsearch/common/io/stream/NotSerializableExceptionWrapper.java
@@ -38,7 +38,8 @@ public final class NotSerializableExceptionWrapper extends ElasticsearchExceptio
private final RestStatus status;
public NotSerializableExceptionWrapper(Throwable other) {
- super(other.getMessage(), other.getCause());
+ super(ElasticsearchException.getExceptionName(other) +
+ ": " + other.getMessage(), other.getCause());
this.name = ElasticsearchException.getExceptionName(other);
this.status = ExceptionsHelper.status(other);
setStackTrace(other.getStackTrace());
diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java
index 1e01d4c36f..02e937dbd8 100644
--- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java
+++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java
@@ -37,7 +37,7 @@ import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
-import org.elasticsearch.search.rescore.RescoreBuilder.Rescorer;
+import org.elasticsearch.search.rescore.RescoreBuilder;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
@@ -61,7 +61,6 @@ import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
-import java.util.function.Function;
import java.util.function.Supplier;
import static org.elasticsearch.ElasticsearchException.readException;
@@ -678,10 +677,10 @@ public abstract class StreamInput extends InputStream {
}
/**
- * Reads a {@link QueryBuilder} from the current stream
+ * Reads a {@link RescoreBuilder} from the current stream
*/
- public Rescorer readRescorer() throws IOException {
- return readNamedWriteable(Rescorer.class);
+ public RescoreBuilder<?> readRescorer() throws IOException {
+ return readNamedWriteable(RescoreBuilder.class);
}
/**
diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
index 74c7acfa2a..0863717a5a 100644
--- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
+++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
@@ -36,7 +36,7 @@ import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
-import org.elasticsearch.search.rescore.RescoreBuilder.Rescorer;
+import org.elasticsearch.search.rescore.RescoreBuilder;
import org.joda.time.ReadableInstant;
import java.io.EOFException;
@@ -679,9 +679,9 @@ public abstract class StreamOutput extends OutputStream {
}
/**
- * Writes a {@link Rescorer} to the current stream
+ * Writes a {@link RescoreBuilder} to the current stream
*/
- public void writeRescorer(Rescorer rescorer) throws IOException {
+ public void writeRescorer(RescoreBuilder<?> rescorer) throws IOException {
writeNamedWriteable(rescorer);
}
}
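The pair stays symmetric: any concrete RescoreBuilder registered as a NamedWriteable round-trips through the stream. A sketch, assuming out and in are a connected StreamOutput/StreamInput pair and rescoreBuilder is a registered implementation:

    out.writeRescorer(rescoreBuilder);          // writes the NamedWriteable name plus the body
    RescoreBuilder<?> copy = in.readRescorer(); // resolves the name via the NamedWriteableRegistry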
diff --git a/core/src/main/java/org/elasticsearch/common/lease/Releasable.java b/core/src/main/java/org/elasticsearch/common/lease/Releasable.java
index 29ee1d59e6..a86d812016 100644
--- a/core/src/main/java/org/elasticsearch/common/lease/Releasable.java
+++ b/core/src/main/java/org/elasticsearch/common/lease/Releasable.java
@@ -21,10 +21,12 @@ package org.elasticsearch.common.lease;
import org.elasticsearch.ElasticsearchException;
+import java.io.Closeable;
+
/**
* Specialization of {@link AutoCloseable} that may only throw an {@link ElasticsearchException}.
*/
-public interface Releasable extends AutoCloseable {
+public interface Releasable extends Closeable {
@Override
void close();
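Because close() is narrowed to throw nothing checked, a Releasable remains a functional interface and works directly in try-with-resources; a minimal sketch:

    Releasable releasable = () -> { /* free the underlying resource */ };
    try (Releasable r = releasable) {
        // close() runs on exit and may only throw an unchecked ElasticsearchException
    }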
diff --git a/core/src/main/java/org/elasticsearch/common/lease/Releasables.java b/core/src/main/java/org/elasticsearch/common/lease/Releasables.java
index e91bc5c0f7..5e2d5af522 100644
--- a/core/src/main/java/org/elasticsearch/common/lease/Releasables.java
+++ b/core/src/main/java/org/elasticsearch/common/lease/Releasables.java
@@ -19,38 +19,24 @@
package org.elasticsearch.common.lease;
+import org.apache.lucene.util.IOUtils;
+
+import java.io.IOException;
import java.util.Arrays;
/** Utility methods to work with {@link Releasable}s. */
public enum Releasables {
;
- private static void rethrow(Throwable t) {
- if (t instanceof RuntimeException) {
- throw (RuntimeException) t;
- }
- if (t instanceof Error) {
- throw (Error) t;
- }
- throw new RuntimeException(t);
- }
-
private static void close(Iterable<? extends Releasable> releasables, boolean ignoreException) {
- Throwable th = null;
- for (Releasable releasable : releasables) {
- if (releasable != null) {
- try {
- releasable.close();
- } catch (Throwable t) {
- if (th == null) {
- th = t;
- }
- }
+ try {
+            // this does the right thing with respect to adding suppressed exceptions and not wrapping errors etc.
+ IOUtils.close(releasables);
+ } catch (Throwable t) {
+ if (ignoreException == false) {
+ IOUtils.reThrowUnchecked(t);
}
}
- if (th != null && !ignoreException) {
- rethrow(th);
- }
}
/** Release the provided {@link Releasable}s. */
@@ -99,25 +85,11 @@ public enum Releasables {
* </pre>
*/
public static Releasable wrap(final Iterable<Releasable> releasables) {
- return new Releasable() {
-
- @Override
- public void close() {
- Releasables.close(releasables);
- }
-
- };
+ return () -> close(releasables);
}
/** @see #wrap(Iterable) */
public static Releasable wrap(final Releasable... releasables) {
- return new Releasable() {
-
- @Override
- public void close() {
- Releasables.close(releasables);
- }
-
- };
+ return () -> close(releasables);
}
}
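Usage is unchanged by the lambda refactor; wrapped resources are still released together. A sketch:

    Releasable first  = () -> { /* free first resource */ };
    Releasable second = () -> { /* free second resource */ };
    try (Releasable both = Releasables.wrap(first, second)) {
        // both resources are closed (secondary failures suppressed) when this block exits
    }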
diff --git a/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java b/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java
index a0645c6a4d..4fdde3db89 100644
--- a/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java
+++ b/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java
@@ -22,12 +22,23 @@ package org.elasticsearch.common.logging;
import org.elasticsearch.common.logging.jdk.JdkESLoggerFactory;
import org.elasticsearch.common.logging.log4j.Log4jESLoggerFactory;
import org.elasticsearch.common.logging.slf4j.Slf4jESLoggerFactory;
+import org.elasticsearch.common.settings.AbstractScopedSettings;
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Settings;
+
+import java.util.Locale;
+import java.util.Map;
+import java.util.function.Consumer;
+import java.util.regex.Pattern;
/**
* Factory to get {@link ESLogger}s
*/
public abstract class ESLoggerFactory {
+ public static final Setting<LogLevel> LOG_DEFAULT_LEVEL_SETTING = new Setting<>("logger.level", LogLevel.INFO.name(), LogLevel::parse, false, Setting.Scope.CLUSTER);
+ public static final Setting<LogLevel> LOG_LEVEL_SETTING = Setting.dynamicKeySetting("logger.", LogLevel.INFO.name(), LogLevel::parse, true, Setting.Scope.CLUSTER);
+
private static volatile ESLoggerFactory defaultFactory = new JdkESLoggerFactory();
static {
@@ -85,4 +96,11 @@ public abstract class ESLoggerFactory {
protected abstract ESLogger rootLogger();
protected abstract ESLogger newInstance(String prefix, String name);
+
+ public enum LogLevel {
+ WARN, TRACE, INFO, DEBUG, ERROR;
+ public static LogLevel parse(String level) {
+ return valueOf(level.toUpperCase(Locale.ROOT));
+ }
+ }
}
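parse is case-insensitive thanks to Locale.ROOT; for example:

    ESLoggerFactory.LogLevel level = ESLoggerFactory.LogLevel.parse("debug"); // -> LogLevel.DEBUG
    // an unknown name such as "verbose" makes Enum.valueOf throw IllegalArgumentException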
diff --git a/core/src/main/java/org/elasticsearch/common/logging/Loggers.java b/core/src/main/java/org/elasticsearch/common/logging/Loggers.java
index 1dafcff62d..1447959176 100644
--- a/core/src/main/java/org/elasticsearch/common/logging/Loggers.java
+++ b/core/src/main/java/org/elasticsearch/common/logging/Loggers.java
@@ -59,16 +59,16 @@ public class Loggers {
}
public static ESLogger getLogger(Class clazz, Settings settings, ShardId shardId, String... prefixes) {
- return getLogger(clazz, settings, shardId.index(), asArrayList(Integer.toString(shardId.id()), prefixes).toArray(new String[0]));
+ return getLogger(clazz, settings, shardId.getIndex(), asArrayList(Integer.toString(shardId.id()), prefixes).toArray(new String[0]));
}
/** Just like {@link #getLogger(Class, org.elasticsearch.common.settings.Settings,ShardId,String...)} but String loggerName instead of Class. */
public static ESLogger getLogger(String loggerName, Settings settings, ShardId shardId, String... prefixes) {
- return getLogger(loggerName, settings, asArrayList(shardId.index().name(), Integer.toString(shardId.id()), prefixes).toArray(new String[0]));
+ return getLogger(loggerName, settings, asArrayList(shardId.getIndexName(), Integer.toString(shardId.id()), prefixes).toArray(new String[0]));
}
public static ESLogger getLogger(Class clazz, Settings settings, Index index, String... prefixes) {
- return getLogger(clazz, settings, asArrayList(SPACE, index.name(), prefixes).toArray(new String[0]));
+ return getLogger(clazz, settings, asArrayList(SPACE, index.getName(), prefixes).toArray(new String[0]));
}
public static ESLogger getLogger(Class clazz, Settings settings, String... prefixes) {
diff --git a/core/src/main/java/org/elasticsearch/common/logging/log4j/TerminalAppender.java b/core/src/main/java/org/elasticsearch/common/logging/log4j/TerminalAppender.java
index 3c60c44d3e..6e62606054 100644
--- a/core/src/main/java/org/elasticsearch/common/logging/log4j/TerminalAppender.java
+++ b/core/src/main/java/org/elasticsearch/common/logging/log4j/TerminalAppender.java
@@ -25,7 +25,7 @@ import org.apache.log4j.spi.LoggingEvent;
import org.elasticsearch.common.cli.Terminal;
/**
- * TerminalAppender logs event to Terminal.DEFAULT. It is used for example by the PluginManagerCliParser.
+ * TerminalAppender logs events to Terminal.DEFAULT. It is used for example by the PluginCli.
* */
public class TerminalAppender extends AppenderSkeleton {
@Override
diff --git a/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java b/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java
index 92aa02ba00..146fb7ba05 100644
--- a/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java
+++ b/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java
@@ -64,7 +64,7 @@ public final class ShardCoreKeyMap {
throw new IllegalArgumentException("Could not extract shard id from " + reader);
}
final Object coreKey = reader.getCoreCacheKey();
- final String index = shardId.getIndex();
+ final String index = shardId.getIndexName();
synchronized (this) {
if (coreKeyToShard.put(coreKey, shardId) == null) {
Set<Object> objects = indexToCoreKey.get(index);
diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java b/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java
index 01184d1cff..73c3fc9400 100644
--- a/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java
+++ b/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java
@@ -117,12 +117,6 @@ public class Queries {
if (minimumShouldMatch == null) {
return query;
}
- // Queries with a single word expanded with synonyms
- // have their coordination factor disabled (@see org.apache.lucene.util.QueryBuilder#analyzeBoolean()).
- // minimumShouldMatch should not be applicable in such case.
- if (query.isCoordDisabled()) {
- return query;
- }
int optionalClauses = 0;
for (BooleanClause c : query.clauses()) {
if (c.getOccur() == BooleanClause.Occur.SHOULD) {
diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java
index a1e8261e63..49d20d88b9 100644
--- a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java
+++ b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java
@@ -22,13 +22,14 @@ package org.elasticsearch.common.network;
import java.util.Arrays;
import java.util.List;
-import org.elasticsearch.client.support.Headers;
import org.elasticsearch.client.transport.TransportClientNodesService;
import org.elasticsearch.client.transport.support.TransportProxyClient;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.settings.Setting.Scope;
import org.elasticsearch.common.util.ExtensionPoint;
import org.elasticsearch.http.HttpServer;
import org.elasticsearch.http.HttpServerTransport;
@@ -116,6 +117,10 @@ import org.elasticsearch.rest.action.get.RestGetSourceAction;
import org.elasticsearch.rest.action.get.RestHeadAction;
import org.elasticsearch.rest.action.get.RestMultiGetAction;
import org.elasticsearch.rest.action.index.RestIndexAction;
+import org.elasticsearch.rest.action.ingest.RestDeletePipelineAction;
+import org.elasticsearch.rest.action.ingest.RestGetPipelineAction;
+import org.elasticsearch.rest.action.ingest.RestPutPipelineAction;
+import org.elasticsearch.rest.action.ingest.RestSimulatePipelineAction;
import org.elasticsearch.rest.action.main.RestMainAction;
import org.elasticsearch.rest.action.percolate.RestMultiPercolateAction;
import org.elasticsearch.rest.action.percolate.RestPercolateAction;
@@ -150,7 +155,7 @@ public class NetworkModule extends AbstractModule {
public static final String NETTY_TRANSPORT = "netty";
public static final String HTTP_TYPE_KEY = "http.type";
- public static final String HTTP_ENABLED = "http.enabled";
+ public static final Setting<Boolean> HTTP_ENABLED = Setting.boolSetting("http.enabled", true, false, Scope.CLUSTER);
private static final List<Class<? extends RestHandler>> builtinRestHandlers = Arrays.asList(
RestMainAction.class,
@@ -256,7 +261,13 @@ public class NetworkModule extends AbstractModule {
RestCatAction.class,
// Tasks API
- RestListTasksAction.class
+ RestListTasksAction.class,
+
+ // Ingest API
+ RestPutPipelineAction.class,
+ RestGetPipelineAction.class,
+ RestDeletePipelineAction.class,
+ RestSimulatePipelineAction.class
);
private static final List<Class<? extends AbstractCatAction>> builtinCatHandlers = Arrays.asList(
@@ -364,11 +375,10 @@ public class NetworkModule extends AbstractModule {
transportTypes.bindType(binder(), settings, TRANSPORT_TYPE_KEY, defaultTransport);
if (transportClient) {
- bind(Headers.class).asEagerSingleton();
bind(TransportProxyClient.class).asEagerSingleton();
bind(TransportClientNodesService.class).asEagerSingleton();
} else {
- if (settings.getAsBoolean(HTTP_ENABLED, true)) {
+ if (HTTP_ENABLED.get(settings)) {
bind(HttpServer.class).asEagerSingleton();
httpTransportTypes.bindType(binder(), settings, HTTP_TYPE_KEY, NETTY_TRANSPORT);
}
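The typed setting replaces the ad-hoc string lookup; a sketch of both styles for contrast (the old one is what the removed line did):

    Settings settings = Settings.builder().put("http.enabled", false).build();
    boolean oldStyle = settings.getAsBoolean("http.enabled", true); // pre-refactor lookup with an inline default
    boolean enabled  = NetworkModule.HTTP_ENABLED.get(settings);    // typed lookup, default lives on the Setting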
diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkService.java b/core/src/main/java/org/elasticsearch/common/network/NetworkService.java
index 835a35d238..1debc6960a 100644
--- a/core/src/main/java/org/elasticsearch/common/network/NetworkService.java
+++ b/core/src/main/java/org/elasticsearch/common/network/NetworkService.java
@@ -19,7 +19,9 @@
package org.elasticsearch.common.network;
+import org.elasticsearch.common.Strings;
import org.elasticsearch.common.component.AbstractComponent;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
@@ -41,31 +43,31 @@ public class NetworkService extends AbstractComponent {
/** By default, we bind to loopback interfaces */
public static final String DEFAULT_NETWORK_HOST = "_local_";
- private static final String GLOBAL_NETWORK_HOST_SETTING = "network.host";
- private static final String GLOBAL_NETWORK_BINDHOST_SETTING = "network.bind_host";
- private static final String GLOBAL_NETWORK_PUBLISHHOST_SETTING = "network.publish_host";
+ public static final Setting<List<String>> GLOBAL_NETWORK_HOST_SETTING = Setting.listSetting("network.host", Arrays.asList(DEFAULT_NETWORK_HOST),
+ s -> s, false, Setting.Scope.CLUSTER);
+ public static final Setting<List<String>> GLOBAL_NETWORK_BINDHOST_SETTING = Setting.listSetting("network.bind_host", GLOBAL_NETWORK_HOST_SETTING,
+ s -> s, false, Setting.Scope.CLUSTER);
+ public static final Setting<List<String>> GLOBAL_NETWORK_PUBLISHHOST_SETTING = Setting.listSetting("network.publish_host", GLOBAL_NETWORK_HOST_SETTING,
+ s -> s, false, Setting.Scope.CLUSTER);
+ public static final Setting<Boolean> NETWORK_SERVER = Setting.boolSetting("network.server", true, false, Setting.Scope.CLUSTER);
public static final class TcpSettings {
- public static final String TCP_NO_DELAY = "network.tcp.no_delay";
- public static final String TCP_KEEP_ALIVE = "network.tcp.keep_alive";
- public static final String TCP_REUSE_ADDRESS = "network.tcp.reuse_address";
- public static final String TCP_SEND_BUFFER_SIZE = "network.tcp.send_buffer_size";
- public static final String TCP_RECEIVE_BUFFER_SIZE = "network.tcp.receive_buffer_size";
- public static final String TCP_BLOCKING = "network.tcp.blocking";
- public static final String TCP_BLOCKING_SERVER = "network.tcp.blocking_server";
- public static final String TCP_BLOCKING_CLIENT = "network.tcp.blocking_client";
- public static final String TCP_CONNECT_TIMEOUT = "network.tcp.connect_timeout";
-
- public static final ByteSizeValue TCP_DEFAULT_SEND_BUFFER_SIZE = null;
- public static final ByteSizeValue TCP_DEFAULT_RECEIVE_BUFFER_SIZE = null;
- public static final TimeValue TCP_DEFAULT_CONNECT_TIMEOUT = new TimeValue(30, TimeUnit.SECONDS);
+ public static final Setting<Boolean> TCP_NO_DELAY = Setting.boolSetting("network.tcp.no_delay", true, false, Setting.Scope.CLUSTER);
+ public static final Setting<Boolean> TCP_KEEP_ALIVE = Setting.boolSetting("network.tcp.keep_alive", true, false, Setting.Scope.CLUSTER);
+ public static final Setting<Boolean> TCP_REUSE_ADDRESS = Setting.boolSetting("network.tcp.reuse_address", NetworkUtils.defaultReuseAddress(), false, Setting.Scope.CLUSTER);
+ public static final Setting<ByteSizeValue> TCP_SEND_BUFFER_SIZE = Setting.byteSizeSetting("network.tcp.send_buffer_size", new ByteSizeValue(-1), false, Setting.Scope.CLUSTER);
+ public static final Setting<ByteSizeValue> TCP_RECEIVE_BUFFER_SIZE = Setting.byteSizeSetting("network.tcp.receive_buffer_size", new ByteSizeValue(-1), false, Setting.Scope.CLUSTER);
+ public static final Setting<Boolean> TCP_BLOCKING = Setting.boolSetting("network.tcp.blocking", false, false, Setting.Scope.CLUSTER);
+ public static final Setting<Boolean> TCP_BLOCKING_SERVER = Setting.boolSetting("network.tcp.blocking_server", TCP_BLOCKING, false, Setting.Scope.CLUSTER);
+ public static final Setting<Boolean> TCP_BLOCKING_CLIENT = Setting.boolSetting("network.tcp.blocking_client", TCP_BLOCKING, false, Setting.Scope.CLUSTER);
+ public static final Setting<TimeValue> TCP_CONNECT_TIMEOUT = Setting.timeSetting("network.tcp.connect_timeout", new TimeValue(30, TimeUnit.SECONDS), false, Setting.Scope.CLUSTER);
}
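Call sites now read these through get(settings) and inherit the defaults and fallbacks declared above; a sketch, assuming a Settings instance named settings:

    TimeValue connectTimeout = NetworkService.TcpSettings.TCP_CONNECT_TIMEOUT.get(settings); // 30s default
    boolean blockingServer   = NetworkService.TcpSettings.TCP_BLOCKING_SERVER.get(settings); // falls back to TCP_BLOCKING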
/**
* A custom name resolver can support custom lookup keys (my_net_key:ipv4) and also change
* the default inet address used in case no settings is provided.
*/
- public static interface CustomNameResolver {
+ public interface CustomNameResolver {
/**
* Resolves the default value if possible. If not, return <tt>null</tt>.
*/
@@ -94,6 +96,7 @@ public class NetworkService extends AbstractComponent {
/**
* Resolves {@code bindHosts} to a list of internet addresses. The list will
* not contain duplicate addresses.
+ *
* @param bindHosts list of hosts to bind to. this may contain special pseudo-hostnames
* such as _local_ (see the documentation). if it is null, it will be populated
* based on global default settings.
@@ -102,21 +105,22 @@ public class NetworkService extends AbstractComponent {
public InetAddress[] resolveBindHostAddresses(String bindHosts[]) throws IOException {
// first check settings
if (bindHosts == null) {
- bindHosts = settings.getAsArray(GLOBAL_NETWORK_BINDHOST_SETTING, settings.getAsArray(GLOBAL_NETWORK_HOST_SETTING, null));
- }
- // next check any registered custom resolvers
- if (bindHosts == null) {
- for (CustomNameResolver customNameResolver : customNameResolvers) {
- InetAddress addresses[] = customNameResolver.resolveDefault();
- if (addresses != null) {
- return addresses;
+ if (GLOBAL_NETWORK_BINDHOST_SETTING.exists(settings) || GLOBAL_NETWORK_HOST_SETTING.exists(settings)) {
+                // if we have settings, use them (GLOBAL_NETWORK_BINDHOST_SETTING falls back to GLOBAL_NETWORK_HOST_SETTING inline)
+ bindHosts = GLOBAL_NETWORK_BINDHOST_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY);
+ } else {
+ // next check any registered custom resolvers
+ for (CustomNameResolver customNameResolver : customNameResolvers) {
+ InetAddress addresses[] = customNameResolver.resolveDefault();
+ if (addresses != null) {
+ return addresses;
+ }
}
+ // we know it's not here. get the defaults
+ bindHosts = GLOBAL_NETWORK_BINDHOST_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY);
}
}
- // finally, fill with our default
- if (bindHosts == null) {
- bindHosts = new String[] { DEFAULT_NETWORK_HOST };
- }
+
InetAddress addresses[] = resolveInetAddresses(bindHosts);
// try to deal with some (mis)configuration
@@ -138,6 +142,7 @@ public class NetworkService extends AbstractComponent {
* only one address is just a current limitation.
* <p>
* If {@code publishHosts} resolves to more than one address, <b>then one is selected with magic</b>
+ *
* @param publishHosts list of hosts to publish as. this may contain special pseudo-hostnames
* such as _local_ (see the documentation). if it is null, it will be populated
* based on global default settings.
@@ -145,23 +150,23 @@ public class NetworkService extends AbstractComponent {
*/
// TODO: needs to be InetAddress[]
public InetAddress resolvePublishHostAddresses(String publishHosts[]) throws IOException {
- // first check settings
- if (publishHosts == null) {
- publishHosts = settings.getAsArray(GLOBAL_NETWORK_PUBLISHHOST_SETTING, settings.getAsArray(GLOBAL_NETWORK_HOST_SETTING, null));
- }
- // next check any registered custom resolvers
- if (publishHosts == null) {
- for (CustomNameResolver customNameResolver : customNameResolvers) {
- InetAddress addresses[] = customNameResolver.resolveDefault();
- if (addresses != null) {
- return addresses[0];
+ if (publishHosts == null || publishHosts.length == 0) {
+ if (GLOBAL_NETWORK_PUBLISHHOST_SETTING.exists(settings) || GLOBAL_NETWORK_HOST_SETTING.exists(settings)) {
+                // if we have settings, use them (GLOBAL_NETWORK_PUBLISHHOST_SETTING falls back to GLOBAL_NETWORK_HOST_SETTING inline)
+ publishHosts = GLOBAL_NETWORK_PUBLISHHOST_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY);
+ } else {
+ // next check any registered custom resolvers
+ for (CustomNameResolver customNameResolver : customNameResolvers) {
+ InetAddress addresses[] = customNameResolver.resolveDefault();
+ if (addresses != null) {
+ return addresses[0];
+ }
}
+ // we know it's not here. get the defaults
+ publishHosts = GLOBAL_NETWORK_PUBLISHHOST_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY);
}
}
- // finally, fill with our default
- if (publishHosts == null) {
- publishHosts = new String[] { DEFAULT_NETWORK_HOST };
- }
+
InetAddress addresses[] = resolveInetAddresses(publishHosts);
// TODO: allow publishing multiple addresses
// for now... the hack begins
@@ -184,17 +189,17 @@ public class NetworkService extends AbstractComponent {
throw new IllegalArgumentException("publish address: {" + NetworkAddress.format(address) + "} is wildcard, but multiple addresses specified: this makes no sense");
}
}
-
+
// 3. if we end out with multiple publish addresses, select by preference.
// don't warn the user, or they will get confused by bind_host vs publish_host etc.
if (addresses.length > 1) {
List<InetAddress> sorted = new ArrayList<>(Arrays.asList(addresses));
NetworkUtils.sortAddresses(sorted);
- addresses = new InetAddress[] { sorted.get(0) };
+ addresses = new InetAddress[]{sorted.get(0)};
}
return addresses[0];
}
-
+
/** resolves (and deduplicates) host specification */
private InetAddress[] resolveInetAddresses(String hosts[]) throws IOException {
if (hosts.length == 0) {
diff --git a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java
index 73d13b2de2..b30178857e 100644
--- a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java
@@ -31,8 +31,10 @@ import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.concurrent.CopyOnWriteArrayList;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
+import java.util.regex.Pattern;
/**
* A basic setting service that can be used for per-index and per-cluster settings.
@@ -40,38 +42,54 @@ import java.util.function.Consumer;
*/
public abstract class AbstractScopedSettings extends AbstractComponent {
private Settings lastSettingsApplied = Settings.EMPTY;
- private final List<SettingUpdater<?>> settingUpdaters = new ArrayList<>();
- private final Map<String, Setting<?>> complexMatchers = new HashMap<>();
- private final Map<String, Setting<?>> keySettings = new HashMap<>();
+ private final List<SettingUpdater<?>> settingUpdaters = new CopyOnWriteArrayList<>();
+ private final Map<String, Setting<?>> complexMatchers;
+ private final Map<String, Setting<?>> keySettings;
private final Setting.Scope scope;
+ private static final Pattern KEY_PATTERN = Pattern.compile("^(?:[-\\w]+[.])*[-\\w]+$");
+ private static final Pattern GROUP_KEY_PATTERN = Pattern.compile("^(?:[-\\w]+[.])+$");
protected AbstractScopedSettings(Settings settings, Set<Setting<?>> settingsSet, Setting.Scope scope) {
super(settings);
this.lastSettingsApplied = Settings.EMPTY;
this.scope = scope;
- for (Setting<?> entry : settingsSet) {
- addSetting(entry);
+ Map<String, Setting<?>> complexMatchers = new HashMap<>();
+ Map<String, Setting<?>> keySettings = new HashMap<>();
+ for (Setting<?> setting : settingsSet) {
+ if (setting.getScope() != scope) {
+ throw new IllegalArgumentException("Setting must be a " + scope + " setting but was: " + setting.getScope());
+ }
+ if (isValidKey(setting.getKey()) == false && (setting.isGroupSetting() && isValidGroupKey(setting.getKey())) == false) {
+ throw new IllegalArgumentException("illegal settings key: [" + setting.getKey() + "]");
+ }
+ if (setting.hasComplexMatcher()) {
+ complexMatchers.putIfAbsent(setting.getKey(), setting);
+ } else {
+ keySettings.putIfAbsent(setting.getKey(), setting);
+ }
}
+ this.complexMatchers = Collections.unmodifiableMap(complexMatchers);
+ this.keySettings = Collections.unmodifiableMap(keySettings);
}
protected AbstractScopedSettings(Settings nodeSettings, Settings scopeSettings, AbstractScopedSettings other) {
super(nodeSettings);
this.lastSettingsApplied = scopeSettings;
this.scope = other.scope;
- complexMatchers.putAll(other.complexMatchers);
- keySettings.putAll(other.keySettings);
+ complexMatchers = other.complexMatchers;
+ keySettings = other.keySettings;
settingUpdaters.addAll(other.settingUpdaters);
}
- protected final void addSetting(Setting<?> setting) {
- if (setting.getScope() != scope) {
- throw new IllegalArgumentException("Setting must be a " + scope + " setting but was: " + setting.getScope());
- }
- if (setting.hasComplexMatcher()) {
- complexMatchers.putIfAbsent(setting.getKey(), setting);
- } else {
- keySettings.putIfAbsent(setting.getKey(), setting);
- }
+ /**
+     * Returns <code>true</code> iff the given key is a valid settings key, otherwise <code>false</code>
+ */
+ public static boolean isValidKey(String key) {
+ return KEY_PATTERN.matcher(key).matches();
+ }
+
+ private static boolean isValidGroupKey(String key) {
+ return GROUP_KEY_PATTERN.matcher(key).matches();
}
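Concretely, under KEY_PATTERN and GROUP_KEY_PATTERN:

    AbstractScopedSettings.isValidKey("cluster.routing.allocation.enable"); // true
    AbstractScopedSettings.isValidKey("transport.tcp.port");                // true
    AbstractScopedSettings.isValidKey("cluster..enable");                   // false: empty segment
    AbstractScopedSettings.isValidKey("logger.");                           // false as a plain key, but matches GROUP_KEY_PATTERN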
public Setting.Scope getScope() {
@@ -148,7 +166,11 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
if (setting != get(setting.getKey())) {
throw new IllegalArgumentException("Setting is not registered for key [" + setting.getKey() + "]");
}
- this.settingUpdaters.add(setting.newUpdater(consumer, logger, validator));
+ addSettingsUpdater(setting.newUpdater(consumer, logger, validator));
+ }
+
+ synchronized void addSettingsUpdater(SettingUpdater<?> updater) {
+ this.settingUpdaters.add(updater);
}
/**
@@ -166,7 +188,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
if (b != get(b.getKey())) {
throw new IllegalArgumentException("Setting is not registered for key [" + b.getKey() + "]");
}
- this.settingUpdaters.add(Setting.compoundUpdater(consumer, a, b, logger));
+ addSettingsUpdater(Setting.compoundUpdater(consumer, a, b, logger));
}
/**
@@ -270,7 +292,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
}
for (Map.Entry<String, Setting<?>> entry : complexMatchers.entrySet()) {
if (entry.getValue().match(key)) {
- return entry.getValue();
+ return entry.getValue().getConcreteSetting(key);
}
}
return null;
diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java
index 1e764dce42..ea16c6aabd 100644
--- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java
@@ -19,7 +19,14 @@
package org.elasticsearch.common.settings;
import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction;
+import org.elasticsearch.action.support.AutoCreateIndex;
import org.elasticsearch.action.support.DestructiveOperations;
+import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
+import org.elasticsearch.cache.recycler.PageCacheRecycler;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.client.transport.TransportClientNodesService;
+import org.elasticsearch.cluster.ClusterModule;
+import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.InternalClusterInfoService;
import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
import org.elasticsearch.cluster.metadata.MetaData;
@@ -35,69 +42,116 @@ import org.elasticsearch.cluster.routing.allocation.decider.SnapshotInProgressAl
import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
import org.elasticsearch.cluster.service.InternalClusterService;
import org.elasticsearch.common.logging.ESLoggerFactory;
+import org.elasticsearch.common.network.NetworkModule;
+import org.elasticsearch.common.network.NetworkService;
+import org.elasticsearch.common.util.concurrent.EsExecutors;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.discovery.DiscoveryModule;
+import org.elasticsearch.discovery.DiscoveryService;
import org.elasticsearch.discovery.DiscoverySettings;
import org.elasticsearch.discovery.zen.ZenDiscovery;
import org.elasticsearch.discovery.zen.elect.ElectMasterService;
+import org.elasticsearch.discovery.zen.fd.FaultDetection;
+import org.elasticsearch.discovery.zen.ping.unicast.UnicastZenPing;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.env.NodeEnvironment;
+import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.gateway.PrimaryShardAllocator;
+import org.elasticsearch.http.HttpTransportSettings;
+import org.elasticsearch.http.netty.NettyHttpServerTransport;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.store.IndexStoreConfig;
+import org.elasticsearch.indices.analysis.HunspellService;
import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService;
+import org.elasticsearch.indices.cache.query.IndicesQueryCache;
+import org.elasticsearch.indices.cache.request.IndicesRequestCache;
+import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.indices.recovery.RecoverySettings;
+import org.elasticsearch.indices.store.IndicesStore;
import org.elasticsearch.indices.ttl.IndicesTTLService;
+import org.elasticsearch.monitor.fs.FsService;
+import org.elasticsearch.monitor.jvm.JvmGcMonitorService;
+import org.elasticsearch.monitor.jvm.JvmService;
+import org.elasticsearch.monitor.os.OsService;
+import org.elasticsearch.monitor.process.ProcessService;
+import org.elasticsearch.node.Node;
+import org.elasticsearch.node.internal.InternalSettingsPreparer;
+import org.elasticsearch.repositories.fs.FsRepository;
+import org.elasticsearch.repositories.uri.URLRepository;
+import org.elasticsearch.rest.BaseRestHandler;
+import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.transport.TransportSettings;
+import org.elasticsearch.transport.netty.NettyTransport;
+import org.elasticsearch.tribe.TribeService;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
-import java.util.Map;
import java.util.Set;
+import java.util.function.Predicate;
/**
* Encapsulates all valid cluster level settings.
*/
public final class ClusterSettings extends AbstractScopedSettings {
+ public ClusterSettings(Settings nodeSettings, Set<Setting<?>> settingsSet) {
+ super(nodeSettings, settingsSet, Setting.Scope.CLUSTER);
+ addSettingsUpdater(new LoggingSettingUpdater(nodeSettings));
+ }
+
+ private static final class LoggingSettingUpdater implements SettingUpdater<Settings> {
+ final Predicate<String> loggerPredicate = ESLoggerFactory.LOG_LEVEL_SETTING::match;
+ private final Settings settings;
+
+ LoggingSettingUpdater(Settings settings) {
+ this.settings = settings;
+ }
- public ClusterSettings(Settings settings, Set<Setting<?>> settingsSet) {
- super(settings, settingsSet, Setting.Scope.CLUSTER);
+ @Override
+ public boolean hasChanged(Settings current, Settings previous) {
+ return current.filter(loggerPredicate).getAsMap().equals(previous.filter(loggerPredicate).getAsMap()) == false;
}
@Override
- public synchronized Settings applySettings(Settings newSettings) {
- Settings settings = super.applySettings(newSettings);
- try {
- for (Map.Entry<String, String> entry : settings.getAsMap().entrySet()) {
- if (entry.getKey().startsWith("logger.")) {
- String component = entry.getKey().substring("logger.".length());
- if ("_root".equals(component)) {
- ESLoggerFactory.getRootLogger().setLevel(entry.getValue());
+ public Settings getValue(Settings current, Settings previous) {
+ Settings.Builder builder = Settings.builder();
+ builder.put(current.filter(loggerPredicate).getAsMap());
+ for (String key : previous.getAsMap().keySet()) {
+ if (loggerPredicate.test(key) && builder.internalMap().containsKey(key) == false) {
+ if (ESLoggerFactory.LOG_LEVEL_SETTING.getConcreteSetting(key).exists(settings) == false) {
+ builder.putNull(key);
} else {
- ESLoggerFactory.getLogger(component).setLevel(entry.getValue());
+ builder.put(key, ESLoggerFactory.LOG_LEVEL_SETTING.getConcreteSetting(key).get(settings).name());
}
}
}
- } catch (Exception e) {
- logger.warn("failed to refresh settings for [{}]", e, "logger");
+ return builder.build();
}
- return settings;
- }
@Override
- public boolean hasDynamicSetting(String key) {
- return isLoggerSetting(key) || super.hasDynamicSetting(key);
+ public void apply(Settings value, Settings current, Settings previous) {
+ for (String key : value.getAsMap().keySet()) {
+ assert loggerPredicate.test(key);
+ String component = key.substring("logger.".length());
+ if ("_root".equals(component)) {
+ final String rootLevel = value.get(key);
+ ESLoggerFactory.getRootLogger().setLevel(rootLevel == null ? ESLoggerFactory.LOG_DEFAULT_LEVEL_SETTING.get(settings).name() : rootLevel);
+ } else {
+ ESLoggerFactory.getLogger(component).setLevel(value.get(key));
+ }
}
-
- /**
- * Returns <code>true</code> if the settings is a logger setting.
- */
- public boolean isLoggerSetting(String key) {
- return key.startsWith("logger.");
}
+ };
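In effect, a dynamic update such as the following now flows through hasChanged/getValue/apply above instead of the old applySettings override (sketch; the logger name is illustrative):

    Settings update = Settings.builder()
            .put("logger.org.elasticsearch.discovery", "DEBUG") // raise one component's level
            .putNull("logger._root")                            // reset root to logger.level (or its default)
            .build();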
-
- public static Set<Setting<?>> BUILT_IN_CLUSTER_SETTINGS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING,
+ public static Set<Setting<?>> BUILT_IN_CLUSTER_SETTINGS = Collections.unmodifiableSet(new HashSet<>(
+ Arrays.asList(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING,
+ TransportClientNodesService.CLIENT_TRANSPORT_NODES_SAMPLER_INTERVAL, // TODO these transport client settings are kind of odd here and should only be valid if we are a transport client
+ TransportClientNodesService.CLIENT_TRANSPORT_PING_TIMEOUT,
+ TransportClientNodesService.CLIENT_TRANSPORT_IGNORE_CLUSTER_NAME,
AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING,
BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING,
BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING,
@@ -110,8 +164,13 @@ public final class ClusterSettings extends AbstractScopedSettings {
FilterAllocationDecider.CLUSTER_ROUTING_INCLUDE_GROUP_SETTING,
FilterAllocationDecider.CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING,
FilterAllocationDecider.CLUSTER_ROUTING_REQUIRE_GROUP_SETTING,
+ FsRepository.REPOSITORIES_CHUNK_SIZE_SETTING,
+ FsRepository.REPOSITORIES_COMPRESS_SETTING,
+ FsRepository.REPOSITORIES_LOCATION_SETTING,
IndexStoreConfig.INDICES_STORE_THROTTLE_TYPE_SETTING,
IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING,
+ IndicesQueryCache.INDICES_CACHE_QUERY_SIZE_SETTING,
+ IndicesQueryCache.INDICES_CACHE_QUERY_COUNT_SETTING,
IndicesTTLService.INDICES_TTL_INTERVAL_SETTING,
MappingUpdatedAction.INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING,
MetaData.SETTING_READ_ONLY_SETTING,
@@ -139,6 +198,33 @@ public final class ClusterSettings extends AbstractScopedSettings {
DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING,
DiscoverySettings.COMMIT_TIMEOUT_SETTING,
DiscoverySettings.NO_MASTER_BLOCK_SETTING,
+ GatewayService.EXPECTED_DATA_NODES_SETTING,
+ GatewayService.EXPECTED_MASTER_NODES_SETTING,
+ GatewayService.EXPECTED_NODES_SETTING,
+ GatewayService.RECOVER_AFTER_DATA_NODES_SETTING,
+ GatewayService.RECOVER_AFTER_MASTER_NODES_SETTING,
+ GatewayService.RECOVER_AFTER_NODES_SETTING,
+ GatewayService.RECOVER_AFTER_TIME_SETTING,
+ NetworkModule.HTTP_ENABLED,
+ HttpTransportSettings.SETTING_CORS_ALLOW_CREDENTIALS,
+ HttpTransportSettings.SETTING_CORS_ENABLED,
+ HttpTransportSettings.SETTING_CORS_MAX_AGE,
+ HttpTransportSettings.SETTING_HTTP_DETAILED_ERRORS_ENABLED,
+ HttpTransportSettings.SETTING_PIPELINING,
+ HttpTransportSettings.SETTING_CORS_ALLOW_ORIGIN,
+ HttpTransportSettings.SETTING_HTTP_PORT,
+ HttpTransportSettings.SETTING_HTTP_PUBLISH_PORT,
+ HttpTransportSettings.SETTING_PIPELINING_MAX_EVENTS,
+ HttpTransportSettings.SETTING_HTTP_COMPRESSION,
+ HttpTransportSettings.SETTING_HTTP_COMPRESSION_LEVEL,
+ HttpTransportSettings.SETTING_CORS_ALLOW_METHODS,
+ HttpTransportSettings.SETTING_CORS_ALLOW_HEADERS,
+ HttpTransportSettings.SETTING_HTTP_DETAILED_ERRORS_ENABLED,
+ HttpTransportSettings.SETTING_HTTP_MAX_CONTENT_LENGTH,
+ HttpTransportSettings.SETTING_HTTP_MAX_CHUNK_SIZE,
+ HttpTransportSettings.SETTING_HTTP_MAX_HEADER_SIZE,
+ HttpTransportSettings.SETTING_HTTP_MAX_INITIAL_LINE_LENGTH,
+ HttpTransportSettings.SETTING_HTTP_RESET_COOKIES,
HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING,
HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING,
HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING,
@@ -154,9 +240,134 @@ public final class ClusterSettings extends AbstractScopedSettings {
InternalClusterService.CLUSTER_SERVICE_RECONNECT_INTERVAL_SETTING,
HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING,
HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING,
- Transport.TRANSPORT_PROFILES_SETTING,
Transport.TRANSPORT_TCP_COMPRESS,
+ TransportSettings.TRANSPORT_PROFILES_SETTING,
+ TransportSettings.HOST,
+ TransportSettings.PUBLISH_HOST,
+ TransportSettings.BIND_HOST,
+ TransportSettings.PUBLISH_PORT,
+ TransportSettings.PORT,
+ NettyTransport.WORKER_COUNT,
+ NettyTransport.CONNECTIONS_PER_NODE_RECOVERY,
+ NettyTransport.CONNECTIONS_PER_NODE_BULK,
+ NettyTransport.CONNECTIONS_PER_NODE_REG,
+ NettyTransport.CONNECTIONS_PER_NODE_STATE,
+ NettyTransport.CONNECTIONS_PER_NODE_PING,
+ NettyTransport.PING_SCHEDULE,
+ NettyTransport.TCP_BLOCKING_CLIENT,
+ NettyTransport.TCP_CONNECT_TIMEOUT,
+ NettyTransport.NETTY_MAX_CUMULATION_BUFFER_CAPACITY,
+ NettyTransport.NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS,
+ NettyTransport.NETTY_RECEIVE_PREDICTOR_SIZE,
+ NettyTransport.NETTY_RECEIVE_PREDICTOR_MIN,
+ NettyTransport.NETTY_RECEIVE_PREDICTOR_MAX,
+ NetworkService.NETWORK_SERVER,
+ NettyTransport.NETTY_BOSS_COUNT,
+ NettyTransport.TCP_NO_DELAY,
+ NettyTransport.TCP_KEEP_ALIVE,
+ NettyTransport.TCP_REUSE_ADDRESS,
+ NettyTransport.TCP_SEND_BUFFER_SIZE,
+ NettyTransport.TCP_RECEIVE_BUFFER_SIZE,
+ NettyTransport.TCP_BLOCKING_SERVER,
+ NetworkService.GLOBAL_NETWORK_HOST_SETTING,
+ NetworkService.GLOBAL_NETWORK_BINDHOST_SETTING,
+ NetworkService.GLOBAL_NETWORK_PUBLISHHOST_SETTING,
+ NetworkService.TcpSettings.TCP_NO_DELAY,
+ NetworkService.TcpSettings.TCP_KEEP_ALIVE,
+ NetworkService.TcpSettings.TCP_REUSE_ADDRESS,
+ NetworkService.TcpSettings.TCP_SEND_BUFFER_SIZE,
+ NetworkService.TcpSettings.TCP_RECEIVE_BUFFER_SIZE,
+ NetworkService.TcpSettings.TCP_BLOCKING,
+ NetworkService.TcpSettings.TCP_BLOCKING_SERVER,
+ NetworkService.TcpSettings.TCP_BLOCKING_CLIENT,
+ NetworkService.TcpSettings.TCP_CONNECT_TIMEOUT,
IndexSettings.QUERY_STRING_ANALYZE_WILDCARD,
IndexSettings.QUERY_STRING_ALLOW_LEADING_WILDCARD,
- PrimaryShardAllocator.NODE_INITIAL_SHARDS_SETTING)));
+ PrimaryShardAllocator.NODE_INITIAL_SHARDS_SETTING,
+ ScriptService.SCRIPT_CACHE_SIZE_SETTING,
+ IndicesFieldDataCache.INDICES_FIELDDATA_CLEAN_INTERVAL_SETTING,
+ IndicesFieldDataCache.INDICES_FIELDDATA_CACHE_SIZE_KEY,
+ IndicesRequestCache.INDICES_CACHE_QUERY_SIZE,
+ IndicesRequestCache.INDICES_CACHE_QUERY_EXPIRE,
+ IndicesRequestCache.INDICES_CACHE_REQUEST_CLEAN_INTERVAL,
+ HunspellService.HUNSPELL_LAZY_LOAD,
+ HunspellService.HUNSPELL_IGNORE_CASE,
+ HunspellService.HUNSPELL_DICTIONARY_OPTIONS,
+ IndicesStore.INDICES_STORE_DELETE_SHARD_TIMEOUT,
+ Environment.PATH_CONF_SETTING,
+ Environment.PATH_DATA_SETTING,
+ Environment.PATH_HOME_SETTING,
+ Environment.PATH_LOGS_SETTING,
+ Environment.PATH_PLUGINS_SETTING,
+ Environment.PATH_REPO_SETTING,
+ Environment.PATH_SCRIPTS_SETTING,
+ Environment.PATH_SHARED_DATA_SETTING,
+ Environment.PIDFILE_SETTING,
+ DiscoveryService.DISCOVERY_SEED_SETTING,
+ DiscoveryService.INITIAL_STATE_TIMEOUT_SETTING,
+ DiscoveryModule.DISCOVERY_TYPE_SETTING,
+ DiscoveryModule.ZEN_MASTER_SERVICE_TYPE_SETTING,
+ FaultDetection.PING_RETRIES_SETTING,
+ FaultDetection.PING_TIMEOUT_SETTING,
+ FaultDetection.REGISTER_CONNECTION_LISTENER_SETTING,
+ FaultDetection.PING_INTERVAL_SETTING,
+ FaultDetection.CONNECT_ON_NETWORK_DISCONNECT_SETTING,
+ ZenDiscovery.PING_TIMEOUT_SETTING,
+ ZenDiscovery.JOIN_TIMEOUT_SETTING,
+ ZenDiscovery.JOIN_RETRY_ATTEMPTS_SETTING,
+ ZenDiscovery.JOIN_RETRY_DELAY_SETTING,
+ ZenDiscovery.MAX_PINGS_FROM_ANOTHER_MASTER_SETTING,
+ ZenDiscovery.SEND_LEAVE_REQUEST_SETTING,
+ ZenDiscovery.MASTER_ELECTION_FILTER_CLIENT_SETTING,
+ ZenDiscovery.MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT_SETTING,
+ ZenDiscovery.MASTER_ELECTION_FILTER_DATA_SETTING,
+ UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING,
+ UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING,
+ SearchService.DEFAULT_KEEPALIVE_SETTING,
+ SearchService.KEEPALIVE_INTERVAL_SETTING,
+ Node.WRITE_PORTS_FIELD_SETTING,
+ Node.NODE_CLIENT_SETTING,
+ Node.NODE_DATA_SETTING,
+ Node.NODE_MASTER_SETTING,
+ Node.NODE_LOCAL_SETTING,
+ Node.NODE_MODE_SETTING,
+ Node.NODE_INGEST_SETTING,
+ URLRepository.ALLOWED_URLS_SETTING,
+ URLRepository.REPOSITORIES_LIST_DIRECTORIES_SETTING,
+ URLRepository.REPOSITORIES_URL_SETTING,
+ URLRepository.SUPPORTED_PROTOCOLS_SETTING,
+ TransportMasterNodeReadAction.FORCE_LOCAL_SETTING,
+ AutoCreateIndex.AUTO_CREATE_INDEX_SETTING,
+ BaseRestHandler.MULTI_ALLOW_EXPLICIT_INDEX,
+ ClusterName.CLUSTER_NAME_SETTING,
+ Client.CLIENT_TYPE_SETTING_S,
+ InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING,
+ ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING,
+ EsExecutors.PROCESSORS_SETTING,
+ ThreadContext.DEFAULT_HEADERS_SETTING,
+ ESLoggerFactory.LOG_DEFAULT_LEVEL_SETTING,
+ ESLoggerFactory.LOG_LEVEL_SETTING,
+ TribeService.BLOCKS_METADATA_SETTING,
+ TribeService.BLOCKS_WRITE_SETTING,
+ TribeService.BLOCKS_WRITE_INDICES_SETTING,
+ TribeService.BLOCKS_READ_INDICES_SETTING,
+ TribeService.BLOCKS_METADATA_INDICES_SETTING,
+ TribeService.ON_CONFLICT_SETTING,
+ NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING,
+ NodeEnvironment.ENABLE_LUCENE_SEGMENT_INFOS_TRACE_SETTING,
+ NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH,
+ OsService.REFRESH_INTERVAL_SETTING,
+ ProcessService.REFRESH_INTERVAL_SETTING,
+ JvmService.REFRESH_INTERVAL_SETTING,
+ FsService.REFRESH_INTERVAL_SETTING,
+ JvmGcMonitorService.ENABLED_SETTING,
+ JvmGcMonitorService.REFRESH_INTERVAL_SETTING,
+ JvmGcMonitorService.GC_SETTING,
+ PageCacheRecycler.LIMIT_HEAP_SETTING,
+ PageCacheRecycler.WEIGHT_BYTES_SETTING,
+ PageCacheRecycler.WEIGHT_INT_SETTING,
+ PageCacheRecycler.WEIGHT_LONG_SETTING,
+ PageCacheRecycler.WEIGHT_OBJECTS_SETTING,
+ PageCacheRecycler.TYPE_SETTING
+ )));
}
diff --git a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java
index 997ea794c9..86ead8c7ff 100644
--- a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java
@@ -152,4 +152,17 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
public IndexScopedSettings copy(Settings settings, IndexMetaData metaData) {
return new IndexScopedSettings(settings, this, metaData);
}
+
+ public boolean isPrivateSetting(String key) {
+ switch (key) {
+ case IndexMetaData.SETTING_CREATION_DATE:
+ case IndexMetaData.SETTING_INDEX_UUID:
+ case IndexMetaData.SETTING_VERSION_CREATED:
+ case IndexMetaData.SETTING_VERSION_UPGRADED:
+ case MergePolicyConfig.INDEX_MERGE_ENABLED:
+ return true;
+ default:
+ return false;
+ }
+ }
}
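
A minimal sketch of how the new isPrivateSetting hook can be used by update-handling code (the guard method below is hypothetical, not part of this change): private settings such as the index UUID are system-managed and should be rejected when a user tries to set them explicitly.

    // hypothetical guard, assuming an IndexScopedSettings instance obtained from the node
    static void ensureNotPrivate(IndexScopedSettings indexScopedSettings, String key) {
        if (indexScopedSettings.isPrivateSetting(key)) {
            throw new IllegalArgumentException("setting [" + key + "] is private and can't be set by users");
        }
    }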
diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java
index 0fc5b062e6..5c208e7e18 100644
--- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java
@@ -41,6 +41,7 @@ import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.regex.Pattern;
+import java.util.stream.Collectors;
/**
* A setting. Encapsulates typical stuff like default value, parsing, and scope.
@@ -71,7 +72,20 @@ public class Setting<T> extends ToXContentToBytes {
}
/**
+ * Creates a new Setting instance.
+ * @param key the settings key for this setting
+ * @param fallBackSetting a setting to fall back to if the current setting is not set
+ * @param parser a parser that parses the string representation into the setting's data type
+ * @param dynamic true iff this setting can be dynamically updated
+ * @param scope the scope of this setting
+ */
+ public Setting(String key, Setting<T> fallBackSetting, Function<String, T> parser, boolean dynamic, Scope scope) {
+ this(key, fallBackSetting::getRaw, parser, dynamic, scope);
+ }
+
+ /**
* Returns the settings key or a prefix if this setting is a group setting
+ *
* @see #isGroupSetting()
*/
public final String getKey() {
@@ -106,14 +120,22 @@ public class Setting<T> extends ToXContentToBytes {
}
/**
- * Returns the default values string representation for this setting.
+ * Returns the default value string representation for this setting.
* @param settings a settings object, used when this setting's default value depends on another setting
*/
- public final String getDefault(Settings settings) {
+ public final String getDefaultRaw(Settings settings) {
return defaultValue.apply(settings);
}
/**
+ * Returns the default value for this setting.
+ * @param settings a settings object, used when this setting's default value depends on another setting
+ */
+ public final T getDefault(Settings settings) {
+ return parser.apply(getDefaultRaw(settings));
+ }
+
+ /**
* Returns <code>true</code> iff this setting is present in the given settings object. Otherwise <code>false</code>
*/
public final boolean exists(Settings settings) {
@@ -178,6 +200,11 @@ public class Setting<T> extends ToXContentToBytes {
return get(secondary);
}
+ public Setting<T> getConcreteSetting(String key) {
+ assert key.startsWith(this.getKey()) : "was " + key + " expected: " + getKey(); // we use startsWith here since the key might be foo.bar.0 if it's an array
+ return this;
+ }
+
/**
* The settings scope - settings can either be cluster settings or per index settings.
*/
@@ -301,6 +328,10 @@ public class Setting<T> extends ToXContentToBytes {
}, dynamic, scope);
}
+ public static Setting<Integer> intSetting(String key, int defaultValue, int minValue, int maxValue, boolean dynamic, Scope scope) {
+ return new Setting<>(key, (s) -> Integer.toString(defaultValue), (s) -> parseInt(s, minValue, maxValue, key), dynamic, scope);
+ }
+
public static Setting<Integer> intSetting(String key, int defaultValue, int minValue, boolean dynamic, Scope scope) {
return new Setting<>(key, (s) -> Integer.toString(defaultValue), (s) -> parseInt(s, minValue, key), dynamic, scope);
}
@@ -309,11 +340,22 @@ public class Setting<T> extends ToXContentToBytes {
return new Setting<>(key, (s) -> Long.toString(defaultValue), (s) -> parseLong(s, minValue, key), dynamic, scope);
}
+ public static Setting<String> simpleString(String key, boolean dynamic, Scope scope) {
+ return new Setting<>(key, "", Function.identity(), dynamic, scope);
+ }
+
public static int parseInt(String s, int minValue, String key) {
+ return parseInt(s, minValue, Integer.MAX_VALUE, key);
+ }
+
+ public static int parseInt(String s, int minValue, int maxValue, String key) {
int value = Integer.parseInt(s);
if (value < minValue) {
throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue);
}
+ if (value > maxValue) {
+ throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be =< " + maxValue);
+ }
return value;
}
@@ -333,12 +375,24 @@ public class Setting<T> extends ToXContentToBytes {
return new Setting<>(key, (s) -> Boolean.toString(defaultValue), Booleans::parseBooleanExact, dynamic, scope);
}
+ public static Setting<Boolean> boolSetting(String key, Setting<Boolean> fallbackSetting, boolean dynamic, Scope scope) {
+ return new Setting<>(key, fallbackSetting, Booleans::parseBooleanExact, dynamic, scope);
+ }
+
public static Setting<ByteSizeValue> byteSizeSetting(String key, String percentage, boolean dynamic, Scope scope) {
return new Setting<>(key, (s) -> percentage, (s) -> MemorySizeValue.parseBytesSizeValueOrHeapRatio(s, key), dynamic, scope);
}
public static Setting<ByteSizeValue> byteSizeSetting(String key, ByteSizeValue value, boolean dynamic, Scope scope) {
- return new Setting<>(key, (s) -> value.toString(), (s) -> ByteSizeValue.parseBytesSizeValue(s, key), dynamic, scope);
+ return byteSizeSetting(key, (s) -> value.toString(), dynamic, scope);
+ }
+
+ public static Setting<ByteSizeValue> byteSizeSetting(String key, Setting<ByteSizeValue> fallbackSettings, boolean dynamic, Scope scope) {
+ return byteSizeSetting(key, fallbackSettings::getRaw, dynamic, scope);
+ }
+
+ public static Setting<ByteSizeValue> byteSizeSetting(String key, Function<Settings, String> defaultValue, boolean dynamic, Scope scope) {
+ return new Setting<>(key, defaultValue, (s) -> ByteSizeValue.parseBytesSizeValue(s, key), dynamic, scope);
}
public static Setting<TimeValue> positiveTimeSetting(String key, TimeValue defaultValue, boolean dynamic, Scope scope) {
@@ -348,25 +402,15 @@ public class Setting<T> extends ToXContentToBytes {
public static <T> Setting<List<T>> listSetting(String key, List<String> defaultStringValue, Function<String, T> singleValueParser, boolean dynamic, Scope scope) {
return listSetting(key, (s) -> defaultStringValue, singleValueParser, dynamic, scope);
}
+
+ public static <T> Setting<List<T>> listSetting(String key, Setting<List<T>> fallbackSetting, Function<String, T> singleValueParser, boolean dynamic, Scope scope) {
+ return listSetting(key, (s) -> parseableStringToList(fallbackSetting.getRaw(s)), singleValueParser, dynamic, scope);
+ }
+
public static <T> Setting<List<T>> listSetting(String key, Function<Settings, List<String>> defaultStringValue, Function<String, T> singleValueParser, boolean dynamic, Scope scope) {
- Function<String, List<T>> parser = (s) -> {
- try (XContentParser xContentParser = XContentType.JSON.xContent().createParser(s)){
- XContentParser.Token token = xContentParser.nextToken();
- if (token != XContentParser.Token.START_ARRAY) {
- throw new IllegalArgumentException("expected START_ARRAY but got " + token);
- }
- ArrayList<T> list = new ArrayList<>();
- while ((token = xContentParser.nextToken()) !=XContentParser.Token.END_ARRAY) {
- if (token != XContentParser.Token.VALUE_STRING) {
- throw new IllegalArgumentException("expected VALUE_STRING but got " + token);
- }
- list.add(singleValueParser.apply(xContentParser.text()));
- }
- return list;
- } catch (IOException e) {
- throw new IllegalArgumentException("failed to parse array", e);
- }
- };
+ Function<String, List<T>> parser = (s) ->
+ parseableStringToList(s).stream().map(singleValueParser).collect(Collectors.toList());
+
return new Setting<List<T>>(key, (s) -> arrayToParsableString(defaultStringValue.apply(s).toArray(Strings.EMPTY_ARRAY)), parser, dynamic, scope) {
private final Pattern pattern = Pattern.compile(Pattern.quote(key)+"(\\.\\d+)?");
@Override
@@ -387,6 +431,26 @@ public class Setting<T> extends ToXContentToBytes {
};
}
+ private static List<String> parseableStringToList(String parsableString) {
+ try (XContentParser xContentParser = XContentType.JSON.xContent().createParser(parsableString)) {
+ XContentParser.Token token = xContentParser.nextToken();
+ if (token != XContentParser.Token.START_ARRAY) {
+ throw new IllegalArgumentException("expected START_ARRAY but got " + token);
+ }
+ ArrayList<String> list = new ArrayList<>();
+ while ((token = xContentParser.nextToken()) != XContentParser.Token.END_ARRAY) {
+ if (token != XContentParser.Token.VALUE_STRING) {
+ throw new IllegalArgumentException("expected VALUE_STRING but got " + token);
+ }
+ list.add(xContentParser.text());
+ }
+ return list;
+ } catch (IOException e) {
+ throw new IllegalArgumentException("failed to parse array", e);
+ }
+ }
+
private static String arrayToParsableString(String[] array) {
try {
XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent());
@@ -401,8 +465,6 @@ public class Setting<T> extends ToXContentToBytes {
}
}
-
-
public static Setting<Settings> groupSetting(String key, boolean dynamic, Scope scope) {
if (key.endsWith(".") == false) {
throw new IllegalArgumentException("key must end with a '.'");
@@ -481,7 +543,11 @@ public class Setting<T> extends ToXContentToBytes {
}
public static Setting<TimeValue> timeSetting(String key, TimeValue defaultValue, boolean dynamic, Scope scope) {
- return new Setting<>(key, (s) -> defaultValue.toString(), (s) -> TimeValue.parseTimeValue(s, defaultValue, key), dynamic, scope);
+ return new Setting<>(key, (s) -> defaultValue.toString(), (s) -> TimeValue.parseTimeValue(s, key), dynamic, scope);
+ }
+
+ public static Setting<TimeValue> timeSetting(String key, Setting<TimeValue> fallbackSetting, boolean dynamic, Scope scope) {
+ return new Setting<>(key, fallbackSetting::getRaw, (s) -> TimeValue.parseTimeValue(s, key), dynamic, scope);
}
public static Setting<Double> doubleSetting(String key, double defaultValue, double minValue, boolean dynamic, Scope scope) {
@@ -506,4 +572,38 @@ public class Setting<T> extends ToXContentToBytes {
public int hashCode() {
return Objects.hash(key);
}
+
+ /**
+ * This setting type allows validating settings that have the same type and share a common prefix. For instance, feature.${type}=[true|false]
+ * can easily be added with this setting. Yet, dynamic key settings don't support updaters out of the box unless {@link #getConcreteSetting(String)}
+ * is used to pull the updater.
+ */
+ public static <T> Setting<T> dynamicKeySetting(String key, String defaultValue, Function<String, T> parser, boolean dynamic, Scope scope) {
+ return new Setting<T>(key, defaultValue, parser, dynamic, scope) {
+
+ @Override
+ boolean isGroupSetting() {
+ return true;
+ }
+
+ @Override
+ public boolean match(String toTest) {
+ return toTest.startsWith(getKey());
+ }
+
+ @Override
+ AbstractScopedSettings.SettingUpdater<T> newUpdater(Consumer<T> consumer, ESLogger logger, Consumer<T> validator) {
+ throw new UnsupportedOperationException("dynamic settings can't be updated use #getConcreteSetting for updating");
+ }
+
+ @Override
+ public Setting<T> getConcreteSetting(String key) {
+ if (match(key)) {
+ return new Setting<>(key, defaultValue, parser, dynamic, scope);
+ } else {
+ throw new IllegalArgumentException("key must match setting but didn't ["+key +"]");
+ }
+ }
+ };
+ }
}
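
Taken together, the new factory methods let a setting fall back to another setting and be bounded on both ends. A short sketch with invented keys ("pool.size.new" falls back to "pool.size.old" when unset):

    Setting<Integer> OLD = Setting.intSetting("pool.size.old", 4, 1, 64, false, Setting.Scope.CLUSTER);
    Setting<Integer> NEW = new Setting<>("pool.size.new", OLD,
        (s) -> Setting.parseInt(s, 1, 64, "pool.size.new"), true, Setting.Scope.CLUSTER);

    Settings settings = Settings.builder().put("pool.size.old", 8).build();
    assert NEW.get(settings) == 8; // falls back to the legacy key since pool.size.new is unset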
diff --git a/core/src/main/java/org/elasticsearch/common/settings/Settings.java b/core/src/main/java/org/elasticsearch/common/settings/Settings.java
index 601ec7a4bf..8be01a624c 100644
--- a/core/src/main/java/org/elasticsearch/common/settings/Settings.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/Settings.java
@@ -946,7 +946,26 @@ public final class Settings implements ToXContent {
* @param values The values
* @return The builder
*/
public Builder putArray(String setting, String... values) {
+ return putArray(setting, Arrays.asList(values));
+ }
+
+ /**
+ * Sets the setting with the provided setting key and a list of values.
+ *
+ * @param setting The setting key
+ * @param values The values
+ * @return The builder
+ */
+ public Builder putArray(String setting, List<String> values) {
remove(setting);
int counter = 0;
while (true) {
@@ -955,8 +974,8 @@ public final class Settings implements ToXContent {
break;
}
}
- for (int i = 0; i < values.length; i++) {
- put(setting + "." + i, values[i]);
+ for (int i = 0; i < values.size(); i++) {
+ put(setting + "." + i, values.get(i));
}
return this;
}
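
A quick sketch of the new List overload (key and values invented): both variants expand the array into indexed sub-keys, so the two builders below produce identical settings.

    Settings a = Settings.builder().putArray("node.tags", "hot", "eu").build();
    Settings b = Settings.builder().putArray("node.tags", Arrays.asList("hot", "eu")).build();
    // both expand to node.tags.0=hot and node.tags.1=eu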
diff --git a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java
index 8298c0c127..f95cc1f6f6 100644
--- a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java
@@ -24,6 +24,7 @@ import org.elasticsearch.common.inject.AbstractModule;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
+import java.util.function.Predicate;
/**
* A module that binds the provided settings to the {@link Settings} interface.
@@ -54,16 +55,13 @@ public class SettingsModule extends AbstractModule {
final ClusterSettings clusterSettings = new ClusterSettings(settings, new HashSet<>(this.clusterSettings.values()));
// by now we are fully configured, lets check node level settings for unregistered index settings
indexScopedSettings.validate(settings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE));
- // we can't call this method yet since we have not all node level settings registered.
- // yet we can validate the ones we have registered to not have invalid values. this is better than nothing
- // and progress over perfection and we fail as soon as possible.
- // clusterSettings.validate(settings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE.negate()));
- for (Map.Entry<String, String> entry : settings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE.negate()).getAsMap().entrySet()) {
- if (clusterSettings.get(entry.getKey()) != null) {
- clusterSettings.validate(entry.getKey(), settings);
- }
+ Predicate<String> noIndexSettingPredicate = IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE.negate();
+ Predicate<String> noTribePredicate = (s) -> s.startsWith("tribe.") == false;
+ for (Map.Entry<String, String> entry : settings.filter(noTribePredicate.and(noIndexSettingPredicate)).getAsMap().entrySet()) {
+ validateClusterSetting(clusterSettings, entry.getKey(), settings);
}
+ validateTribeSettings(settings, clusterSettings);
bind(Settings.class).toInstance(settings);
bind(SettingsFilter.class).toInstance(settingsFilter);
@@ -88,4 +86,25 @@ public class SettingsModule extends AbstractModule {
}
}
+ public void validateTribeSettings(Settings settings, ClusterSettings clusterSettings) {
+ Map<String, Settings> groups = settings.getGroups("tribe.", true);
+ for (Map.Entry<String, Settings> tribeSettings : groups.entrySet()) {
+ for (Map.Entry<String, String> entry : tribeSettings.getValue().getAsMap().entrySet()) {
+ validateClusterSetting(clusterSettings, entry.getKey(), tribeSettings.getValue());
+ }
+ }
+ }
+
+ private void validateClusterSetting(ClusterSettings clusterSettings, String key, Settings settings) {
+ // We can't validate all node level settings yet since not all of them are registered at this point.
+ // Still, we validate the ones that are registered so they don't carry invalid values. That is better
+ // than nothing, progress over perfection, and we fail as soon as possible.
+ if (clusterSettings.get(key) != null) {
+ clusterSettings.validate(key, settings);
+ } else if (AbstractScopedSettings.isValidKey(key) == false) {
+ throw new IllegalArgumentException("illegal settings key: [" + key + "]");
+ }
+ }
+
}
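
A sketch of what the tribe validation now catches (tribe name and value invented): settings nested under tribe.<name>. are checked against the same registered cluster settings, so a malformed value fails node construction early instead of surfacing later inside the tribe node.

    Settings settings = Settings.builder()
        .put("tribe.t1.discovery.zen.ping_timeout", "not-a-time") // invalid time value
        .build();
    // validateTribeSettings(settings, clusterSettings) throws IllegalArgumentException here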
diff --git a/core/src/main/java/org/elasticsearch/common/transport/PortsRange.java b/core/src/main/java/org/elasticsearch/common/transport/PortsRange.java
index e1e4571eda..4f5a3966d4 100644
--- a/core/src/main/java/org/elasticsearch/common/transport/PortsRange.java
+++ b/core/src/main/java/org/elasticsearch/common/transport/PortsRange.java
@@ -35,6 +35,10 @@ public class PortsRange {
this.portRange = portRange;
}
+ public String getPortRangeString() {
+ return portRange;
+ }
+
public int[] ports() throws NumberFormatException {
final IntArrayList ports = new IntArrayList();
iterate(new PortCallback() {
diff --git a/core/src/main/java/org/elasticsearch/common/unit/DistanceUnit.java b/core/src/main/java/org/elasticsearch/common/unit/DistanceUnit.java
index feebd93c5a..d0e91646c0 100644
--- a/core/src/main/java/org/elasticsearch/common/unit/DistanceUnit.java
+++ b/core/src/main/java/org/elasticsearch/common/unit/DistanceUnit.java
@@ -22,6 +22,7 @@ package org.elasticsearch.common.unit;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
import java.io.IOException;
@@ -32,7 +33,7 @@ import java.io.IOException;
* the earth ellipsoid defined in {@link GeoUtils}. The default unit used within
* this project is <code>METERS</code> which is defined by <code>DEFAULT</code>
*/
-public enum DistanceUnit {
+public enum DistanceUnit implements Writeable<DistanceUnit> {
INCH(0.0254, "in", "inch"),
YARD(0.9144, "yd", "yards"),
FEET(0.3048, "ft", "feet"),
@@ -322,4 +323,24 @@ public enum DistanceUnit {
return new Distance(Double.parseDouble(distance), defaultUnit);
}
}
+
+ private static final DistanceUnit PROTOTYPE = DEFAULT;
+
+ @Override
+ public DistanceUnit readFrom(StreamInput in) throws IOException {
+ int ordinal = in.readVInt();
+ if (ordinal < 0 || ordinal >= values().length) {
+ throw new IOException("Unknown DistanceUnit ordinal [" + ordinal + "]");
+ }
+ return values()[ordinal];
+ }
+
+ public static DistanceUnit readUnitFrom(StreamInput in) throws IOException {
+ return PROTOTYPE.readFrom(in);
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeVInt(this.ordinal());
+ }
}
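
DistanceUnit now serializes by ordinal. A minimal round-trip sketch (StreamInput.wrap over the written bytes is assumed to be available, as in the tests of this codebase):

    BytesStreamOutput out = new BytesStreamOutput();
    DistanceUnit.KILOMETERS.writeTo(out);
    DistanceUnit unit = DistanceUnit.readUnitFrom(StreamInput.wrap(out.bytes().toBytes()));
    assert unit == DistanceUnit.KILOMETERS;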
diff --git a/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java b/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java
index b1081c2c62..bbe1fbbf05 100644
--- a/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java
+++ b/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java
@@ -250,6 +250,12 @@ public class TimeValue implements Streamable {
}
}
+ public static TimeValue parseTimeValue(String sValue, String settingName) {
+ Objects.requireNonNull(settingName);
+ Objects.requireNonNull(sValue);
+ return parseTimeValue(sValue, null, settingName);
+ }
+
public static TimeValue parseTimeValue(String sValue, TimeValue defaultValue, String settingName) {
settingName = Objects.requireNonNull(settingName);
if (sValue == null) {
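
The new two-argument overload has no default value to fall back to, so both arguments are required up front. A short sketch:

    TimeValue t = TimeValue.parseTimeValue("30s", "my.setting"); // 30 seconds
    TimeValue bad = TimeValue.parseTimeValue(null, "my.setting"); // throws NullPointerException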
diff --git a/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java b/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java
deleted file mode 100644
index 8d04900382..0000000000
--- a/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java
+++ /dev/null
@@ -1,383 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.common.util;
-
-import org.apache.lucene.index.CheckIndex;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.Lock;
-import org.apache.lucene.store.LockObtainFailedException;
-import org.apache.lucene.store.SimpleFSDirectory;
-import org.apache.lucene.util.IOUtils;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.io.FileSystemUtils;
-import org.elasticsearch.common.io.stream.BytesStreamOutput;
-import org.elasticsearch.common.logging.ESLogger;
-import org.elasticsearch.common.logging.Loggers;
-import org.elasticsearch.common.unit.ByteSizeValue;
-import org.elasticsearch.env.NodeEnvironment;
-import org.elasticsearch.env.ShardLock;
-import org.elasticsearch.gateway.MetaDataStateFormat;
-import org.elasticsearch.index.Index;
-import org.elasticsearch.index.shard.ShardId;
-import org.elasticsearch.index.shard.ShardPath;
-import org.elasticsearch.index.shard.ShardStateMetaData;
-
-import java.io.IOException;
-import java.io.PrintStream;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.DirectoryStream;
-import java.nio.file.FileStore;
-import java.nio.file.FileVisitResult;
-import java.nio.file.FileVisitor;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.StandardCopyOption;
-import java.nio.file.attribute.BasicFileAttributes;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-/**
- */
-public class MultiDataPathUpgrader {
-
- private final NodeEnvironment nodeEnvironment;
- private final ESLogger logger = Loggers.getLogger(getClass());
-
-
- /**
- * Creates a new upgrader instance
- * @param nodeEnvironment the node env to operate on.
- *
- */
- public MultiDataPathUpgrader(NodeEnvironment nodeEnvironment) {
- this.nodeEnvironment = nodeEnvironment;
- }
-
-
- /**
- * Upgrades the given shard Id from multiple shard paths into the given target path.
- *
- * @see #pickShardPath(org.elasticsearch.index.shard.ShardId)
- */
- public void upgrade(ShardId shard, ShardPath targetPath) throws IOException {
- final Path[] paths = nodeEnvironment.availableShardPaths(shard); // custom data path doesn't need upgrading
- if (isTargetPathConfigured(paths, targetPath) == false) {
- throw new IllegalArgumentException("shard path must be one of the shards data paths");
- }
- assert needsUpgrading(shard) : "Should not upgrade a path that needs no upgrading";
- logger.info("{} upgrading multi data dir to {}", shard, targetPath.getDataPath());
- final ShardStateMetaData loaded = ShardStateMetaData.FORMAT.loadLatestState(logger, paths);
- if (loaded == null) {
- throw new IllegalStateException(shard + " no shard state found in any of: " + Arrays.toString(paths) + " please check and remove them if possible");
- }
- logger.info("{} loaded shard state {}", shard, loaded);
-
- ShardStateMetaData.FORMAT.write(loaded, loaded.version, targetPath.getShardStatePath());
- Files.createDirectories(targetPath.resolveIndex());
- try (SimpleFSDirectory directory = new SimpleFSDirectory(targetPath.resolveIndex())) {
- try (final Lock lock = directory.obtainLock(IndexWriter.WRITE_LOCK_NAME)) {
- upgradeFiles(shard, targetPath, targetPath.resolveIndex(), ShardPath.INDEX_FOLDER_NAME, paths);
- } catch (LockObtainFailedException ex) {
- throw new IllegalStateException("Can't obtain lock on " + targetPath.resolveIndex(), ex);
- }
-
- }
-
-
- upgradeFiles(shard, targetPath, targetPath.resolveTranslog(), ShardPath.TRANSLOG_FOLDER_NAME, paths);
-
- logger.info("{} wipe upgraded directories", shard);
- for (Path path : paths) {
- if (path.equals(targetPath.getShardStatePath()) == false) {
- logger.info("{} wipe shard directories: [{}]", shard, path);
- IOUtils.rm(path);
- }
- }
-
- if (FileSystemUtils.files(targetPath.resolveIndex()).length == 0) {
- throw new IllegalStateException("index folder [" + targetPath.resolveIndex() + "] is empty");
- }
-
- if (FileSystemUtils.files(targetPath.resolveTranslog()).length == 0) {
- throw new IllegalStateException("translog folder [" + targetPath.resolveTranslog() + "] is empty");
- }
- }
-
- /**
- * Runs check-index on the target shard and throws an exception if it failed
- */
- public void checkIndex(ShardPath targetPath) throws IOException {
- BytesStreamOutput os = new BytesStreamOutput();
- PrintStream out = new PrintStream(os, false, StandardCharsets.UTF_8.name());
- try (Directory directory = new SimpleFSDirectory(targetPath.resolveIndex());
- final CheckIndex checkIndex = new CheckIndex(directory)) {
- checkIndex.setInfoStream(out);
- CheckIndex.Status status = checkIndex.checkIndex();
- out.flush();
- if (!status.clean) {
- logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8));
- throw new IllegalStateException("index check failure");
- }
- }
- }
-
- /**
- * Returns true iff the given shard needs upgrading.
- */
- public boolean needsUpgrading(ShardId shard) {
- final Path[] paths = nodeEnvironment.availableShardPaths(shard);
- // custom data path doesn't need upgrading neither single path envs
- if (paths.length > 1) {
- int numPathsExist = 0;
- for (Path path : paths) {
- if (Files.exists(path.resolve(MetaDataStateFormat.STATE_DIR_NAME))) {
- numPathsExist++;
- if (numPathsExist > 1) {
- return true;
- }
- }
- }
- }
- return false;
- }
-
- /**
- * Picks a target ShardPath to allocate and upgrade the given shard to. It picks the target based on a simple
- * heuristic:
- * <ul>
- * <li>if the smallest datapath has 2x more space available that the shards total size the datapath with the most bytes for that shard is picked to minimize the amount of bytes to copy</li>
- * <li>otherwise the largest available datapath is used as the target no matter how big of a slice of the shard it already holds.</li>
- * </ul>
- */
- public ShardPath pickShardPath(ShardId shard) throws IOException {
- if (needsUpgrading(shard) == false) {
- throw new IllegalStateException("Shard doesn't need upgrading");
- }
- final NodeEnvironment.NodePath[] paths = nodeEnvironment.nodePaths();
-
- // if we need upgrading make sure we have all paths.
- for (NodeEnvironment.NodePath path : paths) {
- Files.createDirectories(path.resolve(shard));
- }
- final ShardFileInfo[] shardFileInfo = getShardFileInfo(shard, paths);
- long totalBytesUsedByShard = 0;
- long leastUsableSpace = Long.MAX_VALUE;
- long mostUsableSpace = Long.MIN_VALUE;
- assert shardFileInfo.length == nodeEnvironment.availableShardPaths(shard).length;
- for (ShardFileInfo info : shardFileInfo) {
- totalBytesUsedByShard += info.spaceUsedByShard;
- leastUsableSpace = Math.min(leastUsableSpace, info.usableSpace + info.spaceUsedByShard);
- mostUsableSpace = Math.max(mostUsableSpace, info.usableSpace + info.spaceUsedByShard);
- }
-
- if (mostUsableSpace < totalBytesUsedByShard) {
- throw new IllegalStateException("Can't upgrade path available space: " + new ByteSizeValue(mostUsableSpace) + " required space: " + new ByteSizeValue(totalBytesUsedByShard));
- }
- ShardFileInfo target = shardFileInfo[0];
- if (leastUsableSpace >= (2 * totalBytesUsedByShard)) {
- for (ShardFileInfo info : shardFileInfo) {
- if (info.spaceUsedByShard > target.spaceUsedByShard) {
- target = info;
- }
- }
- } else {
- for (ShardFileInfo info : shardFileInfo) {
- if (info.usableSpace > target.usableSpace) {
- target = info;
- }
- }
- }
- return new ShardPath(false, target.path, target.path, IndexMetaData.INDEX_UUID_NA_VALUE /* we don't know */, shard);
- }
-
- private ShardFileInfo[] getShardFileInfo(ShardId shard, NodeEnvironment.NodePath[] paths) throws IOException {
- final ShardFileInfo[] info = new ShardFileInfo[paths.length];
- for (int i = 0; i < info.length; i++) {
- Path path = paths[i].resolve(shard);
- final long usabelSpace = getUsabelSpace(paths[i]);
- info[i] = new ShardFileInfo(path, usabelSpace, getSpaceUsedByShard(path));
- }
- return info;
- }
-
- protected long getSpaceUsedByShard(Path path) throws IOException {
- final long[] spaceUsedByShard = new long[] {0};
- if (Files.exists(path)) {
- Files.walkFileTree(path, new FileVisitor<Path>() {
- @Override
- public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
- return FileVisitResult.CONTINUE;
- }
-
- @Override
- public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
- if (attrs.isRegularFile()) {
- spaceUsedByShard[0] += attrs.size();
- }
- return FileVisitResult.CONTINUE;
- }
-
- @Override
- public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
- return FileVisitResult.CONTINUE;
- }
-
- @Override
- public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
- return FileVisitResult.CONTINUE;
- }
- });
- }
- return spaceUsedByShard[0];
- }
-
- protected long getUsabelSpace(NodeEnvironment.NodePath path) throws IOException {
- FileStore fileStore = path.fileStore;
- return fileStore.getUsableSpace();
- }
-
- static class ShardFileInfo {
- final Path path;
- final long usableSpace;
- final long spaceUsedByShard;
-
- ShardFileInfo(Path path, long usableSpace, long spaceUsedByShard) {
- this.path = path;
- this.usableSpace = usableSpace;
- this.spaceUsedByShard = spaceUsedByShard;
- }
- }
-
-
-
- private void upgradeFiles(ShardId shard, ShardPath targetPath, final Path targetDir, String folderName, Path[] paths) throws IOException {
- List<Path> movedFiles = new ArrayList<>();
- for (Path path : paths) {
- if (path.equals(targetPath.getDataPath()) == false) {
- final Path sourceDir = path.resolve(folderName);
- if (Files.exists(sourceDir)) {
- logger.info("{} upgrading [{}] from [{}] to [{}]", shard, folderName, sourceDir, targetDir);
- try (DirectoryStream<Path> stream = Files.newDirectoryStream(sourceDir)) {
- Files.createDirectories(targetDir);
- for (Path file : stream) {
- if (IndexWriter.WRITE_LOCK_NAME.equals(file.getFileName().toString()) || Files.isDirectory(file)) {
- continue; // skip write.lock
- }
- logger.info("{} move file [{}] size: [{}]", shard, file.getFileName(), Files.size(file));
- final Path targetFile = targetDir.resolve(file.getFileName());
- /* We are pessimistic and do a copy first to the other path and then and atomic move to rename it such that
- in the worst case the file exists twice but is never lost or half written.*/
- final Path targetTempFile = Files.createTempFile(targetDir, "upgrade_", "_" + file.getFileName().toString());
- Files.copy(file, targetTempFile, StandardCopyOption.COPY_ATTRIBUTES, StandardCopyOption.REPLACE_EXISTING);
- Files.move(targetTempFile, targetFile, StandardCopyOption.ATOMIC_MOVE); // we are on the same FS - this must work otherwise all bets are off
- Files.delete(file);
- movedFiles.add(targetFile);
- }
- }
- }
- }
- }
- if (movedFiles.isEmpty() == false) {
- // fsync later it might be on disk already
- logger.info("{} fsync files", shard);
- for (Path moved : movedFiles) {
- logger.info("{} syncing [{}]", shard, moved.getFileName());
- IOUtils.fsync(moved, false);
- }
- logger.info("{} syncing directory [{}]", shard, targetDir);
- IOUtils.fsync(targetDir, true);
- }
- }
-
-
- /**
- * Returns <code>true</code> iff the target path is one of the given paths.
- */
- private boolean isTargetPathConfigured(final Path[] paths, ShardPath targetPath) {
- for (Path path : paths) {
- if (path.equals(targetPath.getDataPath())) {
- return true;
- }
- }
- return false;
- }
-
- /**
- * Runs an upgrade on all shards located under the given node environment if there is more than 1 data.path configured
- * otherwise this method will return immediately.
- */
- public static void upgradeMultiDataPath(NodeEnvironment nodeEnv, ESLogger logger) throws IOException {
- if (nodeEnv.nodeDataPaths().length > 1) {
- final MultiDataPathUpgrader upgrader = new MultiDataPathUpgrader(nodeEnv);
- final Set<String> allIndices = nodeEnv.findAllIndices();
-
- for (String index : allIndices) {
- for (ShardId shardId : findAllShardIds(nodeEnv.indexPaths(new Index(index)))) {
- try (ShardLock lock = nodeEnv.shardLock(shardId, 0)) {
- if (upgrader.needsUpgrading(shardId)) {
- final ShardPath shardPath = upgrader.pickShardPath(shardId);
- upgrader.upgrade(shardId, shardPath);
- // we have to check if the index path exists since we might
- // have only upgraded the shard state that is written under /indexname/shardid/_state
- // in the case we upgraded a dedicated index directory index
- if (Files.exists(shardPath.resolveIndex())) {
- upgrader.checkIndex(shardPath);
- }
- } else {
- logger.debug("{} no upgrade needed - already upgraded");
- }
- }
- }
- }
- }
- }
-
- private static Set<ShardId> findAllShardIds(Path... locations) throws IOException {
- final Set<ShardId> shardIds = new HashSet<>();
- for (final Path location : locations) {
- if (Files.isDirectory(location)) {
- shardIds.addAll(findAllShardsForIndex(location));
- }
- }
- return shardIds;
- }
-
- private static Set<ShardId> findAllShardsForIndex(Path indexPath) throws IOException {
- Set<ShardId> shardIds = new HashSet<>();
- if (Files.isDirectory(indexPath)) {
- try (DirectoryStream<Path> stream = Files.newDirectoryStream(indexPath)) {
- String currentIndex = indexPath.getFileName().toString();
- for (Path shardPath : stream) {
- String fileName = shardPath.getFileName().toString();
- if (Files.isDirectory(shardPath) && fileName.chars().allMatch(Character::isDigit)) {
- int shardId = Integer.parseInt(fileName);
- ShardId id = new ShardId(currentIndex, shardId);
- shardIds.add(id);
- }
- }
- }
- }
- return shardIds;
- }
-
-}
diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java
index 140f026c35..723d7df511 100644
--- a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java
+++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java
@@ -19,6 +19,7 @@
package org.elasticsearch.common.util.concurrent;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import java.util.Arrays;
@@ -40,10 +41,7 @@ public class EsExecutors {
* Settings key to manually set the number of available processors.
* This is used to adjust thread pools sizes etc. per node.
*/
- public static final String PROCESSORS = "processors";
-
- /** Useful for testing */
- public static final String DEFAULT_SYSPROP = "es.processors.override";
+ public static final Setting<Integer> PROCESSORS_SETTING = Setting.intSetting("processors", Math.min(32, Runtime.getRuntime().availableProcessors()), 1, false, Setting.Scope.CLUSTER) ;
/**
* Returns the number of processors available but at most <tt>32</tt>.
@@ -53,37 +51,33 @@ public class EsExecutors {
* ie. >= 48 create too many threads and run into OOM see #3478
* We just use an 32 core upper-bound here to not stress the system
* too much with too many created threads */
- int defaultValue = Math.min(32, Runtime.getRuntime().availableProcessors());
- try {
- defaultValue = Integer.parseInt(System.getProperty(DEFAULT_SYSPROP));
- } catch (Throwable ignored) {}
- return settings.getAsInt(PROCESSORS, defaultValue);
+ return PROCESSORS_SETTING.get(settings);
}
- public static PrioritizedEsThreadPoolExecutor newSinglePrioritizing(String name, ThreadFactory threadFactory) {
- return new PrioritizedEsThreadPoolExecutor(name, 1, 1, 0L, TimeUnit.MILLISECONDS, threadFactory);
+ public static PrioritizedEsThreadPoolExecutor newSinglePrioritizing(String name, ThreadFactory threadFactory, ThreadContext contextHolder) {
+ return new PrioritizedEsThreadPoolExecutor(name, 1, 1, 0L, TimeUnit.MILLISECONDS, threadFactory, contextHolder);
}
- public static EsThreadPoolExecutor newScaling(String name, int min, int max, long keepAliveTime, TimeUnit unit, ThreadFactory threadFactory) {
+ public static EsThreadPoolExecutor newScaling(String name, int min, int max, long keepAliveTime, TimeUnit unit, ThreadFactory threadFactory, ThreadContext contextHolder) {
ExecutorScalingQueue<Runnable> queue = new ExecutorScalingQueue<>();
// we force the execution, since we might run into concurrency issues in offer for ScalingBlockingQueue
- EsThreadPoolExecutor executor = new EsThreadPoolExecutor(name, min, max, keepAliveTime, unit, queue, threadFactory, new ForceQueuePolicy());
+ EsThreadPoolExecutor executor = new EsThreadPoolExecutor(name, min, max, keepAliveTime, unit, queue, threadFactory, new ForceQueuePolicy(), contextHolder);
queue.executor = executor;
return executor;
}
- public static EsThreadPoolExecutor newCached(String name, long keepAliveTime, TimeUnit unit, ThreadFactory threadFactory) {
- return new EsThreadPoolExecutor(name, 0, Integer.MAX_VALUE, keepAliveTime, unit, new SynchronousQueue<Runnable>(), threadFactory, new EsAbortPolicy());
+ public static EsThreadPoolExecutor newCached(String name, long keepAliveTime, TimeUnit unit, ThreadFactory threadFactory, ThreadContext contextHolder) {
+ return new EsThreadPoolExecutor(name, 0, Integer.MAX_VALUE, keepAliveTime, unit, new SynchronousQueue<Runnable>(), threadFactory, new EsAbortPolicy(), contextHolder);
}
- public static EsThreadPoolExecutor newFixed(String name, int size, int queueCapacity, ThreadFactory threadFactory) {
+ public static EsThreadPoolExecutor newFixed(String name, int size, int queueCapacity, ThreadFactory threadFactory, ThreadContext contextHolder) {
BlockingQueue<Runnable> queue;
if (queueCapacity < 0) {
queue = ConcurrentCollections.newBlockingQueue();
} else {
queue = new SizeBlockingQueue<>(ConcurrentCollections.<Runnable>newBlockingQueue(), queueCapacity);
}
- return new EsThreadPoolExecutor(name, size, size, 0, TimeUnit.MILLISECONDS, queue, threadFactory, new EsAbortPolicy());
+ return new EsThreadPoolExecutor(name, size, size, 0, TimeUnit.MILLISECONDS, queue, threadFactory, new EsAbortPolicy(), contextHolder);
}
public static String threadName(Settings settings, String ... names) {
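
With the processor count now modeled as a real setting, overrides are parsed and bounds-checked like any other cluster setting. A sketch (override value invented):

    Settings settings = Settings.builder().put("processors", 8).build();
    int processors = EsExecutors.PROCESSORS_SETTING.get(settings); // 8, must be >= 1
    // without an override, the default is Math.min(32, Runtime.getRuntime().availableProcessors())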
diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java
index 4c02aab1fe..fde8d82829 100644
--- a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java
+++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java
@@ -24,12 +24,14 @@ import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
+import java.util.stream.Stream;
/**
* An extension to thread pool executor, allowing (in the future) to add specific additional stats to it.
*/
public class EsThreadPoolExecutor extends ThreadPoolExecutor {
+ private final ThreadContext contextHolder;
private volatile ShutdownListener listener;
private final Object monitor = new Object();
@@ -38,13 +40,14 @@ public class EsThreadPoolExecutor extends ThreadPoolExecutor {
*/
private final String name;
- EsThreadPoolExecutor(String name, int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, BlockingQueue<Runnable> workQueue, ThreadFactory threadFactory) {
- this(name, corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, new EsAbortPolicy());
+ EsThreadPoolExecutor(String name, int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, BlockingQueue<Runnable> workQueue, ThreadFactory threadFactory, ThreadContext contextHolder) {
+ this(name, corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, new EsAbortPolicy(), contextHolder);
}
- EsThreadPoolExecutor(String name, int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, BlockingQueue<Runnable> workQueue, ThreadFactory threadFactory, XRejectedExecutionHandler handler) {
+ EsThreadPoolExecutor(String name, int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, BlockingQueue<Runnable> workQueue, ThreadFactory threadFactory, XRejectedExecutionHandler handler, ThreadContext contextHolder) {
super(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, handler);
this.name = name;
+ this.contextHolder = contextHolder;
}
public void shutdown(ShutdownListener listener) {
@@ -80,7 +83,11 @@ public class EsThreadPoolExecutor extends ThreadPoolExecutor {
}
@Override
- public void execute(Runnable command) {
+ public void execute(final Runnable command) {
+ doExecute(wrapRunnable(command));
+ }
+
+ protected void doExecute(final Runnable command) {
try {
super.execute(command);
} catch (EsRejectedExecutionException ex) {
@@ -99,6 +106,14 @@ public class EsThreadPoolExecutor extends ThreadPoolExecutor {
}
}
+ /**
+ * Returns a stream of all pending tasks. This is similar to {@link #getQueue()} but will expose the originally submitted
+ * {@link Runnable} instances rather than potentially wrapped ones.
+ */
+ public Stream<Runnable> getTasks() {
+ return this.getQueue().stream().map(this::unwrap);
+ }
+
@Override
public String toString() {
StringBuilder b = new StringBuilder();
@@ -116,4 +131,114 @@ public class EsThreadPoolExecutor extends ThreadPoolExecutor {
b.append(super.toString()).append(']');
return b.toString();
}
+
+ protected Runnable wrapRunnable(Runnable command) {
+ final Runnable wrappedCommand;
+ if (command instanceof AbstractRunnable) {
+ wrappedCommand = new FilterAbstractRunnable(contextHolder, (AbstractRunnable) command);
+ } else {
+ wrappedCommand = new FilterRunnable(contextHolder, command);
+ }
+ return wrappedCommand;
+ }
+
+ protected Runnable unwrap(Runnable runnable) {
+ if (runnable instanceof FilterAbstractRunnable) {
+ return ((FilterAbstractRunnable) runnable).in;
+ } else if (runnable instanceof FilterRunnable) {
+ return ((FilterRunnable) runnable).in;
+ }
+ return runnable;
+ }
+
+ private class FilterAbstractRunnable extends AbstractRunnable {
+ private final ThreadContext contextHolder;
+ private final AbstractRunnable in;
+ private final ThreadContext.StoredContext ctx;
+
+ FilterAbstractRunnable(ThreadContext contextHolder, AbstractRunnable in) {
+ this.contextHolder = contextHolder;
+ ctx = contextHolder.newStoredContext();
+ this.in = in;
+ }
+
+ @Override
+ public boolean isForceExecution() {
+ return in.isForceExecution();
+ }
+
+ @Override
+ public void onAfter() {
+ in.onAfter();
+ }
+
+ @Override
+ public void onFailure(Throwable t) {
+ in.onFailure(t);
+ }
+
+ @Override
+ public void onRejection(Throwable t) {
+ in.onRejection(t);
+ }
+
+ @Override
+ protected void doRun() throws Exception {
+ boolean whileRunning = false;
+ try (ThreadContext.StoredContext ignore = contextHolder.stashContext()) {
+ ctx.restore();
+ whileRunning = true;
+ in.doRun();
+ whileRunning = false;
+ } catch (IllegalStateException ex) {
+ if (whileRunning || isShutdown() == false) {
+ throw ex;
+ }
+ // an ISE here means the ThreadContext threw because it has
+ // been closed, which only happens while the thread pool is
+ // shutting down, so it is safe to swallow
+ }
+ }
+
+ @Override
+ public String toString() {
+ return in.toString();
+ }
+
+ }
+
+ private class FilterRunnable implements Runnable {
+ private final ThreadContext contextHolder;
+ private final Runnable in;
+ private final ThreadContext.StoredContext ctx;
+
+ FilterRunnable(ThreadContext contextHolder, Runnable in) {
+ this.contextHolder = contextHolder;
+ ctx = contextHolder.newStoredContext();
+ this.in = in;
+ }
+
+ @Override
+ public void run() {
+ boolean whileRunning = false;
+ try (ThreadContext.StoredContext ignore = contextHolder.stashContext()) {
+ ctx.restore();
+ whileRunning = true;
+ in.run();
+ whileRunning = false;
+ } catch (IllegalStateException ex) {
+ if (whileRunning || isShutdown() == false) {
+ throw ex;
+ }
+ // an ISE here means the ThreadContext threw because it has
+ // been closed, which only happens while the thread pool is
+ // shutting down, so it is safe to swallow
+ }
+ }
+ @Override
+ public String toString() {
+ return in.toString();
+ }
+ }
+
}
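
A sketch of the propagation these wrappers implement (header name invented, and the putHeader/getHeader accessors on ThreadContext are assumed): a header set before submission is visible inside the task because wrapRunnable captures the submitter's ThreadContext and restores it around the task's execution.

    threadContext.putHeader("request-id", "42"); // set on the submitting thread
    executor.execute(() -> {
        // the submitting thread's context was captured at submission and restored here
        String requestId = threadContext.getHeader("request-id"); // "42"
    });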
diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedEsThreadPoolExecutor.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedEsThreadPoolExecutor.java
index d0d2906dee..f55c84e943 100644
--- a/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedEsThreadPoolExecutor.java
+++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedEsThreadPoolExecutor.java
@@ -47,8 +47,8 @@ public class PrioritizedEsThreadPoolExecutor extends EsThreadPoolExecutor {
private AtomicLong insertionOrder = new AtomicLong();
private Queue<Runnable> current = ConcurrentCollections.newQueue();
- PrioritizedEsThreadPoolExecutor(String name, int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, ThreadFactory threadFactory) {
- super(name, corePoolSize, maximumPoolSize, keepAliveTime, unit, new PriorityBlockingQueue<Runnable>(), threadFactory);
+ PrioritizedEsThreadPoolExecutor(String name, int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, ThreadFactory threadFactory, ThreadContext contextHolder) {
+ super(name, corePoolSize, maximumPoolSize, keepAliveTime, unit, new PriorityBlockingQueue<>(), threadFactory, contextHolder);
}
public Pending[] getPending() {
@@ -88,10 +88,14 @@ public class PrioritizedEsThreadPoolExecutor extends EsThreadPoolExecutor {
for (Runnable runnable : runnables) {
if (runnable instanceof TieBreakingPrioritizedRunnable) {
TieBreakingPrioritizedRunnable t = (TieBreakingPrioritizedRunnable) runnable;
- pending.add(new Pending(t.runnable, t.priority(), t.insertionOrder, executing));
+ pending.add(new Pending(unwrap(t.runnable), t.priority(), t.insertionOrder, executing));
} else if (runnable instanceof PrioritizedFutureTask) {
PrioritizedFutureTask t = (PrioritizedFutureTask) runnable;
- pending.add(new Pending(t.task, t.priority, t.insertionOrder, executing));
+ Object task = t.task;
+ if (t.task instanceof Runnable) {
+ task = unwrap((Runnable) t.task);
+ }
+ pending.add(new Pending(task, t.priority, t.insertionOrder, executing));
}
}
}
@@ -107,12 +111,8 @@ public class PrioritizedEsThreadPoolExecutor extends EsThreadPoolExecutor {
}
public void execute(Runnable command, final ScheduledExecutorService timer, final TimeValue timeout, final Runnable timeoutCallback) {
- if (command instanceof PrioritizedRunnable) {
- command = new TieBreakingPrioritizedRunnable((PrioritizedRunnable) command, insertionOrder.incrementAndGet());
- } else if (!(command instanceof PrioritizedFutureTask)) { // it might be a callable wrapper...
- command = new TieBreakingPrioritizedRunnable(command, Priority.NORMAL, insertionOrder.incrementAndGet());
- }
- super.execute(command);
+ command = wrapRunnable(command);
+ doExecute(command);
if (timeout.nanos() >= 0) {
if (command instanceof TieBreakingPrioritizedRunnable) {
((TieBreakingPrioritizedRunnable) command).scheduleTimeout(timer, timeoutCallback, timeout);
@@ -125,21 +125,31 @@ public class PrioritizedEsThreadPoolExecutor extends EsThreadPoolExecutor {
}
@Override
- public void execute(Runnable command) {
+ protected Runnable wrapRunnable(Runnable command) {
if (command instanceof PrioritizedRunnable) {
- command = new TieBreakingPrioritizedRunnable((PrioritizedRunnable) command, insertionOrder.incrementAndGet());
- } else if (!(command instanceof PrioritizedFutureTask)) { // it might be a callable wrapper...
- command = new TieBreakingPrioritizedRunnable(command, Priority.NORMAL, insertionOrder.incrementAndGet());
+ if (command instanceof TieBreakingPrioritizedRunnable) {
+ return command;
+ }
+ Priority priority = ((PrioritizedRunnable) command).priority();
+ return new TieBreakingPrioritizedRunnable(super.wrapRunnable(command), priority, insertionOrder.incrementAndGet());
+ } else if (command instanceof PrioritizedFutureTask) {
+ return command;
+ } else { // it might be a callable wrapper...
+ if (command instanceof TieBreakingPrioritizedRunnable) {
+ return command;
+ }
+ return new TieBreakingPrioritizedRunnable(super.wrapRunnable(command), Priority.NORMAL, insertionOrder.incrementAndGet());
}
- super.execute(command);
}
+
@Override
protected <T> RunnableFuture<T> newTaskFor(Runnable runnable, T value) {
if (!(runnable instanceof PrioritizedRunnable)) {
runnable = PrioritizedRunnable.wrap(runnable, Priority.NORMAL);
}
- return new PrioritizedFutureTask<>((PrioritizedRunnable) runnable, value, insertionOrder.incrementAndGet());
+ Priority priority = ((PrioritizedRunnable) runnable).priority();
+ return new PrioritizedFutureTask<>(runnable, priority, value, insertionOrder.incrementAndGet());
}
@Override
@@ -147,7 +157,7 @@ public class PrioritizedEsThreadPoolExecutor extends EsThreadPoolExecutor {
if (!(callable instanceof PrioritizedCallable)) {
callable = PrioritizedCallable.wrap(callable, Priority.NORMAL);
}
- return new PrioritizedFutureTask<>((PrioritizedCallable<T>) callable, insertionOrder.incrementAndGet());
+ return new PrioritizedFutureTask<>((PrioritizedCallable)callable, insertionOrder.incrementAndGet());
}
public static class Pending {
@@ -173,10 +183,6 @@ public class PrioritizedEsThreadPoolExecutor extends EsThreadPoolExecutor {
private ScheduledFuture<?> timeoutFuture;
private boolean started = false;
- TieBreakingPrioritizedRunnable(PrioritizedRunnable runnable, long insertionOrder) {
- this(runnable, runnable.priority(), insertionOrder);
- }
-
TieBreakingPrioritizedRunnable(Runnable runnable, Priority priority, long insertionOrder) {
super(priority);
this.runnable = runnable;
@@ -233,6 +239,7 @@ public class PrioritizedEsThreadPoolExecutor extends EsThreadPoolExecutor {
runnable = null;
timeoutFuture = null;
}
+
}
}
@@ -242,10 +249,10 @@ public class PrioritizedEsThreadPoolExecutor extends EsThreadPoolExecutor {
final Priority priority;
final long insertionOrder;
- public PrioritizedFutureTask(PrioritizedRunnable runnable, T value, long insertionOrder) {
+ public PrioritizedFutureTask(Runnable runnable, Priority priority, T value, long insertionOrder) {
super(runnable, value);
this.task = runnable;
- this.priority = runnable.priority();
+ this.priority = priority;
this.insertionOrder = insertionOrder;
}
@@ -265,4 +272,5 @@ public class PrioritizedEsThreadPoolExecutor extends EsThreadPoolExecutor {
return insertionOrder < pft.insertionOrder ? -1 : 1;
}
}
+
}
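
The refactored wrapRunnable preserves the submitted task's priority while layering the context wrapper on top. A short sketch (the work itself is a placeholder):

    Runnable task = PrioritizedRunnable.wrap(() -> { /* work */ }, Priority.HIGH);
    prioritizedExecutor.execute(task); // becomes a TieBreakingPrioritizedRunnable carrying Priority.HIGH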
diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java
new file mode 100644
index 0000000000..1928392fe4
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java
@@ -0,0 +1,359 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.common.util.concurrent;
+
+import org.apache.lucene.util.CloseableThreadLocal;
+import org.elasticsearch.common.collect.Iterators;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Settings;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+/**
+ * A ThreadContext is a map of string headers and a transient map of keyed objects that are associated with
+ * a thread. It allows storing and retrieving header information across method calls and network calls, as well as across threads spawned from a
+ * thread that has a {@link ThreadContext} associated with it. Threads spawned from a {@link org.elasticsearch.threadpool.ThreadPool} have out-of-the-box
+ * support for {@link ThreadContext}, and all spawned threads will inherit the {@link ThreadContext} from the thread they are forked from.
+ * Network calls will also preserve the sender's headers automatically.
+ * <p>
+ * Consumers of ThreadContext usually don't need to interact with adding or stashing contexts. Every Elasticsearch thread is managed by a thread pool or executor
+ * that is responsible for stashing and restoring the thread's context. For instance, when a network request is received, all headers are deserialized from the network
+ * and directly added as the headers of the thread's {@link ThreadContext} (see {@link #readHeaders(StreamInput)}). In order not to modify the context that is currently
+ * active on this thread, the network code uses a try-with-resources pattern to stash its current context and read headers into a fresh one; once the request is handled or a handler thread
+ * is forked (which in turn inherits the context), it restores the previous context. For instance:
+ * </p>
+ * <pre>
+ * // current context is stashed and replaced with a default context
+ * try (StoredContext context = threadContext.stashContext()) {
+ * threadContext.readHeaders(in); // read headers into current context
+ * if (fork) {
+ * threadPool.execute(() -&gt; request.handle()); // inherits context
+ * } else {
+ * request.handle();
+ * }
+ * }
+ * // previous context is restored on StoredContext#close()
+ * </pre>
+ *
+ */
+public final class ThreadContext implements Closeable, Writeable<ThreadContext.ThreadContextStruct> {
+
+ public static final String PREFIX = "request.headers";
+ public static final Setting<Settings> DEFAULT_HEADERS_SETTING = Setting.groupSetting(PREFIX + ".", false, Setting.Scope.CLUSTER);
+ private final Map<String, String> defaultHeader;
+ private static final ThreadContextStruct DEFAULT_CONTEXT = new ThreadContextStruct(Collections.emptyMap());
+ private final ContextThreadLocal threadLocal;
+
+ /**
+ * Creates a new ThreadContext instance
+ * @param settings the settings to read the default request headers from
+ */
+ public ThreadContext(Settings settings) {
+ Settings headers = DEFAULT_HEADERS_SETTING.get(settings);
+ if (headers == null) {
+ this.defaultHeader = Collections.emptyMap();
+ } else {
+ Map<String, String> defaultHeader = new HashMap<>();
+ for (String key : headers.names()) {
+ defaultHeader.put(key, headers.get(key));
+ }
+ this.defaultHeader = Collections.unmodifiableMap(defaultHeader);
+ }
+ threadLocal = new ContextThreadLocal();
+ }
+
+ @Override
+ public void close() throws IOException {
+ threadLocal.close();
+ }
+
+ /**
+ * Removes the current context and installs a default context in its place. The removed context can be
+ * restored by closing the returned {@link StoredContext}.
+ */
+ public StoredContext stashContext() {
+ final ThreadContextStruct context = threadLocal.get();
+ threadLocal.set(null);
+ return () -> {
+ threadLocal.set(context);
+ };
+ }
+
+ /**
+ * Removes the current context and installs a new context that contains a merge of the current headers and the given headers. The removed context can be
+ * restored by closing the returned {@link StoredContext}. The merge strategy is that existing headers are preserved unless they are defaults.
+ */
+ public StoredContext stashAndMergeHeaders(Map<String, String> headers) {
+ final ThreadContextStruct context = threadLocal.get();
+ Map<String, String> newHeader = new HashMap<>(headers);
+ newHeader.putAll(context.headers);
+ threadLocal.set(DEFAULT_CONTEXT.putHeaders(newHeader));
+ return () -> {
+ threadLocal.set(context);
+ };
+ }
+
+ /**
+ * Just like {@link #stashContext()} but no default context is set.
+ */
+ public StoredContext newStoredContext() {
+ final ThreadContextStruct context = threadLocal.get();
+ return () -> {
+ threadLocal.set(context);
+ };
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ threadLocal.get().writeTo(out, defaultHeader);
+ }
+
+ @Override
+ public ThreadContextStruct readFrom(StreamInput in) throws IOException {
+ return DEFAULT_CONTEXT.readFrom(in);
+ }
+
+ /**
+ * Reads the headers from the stream into the current context
+ */
+ public void readHeaders(StreamInput in) throws IOException {
+ threadLocal.set(readFrom(in));
+ }
+
+
+ /**
+ * Returns the header for the given key or <code>null</code> if not present
+ */
+ public String getHeader(String key) {
+ String value = threadLocal.get().headers.get(key);
+ if (value == null) {
+ return defaultHeader.get(key);
+ }
+ return value;
+ }
+
+ /**
+ * Returns all of the current context's headers
+ */
+ public Map<String, String> getHeaders() {
+ HashMap<String, String> map = new HashMap<>(defaultHeader);
+ map.putAll(threadLocal.get().headers);
+ return Collections.unmodifiableMap(map);
+ }
+
+ /**
+ * Copies all header key/value pairs into the current context
+ */
+ public void copyHeaders(Iterable<Map.Entry<String, String>> headers) {
+ threadLocal.set(threadLocal.get().copyHeaders(headers));
+ }
+
+ /**
+ * Puts a header into the context
+ */
+ public void putHeader(String key, String value) {
+ threadLocal.set(threadLocal.get().putPersistent(key, value));
+ }
+
+ /**
+ * Puts all of the given headers into this context
+ */
+ public void putHeader(Map<String, String> header) {
+ threadLocal.set(threadLocal.get().putHeaders(header));
+ }
+
+ /**
+ * Puts a transient header object into this context
+ */
+ public void putTransient(String key, Object value) {
+ threadLocal.set(threadLocal.get().putTransient(key, value));
+ }
+
+ /**
+ * Returns a transient header object or <code>null</code> if there is no header for the given key
+ */
+ public <T> T getTransient(String key) {
+ return (T) threadLocal.get().transientHeaders.get(key);
+ }
+
+ public interface StoredContext extends AutoCloseable {
+ @Override
+ void close();
+
+ default void restore() {
+ close();
+ }
+ }
+
+ static final class ThreadContextStruct implements Writeable<ThreadContextStruct> {
+ private final Map<String,String> headers;
+ private final Map<String, Object> transientHeaders;
+
+ private ThreadContextStruct(StreamInput in) throws IOException {
+ int numValues = in.readVInt();
+ Map<String, String> headers = numValues == 0 ? Collections.emptyMap() : new HashMap<>(numValues);
+ for (int i = 0; i < numValues; i++) {
+ headers.put(in.readString(), in.readString());
+ }
+ this.headers = headers;
+ this.transientHeaders = Collections.emptyMap();
+ }
+
+ private ThreadContextStruct(Map<String, String> headers, Map<String, Object> transientHeaders) {
+ this.headers = headers;
+ this.transientHeaders = transientHeaders;
+ }
+
+ private ThreadContextStruct(Map<String, String> headers) {
+ this(headers, Collections.emptyMap());
+ }
+
+ private ThreadContextStruct putPersistent(String key, String value) {
+ Map<String, String> newHeaders = new HashMap<>(this.headers);
+ putSingleHeader(key, value, newHeaders);
+ return new ThreadContextStruct(newHeaders, transientHeaders);
+ }
+
+ private void putSingleHeader(String key, String value, Map<String, String> newHeaders) {
+ final String existingValue;
+ if ((existingValue = newHeaders.putIfAbsent(key, value)) != null) {
+ throw new IllegalArgumentException("value for key [" + key + "] already present");
+ }
+ }
+
+ private ThreadContextStruct putHeaders(Map<String, String> headers) {
+ if (headers.isEmpty()) {
+ return this;
+ } else {
+ final Map<String, String> newHeaders = new HashMap<>();
+ for (Map.Entry<String, String> entry : headers.entrySet()) {
+ putSingleHeader(entry.getKey(), entry.getValue(), newHeaders);
+ }
+ newHeaders.putAll(this.headers);
+ return new ThreadContextStruct(newHeaders, transientHeaders);
+ }
+ }
+
+ private ThreadContextStruct putTransient(String key, Object value) {
+ Map<String, Object> newTransient = new HashMap<>(this.transientHeaders);
+ if (newTransient.putIfAbsent(key, value) != null) {
+ throw new IllegalArgumentException("value for key [" + key + "] already present");
+ }
+ return new ThreadContextStruct(headers, newTransient);
+ }
+
+ boolean isEmpty() {
+ return headers.isEmpty() && transientHeaders.isEmpty();
+ }
+
+
+ private ThreadContextStruct copyHeaders(Iterable<Map.Entry<String, String>> headers) {
+ Map<String, String> newHeaders = new HashMap<>();
+ for (Map.Entry<String, String> header : headers) {
+ newHeaders.put(header.getKey(), header.getValue());
+ }
+ return putHeaders(newHeaders);
+ }
+
+ @Override
+ public ThreadContextStruct readFrom(StreamInput in) throws IOException {
+ return new ThreadContextStruct(in);
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ throw new UnsupportedOperationException("use the other write to");
+ }
+
+ public void writeTo(StreamOutput out, Map<String, String> defaultHeaders) throws IOException {
+ final Map<String, String> headers;
+ if (defaultHeaders.isEmpty()) {
+ headers = this.headers;
+ } else {
+ headers = new HashMap<>(defaultHeaders);
+ headers.putAll(this.headers);
+ }
+
+ int keys = headers.size();
+ out.writeVInt(keys);
+ for (Map.Entry<String, String> entry : headers.entrySet()) {
+ out.writeString(entry.getKey());
+ out.writeString(entry.getValue());
+ }
+ }
+
+ }
+
+ private static class ContextThreadLocal extends CloseableThreadLocal<ThreadContextStruct> {
+ private final AtomicBoolean closed = new AtomicBoolean(false);
+
+ @Override
+ public void set(ThreadContextStruct object) {
+ try {
+ if (object == DEFAULT_CONTEXT) {
+ super.set(null);
+ } else {
+ super.set(object);
+ }
+ } catch (NullPointerException ex) {
+ /* This is odd, but CloseableThreadLocal throws an NPE if it was closed but is still accessed.
+ To surface a meaningful exception we call ensureOpen() to tell the user we are already closed. */
+ ensureOpen();
+ throw ex;
+ }
+ }
+
+ @Override
+ public ThreadContextStruct get() {
+ try {
+ ThreadContextStruct threadContextStruct = super.get();
+ if (threadContextStruct != null) {
+ return threadContextStruct;
+ }
+ return DEFAULT_CONTEXT;
+ } catch (NullPointerException ex) {
+ /* This is odd, but CloseableThreadLocal throws an NPE if it was closed but is still accessed.
+ To surface a meaningful exception we call ensureOpen() to tell the user we are already closed. */
+ ensureOpen();
+ throw ex;
+ }
+ }
+
+ private void ensureOpen() {
+ if (closed.get()) {
+ throw new IllegalStateException("threadcontext is already closed");
+ }
+ }
+
+ @Override
+ public void close() {
+ if (closed.compareAndSet(false, true)) {
+ super.close();
+ }
+ }
+ }
+}
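The new class above gives each thread copy-on-write header semantics with stash/restore around request boundaries. A hedged usage sketch against the API introduced in this file (Settings.EMPTY is assumed available, as elsewhere in the codebase; run with -ea for the asserts):

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.common.util.concurrent.ThreadContext;

    public class ThreadContextExample {
        public static void main(String[] args) throws Exception {
            try (ThreadContext context = new ThreadContext(Settings.EMPTY)) {
                context.putHeader("_user", "alice");
                // Stash the current context; inside this block the thread sees a clean default context.
                try (ThreadContext.StoredContext ignored = context.stashContext()) {
                    assert context.getHeader("_user") == null;
                    context.putHeader("_user", "bob"); // only visible until the stash is closed
                }
                // The previous context is restored on StoredContext#close().
                assert "alice".equals(context.getHeader("_user"));
            }
        }
    }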
diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java b/core/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java
index 979a1f2522..395dcad822 100644
--- a/core/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java
+++ b/core/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java
@@ -223,7 +223,7 @@ public final class ObjectParser<Value, Context> implements BiFunction<XContentPa
list.add(supplier.get()); // single value
} else {
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
- if (parser.currentToken().isValue()) {
+ if (parser.currentToken().isValue() || parser.currentToken() == XContentParser.Token.START_OBJECT) {
list.add(supplier.get());
} else {
throw new IllegalStateException("expected value but got [" + parser.currentToken() + "]");
@@ -237,6 +237,11 @@ public final class ObjectParser<Value, Context> implements BiFunction<XContentPa
declareField((p, v, c) -> consumer.accept(v, objectParser.apply(p, c)), field, ValueType.OBJECT);
}
+ public <T> void declareObjectArray(BiConsumer<Value, List<T>> consumer, BiFunction<XContentParser, Context, T> objectParser, ParseField field) {
+ declareField((p, v, c) -> consumer.accept(v, parseArray(p, () -> objectParser.apply(p, c))), field, ValueType.OBJECT_ARRAY);
+ }
+
+
public <T> void declareObjectOrDefault(BiConsumer<Value, T> consumer, BiFunction<XContentParser, Context, T> objectParser, Supplier<T> defaultValue, ParseField field) {
declareField((p, v, c) -> {
if (p.currentToken() == XContentParser.Token.VALUE_BOOLEAN) {
@@ -333,6 +338,7 @@ public final class ObjectParser<Value, Context> implements BiFunction<XContentPa
INT_ARRAY(EnumSet.of(XContentParser.Token.START_ARRAY, XContentParser.Token.VALUE_NUMBER, XContentParser.Token.VALUE_STRING)),
BOOLEAN_ARRAY(EnumSet.of(XContentParser.Token.START_ARRAY, XContentParser.Token.VALUE_BOOLEAN)),
OBJECT(EnumSet.of(XContentParser.Token.START_OBJECT)),
+ OBJECT_ARRAY(EnumSet.of(XContentParser.Token.START_OBJECT, XContentParser.Token.START_ARRAY)),
OBJECT_OR_BOOLEAN(EnumSet.of(XContentParser.Token.START_OBJECT, XContentParser.Token.VALUE_BOOLEAN)),
VALUE(EnumSet.of(XContentParser.Token.VALUE_BOOLEAN, XContentParser.Token.VALUE_NULL, XContentParser.Token.VALUE_EMBEDDED_OBJECT, XContentParser.Token.VALUE_NUMBER, XContentParser.Token.VALUE_STRING));
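The declareObjectArray addition above registers a field that may arrive as a single object or as an array of objects, collecting the results into a List. A sketch of how a parser might wire it up (the Outer/Inner value classes are hypothetical; only the ObjectParser and ParseField calls mirror this diff, and the usual name-plus-supplier constructor is assumed):

    import java.util.List;

    import org.elasticsearch.common.ParseField;
    import org.elasticsearch.common.xcontent.ObjectParser;

    // Hypothetical value classes used only for illustration.
    class Inner { String name; }
    class Outer { List<Inner> inners; }

    class OuterParserSketch {
        static final ObjectParser<Inner, Void> INNER = new ObjectParser<>("inner", Inner::new);
        static final ObjectParser<Outer, Void> OUTER = new ObjectParser<>("outer", Outer::new);
        static {
            INNER.declareString((inner, s) -> inner.name = s, new ParseField("name"));
            // Per the OBJECT_ARRAY token set, both {"inners": {...}} and
            // {"inners": [{...}, {...}]} should be accepted.
            OUTER.declareObjectArray((outer, list) -> outer.inners = list,
                    (parser, context) -> INNER.apply(parser, context), new ParseField("inners"));
        }
    }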
diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java b/core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java
index 73f16b2e78..4612d3f05d 100644
--- a/core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java
+++ b/core/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java
@@ -347,14 +347,20 @@ public class XContentMapValues {
return Long.parseLong(node.toString());
}
- public static boolean nodeBooleanValue(Object node, boolean defaultValue) {
+ /**
+ * This method is very lenient, use {@link #nodeBooleanValue} instead.
+ */
+ public static boolean lenientNodeBooleanValue(Object node, boolean defaultValue) {
if (node == null) {
return defaultValue;
}
- return nodeBooleanValue(node);
+ return lenientNodeBooleanValue(node);
}
- public static boolean nodeBooleanValue(Object node) {
+ /**
+ * This method is very lenient, use {@link #nodeBooleanValue} instead.
+ */
+ public static boolean lenientNodeBooleanValue(Object node) {
if (node instanceof Boolean) {
return (Boolean) node;
}
@@ -365,6 +371,17 @@ public class XContentMapValues {
return !(value.equals("false") || value.equals("0") || value.equals("off"));
}
+ public static boolean nodeBooleanValue(Object node) {
+ switch (node.toString()) {
+ case "true":
+ return true;
+ case "false":
+ return false;
+ default:
+ throw new IllegalArgumentException("Can't parse boolean value [" + node + "], expected [true] or [false]");
+ }
+ }
+
public static TimeValue nodeTimeValue(Object node, TimeValue defaultValue) {
if (node == null) {
return defaultValue;
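The rename makes the legacy coercion explicit: lenientNodeBooleanValue treats everything except "false", "0", and "off" as true, while the new strict nodeBooleanValue accepts only the literals "true" and "false" and rejects everything else. A small illustration of the difference:

    import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
    import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;

    public class BooleanLeniencyDemo {
        public static void main(String[] args) {
            System.out.println(lenientNodeBooleanValue("yes")); // true: anything but "false"/"0"/"off"
            System.out.println(lenientNodeBooleanValue("off")); // false
            System.out.println(nodeBooleanValue("true"));       // true
            try {
                nodeBooleanValue("yes"); // the strict parser rejects coerced values
            } catch (IllegalArgumentException e) {
                System.out.println(e.getMessage());
            }
        }
    }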
diff --git a/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java b/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java
index 1ab608761f..b51339aac9 100644
--- a/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java
+++ b/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java
@@ -22,6 +22,7 @@ package org.elasticsearch.discovery;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.multibindings.Multibinder;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.ExtensionPoint;
import org.elasticsearch.discovery.local.LocalDiscovery;
@@ -36,14 +37,17 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.function.Function;
/**
* A module for loading classes for node discovery.
*/
public class DiscoveryModule extends AbstractModule {
- public static final String DISCOVERY_TYPE_KEY = "discovery.type";
- public static final String ZEN_MASTER_SERVICE_TYPE_KEY = "discovery.zen.masterservice.type";
+ public static final Setting<String> DISCOVERY_TYPE_SETTING = new Setting<>("discovery.type",
+ settings -> DiscoveryNode.localNode(settings) ? "local" : "zen", Function.identity(), false, Setting.Scope.CLUSTER);
+ public static final Setting<String> ZEN_MASTER_SERVICE_TYPE_SETTING = new Setting<>("discovery.zen.masterservice.type",
+ "zen", Function.identity(), false, Setting.Scope.CLUSTER);
private final Settings settings;
private final List<Class<? extends UnicastHostsProvider>> unicastHostProviders = new ArrayList<>();
@@ -93,15 +97,14 @@ public class DiscoveryModule extends AbstractModule {
@Override
protected void configure() {
- String defaultType = DiscoveryNode.localNode(settings) ? "local" : "zen";
- String discoveryType = settings.get(DISCOVERY_TYPE_KEY, defaultType);
+ String discoveryType = DISCOVERY_TYPE_SETTING.get(settings);
Class<? extends Discovery> discoveryClass = discoveryTypes.get(discoveryType);
if (discoveryClass == null) {
throw new IllegalArgumentException("Unknown Discovery type [" + discoveryType + "]");
}
if (discoveryType.equals("local") == false) {
- String masterServiceTypeKey = settings.get(ZEN_MASTER_SERVICE_TYPE_KEY, "zen");
+ String masterServiceTypeKey = ZEN_MASTER_SERVICE_TYPE_SETTING.get(settings);
final Class<? extends ElectMasterService> masterService = masterServiceType.get(masterServiceTypeKey);
if (masterService == null) {
throw new IllegalArgumentException("Unknown master service type [" + masterServiceTypeKey + "]");
@@ -121,4 +124,4 @@ public class DiscoveryModule extends AbstractModule {
bind(Discovery.class).to(discoveryClass).asEagerSingleton();
bind(DiscoveryService.class).asEagerSingleton();
}
-}
\ No newline at end of file
+}
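Replacing the raw string keys with Setting instances moves the defaulting logic out of configure(): DISCOVERY_TYPE_SETTING computes its default from the rest of the settings ("local" versus "zen") instead of every call site repeating it. A hedged sketch of reading the setting, using only the accessors visible in this diff:

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.discovery.DiscoveryModule;

    public class DiscoveryTypeDemo {
        public static void main(String[] args) {
            // With no overrides the default function decides: "zen" unless
            // DiscoveryNode.localNode(settings) reports a local-only node.
            String type = DiscoveryModule.DISCOVERY_TYPE_SETTING.get(Settings.EMPTY);
            System.out.println(type); // expected "zen" for empty settings
        }
    }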
diff --git a/core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java b/core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java
index a82099658e..bef1c8fe5e 100644
--- a/core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java
+++ b/core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java
@@ -27,6 +27,7 @@ import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
@@ -39,8 +40,8 @@ import java.util.concurrent.TimeUnit;
*/
public class DiscoveryService extends AbstractLifecycleComponent<DiscoveryService> {
- public static final String SETTING_INITIAL_STATE_TIMEOUT = "discovery.initial_state_timeout";
- public static final String SETTING_DISCOVERY_SEED = "discovery.id.seed";
+ public static final Setting<TimeValue> INITIAL_STATE_TIMEOUT_SETTING = Setting.positiveTimeSetting("discovery.initial_state_timeout", TimeValue.timeValueSeconds(30), false, Setting.Scope.CLUSTER);
+ public static final Setting<Long> DISCOVERY_SEED_SETTING = Setting.longSetting("discovery.id.seed", 0L, Long.MIN_VALUE, false, Setting.Scope.CLUSTER);
private static class InitialStateListener implements InitialStateDiscoveryListener {
@@ -71,7 +72,7 @@ public class DiscoveryService extends AbstractLifecycleComponent<DiscoveryServic
super(settings);
this.discoverySettings = discoverySettings;
this.discovery = discovery;
- this.initialStateTimeout = settings.getAsTime(SETTING_INITIAL_STATE_TIMEOUT, TimeValue.timeValueSeconds(30));
+ this.initialStateTimeout = INITIAL_STATE_TIMEOUT_SETTING.get(settings);
}
public ClusterBlock getNoMasterBlock() {
@@ -132,7 +133,7 @@ public class DiscoveryService extends AbstractLifecycleComponent<DiscoveryServic
}
public static String generateNodeId(Settings settings) {
- Random random = Randomness.get(settings, DiscoveryService.SETTING_DISCOVERY_SEED);
+ Random random = Randomness.get(settings, DiscoveryService.DISCOVERY_SEED_SETTING);
return Strings.randomBase64UUID(random);
}
}
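The same conversion happens here: the initial-state timeout and the node-id seed now carry their defaults (30s and 0L) in the Setting definitions, and Randomness.get is handed the typed Setting<Long> rather than a bare key. A quick hedged sketch of the caller-side effect:

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.common.unit.TimeValue;
    import org.elasticsearch.discovery.DiscoveryService;

    public class DiscoverySettingsDemo {
        public static void main(String[] args) {
            // Defaults live in the Setting itself instead of at every call site.
            TimeValue timeout = DiscoveryService.INITIAL_STATE_TIMEOUT_SETTING.get(Settings.EMPTY);
            long seed = DiscoveryService.DISCOVERY_SEED_SETTING.get(Settings.EMPTY);
            System.out.println(timeout + " " + seed); // 30s 0
        }
    }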
diff --git a/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java
index 4a3771c8e5..03a14fe9cf 100644
--- a/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java
+++ b/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java
@@ -19,7 +19,6 @@
package org.elasticsearch.discovery.local;
-import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterService;
@@ -29,7 +28,6 @@ import org.elasticsearch.cluster.Diff;
import org.elasticsearch.cluster.IncompatibleClusterStateVersionException;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.cluster.node.DiscoveryNodeService;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingService;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
@@ -44,12 +42,10 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.discovery.AckClusterStatePublishResponseHandler;
import org.elasticsearch.discovery.BlockingClusterStatePublishResponseHandler;
import org.elasticsearch.discovery.Discovery;
-import org.elasticsearch.discovery.DiscoveryService;
import org.elasticsearch.discovery.DiscoverySettings;
import org.elasticsearch.discovery.DiscoveryStats;
import org.elasticsearch.discovery.InitialStateDiscoveryListener;
import org.elasticsearch.node.service.NodeService;
-import org.elasticsearch.transport.TransportService;
import java.util.HashSet;
import java.util.Queue;
@@ -67,17 +63,12 @@ public class LocalDiscovery extends AbstractLifecycleComponent<Discovery> implem
private static final LocalDiscovery[] NO_MEMBERS = new LocalDiscovery[0];
- private final TransportService transportService;
private final ClusterService clusterService;
- private final DiscoveryNodeService discoveryNodeService;
private RoutingService routingService;
private final ClusterName clusterName;
- private final Version version;
private final DiscoverySettings discoverySettings;
- private DiscoveryNode localNode;
-
private volatile boolean master = false;
private final AtomicBoolean initialStateSent = new AtomicBoolean();
@@ -89,14 +80,11 @@ public class LocalDiscovery extends AbstractLifecycleComponent<Discovery> implem
private volatile ClusterState lastProcessedClusterState;
@Inject
- public LocalDiscovery(Settings settings, ClusterName clusterName, TransportService transportService, ClusterService clusterService,
- DiscoveryNodeService discoveryNodeService, Version version, DiscoverySettings discoverySettings) {
+ public LocalDiscovery(Settings settings, ClusterName clusterName, ClusterService clusterService,
+ DiscoverySettings discoverySettings) {
super(settings);
this.clusterName = clusterName;
this.clusterService = clusterService;
- this.transportService = transportService;
- this.discoveryNodeService = discoveryNodeService;
- this.version = version;
this.discoverySettings = discoverySettings;
}
@@ -119,8 +107,6 @@ public class LocalDiscovery extends AbstractLifecycleComponent<Discovery> implem
clusterGroups.put(clusterName, clusterGroup);
}
logger.debug("Connected to cluster [{}]", clusterName);
- this.localNode = new DiscoveryNode(settings.get("name"), DiscoveryService.generateNodeId(settings), transportService.boundAddress().publishAddress(),
- discoveryNodeService.buildAttributes(), version);
clusterGroup.members().add(this);
@@ -147,7 +133,7 @@ public class LocalDiscovery extends AbstractLifecycleComponent<Discovery> implem
public ClusterState execute(ClusterState currentState) {
DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder();
for (LocalDiscovery discovery : clusterGroups.get(clusterName).members()) {
- nodesBuilder.put(discovery.localNode);
+ nodesBuilder.put(discovery.localNode());
}
nodesBuilder.localNodeId(master.localNode().id()).masterNodeId(master.localNode().id());
// remove the NO_MASTER block in this case
@@ -166,30 +152,9 @@ public class LocalDiscovery extends AbstractLifecycleComponent<Discovery> implem
}
});
} else if (firstMaster != null) {
- // update as fast as we can the local node state with the new metadata (so we create indices for example)
- final ClusterState masterState = firstMaster.clusterService.state();
- clusterService.submitStateUpdateTask("local-disco(detected_master)", new ClusterStateUpdateTask() {
- @Override
- public boolean runOnlyOnMaster() {
- return false;
- }
-
- @Override
- public ClusterState execute(ClusterState currentState) {
- // make sure we have the local node id set, we might need it as a result of the new metadata
- DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(currentState.nodes()).put(localNode).localNodeId(localNode.id());
- return ClusterState.builder(currentState).metaData(masterState.metaData()).nodes(nodesBuilder).build();
- }
-
- @Override
- public void onFailure(String source, Throwable t) {
- logger.error("unexpected failure during [{}]", t, source);
- }
- });
-
// tell the master to send the fact that we are here
final LocalDiscovery master = firstMaster;
- firstMaster.clusterService.submitStateUpdateTask("local-disco-receive(from node[" + localNode + "])", new ClusterStateUpdateTask() {
+ firstMaster.clusterService.submitStateUpdateTask("local-disco-receive(from node[" + localNode() + "])", new ClusterStateUpdateTask() {
@Override
public boolean runOnlyOnMaster() {
return false;
@@ -199,7 +164,7 @@ public class LocalDiscovery extends AbstractLifecycleComponent<Discovery> implem
public ClusterState execute(ClusterState currentState) {
DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder();
for (LocalDiscovery discovery : clusterGroups.get(clusterName).members()) {
- nodesBuilder.put(discovery.localNode);
+ nodesBuilder.put(discovery.localNode());
}
nodesBuilder.localNodeId(master.localNode().id()).masterNodeId(master.localNode().id());
return ClusterState.builder(currentState).nodes(nodesBuilder).build();
@@ -254,7 +219,7 @@ public class LocalDiscovery extends AbstractLifecycleComponent<Discovery> implem
final Set<String> newMembers = new HashSet<>();
for (LocalDiscovery discovery : clusterGroup.members()) {
- newMembers.add(discovery.localNode.id());
+ newMembers.add(discovery.localNode().id());
}
final LocalDiscovery master = firstMaster;
@@ -266,7 +231,7 @@ public class LocalDiscovery extends AbstractLifecycleComponent<Discovery> implem
@Override
public ClusterState execute(ClusterState currentState) {
- DiscoveryNodes newNodes = currentState.nodes().removeDeadMembers(newMembers, master.localNode.id());
+ DiscoveryNodes newNodes = currentState.nodes().removeDeadMembers(newMembers, master.localNode().id());
DiscoveryNodes.Delta delta = newNodes.delta(currentState.nodes());
if (delta.added()) {
logger.warn("No new nodes should be created when a new discovery view is accepted");
@@ -293,7 +258,7 @@ public class LocalDiscovery extends AbstractLifecycleComponent<Discovery> implem
@Override
public DiscoveryNode localNode() {
- return localNode;
+ return clusterService.localNode();
}
@Override
@@ -308,7 +273,7 @@ public class LocalDiscovery extends AbstractLifecycleComponent<Discovery> implem
@Override
public String nodeDescription() {
- return clusterName.value() + "/" + localNode.id();
+ return clusterName.value() + "/" + localNode().id();
}
@Override
@@ -323,7 +288,7 @@ public class LocalDiscovery extends AbstractLifecycleComponent<Discovery> implem
if (localDiscovery.master) {
continue;
}
- nodesToPublishTo.add(localDiscovery.localNode);
+ nodesToPublishTo.add(localDiscovery.localNode());
}
publish(members, clusterChangedEvent, new AckClusterStatePublishResponseHandler(nodesToPublishTo, ackListener));
}
@@ -359,7 +324,7 @@ public class LocalDiscovery extends AbstractLifecycleComponent<Discovery> implem
synchronized (this) {
// we do the marshaling intentionally, to check it works well...
// check whether we published the cluster state at least once and the node was in the cluster when we last published the cluster state
- if (discovery.lastProcessedClusterState != null && clusterChangedEvent.previousState().nodes().nodeExists(discovery.localNode.id())) {
+ if (discovery.lastProcessedClusterState != null && clusterChangedEvent.previousState().nodes().nodeExists(discovery.localNode().id())) {
// both conditions are true - which means we can try sending cluster state as diffs
if (clusterStateDiffBytes == null) {
Diff diff = clusterState.diff(clusterChangedEvent.previousState());
@@ -369,7 +334,7 @@ public class LocalDiscovery extends AbstractLifecycleComponent<Discovery> implem
}
try {
newNodeSpecificClusterState = discovery.lastProcessedClusterState.readDiffFrom(StreamInput.wrap(clusterStateDiffBytes)).apply(discovery.lastProcessedClusterState);
- logger.trace("sending diff cluster state version [{}] with size {} to [{}]", clusterState.version(), clusterStateDiffBytes.length, discovery.localNode.getName());
+ logger.trace("sending diff cluster state version [{}] with size {} to [{}]", clusterState.version(), clusterStateDiffBytes.length, discovery.localNode().getName());
} catch (IncompatibleClusterStateVersionException ex) {
logger.warn("incompatible cluster state version [{}] - resending complete cluster state", ex, clusterState.version());
}
@@ -378,7 +343,7 @@ public class LocalDiscovery extends AbstractLifecycleComponent<Discovery> implem
if (clusterStateBytes == null) {
clusterStateBytes = Builder.toBytes(clusterState);
}
- newNodeSpecificClusterState = ClusterState.Builder.fromBytes(clusterStateBytes, discovery.localNode);
+ newNodeSpecificClusterState = ClusterState.Builder.fromBytes(clusterStateBytes, discovery.localNode());
}
discovery.lastProcessedClusterState = newNodeSpecificClusterState;
}
@@ -423,17 +388,17 @@ public class LocalDiscovery extends AbstractLifecycleComponent<Discovery> implem
@Override
public void onFailure(String source, Throwable t) {
logger.error("unexpected failure during [{}]", t, source);
- publishResponseHandler.onFailure(discovery.localNode, t);
+ publishResponseHandler.onFailure(discovery.localNode(), t);
}
@Override
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
sendInitialStateEventIfNeeded();
- publishResponseHandler.onResponse(discovery.localNode);
+ publishResponseHandler.onResponse(discovery.localNode());
}
});
} else {
- publishResponseHandler.onResponse(discovery.localNode);
+ publishResponseHandler.onResponse(discovery.localNode());
}
}
diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java
index 6398f31a8f..55eaf78b7a 100644
--- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java
+++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java
@@ -90,15 +90,17 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;
public class ZenDiscovery extends AbstractLifecycleComponent<Discovery> implements Discovery, PingContextProvider {
public final static Setting<Boolean> REJOIN_ON_MASTER_GONE_SETTING = Setting.boolSetting("discovery.zen.rejoin_on_master_gone", true, true, Setting.Scope.CLUSTER);
- public final static String SETTING_PING_TIMEOUT = "discovery.zen.ping_timeout";
- public final static String SETTING_JOIN_TIMEOUT = "discovery.zen.join_timeout";
- public final static String SETTING_JOIN_RETRY_ATTEMPTS = "discovery.zen.join_retry_attempts";
- public final static String SETTING_JOIN_RETRY_DELAY = "discovery.zen.join_retry_delay";
- public final static String SETTING_MAX_PINGS_FROM_ANOTHER_MASTER = "discovery.zen.max_pings_from_another_master";
- public final static String SETTING_SEND_LEAVE_REQUEST = "discovery.zen.send_leave_request";
- public final static String SETTING_MASTER_ELECTION_FILTER_CLIENT = "discovery.zen.master_election.filter_client";
- public final static String SETTING_MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT = "discovery.zen.master_election.wait_for_joins_timeout";
- public final static String SETTING_MASTER_ELECTION_FILTER_DATA = "discovery.zen.master_election.filter_data";
+ public final static Setting<TimeValue> PING_TIMEOUT_SETTING = Setting.positiveTimeSetting("discovery.zen.ping_timeout", timeValueSeconds(3), false, Setting.Scope.CLUSTER);
+ public final static Setting<TimeValue> JOIN_TIMEOUT_SETTING = Setting.timeSetting("discovery.zen.join_timeout",
+ settings -> TimeValue.timeValueMillis(PING_TIMEOUT_SETTING.get(settings).millis() * 20).toString(), TimeValue.timeValueMillis(0), false, Setting.Scope.CLUSTER);
+ public final static Setting<Integer> JOIN_RETRY_ATTEMPTS_SETTING = Setting.intSetting("discovery.zen.join_retry_attempts", 3, 1, false, Setting.Scope.CLUSTER);
+ public final static Setting<TimeValue> JOIN_RETRY_DELAY_SETTING = Setting.positiveTimeSetting("discovery.zen.join_retry_delay", TimeValue.timeValueMillis(100), false, Setting.Scope.CLUSTER);
+ public final static Setting<Integer> MAX_PINGS_FROM_ANOTHER_MASTER_SETTING = Setting.intSetting("discovery.zen.max_pings_from_another_master", 3, 1, false, Setting.Scope.CLUSTER);
+ public final static Setting<Boolean> SEND_LEAVE_REQUEST_SETTING = Setting.boolSetting("discovery.zen.send_leave_request", true, false, Setting.Scope.CLUSTER);
+ public final static Setting<Boolean> MASTER_ELECTION_FILTER_CLIENT_SETTING = Setting.boolSetting("discovery.zen.master_election.filter_client", true, false, Setting.Scope.CLUSTER);
+ public final static Setting<TimeValue> MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT_SETTING = Setting.timeSetting("discovery.zen.master_election.wait_for_joins_timeout",
+ settings -> TimeValue.timeValueMillis(JOIN_TIMEOUT_SETTING.get(settings).millis() / 2).toString(), TimeValue.timeValueMillis(0), false, Setting.Scope.CLUSTER);
+ public final static Setting<Boolean> MASTER_ELECTION_FILTER_DATA_SETTING = Setting.boolSetting("discovery.zen.master_election.filter_data", false, false, Setting.Scope.CLUSTER);
public static final String DISCOVERY_REJOIN_ACTION_NAME = "internal:discovery/zen/rejoin";
@@ -164,26 +166,19 @@ public class ZenDiscovery extends AbstractLifecycleComponent<Discovery> implemen
this.discoverySettings = discoverySettings;
this.pingService = pingService;
this.electMaster = electMasterService;
- this.pingTimeout = settings.getAsTime(SETTING_PING_TIMEOUT, timeValueSeconds(3));
+ this.pingTimeout = PING_TIMEOUT_SETTING.get(settings);
- this.joinTimeout = settings.getAsTime(SETTING_JOIN_TIMEOUT, TimeValue.timeValueMillis(this.pingTimeout.millis() * 20));
- this.joinRetryAttempts = settings.getAsInt(SETTING_JOIN_RETRY_ATTEMPTS, 3);
- this.joinRetryDelay = settings.getAsTime(SETTING_JOIN_RETRY_DELAY, TimeValue.timeValueMillis(100));
- this.maxPingsFromAnotherMaster = settings.getAsInt(SETTING_MAX_PINGS_FROM_ANOTHER_MASTER, 3);
- this.sendLeaveRequest = settings.getAsBoolean(SETTING_SEND_LEAVE_REQUEST, true);
+ this.joinTimeout = JOIN_TIMEOUT_SETTING.get(settings);
+ this.joinRetryAttempts = JOIN_RETRY_ATTEMPTS_SETTING.get(settings);
+ this.joinRetryDelay = JOIN_RETRY_DELAY_SETTING.get(settings);
+ this.maxPingsFromAnotherMaster = MAX_PINGS_FROM_ANOTHER_MASTER_SETTING.get(settings);
+ this.sendLeaveRequest = SEND_LEAVE_REQUEST_SETTING.get(settings);
- this.masterElectionFilterClientNodes = settings.getAsBoolean(SETTING_MASTER_ELECTION_FILTER_CLIENT, true);
- this.masterElectionFilterDataNodes = settings.getAsBoolean(SETTING_MASTER_ELECTION_FILTER_DATA, false);
- this.masterElectionWaitForJoinsTimeout = settings.getAsTime(SETTING_MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT, TimeValue.timeValueMillis(joinTimeout.millis() / 2));
+ this.masterElectionFilterClientNodes = MASTER_ELECTION_FILTER_CLIENT_SETTING.get(settings);
+ this.masterElectionFilterDataNodes = MASTER_ELECTION_FILTER_DATA_SETTING.get(settings);
+ this.masterElectionWaitForJoinsTimeout = MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT_SETTING.get(settings);
this.rejoinOnMasterGone = REJOIN_ON_MASTER_GONE_SETTING.get(settings);
- if (this.joinRetryAttempts < 1) {
- throw new IllegalArgumentException("'" + SETTING_JOIN_RETRY_ATTEMPTS + "' must be a positive number. got [" + SETTING_JOIN_RETRY_ATTEMPTS + "]");
- }
- if (this.maxPingsFromAnotherMaster < 1) {
- throw new IllegalArgumentException("'" + SETTING_MAX_PINGS_FROM_ANOTHER_MASTER + "' must be a positive number. got [" + this.maxPingsFromAnotherMaster + "]");
- }
-
logger.debug("using ping_timeout [{}], join.timeout [{}], master_election.filter_client [{}], master_election.filter_data [{}]", this.pingTimeout, joinTimeout, masterElectionFilterClientNodes, masterElectionFilterDataNodes);
clusterSettings.addSettingsUpdateConsumer(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING, this::handleMinimumMasterNodesChanged, (value) -> {
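Note how the two hand-rolled range checks deleted above become declarative: Setting.intSetting("discovery.zen.join_retry_attempts", 3, 1, ...) carries a minimum of 1, so out-of-range values are rejected when the setting is read rather than by constructor code. A hedged illustration (the exception behavior on violation is an assumption about Setting's validation):

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.discovery.zen.ZenDiscovery;

    public class JoinRetryDemo {
        public static void main(String[] args) {
            // Default 3, declared minimum 1.
            System.out.println(ZenDiscovery.JOIN_RETRY_ATTEMPTS_SETTING.get(Settings.EMPTY)); // 3
            // A settings object carrying discovery.zen.join_retry_attempts = 0 would be
            // expected to fail inside get(...), presumably with IllegalArgumentException.
        }
    }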
diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java b/core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java
index 436ef6bc2b..62b0250315 100644
--- a/core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java
+++ b/core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java
@@ -21,6 +21,8 @@ package org.elasticsearch.discovery.zen.fd;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.component.AbstractComponent;
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Setting.Scope;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.threadpool.ThreadPool;
@@ -35,11 +37,11 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;
*/
public abstract class FaultDetection extends AbstractComponent {
- public static final String SETTING_CONNECT_ON_NETWORK_DISCONNECT = "discovery.zen.fd.connect_on_network_disconnect";
- public static final String SETTING_PING_INTERVAL = "discovery.zen.fd.ping_interval";
- public static final String SETTING_PING_TIMEOUT = "discovery.zen.fd.ping_timeout";
- public static final String SETTING_PING_RETRIES = "discovery.zen.fd.ping_retries";
- public static final String SETTING_REGISTER_CONNECTION_LISTENER = "discovery.zen.fd.register_connection_listener";
+ public static final Setting<Boolean> CONNECT_ON_NETWORK_DISCONNECT_SETTING = Setting.boolSetting("discovery.zen.fd.connect_on_network_disconnect", false, false, Scope.CLUSTER);
+ public static final Setting<TimeValue> PING_INTERVAL_SETTING = Setting.positiveTimeSetting("discovery.zen.fd.ping_interval", timeValueSeconds(1), false, Scope.CLUSTER);
+ public static final Setting<TimeValue> PING_TIMEOUT_SETTING = Setting.timeSetting("discovery.zen.fd.ping_timeout", timeValueSeconds(30), false, Scope.CLUSTER);
+ public static final Setting<Integer> PING_RETRIES_SETTING = Setting.intSetting("discovery.zen.fd.ping_retries", 3, false, Scope.CLUSTER);
+ public static final Setting<Boolean> REGISTER_CONNECTION_LISTENER_SETTING = Setting.boolSetting("discovery.zen.fd.register_connection_listener", true, false, Scope.CLUSTER);
protected final ThreadPool threadPool;
protected final ClusterName clusterName;
@@ -60,11 +62,11 @@ public abstract class FaultDetection extends AbstractComponent {
this.transportService = transportService;
this.clusterName = clusterName;
- this.connectOnNetworkDisconnect = settings.getAsBoolean(SETTING_CONNECT_ON_NETWORK_DISCONNECT, false);
- this.pingInterval = settings.getAsTime(SETTING_PING_INTERVAL, timeValueSeconds(1));
- this.pingRetryTimeout = settings.getAsTime(SETTING_PING_TIMEOUT, timeValueSeconds(30));
- this.pingRetryCount = settings.getAsInt(SETTING_PING_RETRIES, 3);
- this.registerConnectionListener = settings.getAsBoolean(SETTING_REGISTER_CONNECTION_LISTENER, true);
+ this.connectOnNetworkDisconnect = CONNECT_ON_NETWORK_DISCONNECT_SETTING.get(settings);
+ this.pingInterval = PING_INTERVAL_SETTING.get(settings);
+ this.pingRetryTimeout = PING_TIMEOUT_SETTING.get(settings);
+ this.pingRetryCount = PING_RETRIES_SETTING.get(settings);
+ this.registerConnectionListener = REGISTER_CONNECTION_LISTENER_SETTING.get(settings);
this.connectionListener = new FDConnectionListener();
if (registerConnectionListener) {
diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java b/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java
index 99feb4b7f7..e661002e64 100644
--- a/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java
+++ b/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java
@@ -31,6 +31,7 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.TimeValue;
@@ -58,6 +59,7 @@ import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
@@ -72,6 +74,7 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
+import java.util.function.Function;
import static org.elasticsearch.common.unit.TimeValue.readTimeValue;
import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.newConcurrentMap;
@@ -83,7 +86,8 @@ import static org.elasticsearch.discovery.zen.ping.ZenPing.PingResponse.readPing
public class UnicastZenPing extends AbstractLifecycleComponent<ZenPing> implements ZenPing {
public static final String ACTION_NAME = "internal:discovery/zen/unicast";
- public static final String DISCOVERY_ZEN_PING_UNICAST_HOSTS = "discovery.zen.ping.unicast.hosts";
+ public static final Setting<List<String>> DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING = Setting.listSetting("discovery.zen.ping.unicast.hosts", Collections.emptyList(), Function.identity(), false, Setting.Scope.CLUSTER);
+ public static final Setting<Integer> DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING = Setting.intSetting("discovery.zen.ping.unicast.concurrent_connects", 10, 0, false, Setting.Scope.CLUSTER);
// these limits are per-address
public static final int LIMIT_FOREIGN_PORTS_COUNT = 1;
@@ -135,13 +139,8 @@ public class UnicastZenPing extends AbstractLifecycleComponent<ZenPing> implemen
}
}
- this.concurrentConnects = this.settings.getAsInt("discovery.zen.ping.unicast.concurrent_connects", 10);
- String[] hostArr = this.settings.getAsArray(DISCOVERY_ZEN_PING_UNICAST_HOSTS);
- // trim the hosts
- for (int i = 0; i < hostArr.length; i++) {
- hostArr[i] = hostArr[i].trim();
- }
- List<String> hosts = CollectionUtils.arrayAsArrayList(hostArr);
+ this.concurrentConnects = DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING.get(settings);
+ List<String> hosts = DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.get(settings);
final int limitPortCounts;
if (hosts.isEmpty()) {
// if unicast hosts are not specified, fill with simple defaults on the local machine
@@ -170,7 +169,7 @@ public class UnicastZenPing extends AbstractLifecycleComponent<ZenPing> implemen
transportService.registerRequestHandler(ACTION_NAME, UnicastPingRequest::new, ThreadPool.Names.SAME, new UnicastPingRequestHandler());
ThreadFactory threadFactory = EsExecutors.daemonThreadFactory(settings, "[unicast_connect]");
- unicastConnectExecutor = EsExecutors.newScaling("unicast_connect", 0, concurrentConnects, 60, TimeUnit.SECONDS, threadFactory);
+ unicastConnectExecutor = EsExecutors.newScaling("unicast_connect", 0, concurrentConnects, 60, TimeUnit.SECONDS, threadFactory, threadPool.getThreadContext());
}
@Override
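The unicast host list likewise moves behind Setting.listSetting with an empty-list default, and the manual trimming loop disappears (presumably the list parsing normalizes entries; that is an assumption, since listSetting's internals are not part of this diff). Reading the hosts becomes one call:

    import java.util.List;

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.discovery.zen.ping.unicast.UnicastZenPing;

    public class UnicastHostsDemo {
        public static void main(String[] args) {
            // Empty settings yield the Collections.emptyList() default,
            // so UnicastZenPing falls back to simple local defaults.
            List<String> hosts = UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.get(Settings.EMPTY);
            System.out.println(hosts.isEmpty()); // true
        }
    }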
diff --git a/core/src/main/java/org/elasticsearch/env/Environment.java b/core/src/main/java/org/elasticsearch/env/Environment.java
index b6453a4707..65d62bd9e3 100644
--- a/core/src/main/java/org/elasticsearch/env/Environment.java
+++ b/core/src/main/java/org/elasticsearch/env/Environment.java
@@ -23,6 +23,7 @@ import org.apache.lucene.util.Constants;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import java.io.IOException;
@@ -33,6 +34,9 @@ import java.nio.file.FileStore;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.function.Function;
import static org.elasticsearch.common.Strings.cleanPath;
@@ -43,6 +47,15 @@ import static org.elasticsearch.common.Strings.cleanPath;
// TODO: move PathUtils to be package-private here instead of
// public+forbidden api!
public class Environment {
+ public static final Setting<String> PATH_HOME_SETTING = Setting.simpleString("path.home", false, Setting.Scope.CLUSTER);
+ public static final Setting<String> PATH_CONF_SETTING = Setting.simpleString("path.conf", false, Setting.Scope.CLUSTER);
+ public static final Setting<String> PATH_SCRIPTS_SETTING = Setting.simpleString("path.scripts", false, Setting.Scope.CLUSTER);
+ public static final Setting<List<String>> PATH_DATA_SETTING = Setting.listSetting("path.data", Collections.emptyList(), Function.identity(), false, Setting.Scope.CLUSTER);
+ public static final Setting<String> PATH_LOGS_SETTING = Setting.simpleString("path.logs", false, Setting.Scope.CLUSTER);
+ public static final Setting<String> PATH_PLUGINS_SETTING = Setting.simpleString("path.plugins", false, Setting.Scope.CLUSTER);
+ public static final Setting<List<String>> PATH_REPO_SETTING = Setting.listSetting("path.repo", Collections.emptyList(), Function.identity(), false, Setting.Scope.CLUSTER);
+ public static final Setting<String> PATH_SHARED_DATA_SETTING = Setting.simpleString("path.shared_data", false, Setting.Scope.CLUSTER);
+ public static final Setting<String> PIDFILE_SETTING = Setting.simpleString("pidfile", false, Setting.Scope.CLUSTER);
private final Settings settings;
@@ -95,64 +108,64 @@ public class Environment {
public Environment(Settings settings) {
this.settings = settings;
final Path homeFile;
- if (settings.get("path.home") != null) {
- homeFile = PathUtils.get(cleanPath(settings.get("path.home")));
+ if (PATH_HOME_SETTING.exists(settings)) {
+ homeFile = PathUtils.get(cleanPath(PATH_HOME_SETTING.get(settings)));
} else {
- throw new IllegalStateException("path.home is not configured");
+ throw new IllegalStateException(PATH_HOME_SETTING.getKey() + " is not configured");
}
- if (settings.get("path.conf") != null) {
- configFile = PathUtils.get(cleanPath(settings.get("path.conf")));
+ if (PATH_CONF_SETTING.exists(settings)) {
+ configFile = PathUtils.get(cleanPath(PATH_CONF_SETTING.get(settings)));
} else {
configFile = homeFile.resolve("config");
}
- if (settings.get("path.scripts") != null) {
- scriptsFile = PathUtils.get(cleanPath(settings.get("path.scripts")));
+ if (PATH_SCRIPTS_SETTING.exists(settings)) {
+ scriptsFile = PathUtils.get(cleanPath(PATH_SCRIPTS_SETTING.get(settings)));
} else {
scriptsFile = configFile.resolve("scripts");
}
- if (settings.get("path.plugins") != null) {
- pluginsFile = PathUtils.get(cleanPath(settings.get("path.plugins")));
+ if (PATH_PLUGINS_SETTING.exists(settings)) {
+ pluginsFile = PathUtils.get(cleanPath(PATH_PLUGINS_SETTING.get(settings)));
} else {
pluginsFile = homeFile.resolve("plugins");
}
- String[] dataPaths = settings.getAsArray("path.data");
- if (dataPaths.length > 0) {
- dataFiles = new Path[dataPaths.length];
- dataWithClusterFiles = new Path[dataPaths.length];
- for (int i = 0; i < dataPaths.length; i++) {
- dataFiles[i] = PathUtils.get(dataPaths[i]);
+ List<String> dataPaths = PATH_DATA_SETTING.get(settings);
+ if (dataPaths.isEmpty() == false) {
+ dataFiles = new Path[dataPaths.size()];
+ dataWithClusterFiles = new Path[dataPaths.size()];
+ for (int i = 0; i < dataPaths.size(); i++) {
+ dataFiles[i] = PathUtils.get(dataPaths.get(i));
dataWithClusterFiles[i] = dataFiles[i].resolve(ClusterName.clusterNameFromSettings(settings).value());
}
} else {
dataFiles = new Path[]{homeFile.resolve("data")};
dataWithClusterFiles = new Path[]{homeFile.resolve("data").resolve(ClusterName.clusterNameFromSettings(settings).value())};
}
- if (settings.get("path.shared_data") != null) {
- sharedDataFile = PathUtils.get(cleanPath(settings.get("path.shared_data")));
+ if (PATH_SHARED_DATA_SETTING.exists(settings)) {
+ sharedDataFile = PathUtils.get(cleanPath(PATH_SHARED_DATA_SETTING.get(settings)));
} else {
sharedDataFile = null;
}
- String[] repoPaths = settings.getAsArray("path.repo");
- if (repoPaths.length > 0) {
- repoFiles = new Path[repoPaths.length];
- for (int i = 0; i < repoPaths.length; i++) {
- repoFiles[i] = PathUtils.get(repoPaths[i]);
+ List<String> repoPaths = PATH_REPO_SETTING.get(settings);
+ if (repoPaths.isEmpty() == false) {
+ repoFiles = new Path[repoPaths.size()];
+ for (int i = 0; i < repoPaths.size(); i++) {
+ repoFiles[i] = PathUtils.get(repoPaths.get(i));
}
} else {
repoFiles = new Path[0];
}
- if (settings.get("path.logs") != null) {
- logsFile = PathUtils.get(cleanPath(settings.get("path.logs")));
+ if (PATH_LOGS_SETTING.exists(settings)) {
+ logsFile = PathUtils.get(cleanPath(PATH_LOGS_SETTING.get(settings)));
} else {
logsFile = homeFile.resolve("logs");
}
- if (settings.get("pidfile") != null) {
- pidFile = PathUtils.get(cleanPath(settings.get("pidfile")));
+ if (PIDFILE_SETTING.exists(settings)) {
+ pidFile = PathUtils.get(cleanPath(PIDFILE_SETTING.get(settings)));
} else {
pidFile = null;
}
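Each path in the rewritten constructor follows the same probe-then-read shape: Setting.exists(settings) replaces the old settings.get(key) != null check, and get(settings) replaces the raw lookup. A compact sketch of the pattern for the home path, reusing only calls shown in the hunk above:

    import java.nio.file.Path;

    import org.elasticsearch.common.io.PathUtils;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.env.Environment;

    import static org.elasticsearch.common.Strings.cleanPath;

    public class HomePathSketch {
        // Mirrors the constructor logic: an explicit setting wins, otherwise fail fast.
        static Path resolveHome(Settings settings) {
            if (Environment.PATH_HOME_SETTING.exists(settings)) {
                return PathUtils.get(cleanPath(Environment.PATH_HOME_SETTING.get(settings)));
            }
            throw new IllegalStateException(Environment.PATH_HOME_SETTING.getKey() + " is not configured");
        }
    }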
diff --git a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java
index 658a48ecc8..80c427c9cd 100644
--- a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java
+++ b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java
@@ -35,6 +35,8 @@ import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.FileSystemUtils;
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Setting.Scope;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
@@ -66,6 +68,7 @@ import java.util.Set;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.stream.Collectors;
import static java.util.Collections.unmodifiableSet;
@@ -73,7 +76,6 @@ import static java.util.Collections.unmodifiableSet;
* A component that holds all data paths for a single node.
*/
public class NodeEnvironment extends AbstractComponent implements Closeable {
-
public static class NodePath {
/* ${data.paths}/nodes/{node.id} */
public final Path path;
@@ -100,14 +102,14 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
* Resolves the given shards directory against this NodePath
*/
public Path resolve(ShardId shardId) {
- return resolve(shardId.index()).resolve(Integer.toString(shardId.id()));
+ return resolve(shardId.getIndex()).resolve(Integer.toString(shardId.id()));
}
/**
* Resolves the given indexes directory against this NodePath
*/
public Path resolve(Index index) {
- return indicesPath.resolve(index.name());
+ return indicesPath.resolve(index.getName());
}
@Override
@@ -127,24 +129,35 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
private final int localNodeId;
private final AtomicBoolean closed = new AtomicBoolean(false);
- private final Map<ShardId, InternalShardLock> shardLocks = new HashMap<>();
+ private final Map<ShardLockKey, InternalShardLock> shardLocks = new HashMap<>();
- // Setting to automatically append node id to custom data paths
- public static final String ADD_NODE_ID_TO_CUSTOM_PATH = "node.add_id_to_custom_path";
+ /**
+ * Maximum number of data nodes that should run in an environment.
+ */
+ public static final Setting<Integer> MAX_LOCAL_STORAGE_NODES_SETTING = Setting.intSetting("node.max_local_storage_nodes", 50, 1, false,
+ Scope.CLUSTER);
+
+ /**
+ * If true, automatically append the node id to custom data paths.
+ */
+ public static final Setting<Boolean> ADD_NODE_ID_TO_CUSTOM_PATH = Setting.boolSetting("node.add_id_to_custom_path", true, false,
+ Scope.CLUSTER);
- // If enabled, the [verbose] SegmentInfos.infoStream logging is sent to System.out:
- public static final String SETTING_ENABLE_LUCENE_SEGMENT_INFOS_TRACE = "node.enable_lucene_segment_infos_trace";
+ /**
+ * If true, the [verbose] SegmentInfos.infoStream logging is sent to System.out.
+ */
+ public static final Setting<Boolean> ENABLE_LUCENE_SEGMENT_INFOS_TRACE_SETTING = Setting
+ .boolSetting("node.enable_lucene_segment_infos_trace", false, false, Scope.CLUSTER);
public static final String NODES_FOLDER = "nodes";
public static final String INDICES_FOLDER = "indices";
public static final String NODE_LOCK_FILENAME = "node.lock";
@Inject
- @SuppressForbidden(reason = "System.out.*")
public NodeEnvironment(Settings settings, Environment environment) throws IOException {
super(settings);
- this.addNodeId = settings.getAsBoolean(ADD_NODE_ID_TO_CUSTOM_PATH, true);
+ this.addNodeId = ADD_NODE_ID_TO_CUSTOM_PATH.get(settings);
if (!DiscoveryNode.nodeRequiresLocalStorage(settings)) {
nodePaths = null;
@@ -160,7 +173,7 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
int localNodeId = -1;
IOException lastException = null;
- int maxLocalStorageNodes = settings.getAsInt("node.max_local_storage_nodes", 50);
+ int maxLocalStorageNodes = MAX_LOCAL_STORAGE_NODES_SETTING.get(settings);
for (int possibleLockId = 0; possibleLockId < maxLocalStorageNodes; possibleLockId++) {
for (int dirIndex = 0; dirIndex < environment.dataWithClusterFiles().length; dirIndex++) {
Path dir = environment.dataWithClusterFiles()[dirIndex].resolve(NODES_FOLDER).resolve(Integer.toString(possibleLockId));
@@ -209,9 +222,7 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
maybeLogPathDetails();
maybeLogHeapDetails();
- if (settings.getAsBoolean(SETTING_ENABLE_LUCENE_SEGMENT_INFOS_TRACE, false)) {
- SegmentInfos.setInfoStream(System.out);
- }
+ applySegmentInfosTrace(settings);
}
private static void releaseAndNullLocks(Lock[] locks) {
@@ -302,6 +313,13 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
logger.info("heap size [{}], compressed ordinary object pointers [{}]", maxHeapSize, useCompressedOops);
}
+ @SuppressForbidden(reason = "System.out.*")
+ static void applySegmentInfosTrace(Settings settings) {
+ if (ENABLE_LUCENE_SEGMENT_INFOS_TRACE_SETTING.get(settings)) {
+ SegmentInfos.setInfoStream(System.out);
+ }
+ }
+
private static String toString(Collection<String> items) {
StringBuilder b = new StringBuilder();
for(String item : items) {
@@ -422,11 +440,11 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
* @param indexSettings settings for the index being deleted
*/
public void deleteIndexDirectoryUnderLock(Index index, IndexSettings indexSettings) throws IOException {
- final Path[] indexPaths = indexPaths(index);
+ final Path[] indexPaths = indexPaths(index.getName());
logger.trace("deleting index {} directory, paths({}): [{}]", index, indexPaths.length, indexPaths);
IOUtils.rm(indexPaths);
if (indexSettings.hasCustomDataPath()) {
- Path customLocation = resolveCustomLocation(indexSettings, index.name());
+ Path customLocation = resolveCustomLocation(indexSettings, index.getName());
logger.trace("deleting custom index {} directory [{}]", index, customLocation);
IOUtils.rm(customLocation);
}
@@ -487,23 +505,24 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
* write operation on a shards data directory like deleting files, creating a new index writer
* or recover from a different shard instance into it. If the shard lock can not be acquired
* an {@link org.apache.lucene.store.LockObtainFailedException} is thrown
- * @param id the shard ID to lock
+ * @param shardId the shard ID to lock
* @param lockTimeoutMS the lock timeout in milliseconds
* @return the shard lock. Call {@link ShardLock#close()} to release the lock
* @throws IOException if an IOException occurs.
*/
- public ShardLock shardLock(final ShardId id, long lockTimeoutMS) throws IOException {
- logger.trace("acquiring node shardlock on [{}], timeout [{}]", id, lockTimeoutMS);
+ public ShardLock shardLock(final ShardId shardId, long lockTimeoutMS) throws IOException {
+ logger.trace("acquiring node shardlock on [{}], timeout [{}]", shardId, lockTimeoutMS);
+ final ShardLockKey shardLockKey = new ShardLockKey(shardId);
final InternalShardLock shardLock;
final boolean acquired;
synchronized (shardLocks) {
- if (shardLocks.containsKey(id)) {
- shardLock = shardLocks.get(id);
+ if (shardLocks.containsKey(shardLockKey)) {
+ shardLock = shardLocks.get(shardLockKey);
shardLock.incWaitCount();
acquired = false;
} else {
- shardLock = new InternalShardLock(id);
- shardLocks.put(id, shardLock);
+ shardLock = new InternalShardLock(shardLockKey);
+ shardLocks.put(shardLockKey, shardLock);
acquired = true;
}
}
@@ -518,22 +537,68 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
}
}
}
- logger.trace("successfully acquired shardlock for [{}]", id);
- return new ShardLock(id) { // new instance prevents double closing
+ logger.trace("successfully acquired shardlock for [{}]", shardId);
+ return new ShardLock(shardId) { // new instance prevents double closing
@Override
protected void closeInternal() {
shardLock.release();
- logger.trace("released shard lock for [{}]", id);
+ logger.trace("released shard lock for [{}]", shardLockKey);
}
};
}
/**
* Returns all currently locked shards.
+ *
+ * Note: the shard ids returned do not contain a valid index UUID.
*/
public Set<ShardId> lockedShards() {
synchronized (shardLocks) {
- return unmodifiableSet(new HashSet<>(shardLocks.keySet()));
+ Set<ShardId> lockedShards = shardLocks.keySet().stream()
+ .map(shardLockKey -> new ShardId(new Index(shardLockKey.indexName, "_na_"), shardLockKey.shardId)).collect(Collectors.toSet());
+ return unmodifiableSet(lockedShards);
+ }
+ }
+
+ // a key for the shard lock. we can't use ShardId, because it contains
+ // the index uuid, but we want the lock semantics to be the same as how we map indices to disk folders, i.e., without the uuid (for now).
+ private final class ShardLockKey {
+ final String indexName;
+ final int shardId;
+
+ public ShardLockKey(final ShardId shardId) {
+ this.indexName = shardId.getIndexName();
+ this.shardId = shardId.id();
+ }
+
+ @Override
+ public String toString() {
+ return "[" + indexName + "][" + shardId + "]";
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+
+ ShardLockKey that = (ShardLockKey) o;
+
+ if (shardId != that.shardId) {
+ return false;
+ }
+ return indexName.equals(that.indexName);
+
+ }
+
+ @Override
+ public int hashCode() {
+ int result = indexName.hashCode();
+ result = 31 * result + shardId;
+ return result;
}
}
@@ -546,10 +611,10 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
*/
private final Semaphore mutex = new Semaphore(1);
private int waitCount = 1; // guarded by shardLocks
- private ShardId shardId;
+ private final ShardLockKey lockKey;
- InternalShardLock(ShardId id) {
- shardId = id;
+ InternalShardLock(ShardLockKey id) {
+ lockKey = id;
mutex.acquireUninterruptibly();
}
@@ -569,10 +634,10 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
synchronized (shardLocks) {
assert waitCount > 0 : "waitCount is " + waitCount + " but should be > 0";
--waitCount;
- logger.trace("shard lock wait count for [{}] is now [{}]", shardId, waitCount);
+ logger.trace("shard lock wait count for [{}] is now [{}]", lockKey, waitCount);
if (waitCount == 0) {
- logger.trace("last shard lock wait decremented, removing lock for [{}]", shardId);
- InternalShardLock remove = shardLocks.remove(shardId);
+ logger.trace("last shard lock wait decremented, removing lock for [{}]", lockKey);
+ InternalShardLock remove = shardLocks.remove(lockKey);
assert remove != null : "Removed lock was null";
}
}
@@ -581,11 +646,11 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
void acquire(long timeoutInMillis) throws LockObtainFailedException{
try {
if (mutex.tryAcquire(timeoutInMillis, TimeUnit.MILLISECONDS) == false) {
- throw new LockObtainFailedException("Can't lock shard " + shardId + ", timed out after " + timeoutInMillis + "ms");
+ throw new LockObtainFailedException("Can't lock shard " + lockKey + ", timed out after " + timeoutInMillis + "ms");
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
- throw new LockObtainFailedException("Can't lock shard " + shardId + ", interrupted", e);
+ throw new LockObtainFailedException("Can't lock shard " + lockKey + ", interrupted", e);
}
}
}
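
Taken together, the hunks above implement a reference-counted lock map: keys compare by (index name, shard id) so that ShardIds differing only in UUID contend on the same lock, every waiter bumps a count, and the entry is removed when the last waiter releases. A self-contained sketch of that mechanism with plain JDK types, not the real InternalShardLock:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.Semaphore;
    import java.util.concurrent.TimeUnit;

    // Reference-counted lock map: one entry per key, removed when the last
    // waiter releases. Keys are expected to implement equals/hashCode, as
    // ShardLockKey does above.
    final class RefCountedLocks<K> {
        private final Map<K, Entry> locks = new HashMap<>();

        private static final class Entry {
            final Semaphore mutex = new Semaphore(1);
            int waitCount = 1; // guarded by the enclosing locks map
        }

        AutoCloseable acquire(K key, long timeoutMillis) throws InterruptedException {
            final Entry entry;
            synchronized (locks) {
                Entry existing = locks.get(key);
                if (existing != null) {
                    existing.waitCount++; // another waiter on the same key
                    entry = existing;
                } else {
                    entry = new Entry();
                    locks.put(key, entry);
                }
            }
            if (entry.mutex.tryAcquire(timeoutMillis, TimeUnit.MILLISECONDS) == false) {
                release(key, entry, false);
                throw new IllegalStateException("timed out waiting for shard lock on " + key);
            }
            return () -> release(key, entry, true); // closing the handle releases the lock
        }

        private void release(K key, Entry entry, boolean locked) {
            if (locked) {
                entry.mutex.release();
            }
            synchronized (locks) {
                if (--entry.waitCount == 0) { // last waiter removes the map entry
                    locks.remove(key);
                }
            }
        }
    }
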
@@ -628,11 +693,11 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
/**
* Returns all index paths.
*/
- public Path[] indexPaths(Index index) {
+ public Path[] indexPaths(String indexName) {
assert assertEnvIsLocked();
Path[] indexPaths = new Path[nodePaths.length];
for (int i = 0; i < nodePaths.length; i++) {
- indexPaths[i] = nodePaths[i].indicesPath.resolve(index.name());
+ indexPaths[i] = nodePaths[i].indicesPath.resolve(indexName);
}
return indexPaths;
}
@@ -691,14 +756,14 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
}
assert assertEnvIsLocked();
final Set<ShardId> shardIds = new HashSet<>();
- String indexName = index.name();
+ String indexName = index.getName();
for (final NodePath nodePath : nodePaths) {
Path location = nodePath.indicesPath;
if (Files.isDirectory(location)) {
try (DirectoryStream<Path> indexStream = Files.newDirectoryStream(location)) {
for (Path indexPath : indexStream) {
if (indexName.equals(indexPath.getFileName().toString())) {
- shardIds.addAll(findAllShardsForIndex(indexPath));
+ shardIds.addAll(findAllShardsForIndex(indexPath, index));
}
}
}
@@ -707,16 +772,16 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
return shardIds;
}
- private static Set<ShardId> findAllShardsForIndex(Path indexPath) throws IOException {
+ private static Set<ShardId> findAllShardsForIndex(Path indexPath, Index index) throws IOException {
+ assert indexPath.getFileName().toString().equals(index.getName());
Set<ShardId> shardIds = new HashSet<>();
if (Files.isDirectory(indexPath)) {
try (DirectoryStream<Path> stream = Files.newDirectoryStream(indexPath)) {
- String currentIndex = indexPath.getFileName().toString();
for (Path shardPath : stream) {
String fileName = shardPath.getFileName().toString();
if (Files.isDirectory(shardPath) && fileName.chars().allMatch(Character::isDigit)) {
int shardId = Integer.parseInt(fileName);
- ShardId id = new ShardId(currentIndex, shardId);
+ ShardId id = new ShardId(index, shardId);
shardIds.add(id);
}
}
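
findAllShardsForIndex now takes the Index so the returned ShardIds carry its UUID instead of being rebuilt from the folder name alone; the directory scan itself is unchanged. A self-contained sketch of that scan, simplified to return bare shard numbers:

    import java.io.IOException;
    import java.nio.file.DirectoryStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.HashSet;
    import java.util.Set;

    // Sketch of the scan above: only direct subdirectories of the index folder
    // whose names are all digits count as shard directories.
    final class ShardFolderScan {
        static Set<Integer> findShardIds(Path indexPath) throws IOException {
            Set<Integer> ids = new HashSet<>();
            if (Files.isDirectory(indexPath)) {
                try (DirectoryStream<Path> stream = Files.newDirectoryStream(indexPath)) {
                    for (Path shardPath : stream) {
                        String name = shardPath.getFileName().toString();
                        if (Files.isDirectory(shardPath) && name.chars().allMatch(Character::isDigit)) {
                            ids.add(Integer.parseInt(name));
                        }
                    }
                }
            }
            return ids;
        }
    }
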
@@ -827,7 +892,7 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
* @param shardId shard to resolve the path to
*/
public Path resolveCustomLocation(IndexSettings indexSettings, final ShardId shardId) {
- return resolveCustomLocation(indexSettings, shardId.index().name()).resolve(Integer.toString(shardId.id()));
+ return resolveCustomLocation(indexSettings, shardId.getIndexName()).resolve(Integer.toString(shardId.id()));
}
/**
diff --git a/core/src/main/java/org/elasticsearch/gateway/DanglingIndicesState.java b/core/src/main/java/org/elasticsearch/gateway/DanglingIndicesState.java
index d2bbeafed4..e2fcb56b1e 100644
--- a/core/src/main/java/org/elasticsearch/gateway/DanglingIndicesState.java
+++ b/core/src/main/java/org/elasticsearch/gateway/DanglingIndicesState.java
@@ -120,7 +120,7 @@ public class DanglingIndicesState extends AbstractComponent {
IndexMetaData indexMetaData = metaStateService.loadIndexState(indexName);
if (indexMetaData != null) {
logger.info("[{}] dangling index, exists on local file system, but not in cluster metadata, auto import to cluster state", indexName);
- if (!indexMetaData.getIndex().equals(indexName)) {
+ if (!indexMetaData.getIndex().getName().equals(indexName)) {
logger.info("dangled index directory name is [{}], state name is [{}], renaming to directory name", indexName, indexMetaData.getIndex());
indexMetaData = IndexMetaData.builder(indexMetaData).index(indexName).build();
}
diff --git a/core/src/main/java/org/elasticsearch/gateway/Gateway.java b/core/src/main/java/org/elasticsearch/gateway/Gateway.java
index bbb2670e19..c42cc43dac 100644
--- a/core/src/main/java/org/elasticsearch/gateway/Gateway.java
+++ b/core/src/main/java/org/elasticsearch/gateway/Gateway.java
@@ -99,7 +99,7 @@ public class Gateway extends AbstractComponent implements ClusterStateListener {
electedGlobalState = nodeState.metaData();
}
for (ObjectCursor<IndexMetaData> cursor : nodeState.metaData().indices().values()) {
- indices.addTo(cursor.value.getIndex(), 1);
+ indices.addTo(cursor.value.getIndex().getName(), 1);
}
}
if (found < requiredAllocation) {
diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java
index 117a0c6959..e90cb750cf 100644
--- a/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java
+++ b/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java
@@ -34,7 +34,6 @@ import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.util.MultiDataPathUpgrader;
import org.elasticsearch.env.NodeEnvironment;
import java.nio.file.DirectoryStream;
@@ -77,7 +76,6 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL
if (DiscoveryNode.dataNode(settings)) {
ensureNoPre019ShardState(nodeEnv);
- MultiDataPathUpgrader.upgradeMultiDataPath(nodeEnv, logger);
}
if (DiscoveryNode.masterNode(settings) || DiscoveryNode.dataNode(settings)) {
@@ -132,10 +130,10 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL
for (IndexMetaData indexMetaData : newMetaData) {
IndexMetaData indexMetaDataOnDisk = null;
if (indexMetaData.getState().equals(IndexMetaData.State.CLOSE)) {
- indexMetaDataOnDisk = metaStateService.loadIndexState(indexMetaData.getIndex());
+ indexMetaDataOnDisk = metaStateService.loadIndexState(indexMetaData.getIndex().getName());
}
if (indexMetaDataOnDisk != null) {
- newPreviouslyWrittenIndices.add(indexMetaDataOnDisk.getIndex());
+ newPreviouslyWrittenIndices.add(indexMetaDataOnDisk.getIndex().getName());
}
}
newPreviouslyWrittenIndices.addAll(previouslyWrittenIndices);
@@ -291,19 +289,19 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL
}
Set<String> indices = new HashSet<>();
for (ShardRouting routing : newRoutingNode) {
- indices.add(routing.index());
+ indices.add(routing.index().getName());
}
// we have to check the meta data also: closed indices will not appear in the routing table, but we must still write the state if we have it written on disk previously
for (IndexMetaData indexMetaData : state.metaData()) {
boolean isOrWasClosed = indexMetaData.getState().equals(IndexMetaData.State.CLOSE);
// if the index is open we might still have to write the state if it just transitioned from closed to open
// so we have to check for that as well.
- IndexMetaData previousMetaData = previousState.metaData().getIndices().get(indexMetaData.getIndex());
+ IndexMetaData previousMetaData = previousState.metaData().index(indexMetaData.getIndex());
if (previousMetaData != null) {
isOrWasClosed = isOrWasClosed || previousMetaData.getState().equals(IndexMetaData.State.CLOSE);
}
- if (previouslyWrittenIndices.contains(indexMetaData.getIndex()) && isOrWasClosed) {
- indices.add(indexMetaData.getIndex());
+ if (previouslyWrittenIndices.contains(indexMetaData.getIndex().getName()) && isOrWasClosed) {
+ indices.add(indexMetaData.getIndex().getName());
}
}
return indices;
@@ -314,7 +312,7 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL
relevantIndices = new HashSet<>();
// we have to iterate over the metadata to make sure we also capture closed indices
for (IndexMetaData indexMetaData : state.metaData()) {
- relevantIndices.add(indexMetaData.getIndex());
+ relevantIndices.add(indexMetaData.getIndex().getName());
}
return relevantIndices;
}
diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java
index 80e3be7809..af565a6002 100644
--- a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java
+++ b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java
@@ -36,6 +36,7 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.discovery.DiscoveryService;
@@ -49,6 +50,21 @@ import java.util.concurrent.atomic.AtomicBoolean;
*/
public class GatewayService extends AbstractLifecycleComponent<GatewayService> implements ClusterStateListener {
+ public static final Setting<Integer> EXPECTED_NODES_SETTING = Setting.intSetting(
+ "gateway.expected_nodes", -1, -1, false, Setting.Scope.CLUSTER);
+ public static final Setting<Integer> EXPECTED_DATA_NODES_SETTING = Setting.intSetting(
+ "gateway.expected_data_nodes", -1, -1, false, Setting.Scope.CLUSTER);
+ public static final Setting<Integer> EXPECTED_MASTER_NODES_SETTING = Setting.intSetting(
+ "gateway.expected_master_nodes", -1, -1, false, Setting.Scope.CLUSTER);
+ public static final Setting<TimeValue> RECOVER_AFTER_TIME_SETTING = Setting.positiveTimeSetting(
+ "gateway.recover_after_time", TimeValue.timeValueMillis(0), false, Setting.Scope.CLUSTER);
+ public static final Setting<Integer> RECOVER_AFTER_NODES_SETTING = Setting.intSetting(
+ "gateway.recover_after_nodes", -1, -1, false, Setting.Scope.CLUSTER);
+ public static final Setting<Integer> RECOVER_AFTER_DATA_NODES_SETTING = Setting.intSetting(
+ "gateway.recover_after_data_nodes", -1, -1, false, Setting.Scope.CLUSTER);
+ public static final Setting<Integer> RECOVER_AFTER_MASTER_NODES_SETTING = Setting.intSetting(
+ "gateway.recover_after_master_nodes", 0, 0, false, Setting.Scope.CLUSTER);
+
public static final ClusterBlock STATE_NOT_RECOVERED_BLOCK = new ClusterBlock(1, "state not recovered / initialized", true, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL);
public static final TimeValue DEFAULT_RECOVER_AFTER_TIME_IF_EXPECTED_NODES_IS_SET = TimeValue.timeValueMinutes(5);
@@ -84,20 +100,26 @@ public class GatewayService extends AbstractLifecycleComponent<GatewayService> i
this.discoveryService = discoveryService;
this.threadPool = threadPool;
// allows controlling a delay for when indices will get created
- this.expectedNodes = this.settings.getAsInt("gateway.expected_nodes", -1);
- this.expectedDataNodes = this.settings.getAsInt("gateway.expected_data_nodes", -1);
- this.expectedMasterNodes = this.settings.getAsInt("gateway.expected_master_nodes", -1);
-
- TimeValue defaultRecoverAfterTime = null;
- if (expectedNodes >= 0 || expectedDataNodes >= 0 || expectedMasterNodes >= 0) {
- defaultRecoverAfterTime = DEFAULT_RECOVER_AFTER_TIME_IF_EXPECTED_NODES_IS_SET;
+ this.expectedNodes = EXPECTED_NODES_SETTING.get(this.settings);
+ this.expectedDataNodes = EXPECTED_DATA_NODES_SETTING.get(this.settings);
+ this.expectedMasterNodes = EXPECTED_MASTER_NODES_SETTING.get(this.settings);
+
+ if (RECOVER_AFTER_TIME_SETTING.exists(this.settings)) {
+ recoverAfterTime = RECOVER_AFTER_TIME_SETTING.get(this.settings);
+ } else if (expectedNodes >= 0 || expectedDataNodes >= 0 || expectedMasterNodes >= 0) {
+ recoverAfterTime = DEFAULT_RECOVER_AFTER_TIME_IF_EXPECTED_NODES_IS_SET;
+ } else {
+ recoverAfterTime = null;
}
-
- this.recoverAfterTime = this.settings.getAsTime("gateway.recover_after_time", defaultRecoverAfterTime);
- this.recoverAfterNodes = this.settings.getAsInt("gateway.recover_after_nodes", -1);
- this.recoverAfterDataNodes = this.settings.getAsInt("gateway.recover_after_data_nodes", -1);
+ this.recoverAfterNodes = RECOVER_AFTER_NODES_SETTING.get(this.settings);
+ this.recoverAfterDataNodes = RECOVER_AFTER_DATA_NODES_SETTING.get(this.settings);
// default the recover after master nodes to the minimum master nodes in the discovery
- this.recoverAfterMasterNodes = this.settings.getAsInt("gateway.recover_after_master_nodes", settings.getAsInt("discovery.zen.minimum_master_nodes", -1));
+ if (RECOVER_AFTER_MASTER_NODES_SETTING.exists(this.settings)) {
+ recoverAfterMasterNodes = RECOVER_AFTER_MASTER_NODES_SETTING.get(this.settings);
+ } else {
+ // TODO: change me once the minimum_master_nodes is changed too
+ recoverAfterMasterNodes = settings.getAsInt("discovery.zen.minimum_master_nodes", -1);
+ }
// Add the not-recovered block as an initial state block; we don't allow anything until
this.clusterService.addInitialStateBlock(STATE_NOT_RECOVERED_BLOCK);
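
The constructor change above makes the recover_after_time default explicit: an explicitly configured value wins, otherwise the five-minute default applies only when any expected_* count is set, and otherwise there is no recover-after delay at all. A compact sketch of that resolution, with java.time.Duration standing in for TimeValue:

    import java.time.Duration;

    // Sketch of the recoverAfterTime resolution introduced above.
    final class RecoverAfterTime {
        static Duration resolve(Duration explicit, int expectedNodes,
                                int expectedDataNodes, int expectedMasterNodes) {
            if (explicit != null) {
                return explicit;                 // setting present: use it as-is
            } else if (expectedNodes >= 0 || expectedDataNodes >= 0 || expectedMasterNodes >= 0) {
                return Duration.ofMinutes(5);    // DEFAULT_RECOVER_AFTER_TIME_IF_EXPECTED_NODES_IS_SET
            } else {
                return null;                     // no delay configured
            }
        }

        public static void main(String[] args) {
            System.out.println(resolve(null, 3, -1, -1)); // PT5M: expected_nodes is set
        }
    }
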
diff --git a/core/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java b/core/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java
index 0e151cec5e..041b8cafec 100644
--- a/core/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java
+++ b/core/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java
@@ -116,7 +116,7 @@ public class LocalAllocateDangledIndices extends AbstractComponent {
public void messageReceived(final AllocateDangledRequest request, final TransportChannel channel) throws Exception {
String[] indexNames = new String[request.indices.length];
for (int i = 0; i < request.indices.length; i++) {
- indexNames[i] = request.indices[i].getIndex();
+ indexNames[i] = request.indices[i].getIndex().getName();
}
clusterService.submitStateUpdateTask("allocation dangled indices " + Arrays.toString(indexNames), new ClusterStateUpdateTask() {
@Override
@@ -131,10 +131,10 @@ public class LocalAllocateDangledIndices extends AbstractComponent {
boolean importNeeded = false;
StringBuilder sb = new StringBuilder();
for (IndexMetaData indexMetaData : request.indices) {
- if (currentState.metaData().hasIndex(indexMetaData.getIndex())) {
+ if (currentState.metaData().hasIndex(indexMetaData.getIndex().getName())) {
continue;
}
- if (currentState.metaData().hasAlias(indexMetaData.getIndex())) {
+ if (currentState.metaData().hasAlias(indexMetaData.getIndex().getName())) {
logger.warn("ignoring dangled index [{}] on node [{}] due to an existing alias with the same name",
indexMetaData.getIndex(), request.fromNode);
continue;
diff --git a/core/src/main/java/org/elasticsearch/gateway/MetaStateService.java b/core/src/main/java/org/elasticsearch/gateway/MetaStateService.java
index 43f9ffbe26..6f38b039fe 100644
--- a/core/src/main/java/org/elasticsearch/gateway/MetaStateService.java
+++ b/core/src/main/java/org/elasticsearch/gateway/MetaStateService.java
@@ -109,7 +109,7 @@ public class MetaStateService extends AbstractComponent {
*/
@Nullable
IndexMetaData loadIndexState(String index) throws IOException {
- return indexStateFormat.loadLatestState(logger, nodeEnv.indexPaths(new Index(index)));
+ return indexStateFormat.loadLatestState(logger, nodeEnv.indexPaths(index));
}
/**
@@ -132,8 +132,7 @@ public class MetaStateService extends AbstractComponent {
void writeIndex(String reason, IndexMetaData indexMetaData, @Nullable IndexMetaData previousIndexMetaData) throws Exception {
logger.trace("[{}] writing state, reason [{}]", indexMetaData.getIndex(), reason);
try {
- indexStateFormat.write(indexMetaData, indexMetaData.getVersion(),
- nodeEnv.indexPaths(new Index(indexMetaData.getIndex())));
+ indexStateFormat.write(indexMetaData, indexMetaData.getVersion(), nodeEnv.indexPaths(indexMetaData.getIndex().getName()));
} catch (Throwable ex) {
logger.warn("[{}]: failed to write index state", ex, indexMetaData.getIndex());
throw new IOException("failed to write state for [" + indexMetaData.getIndex() + "]", ex);
diff --git a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java
index 0c4431ef89..4ac45a3e2f 100644
--- a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java
+++ b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java
@@ -32,12 +32,12 @@ import org.elasticsearch.cluster.routing.allocation.decider.Decision;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.IndexSettings;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
+import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -89,9 +89,9 @@ public abstract class PrimaryShardAllocator extends AbstractComponent {
continue;
}
- final IndexMetaData indexMetaData = metaData.index(shard.getIndex());
- final IndexSettings indexSettings = new IndexSettings(indexMetaData, settings);
-
+ final IndexMetaData indexMetaData = metaData.index(shard.getIndexName());
+ // don't go wild here and create a new IndexSettings object for every shard; this could cause a lot of garbage
+ // on cluster restart if we allocate a boat load of shards
if (shard.allocatedPostIndexCreate(indexMetaData) == false) {
// when we create a fresh index
continue;
@@ -107,13 +107,13 @@ public abstract class PrimaryShardAllocator extends AbstractComponent {
final Set<String> lastActiveAllocationIds = indexMetaData.activeAllocationIds(shard.id());
final boolean snapshotRestore = shard.restoreSource() != null;
- final boolean recoverOnAnyNode = recoverOnAnyNode(indexSettings);
+ final boolean recoverOnAnyNode = recoverOnAnyNode(indexMetaData);
final NodesAndVersions nodesAndVersions;
final boolean enoughAllocationsFound;
if (lastActiveAllocationIds.isEmpty()) {
- assert indexSettings.getIndexVersionCreated().before(Version.V_3_0_0) : "trying to allocated a primary with an empty allocation id set, but index is new";
+ assert Version.indexCreated(indexMetaData.getSettings()).before(Version.V_3_0_0) : "trying to allocate a primary with an empty allocation id set, but index is new";
// when we load an old index (after upgrading cluster) or restore a snapshot of an old index
// fall back to old version-based allocation mode
// Note that once the shard has been active, lastActiveAllocationIds will be non-empty
@@ -175,8 +175,8 @@ public abstract class PrimaryShardAllocator extends AbstractComponent {
*/
protected NodesAndVersions buildAllocationIdBasedNodes(ShardRouting shard, boolean matchAnyShard, Set<String> ignoreNodes,
Set<String> lastActiveAllocationIds, AsyncShardFetch.FetchResult<TransportNodesListGatewayStartedShards.NodeGatewayStartedShards> shardState) {
- List<DiscoveryNode> matchingNodes = new ArrayList<>();
- List<DiscoveryNode> nonMatchingNodes = new ArrayList<>();
+ LinkedList<DiscoveryNode> matchingNodes = new LinkedList<>();
+ LinkedList<DiscoveryNode> nonMatchingNodes = new LinkedList<>();
long highestVersion = -1;
for (TransportNodesListGatewayStartedShards.NodeGatewayStartedShards nodeShardState : shardState.getData().values()) {
DiscoveryNode node = nodeShardState.getNode();
@@ -200,10 +200,18 @@ public abstract class PrimaryShardAllocator extends AbstractComponent {
if (allocationId != null) {
if (lastActiveAllocationIds.contains(allocationId)) {
- matchingNodes.add(node);
+ if (nodeShardState.primary()) {
+ matchingNodes.addFirst(node);
+ } else {
+ matchingNodes.addLast(node);
+ }
highestVersion = Math.max(highestVersion, nodeShardState.version());
} else if (matchAnyShard) {
- nonMatchingNodes.add(node);
+ if (nodeShardState.primary()) {
+ nonMatchingNodes.addFirst(node);
+ } else {
+ nonMatchingNodes.addLast(node);
+ }
highestVersion = Math.max(highestVersion, nodeShardState.version());
}
}
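
Switching from ArrayList to LinkedList lets the allocator push nodes that held the primary copy to the front of the candidate lists, so they are tried before nodes that held replicas. A tiny runnable illustration of the ordering; the node names are made up:

    import java.util.LinkedList;

    // Sketch of the ordering change above: primaries jump the queue.
    final class PrimaryFirst {
        public static void main(String[] args) {
            LinkedList<String> matchingNodes = new LinkedList<>();
            matchingNodes.addLast("node-with-replica-1");
            matchingNodes.addFirst("node-with-primary"); // primary copy goes first
            matchingNodes.addLast("node-with-replica-2");
            System.out.println(matchingNodes);
            // [node-with-primary, node-with-replica-1, node-with-replica-2]
        }
    }
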
@@ -347,9 +355,9 @@ public abstract class PrimaryShardAllocator extends AbstractComponent {
* Return {@code true} if the index is configured to allow shards to be
* recovered on any node
*/
- private boolean recoverOnAnyNode(IndexSettings indexSettings) {
- return indexSettings.isOnSharedFilesystem()
- && IndexMetaData.INDEX_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE_SETTING.get(indexSettings.getSettings());
+ private boolean recoverOnAnyNode(IndexMetaData metaData) {
+ return (IndexMetaData.isOnSharedFilesystem(metaData.getSettings()) || IndexMetaData.isOnSharedFilesystem(this.settings))
+ && IndexMetaData.INDEX_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE_SETTING.get(metaData.getSettings(), this.settings);
}
protected abstract AsyncShardFetch.FetchResult<TransportNodesListGatewayStartedShards.NodeGatewayStartedShards> fetchData(ShardRouting shard, RoutingAllocation allocation);
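
The rewritten recoverOnAnyNode reads the shared-filesystem flags with an index-then-node fallback (get(metaData.getSettings(), this.settings)). A simplified Map-based sketch of that lookup order, not the real Setting API:

    import java.util.Collections;
    import java.util.Map;

    // Sketch of a two-level setting lookup: the index-scoped value wins,
    // node-level settings are the fallback, then the hard-coded default.
    final class FallbackLookup {
        static boolean getBoolean(Map<String, String> indexSettings,
                                  Map<String, String> nodeSettings,
                                  String key, boolean defaultValue) {
            String value = indexSettings.get(key);
            if (value == null) {
                value = nodeSettings.get(key);
            }
            return value == null ? defaultValue : Boolean.parseBoolean(value);
        }

        public static void main(String[] args) {
            Map<String, String> node = Collections.singletonMap("index.shared_filesystem", "true");
            System.out.println(getBoolean(Collections.emptyMap(), node, "index.shared_filesystem", false)); // true
        }
    }
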
diff --git a/core/src/main/java/org/elasticsearch/gateway/PriorityComparator.java b/core/src/main/java/org/elasticsearch/gateway/PriorityComparator.java
index c5c5794a78..04f438c70f 100644
--- a/core/src/main/java/org/elasticsearch/gateway/PriorityComparator.java
+++ b/core/src/main/java/org/elasticsearch/gateway/PriorityComparator.java
@@ -38,8 +38,8 @@ public abstract class PriorityComparator implements Comparator<ShardRouting> {
@Override
public final int compare(ShardRouting o1, ShardRouting o2) {
- final String o1Index = o1.index();
- final String o2Index = o2.index();
+ final String o1Index = o1.getIndexName();
+ final String o2Index = o2.getIndexName();
int cmp = 0;
if (o1Index.equals(o2Index) == false) {
final Settings settingsO1 = getIndexSettings(o1Index);
@@ -60,7 +60,7 @@ public abstract class PriorityComparator implements Comparator<ShardRouting> {
}
private long timeCreated(Settings settings) {
- return settings.getAsLong(IndexMetaData.SETTING_CREATION_DATE, -1l);
+ return settings.getAsLong(IndexMetaData.SETTING_CREATION_DATE, -1L);
}
protected abstract Settings getIndexSettings(String index);
diff --git a/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java
index 0b5f2bc58d..a171cafd22 100644
--- a/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java
+++ b/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java
@@ -74,7 +74,7 @@ public abstract class ReplicaShardAllocator extends AbstractComponent {
}
// if we are allocating a replica because of index creation, no need to go and find a copy, there isn't one...
- IndexMetaData indexMetaData = metaData.index(shard.getIndex());
+ IndexMetaData indexMetaData = metaData.index(shard.getIndexName());
if (shard.allocatedPostIndexCreate(indexMetaData) == false) {
continue;
}
@@ -127,7 +127,7 @@ public abstract class ReplicaShardAllocator extends AbstractComponent {
}
// if we are allocating a replica because of index creation, no need to go and find a copy, there isn't one...
- IndexMetaData indexMetaData = metaData.index(shard.getIndex());
+ IndexMetaData indexMetaData = metaData.index(shard.getIndexName());
if (shard.allocatedPostIndexCreate(indexMetaData) == false) {
continue;
}
diff --git a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java
index a117eb709a..fb174f4bd4 100644
--- a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java
+++ b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java
@@ -183,7 +183,7 @@ public class TransportNodesListGatewayMetaState extends TransportNodesAction<Tra
}
NodeRequest(String nodeId, TransportNodesListGatewayMetaState.Request request) {
- super(request, nodeId);
+ super(nodeId);
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java
index ca991f14e2..505dc61a2b 100644
--- a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java
+++ b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java
@@ -49,7 +49,6 @@ import org.elasticsearch.transport.TransportService;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicReferenceArray;
@@ -126,7 +125,7 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction
logger.trace("{} loading local shard state info", shardId);
ShardStateMetaData shardStateMetaData = ShardStateMetaData.FORMAT.loadLatestState(logger, nodeEnv.availableShardPaths(request.shardId));
if (shardStateMetaData != null) {
- final IndexMetaData metaData = clusterService.state().metaData().index(shardId.index().name()); // it's a mystery why this is sometimes null
+ final IndexMetaData metaData = clusterService.state().metaData().index(shardId.getIndexName()); // it's a mystery why this is sometimes null
if (metaData != null) {
ShardPath shardPath = null;
try {
@@ -135,11 +134,11 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction
if (shardPath == null) {
throw new IllegalStateException(shardId + " no shard path found");
}
- Store.tryOpenIndex(shardPath.resolveIndex());
+ Store.tryOpenIndex(shardPath.resolveIndex(), shardId);
} catch (Exception exception) {
logger.trace("{} can't open index for shard [{}] in path [{}]", exception, shardId, shardStateMetaData, (shardPath != null) ? shardPath.resolveIndex() : "");
String allocationId = shardStateMetaData.allocationId != null ? shardStateMetaData.allocationId.getId() : null;
- return new NodeGatewayStartedShards(clusterService.localNode(), shardStateMetaData.version, allocationId, exception);
+ return new NodeGatewayStartedShards(clusterService.localNode(), shardStateMetaData.version, allocationId, shardStateMetaData.primary, exception);
}
}
// old shard metadata doesn't have the actual index UUID so we need to check if the actual uuid in the metadata
@@ -150,11 +149,11 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction
} else {
logger.debug("{} shard state info found: [{}]", shardId, shardStateMetaData);
String allocationId = shardStateMetaData.allocationId != null ? shardStateMetaData.allocationId.getId() : null;
- return new NodeGatewayStartedShards(clusterService.localNode(), shardStateMetaData.version, allocationId);
+ return new NodeGatewayStartedShards(clusterService.localNode(), shardStateMetaData.version, allocationId, shardStateMetaData.primary);
}
}
logger.trace("{} no local shard info found", shardId);
- return new NodeGatewayStartedShards(clusterService.localNode(), -1, null);
+ return new NodeGatewayStartedShards(clusterService.localNode(), -1, null, false);
} catch (Exception e) {
throw new ElasticsearchException("failed to load started shards", e);
}
@@ -247,7 +246,7 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction
}
NodeRequest(String nodeId, TransportNodesListGatewayStartedShards.Request request) {
- super(request, nodeId);
+ super(nodeId);
this.shardId = request.shardId();
this.indexUUID = request.getIndexUUID();
}
@@ -279,18 +278,20 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction
private long version = -1;
private String allocationId = null;
+ private boolean primary = false;
private Throwable storeException = null;
public NodeGatewayStartedShards() {
}
- public NodeGatewayStartedShards(DiscoveryNode node, long version, String allocationId) {
- this(node, version, allocationId, null);
+ public NodeGatewayStartedShards(DiscoveryNode node, long version, String allocationId, boolean primary) {
+ this(node, version, allocationId, primary, null);
}
- public NodeGatewayStartedShards(DiscoveryNode node, long version, String allocationId, Throwable storeException) {
+ public NodeGatewayStartedShards(DiscoveryNode node, long version, String allocationId, boolean primary, Throwable storeException) {
super(node);
this.version = version;
this.allocationId = allocationId;
+ this.primary = primary;
this.storeException = storeException;
}
@@ -302,6 +303,10 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction
return this.allocationId;
}
+ public boolean primary() {
+ return this.primary;
+ }
+
public Throwable storeException() {
return this.storeException;
}
@@ -311,6 +316,7 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction
super.readFrom(in);
version = in.readLong();
allocationId = in.readOptionalString();
+ primary = in.readBoolean();
if (in.readBoolean()) {
storeException = in.readThrowable();
}
@@ -321,6 +327,7 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction
super.writeTo(out);
out.writeLong(version);
out.writeOptionalString(allocationId);
+ out.writeBoolean(primary);
if (storeException != null) {
out.writeBoolean(true);
out.writeThrowable(storeException);
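
The new primary flag threads through the NodeGatewayStartedShards constructors and, as the readFrom/writeTo hunks show, is written between the optional allocation id and the optional store exception; reader and writer must agree on that order or the stream desynchronizes. A self-contained sketch with plain data streams standing in for StreamInput/StreamOutput:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    // Sketch of the wire-format change above; field order is the contract.
    final class ShardStateWire {
        static byte[] write(long version, String allocationId, boolean primary) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(bytes);
            out.writeLong(version);
            out.writeBoolean(allocationId != null); // stand-in for writeOptionalString
            if (allocationId != null) {
                out.writeUTF(allocationId);
            }
            out.writeBoolean(primary);              // the field added in this patch
            return bytes.toByteArray();
        }

        static boolean readPrimary(byte[] data) throws IOException {
            DataInputStream in = new DataInputStream(new ByteArrayInputStream(data));
            in.readLong();                          // version
            if (in.readBoolean()) {
                in.readUTF();                       // allocation id
            }
            return in.readBoolean();                // primary, read in the same position
        }
    }
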
diff --git a/core/src/main/java/org/elasticsearch/http/HttpServer.java b/core/src/main/java/org/elasticsearch/http/HttpServer.java
index 9971ce7722..5b1c0a32f8 100644
--- a/core/src/main/java/org/elasticsearch/http/HttpServer.java
+++ b/core/src/main/java/org/elasticsearch/http/HttpServer.java
@@ -21,39 +21,27 @@ package org.elasticsearch.http;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.env.Environment;
import org.elasticsearch.node.service.NodeService;
import org.elasticsearch.rest.BytesRestResponse;
-import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestController;
-import org.elasticsearch.rest.RestFilter;
-import org.elasticsearch.rest.RestFilterChain;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.RestStatus;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.attribute.BasicFileAttributes;
-import java.util.HashMap;
-import java.util.Locale;
-import java.util.Map;
-import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.rest.RestStatus.FORBIDDEN;
import static org.elasticsearch.rest.RestStatus.INTERNAL_SERVER_ERROR;
-import static org.elasticsearch.rest.RestStatus.NOT_FOUND;
-import static org.elasticsearch.rest.RestStatus.OK;
/**
- *
+ * A component to serve http requests, backed by rest handlers.
*/
-public class HttpServer extends AbstractLifecycleComponent<HttpServer> {
+public class HttpServer extends AbstractLifecycleComponent<HttpServer> implements HttpServerAdapter {
private final Environment environment;
@@ -63,10 +51,6 @@ public class HttpServer extends AbstractLifecycleComponent<HttpServer> {
private final NodeService nodeService;
- private final boolean disableSites;
-
- private final PluginSiteFilter pluginSiteFilter = new PluginSiteFilter();
-
@Inject
public HttpServer(Settings settings, Environment environment, HttpServerTransport transport,
RestController restController,
@@ -77,25 +61,9 @@ public class HttpServer extends AbstractLifecycleComponent<HttpServer> {
this.restController = restController;
this.nodeService = nodeService;
nodeService.setHttpServer(this);
-
- this.disableSites = this.settings.getAsBoolean("http.disable_sites", false);
-
- transport.httpServerAdapter(new Dispatcher(this));
+ transport.httpServerAdapter(this);
}
- static class Dispatcher implements HttpServerAdapter {
-
- private final HttpServer server;
-
- Dispatcher(HttpServer server) {
- this.server = server;
- }
-
- @Override
- public void dispatchRequest(HttpRequest request, HttpChannel channel) {
- server.internalDispatchRequest(request, channel);
- }
- }
@Override
protected void doStart() {
@@ -125,26 +93,12 @@ public class HttpServer extends AbstractLifecycleComponent<HttpServer> {
return transport.stats();
}
- public void internalDispatchRequest(final HttpRequest request, final HttpChannel channel) {
- String rawPath = request.rawPath();
- if (rawPath.startsWith("/_plugin/")) {
- RestFilterChain filterChain = restController.filterChain(pluginSiteFilter);
- filterChain.continueProcessing(request, channel);
- return;
- } else if (rawPath.equals("/favicon.ico")) {
+ public void dispatchRequest(HttpRequest request, HttpChannel channel, ThreadContext threadContext) {
+ if (request.rawPath().equals("/favicon.ico")) {
handleFavicon(request, channel);
return;
}
- restController.dispatchRequest(request, channel);
- }
-
-
- class PluginSiteFilter extends RestFilter {
-
- @Override
- public void process(RestRequest request, RestChannel channel, RestFilterChain filterChain) throws IOException {
- handlePluginSite((HttpRequest) request, (HttpChannel) channel);
- }
+ restController.dispatchRequest(request, channel, threadContext);
}
void handleFavicon(HttpRequest request, HttpChannel channel) {
@@ -163,129 +117,4 @@ public class HttpServer extends AbstractLifecycleComponent<HttpServer> {
channel.sendResponse(new BytesRestResponse(FORBIDDEN));
}
}
-
- void handlePluginSite(HttpRequest request, HttpChannel channel) throws IOException {
- if (disableSites) {
- channel.sendResponse(new BytesRestResponse(FORBIDDEN));
- return;
- }
- if (request.method() == RestRequest.Method.OPTIONS) {
- // when we have OPTIONS request, simply send OK by default (with the Access Control Origin header which gets automatically added)
- channel.sendResponse(new BytesRestResponse(OK));
- return;
- }
- if (request.method() != RestRequest.Method.GET) {
- channel.sendResponse(new BytesRestResponse(FORBIDDEN));
- return;
- }
- // TODO for a "/_plugin" endpoint, we should have a page that lists all the plugins?
-
- String path = request.rawPath().substring("/_plugin/".length());
- int i1 = path.indexOf('/');
- String pluginName;
- String sitePath;
- if (i1 == -1) {
- pluginName = path;
- sitePath = null;
- // If a trailing / is missing, we redirect to the right page #2654
- String redirectUrl = request.rawPath() + "/";
- BytesRestResponse restResponse = new BytesRestResponse(RestStatus.MOVED_PERMANENTLY, "text/html", "<head><meta http-equiv=\"refresh\" content=\"0; URL=" + redirectUrl + "\"></head>");
- restResponse.addHeader("Location", redirectUrl);
- channel.sendResponse(restResponse);
- return;
- } else {
- pluginName = path.substring(0, i1);
- sitePath = path.substring(i1 + 1);
- }
-
- // we default to index.html, or what the plugin provides (as a unix-style path)
- // this is a relative path under _site configured by the plugin.
- if (sitePath.length() == 0) {
- sitePath = "index.html";
- } else {
- // remove extraneous leading slashes, its not an absolute path.
- while (sitePath.length() > 0 && sitePath.charAt(0) == '/') {
- sitePath = sitePath.substring(1);
- }
- }
- final Path siteFile = environment.pluginsFile().resolve(pluginName).resolve("_site");
-
- final String separator = siteFile.getFileSystem().getSeparator();
- // Convert file separators.
- sitePath = sitePath.replace("/", separator);
-
- Path file = siteFile.resolve(sitePath);
-
- // return not found instead of forbidden to prevent malicious requests to find out if files exist or dont exist
- if (!Files.exists(file) || FileSystemUtils.isHidden(file) || !file.toAbsolutePath().normalize().startsWith(siteFile.toAbsolutePath().normalize())) {
- channel.sendResponse(new BytesRestResponse(NOT_FOUND));
- return;
- }
-
- BasicFileAttributes attributes = Files.readAttributes(file, BasicFileAttributes.class);
- if (!attributes.isRegularFile()) {
- // If it's not a dir, we send a 403
- if (!attributes.isDirectory()) {
- channel.sendResponse(new BytesRestResponse(FORBIDDEN));
- return;
- }
- // We don't serve dir but if index.html exists in dir we should serve it
- file = file.resolve("index.html");
- if (!Files.exists(file) || FileSystemUtils.isHidden(file) || !Files.isRegularFile(file)) {
- channel.sendResponse(new BytesRestResponse(FORBIDDEN));
- return;
- }
- }
-
- try {
- byte[] data = Files.readAllBytes(file);
- channel.sendResponse(new BytesRestResponse(OK, guessMimeType(sitePath), data));
- } catch (IOException e) {
- channel.sendResponse(new BytesRestResponse(INTERNAL_SERVER_ERROR));
- }
- }
-
-
- // TODO: Don't respond with a mime type that violates the request's Accept header
- private String guessMimeType(String path) {
- int lastDot = path.lastIndexOf('.');
- if (lastDot == -1) {
- return "";
- }
- String extension = path.substring(lastDot + 1).toLowerCase(Locale.ROOT);
- String mimeType = DEFAULT_MIME_TYPES.get(extension);
- if (mimeType == null) {
- return "";
- }
- return mimeType;
- }
-
- static {
- // This is not an exhaustive list, just the most common types. Call registerMimeType() to add more.
- Map<String, String> mimeTypes = new HashMap<>();
- mimeTypes.put("txt", "text/plain");
- mimeTypes.put("css", "text/css");
- mimeTypes.put("csv", "text/csv");
- mimeTypes.put("htm", "text/html");
- mimeTypes.put("html", "text/html");
- mimeTypes.put("xml", "text/xml");
- mimeTypes.put("js", "text/javascript"); // Technically it should be application/javascript (RFC 4329), but IE8 struggles with that
- mimeTypes.put("xhtml", "application/xhtml+xml");
- mimeTypes.put("json", "application/json");
- mimeTypes.put("pdf", "application/pdf");
- mimeTypes.put("zip", "application/zip");
- mimeTypes.put("tar", "application/x-tar");
- mimeTypes.put("gif", "image/gif");
- mimeTypes.put("jpeg", "image/jpeg");
- mimeTypes.put("jpg", "image/jpeg");
- mimeTypes.put("tiff", "image/tiff");
- mimeTypes.put("tif", "image/tiff");
- mimeTypes.put("png", "image/png");
- mimeTypes.put("svg", "image/svg+xml");
- mimeTypes.put("ico", "image/vnd.microsoft.icon");
- mimeTypes.put("mp3", "audio/mpeg");
- DEFAULT_MIME_TYPES = unmodifiableMap(mimeTypes);
- }
-
- public static final Map<String, String> DEFAULT_MIME_TYPES;
}
diff --git a/core/src/main/java/org/elasticsearch/http/HttpServerAdapter.java b/core/src/main/java/org/elasticsearch/http/HttpServerAdapter.java
index a73456f6b5..c49265cae8 100644
--- a/core/src/main/java/org/elasticsearch/http/HttpServerAdapter.java
+++ b/core/src/main/java/org/elasticsearch/http/HttpServerAdapter.java
@@ -19,10 +19,12 @@
package org.elasticsearch.http;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
+
/**
*
*/
public interface HttpServerAdapter {
- void dispatchRequest(HttpRequest request, HttpChannel channel);
+ void dispatchRequest(HttpRequest request, HttpChannel channel, ThreadContext context);
}
diff --git a/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java b/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java
new file mode 100644
index 0000000000..c5a1844f7f
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/http/HttpTransportSettings.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.http;
+
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Setting.Scope;
+import org.elasticsearch.common.transport.PortsRange;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
+
+public final class HttpTransportSettings {
+
+ public static final Setting<Boolean> SETTING_CORS_ENABLED = Setting.boolSetting("http.cors.enabled", false, false, Scope.CLUSTER);
+ public static final Setting<String> SETTING_CORS_ALLOW_ORIGIN = new Setting<String>("http.cors.allow-origin", "", (value) -> value, false, Scope.CLUSTER);
+ public static final Setting<Integer> SETTING_CORS_MAX_AGE = Setting.intSetting("http.cors.max-age", 1728000, false, Scope.CLUSTER);
+ public static final Setting<String> SETTING_CORS_ALLOW_METHODS = new Setting<String>("http.cors.allow-methods", "OPTIONS, HEAD, GET, POST, PUT, DELETE", (value) -> value, false, Scope.CLUSTER);
+ public static final Setting<String> SETTING_CORS_ALLOW_HEADERS = new Setting<String>("http.cors.allow-headers", "X-Requested-With, Content-Type, Content-Length", (value) -> value, false, Scope.CLUSTER);
+ public static final Setting<Boolean> SETTING_CORS_ALLOW_CREDENTIALS = Setting.boolSetting("http.cors.allow-credentials", false, false, Scope.CLUSTER);
+ public static final Setting<Boolean> SETTING_PIPELINING = Setting.boolSetting("http.pipelining", true, false, Scope.CLUSTER);
+ public static final Setting<Integer> SETTING_PIPELINING_MAX_EVENTS = Setting.intSetting("http.pipelining.max_events", 10000, false, Scope.CLUSTER);
+ public static final Setting<Boolean> SETTING_HTTP_COMPRESSION = Setting.boolSetting("http.compression", false, false, Scope.CLUSTER);
+ public static final Setting<Integer> SETTING_HTTP_COMPRESSION_LEVEL = Setting.intSetting("http.compression_level", 6, false, Scope.CLUSTER);
+ public static final Setting<PortsRange> SETTING_HTTP_PORT = new Setting<PortsRange>("http.port", "9200-9300", PortsRange::new, false, Scope.CLUSTER);
+ public static final Setting<Integer> SETTING_HTTP_PUBLISH_PORT = Setting.intSetting("http.publish_port", 0, 0, false, Scope.CLUSTER);
+ public static final Setting<Boolean> SETTING_HTTP_DETAILED_ERRORS_ENABLED = Setting.boolSetting("http.detailed_errors.enabled", true, false, Scope.CLUSTER);
+ public static final Setting<ByteSizeValue> SETTING_HTTP_MAX_CONTENT_LENGTH = Setting.byteSizeSetting("http.max_content_length", new ByteSizeValue(100, ByteSizeUnit.MB), false, Scope.CLUSTER);
+ public static final Setting<ByteSizeValue> SETTING_HTTP_MAX_CHUNK_SIZE = Setting.byteSizeSetting("http.max_chunk_size", new ByteSizeValue(8, ByteSizeUnit.KB), false, Scope.CLUSTER);
+ public static final Setting<ByteSizeValue> SETTING_HTTP_MAX_HEADER_SIZE = Setting.byteSizeSetting("http.max_header_size", new ByteSizeValue(8, ByteSizeUnit.KB), false, Scope.CLUSTER);
+ public static final Setting<ByteSizeValue> SETTING_HTTP_MAX_INITIAL_LINE_LENGTH = Setting.byteSizeSetting("http.max_initial_line_length", new ByteSizeValue(4, ByteSizeUnit.KB), false, Scope.CLUSTER);
+ // don't reset cookies by default, since we don't think we really need to
+ // note: cookie parsing was fixed in netty 3.5.1 with regard to stack allocation, but we still don't currently need cookies
+ public static final Setting<Boolean> SETTING_HTTP_RESET_COOKIES = Setting.boolSetting("http.reset_cookies", false, false, Scope.CLUSTER);
+
+ private HttpTransportSettings() {
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/http/netty/HttpRequestHandler.java b/core/src/main/java/org/elasticsearch/http/netty/HttpRequestHandler.java
index 5c05efcd17..17e14fe83f 100644
--- a/core/src/main/java/org/elasticsearch/http/netty/HttpRequestHandler.java
+++ b/core/src/main/java/org/elasticsearch/http/netty/HttpRequestHandler.java
@@ -19,6 +19,8 @@
package org.elasticsearch.http.netty;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.http.HttpTransportSettings;
import org.elasticsearch.http.netty.pipelining.OrderedUpstreamMessageEvent;
import org.elasticsearch.rest.support.RestUtils;
import org.jboss.netty.channel.ChannelHandler;
@@ -41,12 +43,15 @@ public class HttpRequestHandler extends SimpleChannelUpstreamHandler {
private final Pattern corsPattern;
private final boolean httpPipeliningEnabled;
private final boolean detailedErrorsEnabled;
+ private final ThreadContext threadContext;
- public HttpRequestHandler(NettyHttpServerTransport serverTransport, boolean detailedErrorsEnabled) {
+ public HttpRequestHandler(NettyHttpServerTransport serverTransport, boolean detailedErrorsEnabled, ThreadContext threadContext) {
this.serverTransport = serverTransport;
- this.corsPattern = RestUtils.checkCorsSettingForRegex(serverTransport.settings().get(NettyHttpServerTransport.SETTING_CORS_ALLOW_ORIGIN));
+ this.corsPattern = RestUtils
+ .checkCorsSettingForRegex(HttpTransportSettings.SETTING_CORS_ALLOW_ORIGIN.get(serverTransport.settings()));
this.httpPipeliningEnabled = serverTransport.pipelining;
this.detailedErrorsEnabled = detailedErrorsEnabled;
+ this.threadContext = threadContext;
}
@Override
@@ -60,6 +65,7 @@ public class HttpRequestHandler extends SimpleChannelUpstreamHandler {
request = (HttpRequest) e.getMessage();
}
+ threadContext.copyHeaders(request.headers());
// the netty HTTP handling always copies the buffer over to its own buffer, either in NioWorker internally
// when reading, or using a cumulation buffer
NettyHttpRequest httpRequest = new NettyHttpRequest(request, e.getChannel());
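
The copyHeaders call added above stashes the request headers in the per-thread context before dispatch, so downstream code can read them without carrying the request object around. A simplified stand-in for that piece of ThreadContext:

    import java.util.HashMap;
    import java.util.Map;

    // Simplified stand-in for ThreadContext.copyHeaders(...): headers go into a
    // thread-local map at dispatch time and are readable later on the same thread.
    final class SimpleThreadContext {
        private static final ThreadLocal<Map<String, String>> HEADERS =
                ThreadLocal.withInitial(HashMap::new);

        static void copyHeaders(Map<String, String> requestHeaders) {
            HEADERS.get().putAll(requestHeaders);
        }

        static String getHeader(String name) {
            return HEADERS.get().get(name);
        }
    }
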
diff --git a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpChannel.java b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpChannel.java
index 7fcc7b65fb..1d3a2966e3 100644
--- a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpChannel.java
+++ b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpChannel.java
@@ -49,12 +49,12 @@ import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
-import static org.elasticsearch.http.netty.NettyHttpServerTransport.SETTING_CORS_ALLOW_CREDENTIALS;
-import static org.elasticsearch.http.netty.NettyHttpServerTransport.SETTING_CORS_ALLOW_HEADERS;
-import static org.elasticsearch.http.netty.NettyHttpServerTransport.SETTING_CORS_ALLOW_METHODS;
-import static org.elasticsearch.http.netty.NettyHttpServerTransport.SETTING_CORS_ALLOW_ORIGIN;
-import static org.elasticsearch.http.netty.NettyHttpServerTransport.SETTING_CORS_ENABLED;
-import static org.elasticsearch.http.netty.NettyHttpServerTransport.SETTING_CORS_MAX_AGE;
+import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_CREDENTIALS;
+import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_HEADERS;
+import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_METHODS;
+import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_ORIGIN;
+import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ENABLED;
+import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_MAX_AGE;
import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_ALLOW_CREDENTIALS;
import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_ALLOW_HEADERS;
import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_ALLOW_METHODS;
@@ -113,11 +113,11 @@ public class NettyHttpChannel extends HttpChannel {
resp = new DefaultHttpResponse(HttpVersion.HTTP_1_1, status);
}
if (RestUtils.isBrowser(nettyRequest.headers().get(USER_AGENT))) {
- if (transport.settings().getAsBoolean(SETTING_CORS_ENABLED, false)) {
+ if (SETTING_CORS_ENABLED.get(transport.settings())) {
String originHeader = request.header(ORIGIN);
if (!Strings.isNullOrEmpty(originHeader)) {
if (corsPattern == null) {
- String allowedOrigins = transport.settings().get(SETTING_CORS_ALLOW_ORIGIN, null);
+ String allowedOrigins = SETTING_CORS_ALLOW_ORIGIN.get(transport.settings());
if (!Strings.isNullOrEmpty(allowedOrigins)) {
resp.headers().add(ACCESS_CONTROL_ALLOW_ORIGIN, allowedOrigins);
}
@@ -127,12 +127,12 @@ public class NettyHttpChannel extends HttpChannel {
}
if (nettyRequest.getMethod() == HttpMethod.OPTIONS) {
// Allow Ajax requests based on the CORS "preflight" request
- resp.headers().add(ACCESS_CONTROL_MAX_AGE, transport.settings().getAsInt(SETTING_CORS_MAX_AGE, 1728000));
- resp.headers().add(ACCESS_CONTROL_ALLOW_METHODS, transport.settings().get(SETTING_CORS_ALLOW_METHODS, "OPTIONS, HEAD, GET, POST, PUT, DELETE"));
- resp.headers().add(ACCESS_CONTROL_ALLOW_HEADERS, transport.settings().get(SETTING_CORS_ALLOW_HEADERS, "X-Requested-With, Content-Type, Content-Length"));
+ resp.headers().add(ACCESS_CONTROL_MAX_AGE, SETTING_CORS_MAX_AGE.get(transport.settings()));
+ resp.headers().add(ACCESS_CONTROL_ALLOW_METHODS, SETTING_CORS_ALLOW_METHODS.get(transport.settings()));
+ resp.headers().add(ACCESS_CONTROL_ALLOW_HEADERS, SETTING_CORS_ALLOW_HEADERS.get(transport.settings()));
}
- if (transport.settings().getAsBoolean(SETTING_CORS_ALLOW_CREDENTIALS, false)) {
+ if (SETTING_CORS_ALLOW_CREDENTIALS.get(transport.settings())) {
resp.headers().add(ACCESS_CONTROL_ALLOW_CREDENTIALS, "true");
}
}
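
For reference, a preflight response assembled from the defaults these typed settings declare; the values are copied from HttpTransportSettings earlier in this patch and the header names follow the CORS spec:

    import java.util.LinkedHashMap;
    import java.util.Map;

    // Sketch of the OPTIONS preflight headers built in the hunk above.
    final class CorsPreflight {
        static Map<String, String> preflightHeaders() {
            Map<String, String> headers = new LinkedHashMap<>();
            headers.put("Access-Control-Max-Age", "1728000");
            headers.put("Access-Control-Allow-Methods", "OPTIONS, HEAD, GET, POST, PUT, DELETE");
            headers.put("Access-Control-Allow-Headers", "X-Requested-With, Content-Type, Content-Length");
            return headers;
        }

        public static void main(String[] args) {
            preflightHeaders().forEach((k, v) -> System.out.println(k + ": " + v));
        }
    }
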
diff --git a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java
index 899bbdc86e..83e6823f6f 100644
--- a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java
+++ b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java
@@ -19,7 +19,6 @@
package org.elasticsearch.http.netty;
-import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
@@ -27,7 +26,6 @@ import org.elasticsearch.common.netty.NettyUtils;
import org.elasticsearch.common.netty.OpenChannelsHandler;
import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.common.network.NetworkService;
-import org.elasticsearch.common.network.NetworkUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
@@ -38,6 +36,7 @@ import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.EsExecutors;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.http.BindHttpException;
import org.elasticsearch.http.HttpChannel;
import org.elasticsearch.http.HttpInfo;
@@ -45,8 +44,10 @@ import org.elasticsearch.http.HttpRequest;
import org.elasticsearch.http.HttpServerAdapter;
import org.elasticsearch.http.HttpServerTransport;
import org.elasticsearch.http.HttpStats;
+import org.elasticsearch.http.HttpTransportSettings;
import org.elasticsearch.http.netty.pipelining.HttpPipeliningHandler;
import org.elasticsearch.monitor.jvm.JvmInfo;
+import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.BindTransportException;
import org.jboss.netty.bootstrap.ServerBootstrap;
import org.jboss.netty.channel.AdaptiveReceiveBufferSizePredictorFactory;
@@ -73,11 +74,7 @@ import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicReference;
-
import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_BLOCKING;
-import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_BLOCKING_SERVER;
-import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_DEFAULT_RECEIVE_BUFFER_SIZE;
-import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_DEFAULT_SEND_BUFFER_SIZE;
import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_KEEP_ALIVE;
import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_NO_DELAY;
import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_RECEIVE_BUFFER_SIZE;
@@ -94,22 +91,6 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer
NettyUtils.setup();
}
- public static final String SETTING_CORS_ENABLED = "http.cors.enabled";
- public static final String SETTING_CORS_ALLOW_ORIGIN = "http.cors.allow-origin";
- public static final String SETTING_CORS_MAX_AGE = "http.cors.max-age";
- public static final String SETTING_CORS_ALLOW_METHODS = "http.cors.allow-methods";
- public static final String SETTING_CORS_ALLOW_HEADERS = "http.cors.allow-headers";
- public static final String SETTING_CORS_ALLOW_CREDENTIALS = "http.cors.allow-credentials";
- public static final String SETTING_PIPELINING = "http.pipelining";
- public static final String SETTING_PIPELINING_MAX_EVENTS = "http.pipelining.max_events";
- public static final String SETTING_HTTP_COMPRESSION = "http.compression";
- public static final String SETTING_HTTP_COMPRESSION_LEVEL = "http.compression_level";
- public static final String SETTING_HTTP_DETAILED_ERRORS_ENABLED = "http.detailed_errors.enabled";
-
- public static final boolean DEFAULT_SETTING_PIPELINING = true;
- public static final int DEFAULT_SETTING_PIPELINING_MAX_EVENTS = 10000;
- public static final String DEFAULT_PORT_RANGE = "9200-9300";
-
protected final NetworkService networkService;
protected final BigArrays bigArrays;
@@ -132,18 +113,19 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer
protected final boolean resetCookies;
- protected final String port;
+ protected final PortsRange port;
protected final String bindHosts[];
protected final String publishHosts[];
protected final boolean detailedErrorsEnabled;
+ protected final ThreadPool threadPool;
protected int publishPort;
- protected final String tcpNoDelay;
- protected final String tcpKeepAlive;
+ protected final boolean tcpNoDelay;
+ protected final boolean tcpKeepAlive;
protected final boolean reuseAddress;
protected final ByteSizeValue tcpSendBufferSize;
@@ -167,36 +149,34 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer
@Inject
@SuppressForbidden(reason = "sets org.jboss.netty.epollBugWorkaround based on netty.epollBugWorkaround")
// TODO: why be confusing like this? just let the user do it with the netty parameter instead!
- public NettyHttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays) {
+ public NettyHttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays, ThreadPool threadPool) {
super(settings);
this.networkService = networkService;
this.bigArrays = bigArrays;
+ this.threadPool = threadPool;
if (settings.getAsBoolean("netty.epollBugWorkaround", false)) {
System.setProperty("org.jboss.netty.epollBugWorkaround", "true");
}
-
- ByteSizeValue maxContentLength = settings.getAsBytesSize("http.netty.max_content_length", settings.getAsBytesSize("http.max_content_length", new ByteSizeValue(100, ByteSizeUnit.MB)));
- this.maxChunkSize = settings.getAsBytesSize("http.netty.max_chunk_size", settings.getAsBytesSize("http.max_chunk_size", new ByteSizeValue(8, ByteSizeUnit.KB)));
- this.maxHeaderSize = settings.getAsBytesSize("http.netty.max_header_size", settings.getAsBytesSize("http.max_header_size", new ByteSizeValue(8, ByteSizeUnit.KB)));
- this.maxInitialLineLength = settings.getAsBytesSize("http.netty.max_initial_line_length", settings.getAsBytesSize("http.max_initial_line_length", new ByteSizeValue(4, ByteSizeUnit.KB)));
- // don't reset cookies by default, since I don't think we really need to
- // note, parsing cookies was fixed in netty 3.5.1 regarding stack allocation, but still, currently, we don't need cookies
- this.resetCookies = settings.getAsBoolean("http.netty.reset_cookies", settings.getAsBoolean("http.reset_cookies", false));
+ ByteSizeValue maxContentLength = HttpTransportSettings.SETTING_HTTP_MAX_CONTENT_LENGTH.get(settings);
+ this.maxChunkSize = HttpTransportSettings.SETTING_HTTP_MAX_CHUNK_SIZE.get(settings);
+ this.maxHeaderSize = HttpTransportSettings.SETTING_HTTP_MAX_HEADER_SIZE.get(settings);
+ this.maxInitialLineLength = HttpTransportSettings.SETTING_HTTP_MAX_INITIAL_LINE_LENGTH.get(settings);
+ this.resetCookies = HttpTransportSettings.SETTING_HTTP_RESET_COOKIES.get(settings);
this.maxCumulationBufferCapacity = settings.getAsBytesSize("http.netty.max_cumulation_buffer_capacity", null);
this.maxCompositeBufferComponents = settings.getAsInt("http.netty.max_composite_buffer_components", -1);
this.workerCount = settings.getAsInt("http.netty.worker_count", EsExecutors.boundedNumberOfProcessors(settings) * 2);
- this.blockingServer = settings.getAsBoolean("http.netty.http.blocking_server", settings.getAsBoolean(TCP_BLOCKING_SERVER, settings.getAsBoolean(TCP_BLOCKING, false)));
- this.port = settings.get("http.netty.port", settings.get("http.port", DEFAULT_PORT_RANGE));
+ this.blockingServer = settings.getAsBoolean("http.netty.http.blocking_server", TCP_BLOCKING.get(settings));
+ this.port = HttpTransportSettings.SETTING_HTTP_PORT.get(settings);
this.bindHosts = settings.getAsArray("http.netty.bind_host", settings.getAsArray("http.bind_host", settings.getAsArray("http.host", null)));
this.publishHosts = settings.getAsArray("http.netty.publish_host", settings.getAsArray("http.publish_host", settings.getAsArray("http.host", null)));
- this.publishPort = settings.getAsInt("http.netty.publish_port", settings.getAsInt("http.publish_port", 0));
- this.tcpNoDelay = settings.get("http.netty.tcp_no_delay", settings.get(TCP_NO_DELAY, "true"));
- this.tcpKeepAlive = settings.get("http.netty.tcp_keep_alive", settings.get(TCP_KEEP_ALIVE, "true"));
- this.reuseAddress = settings.getAsBoolean("http.netty.reuse_address", settings.getAsBoolean(TCP_REUSE_ADDRESS, NetworkUtils.defaultReuseAddress()));
- this.tcpSendBufferSize = settings.getAsBytesSize("http.netty.tcp_send_buffer_size", settings.getAsBytesSize(TCP_SEND_BUFFER_SIZE, TCP_DEFAULT_SEND_BUFFER_SIZE));
- this.tcpReceiveBufferSize = settings.getAsBytesSize("http.netty.tcp_receive_buffer_size", settings.getAsBytesSize(TCP_RECEIVE_BUFFER_SIZE, TCP_DEFAULT_RECEIVE_BUFFER_SIZE));
- this.detailedErrorsEnabled = settings.getAsBoolean(SETTING_HTTP_DETAILED_ERRORS_ENABLED, true);
+ this.publishPort = HttpTransportSettings.SETTING_HTTP_PUBLISH_PORT.get(settings);
+ this.tcpNoDelay = settings.getAsBoolean("http.netty.tcp_no_delay", TCP_NO_DELAY.get(settings));
+ this.tcpKeepAlive = settings.getAsBoolean("http.netty.tcp_keep_alive", TCP_KEEP_ALIVE.get(settings));
+ this.reuseAddress = settings.getAsBoolean("http.netty.reuse_address", TCP_REUSE_ADDRESS.get(settings));
+ this.tcpSendBufferSize = settings.getAsBytesSize("http.netty.tcp_send_buffer_size", TCP_SEND_BUFFER_SIZE.get(settings));
+ this.tcpReceiveBufferSize = settings.getAsBytesSize("http.netty.tcp_receive_buffer_size", TCP_RECEIVE_BUFFER_SIZE.get(settings));
+ this.detailedErrorsEnabled = HttpTransportSettings.SETTING_HTTP_DETAILED_ERRORS_ENABLED.get(settings);
long defaultReceiverPredictor = 512 * 1024;
if (JvmInfo.jvmInfo().getMem().getDirectMemoryMax().bytes() > 0) {
@@ -214,10 +194,10 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer
receiveBufferSizePredictorFactory = new AdaptiveReceiveBufferSizePredictorFactory((int) receivePredictorMin.bytes(), (int) receivePredictorMin.bytes(), (int) receivePredictorMax.bytes());
}
- this.compression = settings.getAsBoolean(SETTING_HTTP_COMPRESSION, false);
- this.compressionLevel = settings.getAsInt(SETTING_HTTP_COMPRESSION_LEVEL, 6);
- this.pipelining = settings.getAsBoolean(SETTING_PIPELINING, DEFAULT_SETTING_PIPELINING);
- this.pipeliningMaxEvents = settings.getAsInt(SETTING_PIPELINING_MAX_EVENTS, DEFAULT_SETTING_PIPELINING_MAX_EVENTS);
+ this.compression = HttpTransportSettings.SETTING_HTTP_COMPRESSION.get(settings);
+ this.compressionLevel = HttpTransportSettings.SETTING_HTTP_COMPRESSION_LEVEL.get(settings);
+ this.pipelining = HttpTransportSettings.SETTING_PIPELINING.get(settings);
+ this.pipeliningMaxEvents = HttpTransportSettings.SETTING_PIPELINING_MAX_EVENTS.get(settings);
// validate max content length
if (maxContentLength.bytes() > Integer.MAX_VALUE) {
@@ -257,16 +237,13 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer
serverBootstrap.setPipelineFactory(configureServerChannelPipelineFactory());
- if (!"default".equals(tcpNoDelay)) {
- serverBootstrap.setOption("child.tcpNoDelay", Booleans.parseBoolean(tcpNoDelay, null));
- }
- if (!"default".equals(tcpKeepAlive)) {
- serverBootstrap.setOption("child.keepAlive", Booleans.parseBoolean(tcpKeepAlive, null));
- }
- if (tcpSendBufferSize != null && tcpSendBufferSize.bytes() > 0) {
+ serverBootstrap.setOption("child.tcpNoDelay", tcpNoDelay);
+ serverBootstrap.setOption("child.keepAlive", tcpKeepAlive);
+ if (tcpSendBufferSize.bytes() > 0) {
 serverBootstrap.setOption("child.sendBufferSize", tcpSendBufferSize.bytes());
}
- if (tcpReceiveBufferSize != null && tcpReceiveBufferSize.bytes() > 0) {
+ if (tcpReceiveBufferSize.bytes() > 0) {
serverBootstrap.setOption("child.receiveBufferSize", tcpReceiveBufferSize.bytes());
}
serverBootstrap.setOption("receiveBufferSizePredictorFactory", receiveBufferSizePredictorFactory);
@@ -308,15 +285,15 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer
throw new BindHttpException("Publish address [" + publishInetAddress + "] does not match any of the bound addresses [" + boundAddresses + "]");
}
- final InetSocketAddress publishAddress = new InetSocketAddress(publishInetAddress, publishPort);;
+ final InetSocketAddress publishAddress = new InetSocketAddress(publishInetAddress, publishPort);
this.boundAddress = new BoundTransportAddress(boundAddresses.toArray(new TransportAddress[boundAddresses.size()]), new InetSocketTransportAddress(publishAddress));
}
private InetSocketTransportAddress bindAddress(final InetAddress hostAddress) {
- PortsRange portsRange = new PortsRange(port);
final AtomicReference<Exception> lastException = new AtomicReference<>();
final AtomicReference<InetSocketAddress> boundSocket = new AtomicReference<>();
- boolean success = portsRange.iterate(new PortsRange.PortCallback() {
+ boolean success = port.iterate(new PortsRange.PortCallback() {
@Override
public boolean onPortNumber(int portNumber) {
try {
@@ -389,7 +366,7 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer
}
protected void dispatchRequest(HttpRequest request, HttpChannel channel) {
- httpServerAdapter.dispatchRequest(request, channel);
+ httpServerAdapter.dispatchRequest(request, channel, threadPool.getThreadContext());
}
protected void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) throws Exception {
@@ -414,7 +391,7 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer
}
public ChannelPipelineFactory configureServerChannelPipelineFactory() {
- return new HttpChannelPipelineFactory(this, detailedErrorsEnabled);
+ return new HttpChannelPipelineFactory(this, detailedErrorsEnabled, threadPool.getThreadContext());
}
protected static class HttpChannelPipelineFactory implements ChannelPipelineFactory {
@@ -422,9 +399,9 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer
protected final NettyHttpServerTransport transport;
protected final HttpRequestHandler requestHandler;
- public HttpChannelPipelineFactory(NettyHttpServerTransport transport, boolean detailedErrorsEnabled) {
+ public HttpChannelPipelineFactory(NettyHttpServerTransport transport, boolean detailedErrorsEnabled, ThreadContext threadContext) {
this.transport = transport;
- this.requestHandler = new HttpRequestHandler(transport, detailedErrorsEnabled);
+ this.requestHandler = new HttpRequestHandler(transport, detailedErrorsEnabled, threadContext);
}
@Override
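
Two related changes in this file: the port field is parsed into a PortsRange once in the constructor, and bindAddress() then iterates it directly instead of re-parsing the string. A minimal sketch of the PortsRange callback contract as used above, assuming the org.elasticsearch.common.transport package; the predicate stands in for a real bind attempt:

    import org.elasticsearch.common.transport.PortsRange;

    public class PortsRangeSketch {
        public static void main(String[] args) {
            // "http.port" is now parsed up front into a PortsRange.
            PortsRange ports = new PortsRange("9200-9300");
            boolean bound = ports.iterate(new PortsRange.PortCallback() {
                @Override
                public boolean onPortNumber(int portNumber) {
                    // Return true once a bind succeeds; false keeps iterating.
                    return portNumber >= 9201; // stand-in for a successful bind
                }
            });
            System.out.println(bound); // true: 9201 "bound"
        }
    }
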
diff --git a/core/src/main/java/org/elasticsearch/index/Index.java b/core/src/main/java/org/elasticsearch/index/Index.java
index 4bf2f62144..80bf3c31b4 100644
--- a/core/src/main/java/org/elasticsearch/index/Index.java
+++ b/core/src/main/java/org/elasticsearch/index/Index.java
@@ -21,31 +21,31 @@ package org.elasticsearch.index;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
import java.io.IOException;
/**
*
*/
-public class Index implements Streamable {
+public class Index implements Writeable<Index> {
- private String name;
+ private final static Index PROTO = new Index("", "");
- private Index() {
+ private final String name;
+ private final String uuid;
- }
-
- public Index(String name) {
+ public Index(String name, String uuid) {
this.name = name.intern();
+ this.uuid = uuid.intern();
}
- public String name() {
+ public String getName() {
return this.name;
}
- public String getName() {
- return name();
+ public String getUUID() {
+ return uuid;
}
@Override
@@ -55,30 +55,35 @@ public class Index implements Streamable {
@Override
public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null) return false;
+ if (this == o) {
+ return true;
+ }
+ if (o == null) {
+ return false;
+ }
Index index1 = (Index) o;
- return name.equals(index1.name);
+ return uuid.equals(index1.uuid) && name.equals(index1.name); // allow for _na_ uuid
}
@Override
public int hashCode() {
- return name.hashCode();
+ int result = name.hashCode();
+ result = 31 * result + uuid.hashCode();
+ return result;
}
- public static Index readIndexName(StreamInput in) throws IOException {
- Index index = new Index();
- index.readFrom(in);
- return index;
+ public static Index readIndex(StreamInput in) throws IOException {
+ return PROTO.readFrom(in);
}
@Override
- public void readFrom(StreamInput in) throws IOException {
- name = in.readString().intern();
+ public Index readFrom(StreamInput in) throws IOException {
+ return new Index(in.readString(), in.readString());
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
+ out.writeString(uuid);
}
}
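
Index now carries a uuid and serializes via Writeable, so deserialization produces a fresh immutable instance instead of mutating one in place. A minimal round-trip sketch, assuming the usual BytesStreamOutput and BytesReference.streamInput() helpers:

    import org.elasticsearch.common.io.stream.BytesStreamOutput;
    import org.elasticsearch.common.io.stream.StreamInput;
    import org.elasticsearch.index.Index;

    public class IndexRoundTripSketch {
        public static void main(String[] args) throws Exception {
            // The uuid travels with the name, and equals() compares both.
            Index original = new Index("logs-2016", "some-uuid");
            BytesStreamOutput out = new BytesStreamOutput();
            original.writeTo(out);
            StreamInput in = out.bytes().streamInput();
            Index copy = Index.readIndex(in);
            System.out.println(original.equals(copy)); // true: name and uuid match
        }
    }
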
diff --git a/core/src/main/java/org/elasticsearch/index/IndexModule.java b/core/src/main/java/org/elasticsearch/index/IndexModule.java
index b09d91b1c2..4688fba503 100644
--- a/core/src/main/java/org/elasticsearch/index/IndexModule.java
+++ b/core/src/main/java/org/elasticsearch/index/IndexModule.java
@@ -258,8 +258,8 @@ public final class IndexModule {
throw new IllegalStateException("store must not be null");
}
}
- indexSettings.getScopedSettings().addSettingsUpdateConsumer(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING, store::setMaxRate);
indexSettings.getScopedSettings().addSettingsUpdateConsumer(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING, store::setType);
+ indexSettings.getScopedSettings().addSettingsUpdateConsumer(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING, store::setMaxRate);
final String queryCacheType = indexSettings.getValue(INDEX_QUERY_CACHE_TYPE_SETTING);
final BiFunction<IndexSettings, IndicesQueryCache, QueryCache> queryCacheProvider = queryCaches.get(queryCacheType);
final QueryCache queryCache = queryCacheProvider.apply(indexSettings, servicesProvider.getIndicesQueryCache());
diff --git a/core/src/main/java/org/elasticsearch/index/IndexNotFoundException.java b/core/src/main/java/org/elasticsearch/index/IndexNotFoundException.java
index bc7e55d5a0..035b90dd25 100644
--- a/core/src/main/java/org/elasticsearch/index/IndexNotFoundException.java
+++ b/core/src/main/java/org/elasticsearch/index/IndexNotFoundException.java
@@ -34,6 +34,15 @@ public final class IndexNotFoundException extends ResourceNotFoundException {
setIndex(index);
}
+ public IndexNotFoundException(Index index) {
+ this(index, null);
+ }
+
+ public IndexNotFoundException(Index index, Throwable cause) {
+ super("no such index", cause);
+ setIndex(index);
+ }
+
public IndexNotFoundException(StreamInput in) throws IOException {
super(in);
}
diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java
index f848c70683..8c87b2b560 100644
--- a/core/src/main/java/org/elasticsearch/index/IndexService.java
+++ b/core/src/main/java/org/elasticsearch/index/IndexService.java
@@ -108,7 +108,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
private final IndexingSlowLog slowLog;
private final IndexingOperationListener[] listeners;
private volatile AsyncRefreshTask refreshTask;
- private final AsyncTranslogFSync fsyncTask;
+ private volatile AsyncTranslogFSync fsyncTask;
private final SearchSlowLog searchSlowLog;
public IndexService(IndexSettings indexSettings, NodeEnvironment nodeEnv,
@@ -147,13 +147,9 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
this.listeners[0] = slowLog;
System.arraycopy(listenersIn, 0, this.listeners, 1, listenersIn.length);
// kick off async ops for the first shard in this index
- if (this.indexSettings.getTranslogSyncInterval().millis() != 0) {
- this.fsyncTask = new AsyncTranslogFSync(this);
- } else {
- this.fsyncTask = null;
- }
this.refreshTask = new AsyncRefreshTask(this);
searchSlowLog = new SearchSlowLog(indexSettings);
+ rescheduleFsyncTask(indexSettings.getTranslogDurability());
}
public int numberOfShards() {
@@ -460,7 +456,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
if (shardId != null) {
final IndexShard shard = indexService.getShardOrNull(shardId.id());
if (shard != null) {
- long ramBytesUsed = accountable != null ? accountable.ramBytesUsed() : 0l;
+ long ramBytesUsed = accountable != null ? accountable.ramBytesUsed() : 0L;
shard.shardBitsetFilterCache().onCached(ramBytesUsed);
}
}
@@ -471,7 +467,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
if (shardId != null) {
final IndexShard shard = indexService.getShardOrNull(shardId.id());
if (shard != null) {
- long ramBytesUsed = accountable != null ? accountable.ramBytesUsed() : 0l;
+ long ramBytesUsed = accountable != null ? accountable.ramBytesUsed() : 0L;
shard.shardBitsetFilterCache().onRemoval(ramBytesUsed);
}
}
@@ -565,6 +561,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
}
public synchronized void updateMetaData(final IndexMetaData metadata) {
+ final Translog.Durability oldTranslogDurability = indexSettings.getTranslogDurability();
if (indexSettings.updateIndexMetaData(metadata)) {
for (final IndexShard shard : this.shards.values()) {
try {
@@ -576,6 +573,20 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
if (refreshTask.getInterval().equals(indexSettings.getRefreshInterval()) == false) {
rescheduleRefreshTasks();
}
+ final Translog.Durability durability = indexSettings.getTranslogDurability();
+ if (durability != oldTranslogDurability) {
+ rescheduleFsyncTask(durability);
+ }
+ }
+ }
+
+ private void rescheduleFsyncTask(Translog.Durability durability) {
+ try {
+ if (fsyncTask != null) {
+ fsyncTask.close();
+ }
+ } finally {
+ fsyncTask = durability == Translog.Durability.REQUEST ? null : new AsyncTranslogFSync(this);
}
}
@@ -635,7 +646,9 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
case STARTED:
case RELOCATED:
try {
- shard.refresh("schedule");
+ if (shard.isRefreshNeeded()) {
+ shard.refresh("schedule");
+ }
} catch (EngineClosedException | AlreadyClosedException ex) {
// fine - continue;
}
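
The fsync task is now driven by translog durability rather than fixed at construction: REQUEST durability fsyncs on every operation, so the background task is dropped; anything else (re)creates it. A standalone sketch of the close-then-replace pattern, with Durability and FsyncTask as illustrative stand-ins:

    // Close the old task first, and install the replacement even if close() throws.
    public class RescheduleSketch {
        enum Durability { REQUEST, ASYNC }

        static class FsyncTask implements AutoCloseable {
            @Override
            public void close() { /* cancel the scheduled job */ }
        }

        private volatile FsyncTask fsyncTask;

        void rescheduleFsyncTask(Durability durability) {
            try {
                if (fsyncTask != null) {
                    fsyncTask.close();
                }
            } finally {
                // REQUEST durability fsyncs on every operation, so no background
                // task is needed; ASYNC relies on the periodic task instead.
                fsyncTask = durability == Durability.REQUEST ? null : new FsyncTask();
            }
        }
    }
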
diff --git a/core/src/main/java/org/elasticsearch/index/IndexSettings.java b/core/src/main/java/org/elasticsearch/index/IndexSettings.java
index 715bea5169..3919584b78 100644
--- a/core/src/main/java/org/elasticsearch/index/IndexSettings.java
+++ b/core/src/main/java/org/elasticsearch/index/IndexSettings.java
@@ -36,6 +36,7 @@ import org.elasticsearch.index.translog.Translog;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
@@ -54,7 +55,7 @@ public final class IndexSettings {
public static final Setting<Boolean> QUERY_STRING_ANALYZE_WILDCARD = Setting.boolSetting("indices.query.query_string.analyze_wildcard", false, false, Setting.Scope.CLUSTER);
public static final Setting<Boolean> QUERY_STRING_ALLOW_LEADING_WILDCARD = Setting.boolSetting("indices.query.query_string.allowLeadingWildcard", true, false, Setting.Scope.CLUSTER);
public static final Setting<Boolean> ALLOW_UNMAPPED = Setting.boolSetting("index.query.parse.allow_unmapped_fields", true, false, Setting.Scope.INDEX);
- public static final Setting<TimeValue> INDEX_TRANSLOG_SYNC_INTERVAL_SETTING = Setting.timeSetting("index.translog.sync_interval", TimeValue.timeValueSeconds(5), false, Setting.Scope.INDEX);
+ public static final Setting<TimeValue> INDEX_TRANSLOG_SYNC_INTERVAL_SETTING = Setting.timeSetting("index.translog.sync_interval", TimeValue.timeValueSeconds(5), TimeValue.timeValueMillis(100), false, Setting.Scope.INDEX);
public static final Setting<Translog.Durability> INDEX_TRANSLOG_DURABILITY_SETTING = new Setting<>("index.translog.durability", Translog.Durability.REQUEST.name(), (value) -> Translog.Durability.valueOf(value.toUpperCase(Locale.ROOT)), true, Setting.Scope.INDEX);
public static final Setting<Boolean> INDEX_WARMER_ENABLED_SETTING = Setting.boolSetting("index.warmer.enabled", true, true, Setting.Scope.INDEX);
public static final Setting<Boolean> INDEX_TTL_DISABLE_PURGE_SETTING = Setting.boolSetting("index.ttl.disable_purge", false, true, Setting.Scope.INDEX);
@@ -91,7 +92,6 @@ public final class IndexSettings {
public static final TimeValue DEFAULT_GC_DELETES = TimeValue.timeValueSeconds(60);
public static final Setting<TimeValue> INDEX_GC_DELETES_SETTING = Setting.timeSetting("index.gc_deletes", DEFAULT_GC_DELETES, new TimeValue(-1, TimeUnit.MILLISECONDS), true, Setting.Scope.INDEX);
- private final String uuid;
private final Index index;
private final Version version;
private final ESLogger logger;
@@ -164,11 +164,7 @@ public final class IndexSettings {
* @param nodeSettings the nodes settings this index is allocated on.
*/
public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings) {
- this(indexMetaData, nodeSettings, (index) -> Regex.simpleMatch(index, indexMetaData.getIndex()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS);
- }
-
- IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings, IndexScopedSettings indexScopedSettings) {
- this(indexMetaData, nodeSettings, (index) -> Regex.simpleMatch(index, indexMetaData.getIndex()), indexScopedSettings);
+ this(indexMetaData, nodeSettings, (index) -> Regex.simpleMatch(index, indexMetaData.getIndex().getName()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS);
}
/**
@@ -183,9 +179,8 @@ public final class IndexSettings {
scopedSettings = indexScopedSettings.copy(nodeSettings, indexMetaData);
this.nodeSettings = nodeSettings;
this.settings = Settings.builder().put(nodeSettings).put(indexMetaData.getSettings()).build();
- this.index = new Index(indexMetaData.getIndex());
+ this.index = indexMetaData.getIndex();
version = Version.indexCreated(settings);
- uuid = settings.get(IndexMetaData.SETTING_INDEX_UUID, IndexMetaData.INDEX_UUID_NA_VALUE);
logger = Loggers.getLogger(getClass(), settings, index);
nodeName = settings.get("name", "");
this.indexMetaData = indexMetaData;
@@ -200,24 +195,35 @@ public final class IndexSettings {
this.defaultAllowUnmappedFields = scopedSettings.get(ALLOW_UNMAPPED);
this.indexNameMatcher = indexNameMatcher;
this.durability = scopedSettings.get(INDEX_TRANSLOG_DURABILITY_SETTING);
- scopedSettings.addSettingsUpdateConsumer(INDEX_TRANSLOG_DURABILITY_SETTING, this::setTranslogDurability);
syncInterval = INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.get(settings);
refreshInterval = scopedSettings.get(INDEX_REFRESH_INTERVAL_SETTING);
- scopedSettings.addSettingsUpdateConsumer(INDEX_REFRESH_INTERVAL_SETTING, this::setRefreshInterval);
flushThresholdSize = scopedSettings.get(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING);
- scopedSettings.addSettingsUpdateConsumer(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING, this::setTranslogFlushThresholdSize);
mergeSchedulerConfig = new MergeSchedulerConfig(this);
- scopedSettings.addSettingsUpdateConsumer(INDEX_GC_DELETES_SETTING, this::setGCDeletes);
gcDeletesInMillis = scopedSettings.get(INDEX_GC_DELETES_SETTING).getMillis();
warmerEnabled = scopedSettings.get(INDEX_WARMER_ENABLED_SETTING);
- scopedSettings.addSettingsUpdateConsumer(INDEX_WARMER_ENABLED_SETTING, this::setEnableWarmer);
maxResultWindow = scopedSettings.get(MAX_RESULT_WINDOW_SETTING);
- scopedSettings.addSettingsUpdateConsumer(MAX_RESULT_WINDOW_SETTING, this::setMaxResultWindow);
TTLPurgeDisabled = scopedSettings.get(INDEX_TTL_DISABLE_PURGE_SETTING);
- scopedSettings.addSettingsUpdateConsumer(INDEX_TTL_DISABLE_PURGE_SETTING, this::setTTLPurgeDisabled);
this.mergePolicyConfig = new MergePolicyConfig(logger, this);
- assert indexNameMatcher.test(indexMetaData.getIndex());
-
+ assert indexNameMatcher.test(indexMetaData.getIndex().getName());
+
+ scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING, mergePolicyConfig::setNoCFSRatio);
+ scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING, mergePolicyConfig::setExpungeDeletesAllowed);
+ scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING, mergePolicyConfig::setFloorSegmentSetting);
+ scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING, mergePolicyConfig::setMaxMergesAtOnce);
+ scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT_SETTING, mergePolicyConfig::setMaxMergesAtOnceExplicit);
+ scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING, mergePolicyConfig::setMaxMergedSegment);
+ scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING, mergePolicyConfig::setSegmentsPerTier);
+ scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT_SETTING, mergePolicyConfig::setReclaimDeletesWeight);
+ scopedSettings.addSettingsUpdateConsumer(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING, mergeSchedulerConfig::setMaxThreadCount);
+ scopedSettings.addSettingsUpdateConsumer(MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING, mergeSchedulerConfig::setMaxMergeCount);
+ scopedSettings.addSettingsUpdateConsumer(MergeSchedulerConfig.AUTO_THROTTLE_SETTING, mergeSchedulerConfig::setAutoThrottle);
+ scopedSettings.addSettingsUpdateConsumer(INDEX_TRANSLOG_DURABILITY_SETTING, this::setTranslogDurability);
+ scopedSettings.addSettingsUpdateConsumer(INDEX_TTL_DISABLE_PURGE_SETTING, this::setTTLPurgeDisabled);
+ scopedSettings.addSettingsUpdateConsumer(MAX_RESULT_WINDOW_SETTING, this::setMaxResultWindow);
+ scopedSettings.addSettingsUpdateConsumer(INDEX_WARMER_ENABLED_SETTING, this::setEnableWarmer);
+ scopedSettings.addSettingsUpdateConsumer(INDEX_GC_DELETES_SETTING, this::setGCDeletes);
+ scopedSettings.addSettingsUpdateConsumer(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING, this::setTranslogFlushThresholdSize);
+ scopedSettings.addSettingsUpdateConsumer(INDEX_REFRESH_INTERVAL_SETTING, this::setRefreshInterval);
}
private void setTranslogFlushThresholdSize(ByteSizeValue byteSizeValue) {
@@ -249,7 +255,7 @@ public final class IndexSettings {
* Returns the indexes UUID
*/
public String getUUID() {
- return uuid;
+ return getIndex().getUUID();
}
/**
@@ -354,7 +360,7 @@ public final class IndexSettings {
}
final String newUUID = newSettings.get(IndexMetaData.SETTING_INDEX_UUID, IndexMetaData.INDEX_UUID_NA_VALUE);
if (newUUID.equals(getUUID()) == false) {
- throw new IllegalArgumentException("uuid mismatch on settings update expected: " + uuid + " but was: " + newUUID);
+ throw new IllegalArgumentException("uuid mismatch on settings update expected: " + getUUID() + " but was: " + newUUID);
}
this.indexMetaData = indexMetaData;
final Settings existingSettings = this.settings;
@@ -457,5 +463,5 @@ public final class IndexSettings {
}
- public IndexScopedSettings getScopedSettings() { return scopedSettings;}
+ IndexScopedSettings getScopedSettings() { return scopedSettings; }
}
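
The settings-update consumers moved out of the MergePolicyConfig and MergeSchedulerConfig constructors and are now registered at the end of the IndexSettings constructor, after every field is initialized, so a concurrent dynamic update can never reach a half-constructed object. A self-contained sketch of that ordering, with illustrative stand-in classes:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.Consumer;

    public class ConsumerOrderingSketch {
        static class ScopedSettings {
            private final List<Consumer<Integer>> consumers = new ArrayList<>();
            void addSettingsUpdateConsumer(Consumer<Integer> c) { consumers.add(c); }
            void applyUpdate(int value) { consumers.forEach(c -> c.accept(value)); }
        }

        static class MergeConfig {
            private volatile int maxThreadCount;
            MergeConfig(int initial) { this.maxThreadCount = initial; } // no registration here
            void setMaxThreadCount(int v) { this.maxThreadCount = v; }
        }

        public static void main(String[] args) {
            ScopedSettings scoped = new ScopedSettings();
            MergeConfig config = new MergeConfig(4);                      // 1. fully construct
            scoped.addSettingsUpdateConsumer(config::setMaxThreadCount);  // 2. then register
            scoped.applyUpdate(8);  // a dynamic update reaches a complete object
        }
    }
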
diff --git a/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java b/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java
index 362e9099ee..fc9f30cf3f 100644
--- a/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java
+++ b/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java
@@ -23,6 +23,7 @@ import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.TieredMergePolicy;
import org.elasticsearch.common.logging.ESLogger;
+import org.elasticsearch.common.settings.IndexScopedSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
@@ -137,16 +138,9 @@ public final class MergePolicyConfig {
public static final String INDEX_MERGE_ENABLED = "index.merge.enabled"; // don't convert to Setting<> and register... we only set this in tests and register via a plugin
- MergePolicyConfig(ESLogger logger, IndexSettings indexSettings) {
+ MergePolicyConfig(ESLogger logger, IndexSettings indexSettings) {
this.logger = logger;
- indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_COMPOUND_FORMAT_SETTING, this::setNoCFSRatio);
- indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING, this::expungeDeletesAllowed);
- indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING, this::floorSegmentSetting);
- indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING, this::maxMergesAtOnce);
- indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT_SETTING, this::maxMergesAtOnceExplicit);
- indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING, this::maxMergedSegment);
- indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING, this::segmentsPerTier);
- indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT_SETTING, this::reclaimDeletesWeight);
+ IndexScopedSettings scopedSettings = indexSettings.getScopedSettings();
double forceMergeDeletesPctAllowed = indexSettings.getValue(INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING); // percentage
ByteSizeValue floorSegment = indexSettings.getValue(INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING);
int maxMergeAtOnce = indexSettings.getValue(INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING);
@@ -168,39 +162,41 @@ public final class MergePolicyConfig {
mergePolicy.setMaxMergedSegmentMB(maxMergedSegment.mbFrac());
mergePolicy.setSegmentsPerTier(segmentsPerTier);
mergePolicy.setReclaimDeletesWeight(reclaimDeletesWeight);
- logger.debug("using [tiered] merge mergePolicy with expunge_deletes_allowed[{}], floor_segment[{}], max_merge_at_once[{}], max_merge_at_once_explicit[{}], max_merged_segment[{}], segments_per_tier[{}], reclaim_deletes_weight[{}]",
+ if (logger.isTraceEnabled()) {
+ logger.trace("using [tiered] merge mergePolicy with expunge_deletes_allowed[{}], floor_segment[{}], max_merge_at_once[{}], max_merge_at_once_explicit[{}], max_merged_segment[{}], segments_per_tier[{}], reclaim_deletes_weight[{}]",
forceMergeDeletesPctAllowed, floorSegment, maxMergeAtOnce, maxMergeAtOnceExplicit, maxMergedSegment, segmentsPerTier, reclaimDeletesWeight);
+ }
}
- private void reclaimDeletesWeight(Double reclaimDeletesWeight) {
+ void setReclaimDeletesWeight(Double reclaimDeletesWeight) {
mergePolicy.setReclaimDeletesWeight(reclaimDeletesWeight);
}
- private void segmentsPerTier(Double segmentsPerTier) {
+ void setSegmentsPerTier(Double segmentsPerTier) {
mergePolicy.setSegmentsPerTier(segmentsPerTier);
}
- private void maxMergedSegment(ByteSizeValue maxMergedSegment) {
+ void setMaxMergedSegment(ByteSizeValue maxMergedSegment) {
mergePolicy.setMaxMergedSegmentMB(maxMergedSegment.mbFrac());
}
- private void maxMergesAtOnceExplicit(Integer maxMergeAtOnceExplicit) {
+ void setMaxMergesAtOnceExplicit(Integer maxMergeAtOnceExplicit) {
mergePolicy.setMaxMergeAtOnceExplicit(maxMergeAtOnceExplicit);
}
- private void maxMergesAtOnce(Integer maxMergeAtOnce) {
+ void setMaxMergesAtOnce(Integer maxMergeAtOnce) {
mergePolicy.setMaxMergeAtOnce(maxMergeAtOnce);
}
- private void floorSegmentSetting(ByteSizeValue floorSegementSetting) {
+ void setFloorSegmentSetting(ByteSizeValue floorSegementSetting) {
mergePolicy.setFloorSegmentMB(floorSegementSetting.mbFrac());
}
- private void expungeDeletesAllowed(Double value) {
+ void setExpungeDeletesAllowed(Double value) {
mergePolicy.setForceMergeDeletesPctAllowed(value);
}
- private void setNoCFSRatio(Double noCFSRatio) {
+ void setNoCFSRatio(Double noCFSRatio) {
mergePolicy.setNoCFSRatio(noCFSRatio);
}
diff --git a/core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java b/core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java
index 59576f1869..0d212a4eb3 100644
--- a/core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java
+++ b/core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java
@@ -21,9 +21,7 @@ package org.elasticsearch.index;
import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.elasticsearch.common.settings.Setting;
-import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.EsExecutors;
-import org.elasticsearch.index.IndexSettings;
/**
* The merge scheduler (<code>ConcurrentMergeScheduler</code>) controls the execution of
@@ -62,9 +60,6 @@ public final class MergeSchedulerConfig {
private volatile int maxMergeCount;
MergeSchedulerConfig(IndexSettings indexSettings) {
- indexSettings.getScopedSettings().addSettingsUpdateConsumer(MAX_THREAD_COUNT_SETTING, this::setMaxThreadCount);
- indexSettings.getScopedSettings().addSettingsUpdateConsumer(MAX_MERGE_COUNT_SETTING, this::setMaxMergeCount);
- indexSettings.getScopedSettings().addSettingsUpdateConsumer(AUTO_THROTTLE_SETTING, this::setAutoThrottle);
maxThreadCount = indexSettings.getValue(MAX_THREAD_COUNT_SETTING);
maxMergeCount = indexSettings.getValue(MAX_MERGE_COUNT_SETTING);
this.autoThrottle = indexSettings.getValue(AUTO_THROTTLE_SETTING);
diff --git a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java
index f18cc631ae..52858416ee 100644
--- a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java
+++ b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java
@@ -23,7 +23,6 @@ import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.ReaderUtil;
-import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
@@ -120,10 +119,10 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L
final Object coreCacheReader = context.reader().getCoreCacheKey();
final ShardId shardId = ShardUtils.extractShardId(context.reader());
if (shardId != null // can't require it because of the percolator
- && indexSettings.getIndex().getName().equals(shardId.getIndex()) == false) {
+ && indexSettings.getIndex().equals(shardId.getIndex()) == false) {
// insanity
- throw new IllegalStateException("Trying to load bit set for index [" + shardId.getIndex()
- + "] with cache of index [" + indexSettings.getIndex().getName() + "]");
+ throw new IllegalStateException("Trying to load bit set for index " + shardId.getIndex()
+ + " with cache of index " + indexSettings.getIndex());
}
Cache<Query, Value> filterToFbs = loadedFilters.computeIfAbsent(coreCacheReader, key -> {
context.reader().addCoreClosedListener(BitsetFilterCache.this);
diff --git a/core/src/main/java/org/elasticsearch/index/cache/query/index/IndexQueryCache.java b/core/src/main/java/org/elasticsearch/index/cache/query/index/IndexQueryCache.java
index 04f66290a5..352d6af5ee 100644
--- a/core/src/main/java/org/elasticsearch/index/cache/query/index/IndexQueryCache.java
+++ b/core/src/main/java/org/elasticsearch/index/cache/query/index/IndexQueryCache.java
@@ -22,7 +22,6 @@ package org.elasticsearch.index.cache.query.index;
import org.apache.lucene.search.QueryCachingPolicy;
import org.apache.lucene.search.Weight;
import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.cache.query.QueryCache;
@@ -36,7 +35,6 @@ public class IndexQueryCache extends AbstractIndexComponent implements QueryCach
final IndicesQueryCache indicesQueryCache;
- @Inject
public IndexQueryCache(IndexSettings indexSettings, IndicesQueryCache indicesQueryCache) {
super(indexSettings);
this.indicesQueryCache = indicesQueryCache;
diff --git a/core/src/main/java/org/elasticsearch/index/cache/query/none/NoneQueryCache.java b/core/src/main/java/org/elasticsearch/index/cache/query/none/NoneQueryCache.java
index 6740865572..70cbfeb383 100644
--- a/core/src/main/java/org/elasticsearch/index/cache/query/none/NoneQueryCache.java
+++ b/core/src/main/java/org/elasticsearch/index/cache/query/none/NoneQueryCache.java
@@ -31,7 +31,6 @@ import org.elasticsearch.index.cache.query.QueryCache;
*/
public class NoneQueryCache extends AbstractIndexComponent implements QueryCache {
- @Inject
public NoneQueryCache(IndexSettings indexSettings) {
super(indexSettings);
logger.debug("Using no query cache");
diff --git a/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java b/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java
index 38410ab0cb..6dd710e4e8 100644
--- a/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java
+++ b/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java
@@ -162,7 +162,7 @@ class ElasticsearchConcurrentMergeScheduler extends ConcurrentMergeScheduler {
@Override
protected MergeThread getMergeThread(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
MergeThread thread = super.getMergeThread(writer, merge);
- thread.setName(EsExecutors.threadName(indexSettings, "[" + shardId.index().name() + "][" + shardId.id() + "]: " + thread.getName()));
+ thread.setName(EsExecutors.threadName(indexSettings, "[" + shardId.getIndexName() + "][" + shardId.id() + "]: " + thread.getName()));
return thread;
}
diff --git a/core/src/main/java/org/elasticsearch/index/engine/Engine.java b/core/src/main/java/org/elasticsearch/index/engine/Engine.java
index 79610906b1..0e112118da 100644
--- a/core/src/main/java/org/elasticsearch/index/engine/Engine.java
+++ b/core/src/main/java/org/elasticsearch/index/engine/Engine.java
@@ -1065,7 +1065,7 @@ public abstract class Engine implements Closeable {
}
}
- public static class CommitId implements Writeable<CommitId> {
+ public static class CommitId implements Writeable {
private final byte[] id;
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java
index b898f3ffd2..f02f924bc3 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java
@@ -28,7 +28,6 @@ import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.plain.AbstractGeoPointDVIndexFieldData;
import org.elasticsearch.index.fielddata.plain.BytesBinaryDVIndexFieldData;
-import org.elasticsearch.index.fielddata.plain.DisabledIndexFieldData;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.fielddata.plain.GeoPointArrayIndexFieldData;
import org.elasticsearch.index.fielddata.plain.IndexIndexFieldData;
@@ -79,6 +78,14 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo
private static final String DOC_VALUES_FORMAT = "doc_values";
private static final String PAGED_BYTES_FORMAT = "paged_bytes";
+ private static final IndexFieldData.Builder DISABLED_BUILDER = new IndexFieldData.Builder() {
+ @Override
+ public IndexFieldData<?> build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
+ CircuitBreakerService breakerService, MapperService mapperService) {
+ throw new IllegalStateException("Field data loading is forbidden on [" + fieldType.name() + "]");
+ }
+ };
+
private final static Map<String, IndexFieldData.Builder> buildersByType;
private final static Map<String, IndexFieldData.Builder> docValuesBuildersByType;
private final static Map<Tuple<String, String>, IndexFieldData.Builder> buildersByTypeAndFormat;
@@ -96,7 +103,7 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo
buildersByTypeBuilder.put("geo_point", new GeoPointArrayIndexFieldData.Builder());
buildersByTypeBuilder.put(ParentFieldMapper.NAME, new ParentChildIndexFieldData.Builder());
buildersByTypeBuilder.put(IndexFieldMapper.NAME, new IndexIndexFieldData.Builder());
- buildersByTypeBuilder.put("binary", new DisabledIndexFieldData.Builder());
+ buildersByTypeBuilder.put("binary", DISABLED_BUILDER);
buildersByTypeBuilder.put(BooleanFieldMapper.CONTENT_TYPE, MISSING_DOC_VALUES_BUILDER);
buildersByType = unmodifiableMap(buildersByTypeBuilder);
@@ -117,35 +124,35 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo
buildersByTypeAndFormat = MapBuilder.<Tuple<String, String>, IndexFieldData.Builder>newMapBuilder()
.put(Tuple.tuple("string", PAGED_BYTES_FORMAT), new PagedBytesIndexFieldData.Builder())
.put(Tuple.tuple("string", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder())
- .put(Tuple.tuple("string", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
+ .put(Tuple.tuple("string", DISABLED_FORMAT), DISABLED_BUILDER)
.put(Tuple.tuple("float", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.FLOAT))
- .put(Tuple.tuple("float", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
+ .put(Tuple.tuple("float", DISABLED_FORMAT), DISABLED_BUILDER)
.put(Tuple.tuple("double", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.DOUBLE))
- .put(Tuple.tuple("double", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
+ .put(Tuple.tuple("double", DISABLED_FORMAT), DISABLED_BUILDER)
.put(Tuple.tuple("byte", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.BYTE))
- .put(Tuple.tuple("byte", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
+ .put(Tuple.tuple("byte", DISABLED_FORMAT), DISABLED_BUILDER)
.put(Tuple.tuple("short", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.SHORT))
- .put(Tuple.tuple("short", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
+ .put(Tuple.tuple("short", DISABLED_FORMAT), DISABLED_BUILDER)
.put(Tuple.tuple("int", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.INT))
- .put(Tuple.tuple("int", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
+ .put(Tuple.tuple("int", DISABLED_FORMAT), DISABLED_BUILDER)
.put(Tuple.tuple("long", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.LONG))
- .put(Tuple.tuple("long", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
+ .put(Tuple.tuple("long", DISABLED_FORMAT), DISABLED_BUILDER)
.put(Tuple.tuple("geo_point", ARRAY_FORMAT), new GeoPointArrayIndexFieldData.Builder())
.put(Tuple.tuple("geo_point", DOC_VALUES_FORMAT), new AbstractGeoPointDVIndexFieldData.Builder())
- .put(Tuple.tuple("geo_point", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
+ .put(Tuple.tuple("geo_point", DISABLED_FORMAT), DISABLED_BUILDER)
.put(Tuple.tuple("binary", DOC_VALUES_FORMAT), new BytesBinaryDVIndexFieldData.Builder())
- .put(Tuple.tuple("binary", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
+ .put(Tuple.tuple("binary", DISABLED_FORMAT), DISABLED_BUILDER)
.put(Tuple.tuple(BooleanFieldMapper.CONTENT_TYPE, DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.BOOLEAN))
- .put(Tuple.tuple(BooleanFieldMapper.CONTENT_TYPE, DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
+ .put(Tuple.tuple(BooleanFieldMapper.CONTENT_TYPE, DISABLED_FORMAT), DISABLED_BUILDER)
.immutableMap();
}
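
DisabledIndexFieldData, a class whose only behavior was to throw, is collapsed into a single shared builder constant that fails fast. A minimal sketch of the same pattern with an illustrative Builder interface:

    // A shared constant whose only behavior is to fail fast, replacing a
    // whole class that existed just to throw.
    public class DisabledBuilderSketch {
        interface Builder { Object build(String fieldName); }

        static final Builder DISABLED = fieldName -> {
            throw new IllegalStateException("Field data loading is forbidden on [" + fieldName + "]");
        };

        public static void main(String[] args) {
            try {
                DISABLED.build("binary_field");
            } catch (IllegalStateException e) {
                System.out.println(e.getMessage());
            }
        }
    }
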
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java
index da12065780..9e0d0fcc40 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java
@@ -121,7 +121,7 @@ public interface ScriptDocValues<T> extends List<T> {
public long getValue() {
int numValues = values.count();
if (numValues == 0) {
- return 0l;
+ return 0L;
}
return values.valueAt(0);
}
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java
deleted file mode 100644
index 86daaf1a25..0000000000
--- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.fielddata.plain;
-
-import org.apache.lucene.index.LeafReaderContext;
-import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.fielddata.AtomicFieldData;
-import org.elasticsearch.index.fielddata.FieldDataType;
-import org.elasticsearch.index.fielddata.IndexFieldData;
-import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
-import org.elasticsearch.index.fielddata.IndexFieldDataCache;
-import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.MapperService;
-import org.elasticsearch.indices.breaker.CircuitBreakerService;
-import org.elasticsearch.search.MultiValueMode;
-
-/**
- * A field data implementation that forbids loading and will throw an {@link IllegalStateException} if you try to load
- * {@link AtomicFieldData} instances.
- */
-public final class DisabledIndexFieldData extends AbstractIndexFieldData<AtomicFieldData> {
-
- public static class Builder implements IndexFieldData.Builder {
- @Override
- public IndexFieldData<AtomicFieldData> build(IndexSettings indexSettings, MappedFieldType fieldType,
- IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
- // Ignore Circuit Breaker
- return new DisabledIndexFieldData(indexSettings, fieldType.name(), fieldType.fieldDataType(), cache);
- }
- }
-
- public DisabledIndexFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType, IndexFieldDataCache cache) {
- super(indexSettings, fieldName, fieldDataType, cache);
- }
-
- @Override
- public AtomicFieldData loadDirect(LeafReaderContext context) throws Exception {
- throw fail();
- }
-
- @Override
- protected AtomicFieldData empty(int maxDoc) {
- throw fail();
- }
-
- @Override
- public IndexFieldData.XFieldComparatorSource comparatorSource(Object missingValue, MultiValueMode sortMode, Nested nested) {
- throw fail();
- }
-
- private IllegalStateException fail() {
- return new IllegalStateException("Field data loading is forbidden on " + getFieldName());
- }
-
-}
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java
index f2c4fa826f..7fdacbf53c 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java
@@ -102,7 +102,7 @@ public class IndexIndexFieldData extends AbstractIndexOrdinalsFieldData {
private IndexIndexFieldData(IndexSettings indexSettings, String name) {
super(indexSettings, name, new FieldDataType("string"), null, null);
- atomicFieldData = new IndexAtomicFieldData(index().name());
+ atomicFieldData = new IndexAtomicFieldData(index().getName());
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/index/get/ShardGetService.java b/core/src/main/java/org/elasticsearch/index/get/ShardGetService.java
index 32e21be032..51391061a6 100644
--- a/core/src/main/java/org/elasticsearch/index/get/ShardGetService.java
+++ b/core/src/main/java/org/elasticsearch/index/get/ShardGetService.java
@@ -50,6 +50,7 @@ import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.shard.AbstractIndexShardComponent;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.translog.Translog;
+import org.elasticsearch.search.fetch.parent.ParentFieldSubFetchPhase;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup;
@@ -111,7 +112,7 @@ public final class ShardGetService extends AbstractIndexShardComponent {
*/
public GetResult get(Engine.GetResult engineGetResult, String id, String type, String[] fields, FetchSourceContext fetchSourceContext, boolean ignoreErrorsOnGeneratedFields) {
if (!engineGetResult.exists()) {
- return new GetResult(shardId.index().name(), type, id, -1, false, null, null);
+ return new GetResult(shardId.getIndexName(), type, id, -1, false, null, null);
}
currentMetric.inc();
@@ -120,7 +121,7 @@ public final class ShardGetService extends AbstractIndexShardComponent {
DocumentMapper docMapper = mapperService.documentMapper(type);
if (docMapper == null) {
missingMetric.inc(System.nanoTime() - now);
- return new GetResult(shardId.index().name(), type, id, -1, false, null, null);
+ return new GetResult(shardId.getIndexName(), type, id, -1, false, null, null);
}
fetchSourceContext = normalizeFetchSourceContent(fetchSourceContext, fields);
GetResult getResult = innerGetLoadFromStoredFields(type, id, fields, fetchSourceContext, engineGetResult, docMapper, ignoreErrorsOnGeneratedFields);
@@ -169,25 +170,25 @@ public final class ShardGetService extends AbstractIndexShardComponent {
}
}
if (get == null) {
- return new GetResult(shardId.index().name(), type, id, -1, false, null, null);
+ return new GetResult(shardId.getIndexName(), type, id, -1, false, null, null);
}
if (!get.exists()) {
// no need to release here as well..., we release in the for loop for non exists
- return new GetResult(shardId.index().name(), type, id, -1, false, null, null);
+ return new GetResult(shardId.getIndexName(), type, id, -1, false, null, null);
}
} else {
get = indexShard.get(new Engine.Get(realtime, new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(type, id)))
.version(version).versionType(versionType));
if (!get.exists()) {
get.release();
- return new GetResult(shardId.index().name(), type, id, -1, false, null, null);
+ return new GetResult(shardId.getIndexName(), type, id, -1, false, null, null);
}
}
DocumentMapper docMapper = mapperService.documentMapper(type);
if (docMapper == null) {
get.release();
- return new GetResult(shardId.index().name(), type, id, -1, false, null, null);
+ return new GetResult(shardId.getIndexName(), type, id, -1, false, null, null);
}
try {
@@ -301,7 +302,7 @@ public final class ShardGetService extends AbstractIndexShardComponent {
}
}
- return new GetResult(shardId.index().name(), type, id, get.version(), get.exists(), sourceToBeReturned, fields);
+ return new GetResult(shardId.getIndexName(), type, id, get.version(), get.exists(), sourceToBeReturned, fields);
}
} finally {
get.release();
@@ -350,6 +351,14 @@ public final class ShardGetService extends AbstractIndexShardComponent {
}
}
+ if (docMapper.parentFieldMapper().active()) {
+ String parentId = ParentFieldSubFetchPhase.getParentId(docMapper.parentFieldMapper(), docIdAndVersion.context.reader(), docIdAndVersion.docId);
+ if (fields == null) {
+ fields = new HashMap<>(1);
+ }
+ fields.put(ParentFieldMapper.NAME, new GetField(ParentFieldMapper.NAME, Collections.singletonList(parentId)));
+ }
+
// now, go and do the script thingy if needed
if (gFields != null && gFields.length > 0) {
@@ -409,7 +418,7 @@ public final class ShardGetService extends AbstractIndexShardComponent {
}
}
- return new GetResult(shardId.index().name(), type, id, get.version(), get.exists(), source, fields);
+ return new GetResult(shardId.getIndexName(), type, id, get.version(), get.exists(), source, fields);
}
private static FieldsVisitor buildFieldsVisitors(String[] fields, FetchSourceContext fetchSourceContext) {
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java
index 3eca73cffe..a983850356 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java
@@ -24,6 +24,7 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
+import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.lucene.Lucene;
@@ -70,6 +71,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
this.fieldType = fieldType.clone();
this.defaultFieldType = defaultFieldType.clone();
this.defaultOptions = fieldType.indexOptions(); // we have to store it since the fieldType is mutable
+ this.docValuesSet = fieldType.hasDocValues();
multiFieldsBuilder = new MultiFields.Builder();
}
@@ -222,6 +224,15 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
return context.path().pathAsText(name);
}
+ protected boolean defaultDocValues(Version indexCreated) {
+ if (indexCreated.onOrAfter(Version.V_3_0_0)) {
+ // add doc values by default to keyword (boolean, numerics, etc.) fields
+ return fieldType.tokenized() == false;
+ } else {
+ return fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE;
+ }
+ }
+
protected void setupFieldType(BuilderContext context) {
fieldType.setName(buildFullName(context));
if (fieldType.indexAnalyzer() == null && fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE) {
@@ -232,17 +243,10 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
Settings settings = Settings.builder().put(fieldType.fieldDataType().getSettings()).put(fieldDataSettings).build();
fieldType.setFieldDataType(new FieldDataType(fieldType.fieldDataType().getType(), settings));
}
- boolean defaultDocValues = fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE;
- // backcompat for "fielddata: format: docvalues" for now...
- boolean fieldDataDocValues = fieldType.fieldDataType() != null
- && FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(fieldType.fieldDataType().getFormat(context.indexSettings()));
- if (fieldDataDocValues && docValuesSet && fieldType.hasDocValues() == false) {
- // this forces the doc_values setting to be written, so fielddata does not mask the original setting
- defaultDocValues = true;
- }
+ boolean defaultDocValues = defaultDocValues(context.indexCreatedVersion());
defaultFieldType.setHasDocValues(defaultDocValues);
if (docValuesSet == false) {
- fieldType.setHasDocValues(defaultDocValues || fieldDataDocValues);
+ fieldType.setHasDocValues(defaultDocValues);
}
}
}
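With the fielddata back-compat branch removed, the doc-values default becomes a pure function of the index-created version, as the new defaultDocValues method above shows. An illustrative restatement (not the mapper code itself):

// pre-3.0 indices: doc_values default only for untokenized fields that are indexed
// 3.0+ indices:    doc_values default for every untokenized field, indexed or not
static boolean defaultDocValues(Version indexCreated, boolean tokenized, boolean indexed) {
    if (indexCreated.onOrAfter(Version.V_3_0_0)) {
        return tokenized == false;
    }
    return tokenized == false && indexed;
}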
@@ -394,7 +398,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
boolean defaultIndexed = defaultFieldType.indexOptions() != IndexOptions.NONE;
if (includeDefaults || indexed != defaultIndexed ||
fieldType().tokenized() != defaultFieldType.tokenized()) {
- builder.field("index", indexTokenizeOptionToString(indexed, fieldType().tokenized()));
+ builder.field("index", indexTokenizeOption(indexed, fieldType().tokenized()));
}
if (includeDefaults || fieldType().stored() != defaultFieldType.stored()) {
builder.field("store", fieldType().stored());
@@ -491,14 +495,9 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
}
}
- protected static String indexTokenizeOptionToString(boolean indexed, boolean tokenized) {
- if (!indexed) {
- return "no";
- } else if (tokenized) {
- return "analyzed";
- } else {
- return "not_analyzed";
- }
+ /* Only protected so that StringFieldMapper can override it */
+ protected Object indexTokenizeOption(boolean indexed, boolean tokenized) {
+ return indexed;
}
protected boolean hasCustomFieldDataSettings() {
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java
index 09d459fc4a..f030ebed7a 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java
@@ -199,10 +199,8 @@ public abstract class MappedFieldType extends FieldType {
if (stored() != other.stored()) {
conflicts.add("mapper [" + name() + "] has different [store] values");
}
- if (hasDocValues() == false && other.hasDocValues()) {
- // don't add conflict if this mapper has doc values while the mapper to merge doesn't since doc values are implicitly set
- // when the doc_values field data format is configured
- conflicts.add("mapper [" + name() + "] has different [doc_values] values, cannot change from disabled to enabled");
+ if (hasDocValues() != other.hasDocValues()) {
+ conflicts.add("mapper [" + name() + "] has different [doc_values] values");
}
if (omitNorms() && !other.omitNorms()) {
conflicts.add("mapper [" + name() + "] has different [omit_norms] values, cannot change from disable to enabled");
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java
index 999eeb2edf..b25f5f6a02 100755
--- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java
@@ -23,27 +23,14 @@ import com.carrotsearch.hppc.ObjectHashSet;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;
-import org.apache.lucene.index.IndexOptions;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.queries.TermsQuery;
-import org.apache.lucene.search.BooleanClause;
-import org.apache.lucene.search.BooleanClause.Occur;
-import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.ConstantScoreQuery;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchGenerationException;
-import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.compress.CompressedXContent;
-import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.mapper.Mapper.BuilderContext;
-import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.similarity.SimilarityService;
@@ -63,7 +50,6 @@ import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
-import java.util.concurrent.CopyOnWriteArrayList;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collectors;
@@ -95,7 +81,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
}
public static final String DEFAULT_MAPPING = "_default_";
- public static final Setting<Long> INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING = Setting.longSetting("index.mapping.nested_fields.limit", 50l, 0, true, Setting.Scope.INDEX);
+ public static final Setting<Long> INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING = Setting.longSetting("index.mapping.nested_fields.limit", 50L, 0, true, Setting.Scope.INDEX);
public static final boolean INDEX_MAPPER_DYNAMIC_DEFAULT = true;
public static final Setting<Boolean> INDEX_MAPPER_DYNAMIC_SETTING = Setting.boolSetting("index.mapper.dynamic", INDEX_MAPPER_DYNAMIC_DEFAULT, false, Setting.Scope.INDEX);
private static ObjectHashSet<String> META_FIELDS = ObjectHashSet.from(
@@ -125,8 +111,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
private final MapperAnalyzerWrapper searchAnalyzer;
private final MapperAnalyzerWrapper searchQuoteAnalyzer;
- private final List<DocumentTypeListener> typeListeners = new CopyOnWriteArrayList<>();
-
private volatile Map<String, MappedFieldType> unmappedFieldTypes = emptyMap();
private volatile Set<String> parentTypes = emptySet();
@@ -212,14 +196,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
return this.documentParser;
}
- public void addTypeListener(DocumentTypeListener listener) {
- typeListeners.add(listener);
- }
-
- public void removeTypeListener(DocumentTypeListener listener) {
- typeListeners.remove(listener);
- }
-
public DocumentMapper merge(String type, CompressedXContent mappingSource, MergeReason reason, boolean updateAllTypes) {
if (DEFAULT_MAPPING.equals(type)) {
// verify we can parse it
@@ -335,14 +311,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
this.fullPathObjectMappers = fullPathObjectMappers;
this.parentTypes = parentTypes;
- // 5. send notifications about the change
- if (oldMapper == null) {
- // means the mapping was created
- for (DocumentTypeListener typeListener : typeListeners) {
- typeListener.beforeCreate(mapper);
- }
- }
-
assert assertSerialization(newMapper);
assert assertMappersShareSameFieldType();
@@ -428,7 +396,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
}
}
if (allowedNestedFields >= 0 && actualNestedFields > allowedNestedFields) {
- throw new IllegalArgumentException("Limit of nested fields [" + allowedNestedFields + "] in index [" + index().name() + "] has been exceeded");
+ throw new IllegalArgumentException("Limit of nested fields [" + allowedNestedFields + "] in index [" + index().getName() + "] has been exceeded");
}
}
@@ -482,105 +450,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
}
/**
- * A filter for search. If a filter is required, will return it, otherwise, will return <tt>null</tt>.
- */
- @Nullable
- public Query searchFilter(String... types) {
- boolean filterPercolateType = hasMapping(PercolatorService.TYPE_NAME);
- if (types != null && filterPercolateType) {
- for (String type : types) {
- if (PercolatorService.TYPE_NAME.equals(type)) {
- filterPercolateType = false;
- break;
- }
- }
- }
- Query percolatorType = null;
- if (filterPercolateType) {
- percolatorType = documentMapper(PercolatorService.TYPE_NAME).typeFilter();
- }
-
- if (types == null || types.length == 0) {
- if (hasNested && filterPercolateType) {
- BooleanQuery.Builder bq = new BooleanQuery.Builder();
- bq.add(percolatorType, Occur.MUST_NOT);
- bq.add(Queries.newNonNestedFilter(), Occur.MUST);
- return new ConstantScoreQuery(bq.build());
- } else if (hasNested) {
- return Queries.newNonNestedFilter();
- } else if (filterPercolateType) {
- return new ConstantScoreQuery(Queries.not(percolatorType));
- } else {
- return null;
- }
- }
- // if we filter by types, we don't need to filter by non nested docs
- // since they have different types (starting with __)
- if (types.length == 1) {
- DocumentMapper docMapper = documentMapper(types[0]);
- Query filter = docMapper != null ? docMapper.typeFilter() : new TermQuery(new Term(TypeFieldMapper.NAME, types[0]));
- if (filterPercolateType) {
- BooleanQuery.Builder bq = new BooleanQuery.Builder();
- bq.add(percolatorType, Occur.MUST_NOT);
- bq.add(filter, Occur.MUST);
- return new ConstantScoreQuery(bq.build());
- } else {
- return filter;
- }
- }
- // see if we can use terms filter
- boolean useTermsFilter = true;
- for (String type : types) {
- DocumentMapper docMapper = documentMapper(type);
- if (docMapper == null) {
- useTermsFilter = false;
- break;
- }
- if (docMapper.typeMapper().fieldType().indexOptions() == IndexOptions.NONE) {
- useTermsFilter = false;
- break;
- }
- }
-
- // We only use terms filter if there is a type filter, this means we don't need to check for hasNested here
- if (useTermsFilter) {
- BytesRef[] typesBytes = new BytesRef[types.length];
- for (int i = 0; i < typesBytes.length; i++) {
- typesBytes[i] = new BytesRef(types[i]);
- }
- TermsQuery termsFilter = new TermsQuery(TypeFieldMapper.NAME, typesBytes);
- if (filterPercolateType) {
- BooleanQuery.Builder bq = new BooleanQuery.Builder();
- bq.add(percolatorType, Occur.MUST_NOT);
- bq.add(termsFilter, Occur.MUST);
- return new ConstantScoreQuery(bq.build());
- } else {
- return termsFilter;
- }
- } else {
- BooleanQuery.Builder typesBool = new BooleanQuery.Builder();
- for (String type : types) {
- DocumentMapper docMapper = documentMapper(type);
- if (docMapper == null) {
- typesBool.add(new TermQuery(new Term(TypeFieldMapper.NAME, type)), BooleanClause.Occur.SHOULD);
- } else {
- typesBool.add(docMapper.typeFilter(), BooleanClause.Occur.SHOULD);
- }
- }
- BooleanQuery.Builder bool = new BooleanQuery.Builder();
- bool.add(typesBool.build(), Occur.MUST);
- if (filterPercolateType) {
- bool.add(percolatorType, BooleanClause.Occur.MUST_NOT);
- }
- if (hasNested) {
- bool.add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST);
- }
-
- return new ConstantScoreQuery(bool.build());
- }
- }
-
- /**
* Returns the {@link MappedFieldType} for the given fullName.
*
* If multiple types have fields with the same full name, the first is returned.
@@ -642,33 +511,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
return this.searchQuoteAnalyzer;
}
- /**
- * Resolves the closest inherited {@link ObjectMapper} that is nested.
- */
- public ObjectMapper resolveClosestNestedObjectMapper(String fieldName) {
- int indexOf = fieldName.lastIndexOf('.');
- if (indexOf == -1) {
- return null;
- } else {
- do {
- String objectPath = fieldName.substring(0, indexOf);
- ObjectMapper objectMapper = fullPathObjectMappers.get(objectPath);
- if (objectMapper == null) {
- indexOf = objectPath.lastIndexOf('.');
- continue;
- }
-
- if (objectMapper.nested().isNested()) {
- return objectMapper;
- }
-
- indexOf = objectPath.lastIndexOf('.');
- } while (indexOf != -1);
- }
-
- return null;
- }
-
public Set<String> getParentTypes() {
return parentTypes;
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java b/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java
index 3c12f51a7f..938dd778b0 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java
@@ -182,11 +182,6 @@ public abstract class ParseContext {
}
@Override
- public boolean flyweight() {
- return in.flyweight();
- }
-
- @Override
public DocumentMapperParser docMapperParser() {
return in.docMapperParser();
}
@@ -412,11 +407,6 @@ public abstract class ParseContext {
}
@Override
- public boolean flyweight() {
- return sourceToParse.flyweight();
- }
-
- @Override
public DocumentMapperParser docMapperParser() {
return this.docMapperParser;
}
@@ -580,8 +570,6 @@ public abstract class ParseContext {
}
}
- public abstract boolean flyweight();
-
public abstract DocumentMapperParser docMapperParser();
/**
@@ -658,6 +646,7 @@ public abstract class ParseContext {
public abstract SourceToParse sourceToParse();
+ @Nullable
public abstract BytesReference source();
// should only be used by SourceFieldMapper to update with a compressed source
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java b/core/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java
index f65072d489..6094caa319 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java
@@ -46,8 +46,6 @@ public class SourceToParse {
private final XContentParser parser;
- private boolean flyweight = false;
-
private String index;
private String type;
@@ -106,15 +104,6 @@ public class SourceToParse {
return this;
}
- public SourceToParse flyweight(boolean flyweight) {
- this.flyweight = flyweight;
- return this;
- }
-
- public boolean flyweight() {
- return this.flyweight;
- }
-
public String id() {
return this.id;
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java
index 76f8eb34a7..29d2ce2176 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java
@@ -40,7 +40,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
import static org.elasticsearch.index.mapper.MapperBuilders.booleanField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
@@ -106,7 +106,7 @@ public class BooleanFieldMapper extends FieldMapper {
if (propNode == null) {
throw new MapperParsingException("Property [null_value] cannot be null.");
}
- builder.nullValue(nodeBooleanValue(propNode));
+ builder.nullValue(lenientNodeBooleanValue(propNode));
iterator.remove();
} else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
iterator.remove();
@@ -225,7 +225,9 @@ public class BooleanFieldMapper extends FieldMapper {
if (value == null) {
return;
}
- fields.add(new Field(fieldType().name(), value ? "T" : "F", fieldType()));
+ if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
+ fields.add(new Field(fieldType().name(), value ? "T" : "F", fieldType()));
+ }
if (fieldType().hasDocValues()) {
fields.add(new SortedNumericDocValuesField(fieldType().name(), value ? 1 : 0));
}
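With the new guard, a boolean that is neither indexed nor stored emits no "T"/"F" field at all; a doc-values-only mapping writes just the numeric doc value. A sketch, using a hypothetical field named flag:

import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.IndexableField;

// Fields emitted for a boolean mapped with index:false, store:false, doc_values:true
static List<IndexableField> booleanFields(boolean value) {
    List<IndexableField> fields = new ArrayList<>();
    // IndexOptions.NONE and not stored: the "T"/"F" Field is skipped entirely
    fields.add(new SortedNumericDocValuesField("flag", value ? 1 : 0));
    return fields;
}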
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java
index 46b4097c2c..918731d024 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java
@@ -31,6 +31,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.analysis.NamedAnalyzer;
+import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
@@ -146,6 +147,27 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
StringFieldMapper.Builder builder = stringField(name);
+ // hack around the fact that the string type can't yet accept plain true/false for
+ // the index property and still accepts no/not_analyzed/analyzed
+ final Object index = node.remove("index");
+ if (index != null) {
+ final String normalizedIndex = Strings.toUnderscoreCase(index.toString());
+ switch (normalizedIndex) {
+ case "analyzed":
+ builder.tokenized(true);
+ node.put("index", true);
+ break;
+ case "not_analyzed":
+ builder.tokenized(false);
+ node.put("index", true);
+ break;
+ case "no":
+ node.put("index", false);
+ break;
+ default:
+ throw new IllegalArgumentException("Can't parse [index] value [" + index + "], expected [true], [false], [no], [not_analyzed] or [analyzed]");
+ }
+ }
parseTextField(builder, name, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
@@ -369,6 +391,17 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
}
@Override
+ protected String indexTokenizeOption(boolean indexed, boolean tokenized) {
+ if (!indexed) {
+ return "no";
+ } else if (tokenized) {
+ return "analyzed";
+ } else {
+ return "not_analyzed";
+ }
+ }
+
+ @Override
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java
index d7f3570a53..e2f59b3ef4 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java
@@ -28,6 +28,7 @@ import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.loader.SettingsLoader;
+import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
@@ -45,7 +46,7 @@ import java.util.Map;
import java.util.Map.Entry;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.isArray;
-import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeFloatValue;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeIntegerValue;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeMapValue;
@@ -62,6 +63,14 @@ public class TypeParsers {
public static final String INDEX_OPTIONS_POSITIONS = "positions";
public static final String INDEX_OPTIONS_OFFSETS = "offsets";
+ private static boolean nodeBooleanValue(Object node, Mapper.TypeParser.ParserContext parserContext) {
+ if (parserContext.indexVersionCreated().onOrAfter(Version.V_3_0_0)) {
+ return XContentMapValues.nodeBooleanValue(node);
+ } else {
+ return XContentMapValues.lenientNodeBooleanValue(node);
+ }
+ }
+
public static void parseNumberField(NumberFieldMapper.Builder builder, String name, Map<String, Object> numberNode, Mapper.TypeParser.ParserContext parserContext) {
parseField(builder, name, numberNode, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = numberNode.entrySet().iterator(); iterator.hasNext();) {
@@ -72,13 +81,13 @@ public class TypeParsers {
builder.precisionStep(nodeIntegerValue(propNode));
iterator.remove();
} else if (propName.equals("ignore_malformed")) {
- builder.ignoreMalformed(nodeBooleanValue(propNode));
+ builder.ignoreMalformed(nodeBooleanValue(propNode, parserContext));
iterator.remove();
} else if (propName.equals("coerce")) {
- builder.coerce(nodeBooleanValue(propNode));
+ builder.coerce(nodeBooleanValue(propNode, parserContext));
iterator.remove();
} else if (propName.equals("omit_norms")) {
- builder.omitNorms(nodeBooleanValue(propNode));
+ builder.omitNorms(nodeBooleanValue(propNode, parserContext));
iterator.remove();
} else if (propName.equals("similarity")) {
SimilarityProvider similarityProvider = resolveSimilarity(parserContext, name, propNode.toString());
@@ -102,16 +111,16 @@ public class TypeParsers {
parseTermVector(name, propNode.toString(), builder);
iterator.remove();
} else if (propName.equals("store_term_vectors")) {
- builder.storeTermVectors(nodeBooleanValue(propNode));
+ builder.storeTermVectors(nodeBooleanValue(propNode, parserContext));
iterator.remove();
} else if (propName.equals("store_term_vector_offsets")) {
- builder.storeTermVectorOffsets(nodeBooleanValue(propNode));
+ builder.storeTermVectorOffsets(nodeBooleanValue(propNode, parserContext));
iterator.remove();
} else if (propName.equals("store_term_vector_positions")) {
- builder.storeTermVectorPositions(nodeBooleanValue(propNode));
+ builder.storeTermVectorPositions(nodeBooleanValue(propNode, parserContext));
iterator.remove();
} else if (propName.equals("store_term_vector_payloads")) {
- builder.storeTermVectorPayloads(nodeBooleanValue(propNode));
+ builder.storeTermVectorPayloads(nodeBooleanValue(propNode, parserContext));
iterator.remove();
} else if (propName.equals("analyzer")) {
NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
@@ -160,19 +169,19 @@ public class TypeParsers {
final String propName = Strings.toUnderscoreCase(entry.getKey());
final Object propNode = entry.getValue();
if (propName.equals("store")) {
- builder.store(parseStore(name, propNode.toString()));
+ builder.store(parseStore(name, propNode.toString(), parserContext));
iterator.remove();
} else if (propName.equals("index")) {
- parseIndex(name, propNode.toString(), builder);
+ builder.index(parseIndex(name, propNode.toString(), parserContext));
iterator.remove();
} else if (propName.equals(DOC_VALUES)) {
- builder.docValues(nodeBooleanValue(propNode));
+ builder.docValues(nodeBooleanValue(propNode, parserContext));
iterator.remove();
} else if (propName.equals("boost")) {
builder.boost(nodeFloatValue(propNode));
iterator.remove();
} else if (propName.equals("omit_norms")) {
- builder.omitNorms(nodeBooleanValue(propNode));
+ builder.omitNorms(nodeBooleanValue(propNode, parserContext));
iterator.remove();
} else if (propName.equals("norms")) {
final Map<String, Object> properties = nodeMapValue(propNode, "norms");
@@ -181,7 +190,7 @@ public class TypeParsers {
final String propName2 = Strings.toUnderscoreCase(entry2.getKey());
final Object propNode2 = entry2.getValue();
if (propName2.equals("enabled")) {
- builder.omitNorms(!nodeBooleanValue(propNode2));
+ builder.omitNorms(!lenientNodeBooleanValue(propNode2));
propsIterator.remove();
} else if (propName2.equals(Loading.KEY)) {
builder.normsLoading(Loading.parse(nodeStringValue(propNode2, null), null));
@@ -194,7 +203,7 @@ public class TypeParsers {
builder.indexOptions(nodeIndexOptionValue(propNode));
iterator.remove();
} else if (propName.equals("include_in_all")) {
- builder.includeInAll(nodeBooleanValue(propNode));
+ builder.includeInAll(nodeBooleanValue(propNode, parserContext));
iterator.remove();
} else if (propName.equals("similarity")) {
SimilarityProvider similarityProvider = resolveSimilarity(parserContext, name, propNode.toString());
@@ -319,28 +328,43 @@ public class TypeParsers {
}
}
- public static void parseIndex(String fieldName, String index, FieldMapper.Builder builder) throws MapperParsingException {
- index = Strings.toUnderscoreCase(index);
- if ("no".equals(index)) {
- builder.index(false);
- } else if ("not_analyzed".equals(index)) {
- builder.index(true);
- builder.tokenized(false);
- } else if ("analyzed".equals(index)) {
- builder.index(true);
- builder.tokenized(true);
+ public static boolean parseIndex(String fieldName, String index, Mapper.TypeParser.ParserContext parserContext) throws MapperParsingException {
+ if (parserContext.indexVersionCreated().onOrAfter(Version.V_3_0_0)) {
+ switch (index) {
+ case "true":
+ return true;
+ case "false":
+ return false;
+ default:
+ throw new IllegalArgumentException("Can't parse [index] value [" + index + "], expected [true] or [false]");
+ }
} else {
- throw new MapperParsingException("wrong value for index [" + index + "] for field [" + fieldName + "]");
+ final String normalizedIndex = Strings.toUnderscoreCase(index);
+ switch (normalizedIndex) {
+ case "true":
+ case "not_analyzed":
+ case "analyzed":
+ return true;
+ case "false":
+ case "no":
+ return false;
+ default:
+ throw new IllegalArgumentException("Can't parse [index] value [" + index + "], expected [true], [false], [no], [not_analyzed] or [analyzed]");
+ }
}
}
- public static boolean parseStore(String fieldName, String store) throws MapperParsingException {
- if ("no".equals(store)) {
- return false;
- } else if ("yes".equals(store)) {
- return true;
+ public static boolean parseStore(String fieldName, String store, Mapper.TypeParser.ParserContext parserContext) throws MapperParsingException {
+ if (parserContext.indexVersionCreated().onOrAfter(Version.V_3_0_0)) {
+ return XContentMapValues.nodeBooleanValue(store);
} else {
- return nodeBooleanValue(store);
+ if ("no".equals(store)) {
+ return false;
+ } else if ("yes".equals(store)) {
+ return true;
+ } else {
+ return lenientNodeBooleanValue(store);
+ }
}
}
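parseIndex and parseStore now follow the same version split: 3.0+ indices accept only true/false, while older indices keep the legacy spellings. A hypothetical call-site check, where ctx30 and ctx2x stand for ParserContext instances of a 3.0+ and a pre-3.0 index:

assert TypeParsers.parseIndex("title", "true", ctx30);
assert TypeParsers.parseIndex("title", "not_analyzed", ctx2x); // legacy spelling, pre-3.0 only
assert TypeParsers.parseStore("title", "yes", ctx2x);          // same deal for store
// TypeParsers.parseIndex("title", "no", ctx30) -> IllegalArgumentException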
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java
index 29a2aca7be..0a992aeb27 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java
@@ -199,17 +199,17 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("lat_lon")) {
- builder.enableLatLon(XContentMapValues.nodeBooleanValue(propNode));
+ builder.enableLatLon(XContentMapValues.lenientNodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("precision_step")) {
builder.precisionStep(XContentMapValues.nodeIntegerValue(propNode));
iterator.remove();
} else if (propName.equals("geohash")) {
- builder.enableGeoHash(XContentMapValues.nodeBooleanValue(propNode));
+ builder.enableGeoHash(XContentMapValues.lenientNodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("geohash_prefix")) {
- builder.geoHashPrefix(XContentMapValues.nodeBooleanValue(propNode));
- if (XContentMapValues.nodeBooleanValue(propNode)) {
+ builder.geoHashPrefix(XContentMapValues.lenientNodeBooleanValue(propNode));
+ if (XContentMapValues.lenientNodeBooleanValue(propNode)) {
builder.enableGeoHash(true);
}
iterator.remove();
@@ -221,7 +221,7 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
}
iterator.remove();
} else if (propName.equals(Names.IGNORE_MALFORMED)) {
- builder.ignoreMalformed(XContentMapValues.nodeBooleanValue(propNode));
+ builder.ignoreMalformed(XContentMapValues.lenientNodeBooleanValue(propNode));
iterator.remove();
} else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
iterator.remove();
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java
index c008be6f67..dcd57a42dc 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java
@@ -132,7 +132,7 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals(Names.COERCE)) {
- builder.coerce = XContentMapValues.nodeBooleanValue(propNode);
+ builder.coerce = XContentMapValues.lenientNodeBooleanValue(propNode);
iterator.remove();
}
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java
index 0de2cd2b60..c98744bb75 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java
@@ -52,7 +52,7 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
-import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
import static org.elasticsearch.index.mapper.MapperBuilders.geoShapeField;
@@ -184,11 +184,11 @@ public class GeoShapeFieldMapper extends FieldMapper {
builder.fieldType().setStrategyName(fieldNode.toString());
iterator.remove();
} else if (Names.COERCE.equals(fieldName)) {
- builder.coerce(nodeBooleanValue(fieldNode));
+ builder.coerce(lenientNodeBooleanValue(fieldNode));
iterator.remove();
} else if (Names.STRATEGY_POINTS_ONLY.equals(fieldName)
&& builder.fieldType().strategyName.equals(SpatialStrategy.TERM.getStrategyName()) == false) {
- builder.fieldType().setPointsOnly(XContentMapValues.nodeBooleanValue(fieldNode));
+ builder.fieldType().setPointsOnly(XContentMapValues.lenientNodeBooleanValue(fieldNode));
iterator.remove();
}
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java
index d9a345caf2..97c2fa3933 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java
@@ -46,7 +46,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeMapValue;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseTextField;
@@ -133,7 +133,7 @@ public class AllFieldMapper extends MetadataFieldMapper {
// the AllFieldMapper ctor in the builder since it is not valid. Here we validate
// the doc values settings (old and new) are rejected
Object docValues = node.get("doc_values");
- if (docValues != null && nodeBooleanValue(docValues)) {
+ if (docValues != null && lenientNodeBooleanValue(docValues)) {
throw new MapperParsingException("Field [" + name + "] is always tokenized and cannot have doc values");
}
// convoluted way of specifying doc values
@@ -152,7 +152,7 @@ public class AllFieldMapper extends MetadataFieldMapper {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
- builder.enabled(nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED);
+ builder.enabled(lenientNodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED);
iterator.remove();
}
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java
index 17d1c2b9f0..03ebcb9fe9 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java
@@ -40,7 +40,7 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
-import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
/**
* A mapper that indexes the field names of a document under <code>_field_names</code>. This mapper is typically useful in order
@@ -112,7 +112,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
- builder.enabled(nodeBooleanValue(fieldNode));
+ builder.enabled(lenientNodeBooleanValue(fieldNode));
iterator.remove();
}
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java
index a586a7b5b9..1f26dd6084 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java
@@ -220,7 +220,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
@Override
public void postParse(ParseContext context) throws IOException {
- if (context.id() == null && !context.sourceToParse().flyweight()) {
+ if (context.id() == null) {
throw new MapperParsingException("No id found while parsing the content source");
}
// it either get built in the preParse phase, or get parsed...
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java
index abb9178b87..e7cd1b107a 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java
@@ -22,25 +22,28 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
-import org.apache.lucene.queries.TermsQuery;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.DocValuesTermsQuery;
import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.lucene.BytesRefs;
-import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.loader.SettingsLoader;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.FieldDataType;
-import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
+import org.elasticsearch.index.mapper.MapperBuilders;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.Uid;
+import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
@@ -65,22 +68,13 @@ public class ParentFieldMapper extends MetadataFieldMapper {
public static class Defaults {
public static final String NAME = ParentFieldMapper.NAME;
- public static final MappedFieldType FIELD_TYPE = new ParentFieldType();
- public static final MappedFieldType JOIN_FIELD_TYPE = new ParentFieldType();
+ public static final ParentFieldType FIELD_TYPE = new ParentFieldType();
static {
- FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
- FIELD_TYPE.setTokenized(false);
- FIELD_TYPE.setStored(true);
- FIELD_TYPE.setOmitNorms(true);
- FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
- FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
- FIELD_TYPE.setName(NAME);
+ FIELD_TYPE.setIndexOptions(IndexOptions.NONE);
+ FIELD_TYPE.setHasDocValues(true);
+ FIELD_TYPE.setDocValuesType(DocValuesType.SORTED);
FIELD_TYPE.freeze();
-
- JOIN_FIELD_TYPE.setHasDocValues(true);
- JOIN_FIELD_TYPE.setDocValuesType(DocValuesType.SORTED);
- JOIN_FIELD_TYPE.freeze();
}
}
@@ -88,17 +82,10 @@ public class ParentFieldMapper extends MetadataFieldMapper {
private String parentType;
- protected String indexName;
-
private final String documentType;
- private final MappedFieldType parentJoinFieldType = Defaults.JOIN_FIELD_TYPE.clone();
-
- private final MappedFieldType childJoinFieldType = Defaults.JOIN_FIELD_TYPE.clone();
-
public Builder(String documentType) {
- super(Defaults.NAME, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
- this.indexName = name;
+ super(Defaults.NAME, new ParentFieldType(Defaults.FIELD_TYPE, documentType), Defaults.FIELD_TYPE);
this.documentType = documentType;
builder = this;
}
@@ -109,21 +96,13 @@ public class ParentFieldMapper extends MetadataFieldMapper {
}
@Override
- public Builder fieldDataSettings(Settings fieldDataSettings) {
- Settings settings = Settings.builder().put(childJoinFieldType.fieldDataType().getSettings()).put(fieldDataSettings).build();
- childJoinFieldType.setFieldDataType(new FieldDataType(childJoinFieldType.fieldDataType().getType(), settings));
- return this;
- }
-
- @Override
public ParentFieldMapper build(BuilderContext context) {
if (parentType == null) {
throw new MapperParsingException("[_parent] field mapping must contain the [type] option");
}
- parentJoinFieldType.setName(joinField(documentType));
- parentJoinFieldType.setFieldDataType(null);
- childJoinFieldType.setName(joinField(parentType));
- return new ParentFieldMapper(fieldType, parentJoinFieldType, childJoinFieldType, parentType, context.indexSettings());
+ name = joinField(parentType);
+ setupFieldType(context);
+ return new ParentFieldMapper(createParentJoinFieldMapper(documentType, context), fieldType, parentType, context.indexSettings());
}
}
@@ -152,19 +131,40 @@ public class ParentFieldMapper extends MetadataFieldMapper {
}
@Override
- public MetadataFieldMapper getDefault(Settings indexSettings, MappedFieldType fieldType, String parentType) {
- return new ParentFieldMapper(indexSettings, fieldType, parentType);
+ public MetadataFieldMapper getDefault(Settings indexSettings, MappedFieldType fieldType, String typeName) {
+ StringFieldMapper parentJoinField = createParentJoinFieldMapper(typeName, new BuilderContext(indexSettings, new ContentPath(0)));
+ MappedFieldType childJoinFieldType = Defaults.FIELD_TYPE.clone();
+ childJoinFieldType.setName(joinField(null));
+ return new ParentFieldMapper(parentJoinField, childJoinFieldType, null, indexSettings);
}
}
+ static StringFieldMapper createParentJoinFieldMapper(String docType, BuilderContext context) {
+ StringFieldMapper.Builder parentJoinField = MapperBuilders.stringField(joinField(docType));
+ parentJoinField.indexOptions(IndexOptions.NONE);
+ parentJoinField.docValues(true);
+ parentJoinField.fieldType().setDocValuesType(DocValuesType.SORTED);
+ parentJoinField.fieldType().setFieldDataType(null);
+ return parentJoinField.build(context);
+ }
+
static final class ParentFieldType extends MappedFieldType {
+ final String documentType;
+
public ParentFieldType() {
- setFieldDataType(new FieldDataType("_parent", settingsBuilder().put(MappedFieldType.Loading.KEY, Loading.EAGER_VALUE)));
+ setFieldDataType(new FieldDataType(NAME, settingsBuilder().put(MappedFieldType.Loading.KEY, Loading.EAGER_VALUE)));
+ documentType = null;
+ }
+
+ ParentFieldType(ParentFieldType ref, String documentType) {
+ super(ref);
+ this.documentType = documentType;
}
- protected ParentFieldType(ParentFieldType ref) {
+ private ParentFieldType(ParentFieldType ref) {
super(ref);
+ this.documentType = ref.documentType;
}
@Override
@@ -177,30 +177,6 @@ public class ParentFieldMapper extends MetadataFieldMapper {
return CONTENT_TYPE;
}
- @Override
- public Uid value(Object value) {
- if (value == null) {
- return null;
- }
- return Uid.createUid(value.toString());
- }
-
- @Override
- public Object valueForSearch(Object value) {
- if (value == null) {
- return null;
- }
- String sValue = value.toString();
- if (sValue == null) {
- return null;
- }
- int index = sValue.indexOf(Uid.DELIMITER);
- if (index == -1) {
- return sValue;
- }
- return sValue.substring(index + 1);
- }
-
/**
* We don't need to analyze the text, and we need to convert it to UID...
*/
@@ -216,67 +192,30 @@ public class ParentFieldMapper extends MetadataFieldMapper {
@Override
public Query termsQuery(List values, @Nullable QueryShardContext context) {
- if (context == null) {
- return super.termsQuery(values, context);
+ BytesRef[] ids = new BytesRef[values.size()];
+ for (int i = 0; i < ids.length; i++) {
+ ids[i] = indexedValueForSearch(values.get(i));
}
-
- List<String> types = new ArrayList<>(context.getMapperService().types().size());
- for (DocumentMapper documentMapper : context.getMapperService().docMappers(false)) {
- if (!documentMapper.parentFieldMapper().active()) {
- types.add(documentMapper.type());
- }
- }
-
- List<BytesRef> bValues = new ArrayList<>(values.size());
- for (Object value : values) {
- BytesRef bValue = BytesRefs.toBytesRef(value);
- if (Uid.hasDelimiter(bValue)) {
- bValues.add(bValue);
- } else {
- // we use all non child types, cause we don't know if its exact or not...
- for (String type : types) {
- bValues.add(Uid.createUidAsBytes(type, bValue));
- }
- }
- }
- return new TermsQuery(name(), bValues);
+ BooleanQuery.Builder query = new BooleanQuery.Builder();
+ query.add(new DocValuesTermsQuery(name(), ids), BooleanClause.Occur.MUST);
+ query.add(new TermQuery(new Term(TypeFieldMapper.NAME, documentType)), BooleanClause.Occur.FILTER);
+ return query.build();
}
}
private final String parentType;
- // determines the field data settings
- private MappedFieldType childJoinFieldType;
- // has no impact of field data settings, is just here for creating a join field, the parent field mapper in the child type pointing to this type determines the field data settings for this join field
- private final MappedFieldType parentJoinFieldType;
+ // has no impact on field data settings, it is just here for creating a join field;
+ // the parent field mapper in the child type pointing to this type determines the field data settings for this join field
+ private final StringFieldMapper parentJoinField;
- private ParentFieldMapper(MappedFieldType fieldType, MappedFieldType parentJoinFieldType, MappedFieldType childJoinFieldType, String parentType, Settings indexSettings) {
- super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings);
+ private ParentFieldMapper(StringFieldMapper parentJoinField, MappedFieldType childJoinFieldType, String parentType, Settings indexSettings) {
+ super(NAME, childJoinFieldType, Defaults.FIELD_TYPE, indexSettings);
this.parentType = parentType;
- this.parentJoinFieldType = parentJoinFieldType;
- this.parentJoinFieldType.freeze();
- this.childJoinFieldType = childJoinFieldType;
- if (childJoinFieldType != null) {
- this.childJoinFieldType.freeze();
- }
- }
-
- private ParentFieldMapper(Settings indexSettings, MappedFieldType existing, String parentType) {
- this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(), joinFieldTypeForParentType(parentType, indexSettings), null, null, indexSettings);
- }
-
- private static MappedFieldType joinFieldTypeForParentType(String parentType, Settings indexSettings) {
- MappedFieldType parentJoinFieldType = Defaults.JOIN_FIELD_TYPE.clone();
- parentJoinFieldType.setName(joinField(parentType));
- parentJoinFieldType.freeze();
- return parentJoinFieldType;
+ this.parentJoinField = parentJoinField;
}
public MappedFieldType getParentJoinFieldType() {
- return parentJoinFieldType;
- }
-
- public MappedFieldType getChildJoinFieldType() {
- return childJoinFieldType;
+ return parentJoinField.fieldType();
}
public String type() {
@@ -289,16 +228,14 @@ public class ParentFieldMapper extends MetadataFieldMapper {
@Override
public void postParse(ParseContext context) throws IOException {
- if (context.sourceToParse().flyweight() == false) {
- parse(context);
- }
+ parse(context);
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
boolean parent = context.docMapper().isParent(context.type());
if (parent) {
- addJoinFieldIfNeeded(fields, parentJoinFieldType, context.id());
+ fields.add(new SortedDocValuesField(parentJoinField.fieldType().name(), new BytesRef(context.id())));
}
if (!active()) {
@@ -309,8 +246,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
// we are in the parsing of _parent phase
String parentId = context.parser().text();
context.sourceToParse().parent(parentId);
- fields.add(new Field(fieldType().name(), Uid.createUid(context.stringBuilder(), parentType, parentId), fieldType()));
- addJoinFieldIfNeeded(fields, childJoinFieldType, parentId);
+ fields.add(new SortedDocValuesField(fieldType.name(), new BytesRef(parentId)));
} else {
// otherwise, we are running it post processing of the xcontent
String parsedParentId = context.doc().get(Defaults.NAME);
@@ -321,8 +257,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
throw new MapperParsingException("No parent id provided, not within the document, and not externally");
}
// we did not add it in the parsing phase, add it now
- fields.add(new Field(fieldType().name(), Uid.createUid(context.stringBuilder(), parentType, parentId), fieldType()));
- addJoinFieldIfNeeded(fields, childJoinFieldType, parentId);
+ fields.add(new SortedDocValuesField(fieldType.name(), new BytesRef(parentId)));
} else if (parentId != null && !parsedParentId.equals(Uid.createUid(context.stringBuilder(), parentType, parentId))) {
throw new MapperParsingException("Parent id mismatch, document value is [" + Uid.createUid(parsedParentId).id() + "], while external value is [" + parentId + "]");
}
@@ -331,12 +266,6 @@ public class ParentFieldMapper extends MetadataFieldMapper {
// we have parent mapping, yet no value was set, ignore it...
}
- private void addJoinFieldIfNeeded(List<Field> fields, MappedFieldType fieldType, String id) {
- if (fieldType.hasDocValues()) {
- fields.add(new SortedDocValuesField(fieldType.name(), new BytesRef(id)));
- }
- }
-
public static String joinField(String parentType) {
return ParentFieldMapper.NAME + "#" + parentType;
}
@@ -346,8 +275,9 @@ public class ParentFieldMapper extends MetadataFieldMapper {
return CONTENT_TYPE;
}
- private boolean joinFieldHasCustomFieldDataSettings() {
- return childJoinFieldType != null && childJoinFieldType.fieldDataType() != null && childJoinFieldType.fieldDataType().equals(Defaults.JOIN_FIELD_TYPE.fieldDataType()) == false;
+ @Override
+ public Iterator<Mapper> iterator() {
+ return Collections.<Mapper>singleton(parentJoinField).iterator();
}
@Override
@@ -360,12 +290,16 @@ public class ParentFieldMapper extends MetadataFieldMapper {
builder.startObject(CONTENT_TYPE);
builder.field("type", parentType);
if (includeDefaults || joinFieldHasCustomFieldDataSettings()) {
- builder.field("fielddata", (Map) childJoinFieldType.fieldDataType().getSettings().getAsMap());
+ builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());
}
builder.endObject();
return builder;
}
+ private boolean joinFieldHasCustomFieldDataSettings() {
+ return fieldType != null && fieldType.fieldDataType() != null && fieldType.fieldDataType().equals(Defaults.FIELD_TYPE.fieldDataType()) == false;
+ }
+
@Override
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
super.doMerge(mergeWith, updateAllTypes);
@@ -375,18 +309,13 @@ public class ParentFieldMapper extends MetadataFieldMapper {
}
List<String> conflicts = new ArrayList<>();
- fieldType().checkCompatibility(fieldMergeWith.fieldType(), conflicts, true); // always strict, this cannot change
- parentJoinFieldType.checkCompatibility(fieldMergeWith.parentJoinFieldType, conflicts, true); // same here
- if (childJoinFieldType != null) {
- // TODO: this can be set to false when the old parent/child impl is removed, we can do eager global ordinals loading per type.
- childJoinFieldType.checkCompatibility(fieldMergeWith.childJoinFieldType, conflicts, updateAllTypes == false);
- }
+ fieldType().checkCompatibility(fieldMergeWith.fieldType, conflicts, true);
if (conflicts.isEmpty() == false) {
throw new IllegalArgumentException("Merge conflicts: " + conflicts);
}
if (active()) {
- childJoinFieldType = fieldMergeWith.childJoinFieldType.clone();
+ fieldType = fieldMergeWith.fieldType.clone();
}
}
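Net effect of this file: _parent no longer indexes type#id UID terms; both sides of the join live in sorted doc values (the parent side through a synthetic StringFieldMapper named _parent#<type>), and a parent-id terms query becomes a doc-values lookup filtered by _type, per the new termsQuery above. Its query shape, with illustrative type names:

BytesRef[] ids = { new BytesRef("some-parent-id") };
Query query = new BooleanQuery.Builder()
    .add(new DocValuesTermsQuery("_parent#parent_type", ids), BooleanClause.Occur.MUST)
    .add(new TermQuery(new Term("_type", "child_type")), BooleanClause.Occur.FILTER)
    .build();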
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java
index ee06b51ecf..b1d24e53ab 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java
@@ -38,7 +38,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
/**
*
@@ -95,7 +95,7 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("required")) {
- builder.required(nodeBooleanValue(fieldNode));
+ builder.required(lenientNodeBooleanValue(fieldNode));
iterator.remove();
}
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java
index b0de09edaf..519a38c4ff 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java
@@ -51,7 +51,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
/**
*
@@ -122,7 +122,7 @@ public class SourceFieldMapper extends MetadataFieldMapper {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
- builder.enabled(nodeBooleanValue(fieldNode));
+ builder.enabled(lenientNodeBooleanValue(fieldNode));
iterator.remove();
} else if ("format".equals(fieldName) && parserContext.indexVersionCreated().before(Version.V_3_0_0)) {
// ignore on old indices, reject on and after 3.0
@@ -251,10 +251,11 @@ public class SourceFieldMapper extends MetadataFieldMapper {
if (!fieldType().stored()) {
return;
}
- if (context.flyweight()) {
+ BytesReference source = context.source();
+ // The percolate and term vector (tv) APIs may not set the source, and that is ok because these APIs will not index any data
+ if (source == null) {
return;
}
- BytesReference source = context.source();
boolean filtered = (includes != null && includes.length > 0) || (excludes != null && excludes.length > 0);
if (filtered) {
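With the flyweight flag gone, "no source" is now modeled as a null ParseContext#source() (the @Nullable annotation added in the ParseContext hunk above), so consumers null-check instead of asking the request. The guard above, restated:

BytesReference source = context.source(); // now @Nullable
if (source == null) {
    return; // no source provided, nothing to store
}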
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java
index 4612b9fb85..7c51b05cb4 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java
@@ -44,7 +44,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeTimeValue;
public class TTLFieldMapper extends MetadataFieldMapper {
@@ -108,7 +108,7 @@ public class TTLFieldMapper extends MetadataFieldMapper {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
- EnabledAttributeMapper enabledState = nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED;
+ EnabledAttributeMapper enabledState = lenientNodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED;
builder.enabled(enabledState);
iterator.remove();
} else if (fieldName.equals("default")) {
@@ -212,7 +212,7 @@ public class TTLFieldMapper extends MetadataFieldMapper {
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException, AlreadyExpiredException {
- if (enabledState.enabled && !context.sourceToParse().flyweight()) {
+ if (enabledState.enabled) {
long ttl = context.sourceToParse().ttl();
if (ttl <= 0 && defaultTTL > 0) { // no ttl provided so we use the default value
ttl = defaultTTL;
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java
index e750f973ad..570155a79c 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java
@@ -43,7 +43,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseDateTimeFormatter;
public class TimestampFieldMapper extends MetadataFieldMapper {
@@ -134,7 +134,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
- EnabledAttributeMapper enabledState = nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED;
+ EnabledAttributeMapper enabledState = lenientNodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED;
builder.enabled(enabledState);
iterator.remove();
} else if (fieldName.equals("format")) {
@@ -149,7 +149,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
}
iterator.remove();
} else if (fieldName.equals("ignore_missing")) {
- ignoreMissing = nodeBooleanValue(fieldNode);
+ ignoreMissing = lenientNodeBooleanValue(fieldNode);
builder.ignoreMissing(ignoreMissing);
iterator.remove();
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java
index 828651409b..f8fea4071e 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java
@@ -149,7 +149,7 @@ public class UidFieldMapper extends MetadataFieldMapper {
@Override
public void postParse(ParseContext context) throws IOException {
- if (context.id() == null && !context.sourceToParse().flyweight()) {
+ if (context.id() == null) {
throw new MapperParsingException("No id found while parsing the content source");
}
// if we did not have the id as part of the sourceToParse, then we need to parse it here
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java
index c83428d223..fc9660d5c1 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java
@@ -66,7 +66,7 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField;
public class IpFieldMapper extends NumberFieldMapper {
public static final String CONTENT_TYPE = "ip";
- public static final long MAX_IP = 4294967296l;
+ public static final long MAX_IP = 4294967296L;
public static String longToIp(long longIp) {
int octet3 = (int) ((longIp >> 24) % 256);
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java
index 9f3b503ab4..b5934a4011 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java
@@ -49,7 +49,7 @@ import java.util.List;
import java.util.Locale;
import java.util.Map;
-import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
import static org.elasticsearch.index.mapper.MapperBuilders.object;
/**
@@ -191,11 +191,11 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,
if (value.equalsIgnoreCase("strict")) {
builder.dynamic(Dynamic.STRICT);
} else {
- builder.dynamic(nodeBooleanValue(fieldNode) ? Dynamic.TRUE : Dynamic.FALSE);
+ builder.dynamic(lenientNodeBooleanValue(fieldNode) ? Dynamic.TRUE : Dynamic.FALSE);
}
return true;
} else if (fieldName.equals("enabled")) {
- builder.enabled(nodeBooleanValue(fieldNode));
+ builder.enabled(lenientNodeBooleanValue(fieldNode));
return true;
} else if (fieldName.equals("properties")) {
if (fieldNode instanceof Collection && ((Collection) fieldNode).isEmpty()) {
@@ -207,7 +207,7 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,
}
return true;
} else if (fieldName.equals("include_in_all")) {
- builder.includeInAll(nodeBooleanValue(fieldNode));
+ builder.includeInAll(lenientNodeBooleanValue(fieldNode));
return true;
}
return false;
@@ -230,12 +230,12 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,
}
fieldNode = node.get("include_in_parent");
if (fieldNode != null) {
- nestedIncludeInParent = nodeBooleanValue(fieldNode);
+ nestedIncludeInParent = lenientNodeBooleanValue(fieldNode);
node.remove("include_in_parent");
}
fieldNode = node.get("include_in_root");
if (fieldNode != null) {
- nestedIncludeInRoot = nodeBooleanValue(fieldNode);
+ nestedIncludeInRoot = lenientNodeBooleanValue(fieldNode);
node.remove("include_in_root");
}
if (nested) {
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java
index 64a60305b1..5e87130865 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java
@@ -42,7 +42,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
-import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseDateTimeFormatter;
/**
@@ -189,10 +189,10 @@ public class RootObjectMapper extends ObjectMapper {
}
return true;
} else if (fieldName.equals("date_detection")) {
- ((Builder) builder).dateDetection = nodeBooleanValue(fieldNode);
+ ((Builder) builder).dateDetection = lenientNodeBooleanValue(fieldNode);
return true;
} else if (fieldName.equals("numeric_detection")) {
- ((Builder) builder).numericDetection = nodeBooleanValue(fieldNode);
+ ((Builder) builder).numericDetection = lenientNodeBooleanValue(fieldNode);
return true;
}
return false;
diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java
index c4b2b06e0e..9a10319574 100644
--- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java
@@ -126,9 +126,7 @@ public class PercolatorFieldMapper extends FieldMapper {
public Mapper parse(ParseContext context) throws IOException {
QueryShardContext queryShardContext = new QueryShardContext(this.queryShardContext);
Query query = PercolatorQueriesRegistry.parseQuery(queryShardContext, mapUnmappedFieldAsString, context.parser());
- if (context.flyweight() == false) {
- ExtractQueryTermsService.extractQueryTerms(query, context.doc(), queryTermsField.name(), unknownQueryField.name(), queryTermsField.fieldType());
- }
+ ExtractQueryTermsService.extractQueryTerms(query, context.doc(), queryTermsField.name(), unknownQueryField.name(), queryTermsField.fieldType());
return null;
}
diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java
index f55a739caf..67ba0aaf1d 100644
--- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java
+++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java
@@ -127,7 +127,7 @@ public final class PercolatorQueriesRegistry extends AbstractIndexShardComponent
}
}
} catch (Exception e) {
- throw new PercolatorException(shardId().index(), "failed to parse query [" + id + "]", e);
+ throw new PercolatorException(shardId().getIndex(), "failed to parse query [" + id + "]", e);
}
return null;
}
@@ -174,7 +174,7 @@ public final class PercolatorQueriesRegistry extends AbstractIndexShardComponent
}
loadedQueries = queries.size();
} catch (Exception e) {
- throw new PercolatorException(shardId.index(), "failed to load queries from percolator index", e);
+ throw new PercolatorException(shardId.getIndex(), "failed to load queries from percolator index", e);
}
logger.debug("done loading [{}] percolator queries", loadedQueries);
}
diff --git a/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java
index 3be82f3970..2031abc007 100644
--- a/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java
@@ -37,7 +37,11 @@ import java.util.Objects;
/**
* A Query that does fuzzy matching for a specific value.
+ *
+ * @deprecated Fuzzy queries are not useful enough. This class will be removed with Elasticsearch 4.0. In most cases you may want to use
+ * a match query with the fuzziness parameter for strings or range queries for numeric and date fields.
*/
+@Deprecated
public class FuzzyQueryBuilder extends AbstractQueryBuilder<FuzzyQueryBuilder> implements MultiTermQueryBuilder<FuzzyQueryBuilder> {
public static final String NAME = "fuzzy";
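As a migration sketch for the deprecation above (the field name and value are hypothetical, not from this change), a fuzzy query on a string field can usually be rewritten as a match query with the fuzziness parameter:

    import org.elasticsearch.common.unit.Fuzziness;
    import org.elasticsearch.index.query.QueryBuilder;
    import org.elasticsearch.index.query.QueryBuilders;

    class FuzzyMigrationSketch {
        // Hypothetical example: a match query with fuzziness, as the
        // replacement suggested for the deprecated fuzzyQuery()/FuzzyQueryBuilder.
        static QueryBuilder replacement() {
            return QueryBuilders.matchQuery("user", "kimchy").fuzziness(Fuzziness.AUTO);
        }
    }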
diff --git a/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryParser.java
index 85365f84fb..55dddbe4af 100644
--- a/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryParser.java
+++ b/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryParser.java
@@ -26,6 +26,11 @@ import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
+/**
+ * @deprecated Fuzzy queries are not useful enough. This class will be removed with Elasticsearch 4.0. In most cases you may want to use
+ * a match query with the fuzziness parameter for strings or range queries for numeric and date fields.
+ */
+@Deprecated
public class FuzzyQueryParser implements QueryParser<FuzzyQueryBuilder> {
public static final ParseField TERM_FIELD = new ParseField("term");
diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java
index 454465727b..f7d8b22d78 100644
--- a/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java
@@ -240,7 +240,6 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuil
ShapeBuilder shapeToQuery = shape;
if (shapeToQuery == null) {
GetRequest getRequest = new GetRequest(indexedShapeIndex, indexedShapeType, indexedShapeId);
- getRequest.copyContextAndHeadersFrom(SearchContext.current());
shapeToQuery = fetch(context.getClient(), getRequest, indexedShapePath);
}
MappedFieldType fieldType = context.fieldMapper(fieldName);
diff --git a/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java
index d9a99cc50c..9184281607 100644
--- a/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java
@@ -372,7 +372,10 @@ public class MatchQueryBuilder extends AbstractQueryBuilder<MatchQueryBuilder> {
return null;
}
- if (query instanceof BooleanQuery) {
+ // If the coordination factor is disabled on a boolean query we don't apply the minimum should match.
+ // This is done to make sure that the minimum_should_match doesn't get applied when there is only one word
+ // and multiple variations of the same word in the query (synonyms for instance).
+ if (query instanceof BooleanQuery && !((BooleanQuery) query).isCoordDisabled()) {
query = Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch);
} else if (query instanceof ExtendedCommonTermsQuery) {
((ExtendedCommonTermsQuery)query).setLowFreqMinimumNumberShouldMatch(minimumShouldMatch);
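The same guard is added to QueryStringQueryBuilder, SimpleQueryStringBuilder and MultiMatchQuery further down. A minimal Lucene sketch of the case it protects, with hypothetical field and terms: analysis expands a single word into synonyms as a coord-disabled BooleanQuery, and applying minimum_should_match there would wrongly demand several variants of that one word to match.

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.BooleanClause;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.TermQuery;

    class CoordDisabledSketch {
        static BooleanQuery synonymExpansion() {
            BooleanQuery.Builder builder = new BooleanQuery.Builder();
            // synonym expansion of a single word disables the coord factor
            builder.setDisableCoord(true);
            builder.add(new TermQuery(new Term("body", "fast")), BooleanClause.Occur.SHOULD);
            builder.add(new TermQuery(new Term("body", "quick")), BooleanClause.Occur.SHOULD);
            // isCoordDisabled() is now true, so the check above skips applyMinimumShouldMatch
            return builder.build();
        }
    }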
diff --git a/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java
index ffb21a32dd..49842a652f 100644
--- a/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java
@@ -888,7 +888,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
private static void setDefaultIndexTypeFields(QueryShardContext context, Item item, List<String> moreLikeFields,
boolean useDefaultField) {
if (item.index() == null) {
- item.index(context.index().name());
+ item.index(context.index().getName());
}
if (item.type() == null) {
if (context.queryTypes().size() > 1) {
@@ -917,7 +917,6 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
for (Item item : unlikeItems) {
request.add(item.toTermVectorsRequest());
}
- request.copyContextAndHeadersFrom(searchContext);
return client.multiTermVectors(request).actionGet();
}
diff --git a/core/src/main/java/org/elasticsearch/index/query/ParentIdQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/ParentIdQueryBuilder.java
new file mode 100644
index 0000000000..f9bd7623f3
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/index/query/ParentIdQueryBuilder.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.query;
+
+import org.apache.lucene.search.DocValuesTermsQuery;
+import org.apache.lucene.search.Query;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
+
+import java.io.IOException;
+import java.util.Objects;
+
+public final class ParentIdQueryBuilder extends AbstractQueryBuilder<ParentIdQueryBuilder> {
+
+ public static final String NAME = "parent_id";
+ static final ParentIdQueryBuilder PROTO = new ParentIdQueryBuilder(null, null);
+
+ private final String type;
+ private final String id;
+
+ public ParentIdQueryBuilder(String type, String id) {
+ this.type = type;
+ this.id = id;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ @Override
+ protected void doXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject(NAME);
+ builder.field(ParentIdQueryParser.TYPE_FIELD.getPreferredName(), type);
+ builder.field(ParentIdQueryParser.ID_FIELD.getPreferredName(), id);
+ printBoostAndQueryName(builder);
+ builder.endObject();
+ }
+
+ @Override
+ protected Query doToQuery(QueryShardContext context) throws IOException {
+ DocumentMapper childDocMapper = context.getMapperService().documentMapper(type);
+ if (childDocMapper == null) {
+ throw new QueryShardException(context, "[" + NAME + "] no mapping found for type [" + type + "]");
+ }
+ ParentFieldMapper parentFieldMapper = childDocMapper.parentFieldMapper();
+ if (parentFieldMapper.active() == false) {
+ throw new QueryShardException(context, "[" + NAME + "] _parent field has no parent type configured");
+ }
+ String fieldName = ParentFieldMapper.joinField(parentFieldMapper.type());
+ return new DocValuesTermsQuery(fieldName, id);
+ }
+
+ @Override
+ protected ParentIdQueryBuilder doReadFrom(StreamInput in) throws IOException {
+ String type = in.readString();
+ String id = in.readString();
+ return new ParentIdQueryBuilder(type, id);
+ }
+
+ @Override
+ protected void doWriteTo(StreamOutput out) throws IOException {
+ out.writeString(type);
+ out.writeString(id);
+ }
+
+ @Override
+ protected boolean doEquals(ParentIdQueryBuilder that) {
+ return Objects.equals(type, that.type) && Objects.equals(id, that.id);
+ }
+
+ @Override
+ protected int doHashCode() {
+ return Objects.hash(type, id);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return NAME;
+ }
+}
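A hedged sketch of what doToQuery ends up building for a hypothetical child type whose _parent points at type "blog": the parent_id query resolves to a doc-values terms query on the internal join field.

    import org.apache.lucene.search.DocValuesTermsQuery;
    import org.apache.lucene.search.Query;
    import org.elasticsearch.index.mapper.internal.ParentFieldMapper;

    class ParentIdLuceneSketch {
        static Query childrenPointingAt(String parentId) {
            // joinField("blog") yields the internal "_parent#blog" field name
            String joinField = ParentFieldMapper.joinField("blog");
            return new DocValuesTermsQuery(joinField, parentId);
        }
    }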
diff --git a/core/src/main/java/org/elasticsearch/index/query/ParentIdQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/ParentIdQueryParser.java
new file mode 100644
index 0000000000..43788624c5
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/index/query/ParentIdQueryParser.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.query;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+
+public final class ParentIdQueryParser implements QueryParser<ParentIdQueryBuilder> {
+
+ public static final ParseField ID_FIELD = new ParseField("id");
+ public static final ParseField TYPE_FIELD = new ParseField("type", "child_type");
+
+ @Override
+ public String[] names() {
+ return new String[]{ParentIdQueryBuilder.NAME};
+ }
+
+ @Override
+ public ParentIdQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
+ XContentParser parser = parseContext.parser();
+ float boost = AbstractQueryBuilder.DEFAULT_BOOST;
+ String type = null;
+ String id = null;
+ String queryName = null;
+ String currentFieldName = null;
+ XContentParser.Token token;
+ while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+ if (token == XContentParser.Token.FIELD_NAME) {
+ currentFieldName = parser.currentName();
+ } else if (token.isValue()) {
+ if (parseContext.parseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
+ type = parser.text();
+ } else if (parseContext.parseFieldMatcher().match(currentFieldName, ID_FIELD)) {
+ id = parser.text();
+ } else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
+ boost = parser.floatValue();
+ } else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
+ queryName = parser.text();
+ } else {
+ throw new ParsingException(parser.getTokenLocation(), "[parent_id] query does not support [" + currentFieldName + "]");
+ }
+ } else {
+ throw new ParsingException(parser.getTokenLocation(), "[parent_id] query does not support [" + currentFieldName + "]");
+ }
+ }
+ ParentIdQueryBuilder queryBuilder = new ParentIdQueryBuilder(type, id);
+ queryBuilder.queryName(queryName);
+ queryBuilder.boost(boost);
+ return queryBuilder;
+ }
+
+ @Override
+ public ParentIdQueryBuilder getBuilderPrototype() {
+ return ParentIdQueryBuilder.PROTO;
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java b/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java
index 6e9c86b4c6..03ccebf047 100644
--- a/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java
+++ b/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java
@@ -199,7 +199,14 @@ public abstract class QueryBuilders {
*
* @param name The name of the field
* @param value The value of the term
+ *
+ * @deprecated Fuzzy queries are not useful enough and will be removed with Elasticsearch 4.0. In most cases you may want to use
+ * a match query with the fuzziness parameter for strings or range queries for numeric and date fields.
+ *
+ * @see #matchQuery(String, Object)
+ * @see #rangeQuery(String)
*/
+ @Deprecated
public static FuzzyQueryBuilder fuzzyQuery(String name, String value) {
return new FuzzyQueryBuilder(name, value);
}
@@ -209,7 +216,14 @@ public abstract class QueryBuilders {
*
* @param name The name of the field
* @param value The value of the term
+ *
+ * @deprecated Fuzzy queries are not useful enough and will be removed with Elasticsearch 4.0. In most cases you may want to use
+ * a match query with the fuzziness parameter for strings or range queries for numeric and date fields.
+ *
+ * @see #matchQuery(String, Object)
+ * @see #rangeQuery(String)
*/
+ @Deprecated
public static FuzzyQueryBuilder fuzzyQuery(String name, Object value) {
return new FuzzyQueryBuilder(name, value);
}
@@ -490,6 +504,14 @@ public abstract class QueryBuilders {
return new HasParentQueryBuilder(type, query);
}
+ /**
+ * Constructs a new parent id query that returns all child documents of the specified type that
+ * point to the specified id.
+ */
+ public static ParentIdQueryBuilder parentId(String type, String id) {
+ return new ParentIdQueryBuilder(type, id);
+ }
+
public static NestedQueryBuilder nestedQuery(String path, QueryBuilder query) {
return new NestedQueryBuilder(path, query);
}
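A minimal usage sketch for the new factory method, with a hypothetical child type and parent id; the parser above accepts the same parameters in the DSL as "type" (or "child_type") and "id":

    import org.elasticsearch.index.query.ParentIdQueryBuilder;
    import org.elasticsearch.index.query.QueryBuilders;

    class ParentIdUsageSketch {
        static ParentIdQueryBuilder childrenOfPost1() {
            // matches all "blog_tag" children whose _parent points at id "1"
            return QueryBuilders.parentId("blog_tag", "1");
        }
    }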
diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java
index 3c2ab5b0d8..a9d6a98941 100644
--- a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java
+++ b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java
@@ -236,13 +236,7 @@ public class QueryShardContext {
throw new QueryShardException(this, "inner_hits unsupported");
}
- InnerHitsContext innerHitsContext;
- if (sc.innerHits() == null) {
- innerHitsContext = new InnerHitsContext(new HashMap<>());
- sc.innerHits(innerHitsContext);
- } else {
- innerHitsContext = sc.innerHits();
- }
+ InnerHitsContext innerHitsContext = sc.innerHits();
innerHitsContext.addInnerHitDefinition(name, context);
}
@@ -288,20 +282,14 @@ public class QueryShardContext {
this.mapUnmappedFieldAsString = mapUnmappedFieldAsString;
}
- private MappedFieldType failIfFieldMappingNotFound(String name, MappedFieldType fieldMapping) {
- if (allowUnmappedFields) {
+ MappedFieldType failIfFieldMappingNotFound(String name, MappedFieldType fieldMapping) {
+ if (fieldMapping != null || allowUnmappedFields) {
return fieldMapping;
} else if (mapUnmappedFieldAsString) {
StringFieldMapper.Builder builder = MapperBuilders.stringField(name);
return builder.build(new Mapper.BuilderContext(indexSettings.getSettings(), new ContentPath(1))).fieldType();
} else {
- Version indexCreatedVersion = indexSettings.getIndexVersionCreated();
- if (fieldMapping == null && indexCreatedVersion.onOrAfter(Version.V_1_4_0_Beta1)) {
- throw new QueryShardException(this, "Strict field resolution and no field mapping can be found for the field with name ["
- + name + "]");
- } else {
- return fieldMapping;
- }
+ throw new QueryShardException(this, "No field mapping can be found for the field with name [{}]", name);
}
}
@@ -364,8 +352,8 @@ public class QueryShardContext {
/*
* Executes the given template, and returns the response.
*/
- public BytesReference executeQueryTemplate(Template template, SearchContext searchContext) {
- ExecutableScript executable = getScriptService().executable(template, ScriptContext.Standard.SEARCH, searchContext, Collections.emptyMap());
+ public BytesReference executeQueryTemplate(Template template) {
+ ExecutableScript executable = getScriptService().executable(template, ScriptContext.Standard.SEARCH, Collections.emptyMap());
return (BytesReference) executable.run();
}
diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java
index 59e04e4095..fcab39b96e 100644
--- a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java
@@ -735,7 +735,10 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
}
query = Queries.fixNegativeQueryIfNeeded(query);
- if (query instanceof BooleanQuery) {
+ // If the coordination factor is disabled on a boolean query we don't apply the minimum should match.
+ // This is done to make sure that the minimum_should_match doesn't get applied when there is only one word
+ // and multiple variations of the same word in the query (synonyms for instance).
+ if (query instanceof BooleanQuery && !((BooleanQuery) query).isCoordDisabled()) {
query = Queries.applyMinimumShouldMatch((BooleanQuery) query, this.minimumShouldMatch());
}
diff --git a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java
index 17240a22bc..5bb10b74a2 100644
--- a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java
@@ -285,7 +285,10 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
sqp.setDefaultOperator(defaultOperator.toBooleanClauseOccur());
Query query = sqp.parse(queryText);
- if (minimumShouldMatch != null && query instanceof BooleanQuery) {
+ // If the coordination factor is disabled on a boolean query we don't apply the minimum should match.
+ // This is done to make sure that the minimum_should_match doesn't get applied when there is only one word
+ // and multiple variations of the same word in the query (synonyms for instance).
+ if (minimumShouldMatch != null && query instanceof BooleanQuery && !((BooleanQuery) query).isCoordDisabled()) {
query = Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch);
}
return query;
diff --git a/core/src/main/java/org/elasticsearch/index/query/TemplateQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/TemplateQueryBuilder.java
index 59ff19748a..02a9bc42e3 100644
--- a/core/src/main/java/org/elasticsearch/index/query/TemplateQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/TemplateQueryBuilder.java
@@ -100,7 +100,7 @@ public class TemplateQueryBuilder extends AbstractQueryBuilder<TemplateQueryBuil
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
- BytesReference querySource = context.executeQueryTemplate(template, SearchContext.current());
+ BytesReference querySource = context.executeQueryTemplate(template);
try (XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource)) {
final QueryShardContext contextCopy = new QueryShardContext(context);
contextCopy.reset(qSourceParser);
diff --git a/core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java
index 388a21c7a5..326a6ed8b8 100644
--- a/core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java
@@ -232,7 +232,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
if (this.termsLookup != null) {
termsLookup = new TermsLookup(this.termsLookup);
if (termsLookup.index() == null) {
- termsLookup.index(context.index().name());
+ termsLookup.index(context.index().getName());
}
Client client = context.getClient();
terms = fetch(termsLookup, client);
@@ -249,7 +249,6 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
List<Object> terms = new ArrayList<>();
GetRequest getRequest = new GetRequest(termsLookup.index(), termsLookup.type(), termsLookup.id())
.preference("_local").routing(termsLookup.routing());
- getRequest.copyContextAndHeadersFrom(SearchContext.current());
final GetResponse getResponse = client.get(getRequest).actionGet();
if (getResponse.isExists()) {
List<Object> extractedValues = XContentMapValues.extractRawValues(termsLookup.path(), getResponse.getSourceAsMap());
diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/random/RandomScoreFunctionBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/random/RandomScoreFunctionBuilder.java
index 2a7169d950..f67e60f33f 100644
--- a/core/src/main/java/org/elasticsearch/index/query/functionscore/random/RandomScoreFunctionBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/random/RandomScoreFunctionBuilder.java
@@ -125,7 +125,7 @@ public class RandomScoreFunctionBuilder extends ScoreFunctionBuilder<RandomScore
//TODO find a way to not get the shard_id from the current search context? make it available in QueryShardContext?
//this currently causes NPE in FunctionScoreQueryBuilderTests#testToQuery
final ShardId shardId = SearchContext.current().indexShard().shardId();
- final int salt = (context.index().name().hashCode() << 10) | shardId.id();
+ final int salt = (context.index().getName().hashCode() << 10) | shardId.id();
final IndexFieldData<?> uidFieldData = context.getForField(fieldType);
return new RandomScoreFunction(this.seed == null ? hash(context.nowInMillis()) : seed, salt, uidFieldData);
}
diff --git a/core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java b/core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java
index c9e59b57d8..dbe4de5ad8 100644
--- a/core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java
+++ b/core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java
@@ -54,7 +54,10 @@ public class MultiMatchQuery extends MatchQuery {
private Query parseAndApply(Type type, String fieldName, Object value, String minimumShouldMatch, Float boostValue) throws IOException {
Query query = parse(type, fieldName, value);
- if (query instanceof BooleanQuery) {
+ // If the coordination factor is disabled on a boolean query we don't apply the minimum should match.
+ // This is done to make sure that the minimum_should_match doesn't get applied when there is only one word
+ // and multiple variations of the same word in the query (synonyms for instance).
+ if (query instanceof BooleanQuery && !((BooleanQuery) query).isCoordDisabled()) {
query = Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch);
}
if (query != null && boostValue != null && boostValue != AbstractQueryBuilder.DEFAULT_BOOST) {
diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java
index 1fb0b7ec4d..03c7e4e82e 100644
--- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java
+++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java
@@ -541,25 +541,23 @@ public class IndexShard extends AbstractIndexShardComponent {
/** Writes all indexing changes to disk and opens a new searcher reflecting all changes. This can throw {@link EngineClosedException}. */
public void refresh(String source) {
verifyNotClosed();
- if (getEngine().refreshNeeded()) {
- if (canIndex()) {
- long bytes = getEngine().getIndexBufferRAMBytesUsed();
- writingBytes.addAndGet(bytes);
- try {
- logger.debug("refresh with source [{}] indexBufferRAMBytesUsed [{}]", source, new ByteSizeValue(bytes));
- long time = System.nanoTime();
- getEngine().refresh(source);
- refreshMetric.inc(System.nanoTime() - time);
- } finally {
- logger.debug("remove [{}] writing bytes for shard [{}]", new ByteSizeValue(bytes), shardId());
- writingBytes.addAndGet(-bytes);
- }
- } else {
- logger.debug("refresh with source [{}]", source);
+ if (canIndex()) {
+ long bytes = getEngine().getIndexBufferRAMBytesUsed();
+ writingBytes.addAndGet(bytes);
+ try {
+ logger.debug("refresh with source [{}] indexBufferRAMBytesUsed [{}]", source, new ByteSizeValue(bytes));
long time = System.nanoTime();
getEngine().refresh(source);
refreshMetric.inc(System.nanoTime() - time);
+ } finally {
+ logger.debug("remove [{}] writing bytes for shard [{}]", new ByteSizeValue(bytes), shardId());
+ writingBytes.addAndGet(-bytes);
}
+ } else {
+ logger.debug("refresh with source [{}]", source);
+ long time = System.nanoTime();
+ getEngine().refresh(source);
+ refreshMetric.inc(System.nanoTime() - time);
}
}
@@ -856,6 +854,10 @@ public class IndexShard extends AbstractIndexShardComponent {
if (state != IndexShardState.RECOVERING) {
throw new IndexShardNotRecoveringException(shardId, state);
}
+ // We set active because we are now writing operations to the engine; this way, if we go idle after some time and become inactive,
+    // we still invoke any onShardInactive listeners ... we won't do a sync'd flush in this case because we only do that on a primary and this
+ // is a replica
+ active.set(true);
return engineConfig.getTranslogRecoveryPerformer().performBatchRecovery(getEngine(), operations);
}
@@ -885,6 +887,11 @@ public class IndexShard extends AbstractIndexShardComponent {
// but we need to make sure we don't lose deletes until we are done recovering
engineConfig.setEnableGcDeletes(false);
engineConfig.setCreate(indexExists == false);
+ if (skipTranslogRecovery == false) {
+ // We set active because we are now writing operations to the engine; this way, if we go idle after some time and become inactive,
+ // we still give sync'd flush a chance to run:
+ active.set(true);
+ }
createNewEngine(skipTranslogRecovery, engineConfig);
}
@@ -1045,6 +1052,10 @@ public class IndexShard extends AbstractIndexShardComponent {
MetaDataStateFormat.deleteMetaState(shardPath().getDataPath());
}
+ public boolean isActive() {
+ return active.get();
+ }
+
public ShardPath shardPath() {
return path;
}
@@ -1304,6 +1315,15 @@ public class IndexShard extends AbstractIndexShardComponent {
assert this.currentEngineReference.get() == null;
this.currentEngineReference.set(newEngine(skipTranslogRecovery, config));
}
+
+ // time elapses after the engine is created above (pulling the config settings) until we set the engine reference, during which
+ // settings changes could possibly have happened, so here we forcefully push any config changes to the new engine:
+ Engine engine = getEngineOrNull();
+
+ // engine could perhaps be null if we were e.g. concurrently closed:
+ if (engine != null) {
+ engine.onSettingsChanged();
+ }
}
protected Engine newEngine(boolean skipTranslogRecovery, EngineConfig config) {
@@ -1514,4 +1534,15 @@ public class IndexShard extends AbstractIndexShardComponent {
return engineFactory;
}
+ /**
+     * Returns <code>true</code> iff one or more changes to the engine are not yet visible via the current searcher.
+ * Otherwise <code>false</code>.
+ *
+ * @throws EngineClosedException if the engine is already closed
+ * @throws AlreadyClosedException if the internal indexwriter in the engine is already closed
+ */
+ public boolean isRefreshNeeded() {
+ return getEngine().refreshNeeded();
+ }
+
}
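A caller-side sketch of the new accessor (the surrounding caller code is assumed, not part of this change): exposing refreshNeeded on IndexShard lets callers skip refreshes that would be no-ops.

    import org.elasticsearch.index.shard.IndexShard;

    class RefreshIfNeededSketch {
        static void refreshIfNeeded(IndexShard indexShard) {
            if (indexShard.isRefreshNeeded()) {
                indexShard.refresh("api"); // the source string only labels the debug log lines
            }
        }
    }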
diff --git a/core/src/main/java/org/elasticsearch/index/shard/ShardId.java b/core/src/main/java/org/elasticsearch/index/shard/ShardId.java
index 2c0eb7c9e3..3dea5501c6 100644
--- a/core/src/main/java/org/elasticsearch/index/shard/ShardId.java
+++ b/core/src/main/java/org/elasticsearch/index/shard/ShardId.java
@@ -40,22 +40,22 @@ public class ShardId implements Streamable, Comparable<ShardId> {
private ShardId() {
}
- public ShardId(String index, int shardId) {
- this(new Index(index), shardId);
- }
-
public ShardId(Index index, int shardId) {
this.index = index;
this.shardId = shardId;
this.hashCode = computeHashCode();
}
- public Index index() {
- return this.index;
+ public ShardId(String index, String indexUUID, int shardId) {
+ this(new Index(index, indexUUID), shardId);
+ }
+
+ public Index getIndex() {
+ return index;
}
- public String getIndex() {
- return index().name();
+ public String getIndexName() {
+ return index.getName();
}
public int id() {
@@ -68,7 +68,7 @@ public class ShardId implements Streamable, Comparable<ShardId> {
@Override
public String toString() {
- return "[" + index.name() + "][" + shardId + "]";
+ return "[" + index.getName() + "][" + shardId + "]";
}
@Override
@@ -76,7 +76,7 @@ public class ShardId implements Streamable, Comparable<ShardId> {
if (this == o) return true;
if (o == null) return false;
ShardId shardId1 = (ShardId) o;
- return shardId == shardId1.shardId && index.name().equals(shardId1.index.name());
+ return shardId == shardId1.shardId && index.equals(shardId1.index);
}
@Override
@@ -98,7 +98,7 @@ public class ShardId implements Streamable, Comparable<ShardId> {
@Override
public void readFrom(StreamInput in) throws IOException {
- index = Index.readIndexName(in);
+ index = Index.readIndex(in);
shardId = in.readVInt();
hashCode = computeHashCode();
}
@@ -112,8 +112,12 @@ public class ShardId implements Streamable, Comparable<ShardId> {
@Override
public int compareTo(ShardId o) {
if (o.getId() == shardId) {
- return index.name().compareTo(o.getIndex());
+ int compare = index.getName().compareTo(o.getIndex().getName());
+ if (compare != 0) {
+ return compare;
+ }
+ return index.getUUID().compareTo(o.getIndex().getUUID());
}
return Integer.compare(shardId, o.getId());
}
-} \ No newline at end of file
+}
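A short sketch of the new three-argument constructor, mirroring its use in StoreRecovery below: when the index UUID is unknown, INDEX_UUID_NA_VALUE stands in, and ordering now compares the index name first and the UUID second.

    import org.elasticsearch.cluster.metadata.IndexMetaData;
    import org.elasticsearch.index.shard.ShardId;

    class ShardIdSketch {
        static ShardId withUnknownUuid(String indexName, int shard) {
            // placeholder UUID for indices whose real UUID cannot be known here
            return new ShardId(indexName, IndexMetaData.INDEX_UUID_NA_VALUE, shard);
        }
    }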
diff --git a/core/src/main/java/org/elasticsearch/index/shard/ShardPath.java b/core/src/main/java/org/elasticsearch/index/shard/ShardPath.java
index d940d1a93c..e870057a14 100644
--- a/core/src/main/java/org/elasticsearch/index/shard/ShardPath.java
+++ b/core/src/main/java/org/elasticsearch/index/shard/ShardPath.java
@@ -45,8 +45,8 @@ public final class ShardPath {
public ShardPath(boolean isCustomDataPath, Path dataPath, Path shardStatePath, String indexUUID, ShardId shardId) {
assert dataPath.getFileName().toString().equals(Integer.toString(shardId.id())) : "dataPath must end with the shard ID but didn't: " + dataPath.toString();
assert shardStatePath.getFileName().toString().equals(Integer.toString(shardId.id())) : "shardStatePath must end with the shard ID but didn't: " + dataPath.toString();
- assert dataPath.getParent().getFileName().toString().equals(shardId.getIndex()) : "dataPath must end with index/shardID but didn't: " + dataPath.toString();
- assert shardStatePath.getParent().getFileName().toString().equals(shardId.getIndex()) : "shardStatePath must end with index/shardID but didn't: " + dataPath.toString();
+ assert dataPath.getParent().getFileName().toString().equals(shardId.getIndexName()) : "dataPath must end with index/shardID but didn't: " + dataPath.toString();
+        assert shardStatePath.getParent().getFileName().toString().equals(shardId.getIndexName()) : "shardStatePath must end with index/shardID but didn't: " + shardStatePath.toString();
if (isCustomDataPath && dataPath.equals(shardStatePath)) {
throw new IllegalArgumentException("shard state path must be different to the data path when using custom data paths");
}
diff --git a/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java b/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java
index 0fc166cacc..5f5aa95a99 100644
--- a/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java
+++ b/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java
@@ -24,6 +24,7 @@ import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.store.Directory;
import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.RestoreSource;
import org.elasticsearch.cluster.routing.ShardRouting;
@@ -254,8 +255,8 @@ final class StoreRecovery {
translogState.totalOperationsOnStart(0);
indexShard.prepareForIndexRecovery();
ShardId snapshotShardId = shardId;
- if (!shardId.getIndex().equals(restoreSource.index())) {
- snapshotShardId = new ShardId(restoreSource.index(), shardId.id());
+ if (!shardId.getIndexName().equals(restoreSource.index())) {
+ snapshotShardId = new ShardId(restoreSource.index(), IndexMetaData.INDEX_UUID_NA_VALUE, shardId.id());
}
indexShardRepository.restore(restoreSource.snapshotId(), restoreSource.version(), shardId, snapshotShardId, indexShard.recoveryState());
indexShard.skipTranslogRecovery();
diff --git a/core/src/main/java/org/elasticsearch/index/similarity/DFISimilarityProvider.java b/core/src/main/java/org/elasticsearch/index/similarity/DFISimilarityProvider.java
new file mode 100644
index 0000000000..30b07a06c3
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/index/similarity/DFISimilarityProvider.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.similarity;
+
+import org.apache.lucene.search.similarities.DFISimilarity;
+import org.apache.lucene.search.similarities.Similarity;
+import org.elasticsearch.common.settings.Settings;
+
+/**
+ * {@link SimilarityProvider} for the {@link DFISimilarity}.
+ * <p>
+ * Configuration options available:
+ * <ul>
+ * <li>discount_overlaps</li>
+ * </ul>
+ * @see DFISimilarity For more information about configuration
+ */
+public class DFISimilarityProvider extends AbstractSimilarityProvider {
+
+ private final DFISimilarity similarity;
+
+ public DFISimilarityProvider(String name, Settings settings) {
+ super(name);
+ boolean discountOverlaps = settings.getAsBoolean("discount_overlaps", true);
+
+ this.similarity = new DFISimilarity();
+ this.similarity.setDiscountOverlaps(discountOverlaps);
+ }
+
+ @Override
+ public Similarity get() {
+ return similarity;
+ }
+}
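Since SimilarityService (next hunk) registers the provider under the key "DFI", the new similarity should be configurable per index like the other built-ins. A hedged sketch; the similarity name "my_dfi" is hypothetical, and Settings.builder() is assumed (on this branch the factory may still be Settings.settingsBuilder()):

    import org.elasticsearch.common.settings.Settings;

    class DfiSimilaritySketch {
        static Settings indexSettings() {
            return Settings.builder()
                    .put("index.similarity.my_dfi.type", "DFI")
                    .put("index.similarity.my_dfi.discount_overlaps", false) // defaults to true
                    .build();
        }
    }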
diff --git a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java
index f564b0e91d..e950ebda1b 100644
--- a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java
+++ b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java
@@ -52,6 +52,7 @@ public final class SimilarityService extends AbstractIndexComponent {
buildIn.put("IB", IBSimilarityProvider::new);
buildIn.put("LMDirichlet", LMDirichletSimilarityProvider::new);
buildIn.put("LMJelinekMercer", LMJelinekMercerSimilarityProvider::new);
+ buildIn.put("DFI", DFISimilarityProvider::new);
DEFAULTS = Collections.unmodifiableMap(defaults);
BUILT_IN = Collections.unmodifiableMap(buildIn);
}
diff --git a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java
index f05f64fa19..c2a134b08e 100644
--- a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java
+++ b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java
@@ -299,7 +299,7 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements
this.snapshotId = snapshotId;
this.version = version;
this.shardId = shardId;
- blobContainer = blobStore.blobContainer(basePath.add("indices").add(snapshotShardId.getIndex()).add(Integer.toString(snapshotShardId.getId())));
+ blobContainer = blobStore.blobContainer(basePath.add("indices").add(snapshotShardId.getIndexName()).add(Integer.toString(snapshotShardId.getId())));
}
/**
diff --git a/core/src/main/java/org/elasticsearch/index/store/IndexStore.java b/core/src/main/java/org/elasticsearch/index/store/IndexStore.java
index 29401fdfd5..e98ad7cc6e 100644
--- a/core/src/main/java/org/elasticsearch/index/store/IndexStore.java
+++ b/core/src/main/java/org/elasticsearch/index/store/IndexStore.java
@@ -21,7 +21,6 @@ package org.elasticsearch.index.store;
import org.apache.lucene.store.StoreRateLimiting;
import org.elasticsearch.common.settings.Setting;
-import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings;
@@ -30,16 +29,17 @@ import org.elasticsearch.index.shard.ShardPath;
*
*/
public class IndexStore extends AbstractIndexComponent {
- public static final Setting<StoreRateLimiting.Type> INDEX_STORE_THROTTLE_TYPE_SETTING = new Setting<>("index.store.throttle.type", "none", StoreRateLimiting.Type::fromString, true, Setting.Scope.INDEX) ;
+ public static final Setting<IndexRateLimitingType> INDEX_STORE_THROTTLE_TYPE_SETTING = new Setting<>("index.store.throttle.type", "none", IndexRateLimitingType::fromString, true, Setting.Scope.INDEX) ;
public static final Setting<ByteSizeValue> INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING = Setting.byteSizeSetting("index.store.throttle.max_bytes_per_sec", new ByteSizeValue(0), true, Setting.Scope.INDEX);
protected final IndexStoreConfig indexStoreConfig;
private final StoreRateLimiting rateLimiting = new StoreRateLimiting();
+ private volatile IndexRateLimitingType type;
public IndexStore(IndexSettings indexSettings, IndexStoreConfig indexStoreConfig) {
super(indexSettings);
this.indexStoreConfig = indexStoreConfig;
- rateLimiting.setType(indexSettings.getValue(INDEX_STORE_THROTTLE_TYPE_SETTING));
+ setType(indexSettings.getValue(INDEX_STORE_THROTTLE_TYPE_SETTING));
rateLimiting.setMaxRate(indexSettings.getValue(INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING));
logger.debug("using index.store.throttle.type [{}], with index.store.throttle.max_bytes_per_sec [{}]", rateLimiting.getType(), rateLimiting.getRateLimiter());
}
@@ -49,7 +49,7 @@ public class IndexStore extends AbstractIndexComponent {
* the node level one (defaults to the node level one).
*/
public StoreRateLimiting rateLimiting() {
- return rateLimiting.getType() == StoreRateLimiting.Type.NONE ? indexStoreConfig.getNodeRateLimiter() : this.rateLimiting;
+ return type.useStoreLimiter() ? indexStoreConfig.getNodeRateLimiter() : this.rateLimiting;
}
/**
@@ -59,11 +59,44 @@ public class IndexStore extends AbstractIndexComponent {
return new FsDirectoryService(indexSettings, this, path);
}
- public void setType(StoreRateLimiting.Type type) {
- rateLimiting.setType(type);
+ public void setType(IndexRateLimitingType type) {
+ this.type = type;
+ if (type.useStoreLimiter() == false) {
+ rateLimiting.setType(type.type);
+ }
}
public void setMaxRate(ByteSizeValue rate) {
rateLimiting.setMaxRate(rate);
}
+
+ /**
+ * On an index level we can configure all of {@link org.apache.lucene.store.StoreRateLimiting.Type} as well as
+ * <tt>node</tt> which will then use a global rate limiter that has it's own configuration. The global one is
+ * configured in {@link IndexStoreConfig} which is managed by the per-node {@link org.elasticsearch.indices.IndicesService}
+ */
+ public static final class IndexRateLimitingType {
+ private final StoreRateLimiting.Type type;
+
+ private IndexRateLimitingType(StoreRateLimiting.Type type) {
+ this.type = type;
+ }
+
+ private boolean useStoreLimiter() {
+ return type == null;
+ }
+
+ static IndexRateLimitingType fromString(String type) {
+ if ("node".equalsIgnoreCase(type)) {
+ return new IndexRateLimitingType(null);
+ } else {
+ try {
+ return new IndexRateLimitingType(StoreRateLimiting.Type.fromString(type));
+ } catch (IllegalArgumentException ex) {
+ throw new IllegalArgumentException("rate limiting type [" + type + "] not valid, can be one of [all|merge|none|node]");
+ }
+ }
+ }
+ }
+
}
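A sketch of the values the reworked setting accepts, using the setting keys defined above; "node" delegates to the node-level limiter while all, merge and none map onto StoreRateLimiting.Type. As above, Settings.builder() is assumed.

    import org.elasticsearch.common.settings.Settings;

    class ThrottleTypeSketch {
        static Settings throttleViaNodeLimiter() {
            return Settings.builder()
                    .put("index.store.throttle.type", "node") // or: all | merge | none
                    .put("index.store.throttle.max_bytes_per_sec", "20mb")
                    .build();
        }
    }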
diff --git a/core/src/main/java/org/elasticsearch/index/store/Store.java b/core/src/main/java/org/elasticsearch/index/store/Store.java
index f6e48e718b..c7377a4ab6 100644
--- a/core/src/main/java/org/elasticsearch/index/store/Store.java
+++ b/core/src/main/java/org/elasticsearch/index/store/Store.java
@@ -70,6 +70,7 @@ import org.elasticsearch.common.util.concurrent.AbstractRefCounted;
import org.elasticsearch.common.util.concurrent.RefCounted;
import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.env.ShardLock;
+import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.shard.AbstractIndexShardComponent;
@@ -408,9 +409,9 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
*
* @throws IOException if the index we try to read is corrupted
*/
- public static MetadataSnapshot readMetadataSnapshot(Path indexLocation, ESLogger logger) throws IOException {
+ public static MetadataSnapshot readMetadataSnapshot(Path indexLocation, ShardId shardId, ESLogger logger) throws IOException {
try (Directory dir = new SimpleFSDirectory(indexLocation)) {
- failIfCorrupted(dir, new ShardId("", 1));
+ failIfCorrupted(dir, shardId);
return new MetadataSnapshot(null, dir, logger);
} catch (IndexNotFoundException ex) {
// that's fine - happens all the time no need to log
@@ -425,9 +426,9 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
* can be successfully opened. This includes reading the segment infos and possible
* corruption markers.
*/
- public static boolean canOpenIndex(ESLogger logger, Path indexLocation) throws IOException {
+ public static boolean canOpenIndex(ESLogger logger, Path indexLocation, ShardId shardId) throws IOException {
try {
- tryOpenIndex(indexLocation);
+ tryOpenIndex(indexLocation, shardId);
} catch (Exception ex) {
logger.trace("Can't open index for path [{}]", ex, indexLocation);
return false;
@@ -440,9 +441,9 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
* segment infos and possible corruption markers. If the index can not
* be opened, an exception is thrown
*/
- public static void tryOpenIndex(Path indexLocation) throws IOException {
+ public static void tryOpenIndex(Path indexLocation, ShardId shardId) throws IOException {
try (Directory dir = new SimpleFSDirectory(indexLocation)) {
- failIfCorrupted(dir, new ShardId("", 1));
+ failIfCorrupted(dir, shardId);
Lucene.readSegmentInfos(dir);
}
}
@@ -929,7 +930,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
return new Tuple<>(indexInput.readStringStringMap(), lastFound);
}
}
- return new Tuple<>(new HashMap<>(), -1l);
+ return new Tuple<>(new HashMap<>(), -1L);
}
}
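Caller-side, the signature change means the real ShardId is threaded through, so corruption markers are reported against the correct shard rather than the old new ShardId("", 1) placeholder. A hedged sketch with assumed arguments:

    import java.io.IOException;
    import java.nio.file.Path;
    import org.elasticsearch.common.logging.ESLogger;
    import org.elasticsearch.index.shard.ShardId;
    import org.elasticsearch.index.store.Store;

    class StoreProbeSketch {
        static boolean probe(ESLogger logger, Path indexLocation, ShardId shardId) throws IOException {
            // any corruption exception raised inside now carries this ShardId
            return Store.canOpenIndex(logger, indexLocation, shardId);
        }
    }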
diff --git a/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java b/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java
index 1777775605..97416e1721 100644
--- a/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java
+++ b/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java
@@ -71,18 +71,16 @@ import static org.elasticsearch.index.mapper.SourceToParse.source;
public class TermVectorsService {
- private final MappingUpdatedAction mappingUpdatedAction;
private final TransportDfsOnlyAction dfsAction;
@Inject
- public TermVectorsService(MappingUpdatedAction mappingUpdatedAction, TransportDfsOnlyAction dfsAction) {
- this.mappingUpdatedAction = mappingUpdatedAction;
+ public TermVectorsService(TransportDfsOnlyAction dfsAction) {
this.dfsAction = dfsAction;
}
public TermVectorsResponse getTermVectors(IndexShard indexShard, TermVectorsRequest request) {
- final TermVectorsResponse termVectorsResponse = new TermVectorsResponse(indexShard.shardId().index().name(), request.type(), request.id());
+ final TermVectorsResponse termVectorsResponse = new TermVectorsResponse(indexShard.shardId().getIndex().getName(), request.type(), request.id());
final Term uidTerm = new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(request.type(), request.id()));
Engine.GetResult get = indexShard.get(new Engine.Get(request.realtime(), uidTerm).version(request.version()).versionType(request.versionType()));
@@ -262,7 +260,7 @@ public class TermVectorsService {
private Fields generateTermVectorsFromDoc(IndexShard indexShard, TermVectorsRequest request, boolean doAllFields) throws Throwable {
// parse the document, at the moment we do update the mapping, just like percolate
- ParsedDocument parsedDocument = parseDocument(indexShard, indexShard.shardId().getIndex(), request.type(), request.doc());
+ ParsedDocument parsedDocument = parseDocument(indexShard, indexShard.shardId().getIndexName(), request.type(), request.doc());
// select the right fields and generate term vectors
ParseContext.Document doc = parsedDocument.rootDoc();
@@ -293,16 +291,11 @@ public class TermVectorsService {
private ParsedDocument parseDocument(IndexShard indexShard, String index, String type, BytesReference doc) throws Throwable {
MapperService mapperService = indexShard.mapperService();
-
- // TODO: make parsing not dynamically create fields not in the original mapping
DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(type);
- ParsedDocument parsedDocument = docMapper.getDocumentMapper().parse(source(doc).index(index).type(type).flyweight(true));
+ ParsedDocument parsedDocument = docMapper.getDocumentMapper().parse(source(doc).index(index).type(type).id("_id_for_tv_api"));
if (docMapper.getMapping() != null) {
parsedDocument.addDynamicMappingsUpdate(docMapper.getMapping());
}
- if (parsedDocument.dynamicMappingsUpdate() != null) {
- mappingUpdatedAction.updateMappingOnMasterSynchronously(index, type, parsedDocument.dynamicMappingsUpdate());
- }
return parsedDocument;
}
diff --git a/core/src/main/java/org/elasticsearch/index/translog/Translog.java b/core/src/main/java/org/elasticsearch/index/translog/Translog.java
index b2e81de044..7579f55f0e 100644
--- a/core/src/main/java/org/elasticsearch/index/translog/Translog.java
+++ b/core/src/main/java/org/elasticsearch/index/translog/Translog.java
@@ -429,9 +429,6 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
try (ReleasableLock lock = readLock.acquire()) {
ensureOpen();
Location location = current.add(bytes);
- if (config.isSyncOnEachOperation()) {
- current.sync();
- }
assert assertBytesAtLocation(location, bytes);
return location;
}
diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogConfig.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogConfig.java
index 682c310102..a4ee7961fc 100644
--- a/core/src/main/java/org/elasticsearch/index/translog/TranslogConfig.java
+++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogConfig.java
@@ -66,13 +66,6 @@ public final class TranslogConfig {
}
/**
-     * Returns <code>true</code> iff each low level operation should be fsynced
- */
- public boolean isSyncOnEachOperation() {
- return indexSettings.getTranslogSyncInterval().millis() == 0;
- }
-
- /**
* Returns the index settings
*/
public IndexSettings getIndexSettings() {
diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java
index f7d0cd571e..a1fc708dda 100644
--- a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java
+++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java
@@ -192,7 +192,17 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
throw e;
}
if (closed.compareAndSet(false, true)) {
- return new TranslogReader(generation, channel, path, firstOperationOffset, getWrittenOffset(), operationCounter);
+ boolean success = false;
+ try {
+ final TranslogReader reader = new TranslogReader(generation, channel, path, firstOperationOffset, getWrittenOffset(), operationCounter);
+ success = true;
+ return reader;
+ } finally {
+ if (success == false) {
+ // close the channel, as we are closed and failed to create a new reader
+ IOUtils.closeWhileHandlingException(channel);
+ }
+ }
} else {
throw new AlreadyClosedException("translog [" + getGeneration() + "] is already closed (path [" + path + "]", tragedy);
}
diff --git a/core/src/main/java/org/elasticsearch/indices/IndexCreationException.java b/core/src/main/java/org/elasticsearch/indices/IndexCreationException.java
index 09b6696e11..1248d73b81 100644
--- a/core/src/main/java/org/elasticsearch/indices/IndexCreationException.java
+++ b/core/src/main/java/org/elasticsearch/indices/IndexCreationException.java
@@ -30,8 +30,8 @@ import java.io.IOException;
*/
public class IndexCreationException extends ElasticsearchException implements ElasticsearchWrapperException {
- public IndexCreationException(Index index, Throwable cause) {
- super("failed to create index", cause);
+ public IndexCreationException(String index, Throwable cause) {
+ super("failed to create index [{}]", cause, index);
setIndex(index);
}
diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java
index fdc448989d..bb61fed436 100644
--- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java
+++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java
@@ -53,7 +53,6 @@ import org.elasticsearch.index.NodeServicesProvider;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.flush.FlushStats;
import org.elasticsearch.index.get.GetStats;
-import org.elasticsearch.index.shard.IndexingStats;
import org.elasticsearch.index.merge.MergeStats;
import org.elasticsearch.index.recovery.RecoveryStats;
import org.elasticsearch.index.refresh.RefreshStats;
@@ -61,6 +60,7 @@ import org.elasticsearch.index.search.stats.SearchStats;
import org.elasticsearch.index.shard.IllegalIndexShardStateException;
import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.index.shard.IndexShard;
+import org.elasticsearch.index.shard.IndexingStats;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.store.IndexStoreConfig;
import org.elasticsearch.indices.mapper.MapperRegistry;
@@ -258,6 +258,15 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
}
/**
+ * Returns an IndexService for the specified index if it exists, otherwise returns <code>null</code>.
+ *
+ */
+ @Nullable
+ public IndexService indexService(Index index) {
+ return indexService(index.getName());
+ }
+
+ /**
* Returns an IndexService for the specified index if it exists, otherwise an {@link IndexNotFoundException} is thrown.
*/
public IndexService indexServiceSafe(String index) {
@@ -268,6 +277,17 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
return indexService;
}
+ /**
+ * Returns an IndexService for the specified index if it exists, otherwise an {@link IndexNotFoundException} is thrown.
+ */
+ public IndexService indexServiceSafe(Index index) {
+ IndexService indexService = indexServiceSafe(index.getName());
+ if (indexService.indexUUID().equals(index.getUUID()) == false) {
+ throw new IndexNotFoundException(index);
+ }
+ return indexService;
+ }
+
/**
@@ -280,11 +300,10 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
if (!lifecycle.started()) {
throw new IllegalStateException("Can't create an index [" + indexMetaData.getIndex() + "], node is closed");
}
- final String indexName = indexMetaData.getIndex();
- final Predicate<String> indexNameMatcher = (indexExpression) -> indexNameExpressionResolver.matchesIndex(indexName, indexExpression, clusterService.state());
+ final Index index = indexMetaData.getIndex();
+ final Predicate<String> indexNameMatcher = (indexExpression) -> indexNameExpressionResolver.matchesIndex(index.getName(), indexExpression, clusterService.state());
final IndexSettings idxSettings = new IndexSettings(indexMetaData, this.settings, indexNameMatcher, indexScopeSetting);
- Index index = new Index(indexMetaData.getIndex());
- if (indices.containsKey(index.name())) {
+ if (indices.containsKey(index.getName())) {
throw new IndexAlreadyExistsException(index);
}
logger.debug("creating Index [{}], shards [{}]/[{}{}]",
@@ -306,7 +325,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
try {
assert indexService.getIndexEventListener() == listener;
listener.afterIndexCreated(indexService);
- indices = newMapBuilder(indices).put(index.name(), indexService).immutableMap();
+ indices = newMapBuilder(indices).put(index.getName(), indexService).immutableMap();
success = true;
return indexService;
} finally {
@@ -401,7 +420,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
public void deleteClosedIndex(String reason, IndexMetaData metaData, ClusterState clusterState) {
if (nodeEnv.hasNodeFile()) {
- String indexName = metaData.getIndex();
+ String indexName = metaData.getIndex().getName();
try {
if (clusterState.metaData().hasIndex(indexName)) {
final IndexMetaData index = clusterState.metaData().index(indexName);
@@ -421,7 +440,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
public void deleteIndexStore(String reason, IndexMetaData metaData, ClusterState clusterState, boolean closed) throws IOException {
if (nodeEnv.hasNodeFile()) {
synchronized (this) {
- String indexName = metaData.getIndex();
+ String indexName = metaData.getIndex().getName();
if (indices.containsKey(indexName)) {
String localUUid = indices.get(indexName).indexUUID();
throw new IllegalStateException("Can't delete index store for [" + indexName + "] - it's still part of the indices service [" + localUUid + "] [" + metaData.getIndexUUID() + "]");
@@ -433,9 +452,8 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
throw new IllegalStateException("Can't delete closed index store for [" + indexName + "] - it's still part of the cluster state [" + index.getIndexUUID() + "] [" + metaData.getIndexUUID() + "]");
}
}
- Index index = new Index(metaData.getIndex());
final IndexSettings indexSettings = buildIndexSettings(metaData);
- deleteIndexStore(reason, index, indexSettings, closed);
+ deleteIndexStore(reason, indexSettings.getIndex(), indexSettings, closed);
}
}
@@ -460,7 +478,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
}
// this is purely a protection to make sure this index doesn't get re-imported as a dangling index.
// in the future we should write a tombstone rather than wiping the metadata.
- MetaDataStateFormat.deleteMetaState(nodeEnv.indexPaths(index));
+ MetaDataStateFormat.deleteMetaState(nodeEnv.indexPaths(index.getName()));
}
}
@@ -490,7 +508,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
* @throws IOException if an IOException occurs
*/
public void deleteShardStore(String reason, ShardId shardId, ClusterState clusterState) throws IOException {
- final IndexMetaData metaData = clusterState.getMetaData().indices().get(shardId.getIndex());
+ final IndexMetaData metaData = clusterState.getMetaData().indices().get(shardId.getIndexName());
final IndexSettings indexSettings = buildIndexSettings(metaData);
if (canDeleteShardContent(shardId, indexSettings) == false) {
@@ -500,8 +518,8 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
logger.debug("{} deleted shard reason [{}]", shardId, reason);
if (clusterState.nodes().localNode().isMasterNode() == false && // master nodes keep the index meta data, even if they have no shards
- canDeleteIndexContents(shardId.index(), indexSettings, false)) {
- if (nodeEnv.findAllShardIds(shardId.index()).isEmpty()) {
+ canDeleteIndexContents(shardId.getIndex(), indexSettings, false)) {
+ if (nodeEnv.findAllShardIds(shardId.getIndex()).isEmpty()) {
try {
// note that deleteIndexStore has more safety checks and may throw an exception if the index was concurrently created.
deleteIndexStore("no longer used", metaData, clusterState, false);
@@ -510,7 +528,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
throw new ElasticsearchException("failed to delete unused index after deleting its last shard (" + shardId + ")", e);
}
} else {
- logger.trace("[{}] still has shard stores, leaving as is", shardId.index());
+ logger.trace("[{}] still has shard stores, leaving as is", shardId.getIndex());
}
}
}
@@ -524,7 +542,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
* @return true if the index can be deleted on this node
*/
public boolean canDeleteIndexContents(Index index, IndexSettings indexSettings, boolean closed) {
- final IndexService indexService = this.indices.get(index.name());
+ final IndexService indexService = this.indices.get(index.getName());
// Closed indices may be deleted, even if they are on a shared
// filesystem. Since it is closed we aren't deleting it for relocation
if (indexSettings.isOnSharedFilesystem() == false || closed) {
@@ -550,8 +568,8 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
* @param indexSettings the shard's relevant {@link IndexSettings}. This is required to access the index's settings etc.
*/
public boolean canDeleteShardContent(ShardId shardId, IndexSettings indexSettings) {
- assert shardId.getIndex().equals(indexSettings.getIndex().name());
- final IndexService indexService = this.indices.get(shardId.getIndex());
+ assert shardId.getIndex().equals(indexSettings.getIndex());
+ final IndexService indexService = this.indices.get(shardId.getIndexName());
if (indexSettings.isOnSharedFilesystem() == false) {
if (indexService != null && nodeEnv.hasNodeFile()) {
return indexService.hasShard(shardId.id()) == false;
@@ -586,7 +604,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
throw new IllegalArgumentException("settings must not be null");
}
PendingDelete pendingDelete = new PendingDelete(shardId, settings);
- addPendingDelete(shardId.index(), pendingDelete);
+ addPendingDelete(shardId.getIndex(), pendingDelete);
}
/**
@@ -609,7 +627,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
}
private static final class PendingDelete implements Comparable<PendingDelete> {
- final String index;
+ final Index index;
final int shardId;
final IndexSettings settings;
final boolean deleteIndex;
@@ -628,7 +646,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
* Creates a new pending delete of a shard
*/
public PendingDelete(Index index, IndexSettings settings) {
- this.index = index.getName();
+ this.index = index;
this.shardId = -1;
this.settings = settings;
this.deleteIndex = true;
@@ -742,5 +760,4 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
public AnalysisRegistry getAnalysis() {
return analysisRegistry;
}
-
}
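
Several hunks above move IndicesService from name-keyed lookups to the Index type that carries both a name and a UUID; the new indexServiceSafe(Index) only succeeds when the UUID matches, so a re-created index with the same name is not mistaken for the old one. A hedged sketch of that contract, with toy stand-ins for the Elasticsearch types:

    import java.util.Collections;
    import java.util.Map;

    public class IndexLookupDemo {
        static final class Index {
            final String name;
            final String uuid;
            Index(String name, String uuid) { this.name = name; this.uuid = uuid; }
            @Override public String toString() { return "[" + name + "/" + uuid + "]"; }
        }
        static final class IndexService {
            final String uuid;
            IndexService(String uuid) { this.uuid = uuid; }
        }
        static final class IndexNotFoundException extends RuntimeException {
            IndexNotFoundException(String message) { super(message); }
        }

        static IndexService indexServiceSafe(Map<String, IndexService> indices, Index index) {
            IndexService service = indices.get(index.name);
            if (service == null || service.uuid.equals(index.uuid) == false) {
                // a same-named index with a different UUID is a different index
                throw new IndexNotFoundException("no such index " + index);
            }
            return service;
        }

        public static void main(String[] args) {
            Map<String, IndexService> indices = Collections.singletonMap("logs", new IndexService("u1"));
            System.out.println(indexServiceSafe(indices, new Index("logs", "u1")).uuid); // u1
            try {
                indexServiceSafe(indices, new Index("logs", "stale-uuid"));
            } catch (IndexNotFoundException e) {
                System.out.println("rejected: " + e.getMessage());
            }
        }
    }
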
diff --git a/core/src/main/java/org/elasticsearch/indices/InvalidIndexNameException.java b/core/src/main/java/org/elasticsearch/indices/InvalidIndexNameException.java
index 163f4df26a..34dd327c91 100644
--- a/core/src/main/java/org/elasticsearch/indices/InvalidIndexNameException.java
+++ b/core/src/main/java/org/elasticsearch/indices/InvalidIndexNameException.java
@@ -31,6 +31,10 @@ import java.io.IOException;
*/
public class InvalidIndexNameException extends ElasticsearchException {
+ public InvalidIndexNameException(String name, String desc) {
+ super("Invalid index name [" + name + "], " + desc);
+ setIndex(name);
+ }
public InvalidIndexNameException(Index index, String name, String desc) {
super("Invalid index name [" + name + "], " + desc);
setIndex(index);
diff --git a/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java b/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java
index 0a036cbd80..7d24d4fa89 100644
--- a/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java
+++ b/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java
@@ -172,7 +172,7 @@ public class NodeIndicesStats implements Streamable, ToXContent {
int entries = in.readVInt();
statsByShard = new HashMap<>();
for (int i = 0; i < entries; i++) {
- Index index = Index.readIndexName(in);
+ Index index = Index.readIndex(in);
int indexShardListSize = in.readVInt();
List<IndexShardStats> indexShardStats = new ArrayList<>(indexShardListSize);
for (int j = 0; j < indexShardListSize; j++) {
@@ -215,7 +215,7 @@ public class NodeIndicesStats implements Streamable, ToXContent {
Map<Index, CommonStats> indexStats = createStatsByIndex();
builder.startObject(Fields.INDICES);
for (Map.Entry<Index, CommonStats> entry : indexStats.entrySet()) {
- builder.startObject(entry.getKey().name());
+ builder.startObject(entry.getKey().getName());
entry.getValue().toXContent(builder, params);
builder.endObject();
}
@@ -223,7 +223,7 @@ public class NodeIndicesStats implements Streamable, ToXContent {
} else if ("shards".equals(level)) {
builder.startObject("shards");
for (Map.Entry<Index, List<IndexShardStats>> entry : statsByShard.entrySet()) {
- builder.startArray(entry.getKey().name());
+ builder.startArray(entry.getKey().getName());
for (IndexShardStats indexShardStats : entry.getValue()) {
builder.startObject().startObject(String.valueOf(indexShardStats.getShardId().getId()));
for (ShardStats shardStats : indexShardStats.getShards()) {
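
The readIndexName to readIndex change above means an Index now round-trips over the wire as a (name, uuid) pair instead of a bare name. An illustrative sketch using plain java.io streams rather than Elasticsearch's StreamInput/StreamOutput:

    import java.io.*;

    public class IndexStreamDemo {
        static final class Index {
            final String name;
            final String uuid;
            Index(String name, String uuid) { this.name = name; this.uuid = uuid; }
            @Override public String toString() { return "[" + name + "/" + uuid + "]"; }
        }

        // Write both fields, mirroring the move from readIndexName to readIndex.
        static void writeIndex(DataOutput out, Index index) throws IOException {
            out.writeUTF(index.name);
            out.writeUTF(index.uuid);
        }

        static Index readIndex(DataInput in) throws IOException {
            return new Index(in.readUTF(), in.readUTF());
        }

        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            writeIndex(new DataOutputStream(bytes), new Index("logs", "u1"));
            Index copy = readIndex(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
            System.out.println(copy); // [logs/u1]
        }
    }
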
diff --git a/core/src/main/java/org/elasticsearch/indices/TypeMissingException.java b/core/src/main/java/org/elasticsearch/indices/TypeMissingException.java
index 0a332dbaf1..6c952fb241 100644
--- a/core/src/main/java/org/elasticsearch/indices/TypeMissingException.java
+++ b/core/src/main/java/org/elasticsearch/indices/TypeMissingException.java
@@ -37,6 +37,11 @@ public class TypeMissingException extends ElasticsearchException {
setIndex(index);
}
+ public TypeMissingException(String index, String... types) {
+ super("type[" + Arrays.toString(types) + "] missing");
+ setIndex(index);
+ }
+
public TypeMissingException(StreamInput in) throws IOException{
super(in);
}
diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java
index 3e63b6fba6..f99b39ef62 100644
--- a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java
+++ b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java
@@ -23,6 +23,7 @@ import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.io.FileSystemUtils;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
@@ -70,9 +71,9 @@ import java.util.function.Function;
*/
public class HunspellService extends AbstractComponent {
- public final static String HUNSPELL_LAZY_LOAD = "indices.analysis.hunspell.dictionary.lazy";
- public final static String HUNSPELL_IGNORE_CASE = "indices.analysis.hunspell.dictionary.ignore_case";
- private final static String OLD_HUNSPELL_LOCATION = "indices.analysis.hunspell.dictionary.location";
+ public final static Setting<Boolean> HUNSPELL_LAZY_LOAD = Setting.boolSetting("indices.analysis.hunspell.dictionary.lazy", Boolean.FALSE, false, Setting.Scope.CLUSTER);
+ public final static Setting<Boolean> HUNSPELL_IGNORE_CASE = Setting.boolSetting("indices.analysis.hunspell.dictionary.ignore_case", Boolean.FALSE, false, Setting.Scope.CLUSTER);
+ public final static Setting<Settings> HUNSPELL_DICTIONARY_OPTIONS = Setting.groupSetting("indices.analysis.hunspell.dictionary.", false, Setting.Scope.CLUSTER);
private final ConcurrentHashMap<String, Dictionary> dictionaries = new ConcurrentHashMap<>();
private final Map<String, Dictionary> knownDictionaries;
private final boolean defaultIgnoreCase;
@@ -82,8 +83,8 @@ public class HunspellService extends AbstractComponent {
public HunspellService(final Settings settings, final Environment env, final Map<String, Dictionary> knownDictionaries) throws IOException {
super(settings);
this.knownDictionaries = Collections.unmodifiableMap(knownDictionaries);
- this.hunspellDir = resolveHunspellDirectory(settings, env);
- this.defaultIgnoreCase = settings.getAsBoolean(HUNSPELL_IGNORE_CASE, false);
+ this.hunspellDir = resolveHunspellDirectory(env);
+ this.defaultIgnoreCase = HUNSPELL_IGNORE_CASE.get(settings);
this.loadingFunction = (locale) -> {
try {
return loadDictionary(locale, settings, env);
@@ -91,7 +92,7 @@ public class HunspellService extends AbstractComponent {
throw new IllegalStateException("failed to load hunspell dictionary for locale: " + locale, e);
}
};
- if (!settings.getAsBoolean(HUNSPELL_LAZY_LOAD, false)) {
+ if (!HUNSPELL_LAZY_LOAD.get(settings)) {
scanAndLoadDictionaries();
}
@@ -110,11 +111,7 @@ public class HunspellService extends AbstractComponent {
return dictionary;
}
- private Path resolveHunspellDirectory(Settings settings, Environment env) {
- String location = settings.get(OLD_HUNSPELL_LOCATION, null);
- if (location != null) {
- throw new IllegalArgumentException("please, put your hunspell dictionaries under config/hunspell !");
- }
+ private Path resolveHunspellDirectory(Environment env) {
return env.configFile().resolve("hunspell");
}
@@ -162,7 +159,8 @@ public class HunspellService extends AbstractComponent {
}
// merging node settings with hunspell dictionary specific settings
- nodeSettings = loadDictionarySettings(dicDir, nodeSettings.getByPrefix("indices.analysis.hunspell.dictionary." + locale + "."));
+ Settings dictSettings = HUNSPELL_DICTIONARY_OPTIONS.get(nodeSettings);
+ nodeSettings = loadDictionarySettings(dicDir, dictSettings.getByPrefix(locale));
boolean ignoreCase = nodeSettings.getAsBoolean("ignore_case", defaultIgnoreCase);
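
This hunk migrates HunspellService from raw string keys to the typed Setting infrastructure, where the default value and the parser live on the setting itself. A minimal sketch of the pattern, assuming a toy Setting class; the real org.elasticsearch.common.settings.Setting also carries a dynamic flag and a scope:

    import java.util.Collections;
    import java.util.Map;
    import java.util.function.Function;

    public class SettingDemo {
        static final class Setting<T> {
            final String key;
            final T defaultValue;
            final Function<String, T> parser;
            Setting(String key, T defaultValue, Function<String, T> parser) {
                this.key = key; this.defaultValue = defaultValue; this.parser = parser;
            }
            T get(Map<String, String> settings) {
                String raw = settings.get(key);
                return raw == null ? defaultValue : parser.apply(raw);
            }
            static Setting<Boolean> boolSetting(String key, boolean defaultValue) {
                return new Setting<>(key, defaultValue, Boolean::parseBoolean);
            }
        }

        static final Setting<Boolean> HUNSPELL_LAZY_LOAD =
                Setting.boolSetting("indices.analysis.hunspell.dictionary.lazy", false);

        public static void main(String[] args) {
            System.out.println(HUNSPELL_LAZY_LOAD.get(Collections.<String, String>emptyMap())); // false (default)
            System.out.println(HUNSPELL_LAZY_LOAD.get(
                    Collections.singletonMap("indices.analysis.hunspell.dictionary.lazy", "true"))); // true
        }
    }
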
diff --git a/core/src/main/java/org/elasticsearch/indices/cache/query/IndicesQueryCache.java b/core/src/main/java/org/elasticsearch/indices/cache/query/IndicesQueryCache.java
index 23b4bc84c4..58c2cd5a95 100644
--- a/core/src/main/java/org/elasticsearch/indices/cache/query/IndicesQueryCache.java
+++ b/core/src/main/java/org/elasticsearch/indices/cache/query/IndicesQueryCache.java
@@ -32,9 +32,10 @@ import org.apache.lucene.search.Weight;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.ShardCoreKeyMap;
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Setting.Scope;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
-import org.elasticsearch.common.unit.MemorySizeValue;
import org.elasticsearch.index.cache.query.QueryCacheStats;
import org.elasticsearch.index.shard.ShardId;
@@ -48,10 +49,10 @@ import java.util.concurrent.ConcurrentHashMap;
public class IndicesQueryCache extends AbstractComponent implements QueryCache, Closeable {
- public static final String INDICES_CACHE_QUERY_SIZE = "indices.queries.cache.size";
- @Deprecated
- public static final String DEPRECATED_INDICES_CACHE_QUERY_SIZE = "indices.cache.filter.size";
- public static final String INDICES_CACHE_QUERY_COUNT = "indices.queries.cache.count";
+ public static final Setting<ByteSizeValue> INDICES_CACHE_QUERY_SIZE_SETTING = Setting.byteSizeSetting(
+ "indices.queries.cache.size", "10%", false, Scope.CLUSTER);
+ public static final Setting<Integer> INDICES_CACHE_QUERY_COUNT_SETTING = Setting.intSetting(
+ "indices.queries.cache.count", 10000, 1, false, Scope.CLUSTER);
private final LRUQueryCache cache;
private final ShardCoreKeyMap shardKeyMap = new ShardCoreKeyMap();
@@ -66,21 +67,10 @@ public class IndicesQueryCache extends AbstractComponent implements QueryCache,
@Inject
public IndicesQueryCache(Settings settings) {
super(settings);
- String sizeString = settings.get(INDICES_CACHE_QUERY_SIZE);
- if (sizeString == null) {
- sizeString = settings.get(DEPRECATED_INDICES_CACHE_QUERY_SIZE);
- if (sizeString != null) {
- deprecationLogger.deprecated("The [" + DEPRECATED_INDICES_CACHE_QUERY_SIZE
- + "] settings is now deprecated, use [" + INDICES_CACHE_QUERY_SIZE + "] instead");
- }
- }
- if (sizeString == null) {
- sizeString = "10%";
- }
- final ByteSizeValue size = MemorySizeValue.parseBytesSizeValueOrHeapRatio(sizeString, INDICES_CACHE_QUERY_SIZE);
- final int count = settings.getAsInt(INDICES_CACHE_QUERY_COUNT, 1000);
- logger.debug("using [node] query cache with size [{}], actual_size [{}], max filter count [{}]",
- sizeString, size, count);
+ final ByteSizeValue size = INDICES_CACHE_QUERY_SIZE_SETTING.get(settings);
+ final int count = INDICES_CACHE_QUERY_COUNT_SETTING.get(settings);
+ logger.debug("using [node] query cache with size [{}] max filter count [{}]",
+ size, count);
cache = new LRUQueryCache(count, size.bytes()) {
private Stats getStats(Object coreKey) {
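
IndicesQueryCache gets the same treatment: the deprecated-key fallback and hand-rolled parsing collapse into declarative settings with a default and a lower bound (note the count default also moves from 1000 to 10000). A sketch of a bounded integer setting, with a hypothetical helper rather than the real Setting.intSetting:

    import java.util.Collections;
    import java.util.Map;

    public class BoundedIntSettingDemo {
        static int intSetting(Map<String, String> settings, String key, int defaultValue, int minValue) {
            String raw = settings.get(key);
            int value = raw == null ? defaultValue : Integer.parseInt(raw);
            if (value < minValue) {
                throw new IllegalArgumentException(
                        "Failed to parse value [" + value + "] for setting [" + key + "] must be >= " + minValue);
            }
            return value;
        }

        public static void main(String[] args) {
            System.out.println(intSetting(Collections.<String, String>emptyMap(),
                    "indices.queries.cache.count", 10000, 1)); // 10000 (default)
            System.out.println(intSetting(Collections.singletonMap("indices.queries.cache.count", "500"),
                    "indices.queries.cache.count", 10000, 1)); // 500
        }
    }
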
diff --git a/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java b/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java
index 6a252178d4..36ac787855 100644
--- a/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java
+++ b/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java
@@ -40,6 +40,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.MemorySizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
@@ -79,11 +80,11 @@ public class IndicesRequestCache extends AbstractComponent implements RemovalLis
* A setting to enable or disable request caching on an index level. It's dynamic by default
* since we always check the IndexMetaData in the cluster state.
*/
- public static final Setting<Boolean> INDEX_CACHE_REQUEST_ENABLED_SETTING = Setting.boolSetting("index.requests.cache.enable", true, true, Setting.Scope.INDEX);
- public static final String INDICES_CACHE_REQUEST_CLEAN_INTERVAL = "indices.requests.cache.clean_interval";
+ public static final Setting<Boolean> INDEX_CACHE_REQUEST_ENABLED_SETTING = Setting.boolSetting("index.requests.cache.enable", false, true, Setting.Scope.INDEX);
+ public static final Setting<TimeValue> INDICES_CACHE_REQUEST_CLEAN_INTERVAL = Setting.positiveTimeSetting("indices.requests.cache.clean_interval", TimeValue.timeValueSeconds(60), false, Setting.Scope.CLUSTER);
- public static final String INDICES_CACHE_QUERY_SIZE = "indices.requests.cache.size";
- public static final String INDICES_CACHE_QUERY_EXPIRE = "indices.requests.cache.expire";
+ public static final Setting<ByteSizeValue> INDICES_CACHE_QUERY_SIZE = Setting.byteSizeSetting("indices.requests.cache.size", "1%", false, Setting.Scope.CLUSTER);
+ public static final Setting<TimeValue> INDICES_CACHE_QUERY_EXPIRE = Setting.positiveTimeSetting("indices.requests.cache.expire", new TimeValue(0), false, Setting.Scope.CLUSTER);
private static final Set<SearchType> CACHEABLE_SEARCH_TYPES = EnumSet.of(SearchType.QUERY_THEN_FETCH, SearchType.QUERY_AND_FETCH);
@@ -98,7 +99,7 @@ public class IndicesRequestCache extends AbstractComponent implements RemovalLis
//TODO make these changes configurable on the cluster level
- private final String size;
+ private final ByteSizeValue size;
private final TimeValue expire;
private volatile Cache<Key, Value> cache;
@@ -108,11 +109,11 @@ public class IndicesRequestCache extends AbstractComponent implements RemovalLis
super(settings);
this.clusterService = clusterService;
this.threadPool = threadPool;
- this.cleanInterval = settings.getAsTime(INDICES_CACHE_REQUEST_CLEAN_INTERVAL, TimeValue.timeValueSeconds(60));
+ this.cleanInterval = INDICES_CACHE_REQUEST_CLEAN_INTERVAL.get(settings);
- this.size = settings.get(INDICES_CACHE_QUERY_SIZE, "1%");
+ this.size = INDICES_CACHE_QUERY_SIZE.get(settings);
- this.expire = settings.getAsTime(INDICES_CACHE_QUERY_EXPIRE, null);
+ this.expire = INDICES_CACHE_QUERY_EXPIRE.exists(settings) ? INDICES_CACHE_QUERY_EXPIRE.get(settings) : null;
buildCache();
this.reaper = new Reaper();
@@ -121,7 +122,7 @@ public class IndicesRequestCache extends AbstractComponent implements RemovalLis
private void buildCache() {
- long sizeInBytes = MemorySizeValue.parseBytesSizeValueOrHeapRatio(size, INDICES_CACHE_QUERY_SIZE).bytes();
+ long sizeInBytes = size.bytes();
CacheBuilder<Key, Value> cacheBuilder = CacheBuilder.<Key, Value>builder()
.setMaximumWeight(sizeInBytes).weigher((k, v) -> k.ramBytesUsed() + v.ramBytesUsed()).removalListener(this);
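
For the expire setting there is no meaningful default, so the code above asks exists() first and keeps null when the key is absent. A small sketch of that optional-setting pattern, with illustrative names:

    import java.util.Collections;
    import java.util.Map;
    import java.util.concurrent.TimeUnit;

    public class OptionalSettingDemo {
        // Returns null when the setting is absent, like the exists()/get() ternary above.
        static Long expireNanos(Map<String, String> settings, String key) {
            String raw = settings.get(key);
            return raw == null ? null : TimeUnit.SECONDS.toNanos(Long.parseLong(raw));
        }

        public static void main(String[] args) {
            System.out.println(expireNanos(Collections.<String, String>emptyMap(),
                    "indices.requests.cache.expire"));   // null
            System.out.println(expireNanos(Collections.singletonMap("indices.requests.cache.expire", "30"),
                    "indices.requests.cache.expire"));   // 30000000000
        }
    }
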
diff --git a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java
index 49068eec00..8c2f23f708 100644
--- a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java
+++ b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java
@@ -27,7 +27,6 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.action.index.NodeIndexDeletedAction;
import org.elasticsearch.cluster.action.index.NodeMappingRefreshAction;
-import org.elasticsearch.cluster.action.shard.NoOpShardStateActionListener;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
@@ -73,7 +72,6 @@ import org.elasticsearch.snapshots.RestoreService;
import org.elasticsearch.threadpool.ThreadPool;
import java.util.Arrays;
-import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -93,7 +91,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
private final NodeMappingRefreshAction nodeMappingRefreshAction;
private final NodeServicesProvider nodeServicesProvider;
- private static final ShardStateAction.Listener SHARD_STATE_ACTION_LISTENER = new NoOpShardStateActionListener();
+ private static final ShardStateAction.Listener SHARD_STATE_ACTION_LISTENER = new ShardStateAction.Listener() {};
// a map of mapping types we have seen per index due to cluster state
// we need this so we won't remove types automatically created as part of the indexing process
@@ -236,11 +234,11 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
assert localNodeId != null;
for (IndexService indexService : indicesService) {
- IndexMetaData indexMetaData = event.state().metaData().index(indexService.index().name());
+ IndexMetaData indexMetaData = event.state().metaData().index(indexService.index().getName());
if (indexMetaData != null) {
if (!indexMetaData.isSameUUID(indexService.indexUUID())) {
logger.debug("[{}] mismatch on index UUIDs between cluster state and local state, cleaning the index so it will be recreated", indexMetaData.getIndex());
- deleteIndex(indexMetaData.getIndex(), "mismatch on index UUIDs between cluster state and local state, cleaning the index so it will be recreated");
+ deleteIndex(indexMetaData.getIndex().getName(), "mismatch on index UUIDs between cluster state and local state, cleaning the index so it will be recreated");
}
}
}
@@ -277,15 +275,15 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
}
IntHashSet newShardIds = new IntHashSet();
for (IndexService indexService : indicesService) {
- String index = indexService.index().name();
- IndexMetaData indexMetaData = event.state().metaData().index(index);
+ String indexName = indexService.index().getName();
+ IndexMetaData indexMetaData = event.state().metaData().index(indexName);
if (indexMetaData == null) {
continue;
}
// now, go over and delete shards that needs to get deleted
newShardIds.clear();
for (ShardRouting shard : routingNode) {
- if (shard.index().equals(index)) {
+ if (shard.index().getName().equals(indexName)) {
newShardIds.add(shard.id());
}
}
@@ -293,14 +291,14 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
if (!newShardIds.contains(existingShardId)) {
if (indexMetaData.getState() == IndexMetaData.State.CLOSE) {
if (logger.isDebugEnabled()) {
- logger.debug("[{}][{}] removing shard (index is closed)", index, existingShardId);
+ logger.debug("[{}][{}] removing shard (index is closed)", indexName, existingShardId);
}
indexService.removeShard(existingShardId, "removing shard (index is closed)");
} else {
// we can just remove the shard, without cleaning it locally, since we will clean it
// when all shards are allocated in the IndicesStore
if (logger.isDebugEnabled()) {
- logger.debug("[{}][{}] removing shard (not allocated)", index, existingShardId);
+ logger.debug("[{}][{}] removing shard (not allocated)", indexName, existingShardId);
}
indexService.removeShard(existingShardId, "removing shard (not allocated)");
}
@@ -316,7 +314,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
return;
}
for (ShardRouting shard : routingNode) {
- if (!indicesService.hasIndex(shard.index())) {
+ if (!indicesService.hasIndex(shard.getIndexName())) {
final IndexMetaData indexMetaData = event.state().metaData().index(shard.index());
if (logger.isDebugEnabled()) {
logger.debug("[{}] creating index", indexMetaData.getIndex());
@@ -335,7 +333,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
return;
}
for (IndexMetaData indexMetaData : event.state().metaData()) {
- if (!indicesService.hasIndex(indexMetaData.getIndex())) {
+ if (!indicesService.hasIndex(indexMetaData.getIndex().getName())) {
// we only create / update here
continue;
}
@@ -343,7 +341,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
if (!event.indexMetaDataChanged(indexMetaData)) {
continue;
}
- String index = indexMetaData.getIndex();
+ String index = indexMetaData.getIndex().getName();
IndexService indexService = indicesService.indexService(index);
if (indexService == null) {
// already deleted on us, ignore it
@@ -357,12 +355,12 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
private void applyMappings(ClusterChangedEvent event) {
// go over and update mappings
for (IndexMetaData indexMetaData : event.state().metaData()) {
- if (!indicesService.hasIndex(indexMetaData.getIndex())) {
+ if (!indicesService.hasIndex(indexMetaData.getIndex().getName())) {
// we only create / update here
continue;
}
boolean requireRefresh = false;
- String index = indexMetaData.getIndex();
+ String index = indexMetaData.getIndex().getName();
IndexService indexService = indicesService.indexService(index);
if (indexService == null) {
// got deleted on us, ignore (closing the node)
@@ -755,7 +753,6 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
} catch (Throwable e) {
logger.warn("failed to clean index ({})", e, reason);
}
-
}
private void deleteIndex(String index, String reason) {
@@ -776,7 +773,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
} catch (ShardNotFoundException e) {
// the node got closed on us, ignore it
} catch (Throwable e1) {
- logger.warn("[{}][{}] failed to remove shard after failure ([{}])", e1, shardRouting.getIndex(), shardRouting.getId(), message);
+ logger.warn("[{}][{}] failed to remove shard after failure ([{}])", e1, shardRouting.getIndexName(), shardRouting.getId(), message);
}
}
if (sendShardFailure) {
@@ -790,14 +787,14 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
failedShards.put(shardRouting.shardId(), new FailedShard(shardRouting.version()));
shardStateAction.shardFailed(shardRouting, indexUUID, message, failure, SHARD_STATE_ACTION_LISTENER);
} catch (Throwable e1) {
- logger.warn("[{}][{}] failed to mark shard as failed (because of [{}])", e1, shardRouting.getIndex(), shardRouting.getId(), message);
+ logger.warn("[{}][{}] failed to mark shard as failed (because of [{}])", e1, shardRouting.getIndexName(), shardRouting.getId(), message);
}
}
private class FailedShardHandler implements Callback<IndexShard.ShardFailure> {
@Override
public void handle(final IndexShard.ShardFailure shardFailure) {
- final IndexService indexService = indicesService.indexService(shardFailure.routing.shardId().index().name());
+ final IndexService indexService = indicesService.indexService(shardFailure.routing.shardId().getIndex().getName());
final ShardRouting shardRouting = shardFailure.routing;
threadPool.generic().execute(() -> {
synchronized (mutex) {
diff --git a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java
index 9181c6248e..144f8b7f77 100644
--- a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java
+++ b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java
@@ -31,8 +31,10 @@ import org.elasticsearch.common.cache.RemovalListener;
import org.elasticsearch.common.cache.RemovalNotification;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
@@ -51,10 +53,10 @@ import java.util.function.ToLongBiFunction;
/**
*/
-public class IndicesFieldDataCache extends AbstractComponent implements RemovalListener<IndicesFieldDataCache.Key, Accountable> {
+public class IndicesFieldDataCache extends AbstractComponent implements RemovalListener<IndicesFieldDataCache.Key, Accountable>, Releasable {
- public static final String FIELDDATA_CLEAN_INTERVAL_SETTING = "indices.fielddata.cache.cleanup_interval";
- public static final String INDICES_FIELDDATA_CACHE_SIZE_KEY = "indices.fielddata.cache.size";
+ public static final Setting<TimeValue> INDICES_FIELDDATA_CLEAN_INTERVAL_SETTING = Setting.positiveTimeSetting("indices.fielddata.cache.cleanup_interval", TimeValue.timeValueMinutes(1), false, Setting.Scope.CLUSTER);
+ public static final Setting<ByteSizeValue> INDICES_FIELDDATA_CACHE_SIZE_KEY = Setting.byteSizeSetting("indices.fielddata.cache.size", new ByteSizeValue(-1), false, Setting.Scope.CLUSTER);
private final IndicesFieldDataCacheListener indicesFieldDataCacheListener;
@@ -68,23 +70,22 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL
super(settings);
this.threadPool = threadPool;
this.indicesFieldDataCacheListener = indicesFieldDataCacheListener;
- final String size = settings.get(INDICES_FIELDDATA_CACHE_SIZE_KEY, "-1");
- final long sizeInBytes = settings.getAsMemory(INDICES_FIELDDATA_CACHE_SIZE_KEY, "-1").bytes();
+ final long sizeInBytes = INDICES_FIELDDATA_CACHE_SIZE_KEY.get(settings).bytes();
CacheBuilder<Key, Accountable> cacheBuilder = CacheBuilder.<Key, Accountable>builder()
.removalListener(this);
if (sizeInBytes > 0) {
cacheBuilder.setMaximumWeight(sizeInBytes).weigher(new FieldDataWeigher());
}
- logger.debug("using size [{}] [{}]", size, new ByteSizeValue(sizeInBytes));
cache = cacheBuilder.build();
- this.cleanInterval = settings.getAsTime(FIELDDATA_CLEAN_INTERVAL_SETTING, TimeValue.timeValueMinutes(1));
+ this.cleanInterval = INDICES_FIELDDATA_CLEAN_INTERVAL_SETTING.get(settings);
// Start thread that will manage cleaning the field data cache periodically
threadPool.schedule(this.cleanInterval, ThreadPool.Names.SAME,
new FieldDataCacheCleaner(this.cache, this.logger, this.threadPool, this.cleanInterval));
}
+ @Override
public void close() {
cache.invalidateAll();
this.closed = true;
diff --git a/core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java b/core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java
index 0918ad2afe..90f2cb5073 100644
--- a/core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java
+++ b/core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java
@@ -119,7 +119,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL
final IndexMetaData indexMetaData = state.metaData().index(index);
totalNumberOfShards += indexMetaData.getTotalNumberOfShards();
numberOfShards += indexMetaData.getNumberOfShards();
- results.put(index, Collections.synchronizedList(new ArrayList<ShardsSyncedFlushResult>()));
+ results.put(index, Collections.synchronizedList(new ArrayList<>()));
}
if (numberOfShards == 0) {
@@ -130,9 +130,10 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL
final CountDown countDown = new CountDown(numberOfShards);
for (final String index : concreteIndices) {
- final int indexNumberOfShards = state.metaData().index(index).getNumberOfShards();
+ final IndexMetaData indexMetaData = state.metaData().index(index);
+ final int indexNumberOfShards = indexMetaData.getNumberOfShards();
for (int shard = 0; shard < indexNumberOfShards; shard++) {
- final ShardId shardId = new ShardId(index, shard);
+ final ShardId shardId = new ShardId(indexMetaData.getIndex(), shard);
attemptSyncedFlush(shardId, new ActionListener<ShardsSyncedFlushResult>() {
@Override
public void onResponse(ShardsSyncedFlushResult syncedFlushResult) {
@@ -237,13 +238,13 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL
}
final IndexShardRoutingTable getShardRoutingTable(ShardId shardId, ClusterState state) {
- final IndexRoutingTable indexRoutingTable = state.routingTable().index(shardId.index().name());
+ final IndexRoutingTable indexRoutingTable = state.routingTable().index(shardId.getIndexName());
if (indexRoutingTable == null) {
- IndexMetaData index = state.getMetaData().index(shardId.index().getName());
+ IndexMetaData index = state.getMetaData().index(shardId.getIndexName());
if (index != null && index.getState() == IndexMetaData.State.CLOSE) {
- throw new IndexClosedException(shardId.index());
+ throw new IndexClosedException(shardId.getIndex());
}
- throw new IndexNotFoundException(shardId.index().getName());
+ throw new IndexNotFoundException(shardId.getIndexName());
}
final IndexShardRoutingTable shardRoutingTable = indexRoutingTable.shard(shardId.id());
if (shardRoutingTable == null) {
diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java
index c86309db13..8d610dce05 100644
--- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java
+++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java
@@ -83,7 +83,6 @@ public class RecoverySettings extends AbstractComponent {
this.internalActionLongTimeout = INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING.get(settings);
this.activityTimeout = INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING.get(settings);
-
this.maxBytesPerSec = INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.get(settings);
if (maxBytesPerSec.bytes() <= 0) {
rateLimiter = null;
diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java
index 1ef9215b7b..4c2d3d7f60 100644
--- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java
+++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java
@@ -84,7 +84,7 @@ public class RecoverySource extends AbstractComponent implements IndexEventListe
}
private RecoveryResponse recover(final StartRecoveryRequest request) throws IOException {
- final IndexService indexService = indicesService.indexServiceSafe(request.shardId().index().name());
+ final IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex().getName());
final IndexShard shard = indexService.getShard(request.shardId().id());
// starting recovery from that our (the source) shard state is marking the shard to be in recovery mode as well, otherwise
@@ -111,7 +111,7 @@ public class RecoverySource extends AbstractComponent implements IndexEventListe
throw new DelayRecoveryException("source node has the state of the target shard to be [" + targetShardRouting.state() + "], expecting to be [initializing]");
}
- logger.trace("[{}][{}] starting recovery to {}, mark_as_relocated {}", request.shardId().index().name(), request.shardId().id(), request.targetNode(), request.markAsRelocated());
+ logger.trace("[{}][{}] starting recovery to {}, mark_as_relocated {}", request.shardId().getIndex().getName(), request.shardId().id(), request.targetNode(), request.markAsRelocated());
final RecoverySourceHandler handler;
if (shard.indexSettings().isOnSharedFilesystem()) {
handler = new SharedFSRecoverySourceHandler(shard, request, recoverySettings, transportService, logger);
diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java
index 4699e8d5ac..8cbdfca022 100644
--- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java
+++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java
@@ -111,7 +111,7 @@ public class RecoverySourceHandler {
this.recoverySettings = recoverySettings;
this.logger = logger;
this.transportService = transportService;
- this.indexName = this.request.shardId().index().name();
+ this.indexName = this.request.shardId().getIndex().getName();
this.shardId = this.request.shardId().id();
this.chunkSizeInBytes = recoverySettings.getChunkSize().bytesAsInt();
this.response = new RecoveryResponse();
@@ -252,60 +252,58 @@ public class RecoverySourceHandler {
final AtomicLong bytesSinceLastPause = new AtomicLong();
final Function<StoreFileMetaData, OutputStream> outputStreamFactories = (md) -> new BufferedOutputStream(new RecoveryOutputStream(md, bytesSinceLastPause, translogView), chunkSizeInBytes);
sendFiles(store, phase1Files.toArray(new StoreFileMetaData[phase1Files.size()]), outputStreamFactories);
- cancellableThreads.execute(() -> {
- // Send the CLEAN_FILES request, which takes all of the files that
- // were transferred and renames them from their temporary file
- // names to the actual file names. It also writes checksums for
- // the files after they have been renamed.
- //
- // Once the files have been renamed, any other files that are not
- // related to this recovery (out of date segments, for example)
- // are deleted
- try {
+ // Send the CLEAN_FILES request, which takes all of the files that
+ // were transferred and renames them from their temporary file
+ // names to the actual file names. It also writes checksums for
+ // the files after they have been renamed.
+ //
+ // Once the files have been renamed, any other files that are not
+ // related to this recovery (out of date segments, for example)
+ // are deleted
+ try {
+ cancellableThreads.execute(() -> {
transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.CLEAN_FILES,
- new RecoveryCleanFilesRequest(request.recoveryId(), shard.shardId(), recoverySourceMetadata, translogView.totalOperations()),
- TransportRequestOptions.builder().withTimeout(recoverySettings.internalActionTimeout()).build(),
- EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
- } catch (RemoteTransportException remoteException) {
- final IOException corruptIndexException;
- // we realized that after the index was copied and we wanted to finalize the recovery
- // the index was corrupted:
- // - maybe due to a broken segments file on an empty index (transferred with no checksum)
- // - maybe due to old segments without checksums or length only checks
- if ((corruptIndexException = ExceptionsHelper.unwrapCorruption(remoteException)) != null) {
- try {
- final Store.MetadataSnapshot recoverySourceMetadata1 = store.getMetadata(snapshot);
- StoreFileMetaData[] metadata =
- StreamSupport.stream(recoverySourceMetadata1.spliterator(), false).toArray(size -> new StoreFileMetaData[size]);
- ArrayUtil.timSort(metadata, new Comparator<StoreFileMetaData>() {
- @Override
- public int compare(StoreFileMetaData o1, StoreFileMetaData o2) {
- return Long.compare(o1.length(), o2.length()); // check small files first
- }
- });
- for (StoreFileMetaData md : metadata) {
- logger.debug("{} checking integrity for file {} after remove corruption exception", shard.shardId(), md);
- if (store.checkIntegrityNoException(md) == false) { // we are corrupted on the primary -- fail!
- shard.failShard("recovery", corruptIndexException);
- logger.warn("{} Corrupted file detected {} checksum mismatch", shard.shardId(), md);
- throw corruptIndexException;
- }
+ new RecoveryCleanFilesRequest(request.recoveryId(), shard.shardId(), recoverySourceMetadata, translogView.totalOperations()),
+ TransportRequestOptions.builder().withTimeout(recoverySettings.internalActionTimeout()).build(),
+ EmptyTransportResponseHandler.INSTANCE_SAME).txGet();
+ });
+ } catch (RemoteTransportException remoteException) {
+ final IOException corruptIndexException;
+ // we realized that after the index was copied and we wanted to finalize the recovery
+ // the index was corrupted:
+ // - maybe due to a broken segments file on an empty index (transferred with no checksum)
+ // - maybe due to old segments without checksums or length only checks
+ if ((corruptIndexException = ExceptionsHelper.unwrapCorruption(remoteException)) != null) {
+ try {
+ final Store.MetadataSnapshot recoverySourceMetadata1 = store.getMetadata(snapshot);
+ StoreFileMetaData[] metadata =
+ StreamSupport.stream(recoverySourceMetadata1.spliterator(), false).toArray(size -> new StoreFileMetaData[size]);
+ ArrayUtil.timSort(metadata, (o1, o2) -> {
+ return Long.compare(o1.length(), o2.length()); // check small files first
+ });
+ for (StoreFileMetaData md : metadata) {
+ cancellableThreads.checkForCancel();
+ logger.debug("{} checking integrity for file {} after remove corruption exception", shard.shardId(), md);
+ if (store.checkIntegrityNoException(md) == false) { // we are corrupted on the primary -- fail!
+ shard.failShard("recovery", corruptIndexException);
+ logger.warn("{} Corrupted file detected {} checksum mismatch", shard.shardId(), md);
+ throw corruptIndexException;
}
- } catch (IOException ex) {
- remoteException.addSuppressed(ex);
- throw remoteException;
}
- // corruption has happened on the way to replica
- RemoteTransportException exception = new RemoteTransportException("File corruption occurred on recovery but checksums are ok", null);
- exception.addSuppressed(remoteException);
- logger.warn("{} Remote file corruption during finalization on node {}, recovering {}. local checksum OK",
- corruptIndexException, shard.shardId(), request.targetNode());
- throw exception;
- } else {
+ } catch (IOException ex) {
+ remoteException.addSuppressed(ex);
throw remoteException;
}
+ // corruption has happened on the way to replica
+ RemoteTransportException exception = new RemoteTransportException("File corruption occurred on recovery but checksums are ok", null);
+ exception.addSuppressed(remoteException);
+ logger.warn("{} Remote file corruption during finalization on node {}, recovering {}. local checksum OK",
+ corruptIndexException, shard.shardId(), request.targetNode());
+ throw exception;
+ } else {
+ throw remoteException;
}
- });
+ }
}
prepareTargetForTranslog(translogView.totalOperations());
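
The restructuring above moves the try/catch outside cancellableThreads.execute(), so the RemoteTransportException is handled by the caller rather than inside the interruptible block, and checkForCancel() now runs between file integrity checks so a cancelled recovery aborts promptly. A minimal sketch of that control flow, where CancellableGuard is a hypothetical stand-in for Elasticsearch's CancellableThreads:

    public class CancellableDemo {
        static final class CancelledException extends RuntimeException {}

        static final class CancellableGuard {
            private volatile boolean cancelled;
            void cancel() { cancelled = true; }
            void checkForCancel() {
                if (cancelled) throw new CancelledException();
            }
            void execute(Runnable block) {
                checkForCancel();
                block.run();
            }
        }

        public static void main(String[] args) {
            CancellableGuard guard = new CancellableGuard();
            try {
                // stands in for the CLEAN_FILES transport call inside execute()
                guard.execute(() -> { throw new IllegalStateException("remote failure"); });
            } catch (IllegalStateException remoteException) {
                // handled outside the guard, like the RemoteTransportException above
                for (int file = 0; file < 3; file++) {
                    guard.checkForCancel(); // abort integrity checks promptly if cancelled
                    System.out.println("checking integrity for file " + file);
                }
            }
        }
    }
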
diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java
index f7e683b8f1..0912a22a0f 100644
--- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java
+++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java
@@ -182,7 +182,7 @@ public class RecoveryTarget extends AbstractComponent implements IndexEventListe
final AtomicReference<RecoveryResponse> responseHolder = new AtomicReference<>();
try {
- logger.trace("[{}][{}] starting recovery from {}", request.shardId().index().name(), request.shardId().id(), request.sourceNode());
+ logger.trace("[{}][{}] starting recovery from {}", request.shardId().getIndex().getName(), request.shardId().id(), request.sourceNode());
recoveryStatus.indexShard().prepareForIndexRecovery();
recoveryStatus.CancellableThreads().execute(new CancellableThreads.Interruptable() {
@Override
@@ -202,7 +202,7 @@ public class RecoveryTarget extends AbstractComponent implements IndexEventListe
onGoingRecoveries.markRecoveryAsDone(recoveryStatus.recoveryId());
if (logger.isTraceEnabled()) {
StringBuilder sb = new StringBuilder();
- sb.append('[').append(request.shardId().index().name()).append(']').append('[').append(request.shardId().id()).append("] ");
+ sb.append('[').append(request.shardId().getIndex().getName()).append(']').append('[').append(request.shardId().id()).append("] ");
sb.append("recovery completed from ").append(request.sourceNode()).append(", took[").append(recoveryTime).append("]\n");
sb.append(" phase1: recovered_files [").append(recoveryResponse.phase1FileNames.size()).append("]").append(" with total_size of [").append(new ByteSizeValue(recoveryResponse.phase1TotalSize)).append("]")
.append(", took [").append(timeValueMillis(recoveryResponse.phase1Time)).append("], throttling_wait [").append(timeValueMillis(recoveryResponse.phase1ThrottlingWaitTime)).append(']')
@@ -220,7 +220,7 @@ public class RecoveryTarget extends AbstractComponent implements IndexEventListe
logger.trace("recovery cancelled", e);
} catch (Throwable e) {
if (logger.isTraceEnabled()) {
- logger.trace("[{}][{}] Got exception on recovery", e, request.shardId().index().name(), request.shardId().id());
+ logger.trace("[{}][{}] Got exception on recovery", e, request.shardId().getIndex().getName(), request.shardId().id());
}
Throwable cause = ExceptionsHelper.unwrapCause(e);
if (cause instanceof CancellableThreads.ExecutionCancelledException) {
@@ -308,7 +308,7 @@ public class RecoveryTarget extends AbstractComponent implements IndexEventListe
@Override
public void messageReceived(final RecoveryTranslogOperationsRequest request, final TransportChannel channel) throws Exception {
try (RecoveriesCollection.StatusRef statusRef = onGoingRecoveries.getStatusSafe(request.recoveryId(), request.shardId())) {
- final ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger);
+ final ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger, threadPool.getThreadContext());
final RecoveryStatus recoveryStatus = statusRef.status();
final RecoveryState.Translog translog = recoveryStatus.state().getTranslog();
translog.totalOperations(request.totalTranslogOps());
diff --git a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java
index dc483932fe..6d225af43d 100644
--- a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java
+++ b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java
@@ -35,6 +35,7 @@ import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
@@ -68,33 +69,29 @@ import java.util.concurrent.atomic.AtomicInteger;
public class IndicesStore extends AbstractComponent implements ClusterStateListener, Closeable {
// TODO this class can be folded into IndicesService, and partially into IndicesClusterStateService; there is no need for a separate public service
- public static final String INDICES_STORE_DELETE_SHARD_TIMEOUT = "indices.store.delete.shard.timeout";
+ public static final Setting<TimeValue> INDICES_STORE_DELETE_SHARD_TIMEOUT = Setting.positiveTimeSetting("indices.store.delete.shard.timeout", new TimeValue(30, TimeUnit.SECONDS), false, Setting.Scope.CLUSTER);
public static final String ACTION_SHARD_EXISTS = "internal:index/shard/exists";
private static final EnumSet<IndexShardState> ACTIVE_STATES = EnumSet.of(IndexShardState.STARTED, IndexShardState.RELOCATED);
private final IndicesService indicesService;
private final ClusterService clusterService;
private final TransportService transportService;
+ private final ThreadPool threadPool;
private TimeValue deleteShardTimeout;
@Inject
public IndicesStore(Settings settings, IndicesService indicesService,
- ClusterService clusterService, TransportService transportService) {
+ ClusterService clusterService, TransportService transportService, ThreadPool threadPool) {
super(settings);
this.indicesService = indicesService;
this.clusterService = clusterService;
this.transportService = transportService;
+ this.threadPool = threadPool;
transportService.registerRequestHandler(ACTION_SHARD_EXISTS, ShardActiveRequest::new, ThreadPool.Names.SAME, new ShardActiveRequestHandler());
- this.deleteShardTimeout = settings.getAsTime(INDICES_STORE_DELETE_SHARD_TIMEOUT, new TimeValue(30, TimeUnit.SECONDS));
+ this.deleteShardTimeout = INDICES_STORE_DELETE_SHARD_TIMEOUT.get(settings);
clusterService.addLast(this);
}
- IndicesStore() {
- super(Settings.EMPTY);
- indicesService = null;
- this.clusterService = null;
- this.transportService = null;
- }
@Override
public void close() {
clusterService.remove(this);
@@ -111,11 +108,12 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe
}
for (IndexRoutingTable indexRoutingTable : event.state().routingTable()) {
- IndexSettings indexSettings = new IndexSettings(event.state().getMetaData().index(indexRoutingTable.index()), settings);
// Note, closed indices will not have any routing information, so won't be deleted
for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
if (shardCanBeDeleted(event.state(), indexShardRoutingTable)) {
ShardId shardId = indexShardRoutingTable.shardId();
+ IndexService indexService = indicesService.indexService(indexRoutingTable.getIndex());
+ IndexSettings indexSettings = indexService != null ? indexService.getIndexSettings() : new IndexSettings(event.state().getMetaData().index(indexRoutingTable.getIndex()), settings);
if (indicesService.canDeleteShardContent(shardId, indexSettings)) {
deleteShardIfExistElseWhere(event.state(), indexShardRoutingTable);
}
@@ -278,6 +276,7 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe
@Override
public void messageReceived(final ShardActiveRequest request, final TransportChannel channel) throws Exception {
IndexShard indexShard = getShard(request);
+
// make sure shard is really there before register cluster state observer
if (indexShard == null) {
channel.sendResponse(new ShardActiveResponse(false, clusterService.localNode()));
@@ -288,7 +287,7 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe
// in general, using a cluster state observer here is a workaround for the fact that we cannot listen on shard state changes explicitly.
// instead we wait for the cluster state changes because we know any shard state change will trigger or be
// triggered by a cluster state change.
- ClusterStateObserver observer = new ClusterStateObserver(clusterService, request.timeout, logger);
+ ClusterStateObserver observer = new ClusterStateObserver(clusterService, request.timeout, logger, threadPool.getThreadContext());
// check if shard is active. if so, all is good
boolean shardActive = shardActive(indexShard);
if (shardActive) {
@@ -348,14 +347,14 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe
logger.trace("shard exists request meant for cluster[{}], but this is cluster[{}], ignoring request", request.clusterName, thisClusterName);
return null;
}
-
ShardId shardId = request.shardId;
- IndexService indexService = indicesService.indexService(shardId.index().getName());
+ IndexService indexService = indicesService.indexService(shardId.getIndexName());
if (indexService != null && indexService.indexUUID().equals(request.indexUUID)) {
return indexService.getShardOrNull(shardId.id());
}
return null;
}
+
}
private static class ShardActiveRequest extends TransportRequest {
diff --git a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java
index 6a6b05c4ad..bcc2d7f74c 100644
--- a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java
+++ b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java
@@ -34,7 +34,6 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@@ -43,7 +42,6 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.gateway.AsyncShardFetch;
-import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.shard.IndexShard;
@@ -57,7 +55,6 @@ import org.elasticsearch.transport.TransportService;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
@@ -129,7 +126,7 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction<T
@Override
protected NodeStoreFilesMetaData nodeOperation(NodeRequest request) {
if (request.unallocated) {
- IndexService indexService = indicesService.indexService(request.shardId.index().name());
+ IndexService indexService = indicesService.indexService(request.shardId.getIndexName());
if (indexService == null) {
return new NodeStoreFilesMetaData(clusterService.localNode(), null);
}
@@ -137,7 +134,7 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction<T
return new NodeStoreFilesMetaData(clusterService.localNode(), null);
}
}
- IndexMetaData metaData = clusterService.state().metaData().index(request.shardId.index().name());
+ IndexMetaData metaData = clusterService.state().metaData().index(request.shardId.getIndexName());
if (metaData == null) {
return new NodeStoreFilesMetaData(clusterService.localNode(), null);
}
@@ -153,7 +150,7 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction<T
long startTimeNS = System.nanoTime();
boolean exists = false;
try {
- IndexService indexService = indicesService.indexService(shardId.index().name());
+ IndexService indexService = indicesService.indexService(shardId.getIndexName());
if (indexService != null) {
IndexShard indexShard = indexService.getShardOrNull(shardId.id());
if (indexShard != null) {
@@ -168,7 +165,7 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction<T
}
}
// try and see if we can list store metadata for an unallocated shard
- IndexMetaData metaData = clusterService.state().metaData().index(shardId.index().name());
+ IndexMetaData metaData = clusterService.state().metaData().index(shardId.getIndexName());
if (metaData == null) {
return new StoreFilesMetaData(false, shardId, Store.MetadataSnapshot.EMPTY);
}
@@ -177,7 +174,7 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction<T
if (shardPath == null) {
return new StoreFilesMetaData(false, shardId, Store.MetadataSnapshot.EMPTY);
}
- return new StoreFilesMetaData(false, shardId, Store.readMetadataSnapshot(shardPath.resolveIndex(), logger));
+ return new StoreFilesMetaData(false, shardId, Store.readMetadataSnapshot(shardPath.resolveIndex(), shardId, logger));
} finally {
TimeValue took = new TimeValue(System.nanoTime() - startTimeNS, TimeUnit.NANOSECONDS);
if (exists) {
@@ -341,7 +338,7 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction<T
}
NodeRequest(String nodeId, TransportNodesListShardStoreMetaData.Request request) {
- super(request, nodeId);
+ super(nodeId);
this.shardId = request.shardId;
this.unallocated = request.unallocated;
}
diff --git a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java
index e1b6f2ddbd..e4537b876f 100644
--- a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java
+++ b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java
@@ -159,7 +159,7 @@ public class IndicesTTLService extends AbstractLifecycleComponent<IndicesTTLServ
MetaData metaData = clusterService.state().metaData();
for (IndexService indexService : indicesService) {
// check the value of disable_purge for this index
- IndexMetaData indexMetaData = metaData.index(indexService.index().name());
+ IndexMetaData indexMetaData = metaData.index(indexService.index().getName());
if (indexMetaData == null) {
continue;
}
@@ -205,7 +205,7 @@ public class IndicesTTLService extends AbstractLifecycleComponent<IndicesTTLServ
BulkRequest bulkRequest = new BulkRequest();
for (DocToPurge docToPurge : docsToPurge) {
- bulkRequest.add(new DeleteRequest().index(shardToPurge.routingEntry().index()).type(docToPurge.type).id(docToPurge.id).version(docToPurge.version).routing(docToPurge.routing));
+ bulkRequest.add(new DeleteRequest().index(shardToPurge.routingEntry().getIndexName()).type(docToPurge.type).id(docToPurge.id).version(docToPurge.version).routing(docToPurge.routing));
bulkRequest = processBulkIfNeeded(bulkRequest, false);
}
processBulkIfNeeded(bulkRequest, true);
diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestMetadata.java b/core/src/main/java/org/elasticsearch/ingest/IngestMetadata.java
new file mode 100644
index 0000000000..32fade45e9
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/IngestMetadata.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest;
+
+import org.elasticsearch.cluster.AbstractDiffable;
+import org.elasticsearch.cluster.metadata.MetaData;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Holds the ingest pipelines that are available in the cluster
+ */
+public final class IngestMetadata extends AbstractDiffable<MetaData.Custom> implements MetaData.Custom {
+
+ public final static String TYPE = "ingest";
+ public final static IngestMetadata PROTO = new IngestMetadata();
+ private static final ParseField PIPELINES_FIELD = new ParseField("pipeline");
+ private static final ObjectParser<List<PipelineConfiguration>, Void> INGEST_METADATA_PARSER = new ObjectParser<>("ingest_metadata", ArrayList::new);
+
+ static {
+ INGEST_METADATA_PARSER.declareObjectArray(List::addAll, PipelineConfiguration.getParser(), PIPELINES_FIELD);
+ }
+
+
+ // We can't use the Pipeline class directly in cluster state, because we don't have the processor factories around when
+ // IngestMetadata is registered as custom metadata.
+ private final Map<String, PipelineConfiguration> pipelines;
+
+ private IngestMetadata() {
+ this.pipelines = Collections.emptyMap();
+ }
+
+ public IngestMetadata(Map<String, PipelineConfiguration> pipelines) {
+ this.pipelines = Collections.unmodifiableMap(pipelines);
+ }
+
+ @Override
+ public String type() {
+ return TYPE;
+ }
+
+ public Map<String, PipelineConfiguration> getPipelines() {
+ return pipelines;
+ }
+
+ @Override
+ public MetaData.Custom readFrom(StreamInput in) throws IOException {
+ int size = in.readVInt();
+ Map<String, PipelineConfiguration> pipelines = new HashMap<>(size);
+ for (int i = 0; i < size; i++) {
+ PipelineConfiguration pipeline = PipelineConfiguration.readPipelineConfiguration(in);
+ pipelines.put(pipeline.getId(), pipeline);
+ }
+ return new IngestMetadata(pipelines);
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeVInt(pipelines.size());
+ for (PipelineConfiguration pipeline : pipelines.values()) {
+ pipeline.writeTo(out);
+ }
+ }
+
+ @Override
+ public MetaData.Custom fromXContent(XContentParser parser) throws IOException {
+ Map<String, PipelineConfiguration> pipelines = new HashMap<>();
+ List<PipelineConfiguration> configs = INGEST_METADATA_PARSER.parse(parser);
+ for (PipelineConfiguration pipeline : configs) {
+ pipelines.put(pipeline.getId(), pipeline);
+ }
+ return new IngestMetadata(pipelines);
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startArray(PIPELINES_FIELD.getPreferredName());
+ for (PipelineConfiguration pipeline : pipelines.values()) {
+ pipeline.toXContent(builder, params);
+ }
+ builder.endArray();
+ return builder;
+ }
+
+ @Override
+ public EnumSet<MetaData.XContentContext> context() {
+ return MetaData.API_AND_GATEWAY;
+ }
+
+}
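A minimal sketch of how this custom metadata can be built and read back, using only the constructors above; the pipeline id and JSON source are illustrative:

    import org.elasticsearch.common.bytes.BytesArray;
    import org.elasticsearch.ingest.IngestMetadata;
    import org.elasticsearch.ingest.PipelineConfiguration;

    import java.util.HashMap;
    import java.util.Map;

    class IngestMetadataSketch {
        public static void main(String[] args) {
            Map<String, PipelineConfiguration> pipelines = new HashMap<>();
            pipelines.put("my-pipeline", new PipelineConfiguration("my-pipeline",
                    new BytesArray("{\"processors\": []}")));
            IngestMetadata metadata = new IngestMetadata(pipelines);
            // The constructor wraps the map in Collections.unmodifiableMap,
            // so callers of getPipelines() can read but not mutate it.
            System.out.println(metadata.getPipelines().keySet());
        }
    }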
diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestService.java b/core/src/main/java/org/elasticsearch/ingest/IngestService.java
new file mode 100644
index 0000000000..8af82b28a3
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/IngestService.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest;
+
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.threadpool.ThreadPool;
+
+import java.io.Closeable;
+import java.io.IOException;
+
+/**
+ * Holder class for several ingest related services.
+ */
+public class IngestService implements Closeable {
+
+ private final PipelineStore pipelineStore;
+ private final PipelineExecutionService pipelineExecutionService;
+ private final ProcessorsRegistry processorsRegistry;
+
+ public IngestService(Settings settings, ThreadPool threadPool, ProcessorsRegistry processorsRegistry) {
+ this.processorsRegistry = processorsRegistry;
+ this.pipelineStore = new PipelineStore(settings);
+ this.pipelineExecutionService = new PipelineExecutionService(pipelineStore, threadPool);
+ }
+
+ public PipelineStore getPipelineStore() {
+ return pipelineStore;
+ }
+
+ public PipelineExecutionService getPipelineExecutionService() {
+ return pipelineExecutionService;
+ }
+
+ public void setScriptService(ScriptService scriptService) {
+ pipelineStore.buildProcessorFactoryRegistry(processorsRegistry, scriptService);
+ }
+
+ @Override
+ public void close() throws IOException {
+ pipelineStore.close();
+ }
+
+}
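A sketch of the node-level wiring this holder implies, assuming a ThreadPool and ScriptService are already available from the node; the wiring site and names are illustrative:

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.ingest.IngestService;
    import org.elasticsearch.ingest.PipelineExecutionService;
    import org.elasticsearch.ingest.ProcessorsRegistry;
    import org.elasticsearch.script.ScriptService;
    import org.elasticsearch.threadpool.ThreadPool;

    class IngestWiringSketch {
        static PipelineExecutionService wire(ThreadPool threadPool, ScriptService scriptService) {
            IngestService ingestService = new IngestService(Settings.EMPTY, threadPool, new ProcessorsRegistry());
            // The ScriptService is injected after construction; this call builds
            // the processor factory registry inside the PipelineStore.
            ingestService.setScriptService(scriptService);
            return ingestService.getPipelineExecutionService();
        }
    }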
diff --git a/core/src/main/java/org/elasticsearch/ingest/InternalTemplateService.java b/core/src/main/java/org/elasticsearch/ingest/InternalTemplateService.java
new file mode 100644
index 0000000000..f13fba942a
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/InternalTemplateService.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest;
+
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.ingest.core.TemplateService;
+import org.elasticsearch.script.CompiledScript;
+import org.elasticsearch.script.ExecutableScript;
+import org.elasticsearch.script.Script;
+import org.elasticsearch.script.ScriptContext;
+import org.elasticsearch.script.ScriptService;
+
+import java.util.Collections;
+import java.util.Map;
+
+public class InternalTemplateService implements TemplateService {
+
+ private final ScriptService scriptService;
+
+ InternalTemplateService(ScriptService scriptService) {
+ this.scriptService = scriptService;
+ }
+
+ @Override
+ public Template compile(String template) {
+ int mustacheStart = template.indexOf("{{");
+ int mustacheEnd = template.indexOf("}}");
+ if (mustacheStart != -1 && mustacheEnd != -1 && mustacheStart < mustacheEnd) {
+ Script script = new Script(template, ScriptService.ScriptType.INLINE, "mustache", Collections.emptyMap());
+ CompiledScript compiledScript = scriptService.compile(
+ script,
+ ScriptContext.Standard.INGEST,
+ Collections.emptyMap()
+ );
+ return new Template() {
+ @Override
+ public String execute(Map<String, Object> model) {
+ ExecutableScript executableScript = scriptService.executable(compiledScript, model);
+ Object result = executableScript.run();
+ if (result instanceof BytesReference) {
+ return ((BytesReference) result).toUtf8();
+ }
+ return String.valueOf(result);
+ }
+
+ @Override
+ public String getKey() {
+ return template;
+ }
+ };
+ } else {
+ return new StringTemplate(template);
+ }
+ }
+
+ class StringTemplate implements Template {
+
+ private final String value;
+
+ public StringTemplate(String value) {
+ this.value = value;
+ }
+
+ @Override
+ public String execute(Map<String, Object> model) {
+ return value;
+ }
+
+ @Override
+ public String getKey() {
+ return value;
+ }
+ }
+}
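compile() above branches on the presence of a "{{...}}" pair. A small sketch of that contract, written against the TemplateService interface since this class's constructor is package-private:

    import org.elasticsearch.ingest.core.TemplateService;

    import java.util.Collections;

    class TemplateSketch {
        static void demo(TemplateService templateService) {
            // No "{{...}}" pair: a constant StringTemplate that ignores the model.
            TemplateService.Template plain = templateService.compile("logs-index");
            assert "logs-index".equals(plain.execute(Collections.emptyMap()));

            // A "{{...}}" pair triggers inline mustache compilation via the ScriptService.
            TemplateService.Template dynamic = templateService.compile("logs-{{date}}");
            assert "logs-{{date}}".equals(dynamic.getKey());
        }
    }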
diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java b/core/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java
new file mode 100644
index 0000000000..3bd80edc30
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.function.BiFunction;
+
+/**
+ * Encapsulates a pipeline's id and configuration as a blob
+ */
+public final class PipelineConfiguration implements Writeable<PipelineConfiguration>, ToXContent {
+
+ private final static PipelineConfiguration PROTOTYPE = new PipelineConfiguration(null, null);
+
+ public static PipelineConfiguration readPipelineConfiguration(StreamInput in) throws IOException {
+ return PROTOTYPE.readFrom(in);
+ }
+ private final static ObjectParser<Builder, Void> PARSER = new ObjectParser<>("pipeline_config", Builder::new);
+ static {
+ PARSER.declareString(Builder::setId, new ParseField("id"));
+ PARSER.declareField((parser, builder, aVoid) -> {
+ XContentBuilder contentBuilder = XContentBuilder.builder(parser.contentType().xContent());
+ XContentHelper.copyCurrentStructure(contentBuilder.generator(), parser);
+ builder.setConfig(contentBuilder.bytes());
+ }, new ParseField("config"), ObjectParser.ValueType.OBJECT);
+ }
+
+ public static BiFunction<XContentParser, Void, PipelineConfiguration> getParser() {
+ return (p, c) -> PARSER.apply(p, c).build();
+ }
+ private static class Builder {
+
+ private String id;
+ private BytesReference config;
+
+ void setId(String id) {
+ this.id = id;
+ }
+
+ void setConfig(BytesReference config) {
+ this.config = config;
+ }
+
+ PipelineConfiguration build() {
+ return new PipelineConfiguration(id, config);
+ }
+ }
+
+ private final String id;
+ // Store the config as a bytes reference, because the config is only used when the pipeline store reads the cluster state,
+ // and the way the map-of-maps config is read requires a deep copy (it removes entries rather than getting them, in order to check for unused options).
+ // Also, the get pipeline API returns this directly to the caller.
+ private final BytesReference config;
+
+ public PipelineConfiguration(String id, BytesReference config) {
+ this.id = id;
+ this.config = config;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public Map<String, Object> getConfigAsMap() {
+ return XContentHelper.convertToMap(config, true).v2();
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field("id", id);
+ builder.field("config", getConfigAsMap());
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public PipelineConfiguration readFrom(StreamInput in) throws IOException {
+ return new PipelineConfiguration(in.readString(), in.readBytesReference());
+ }
+
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeString(id);
+ out.writeBytesReference(config);
+ }
+
+}
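A short sketch of the round trip this class supports; the id and JSON bytes are illustrative:

    import org.elasticsearch.common.bytes.BytesArray;
    import org.elasticsearch.ingest.PipelineConfiguration;

    import java.util.Map;

    class PipelineConfigSketch {
        public static void main(String[] args) {
            // The config is kept as raw bytes; getConfigAsMap() deserializes on demand,
            // which matters because the pipeline factory consumes (removes) map entries.
            PipelineConfiguration config = new PipelineConfiguration("my-pipeline",
                    new BytesArray("{\"description\":\"example\",\"processors\":[]}"));
            Map<String, Object> asMap = config.getConfigAsMap();
            System.out.println(config.getId() + " -> " + asMap);
        }
    }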
diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java
new file mode 100644
index 0000000000..c6a3b4b843
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java
@@ -0,0 +1,124 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.util.concurrent.AbstractRunnable;
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.core.Pipeline;
+import org.elasticsearch.threadpool.ThreadPool;
+
+import java.util.Map;
+import java.util.function.BiConsumer;
+import java.util.function.Consumer;
+
+public class PipelineExecutionService {
+
+ private final PipelineStore store;
+ private final ThreadPool threadPool;
+
+ public PipelineExecutionService(PipelineStore store, ThreadPool threadPool) {
+ this.store = store;
+ this.threadPool = threadPool;
+ }
+
+ public void execute(IndexRequest request, Consumer<Throwable> failureHandler, Consumer<Boolean> completionHandler) {
+ Pipeline pipeline = getPipeline(request.getPipeline());
+ threadPool.executor(ThreadPool.Names.INDEX).execute(new AbstractRunnable() {
+
+ @Override
+ public void onFailure(Throwable t) {
+ failureHandler.accept(t);
+ }
+
+ @Override
+ protected void doRun() throws Exception {
+ innerExecute(request, pipeline);
+ completionHandler.accept(true);
+ }
+ });
+ }
+
+ public void execute(Iterable<ActionRequest<?>> actionRequests,
+ BiConsumer<IndexRequest, Throwable> itemFailureHandler,
+ Consumer<Throwable> completionHandler) {
+ threadPool.executor(ThreadPool.Names.INDEX).execute(new AbstractRunnable() {
+
+ @Override
+ public void onFailure(Throwable t) {
+ completionHandler.accept(t);
+ }
+
+ @Override
+ protected void doRun() throws Exception {
+ for (ActionRequest actionRequest : actionRequests) {
+ if (actionRequest instanceof IndexRequest) {
+ IndexRequest indexRequest = (IndexRequest) actionRequest;
+ if (Strings.hasText(indexRequest.getPipeline())) {
+ try {
+ innerExecute(indexRequest, getPipeline(indexRequest.getPipeline()));
+ // this shouldn't be needed here, but we do it for consistency with the index API, which requires it to prevent double execution
+ indexRequest.setPipeline(null);
+ } catch (Throwable e) {
+ itemFailureHandler.accept(indexRequest, e);
+ }
+ }
+ }
+ }
+ completionHandler.accept(null);
+ }
+ });
+ }
+
+ private void innerExecute(IndexRequest indexRequest, Pipeline pipeline) throws Exception {
+ String index = indexRequest.index();
+ String type = indexRequest.type();
+ String id = indexRequest.id();
+ String routing = indexRequest.routing();
+ String parent = indexRequest.parent();
+ String timestamp = indexRequest.timestamp();
+ String ttl = indexRequest.ttl() == null ? null : indexRequest.ttl().toString();
+ Map<String, Object> sourceAsMap = indexRequest.sourceAsMap();
+ IngestDocument ingestDocument = new IngestDocument(index, type, id, routing, parent, timestamp, ttl, sourceAsMap);
+ pipeline.execute(ingestDocument);
+
+ Map<IngestDocument.MetaData, String> metadataMap = ingestDocument.extractMetadata();
+ // it's fine to set all metadata fields every time, since the ingest document holds their starting values
+ // from before ingestion, and they may also have been modified during ingestion.
+ indexRequest.index(metadataMap.get(IngestDocument.MetaData.INDEX));
+ indexRequest.type(metadataMap.get(IngestDocument.MetaData.TYPE));
+ indexRequest.id(metadataMap.get(IngestDocument.MetaData.ID));
+ indexRequest.routing(metadataMap.get(IngestDocument.MetaData.ROUTING));
+ indexRequest.parent(metadataMap.get(IngestDocument.MetaData.PARENT));
+ indexRequest.timestamp(metadataMap.get(IngestDocument.MetaData.TIMESTAMP));
+ indexRequest.ttl(metadataMap.get(IngestDocument.MetaData.TTL));
+ indexRequest.source(ingestDocument.getSourceAndMetadata());
+ }
+
+ private Pipeline getPipeline(String pipelineId) {
+ Pipeline pipeline = store.get(pipelineId);
+ if (pipeline == null) {
+ throw new IllegalArgumentException("pipeline with id [" + pipelineId + "] does not exist");
+ }
+ return pipeline;
+ }
+}
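A sketch of the single-document path above, assuming a pipeline with the given id already exists in the store; setPipeline/getPipeline come from the companion IndexRequest changes in this branch:

    import org.elasticsearch.action.index.IndexRequest;
    import org.elasticsearch.ingest.PipelineExecutionService;

    class ExecutionSketch {
        static void demo(PipelineExecutionService executionService) {
            IndexRequest request = new IndexRequest("logs", "event", "1").source("message", "hello");
            request.setPipeline("my-pipeline");
            executionService.execute(request,
                    failure -> failure.printStackTrace(),   // some processor threw
                    ok -> { /* request source and metadata now reflect the pipeline's changes */ });
        }
    }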
diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java
new file mode 100644
index 0000000000..21128a94b6
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/PipelineStore.java
@@ -0,0 +1,249 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest;
+
+import org.apache.lucene.util.IOUtils;
+import org.elasticsearch.ResourceNotFoundException;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ingest.DeletePipelineRequest;
+import org.elasticsearch.action.ingest.PutPipelineRequest;
+import org.elasticsearch.action.ingest.WritePipelineResponse;
+import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
+import org.elasticsearch.cluster.ClusterChangedEvent;
+import org.elasticsearch.cluster.ClusterService;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.ClusterStateListener;
+import org.elasticsearch.cluster.metadata.MetaData;
+import org.elasticsearch.common.component.AbstractComponent;
+import org.elasticsearch.common.regex.Regex;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.ingest.core.Pipeline;
+import org.elasticsearch.ingest.core.PipelineFactoryError;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.ingest.core.TemplateService;
+import org.elasticsearch.ingest.processor.ConfigurationPropertyException;
+import org.elasticsearch.script.ScriptService;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+
+public class PipelineStore extends AbstractComponent implements Closeable, ClusterStateListener {
+
+ private final Pipeline.Factory factory = new Pipeline.Factory();
+ private Map<String, Processor.Factory> processorFactoryRegistry;
+
+ // Ideally this should be in IngestMetadata class, but we don't have the processor factories around there.
+ // We only know all the processor factories once a node and all of its plugins have been initialized. Also some
+ // processor factories rely on other node services. Custom metadata is statically registered when classes
+ // are loaded, so in the cluster state we just save the pipeline config and here we keep the actual pipelines around.
+ volatile Map<String, Pipeline> pipelines = new HashMap<>();
+
+ public PipelineStore(Settings settings) {
+ super(settings);
+ }
+
+ public void buildProcessorFactoryRegistry(ProcessorsRegistry processorsRegistry, ScriptService scriptService) {
+ Map<String, Processor.Factory> processorFactories = new HashMap<>();
+ TemplateService templateService = new InternalTemplateService(scriptService);
+ for (Map.Entry<String, Function<TemplateService, Processor.Factory<?>>> entry : processorsRegistry.entrySet()) {
+ Processor.Factory processorFactory = entry.getValue().apply(templateService);
+ processorFactories.put(entry.getKey(), processorFactory);
+ }
+ this.processorFactoryRegistry = Collections.unmodifiableMap(processorFactories);
+ }
+
+ @Override
+ public void close() throws IOException {
+ // TODO: When org.elasticsearch.node.Node can close Closeable instances we should try to remove this code,
+ // since any wired Closeable should be able to close itself
+ List<Closeable> closeables = new ArrayList<>();
+ for (Processor.Factory factory : processorFactoryRegistry.values()) {
+ if (factory instanceof Closeable) {
+ closeables.add((Closeable) factory);
+ }
+ }
+ IOUtils.close(closeables);
+ }
+
+ @Override
+ public void clusterChanged(ClusterChangedEvent event) {
+ innerUpdatePipelines(event.state());
+ }
+
+ void innerUpdatePipelines(ClusterState state) {
+ IngestMetadata ingestMetadata = state.getMetaData().custom(IngestMetadata.TYPE);
+ if (ingestMetadata == null) {
+ return;
+ }
+
+ Map<String, Pipeline> pipelines = new HashMap<>();
+ for (PipelineConfiguration pipeline : ingestMetadata.getPipelines().values()) {
+ try {
+ pipelines.put(pipeline.getId(), factory.create(pipeline.getId(), pipeline.getConfigAsMap(), processorFactoryRegistry));
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+ this.pipelines = Collections.unmodifiableMap(pipelines);
+ }
+
+ /**
+ * Deletes the pipeline specified by id in the request.
+ */
+ public void delete(ClusterService clusterService, DeletePipelineRequest request, ActionListener<WritePipelineResponse> listener) {
+ clusterService.submitStateUpdateTask("delete-pipeline-" + request.getId(), new AckedClusterStateUpdateTask<WritePipelineResponse>(request, listener) {
+
+ @Override
+ protected WritePipelineResponse newResponse(boolean acknowledged) {
+ return new WritePipelineResponse(acknowledged);
+ }
+
+ @Override
+ public ClusterState execute(ClusterState currentState) throws Exception {
+ return innerDelete(request, currentState);
+ }
+ });
+ }
+
+ ClusterState innerDelete(DeletePipelineRequest request, ClusterState currentState) {
+ IngestMetadata currentIngestMetadata = currentState.metaData().custom(IngestMetadata.TYPE);
+ if (currentIngestMetadata == null) {
+ return currentState;
+ }
+ Map<String, PipelineConfiguration> pipelines = currentIngestMetadata.getPipelines();
+ if (pipelines.containsKey(request.getId()) == false) {
+ throw new ResourceNotFoundException("pipeline [{}] is missing", request.getId());
+ } else {
+ pipelines = new HashMap<>(pipelines);
+ pipelines.remove(request.getId());
+ ClusterState.Builder newState = ClusterState.builder(currentState);
+ newState.metaData(MetaData.builder(currentState.getMetaData())
+ .putCustom(IngestMetadata.TYPE, new IngestMetadata(pipelines))
+ .build());
+ return newState.build();
+ }
+ }
+
+ /**
+ * Stores the pipeline definition specified in the request.
+ */
+ public void put(ClusterService clusterService, PutPipelineRequest request, ActionListener<WritePipelineResponse> listener) {
+ // validates the pipeline and processor configuration before submitting a cluster update task:
+ Map<String, Object> pipelineConfig = XContentHelper.convertToMap(request.getSource(), false).v2();
+ WritePipelineResponse response = validatePipelineResponse(request.getId(), pipelineConfig);
+ if (response != null) {
+ listener.onResponse(response);
+ return;
+ }
+ clusterService.submitStateUpdateTask("put-pipeline-" + request.getId(), new AckedClusterStateUpdateTask<WritePipelineResponse>(request, listener) {
+
+ @Override
+ protected WritePipelineResponse newResponse(boolean acknowledged) {
+ return new WritePipelineResponse(acknowledged);
+ }
+
+ @Override
+ public ClusterState execute(ClusterState currentState) throws Exception {
+ return innerPut(request, currentState);
+ }
+ });
+ }
+
+ ClusterState innerPut(PutPipelineRequest request, ClusterState currentState) {
+ IngestMetadata currentIngestMetadata = currentState.metaData().custom(IngestMetadata.TYPE);
+ Map<String, PipelineConfiguration> pipelines;
+ if (currentIngestMetadata != null) {
+ pipelines = new HashMap<>(currentIngestMetadata.getPipelines());
+ } else {
+ pipelines = new HashMap<>();
+ }
+
+ pipelines.put(request.getId(), new PipelineConfiguration(request.getId(), request.getSource()));
+ ClusterState.Builder newState = ClusterState.builder(currentState);
+ newState.metaData(MetaData.builder(currentState.getMetaData())
+ .putCustom(IngestMetadata.TYPE, new IngestMetadata(pipelines))
+ .build());
+ return newState.build();
+ }
+
+ /**
+ * Returns the pipeline with the specified id.
+ */
+ public Pipeline get(String id) {
+ return pipelines.get(id);
+ }
+
+ public Map<String, Processor.Factory> getProcessorFactoryRegistry() {
+ return processorFactoryRegistry;
+ }
+
+ /**
+ * @return the pipeline configurations matching the specified ids. If multiple ids or wildcards are specified,
+ * multiple pipelines may be returned.
+ */
+ // Returning PipelineConfiguration instead of Pipeline, because Pipeline and Processor interface don't
+ // know how to serialize themselves.
+ public List<PipelineConfiguration> getPipelines(ClusterState clusterState, String... ids) {
+ IngestMetadata ingestMetadata = clusterState.getMetaData().custom(IngestMetadata.TYPE);
+ return innerGetPipelines(ingestMetadata, ids);
+ }
+
+ List<PipelineConfiguration> innerGetPipelines(IngestMetadata ingestMetadata, String... ids) {
+ if (ingestMetadata == null) {
+ return Collections.emptyList();
+ }
+
+ List<PipelineConfiguration> result = new ArrayList<>(ids.length);
+ for (String id : ids) {
+ if (Regex.isSimpleMatchPattern(id)) {
+ for (Map.Entry<String, PipelineConfiguration> entry : ingestMetadata.getPipelines().entrySet()) {
+ if (Regex.simpleMatch(id, entry.getKey())) {
+ result.add(entry.getValue());
+ }
+ }
+ } else {
+ PipelineConfiguration pipeline = ingestMetadata.getPipelines().get(id);
+ if (pipeline != null) {
+ result.add(pipeline);
+ }
+ }
+ }
+ return result;
+ }
+
+ WritePipelineResponse validatePipelineResponse(String id, Map<String, Object> config) {
+ try {
+ factory.create(id, config, processorFactoryRegistry);
+ return null;
+ } catch (ConfigurationPropertyException e) {
+ return new WritePipelineResponse(new PipelineFactoryError(e));
+ } catch (Exception e) {
+ return new WritePipelineResponse(new PipelineFactoryError(e.getMessage()));
+ }
+ }
+
+}
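A sketch of the two lookup paths above: configurations resolved from cluster state (with wildcard support) versus the pre-built pipelines used at execution time; the ids are illustrative:

    import org.elasticsearch.cluster.ClusterState;
    import org.elasticsearch.ingest.PipelineConfiguration;
    import org.elasticsearch.ingest.PipelineStore;
    import org.elasticsearch.ingest.core.Pipeline;

    import java.util.List;

    class PipelineStoreSketch {
        static void demo(PipelineStore store, ClusterState state) {
            // Wildcards resolve against the configurations held in cluster state...
            List<PipelineConfiguration> configs = store.getPipelines(state, "logs-*", "metrics");
            // ...while execution-time lookups use the pre-built Pipeline map.
            Pipeline pipeline = store.get("logs-enrich");
        }
    }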
diff --git a/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java b/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java
new file mode 100644
index 0000000000..766ba77293
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/ProcessorsRegistry.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest;
+
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.ingest.core.TemplateService;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.Function;
+
+public class ProcessorsRegistry {
+
+ private final Map<String, Function<TemplateService, Processor.Factory<?>>> processorFactoryProviders = new HashMap<>();
+
+ /**
+ * Registers a processor factory provider under the given name.
+ */
+ public void registerProcessor(String name, Function<TemplateService, Processor.Factory<?>> processorFactoryProvider) {
+ Function<TemplateService, Processor.Factory<?>> provider = processorFactoryProviders.putIfAbsent(name, processorFactoryProvider);
+ if (provider != null) {
+ throw new IllegalArgumentException("Processor factory already registered for name [" + name + "]");
+ }
+ }
+
+ public Set<Map.Entry<String, Function<TemplateService, Processor.Factory<?>>>> entrySet() {
+ return processorFactoryProviders.entrySet();
+ }
+}
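A registration sketch, using the hypothetical UppercaseProcessorFactory sketched further below; real plugins would plug in their own factory providers:

    import org.elasticsearch.ingest.ProcessorsRegistry;

    class RegistrySketch {
        static void register(ProcessorsRegistry registry) {
            // The provider receives the TemplateService so a factory can compile templates.
            registry.registerProcessor("uppercase", templateService -> new UppercaseProcessorFactory());
            // Registering "uppercase" a second time throws IllegalArgumentException.
        }
    }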
diff --git a/core/src/test/java/org/elasticsearch/plugins/loading/classpath/InClassPathPlugin.java b/core/src/main/java/org/elasticsearch/ingest/core/AbstractProcessor.java
index 79b1f244ef..e709ae395c 100644
--- a/core/src/test/java/org/elasticsearch/plugins/loading/classpath/InClassPathPlugin.java
+++ b/core/src/main/java/org/elasticsearch/ingest/core/AbstractProcessor.java
@@ -17,19 +17,22 @@
* under the License.
*/
-package org.elasticsearch.plugins.loading.classpath;
-import org.elasticsearch.plugins.Plugin;
+package org.elasticsearch.ingest.core;
-public class InClassPathPlugin extends Plugin {
+/**
+ * An abstract base class for processors that stores the processor tag
+ * on behalf of its concrete subclasses.
+ */
+public abstract class AbstractProcessor implements Processor {
+ protected final String tag;
- @Override
- public String name() {
- return "in-classpath-plugin";
+ protected AbstractProcessor(String tag) {
+ this.tag = tag;
}
@Override
- public String description() {
- return "A plugin defined in class path";
+ public String getTag() {
+ return tag;
}
}
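A minimal concrete subclass, hypothetical and for illustration only: it uppercases one source field through getSourceAndMetadata(), the accessor visible elsewhere in this diff:

    import org.elasticsearch.ingest.core.AbstractProcessor;
    import org.elasticsearch.ingest.core.IngestDocument;

    import java.util.Locale;
    import java.util.Map;

    final class UppercaseProcessor extends AbstractProcessor {

        private final String field;

        UppercaseProcessor(String tag, String field) {
            super(tag);  // AbstractProcessor stores the tag and implements getTag()
            this.field = field;
        }

        @Override
        public void execute(IngestDocument document) {
            Map<String, Object> source = document.getSourceAndMetadata();
            Object value = source.get(field);
            if (value instanceof String) {
                source.put(field, ((String) value).toUpperCase(Locale.ROOT));
            }
        }

        @Override
        public String getType() {
            return "uppercase";
        }
    }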
diff --git a/core/src/main/java/org/elasticsearch/ingest/core/AbstractProcessorFactory.java b/core/src/main/java/org/elasticsearch/ingest/core/AbstractProcessorFactory.java
new file mode 100644
index 0000000000..323344f8f4
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/core/AbstractProcessorFactory.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+package org.elasticsearch.ingest.core;
+
+import java.util.Map;
+
+/**
+ * A base implementation for processor factories that extracts the optional "tag"
+ * property from the configuration before delegating creation to the concrete factory.
+ */
+public abstract class AbstractProcessorFactory<P extends Processor> implements Processor.Factory<P> {
+ public static final String TAG_KEY = "tag";
+
+ @Override
+ public P create(Map<String, Object> config) throws Exception {
+ String tag = ConfigurationUtils.readOptionalStringProperty(null, null, config, TAG_KEY);
+ return doCreate(tag, config);
+ }
+
+ protected abstract P doCreate(String tag, Map<String, Object> config) throws Exception;
+}
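A matching hypothetical factory for the processor sketch above. The base class has already consumed the optional "tag" entry by the time doCreate runs, and ConfigurationUtils (added below) consumes "field" the same way:

    import org.elasticsearch.ingest.core.AbstractProcessorFactory;
    import org.elasticsearch.ingest.core.ConfigurationUtils;

    import java.util.Map;

    final class UppercaseProcessorFactory extends AbstractProcessorFactory<UppercaseProcessor> {
        @Override
        protected UppercaseProcessor doCreate(String tag, Map<String, Object> config) throws Exception {
            // readStringProperty removes the key, so leftover keys can be flagged as unused.
            String field = ConfigurationUtils.readStringProperty("uppercase", tag, config, "field");
            return new UppercaseProcessor(tag, field);
        }
    }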
diff --git a/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java b/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java
new file mode 100644
index 0000000000..c784ea1c57
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+//TODO(simonw): can all these classes go into org.elasticsearch.ingest?
+
+package org.elasticsearch.ingest.core;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * A Processor that executes a list of other processors. It executes a separate list of
+ * "onFailureProcessors" when any of the processors throws an {@link Exception}.
+ */
+public class CompoundProcessor implements Processor {
+ static final String ON_FAILURE_MESSAGE_FIELD = "on_failure_message";
+ static final String ON_FAILURE_PROCESSOR_TYPE_FIELD = "on_failure_processor_type";
+ static final String ON_FAILURE_PROCESSOR_TAG_FIELD = "on_failure_processor_tag";
+
+ private final List<Processor> processors;
+ private final List<Processor> onFailureProcessors;
+
+ public CompoundProcessor(Processor... processor) {
+ this(Arrays.asList(processor), Collections.emptyList());
+ }
+
+ public CompoundProcessor(List<Processor> processors, List<Processor> onFailureProcessors) {
+ this.processors = processors;
+ this.onFailureProcessors = onFailureProcessors;
+ }
+
+ public List<Processor> getOnFailureProcessors() {
+ return onFailureProcessors;
+ }
+
+ public List<Processor> getProcessors() {
+ return processors;
+ }
+
+ @Override
+ public String getType() {
+ return "compound";
+ }
+
+ @Override
+ public String getTag() {
+ return "compound-processor-" + Objects.hash(processors, onFailureProcessors);
+ }
+
+ @Override
+ public void execute(IngestDocument ingestDocument) throws Exception {
+ for (Processor processor : processors) {
+ try {
+ processor.execute(ingestDocument);
+ } catch (Exception e) {
+ if (onFailureProcessors.isEmpty()) {
+ throw e;
+ } else {
+ executeOnFailure(ingestDocument, e, processor.getType(), processor.getTag());
+ }
+ break;
+ }
+ }
+ }
+
+ void executeOnFailure(IngestDocument ingestDocument, Exception cause, String failedProcessorType, String failedProcessorTag) throws Exception {
+ Map<String, String> ingestMetadata = ingestDocument.getIngestMetadata();
+ try {
+ ingestMetadata.put(ON_FAILURE_MESSAGE_FIELD, cause.getMessage());
+ ingestMetadata.put(ON_FAILURE_PROCESSOR_TYPE_FIELD, failedProcessorType);
+ ingestMetadata.put(ON_FAILURE_PROCESSOR_TAG_FIELD, failedProcessorTag);
+ for (Processor processor : onFailureProcessors) {
+ processor.execute(ingestDocument);
+ }
+ } finally {
+ ingestMetadata.remove(ON_FAILURE_MESSAGE_FIELD);
+ ingestMetadata.remove(ON_FAILURE_PROCESSOR_TYPE_FIELD);
+ ingestMetadata.remove(ON_FAILURE_PROCESSOR_TAG_FIELD);
+ }
+ }
+}
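A wiring sketch for the failure-chain semantics above; the processor and document instances are assumed to exist:

    import org.elasticsearch.ingest.core.CompoundProcessor;
    import org.elasticsearch.ingest.core.IngestDocument;
    import org.elasticsearch.ingest.core.Processor;

    import java.util.Collections;

    class CompoundSketch {
        static void demo(Processor mainProcessor, Processor onFailure, IngestDocument document) throws Exception {
            CompoundProcessor processor = new CompoundProcessor(
                    Collections.singletonList(mainProcessor),
                    Collections.singletonList(onFailure));
            // If mainProcessor throws, onFailure runs with on_failure_message,
            // on_failure_processor_type and on_failure_processor_tag set in the ingest
            // metadata; those keys are removed again once the failure chain completes.
            processor.execute(document);
        }
    }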
diff --git a/core/src/main/java/org/elasticsearch/ingest/core/ConfigurationUtils.java b/core/src/main/java/org/elasticsearch/ingest/core/ConfigurationUtils.java
new file mode 100644
index 0000000000..69adc0f949
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/core/ConfigurationUtils.java
@@ -0,0 +1,165 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.core;
+
+import org.elasticsearch.ingest.processor.ConfigurationPropertyException;
+
+import java.util.List;
+import java.util.Map;
+
+public final class ConfigurationUtils {
+
+ private ConfigurationUtils() {
+ }
+
+ /**
+ * Returns and removes the specified optional property from the specified configuration map.
+ *
+ * If the property value isn't of type string, a {@link ConfigurationPropertyException} is thrown.
+ */
+ public static String readOptionalStringProperty(String processorType, String processorTag, Map<String, Object> configuration, String propertyName) {
+ Object value = configuration.remove(propertyName);
+ return readString(processorType, processorTag, propertyName, value);
+ }
+
+ /**
+ * Returns and removes the specified property from the specified configuration map.
+ *
+ * If the property value isn't of type string, a {@link ConfigurationPropertyException} is thrown.
+ * If the property is missing, a {@link ConfigurationPropertyException} is thrown.
+ */
+ public static String readStringProperty(String processorType, String processorTag, Map<String, Object> configuration, String propertyName) {
+ return readStringProperty(processorType, processorTag, configuration, propertyName, null);
+ }
+
+ /**
+ * Returns and removes the specified property from the specified configuration map.
+ *
+ * If the property value isn't of type string, a {@link ConfigurationPropertyException} is thrown.
+ * If the property is missing and no default value has been specified, a {@link ConfigurationPropertyException} is thrown.
+ */
+ public static String readStringProperty(String processorType, String processorTag, Map<String, Object> configuration, String propertyName, String defaultValue) {
+ Object value = configuration.remove(propertyName);
+ if (value == null && defaultValue != null) {
+ return defaultValue;
+ } else if (value == null) {
+ throw new ConfigurationPropertyException(processorType, processorTag, propertyName, "required property is missing");
+ }
+ return readString(processorType, processorTag, propertyName, value);
+ }
+
+ private static String readString(String processorType, String processorTag, String propertyName, Object value) {
+ if (value == null) {
+ return null;
+ }
+ if (value instanceof String) {
+ return (String) value;
+ }
+ throw new ConfigurationPropertyException(processorType, processorTag, propertyName, "property isn't a string, but of type [" + value.getClass().getName() + "]");
+ }
+
+ /**
+ * Returns and removes the specified property of type list from the specified configuration map.
+ *
+ * If the property value isn't of type list, a {@link ConfigurationPropertyException} is thrown.
+ */
+ public static <T> List<T> readOptionalList(String processorType, String processorTag, Map<String, Object> configuration, String propertyName) {
+ Object value = configuration.remove(propertyName);
+ if (value == null) {
+ return null;
+ }
+ return readList(processorType, processorTag, propertyName, value);
+ }
+
+ /**
+ * Returns and removes the specified property of type list from the specified configuration map.
+ *
+ * If the property value isn't of type list, a {@link ConfigurationPropertyException} is thrown.
+ * If the property is missing, a {@link ConfigurationPropertyException} is thrown.
+ */
+ public static <T> List<T> readList(String processorType, String processorTag, Map<String, Object> configuration, String propertyName) {
+ Object value = configuration.remove(propertyName);
+ if (value == null) {
+ throw new ConfigurationPropertyException(processorType, processorTag, propertyName, "required property is missing");
+ }
+
+ return readList(processorType, processorTag, propertyName, value);
+ }
+
+ private static <T> List<T> readList(String processorType, String processorTag, String propertyName, Object value) {
+ if (value instanceof List) {
+ @SuppressWarnings("unchecked")
+ List<T> stringList = (List<T>) value;
+ return stringList;
+ } else {
+ throw new ConfigurationPropertyException(processorType, processorTag, propertyName, "property isn't a list, but of type [" + value.getClass().getName() + "]");
+ }
+ }
+
+ /**
+ * Returns and removes the specified property of type map from the specified configuration map.
+ *
+ * If the property value isn't of type map, a {@link ConfigurationPropertyException} is thrown.
+ * If the property is missing, a {@link ConfigurationPropertyException} is thrown.
+ */
+ public static <T> Map<String, T> readMap(String processorType, String processorTag, Map<String, Object> configuration, String propertyName) {
+ Object value = configuration.remove(propertyName);
+ if (value == null) {
+ throw new ConfigurationPropertyException(processorType, processorTag, propertyName, "required property is missing");
+ }
+
+ return readMap(processorType, processorTag, propertyName, value);
+ }
+
+ /**
+ * Returns and removes the specified property of type map from the specified configuration map.
+ *
+ * If the property value isn't of type map, a {@link ConfigurationPropertyException} is thrown.
+ */
+ public static <T> Map<String, T> readOptionalMap(String processorType, String processorTag, Map<String, Object> configuration, String propertyName) {
+ Object value = configuration.remove(propertyName);
+ if (value == null) {
+ return null;
+ }
+
+ return readMap(processorType, processorTag, propertyName, value);
+ }
+
+ private static <T> Map<String, T> readMap(String processorType, String processorTag, String propertyName, Object value) {
+ if (value instanceof Map) {
+ @SuppressWarnings("unchecked")
+ Map<String, T> map = (Map<String, T>) value;
+ return map;
+ } else {
+ throw new ConfigurationPropertyException(processorType, processorTag, propertyName, "property isn't a map, but of type [" + value.getClass().getName() + "]");
+ }
+ }
+
+ /**
+ * Returns and removes the specified property as an {@link Object} from the specified configuration map.
+ */
+ public static Object readObject(String processorType, String processorTag, Map<String, Object> configuration, String propertyName) {
+ Object value = configuration.remove(propertyName);
+ if (value == null) {
+ throw new ConfigurationPropertyException(processorType, processorTag, propertyName, "required property is missing");
+ }
+ return value;
+ }
+}
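A sketch of the read-and-remove contract these helpers share: every call consumes its key, so whatever remains in the map afterwards is an unused, and likely misspelled, option. The keys are illustrative:

    import org.elasticsearch.ingest.core.ConfigurationUtils;

    import java.util.HashMap;
    import java.util.Map;

    class ConfigSketch {
        public static void main(String[] args) {
            Map<String, Object> config = new HashMap<>();
            config.put("field", "message");
            config.put("ignore_missing", true);
            String field = ConfigurationUtils.readStringProperty("uppercase", "my-tag", config, "field");
            // "field" has been consumed; only "ignore_missing" is left for a factory to reject.
            System.out.println(field + ", leftovers: " + config);
        }
    }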
diff --git a/core/src/main/java/org/elasticsearch/ingest/core/IngestDocument.java b/core/src/main/java/org/elasticsearch/ingest/core/IngestDocument.java
new file mode 100644
index 0000000000..4b0f6ac866
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/core/IngestDocument.java
@@ -0,0 +1,573 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.core;
+
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.index.mapper.internal.IdFieldMapper;
+import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
+import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
+import org.elasticsearch.index.mapper.internal.RoutingFieldMapper;
+import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
+import org.elasticsearch.index.mapper.internal.TTLFieldMapper;
+import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
+import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
+
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Objects;
+import java.util.TimeZone;
+
+/**
+ * Represents a single document being captured before indexing and holds the source and metadata (like id, type and index).
+ */
+public final class IngestDocument {
+
+ public final static String INGEST_KEY = "_ingest";
+
+ static final String TIMESTAMP = "timestamp";
+
+ private final Map<String, Object> sourceAndMetadata;
+ private final Map<String, String> ingestMetadata;
+
+ public IngestDocument(String index, String type, String id, String routing, String parent, String timestamp, String ttl, Map<String, Object> source) {
+ this.sourceAndMetadata = new HashMap<>();
+ this.sourceAndMetadata.putAll(source);
+ this.sourceAndMetadata.put(MetaData.INDEX.getFieldName(), index);
+ this.sourceAndMetadata.put(MetaData.TYPE.getFieldName(), type);
+ this.sourceAndMetadata.put(MetaData.ID.getFieldName(), id);
+ if (routing != null) {
+ this.sourceAndMetadata.put(MetaData.ROUTING.getFieldName(), routing);
+ }
+ if (parent != null) {
+ this.sourceAndMetadata.put(MetaData.PARENT.getFieldName(), parent);
+ }
+ if (timestamp != null) {
+ this.sourceAndMetadata.put(MetaData.TIMESTAMP.getFieldName(), timestamp);
+ }
+ if (ttl != null) {
+ this.sourceAndMetadata.put(MetaData.TTL.getFieldName(), ttl);
+ }
+
+ this.ingestMetadata = new HashMap<>();
+ DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZZ", Locale.ROOT);
+ df.setTimeZone(TimeZone.getTimeZone("UTC"));
+ this.ingestMetadata.put(TIMESTAMP, df.format(new Date()));
+ }
+
+ /**
+ * Copy constructor that creates a new {@link IngestDocument} which has exactly the same properties as the one provided as an argument.
+ */
+ public IngestDocument(IngestDocument other) {
+ this(deepCopyMap(other.sourceAndMetadata), deepCopyMap(other.ingestMetadata));
+ }
+
+ /**
+ * Constructor needed for testing that allows creating a new {@link IngestDocument} given the provided elasticsearch metadata,
+ * source and ingest metadata. This is needed because the ingest metadata will be initialized with the current timestamp at
+ * init time, which makes equality comparisons impossible in tests.
+ */
+ public IngestDocument(Map<String, Object> sourceAndMetadata, Map<String, String> ingestMetadata) {
+ this.sourceAndMetadata = sourceAndMetadata;
+ this.ingestMetadata = ingestMetadata;
+ }
+
+ /**
+ * Returns the value contained in the document for the provided path
+ * @param path The path within the document in dot-notation
+ * @param clazz The expected class of the field value
+ * @return the value for the provided path if existing, null otherwise
+ * @throws IllegalArgumentException if the path is null, empty, invalid, if the field doesn't exist
+ * or if the field that is found at the provided path is not of the expected type.
+ */
+ public <T> T getFieldValue(String path, Class<T> clazz) {
+ FieldPath fieldPath = new FieldPath(path);
+ Object context = fieldPath.initialContext;
+ for (String pathElement : fieldPath.pathElements) {
+ context = resolve(pathElement, path, context);
+ }
+ return cast(path, context, clazz);
+ }
+
+ /**
+ * Checks whether the document contains a value for the provided path
+ * @param path The path within the document in dot-notation
+ * @return true if the document contains a value for the field, false otherwise
+ * @throws IllegalArgumentException if the path is null, empty or invalid.
+ */
+ public boolean hasField(String path) {
+ FieldPath fieldPath = new FieldPath(path);
+ Object context = fieldPath.initialContext;
+ for (int i = 0; i < fieldPath.pathElements.length - 1; i++) {
+ String pathElement = fieldPath.pathElements[i];
+ if (context == null) {
+ return false;
+ }
+ if (context instanceof Map) {
+ @SuppressWarnings("unchecked")
+ Map<String, Object> map = (Map<String, Object>) context;
+ context = map.get(pathElement);
+ } else if (context instanceof List) {
+ @SuppressWarnings("unchecked")
+ List<Object> list = (List<Object>) context;
+ try {
+ int index = Integer.parseInt(pathElement);
+ if (index < 0 || index >= list.size()) {
+ return false;
+ }
+ context = list.get(index);
+ } catch (NumberFormatException e) {
+ return false;
+ }
+
+ } else {
+ return false;
+ }
+ }
+
+ String leafKey = fieldPath.pathElements[fieldPath.pathElements.length - 1];
+ if (context instanceof Map) {
+ @SuppressWarnings("unchecked")
+ Map<String, Object> map = (Map<String, Object>) context;
+ return map.containsKey(leafKey);
+ }
+ if (context instanceof List) {
+ @SuppressWarnings("unchecked")
+ List<Object> list = (List<Object>) context;
+ try {
+ int index = Integer.parseInt(leafKey);
+ return index >= 0 && index < list.size();
+ } catch (NumberFormatException e) {
+ return false;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Removes the field identified by the provided path.
+ * @param fieldPathTemplate Resolves to the path with dot-notation within the document
+ * @throws IllegalArgumentException if the path is null, empty, invalid or if the field doesn't exist.
+ */
+ public void removeField(TemplateService.Template fieldPathTemplate) {
+ removeField(renderTemplate(fieldPathTemplate));
+ }
+
+ /**
+ * Removes the field identified by the provided path.
+ * @param path the path of the field to be removed
+ * @throws IllegalArgumentException if the path is null, empty, invalid or if the field doesn't exist.
+ */
+ public void removeField(String path) {
+ FieldPath fieldPath = new FieldPath(path);
+ Object context = fieldPath.initialContext;
+ for (int i = 0; i < fieldPath.pathElements.length - 1; i++) {
+ context = resolve(fieldPath.pathElements[i], path, context);
+ }
+
+ String leafKey = fieldPath.pathElements[fieldPath.pathElements.length - 1];
+ if (context instanceof Map) {
+ @SuppressWarnings("unchecked")
+ Map<String, Object> map = (Map<String, Object>) context;
+ if (map.containsKey(leafKey)) {
+ map.remove(leafKey);
+ return;
+ }
+ throw new IllegalArgumentException("field [" + leafKey + "] not present as part of path [" + path + "]");
+ }
+ if (context instanceof List) {
+ @SuppressWarnings("unchecked")
+ List<Object> list = (List<Object>) context;
+ int index;
+ try {
+ index = Integer.parseInt(leafKey);
+ } catch (NumberFormatException e) {
+ throw new IllegalArgumentException("[" + leafKey + "] is not an integer, cannot be used as an index as part of path [" + path + "]", e);
+ }
+ if (index < 0 || index >= list.size()) {
+ throw new IllegalArgumentException("[" + index + "] is out of bounds for array with length [" + list.size() + "] as part of path [" + path + "]");
+ }
+ list.remove(index);
+ return;
+ }
+
+ if (context == null) {
+ throw new IllegalArgumentException("cannot remove [" + leafKey + "] from null as part of path [" + path + "]");
+ }
+ throw new IllegalArgumentException("cannot remove [" + leafKey + "] from object of type [" + context.getClass().getName() + "] as part of path [" + path + "]");
+ }
+
+ private static Object resolve(String pathElement, String fullPath, Object context) {
+ if (context == null) {
+ throw new IllegalArgumentException("cannot resolve [" + pathElement + "] from null as part of path [" + fullPath + "]");
+ }
+ if (context instanceof Map) {
+ @SuppressWarnings("unchecked")
+ Map<String, Object> map = (Map<String, Object>) context;
+ if (map.containsKey(pathElement)) {
+ return map.get(pathElement);
+ }
+ throw new IllegalArgumentException("field [" + pathElement + "] not present as part of path [" + fullPath + "]");
+ }
+ if (context instanceof List) {
+ @SuppressWarnings("unchecked")
+ List<Object> list = (List<Object>) context;
+ int index;
+ try {
+ index = Integer.parseInt(pathElement);
+ } catch (NumberFormatException e) {
+ throw new IllegalArgumentException("[" + pathElement + "] is not an integer, cannot be used as an index as part of path [" + fullPath + "]", e);
+ }
+ if (index < 0 || index >= list.size()) {
+ throw new IllegalArgumentException("[" + index + "] is out of bounds for array with length [" + list.size() + "] as part of path [" + fullPath + "]");
+ }
+ return list.get(index);
+ }
+ throw new IllegalArgumentException("cannot resolve [" + pathElement + "] from object of type [" + context.getClass().getName() + "] as part of path [" + fullPath + "]");
+ }
+
+ /**
+ * Appends the provided value to the provided path in the document.
+ * Any non-existing path element will be created.
+ * If the path identifies a list, the value will be appended to the existing list.
+ * If the path identifies a scalar, the scalar will be converted to a list and
+ * the provided value will be added to the newly created list.
+ * If the provided value is itself a list, all of its items will be appended to the
+ * existing (or newly created) list.
+ * @param path The path within the document in dot-notation
+ * @param value The value or values to append to the existing ones
+ * @throws IllegalArgumentException if the path is null, empty or invalid.
+ */
+ public void appendFieldValue(String path, Object value) {
+ setFieldValue(path, value, true);
+ }
+
+ /**
+ * Appends the provided value to the provided path in the document.
+ * Any non-existing path element will be created.
+ * If the path identifies a list, the value will be appended to the existing list.
+ * If the path identifies a scalar, the scalar will be converted to a list and
+ * the provided value will be added to the newly created list.
+ * If the provided value is itself a list, all of its items will be appended to the
+ * existing (or newly created) list.
+ * @param fieldPathTemplate Resolves to the path with dot-notation within the document
+ * @param valueSource The value source that will produce the value or values to append to the existing ones
+ * @throws IllegalArgumentException if the path is null, empty or invalid.
+ */
+ public void appendFieldValue(TemplateService.Template fieldPathTemplate, ValueSource valueSource) {
+ Map<String, Object> model = createTemplateModel();
+ appendFieldValue(fieldPathTemplate.execute(model), valueSource.copyAndResolve(model));
+ }
+
+ /**
+ * Sets the provided value to the provided path in the document.
+ * Any non-existing path element will be created.
+ * If the last item in the path is a list, the value will replace the existing list as a whole.
+ * Use {@link #appendFieldValue(String, Object)} to append values to lists instead.
+ * @param path The path within the document in dot-notation
+ * @param value The value to put in for the path key
+ * @throws IllegalArgumentException if the path is null, empty, invalid or if the value cannot be set to the
+ * item identified by the provided path.
+ */
+ public void setFieldValue(String path, Object value) {
+ setFieldValue(path, value, false);
+ }
+
+ /**
+ * Sets the provided value to the provided path in the document.
+ * Any non-existing path element will be created. If the last element is a list,
+ * the value will replace the existing list.
+ * @param fieldPathTemplate Resolves to the path with dot-notation within the document
+ * @param valueSource The value source that will produce the value to put in for the path key
+ * @throws IllegalArgumentException if the path is null, empty, invalid or if the value cannot be set to the
+ * item identified by the provided path.
+ */
+ public void setFieldValue(TemplateService.Template fieldPathTemplate, ValueSource valueSource) {
+ Map<String, Object> model = createTemplateModel();
+ setFieldValue(fieldPathTemplate.execute(model), valueSource.copyAndResolve(model), false);
+ }
+
+ private void setFieldValue(String path, Object value, boolean append) {
+ FieldPath fieldPath = new FieldPath(path);
+ Object context = fieldPath.initialContext;
+ for (int i = 0; i < fieldPath.pathElements.length - 1; i++) {
+ String pathElement = fieldPath.pathElements[i];
+ if (context == null) {
+ throw new IllegalArgumentException("cannot resolve [" + pathElement + "] from null as part of path [" + path + "]");
+ }
+ if (context instanceof Map) {
+ @SuppressWarnings("unchecked")
+ Map<String, Object> map = (Map<String, Object>) context;
+ if (map.containsKey(pathElement)) {
+ context = map.get(pathElement);
+ } else {
+ Map<String, Object> newMap = new HashMap<>();
+ map.put(pathElement, newMap);
+ context = newMap;
+ }
+ } else if (context instanceof List) {
+ @SuppressWarnings("unchecked")
+ List<Object> list = (List<Object>) context;
+ int index;
+ try {
+ index = Integer.parseInt(pathElement);
+ } catch (NumberFormatException e) {
+ throw new IllegalArgumentException("[" + pathElement + "] is not an integer, cannot be used as an index as part of path [" + path + "]", e);
+ }
+ if (index < 0 || index >= list.size()) {
+ throw new IllegalArgumentException("[" + index + "] is out of bounds for array with length [" + list.size() + "] as part of path [" + path + "]");
+ }
+ context = list.get(index);
+ } else {
+ throw new IllegalArgumentException("cannot resolve [" + pathElement + "] from object of type [" + context.getClass().getName() + "] as part of path [" + path + "]");
+ }
+ }
+
+ String leafKey = fieldPath.pathElements[fieldPath.pathElements.length - 1];
+ if (context == null) {
+ throw new IllegalArgumentException("cannot set [" + leafKey + "] with null parent as part of path [" + path + "]");
+ }
+ if (context instanceof Map) {
+ @SuppressWarnings("unchecked")
+ Map<String, Object> map = (Map<String, Object>) context;
+ if (append) {
+ if (map.containsKey(leafKey)) {
+ Object object = map.get(leafKey);
+ List<Object> list = appendValues(object, value);
+ if (list != object) {
+ map.put(leafKey, list);
+ }
+ } else {
+ List<Object> list = new ArrayList<>();
+ appendValues(list, value);
+ map.put(leafKey, list);
+ }
+ return;
+ }
+ map.put(leafKey, value);
+ } else if (context instanceof List) {
+ @SuppressWarnings("unchecked")
+ List<Object> list = (List<Object>) context;
+ int index;
+ try {
+ index = Integer.parseInt(leafKey);
+ } catch (NumberFormatException e) {
+ throw new IllegalArgumentException("[" + leafKey + "] is not an integer, cannot be used as an index as part of path [" + path + "]", e);
+ }
+ if (index < 0 || index >= list.size()) {
+ throw new IllegalArgumentException("[" + index + "] is out of bounds for array with length [" + list.size() + "] as part of path [" + path + "]");
+ }
+ if (append) {
+ Object object = list.get(index);
+ List<Object> newList = appendValues(object, value);
+ if (newList != object) {
+ list.set(index, newList);
+ }
+ return;
+ }
+ list.set(index, value);
+ } else {
+ throw new IllegalArgumentException("cannot set [" + leafKey + "] with parent object of type [" + context.getClass().getName() + "] as part of path [" + path + "]");
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private static List<Object> appendValues(Object maybeList, Object value) {
+ List<Object> list;
+ if (maybeList instanceof List) {
+ //maybeList is already a list, we append the provided values to it
+ list = (List<Object>) maybeList;
+ } else {
+ //maybeList is a scalar, we convert it to a list and append the provided values to it
+ list = new ArrayList<>();
+ list.add(maybeList);
+ }
+ appendValues(list, value);
+ return list;
+ }
+
+ private static void appendValues(List<Object> list, Object value) {
+ if (value instanceof List) {
+ @SuppressWarnings("unchecked")
+ List<?> valueList = (List<?>) value;
+ valueList.stream().forEach(list::add);
+ } else {
+ list.add(value);
+ }
+ }
+
+ private static <T> T cast(String path, Object object, Class<T> clazz) {
+ if (object == null) {
+ return null;
+ }
+ if (clazz.isInstance(object)) {
+ return clazz.cast(object);
+ }
+ throw new IllegalArgumentException("field [" + path + "] of type [" + object.getClass().getName() + "] cannot be cast to [" + clazz.getName() + "]");
+ }
+
+ public String renderTemplate(TemplateService.Template template) {
+ return template.execute(createTemplateModel());
+ }
+
+ private Map<String, Object> createTemplateModel() {
+ Map<String, Object> model = new HashMap<>(sourceAndMetadata);
+ model.put(SourceFieldMapper.NAME, sourceAndMetadata);
+ // If there is a field in the source with the name '_ingest' it gets overwritten here;
+ // if access to that field is required it can be accessed via '_source._ingest'
+ model.put(INGEST_KEY, ingestMetadata);
+ return model;
+ }
+
+ /**
+ * One-time operation that extracts the metadata fields from the ingest document and returns them.
+ * Metadata fields that used to be accessible as ordinary top-level fields will be removed as part of this call.
+ */
+ public Map<MetaData, String> extractMetadata() {
+ Map<MetaData, String> metadataMap = new HashMap<>();
+ for (MetaData metaData : MetaData.values()) {
+ metadataMap.put(metaData, cast(metaData.getFieldName(), sourceAndMetadata.remove(metaData.getFieldName()), String.class));
+ }
+ return metadataMap;
+ }
+
+ /**
+ * Returns the available ingest metadata fields; by default only the timestamp, but additional ones can be set.
+ * Use this method only to read values; to modify them use {@link #setFieldValue(String, Object)} and {@link #removeField(String)}
+ */
+ public Map<String, String> getIngestMetadata() {
+ return this.ingestMetadata;
+ }
+
+ /**
+ * Returns the document including its metadata fields, unless {@link #extractMetadata()} has been called, in which case the
+ * metadata fields will no longer be present.
+ * To modify the document use {@link #setFieldValue(String, Object)} and {@link #removeField(String)} instead.
+ */
+ public Map<String, Object> getSourceAndMetadata() {
+ return this.sourceAndMetadata;
+ }
+
+ @SuppressWarnings("unchecked")
+ private static <K, V> Map<K, V> deepCopyMap(Map<K, V> source) {
+ return (Map<K, V>) deepCopy(source);
+ }
+
+ private static Object deepCopy(Object value) {
+ if (value instanceof Map) {
+ Map<?, ?> mapValue = (Map<?, ?>) value;
+ Map<Object, Object> copy = new HashMap<>(mapValue.size());
+ for (Map.Entry<?, ?> entry : mapValue.entrySet()) {
+ copy.put(entry.getKey(), deepCopy(entry.getValue()));
+ }
+ return copy;
+ } else if (value instanceof List) {
+ List<?> listValue = (List<?>) value;
+ List<Object> copy = new ArrayList<>(listValue.size());
+ for (Object itemValue : listValue) {
+ copy.add(deepCopy(itemValue));
+ }
+ return copy;
+ } else if (value == null || value instanceof String || value instanceof Integer ||
+ value instanceof Long || value instanceof Float ||
+ value instanceof Double || value instanceof Boolean) {
+ return value;
+ } else {
+ throw new IllegalArgumentException("unexpected value type [" + value.getClass() + "]");
+ }
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == this) { return true; }
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+
+ IngestDocument other = (IngestDocument) obj;
+ return Objects.equals(sourceAndMetadata, other.sourceAndMetadata) &&
+ Objects.equals(ingestMetadata, other.ingestMetadata);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(sourceAndMetadata, ingestMetadata);
+ }
+
+ @Override
+ public String toString() {
+ return "IngestDocument{" +
+ " sourceAndMetadata=" + sourceAndMetadata +
+ ", ingestMetadata=" + ingestMetadata +
+ '}';
+ }
+
+ public enum MetaData {
+ INDEX(IndexFieldMapper.NAME),
+ TYPE(TypeFieldMapper.NAME),
+ ID(IdFieldMapper.NAME),
+ ROUTING(RoutingFieldMapper.NAME),
+ PARENT(ParentFieldMapper.NAME),
+ TIMESTAMP(TimestampFieldMapper.NAME),
+ TTL(TTLFieldMapper.NAME);
+
+ private final String fieldName;
+
+ MetaData(String fieldName) {
+ this.fieldName = fieldName;
+ }
+
+ public String getFieldName() {
+ return fieldName;
+ }
+ }
+
+ private class FieldPath {
+ private final String[] pathElements;
+ private final Object initialContext;
+
+ private FieldPath(String path) {
+ if (Strings.isEmpty(path)) {
+ throw new IllegalArgumentException("path cannot be null nor empty");
+ }
+ String newPath;
+ if (path.startsWith(INGEST_KEY + ".")) {
+ initialContext = ingestMetadata;
+ newPath = path.substring(INGEST_KEY.length() + 1);
+ } else {
+ initialContext = sourceAndMetadata;
+ if (path.startsWith(SourceFieldMapper.NAME + ".")) {
+ newPath = path.substring(SourceFieldMapper.NAME.length() + 1);
+ } else {
+ newPath = path;
+ }
+ }
+ this.pathElements = Strings.splitStringToArray(newPath, '.');
+ if (pathElements.length == 0) {
+ throw new IllegalArgumentException("path [" + path + "] is not valid");
+ }
+ }
+ }
+}
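For orientation, a minimal usage sketch of the dot-notation accessors above; this is editorial illustration, not part of the diff, and relies only on the public testing constructor defined in this file:

    import org.elasticsearch.ingest.core.IngestDocument;

    import java.util.HashMap;
    import java.util.Map;

    public class IngestDocumentSketch {
        public static void main(String[] args) {
            Map<String, Object> user = new HashMap<>();
            user.put("name", "kimchy");
            Map<String, Object> source = new HashMap<>();
            source.put("user", user);
            IngestDocument document = new IngestDocument(source, new HashMap<>());

            document.getFieldValue("user.name", String.class); // "kimchy"
            document.setFieldValue("user.id", 42);             // creates the missing leaf under "user"
            document.appendFieldValue("user.roles", "admin");  // an absent field becomes a one-item list
            document.hasField("user.roles");                   // true
            document.removeField("user.id");                   // would throw if the field were missing
        }
    }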
diff --git a/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java b/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java
new file mode 100644
index 0000000000..5c654fbce2
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/core/Pipeline.java
@@ -0,0 +1,137 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.core;
+
+import org.elasticsearch.ingest.processor.ConfigurationPropertyException;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * A pipeline is a list of {@link Processor} instances grouped under a unique id.
+ */
+public final class Pipeline {
+
+ final static String DESCRIPTION_KEY = "description";
+ final static String PROCESSORS_KEY = "processors";
+ final static String ON_FAILURE_KEY = "on_failure";
+
+ private final String id;
+ private final String description;
+ private final CompoundProcessor compoundProcessor;
+
+ public Pipeline(String id, String description, CompoundProcessor compoundProcessor) {
+ this.id = id;
+ this.description = description;
+ this.compoundProcessor = compoundProcessor;
+ }
+
+ /**
+ * Modifies the data of a document to be indexed based on the processors this pipeline holds
+ */
+ public void execute(IngestDocument ingestDocument) throws Exception {
+ compoundProcessor.execute(ingestDocument);
+ }
+
+ /**
+ * The unique id of this pipeline
+ */
+ public String getId() {
+ return id;
+ }
+
+ /**
+ * An optional description of what this pipeline does to the data that it processes.
+ */
+ public String getDescription() {
+ return description;
+ }
+
+ /**
+ * Unmodifiable list containing each processor that operates on the data.
+ */
+ public List<Processor> getProcessors() {
+ return compoundProcessor.getProcessors();
+ }
+
+ /**
+ * Unmodifiable list containing each on_failure processor that operates on the data in case
+ * an exception is thrown by the pipeline's processors
+ */
+ public List<Processor> getOnFailureProcessors() {
+ return compoundProcessor.getOnFailureProcessors();
+ }
+
+ public final static class Factory {
+
+ public Pipeline create(String id, Map<String, Object> config, Map<String, Processor.Factory> processorRegistry) throws ConfigurationPropertyException {
+ String description = ConfigurationUtils.readOptionalStringProperty(null, null, config, DESCRIPTION_KEY);
+ List<Map<String, Map<String, Object>>> processorConfigs = ConfigurationUtils.readList(null, null, config, PROCESSORS_KEY);
+ List<Processor> processors = readProcessorConfigs(processorConfigs, processorRegistry);
+ List<Map<String, Map<String, Object>>> onFailureProcessorConfigs = ConfigurationUtils.readOptionalList(null, null, config, ON_FAILURE_KEY);
+ List<Processor> onFailureProcessors = readProcessorConfigs(onFailureProcessorConfigs, processorRegistry);
+ if (config.isEmpty() == false) {
+ throw new ConfigurationPropertyException("pipeline [" + id + "] doesn't support one or more provided configuration parameters " + Arrays.toString(config.keySet().toArray()));
+ }
+ CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.unmodifiableList(processors), Collections.unmodifiableList(onFailureProcessors));
+ return new Pipeline(id, description, compoundProcessor);
+ }
+
+ private List<Processor> readProcessorConfigs(List<Map<String, Map<String, Object>>> processorConfigs, Map<String, Processor.Factory> processorRegistry) throws ConfigurationPropertyException {
+ List<Processor> processors = new ArrayList<>();
+ if (processorConfigs != null) {
+ for (Map<String, Map<String, Object>> processorConfigWithKey : processorConfigs) {
+ for (Map.Entry<String, Map<String, Object>> entry : processorConfigWithKey.entrySet()) {
+ processors.add(readProcessor(processorRegistry, entry.getKey(), entry.getValue()));
+ }
+ }
+ }
+
+ return processors;
+ }
+
+ private Processor readProcessor(Map<String, Processor.Factory> processorRegistry, String type, Map<String, Object> config) throws ConfigurationPropertyException {
+ Processor.Factory factory = processorRegistry.get(type);
+ if (factory != null) {
+ List<Map<String, Map<String, Object>>> onFailureProcessorConfigs = ConfigurationUtils.readOptionalList(null, null, config, ON_FAILURE_KEY);
+ List<Processor> onFailureProcessors = readProcessorConfigs(onFailureProcessorConfigs, processorRegistry);
+ Processor processor;
+ try {
+ processor = factory.create(config);
+ } catch (ConfigurationPropertyException e) {
+ throw e;
+ } catch (Exception e) {
+ throw new ConfigurationPropertyException(e.getMessage());
+ }
+ if (!config.isEmpty()) {
+ throw new ConfigurationPropertyException("processor [" + type + "] doesn't support one or more provided configuration parameters " + Arrays.toString(config.keySet().toArray()));
+ }
+ if (onFailureProcessors.isEmpty()) {
+ return processor;
+ }
+ return new CompoundProcessor(Collections.singletonList(processor), onFailureProcessors);
+ }
+ throw new ConfigurationPropertyException("No processor type exists with name [" + type + "]");
+ }
+ }
+}
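To make the factory contract concrete, a hedged sketch of building a one-processor pipeline from a config map, e.g. inside a test method (editorial; it assumes AbstractProcessorFactory, defined elsewhere in this change, implements Processor.Factory, and it uses the ConvertProcessor added below):

    Map<String, Object> convertConfig = new HashMap<>();
    convertConfig.put("field", "status");
    convertConfig.put("type", "integer");

    List<Map<String, Map<String, Object>>> processors = new ArrayList<>();
    processors.add(Collections.singletonMap("convert", convertConfig));

    Map<String, Object> config = new HashMap<>();
    config.put("description", "converts the status field to an integer");
    config.put("processors", processors);

    Map<String, Processor.Factory> registry = new HashMap<>();
    registry.put(ConvertProcessor.TYPE, new ConvertProcessor.Factory());

    Pipeline pipeline = new Pipeline.Factory().create("my-pipeline", config, registry);
    // keys are consumed as they are read; any leftover key raises ConfigurationPropertyException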
diff --git a/core/src/main/java/org/elasticsearch/ingest/core/PipelineFactoryError.java b/core/src/main/java/org/elasticsearch/ingest/core/PipelineFactoryError.java
new file mode 100644
index 0000000000..b987e1ee26
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/core/PipelineFactoryError.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.core;
+
+
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentBuilderString;
+import org.elasticsearch.ingest.processor.ConfigurationPropertyException;
+
+import java.io.IOException;
+
+public class PipelineFactoryError implements Streamable, ToXContent {
+ private String reason;
+ private String processorType;
+ private String processorTag;
+ private String processorPropertyName;
+
+ public PipelineFactoryError() {
+ }
+
+ public PipelineFactoryError(ConfigurationPropertyException e) {
+ this.reason = e.getMessage();
+ this.processorType = e.getProcessorType();
+ this.processorTag = e.getProcessorTag();
+ this.processorPropertyName = e.getPropertyName();
+ }
+
+ public PipelineFactoryError(String reason) {
+ this.reason = "Constructing Pipeline failed:" + reason;
+ }
+
+ public String getReason() {
+ return reason;
+ }
+
+ public String getProcessorTag() {
+ return processorTag;
+ }
+
+ public String getProcessorPropertyName() {
+ return processorPropertyName;
+ }
+
+ public String getProcessorType() {
+ return processorType;
+ }
+
+ @Override
+ public void readFrom(StreamInput in) throws IOException {
+ reason = in.readString();
+ processorType = in.readOptionalString();
+ processorTag = in.readOptionalString();
+ processorPropertyName = in.readOptionalString();
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeString(reason);
+ out.writeOptionalString(processorType);
+ out.writeOptionalString(processorTag);
+ out.writeOptionalString(processorPropertyName);
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject("error");
+ builder.field("type", processorType);
+ builder.field("tag", processorTag);
+ builder.field("reason", reason);
+ builder.field("property_name", processorPropertyName);
+ builder.endObject();
+ return builder;
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/ingest/core/PipelineFactoryResult.java b/core/src/main/java/org/elasticsearch/ingest/core/PipelineFactoryResult.java
new file mode 100644
index 0000000000..ab284981b3
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/core/PipelineFactoryResult.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.core;
+
+public class PipelineFactoryResult {
+ private final Pipeline pipeline;
+ private final PipelineFactoryError error;
+
+ public PipelineFactoryResult(Pipeline pipeline) {
+ this.pipeline = pipeline;
+ this.error = null;
+ }
+
+ public PipelineFactoryResult(PipelineFactoryError error) {
+ this.error = error;
+ this.pipeline = null;
+ }
+
+ public Pipeline getPipeline() {
+ return pipeline;
+ }
+
+ public PipelineFactoryError getError() {
+ return error;
+ }
+}
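A sketch of the intended either-pipeline-or-error flow (editorial; `factory`, `id`, `config` and `registry` stand for in-scope values):

    PipelineFactoryResult result;
    try {
        result = new PipelineFactoryResult(factory.create(id, config, registry));
    } catch (ConfigurationPropertyException e) {
        result = new PipelineFactoryResult(new PipelineFactoryError(e));
    }
    // exactly one of result.getPipeline() / result.getError() is non-null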
diff --git a/core/src/main/java/org/elasticsearch/ingest/core/Processor.java b/core/src/main/java/org/elasticsearch/ingest/core/Processor.java
new file mode 100644
index 0000000000..2804998369
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/core/Processor.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+package org.elasticsearch.ingest.core;
+
+import org.elasticsearch.ingest.processor.ConfigurationPropertyException;
+
+import java.util.Map;
+
+/**
+ * A processor implementation may modify the data belonging to a document.
+ * Whether changes are made and what exactly is modified is up to the implementation.
+ */
+public interface Processor {
+
+ /**
+ * Introspect and potentially modify the incoming data.
+ */
+ void execute(IngestDocument ingestDocument) throws Exception;
+
+ /**
+ * Gets the type of a processor
+ */
+ String getType();
+
+ /**
+ * Gets the tag of a processor.
+ */
+ String getTag();
+
+ /**
+ * A factory that knows how to construct a processor based on a map of maps.
+ */
+ interface Factory<P extends Processor> {
+
+ /**
+ * Creates a processor based on the specified map of maps config.
+ *
+ * Implementations are responsible for removing the used keys, so that after creating a pipeline ingest can
+ * verify that all configuration settings have been used.
+ */
+ P create(Map<String, Object> config) throws Exception;
+ }
+}
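A minimal implementation of the contract, as an editorial sketch (the bundled processors below extend AbstractProcessor instead, which handles the tag):

    Processor marker = new Processor() {
        @Override
        public void execute(IngestDocument ingestDocument) {
            // introspect and/or mutate the document in place
            ingestDocument.setFieldValue("processed", true);
        }

        @Override
        public String getType() {
            return "marker";
        }

        @Override
        public String getTag() {
            return null; // no tag configured
        }
    };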
diff --git a/core/src/main/java/org/elasticsearch/common/HasContextAndHeaders.java b/core/src/main/java/org/elasticsearch/ingest/core/TemplateService.java
index 35bea9a777..8988c924c3 100644
--- a/core/src/main/java/org/elasticsearch/common/HasContextAndHeaders.java
+++ b/core/src/main/java/org/elasticsearch/ingest/core/TemplateService.java
@@ -16,18 +16,23 @@
* specific language governing permissions and limitations
* under the License.
*/
+package org.elasticsearch.ingest.core;
-package org.elasticsearch.common;
+import java.util.Map;
/**
- * marker interface
+ * Abstraction for the ingest template engine used to decouple {@link IngestDocument} from {@link org.elasticsearch.script.ScriptService}.
+ * Allows compiling a template into an ingest {@link Template} object.
+ * A compiled template can be executed by calling its {@link Template#execute(Map)} method.
*/
-public interface HasContextAndHeaders extends HasContext, HasHeaders {
+public interface TemplateService {
- /**
- * copies over the context and the headers
- * @param other another object supporting headers and context
- */
- void copyContextAndHeadersFrom(HasContextAndHeaders other);
+ Template compile(String template);
+ interface Template {
+
+ String execute(Map<String, Object> model);
+
+ String getKey();
+ }
}
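Since compile is the only abstract method, a literal, non-interpolating implementation for tests fits in a lambda; an editorial sketch:

    TemplateService literal = template -> new TemplateService.Template() {
        @Override
        public String execute(Map<String, Object> model) {
            return template; // no snippet resolution, the raw string is returned
        }

        @Override
        public String getKey() {
            return template;
        }
    };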
diff --git a/core/src/main/java/org/elasticsearch/ingest/core/ValueSource.java b/core/src/main/java/org/elasticsearch/ingest/core/ValueSource.java
new file mode 100644
index 0000000000..e9f09a1a9f
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/core/ValueSource.java
@@ -0,0 +1,191 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.core;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * Holds a value. When the value is requested, a copy is made and any template snippets are resolved.
+ */
+public interface ValueSource {
+
+ /**
+ * Returns a copy of the value this ValueSource holds and resolves templates if there are any.
+ *
+ * For immutable values only a copy of the reference to the value is made.
+ *
+ * @param model The model to be used when resolving any templates
+ * @return copy of the wrapped value
+ */
+ Object copyAndResolve(Map<String, Object> model);
+
+ static ValueSource wrap(Object value, TemplateService templateService) {
+ if (value instanceof Map) {
+ @SuppressWarnings("unchecked")
+ Map<Object, Object> mapValue = (Map<Object, Object>) value;
+ Map<ValueSource, ValueSource> valueTypeMap = new HashMap<>(mapValue.size());
+ for (Map.Entry<Object, Object> entry : mapValue.entrySet()) {
+ valueTypeMap.put(wrap(entry.getKey(), templateService), wrap(entry.getValue(), templateService));
+ }
+ return new MapValue(valueTypeMap);
+ } else if (value instanceof List) {
+ @SuppressWarnings("unchecked")
+ List<Object> listValue = (List<Object>) value;
+ List<ValueSource> valueSourceList = new ArrayList<>(listValue.size());
+ for (Object item : listValue) {
+ valueSourceList.add(wrap(item, templateService));
+ }
+ return new ListValue(valueSourceList);
+ } else if (value == null || value instanceof Number || value instanceof Boolean) {
+ return new ObjectValue(value);
+ } else if (value instanceof String) {
+ return new TemplatedValue(templateService.compile((String) value));
+ } else {
+ throw new IllegalArgumentException("unexpected value type [" + value.getClass() + "]");
+ }
+ }
+
+ final class MapValue implements ValueSource {
+
+ private final Map<ValueSource, ValueSource> map;
+
+ MapValue(Map<ValueSource, ValueSource> map) {
+ this.map = map;
+ }
+
+ @Override
+ public Object copyAndResolve(Map<String, Object> model) {
+ Map<Object, Object> copy = new HashMap<>();
+ for (Map.Entry<ValueSource, ValueSource> entry : this.map.entrySet()) {
+ copy.put(entry.getKey().copyAndResolve(model), entry.getValue().copyAndResolve(model));
+ }
+ return copy;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ MapValue mapValue = (MapValue) o;
+ return map.equals(mapValue.map);
+
+ }
+
+ @Override
+ public int hashCode() {
+ return map.hashCode();
+ }
+ }
+
+ final class ListValue implements ValueSource {
+
+ private final List<ValueSource> values;
+
+ ListValue(List<ValueSource> values) {
+ this.values = values;
+ }
+
+ @Override
+ public Object copyAndResolve(Map<String, Object> model) {
+ List<Object> copy = new ArrayList<>(values.size());
+ for (ValueSource value : values) {
+ copy.add(value.copyAndResolve(model));
+ }
+ return copy;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ ListValue listValue = (ListValue) o;
+ return values.equals(listValue.values);
+
+ }
+
+ @Override
+ public int hashCode() {
+ return values.hashCode();
+ }
+ }
+
+ final class ObjectValue implements ValueSource {
+
+ private final Object value;
+
+ ObjectValue(Object value) {
+ this.value = value;
+ }
+
+ @Override
+ public Object copyAndResolve(Map<String, Object> model) {
+ return value;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ ObjectValue objectValue = (ObjectValue) o;
+ return Objects.equals(value, objectValue.value);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hashCode(value);
+ }
+ }
+
+ final class TemplatedValue implements ValueSource {
+
+ private final TemplateService.Template template;
+
+ TemplatedValue(TemplateService.Template template) {
+ this.template = template;
+ }
+
+ @Override
+ public Object copyAndResolve(Map<String, Object> model) {
+ return template.execute(model);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ TemplatedValue templatedValue = (TemplatedValue) o;
+ return Objects.equals(template.getKey(), templatedValue.template.getKey());
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hashCode(template.getKey());
+ }
+ }
+
+}
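A short sketch of wrap/copyAndResolve (editorial; the rendered output assumes `templateService` is a mustache-style implementation, which this commit leaves abstract):

    Map<String, Object> raw = new HashMap<>();
    raw.put("greeting", "hi {{name}}");
    raw.put("count", 3);

    ValueSource source = ValueSource.wrap(raw, templateService);
    Object copy = source.copyAndResolve(Collections.singletonMap("name", "kimchy"));
    // a fresh map on every call: {greeting=hi kimchy, count=3}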
diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java
new file mode 100644
index 0000000000..6ae9f2d352
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/processor/AbstractStringProcessor.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.AbstractProcessor;
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.ingest.core.ConfigurationUtils;
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.core.Processor;
+
+import java.util.Map;
+
+/**
+ * Base class for processors that manipulate strings and require a single "field" config value, which
+ * holds the name of the field whose string content is to be processed.
+ */
+public abstract class AbstractStringProcessor extends AbstractProcessor {
+ private final String field;
+
+ protected AbstractStringProcessor(String tag, String field) {
+ super(tag);
+ this.field = field;
+ }
+
+ public String getField() {
+ return field;
+ }
+
+ @Override
+ public final void execute(IngestDocument document) {
+ String val = document.getFieldValue(field, String.class);
+ if (val == null) {
+ throw new IllegalArgumentException("field [" + field + "] is null, cannot process it.");
+ }
+ document.setFieldValue(field, process(val));
+ }
+
+ protected abstract String process(String value);
+
+ public static abstract class Factory<T extends AbstractStringProcessor> extends AbstractProcessorFactory<T> {
+ protected final String processorType;
+
+ protected Factory(String processorType) {
+ this.processorType = processorType;
+ }
+
+ @Override
+ public T doCreate(String processorTag, Map<String, Object> config) throws Exception {
+ String field = ConfigurationUtils.readStringProperty(processorType, processorTag, config, "field");
+ return newProcessor(processorTag, field);
+ }
+
+ protected abstract T newProcessor(String processorTag, String field);
+ }
+}
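A hypothetical concrete subclass, sketched to show the two extension points, `process` and the factory's `newProcessor` (editorial; it assumes AbstractProcessor supplies `getTag`):

    public final class TrimProcessor extends AbstractStringProcessor {
        public static final String TYPE = "trim";

        TrimProcessor(String tag, String field) {
            super(tag, field);
        }

        @Override
        protected String process(String value) {
            return value.trim();
        }

        @Override
        public String getType() {
            return TYPE;
        }

        public static final class Factory extends AbstractStringProcessor.Factory<TrimProcessor> {
            public Factory() {
                super(TYPE);
            }

            @Override
            protected TrimProcessor newProcessor(String processorTag, String field) {
                return new TrimProcessor(processorTag, field);
            }
        }
    }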
diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java
new file mode 100644
index 0000000000..84f979083b
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/processor/AppendProcessor.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.AbstractProcessor;
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.core.TemplateService;
+import org.elasticsearch.ingest.core.ValueSource;
+import org.elasticsearch.ingest.core.ConfigurationUtils;
+
+import java.util.Map;
+
+/**
+ * Processor that appends a value or values to existing lists. If the field is not present, a new list holding the
+ * provided values will be added. If the field holds a scalar, it will be converted to a single-item list and the
+ * provided values will be appended to the newly created list.
+ */
+public class AppendProcessor extends AbstractProcessor {
+
+ public static final String TYPE = "append";
+
+ private final TemplateService.Template field;
+ private final ValueSource value;
+
+ AppendProcessor(String tag, TemplateService.Template field, ValueSource value) {
+ super(tag);
+ this.field = field;
+ this.value = value;
+ }
+
+ public TemplateService.Template getField() {
+ return field;
+ }
+
+ public ValueSource getValue() {
+ return value;
+ }
+
+ @Override
+ public void execute(IngestDocument ingestDocument) throws Exception {
+ ingestDocument.appendFieldValue(field, value);
+ }
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ public static final class Factory extends AbstractProcessorFactory<AppendProcessor> {
+
+ private final TemplateService templateService;
+
+ public Factory(TemplateService templateService) {
+ this.templateService = templateService;
+ }
+
+ @Override
+ public AppendProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
+ String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field");
+ Object value = ConfigurationUtils.readObject(TYPE, processorTag, config, "value");
+ return new AppendProcessor(processorTag, templateService.compile(field), ValueSource.wrap(value, templateService));
+ }
+ }
+}
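Configuration sketch, e.g. inside a test method (editorial; `templateService` and `document` stand for in-scope values, and doCreate is called directly, bypassing the tag handling of the inherited create method):

    Map<String, Object> config = new HashMap<>();
    config.put("field", "tags");
    config.put("value", Arrays.asList("production", "web"));

    AppendProcessor append = new AppendProcessor.Factory(templateService).doCreate("tag-docs", config);
    append.execute(document); // both values land in the (possibly newly created) "tags" list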
diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/ConfigurationPropertyException.java b/core/src/main/java/org/elasticsearch/ingest/processor/ConfigurationPropertyException.java
new file mode 100644
index 0000000000..dbc35c9334
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/processor/ConfigurationPropertyException.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+/**
+ * Exception class thrown by processor factories.
+ */
+public class ConfigurationPropertyException extends RuntimeException {
+ private String processorType;
+ private String processorTag;
+ private String propertyName;
+
+ public ConfigurationPropertyException(String processorType, String processorTag, String propertyName, String message) {
+ super("[" + propertyName + "] " + message);
+ this.processorTag = processorTag;
+ this.processorType = processorType;
+ this.propertyName = propertyName;
+ }
+
+ public ConfigurationPropertyException(String errorMessage) {
+ super(errorMessage);
+ }
+
+ public String getPropertyName() {
+ return propertyName;
+ }
+
+ public String getProcessorType() {
+ return processorType;
+ }
+
+ public String getProcessorTag() {
+ return processorTag;
+ }
+}
+
diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java
new file mode 100644
index 0000000000..213e3ec2c7
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/processor/ConvertProcessor.java
@@ -0,0 +1,145 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.AbstractProcessor;
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.core.ConfigurationUtils;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+
+/**
+ * Processor that converts a field's content to a different type. Supported types are: integer, float, boolean and string.
+ * Throws an exception if the field is missing or the conversion fails.
+ */
+public class ConvertProcessor extends AbstractProcessor {
+
+ enum Type {
+ INTEGER {
+ @Override
+ public Object convert(Object value) {
+ try {
+ return Integer.parseInt(value.toString());
+ } catch(NumberFormatException e) {
+ throw new IllegalArgumentException("unable to convert [" + value + "] to integer", e);
+ }
+
+ }
+ }, FLOAT {
+ @Override
+ public Object convert(Object value) {
+ try {
+ return Float.parseFloat(value.toString());
+ } catch(NumberFormatException e) {
+ throw new IllegalArgumentException("unable to convert [" + value + "] to float", e);
+ }
+ }
+ }, BOOLEAN {
+ @Override
+ public Object convert(Object value) {
+ if (value.toString().equalsIgnoreCase("true")) {
+ return true;
+ } else if (value.toString().equalsIgnoreCase("false")) {
+ return false;
+ } else {
+ throw new IllegalArgumentException("[" + value + "] is not a boolean value, cannot convert to boolean");
+ }
+ }
+ }, STRING {
+ @Override
+ public Object convert(Object value) {
+ return value.toString();
+ }
+ };
+
+ @Override
+ public final String toString() {
+ return name().toLowerCase(Locale.ROOT);
+ }
+
+ public abstract Object convert(Object value);
+
+ public static Type fromString(String type) {
+ try {
+ return Type.valueOf(type.toUpperCase(Locale.ROOT));
+ } catch(IllegalArgumentException e) {
+ throw new IllegalArgumentException("type [" + type + "] not supported, cannot convert field.", e);
+ }
+ }
+ }
+
+ public static final String TYPE = "convert";
+
+ private final String field;
+ private final Type convertType;
+
+ ConvertProcessor(String tag, String field, Type convertType) {
+ super(tag);
+ this.field = field;
+ this.convertType = convertType;
+ }
+
+ String getField() {
+ return field;
+ }
+
+ Type getConvertType() {
+ return convertType;
+ }
+
+ @Override
+ public void execute(IngestDocument document) {
+ Object oldValue = document.getFieldValue(field, Object.class);
+ Object newValue;
+ if (oldValue == null) {
+ throw new IllegalArgumentException("Field [" + field + "] is null, cannot be converted to type [" + convertType + "]");
+ }
+
+ if (oldValue instanceof List) {
+ List<?> list = (List<?>) oldValue;
+ List<Object> newList = new ArrayList<>();
+ for (Object value : list) {
+ newList.add(convertType.convert(value));
+ }
+ newValue = newList;
+ } else {
+ newValue = convertType.convert(oldValue);
+ }
+ document.setFieldValue(field, newValue);
+ }
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ public static class Factory extends AbstractProcessorFactory<ConvertProcessor> {
+ @Override
+ public ConvertProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
+ String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field");
+ Type convertType = Type.fromString(ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "type"));
+ return new ConvertProcessor(processorTag, field, convertType);
+ }
+ }
+}
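Usage sketch, e.g. inside a test method (editorial; `document` stands for an in-scope IngestDocument):

    Map<String, Object> config = new HashMap<>();
    config.put("field", "status");
    config.put("type", "integer");

    ConvertProcessor convert = new ConvertProcessor.Factory().doCreate("convert-status", config);
    convert.execute(document);
    // "404"      -> 404
    // ["1", "2"] -> [1, 2], lists are converted element by element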
diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/DateFormat.java b/core/src/main/java/org/elasticsearch/ingest/processor/DateFormat.java
new file mode 100644
index 0000000000..282b29176b
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/processor/DateFormat.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
+import org.joda.time.format.DateTimeFormat;
+import org.joda.time.format.ISODateTimeFormat;
+
+import java.util.Locale;
+import java.util.function.Function;
+
+enum DateFormat {
+ Iso8601 {
+ @Override
+ Function<String, DateTime> getFunction(String format, DateTimeZone timezone, Locale locale) {
+ return ISODateTimeFormat.dateTimeParser().withZone(timezone)::parseDateTime;
+ }
+ },
+ Unix {
+ @Override
+ Function<String, DateTime> getFunction(String format, DateTimeZone timezone, Locale locale) {
+ // parse as a double: float precision is too coarse for second-scale epoch values
+ return (date) -> new DateTime((long) (Double.parseDouble(date) * 1000), timezone);
+ }
+ },
+ UnixMs {
+ @Override
+ Function<String, DateTime> getFunction(String format, DateTimeZone timezone, Locale locale) {
+ return (date) -> new DateTime(Long.parseLong(date), timezone);
+ }
+ },
+ Tai64n {
+ @Override
+ Function<String, DateTime> getFunction(String format, DateTimeZone timezone, Locale locale) {
+ return (date) -> new DateTime(parseMillis(date), timezone);
+ }
+
+ private long parseMillis(String date) {
+ if (date.startsWith("@")) {
+ date = date.substring(1);
+ }
+ // skip the leading hex digit (the TAI64 2^62 marker); base holds whole seconds,
+ // rest holds nanoseconds, and a fixed 10 second offset converts TAI to Unix time
+ long base = Long.parseLong(date.substring(1, 16), 16);
+ long rest = Long.parseLong(date.substring(16, 24), 16);
+ return ((base * 1000) - 10000) + (rest / 1000000);
+ }
+ },
+ Joda {
+ @Override
+ Function<String, DateTime> getFunction(String format, DateTimeZone timezone, Locale locale) {
+ return DateTimeFormat.forPattern(format)
+ .withDefaultYear((new DateTime(DateTimeZone.UTC)).getYear())
+ .withZone(timezone).withLocale(locale)::parseDateTime;
+ }
+ };
+
+ abstract Function<String, DateTime> getFunction(String format, DateTimeZone timezone, Locale locale);
+
+ static DateFormat fromString(String format) {
+ switch (format) {
+ case "ISO8601":
+ return Iso8601;
+ case "UNIX":
+ return Unix;
+ case "UNIX_MS":
+ return UnixMs;
+ case "TAI64N":
+ return Tai64n;
+ default:
+ return Joda;
+ }
+ }
+}
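A parsing sketch (editorial; the enum is package-private, so this would live in org.elasticsearch.ingest.processor, e.g. in a test):

    Function<String, DateTime> unixMs =
            DateFormat.fromString("UNIX_MS").getFunction(null, DateTimeZone.UTC, Locale.ENGLISH);
    DateTime dateTime = unixMs.apply("1356138046000"); // epoch milliseconds, in UTC

    // any format string other than the four named ones falls through to the Joda pattern parser
    Function<String, DateTime> joda =
            DateFormat.fromString("dd/MM/yyyy").getFunction("dd/MM/yyyy", DateTimeZone.UTC, Locale.ENGLISH);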
diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java
new file mode 100644
index 0000000000..4b08b42a73
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/processor/DateProcessor.java
@@ -0,0 +1,132 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.ingest.core.AbstractProcessor;
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.ingest.core.ConfigurationUtils;
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
+import org.joda.time.format.ISODateTimeFormat;
+
+import java.util.ArrayList;
+import java.util.IllformedLocaleException;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.function.Function;
+
+public final class DateProcessor extends AbstractProcessor {
+
+ public static final String TYPE = "date";
+ static final String DEFAULT_TARGET_FIELD = "@timestamp";
+
+ private final DateTimeZone timezone;
+ private final Locale locale;
+ private final String matchField;
+ private final String targetField;
+ private final List<String> matchFormats;
+ private final List<Function<String, DateTime>> dateParsers;
+
+ DateProcessor(String tag, DateTimeZone timezone, Locale locale, String matchField, List<String> matchFormats, String targetField) {
+ super(tag);
+ this.timezone = timezone;
+ this.locale = locale;
+ this.matchField = matchField;
+ this.targetField = targetField;
+ this.matchFormats = matchFormats;
+ this.dateParsers = new ArrayList<>();
+ for (String matchFormat : matchFormats) {
+ DateFormat dateFormat = DateFormat.fromString(matchFormat);
+ dateParsers.add(dateFormat.getFunction(matchFormat, timezone, locale));
+ }
+ }
+
+ @Override
+ public void execute(IngestDocument ingestDocument) {
+ String value = ingestDocument.getFieldValue(matchField, String.class);
+
+ DateTime dateTime = null;
+ Exception lastException = null;
+ for (Function<String, DateTime> dateParser : dateParsers) {
+ try {
+ dateTime = dateParser.apply(value);
+ } catch (Exception e) {
+ //try the next parser and keep track of the exceptions
+ lastException = ExceptionsHelper.useOrSuppress(lastException, e);
+ }
+ }
+
+ if (dateTime == null) {
+ throw new IllegalArgumentException("unable to parse date [" + value + "]", lastException);
+ }
+
+ ingestDocument.setFieldValue(targetField, ISODateTimeFormat.dateTime().print(dateTime));
+ }
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ DateTimeZone getTimezone() {
+ return timezone;
+ }
+
+ Locale getLocale() {
+ return locale;
+ }
+
+ String getMatchField() {
+ return matchField;
+ }
+
+ String getTargetField() {
+ return targetField;
+ }
+
+ List<String> getMatchFormats() {
+ return matchFormats;
+ }
+
+ public static class Factory extends AbstractProcessorFactory<DateProcessor> {
+
+ @SuppressWarnings("unchecked")
+ public DateProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
+ String matchField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "match_field");
+ String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field", DEFAULT_TARGET_FIELD);
+ String timezoneString = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "timezone");
+ DateTimeZone timezone = timezoneString == null ? DateTimeZone.UTC : DateTimeZone.forID(timezoneString);
+ String localeString = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "locale");
+ Locale locale = Locale.ENGLISH;
+ if (localeString != null) {
+ try {
+ locale = (new Locale.Builder()).setLanguageTag(localeString).build();
+ } catch (IllformedLocaleException e) {
+ throw new IllegalArgumentException("Invalid language tag specified: " + localeString);
+ }
+ }
+ List<String> matchFormats = ConfigurationUtils.readList(TYPE, processorTag, config, "match_formats");
+ return new DateProcessor(processorTag, timezone, locale, matchField, matchFormats, targetField);
+ }
+ }
+}
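Configuration sketch, e.g. inside a test method (editorial; `document` stands for an in-scope IngestDocument):

    Map<String, Object> config = new HashMap<>();
    config.put("match_field", "initial_date");
    config.put("match_formats", Arrays.asList("dd/MM/yyyy HH:mm:ss", "UNIX_MS"));
    config.put("timezone", "Europe/Amsterdam");

    DateProcessor date = new DateProcessor.Factory().doCreate("parse-date", config);
    date.execute(document);
    // the first format that parses wins; the result is written to "@timestamp" as an ISO8601 string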
diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java
new file mode 100644
index 0000000000..62063a49fd
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/processor/DeDotProcessor.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.AbstractProcessor;
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.ingest.core.ConfigurationUtils;
+import org.elasticsearch.ingest.core.IngestDocument;
+
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Processor that replaces dots in document field names with a
+ * specified separator.
+ */
+public class DeDotProcessor extends AbstractProcessor {
+
+ public static final String TYPE = "dedot";
+ static final String DEFAULT_SEPARATOR = "_";
+
+ private final String separator;
+
+ DeDotProcessor(String tag, String separator) {
+ super(tag);
+ this.separator = separator;
+ }
+
+ public String getSeparator() {
+ return separator;
+ }
+
+ @Override
+ public void execute(IngestDocument document) {
+ deDot(document.getSourceAndMetadata());
+ }
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ /**
+ * Recursively iterates through Maps and Lists in search of map entries with
+ * keys containing dots. The dots in these fields are replaced with {@link #separator}.
+ *
+ * @param obj The current object in context to be checked for dots in its fields.
+ */
+ private void deDot(Object obj) {
+ if (obj instanceof Map) {
+ @SuppressWarnings("unchecked")
+ Map<String, Object> doc = (Map<String, Object>) obj;
+ Iterator<Map.Entry<String, Object>> it = doc.entrySet().iterator();
+ Map<String, Object> deDottedFields = new HashMap<>();
+ while (it.hasNext()) {
+ Map.Entry<String, Object> entry = it.next();
+ deDot(entry.getValue());
+ String fieldName = entry.getKey();
+ if (fieldName.contains(".")) {
+ String deDottedFieldName = fieldName.replaceAll("\\.", separator);
+ deDottedFields.put(deDottedFieldName, entry.getValue());
+ it.remove();
+ }
+ }
+ doc.putAll(deDottedFields);
+ } else if (obj instanceof List) {
+ @SuppressWarnings("unchecked")
+ List<Object> list = (List<Object>) obj;
+ for (Object value : list) {
+ deDot(value);
+ }
+ }
+ }
+
+ public static class Factory extends AbstractProcessorFactory<DeDotProcessor> {
+
+ @Override
+ public DeDotProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
+ String separator = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "separator");
+ if (separator == null) {
+ separator = DEFAULT_SEPARATOR;
+ }
+ return new DeDotProcessor(processorTag, separator);
+ }
+ }
+}
+
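
deDot() stages renamed keys in a side map and removes originals through the iterator, so the document map is never structurally modified mid-iteration. A simplified, self-contained sketch of the same traversal, restricted to nested maps (plain HashMaps stand in for the ingest document; list handling is omitted):

    import java.util.HashMap;
    import java.util.Iterator;
    import java.util.Map;

    public class DeDotSketch {
        // replace dots in keys, staging renames so the map is not mutated mid-iteration
        static void deDot(Map<String, Object> doc, String separator) {
            Map<String, Object> renamed = new HashMap<>();
            Iterator<Map.Entry<String, Object>> it = doc.entrySet().iterator();
            while (it.hasNext()) {
                Map.Entry<String, Object> entry = it.next();
                if (entry.getValue() instanceof Map) {
                    @SuppressWarnings("unchecked")
                    Map<String, Object> nested = (Map<String, Object>) entry.getValue();
                    deDot(nested, separator); // recurse before renaming the parent key
                }
                if (entry.getKey().contains(".")) {
                    renamed.put(entry.getKey().replaceAll("\\.", separator), entry.getValue());
                    it.remove(); // removing via the iterator avoids ConcurrentModificationException
                }
            }
            doc.putAll(renamed);
        }

        public static void main(String[] args) {
            Map<String, Object> inner = new HashMap<>();
            inner.put("c.d", 1);
            Map<String, Object> doc = new HashMap<>();
            doc.put("a.b", inner);
            deDot(doc, "_");
            System.out.println(doc); // {a_b={c_d=1}}
        }
    }
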
diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java
new file mode 100644
index 0000000000..86758de862
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/processor/FailProcessor.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.AbstractProcessor;
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.ingest.core.ConfigurationUtils;
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.core.TemplateService;
+
+import java.util.Map;
+
+/**
+ * Processor that raises a runtime exception with a provided
+ * error message.
+ */
+public class FailProcessor extends AbstractProcessor {
+
+ public static final String TYPE = "fail";
+
+ private final TemplateService.Template message;
+
+ FailProcessor(String tag, TemplateService.Template message) {
+ super(tag);
+ this.message = message;
+ }
+
+ public TemplateService.Template getMessage() {
+ return message;
+ }
+
+ @Override
+ public void execute(IngestDocument document) {
+ throw new FailProcessorException(document.renderTemplate(message));
+ }
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ public static class Factory extends AbstractProcessorFactory<FailProcessor> {
+
+ private final TemplateService templateService;
+
+ public Factory(TemplateService templateService) {
+ this.templateService = templateService;
+ }
+
+ @Override
+ public FailProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
+ String message = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "message");
+ return new FailProcessor(processorTag, templateService.compile(message));
+ }
+ }
+}
+
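
FailProcessor aborts the pipeline unconditionally with a message rendered against the document, so a template such as {{order_id}} can surface field values in the error. A minimal stand-in sketch; render() below is a hypothetical substitute for the internal TemplateService, and the field names are invented:

    import java.util.HashMap;
    import java.util.Map;

    public class FailSketch {
        // hypothetical stand-in for TemplateService.Template rendering
        static String render(String template, Map<String, Object> doc) {
            String rendered = template;
            for (Map.Entry<String, Object> entry : doc.entrySet()) {
                rendered = rendered.replace("{{" + entry.getKey() + "}}", String.valueOf(entry.getValue()));
            }
            return rendered;
        }

        public static void main(String[] args) {
            Map<String, Object> doc = new HashMap<>();
            doc.put("order_id", 17);
            try {
                throw new RuntimeException(render("rejected order {{order_id}}", doc));
            } catch (RuntimeException e) {
                System.out.println(e.getMessage()); // rejected order 17
            }
        }
    }
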
diff --git a/core/src/main/java/org/elasticsearch/common/HasHeaders.java b/core/src/main/java/org/elasticsearch/ingest/processor/FailProcessorException.java
index ab3a7da643..bfdfe11178 100644
--- a/core/src/main/java/org/elasticsearch/common/HasHeaders.java
+++ b/core/src/main/java/org/elasticsearch/ingest/processor/FailProcessorException.java
@@ -17,22 +17,19 @@
* under the License.
*/
-package org.elasticsearch.common;
-
-import java.util.Set;
+package org.elasticsearch.ingest.processor;
/**
+ * Exception class thrown by {@link FailProcessor}.
*
+ * This exception is caught by the {@link org.elasticsearch.ingest.core.CompoundProcessor}, which
+ * then changes the state of the {@link org.elasticsearch.ingest.core.IngestDocument}. This
+ * exception should get serialized.
*/
-public interface HasHeaders {
-
- <V> void putHeader(String key, V value);
-
- <V> V getHeader(String key);
+public class FailProcessorException extends RuntimeException {
- boolean hasHeader(String key);
-
- Set<String> getHeaders();
-
- void copyHeadersFrom(HasHeaders from);
+ public FailProcessorException(String message) {
+ super(message);
+ }
}
+
diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java
new file mode 100644
index 0000000000..1118ed6b95
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/processor/GsubProcessor.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.AbstractProcessor;
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.core.ConfigurationUtils;
+
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Processor that searches for patterns in the content of a field and replaces them with the corresponding replacement string.
+ * Supports fields of string type only; throws an exception if a field is of a different type.
+ */
+public class GsubProcessor extends AbstractProcessor {
+
+ public static final String TYPE = "gsub";
+
+ private final String field;
+ private final Pattern pattern;
+ private final String replacement;
+
+ GsubProcessor(String tag, String field, Pattern pattern, String replacement) {
+ super(tag);
+ this.field = field;
+ this.pattern = pattern;
+ this.replacement = replacement;
+ }
+
+ String getField() {
+ return field;
+ }
+
+ Pattern getPattern() {
+ return pattern;
+ }
+
+ String getReplacement() {
+ return replacement;
+ }
+
+ @Override
+ public void execute(IngestDocument document) {
+ String oldVal = document.getFieldValue(field, String.class);
+ if (oldVal == null) {
+ throw new IllegalArgumentException("field [" + field + "] is null, cannot match pattern.");
+ }
+ Matcher matcher = pattern.matcher(oldVal);
+ String newVal = matcher.replaceAll(replacement);
+ document.setFieldValue(field, newVal);
+ }
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ public static class Factory extends AbstractProcessorFactory<GsubProcessor> {
+ @Override
+ public GsubProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
+ String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field");
+ String pattern = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "pattern");
+ String replacement = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "replacement");
+ Pattern searchPattern = Pattern.compile(pattern);
+ return new GsubProcessor(processorTag, field, searchPattern, replacement);
+ }
+ }
+}
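
Since the factory compiles the pattern once, Matcher.replaceAll supports the usual java.util.regex group references in the replacement string. A small illustration (the field value and pattern are made up):

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class GsubSketch {
        public static void main(String[] args) {
            Pattern pattern = Pattern.compile("(\\d{3})-(\\d{4})"); // compiled once, reused per document
            Matcher matcher = pattern.matcher("phone: 555-1234");
            System.out.println(matcher.replaceAll("$1.$2")); // phone: 555.1234
        }
    }
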
diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java
new file mode 100644
index 0000000000..813c42a296
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/processor/JoinProcessor.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.AbstractProcessor;
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.core.ConfigurationUtils;
+
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * Processor that joins the items of an array into a single string value, using a separator between each item.
+ * Throws an exception if the specified field is not an array.
+ */
+public class JoinProcessor extends AbstractProcessor {
+
+ public static final String TYPE = "join";
+
+ private final String field;
+ private final String separator;
+
+ JoinProcessor(String tag, String field, String separator) {
+ super(tag);
+ this.field = field;
+ this.separator = separator;
+ }
+
+ String getField() {
+ return field;
+ }
+
+ String getSeparator() {
+ return separator;
+ }
+
+ @Override
+ public void execute(IngestDocument document) {
+ List<?> list = document.getFieldValue(field, List.class);
+ if (list == null) {
+ throw new IllegalArgumentException("field [" + field + "] is null, cannot join.");
+ }
+ String joined = list.stream()
+ .map(Object::toString)
+ .collect(Collectors.joining(separator));
+ document.setFieldValue(field, joined);
+ }
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ public static class Factory extends AbstractProcessorFactory<JoinProcessor> {
+ @Override
+ public JoinProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
+ String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field");
+ String separator = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "separator");
+ return new JoinProcessor(processorTag, field, separator);
+ }
+ }
+}
+
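
The join itself is a single stream pipeline; any element type works because each item is rendered through Object::toString before concatenation. A minimal sketch with assumed sample data:

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    public class JoinSketch {
        public static void main(String[] args) {
            List<Object> items = Arrays.asList(1, "two", 3.5);
            String joined = items.stream()
                    .map(Object::toString) // render each element, whatever its type
                    .collect(Collectors.joining("-"));
            System.out.println(joined); // 1-two-3.5
        }
    }
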
diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/LowercaseProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/LowercaseProcessor.java
new file mode 100644
index 0000000000..0931e5d77d
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/processor/LowercaseProcessor.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import java.util.Locale;
+
+/**
+ * Processor that converts the content of string fields to lowercase.
+ * Throws an exception if the field is not of type string.
+ */
+public class LowercaseProcessor extends AbstractStringProcessor {
+
+ public static final String TYPE = "lowercase";
+
+ LowercaseProcessor(String processorTag, String field) {
+ super(processorTag, field);
+ }
+
+ @Override
+ protected String process(String value) {
+ return value.toLowerCase(Locale.ROOT);
+ }
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ public static class Factory extends AbstractStringProcessor.Factory<LowercaseProcessor> {
+
+ public Factory() {
+ super(TYPE);
+ }
+
+ @Override
+ protected LowercaseProcessor newProcessor(String tag, String field) {
+ return new LowercaseProcessor(tag, field);
+ }
+ }
+}
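
Pinning Locale.ROOT here is deliberate: String.toLowerCase(Locale) is locale-sensitive, and under a Turkish locale an uppercase 'I' lowercases to the dotless 'ı', which would corrupt field values on such JVMs. A short demonstration:

    import java.util.Locale;

    public class LowercaseLocaleSketch {
        public static void main(String[] args) {
            String value = "TITLE";
            System.out.println(value.toLowerCase(Locale.ROOT));            // title
            // on a Turkish locale, 'I' maps to the dotless 'ı' instead of 'i'
            System.out.println(value.toLowerCase(new Locale("tr", "TR"))); // tıtle
        }
    }
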
diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java
new file mode 100644
index 0000000000..489822867c
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/processor/RemoveProcessor.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.AbstractProcessor;
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.core.TemplateService;
+import org.elasticsearch.ingest.core.ConfigurationUtils;
+
+import java.util.Map;
+
+/**
+ * Processor that removes existing fields. Nothing happens if the field is not present.
+ */
+public class RemoveProcessor extends AbstractProcessor {
+
+ public static final String TYPE = "remove";
+
+ private final TemplateService.Template field;
+
+ RemoveProcessor(String tag, TemplateService.Template field) {
+ super(tag);
+ this.field = field;
+ }
+
+ public TemplateService.Template getField() {
+ return field;
+ }
+
+ @Override
+ public void execute(IngestDocument document) {
+ document.removeField(field);
+ }
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ public static class Factory extends AbstractProcessorFactory<RemoveProcessor> {
+
+ private final TemplateService templateService;
+
+ public Factory(TemplateService templateService) {
+ this.templateService = templateService;
+ }
+
+ @Override
+ public RemoveProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
+ String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field");
+ return new RemoveProcessor(processorTag, templateService.compile(field));
+ }
+ }
+}
+
diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java
new file mode 100644
index 0000000000..8e19c4ecc1
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/processor/RenameProcessor.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.AbstractProcessor;
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.core.ConfigurationUtils;
+
+import java.util.Map;
+
+/**
+ * Processor that renames existing fields. Throws an exception if the field is not present.
+ */
+public class RenameProcessor extends AbstractProcessor {
+
+ public static final String TYPE = "rename";
+
+ private final String oldFieldName;
+ private final String newFieldName;
+
+ RenameProcessor(String tag, String oldFieldName, String newFieldName) {
+ super(tag);
+ this.oldFieldName = oldFieldName;
+ this.newFieldName = newFieldName;
+ }
+
+ String getOldFieldName() {
+ return oldFieldName;
+ }
+
+ String getNewFieldName() {
+ return newFieldName;
+ }
+
+ @Override
+ public void execute(IngestDocument document) {
+ if (document.hasField(oldFieldName) == false) {
+ throw new IllegalArgumentException("field [" + oldFieldName + "] doesn't exist");
+ }
+ if (document.hasField(newFieldName)) {
+ throw new IllegalArgumentException("field [" + newFieldName + "] already exists");
+ }
+
+ Object oldValue = document.getFieldValue(oldFieldName, Object.class);
+ document.setFieldValue(newFieldName, oldValue);
+ try {
+ document.removeField(oldFieldName);
+ } catch (Exception e) {
+ //remove the new field if the removal of the old one failed
+ document.removeField(newFieldName);
+ throw e;
+ }
+ }
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ public static class Factory extends AbstractProcessorFactory<RenameProcessor> {
+ @Override
+ public RenameProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
+ String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field");
+ String newField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "to");
+ return new RenameProcessor(processorTag, field, newField);
+ }
+ }
+}
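
execute() writes the new field first and removes the old one afterwards, undoing the write if the removal fails, so a failure never leaves the value under both names. The same shape over a plain map (with a HashMap the removal cannot actually fail; the rollback is shown for structure only):

    import java.util.HashMap;
    import java.util.Map;

    public class RenameSketch {
        static void rename(Map<String, Object> doc, String oldName, String newName) {
            if (doc.containsKey(oldName) == false) {
                throw new IllegalArgumentException("field [" + oldName + "] doesn't exist");
            }
            if (doc.containsKey(newName)) {
                throw new IllegalArgumentException("field [" + newName + "] already exists");
            }
            doc.put(newName, doc.get(oldName));
            try {
                doc.remove(oldName);
            } catch (RuntimeException e) {
                doc.remove(newName); // roll back so the document is left unchanged
                throw e;
            }
        }

        public static void main(String[] args) {
            Map<String, Object> doc = new HashMap<>();
            doc.put("old", 42);
            rename(doc, "old", "new");
            System.out.println(doc); // {new=42}
        }
    }
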
diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java
new file mode 100644
index 0000000000..d150016cf9
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/processor/SetProcessor.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.AbstractProcessor;
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.core.TemplateService;
+import org.elasticsearch.ingest.core.ValueSource;
+import org.elasticsearch.ingest.core.ConfigurationUtils;
+
+import java.util.Map;
+
+/**
+ * Processor that adds new fields with their corresponding values. If the field is already present, its value
+ * will be replaced with the provided one.
+ */
+public class SetProcessor extends AbstractProcessor {
+
+ public static final String TYPE = "set";
+
+ private final TemplateService.Template field;
+ private final ValueSource value;
+
+ SetProcessor(String tag, TemplateService.Template field, ValueSource value) {
+ super(tag);
+ this.field = field;
+ this.value = value;
+ }
+
+ public TemplateService.Template getField() {
+ return field;
+ }
+
+ public ValueSource getValue() {
+ return value;
+ }
+
+ @Override
+ public void execute(IngestDocument document) {
+ document.setFieldValue(field, value);
+ }
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ public static final class Factory extends AbstractProcessorFactory<SetProcessor> {
+
+ private final TemplateService templateService;
+
+ public Factory(TemplateService templateService) {
+ this.templateService = templateService;
+ }
+
+ @Override
+ public SetProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
+ String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field");
+ Object value = ConfigurationUtils.readObject(TYPE, processorTag, config, "value");
+ return new SetProcessor(processorTag, templateService.compile(field), ValueSource.wrap(value, templateService));
+ }
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java
new file mode 100644
index 0000000000..2ecaad1a7d
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/processor/SplitProcessor.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.AbstractProcessor;
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.ingest.core.ConfigurationUtils;
+import org.elasticsearch.ingest.core.IngestDocument;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Processor that splits a field's content into separate items based on the occurrence of a specified separator.
+ * The new field value will be an array containing all of the extracted items.
+ * Throws an exception if the field is null or of a type other than string.
+ */
+public class SplitProcessor extends AbstractProcessor {
+
+ public static final String TYPE = "split";
+
+ private final String field;
+ private final String separator;
+
+ SplitProcessor(String tag, String field, String separator) {
+ super(tag);
+ this.field = field;
+ this.separator = separator;
+ }
+
+ String getField() {
+ return field;
+ }
+
+ String getSeparator() {
+ return separator;
+ }
+
+ @Override
+ public void execute(IngestDocument document) {
+ String oldVal = document.getFieldValue(field, String.class);
+ if (oldVal == null) {
+ throw new IllegalArgumentException("field [" + field + "] is null, cannot split.");
+ }
+ String[] strings = oldVal.split(separator);
+ List<String> splitList = new ArrayList<>(strings.length);
+ Collections.addAll(splitList, strings);
+ document.setFieldValue(field, splitList);
+ }
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ public static class Factory extends AbstractProcessorFactory<SplitProcessor> {
+ @Override
+ public SplitProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
+ String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field");
+ return new SplitProcessor(processorTag, field, ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "separator"));
+ }
+ }
+}
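
One caveat worth noting: String.split treats the separator as a regular expression, so configurations that split on a literal dot or pipe must escape it. A quick illustration:

    import java.util.Arrays;

    public class SplitSketch {
        public static void main(String[] args) {
            String value = "a.b.c";
            // "." as a regex matches every character, leaving no tokens at all
            System.out.println(Arrays.toString(value.split(".")));   // []
            System.out.println(Arrays.toString(value.split("\\."))); // [a, b, c]
        }
    }
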
diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/TrimProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/TrimProcessor.java
new file mode 100644
index 0000000000..7de309b51c
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/processor/TrimProcessor.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+/**
+ * Processor that trims the content of string fields.
+ * Throws an exception if the field is not of type string.
+ */
+public class TrimProcessor extends AbstractStringProcessor {
+
+ public static final String TYPE = "trim";
+
+ TrimProcessor(String processorTag, String field) {
+ super(processorTag, field);
+ }
+
+ @Override
+ protected String process(String value) {
+ return value.trim();
+ }
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ public static class Factory extends AbstractStringProcessor.Factory<TrimProcessor> {
+
+ public Factory() {
+ super(TYPE);
+ }
+
+ @Override
+ protected TrimProcessor newProcessor(String tag, String field) {
+ return new TrimProcessor(tag, field);
+ }
+ }
+}
+
diff --git a/core/src/main/java/org/elasticsearch/ingest/processor/UppercaseProcessor.java b/core/src/main/java/org/elasticsearch/ingest/processor/UppercaseProcessor.java
new file mode 100644
index 0000000000..7b10d02279
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/ingest/processor/UppercaseProcessor.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import java.util.Locale;
+
+/**
+ * Processor that converts the content of string fields to uppercase.
+ * Throws an exception if the field is not of type string.
+ */
+public class UppercaseProcessor extends AbstractStringProcessor {
+
+ public static final String TYPE = "uppercase";
+
+ UppercaseProcessor(String processorTag, String field) {
+ super(processorTag, field);
+ }
+
+ @Override
+ protected String process(String value) {
+ return value.toUpperCase(Locale.ROOT);
+ }
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ public static class Factory extends AbstractStringProcessor.Factory<UppercaseProcessor> {
+
+ public Factory() {
+ super(TYPE);
+ }
+
+ @Override
+ protected UppercaseProcessor newProcessor(String tag, String field) {
+ return new UppercaseProcessor(tag, field);
+ }
+ }
+}
+
diff --git a/core/src/main/java/org/elasticsearch/monitor/MonitorService.java b/core/src/main/java/org/elasticsearch/monitor/MonitorService.java
index 15af2cb0a7..cf033e54d7 100644
--- a/core/src/main/java/org/elasticsearch/monitor/MonitorService.java
+++ b/core/src/main/java/org/elasticsearch/monitor/MonitorService.java
@@ -23,7 +23,7 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.monitor.fs.FsService;
-import org.elasticsearch.monitor.jvm.JvmMonitorService;
+import org.elasticsearch.monitor.jvm.JvmGcMonitorService;
import org.elasticsearch.monitor.jvm.JvmService;
import org.elasticsearch.monitor.os.OsService;
import org.elasticsearch.monitor.process.ProcessService;
@@ -36,7 +36,7 @@ import java.io.IOException;
*/
public class MonitorService extends AbstractLifecycleComponent<MonitorService> {
- private final JvmMonitorService jvmMonitorService;
+ private final JvmGcMonitorService jvmGcMonitorService;
private final OsService osService;
@@ -48,7 +48,7 @@ public class MonitorService extends AbstractLifecycleComponent<MonitorService> {
public MonitorService(Settings settings, NodeEnvironment nodeEnvironment, ThreadPool threadPool) throws IOException {
super(settings);
- this.jvmMonitorService = new JvmMonitorService(settings, threadPool);
+ this.jvmGcMonitorService = new JvmGcMonitorService(settings, threadPool);
this.osService = new OsService(settings);
this.processService = new ProcessService(settings);
this.jvmService = new JvmService(settings);
@@ -73,16 +73,16 @@ public class MonitorService extends AbstractLifecycleComponent<MonitorService> {
@Override
protected void doStart() {
- jvmMonitorService.start();
+ jvmGcMonitorService.start();
}
@Override
protected void doStop() {
- jvmMonitorService.stop();
+ jvmGcMonitorService.stop();
}
@Override
protected void doClose() {
- jvmMonitorService.close();
+ jvmGcMonitorService.close();
}
}
diff --git a/core/src/main/java/org/elasticsearch/monitor/fs/FsService.java b/core/src/main/java/org/elasticsearch/monitor/fs/FsService.java
index 7019ec48e0..99a78f13a0 100644
--- a/core/src/main/java/org/elasticsearch/monitor/fs/FsService.java
+++ b/core/src/main/java/org/elasticsearch/monitor/fs/FsService.java
@@ -20,6 +20,7 @@
package org.elasticsearch.monitor.fs;
import org.elasticsearch.common.component.AbstractComponent;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.SingleObjectCache;
@@ -35,10 +36,13 @@ public class FsService extends AbstractComponent {
private final SingleObjectCache<FsInfo> fsStatsCache;
+ public final static Setting<TimeValue> REFRESH_INTERVAL_SETTING =
+ Setting.timeSetting("monitor.fs.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), false, Setting.Scope.CLUSTER);
+
public FsService(Settings settings, NodeEnvironment nodeEnvironment) throws IOException {
super(settings);
this.probe = new FsProbe(settings, nodeEnvironment);
- TimeValue refreshInterval = settings.getAsTime("monitor.fs.refresh_interval", TimeValue.timeValueSeconds(1));
+ TimeValue refreshInterval = REFRESH_INTERVAL_SETTING.get(settings);
fsStatsCache = new FsInfoCache(refreshInterval, probe.stats());
logger.debug("Using probe [{}] with refresh_interval [{}]", probe, refreshInterval);
}
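
The pattern in this hunk recurs across the monitor services: the stringly-typed settings.getAsTime lookup becomes a typed Setting constant that carries the key, default, minimum, and scope, and validates on read. A hedged usage sketch against the Setting API as it appears in this changeset (not a general-purpose Elasticsearch API):

    import org.elasticsearch.common.settings.Setting;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.common.unit.TimeValue;

    public class SettingSketch {
        // key, default, minimum (lower values are rejected), dynamic?, scope
        static final Setting<TimeValue> REFRESH_INTERVAL_SETTING =
            Setting.timeSetting("monitor.fs.refresh_interval", TimeValue.timeValueSeconds(1),
                TimeValue.timeValueSeconds(1), false, Setting.Scope.CLUSTER);

        public static void main(String[] args) {
            Settings settings = Settings.settingsBuilder()
                .put("monitor.fs.refresh_interval", "5s")
                .build();
            System.out.println(REFRESH_INTERVAL_SETTING.get(settings)); // 5s
        }
    }
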
diff --git a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmMonitorService.java b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmGcMonitorService.java
index 8d83435bb9..97c813a0fe 100644
--- a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmMonitorService.java
+++ b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmGcMonitorService.java
@@ -20,6 +20,8 @@
package org.elasticsearch.monitor.jvm;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Setting.Scope;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.FutureUtils;
@@ -31,13 +33,12 @@ import java.util.Map;
import java.util.concurrent.ScheduledFuture;
import static java.util.Collections.unmodifiableMap;
-import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;
import static org.elasticsearch.monitor.jvm.JvmStats.jvmStats;
/**
*
*/
-public class JvmMonitorService extends AbstractLifecycleComponent<JvmMonitorService> {
+public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitorService> {
private final ThreadPool threadPool;
private final boolean enabled;
@@ -46,6 +47,13 @@ public class JvmMonitorService extends AbstractLifecycleComponent<JvmMonitorServ
private volatile ScheduledFuture scheduledFuture;
+ public final static Setting<Boolean> ENABLED_SETTING = Setting.boolSetting("monitor.jvm.gc.enabled", true, false, Scope.CLUSTER);
+ public final static Setting<TimeValue> REFRESH_INTERVAL_SETTING =
+ Setting.timeSetting("monitor.jvm.gc.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), false, Scope.CLUSTER);
+
+ private static final String GC_COLLECTOR_PREFIX = "monitor.jvm.gc.collector.";
+ public final static Setting<Settings> GC_SETTING = Setting.groupSetting(GC_COLLECTOR_PREFIX, false, Scope.CLUSTER);
+
static class GcThreshold {
public final String name;
public final long warnThreshold;
@@ -70,25 +78,21 @@ public class JvmMonitorService extends AbstractLifecycleComponent<JvmMonitorServ
}
}
- public JvmMonitorService(Settings settings, ThreadPool threadPool) {
+ public JvmGcMonitorService(Settings settings, ThreadPool threadPool) {
super(settings);
this.threadPool = threadPool;
- this.enabled = this.settings.getAsBoolean("monitor.jvm.enabled", true);
- this.interval = this.settings.getAsTime("monitor.jvm.interval", timeValueSeconds(1));
+ this.enabled = ENABLED_SETTING.get(settings);
+ this.interval = REFRESH_INTERVAL_SETTING.get(settings);
Map<String, GcThreshold> gcThresholds = new HashMap<>();
- Map<String, Settings> gcThresholdGroups = this.settings.getGroups("monitor.jvm.gc");
+ Map<String, Settings> gcThresholdGroups = GC_SETTING.get(settings).getAsGroups();
for (Map.Entry<String, Settings> entry : gcThresholdGroups.entrySet()) {
String name = entry.getKey();
- TimeValue warn = entry.getValue().getAsTime("warn", null);
- TimeValue info = entry.getValue().getAsTime("info", null);
- TimeValue debug = entry.getValue().getAsTime("debug", null);
- if (warn == null || info == null || debug == null) {
- logger.warn("ignoring gc_threshold for [{}], missing warn/info/debug values", name);
- } else {
- gcThresholds.put(name, new GcThreshold(name, warn.millis(), info.millis(), debug.millis()));
- }
+ TimeValue warn = getValidThreshold(entry.getValue(), entry.getKey(), "warn");
+ TimeValue info = getValidThreshold(entry.getValue(), entry.getKey(), "info");
+ TimeValue debug = getValidThreshold(entry.getValue(), entry.getKey(), "debug");
+ gcThresholds.put(name, new GcThreshold(name, warn.millis(), info.millis(), debug.millis()));
}
gcThresholds.putIfAbsent(GcNames.YOUNG, new GcThreshold(GcNames.YOUNG, 1000, 700, 400));
gcThresholds.putIfAbsent(GcNames.OLD, new GcThreshold(GcNames.OLD, 10000, 5000, 2000));
@@ -98,6 +102,21 @@ public class JvmMonitorService extends AbstractLifecycleComponent<JvmMonitorServ
logger.debug("enabled [{}], interval [{}], gc_threshold [{}]", enabled, interval, this.gcThresholds);
}
+ private static TimeValue getValidThreshold(Settings settings, String key, String level) {
+ TimeValue threshold = settings.getAsTime(level, null);
+ if (threshold == null) {
+ throw new IllegalArgumentException("missing gc_threshold for [" + getThresholdName(key, level) + "]");
+ }
+ if (threshold.nanos() <= 0) {
+ throw new IllegalArgumentException("invalid gc_threshold [" + threshold + "] for [" + getThresholdName(key, level) + "]");
+ }
+ return threshold;
+ }
+
+ private static String getThresholdName(String key, String level) {
+ return GC_COLLECTOR_PREFIX + key + "." + level;
+ }
+
@Override
protected void doStart() {
if (!enabled) {
diff --git a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmService.java b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmService.java
index 1a6f53a1ec..fbec6cda16 100644
--- a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmService.java
+++ b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmService.java
@@ -20,6 +20,7 @@
package org.elasticsearch.monitor.jvm;
import org.elasticsearch.common.component.AbstractComponent;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
@@ -34,12 +35,15 @@ public class JvmService extends AbstractComponent {
private JvmStats jvmStats;
+ public final static Setting<TimeValue> REFRESH_INTERVAL_SETTING =
+ Setting.timeSetting("monitor.jvm.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), false, Setting.Scope.CLUSTER);
+
public JvmService(Settings settings) {
super(settings);
this.jvmInfo = JvmInfo.jvmInfo();
this.jvmStats = JvmStats.jvmStats();
- this.refreshInterval = this.settings.getAsTime("refresh_interval", TimeValue.timeValueSeconds(1));
+ this.refreshInterval = REFRESH_INTERVAL_SETTING.get(settings);
logger.debug("Using refresh_interval [{}]", refreshInterval);
}
diff --git a/core/src/main/java/org/elasticsearch/monitor/os/OsProbe.java b/core/src/main/java/org/elasticsearch/monitor/os/OsProbe.java
index 077b4218fa..5ee2232068 100644
--- a/core/src/main/java/org/elasticsearch/monitor/os/OsProbe.java
+++ b/core/src/main/java/org/elasticsearch/monitor/os/OsProbe.java
@@ -119,12 +119,15 @@ public class OsProbe {
}
// fallback
}
+ if (Constants.WINDOWS) {
+ return null;
+ }
if (getSystemLoadAverage == null) {
return null;
}
try {
double oneMinuteLoadAverage = (double) getSystemLoadAverage.invoke(osMxBean);
- return new double[] { oneMinuteLoadAverage, -1, -1 };
+ return new double[] { oneMinuteLoadAverage >= 0 ? oneMinuteLoadAverage : -1, -1, -1 };
} catch (Throwable t) {
return null;
}
diff --git a/core/src/main/java/org/elasticsearch/monitor/os/OsService.java b/core/src/main/java/org/elasticsearch/monitor/os/OsService.java
index dc1ecb643a..5f836c6f92 100644
--- a/core/src/main/java/org/elasticsearch/monitor/os/OsService.java
+++ b/core/src/main/java/org/elasticsearch/monitor/os/OsService.java
@@ -20,6 +20,7 @@
package org.elasticsearch.monitor.os;
import org.elasticsearch.common.component.AbstractComponent;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.SingleObjectCache;
@@ -36,11 +37,14 @@ public class OsService extends AbstractComponent {
private SingleObjectCache<OsStats> osStatsCache;
+ public final static Setting<TimeValue> REFRESH_INTERVAL_SETTING =
+ Setting.timeSetting("monitor.os.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), false, Setting.Scope.CLUSTER);
+
public OsService(Settings settings) {
super(settings);
this.probe = OsProbe.getInstance();
- TimeValue refreshInterval = settings.getAsTime("monitor.os.refresh_interval", TimeValue.timeValueSeconds(1));
+ TimeValue refreshInterval = REFRESH_INTERVAL_SETTING.get(settings);
this.info = probe.osInfo();
this.info.refreshInterval = refreshInterval.millis();
diff --git a/core/src/main/java/org/elasticsearch/monitor/os/OsStats.java b/core/src/main/java/org/elasticsearch/monitor/os/OsStats.java
index c419c4f260..569f8825aa 100644
--- a/core/src/main/java/org/elasticsearch/monitor/os/OsStats.java
+++ b/core/src/main/java/org/elasticsearch/monitor/os/OsStats.java
@@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import java.io.IOException;
+import java.util.Arrays;
/**
*
@@ -89,7 +90,7 @@ public class OsStats implements Streamable, ToXContent {
if (cpu != null) {
builder.startObject(Fields.CPU);
builder.field(Fields.PERCENT, cpu.getPercent());
- if (cpu.getLoadAverage() != null) {
+ if (cpu.getLoadAverage() != null && Arrays.stream(cpu.getLoadAverage()).anyMatch(load -> load != -1)) {
builder.startObject(Fields.LOAD_AVERAGE);
if (cpu.getLoadAverage()[0] != -1) {
builder.field(Fields.LOAD_AVERAGE_1M, cpu.getLoadAverage()[0]);
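With the probe now reporting -1 for the unavailable five and fifteen minute slots, the load_average object is only serialized when at least one slot holds a real value. A condensed sketch of the new guard (sample values are made up):

    import java.util.Arrays;

    public class LoadAverageSketch {
        public static void main(String[] args) {
            // only the one-minute average is known; the other slots carry the -1 sentinel
            double[] loadAverage = {0.42, -1, -1};
            if (Arrays.stream(loadAverage).anyMatch(load -> load != -1)) {
                System.out.println("load_average.1m = " + loadAverage[0]);
            }
        }
    }
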
diff --git a/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java b/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java
index 0861dfe5b0..9e3283af4f 100644
--- a/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java
+++ b/core/src/main/java/org/elasticsearch/monitor/process/ProcessService.java
@@ -20,6 +20,7 @@
package org.elasticsearch.monitor.process;
import org.elasticsearch.common.component.AbstractComponent;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.SingleObjectCache;
@@ -33,11 +34,14 @@ public final class ProcessService extends AbstractComponent {
private final ProcessInfo info;
private final SingleObjectCache<ProcessStats> processStatsCache;
+ public final static Setting<TimeValue> REFRESH_INTERVAL_SETTING =
+ Setting.timeSetting("monitor.process.refresh_interval", TimeValue.timeValueSeconds(1), TimeValue.timeValueSeconds(1), false, Setting.Scope.CLUSTER);
+
public ProcessService(Settings settings) {
super(settings);
this.probe = ProcessProbe.getInstance();
- final TimeValue refreshInterval = settings.getAsTime("monitor.process.refresh_interval", TimeValue.timeValueSeconds(1));
+ final TimeValue refreshInterval = REFRESH_INTERVAL_SETTING.get(settings);
processStatsCache = new ProcessStatsCache(refreshInterval, probe.processStats());
this.info = probe.processInfo();
this.info.refreshInterval = refreshInterval.millis();
diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java
index c5cf53defa..542039fe22 100644
--- a/core/src/main/java/org/elasticsearch/node/Node.java
+++ b/core/src/main/java/org/elasticsearch/node/Node.java
@@ -19,6 +19,7 @@
package org.elasticsearch.node;
+import org.apache.lucene.util.IOUtils;
import org.elasticsearch.Build;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
@@ -30,6 +31,7 @@ import org.elasticsearch.cluster.ClusterModule;
import org.elasticsearch.cluster.ClusterNameModule;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
+import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.RoutingService;
import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.component.Lifecycle;
@@ -46,6 +48,7 @@ import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.ClusterSettings;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.common.settings.SettingsModule;
@@ -75,6 +78,7 @@ import org.elasticsearch.indices.ttl.IndicesTTLService;
import org.elasticsearch.monitor.MonitorService;
import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
+import org.elasticsearch.node.service.NodeService;
import org.elasticsearch.percolator.PercolatorModule;
import org.elasticsearch.percolator.PercolatorService;
import org.elasticsearch.plugins.Plugin;
@@ -97,6 +101,7 @@ import org.elasticsearch.watcher.ResourceWatcherModule;
import org.elasticsearch.watcher.ResourceWatcherService;
import java.io.BufferedWriter;
+import java.io.Closeable;
import java.io.IOException;
import java.net.Inet6Address;
import java.net.InetAddress;
@@ -105,10 +110,13 @@ import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
+import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
+import java.util.List;
import java.util.concurrent.TimeUnit;
+import java.util.function.Function;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
@@ -116,10 +124,17 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder;
* A node represent a node within a cluster (<tt>cluster.name</tt>). The {@link #client()} can be used
* in order to use a {@link Client} to perform actions/operations against the cluster.
*/
-public class Node implements Releasable {
+public class Node implements Closeable {
+
+ public static final Setting<Boolean> WRITE_PORTS_FIELD_SETTING = Setting.boolSetting("node.portsfile", false, false, Setting.Scope.CLUSTER);
+ public static final Setting<Boolean> NODE_CLIENT_SETTING = Setting.boolSetting("node.client", false, false, Setting.Scope.CLUSTER);
+ public static final Setting<Boolean> NODE_DATA_SETTING = Setting.boolSetting("node.data", true, false, Setting.Scope.CLUSTER);
+ public static final Setting<Boolean> NODE_MASTER_SETTING = Setting.boolSetting("node.master", true, false, Setting.Scope.CLUSTER);
+ public static final Setting<Boolean> NODE_LOCAL_SETTING = Setting.boolSetting("node.local", false, false, Setting.Scope.CLUSTER);
+ public static final Setting<String> NODE_MODE_SETTING = new Setting<>("node.mode", "network", Function.identity(), false, Setting.Scope.CLUSTER);
+ public static final Setting<Boolean> NODE_INGEST_SETTING = Setting.boolSetting("node.ingest", true, false, Setting.Scope.CLUSTER);
private static final String CLIENT_TYPE = "node";
- public static final String HTTP_ENABLED = "http.enabled";
private final Lifecycle lifecycle = new Lifecycle();
private final Injector injector;
private final Settings settings;
@@ -138,7 +153,7 @@ public class Node implements Releasable {
protected Node(Environment tmpEnv, Version version, Collection<Class<? extends Plugin>> classpathPlugins) {
Settings tmpSettings = settingsBuilder().put(tmpEnv.settings())
- .put(Client.CLIENT_TYPE_SETTING, CLIENT_TYPE).build();
+ .put(Client.CLIENT_TYPE_SETTING_S.getKey(), CLIENT_TYPE).build();
tmpSettings = TribeService.processSettings(tmpSettings);
ESLogger logger = Loggers.getLogger(Node.class, tmpSettings.get("name"));
@@ -177,11 +192,12 @@ public class Node implements Releasable {
modules.add(pluginModule);
}
modules.add(new PluginsModule(pluginsService));
- modules.add(new SettingsModule(this.settings, settingsFilter));
+ SettingsModule settingsModule = new SettingsModule(this.settings, settingsFilter);
+ modules.add(settingsModule);
modules.add(new EnvironmentModule(environment));
modules.add(new NodeModule(this, monitorService));
modules.add(new NetworkModule(networkService, settings, false, namedWriteableRegistry));
- modules.add(new ScriptModule(this.settings));
+ modules.add(new ScriptModule(settingsModule));
modules.add(new NodeEnvironmentModule(nodeEnvironment));
modules.add(new ClusterNameModule(this.settings));
modules.add(new ThreadPoolModule(threadPool));
@@ -189,7 +205,7 @@ public class Node implements Releasable {
modules.add(new ClusterModule(this.settings));
modules.add(new IndicesModule());
modules.add(new SearchModule(settings, namedWriteableRegistry));
- modules.add(new ActionModule(false));
+ modules.add(new ActionModule(DiscoveryNode.ingestNode(settings), false));
modules.add(new GatewayModule(settings));
modules.add(new NodeClientModule());
modules.add(new PercolatorModule());
@@ -232,6 +248,13 @@ public class Node implements Releasable {
}
/**
+ * Returns the environment of the node
+ */
+ public Environment getEnvironment() {
+ return environment;
+ }
+
+ /**
* Start the node. If the node is already started, this method is no-op.
*/
public Node start() {
@@ -275,7 +298,7 @@ public class Node implements Releasable {
injector.getInstance(ResourceWatcherService.class).start();
injector.getInstance(TribeService.class).start();
- if (System.getProperty("es.tests.portsfile", "false").equals("true")) {
+ if (WRITE_PORTS_FIELD_SETTING.get(settings)) {
if (settings.getAsBoolean("http.enabled", true)) {
HttpServerTransport http = injector.getInstance(HttpServerTransport.class);
writePortsFile("http", http.boundAddress());
@@ -332,7 +355,7 @@ public class Node implements Releasable {
// If not, the hook that is added in Bootstrap#setup() will be useless: close() might not be executed, in case another (for example api) call
// to close() has already set some lifecycles to stopped. In this case the process will be terminated even if the first call to close() has not finished yet.
@Override
- public synchronized void close() {
+ public synchronized void close() throws IOException {
if (lifecycle.started()) {
stop();
}
@@ -342,82 +365,80 @@ public class Node implements Releasable {
ESLogger logger = Loggers.getLogger(Node.class, settings.get("name"));
logger.info("closing ...");
-
+ List<Closeable> toClose = new ArrayList<>();
StopWatch stopWatch = new StopWatch("node_close");
- stopWatch.start("tribe");
- injector.getInstance(TribeService.class).close();
- stopWatch.stop().start("http");
+ toClose.add(() -> stopWatch.start("tribe"));
+ toClose.add(injector.getInstance(TribeService.class));
+ toClose.add(() -> stopWatch.stop().start("node_service"));
+ toClose.add(injector.getInstance(NodeService.class));
+ toClose.add(() ->stopWatch.stop().start("http"));
if (settings.getAsBoolean("http.enabled", true)) {
- injector.getInstance(HttpServer.class).close();
+ toClose.add(injector.getInstance(HttpServer.class));
}
- stopWatch.stop().start("snapshot_service");
- injector.getInstance(SnapshotsService.class).close();
- injector.getInstance(SnapshotShardsService.class).close();
- stopWatch.stop().start("client");
+ toClose.add(() ->stopWatch.stop().start("snapshot_service"));
+ toClose.add(injector.getInstance(SnapshotsService.class));
+ toClose.add(injector.getInstance(SnapshotShardsService.class));
+ toClose.add(() ->stopWatch.stop().start("client"));
Releasables.close(injector.getInstance(Client.class));
- stopWatch.stop().start("indices_cluster");
- injector.getInstance(IndicesClusterStateService.class).close();
- stopWatch.stop().start("indices");
- injector.getInstance(IndicesTTLService.class).close();
- injector.getInstance(IndicesService.class).close();
+ toClose.add(() ->stopWatch.stop().start("indices_cluster"));
+ toClose.add(injector.getInstance(IndicesClusterStateService.class));
+ toClose.add(() ->stopWatch.stop().start("indices"));
+ toClose.add(injector.getInstance(IndicesTTLService.class));
+ toClose.add(injector.getInstance(IndicesService.class));
// close filter/fielddata caches after indices
- injector.getInstance(IndicesQueryCache.class).close();
- injector.getInstance(IndicesFieldDataCache.class).close();
- injector.getInstance(IndicesStore.class).close();
- stopWatch.stop().start("routing");
- injector.getInstance(RoutingService.class).close();
- stopWatch.stop().start("cluster");
- injector.getInstance(ClusterService.class).close();
- stopWatch.stop().start("discovery");
- injector.getInstance(DiscoveryService.class).close();
- stopWatch.stop().start("monitor");
- injector.getInstance(MonitorService.class).close();
- stopWatch.stop().start("gateway");
- injector.getInstance(GatewayService.class).close();
- stopWatch.stop().start("search");
- injector.getInstance(SearchService.class).close();
- stopWatch.stop().start("rest");
- injector.getInstance(RestController.class).close();
- stopWatch.stop().start("transport");
- injector.getInstance(TransportService.class).close();
- stopWatch.stop().start("percolator_service");
- injector.getInstance(PercolatorService.class).close();
+ toClose.add(injector.getInstance(IndicesQueryCache.class));
+ toClose.add(injector.getInstance(IndicesFieldDataCache.class));
+ toClose.add(injector.getInstance(IndicesStore.class));
+ toClose.add(() ->stopWatch.stop().start("routing"));
+ toClose.add(injector.getInstance(RoutingService.class));
+ toClose.add(() ->stopWatch.stop().start("cluster"));
+ toClose.add(injector.getInstance(ClusterService.class));
+ toClose.add(() ->stopWatch.stop().start("discovery"));
+ toClose.add(injector.getInstance(DiscoveryService.class));
+ toClose.add(() ->stopWatch.stop().start("monitor"));
+ toClose.add(injector.getInstance(MonitorService.class));
+ toClose.add(() ->stopWatch.stop().start("gateway"));
+ toClose.add(injector.getInstance(GatewayService.class));
+ toClose.add(() ->stopWatch.stop().start("search"));
+ toClose.add(injector.getInstance(SearchService.class));
+ toClose.add(() ->stopWatch.stop().start("rest"));
+ toClose.add(injector.getInstance(RestController.class));
+ toClose.add(() ->stopWatch.stop().start("transport"));
+ toClose.add(injector.getInstance(TransportService.class));
+ toClose.add(() ->stopWatch.stop().start("percolator_service"));
+ toClose.add(injector.getInstance(PercolatorService.class));
for (Class<? extends LifecycleComponent> plugin : pluginsService.nodeServices()) {
- stopWatch.stop().start("plugin(" + plugin.getName() + ")");
- injector.getInstance(plugin).close();
+ toClose.add(() ->stopWatch.stop().start("plugin(" + plugin.getName() + ")"));
+ toClose.add(injector.getInstance(plugin));
}
- stopWatch.stop().start("script");
- try {
- injector.getInstance(ScriptService.class).close();
- } catch(IOException e) {
- logger.warn("ScriptService close failed", e);
- }
+ toClose.add(() -> stopWatch.stop().start("script"));
+ toClose.add(injector.getInstance(ScriptService.class));
- stopWatch.stop().start("thread_pool");
+ toClose.add(() -> stopWatch.stop().start("thread_pool"));
// TODO this should really use ThreadPool.terminate()
- injector.getInstance(ThreadPool.class).shutdown();
- try {
- injector.getInstance(ThreadPool.class).awaitTermination(10, TimeUnit.SECONDS);
- } catch (InterruptedException e) {
- // ignore
- }
- stopWatch.stop().start("thread_pool_force_shutdown");
- try {
- injector.getInstance(ThreadPool.class).shutdownNow();
- } catch (Exception e) {
- // ignore
- }
- stopWatch.stop();
+ toClose.add(() -> injector.getInstance(ThreadPool.class).shutdown());
+ toClose.add(() -> {
+ try {
+ injector.getInstance(ThreadPool.class).awaitTermination(10, TimeUnit.SECONDS);
+ } catch (InterruptedException e) {
+ // ignore
+ }
+ });
+
+ toClose.add(() -> stopWatch.stop().start("thread_pool_force_shutdown"));
+ toClose.add(() -> injector.getInstance(ThreadPool.class).shutdownNow());
+ toClose.add(() -> stopWatch.stop());
+
+ toClose.add(injector.getInstance(NodeEnvironment.class));
+ toClose.add(injector.getInstance(PageCacheRecycler.class));
if (logger.isTraceEnabled()) {
logger.trace("Close times for each service:\n{}", stopWatch.prettyPrint());
}
-
- injector.getInstance(NodeEnvironment.class).close();
- injector.getInstance(PageCacheRecycler.class).close();
-
+ IOUtils.close(toClose);
logger.info("closed");
}
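
The refactor above replaces the chain of inline close() calls with an ordered List<Closeable>: each stopwatch lap becomes a Closeable lambda interleaved with the services it times, and a single IOUtils.close(toClose) at the end walks the list. Below is a minimal sketch of the pattern with stand-in services; closeAll() mirrors the accumulate-and-rethrow behavior of Lucene's IOUtils.close, which the real code relies on.

    import java.io.Closeable;
    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    public class OrderedShutdownSketch {
        public static void main(String[] args) throws IOException {
            List<Closeable> toClose = new ArrayList<>();
            // Timing markers interleave with the services as Closeable lambdas,
            // so the close order also drives the stopwatch laps.
            toClose.add(() -> System.out.println("stop/start: indices"));
            toClose.add(() -> System.out.println("closing IndicesService (stand-in)"));
            toClose.add(() -> System.out.println("stop/start: transport"));
            toClose.add(() -> System.out.println("closing TransportService (stand-in)"));
            closeAll(toClose); // IOUtils.close(toClose) in the real code
        }

        // Mirrors Lucene's IOUtils.close: close everything, rethrow the first
        // failure with later failures attached as suppressed exceptions.
        static void closeAll(List<Closeable> toClose) throws IOException {
            IOException first = null;
            for (Closeable c : toClose) {
                try {
                    c.close();
                } catch (IOException e) {
                    if (first == null) first = e; else first.addSuppressed(e);
                }
            }
            if (first != null) throw first;
        }
    }

Because the timing markers are list entries themselves, a failure in one service no longer aborts the shutdown of the services after it.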
diff --git a/core/src/main/java/org/elasticsearch/node/NodeModule.java b/core/src/main/java/org/elasticsearch/node/NodeModule.java
index aa52d38934..442dc72700 100644
--- a/core/src/main/java/org/elasticsearch/node/NodeModule.java
+++ b/core/src/main/java/org/elasticsearch/node/NodeModule.java
@@ -22,9 +22,28 @@ package org.elasticsearch.node;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.ingest.ProcessorsRegistry;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.ingest.core.TemplateService;
+import org.elasticsearch.ingest.processor.AppendProcessor;
+import org.elasticsearch.ingest.processor.ConvertProcessor;
+import org.elasticsearch.ingest.processor.DateProcessor;
+import org.elasticsearch.ingest.processor.DeDotProcessor;
+import org.elasticsearch.ingest.processor.FailProcessor;
+import org.elasticsearch.ingest.processor.GsubProcessor;
+import org.elasticsearch.ingest.processor.JoinProcessor;
+import org.elasticsearch.ingest.processor.LowercaseProcessor;
+import org.elasticsearch.ingest.processor.RemoveProcessor;
+import org.elasticsearch.ingest.processor.RenameProcessor;
+import org.elasticsearch.ingest.processor.SetProcessor;
+import org.elasticsearch.ingest.processor.SplitProcessor;
+import org.elasticsearch.ingest.processor.TrimProcessor;
+import org.elasticsearch.ingest.processor.UppercaseProcessor;
import org.elasticsearch.monitor.MonitorService;
import org.elasticsearch.node.service.NodeService;
+import java.util.function.Function;
+
/**
*
*/
@@ -32,6 +51,7 @@ public class NodeModule extends AbstractModule {
private final Node node;
private final MonitorService monitorService;
+ private final ProcessorsRegistry processorsRegistry;
// pkg private so tests can mock
Class<? extends PageCacheRecycler> pageCacheRecyclerImpl = PageCacheRecycler.class;
@@ -40,6 +60,22 @@ public class NodeModule extends AbstractModule {
public NodeModule(Node node, MonitorService monitorService) {
this.node = node;
this.monitorService = monitorService;
+ this.processorsRegistry = new ProcessorsRegistry();
+
+ registerProcessor(DateProcessor.TYPE, (templateService) -> new DateProcessor.Factory());
+ registerProcessor(SetProcessor.TYPE, SetProcessor.Factory::new);
+ registerProcessor(AppendProcessor.TYPE, AppendProcessor.Factory::new);
+ registerProcessor(RenameProcessor.TYPE, (templateService) -> new RenameProcessor.Factory());
+ registerProcessor(RemoveProcessor.TYPE, RemoveProcessor.Factory::new);
+ registerProcessor(SplitProcessor.TYPE, (templateService) -> new SplitProcessor.Factory());
+ registerProcessor(JoinProcessor.TYPE, (templateService) -> new JoinProcessor.Factory());
+ registerProcessor(UppercaseProcessor.TYPE, (templateService) -> new UppercaseProcessor.Factory());
+ registerProcessor(LowercaseProcessor.TYPE, (templateService) -> new LowercaseProcessor.Factory());
+ registerProcessor(TrimProcessor.TYPE, (templateService) -> new TrimProcessor.Factory());
+ registerProcessor(ConvertProcessor.TYPE, (templateService) -> new ConvertProcessor.Factory());
+ registerProcessor(GsubProcessor.TYPE, (templateService) -> new GsubProcessor.Factory());
+ registerProcessor(FailProcessor.TYPE, FailProcessor.Factory::new);
+ registerProcessor(DeDotProcessor.TYPE, (templateService) -> new DeDotProcessor.Factory());
}
@Override
@@ -58,5 +94,20 @@ public class NodeModule extends AbstractModule {
bind(Node.class).toInstance(node);
bind(MonitorService.class).toInstance(monitorService);
bind(NodeService.class).asEagerSingleton();
+ bind(ProcessorsRegistry.class).toInstance(processorsRegistry);
+ }
+
+ /**
+ * Returns the node
+ */
+ public Node getNode() {
+ return node;
+ }
+
+ /**
+ * Adds a processor factory under a specific type name.
+ */
+ public void registerProcessor(String type, Function<TemplateService, Processor.Factory<?>> processorFactoryProvider) {
+ processorsRegistry.registerProcessor(type, processorFactoryProvider);
}
}
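
The new registerProcessor(type, factoryProvider) hook stores a Function from TemplateService to a Processor.Factory, so factories that need templating receive it and the rest simply discard the argument, as most of the built-in registrations above do. A self-contained sketch of that registry shape follows; the stand-in interfaces and the duplicate-type guard are assumptions, only the Function-based registration comes from the diff.

    import java.util.HashMap;
    import java.util.Map;
    import java.util.function.Function;

    class ProcessorRegistrySketch {
        // stand-ins: the real Processor.Factory and TemplateService live in org.elasticsearch.ingest
        interface TemplateService {}
        interface ProcessorFactory {}

        private final Map<String, Function<TemplateService, ProcessorFactory>> factories = new HashMap<>();

        void registerProcessor(String type, Function<TemplateService, ProcessorFactory> provider) {
            // duplicate-type guard is an assumption, not shown in the diff
            if (factories.putIfAbsent(type, provider) != null) {
                throw new IllegalArgumentException("processor type [" + type + "] is already registered");
            }
        }

        ProcessorFactory factory(String type, TemplateService templateService) {
            return factories.get(type).apply(templateService);
        }
    }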
diff --git a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java
index 1c2ab33e9b..c5dd64a67b 100644
--- a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java
+++ b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java
@@ -21,11 +21,11 @@ package org.elasticsearch.node.internal;
import org.elasticsearch.bootstrap.BootstrapInfo;
import org.elasticsearch.cluster.ClusterName;
-import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.cli.Terminal;
import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.env.Environment;
@@ -57,7 +57,7 @@ public class InternalSettingsPreparer {
public static final String SECRET_PROMPT_VALUE = "${prompt.secret}";
public static final String TEXT_PROMPT_VALUE = "${prompt.text}";
- public static final String IGNORE_SYSTEM_PROPERTIES_SETTING = "config.ignore_system_properties";
+ public static final Setting<Boolean> IGNORE_SYSTEM_PROPERTIES_SETTING = Setting.boolSetting("config.ignore_system_properties", false, false, Setting.Scope.CLUSTER);
/**
* Prepares the settings by gathering all elasticsearch system properties and setting defaults.
@@ -108,13 +108,12 @@ public class InternalSettingsPreparer {
environment = new Environment(output.build());
// we put back the path.logs so we can use it in the logging configuration file
- output.put("path.logs", cleanPath(environment.logsFile().toAbsolutePath().toString()));
-
+ output.put(Environment.PATH_LOGS_SETTING.getKey(), cleanPath(environment.logsFile().toAbsolutePath().toString()));
return new Environment(output.build());
}
private static boolean useSystemProperties(Settings input) {
- return !input.getAsBoolean(IGNORE_SYSTEM_PROPERTIES_SETTING, false);
+ return !IGNORE_SYSTEM_PROPERTIES_SETTING.get(input);
}
/**
@@ -164,8 +163,8 @@ public class InternalSettingsPreparer {
}
// put the cluster name
- if (output.get(ClusterName.SETTING) == null) {
- output.put(ClusterName.SETTING, ClusterName.DEFAULT.value());
+ if (output.get(ClusterName.CLUSTER_NAME_SETTING.getKey()) == null) {
+ output.put(ClusterName.CLUSTER_NAME_SETTING.getKey(), ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY));
}
replacePromptPlaceholders(output, terminal);
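
The IGNORE_SYSTEM_PROPERTIES_SETTING change is part of a broader migration from raw string keys to the typed Setting infrastructure: the key, the parsed type, the default, and the scope move into one constant, and call sites read through it. A before-and-after sketch using only calls visible in this hunk, with Settings.EMPTY standing in for real node settings:

    import org.elasticsearch.common.settings.Setting;
    import org.elasticsearch.common.settings.Settings;

    class TypedSettingSketch {
        // after: key, type, default and scope live in one constant
        static final Setting<Boolean> IGNORE_SYSTEM_PROPERTIES_SETTING =
                Setting.boolSetting("config.ignore_system_properties", false, false, Setting.Scope.CLUSTER);

        static void demo() {
            Settings settings = Settings.EMPTY; // stand-in for the prepared node settings

            // before: untyped lookup, with the default repeated at every call site
            boolean untyped = settings.getAsBoolean("config.ignore_system_properties", false);

            // after: reads go through the Setting object, which owns the default
            boolean typed = IGNORE_SYSTEM_PROPERTIES_SETTING.get(settings);

            assert untyped == typed;
        }
    }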
diff --git a/core/src/main/java/org/elasticsearch/node/service/NodeService.java b/core/src/main/java/org/elasticsearch/node/service/NodeService.java
index b4fe59e347..7c385b5b39 100644
--- a/core/src/main/java/org/elasticsearch/node/service/NodeService.java
+++ b/core/src/main/java/org/elasticsearch/node/service/NodeService.java
@@ -24,20 +24,25 @@ import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags;
+import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.Discovery;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.http.HttpServer;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
+import org.elasticsearch.ingest.IngestService;
+import org.elasticsearch.ingest.ProcessorsRegistry;
import org.elasticsearch.monitor.MonitorService;
import org.elasticsearch.plugins.PluginsService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
+import java.io.Closeable;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
@@ -47,7 +52,7 @@ import static java.util.Collections.unmodifiableMap;
/**
*/
-public class NodeService extends AbstractComponent {
+public class NodeService extends AbstractComponent implements Closeable {
private final ThreadPool threadPool;
private final MonitorService monitorService;
@@ -55,6 +60,7 @@ public class NodeService extends AbstractComponent {
private final IndicesService indicesService;
private final PluginsService pluginService;
private final CircuitBreakerService circuitBreakerService;
+ private final IngestService ingestService;
private ScriptService scriptService;
@Nullable
@@ -67,10 +73,10 @@ public class NodeService extends AbstractComponent {
private final Discovery discovery;
@Inject
- public NodeService(Settings settings, ThreadPool threadPool, MonitorService monitorService, Discovery discovery,
- TransportService transportService, IndicesService indicesService,
- PluginsService pluginService, CircuitBreakerService circuitBreakerService,
- Version version) {
+ public NodeService(Settings settings, Environment environment, ThreadPool threadPool, MonitorService monitorService,
+ Discovery discovery, TransportService transportService, IndicesService indicesService,
+ PluginsService pluginService, CircuitBreakerService circuitBreakerService, Version version,
+ ProcessorsRegistry processorsRegistry, ClusterService clusterService) {
super(settings);
this.threadPool = threadPool;
this.monitorService = monitorService;
@@ -81,12 +87,15 @@ public class NodeService extends AbstractComponent {
this.version = version;
this.pluginService = pluginService;
this.circuitBreakerService = circuitBreakerService;
+ this.ingestService = new IngestService(settings, threadPool, processorsRegistry);
+ clusterService.add(ingestService.getPipelineStore());
}
// can not use constructor injection or there will be a circular dependency
@Inject(optional = true)
public void setScriptService(ScriptService scriptService) {
this.scriptService = scriptService;
+ this.ingestService.setScriptService(scriptService);
}
public void setHttpServer(@Nullable HttpServer httpServer) {
@@ -176,4 +185,13 @@ public class NodeService extends AbstractComponent {
discoveryStats ? discovery.stats() : null
);
}
+
+ public IngestService getIngestService() {
+ return ingestService;
+ }
+
+ @Override
+ public void close() throws IOException {
+ indicesService.close();
+ }
}
diff --git a/core/src/main/java/org/elasticsearch/percolator/MultiDocumentPercolatorIndex.java b/core/src/main/java/org/elasticsearch/percolator/MultiDocumentPercolatorIndex.java
index 10eeec75cb..9d091a4c0b 100644
--- a/core/src/main/java/org/elasticsearch/percolator/MultiDocumentPercolatorIndex.java
+++ b/core/src/main/java/org/elasticsearch/percolator/MultiDocumentPercolatorIndex.java
@@ -39,6 +39,8 @@ import org.apache.lucene.util.CloseableThreadLocal;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.engine.Engine;
+import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
@@ -76,8 +78,7 @@ class MultiDocumentPercolatorIndex implements PercolatorIndex {
} else {
memoryIndex = new MemoryIndex(true);
}
- Analyzer analyzer = context.mapperService().documentMapper(parsedDocument.type()).mappers().indexAnalyzer();
- memoryIndices[i] = indexDoc(d, analyzer, memoryIndex).createSearcher().getIndexReader();
+ memoryIndices[i] = indexDoc(d, memoryIndex, context, parsedDocument).createSearcher().getIndexReader();
}
try {
MultiReader mReader = new MultiReader(memoryIndices, true);
@@ -101,8 +102,13 @@ class MultiDocumentPercolatorIndex implements PercolatorIndex {
}
}
- MemoryIndex indexDoc(ParseContext.Document d, Analyzer analyzer, MemoryIndex memoryIndex) {
+ MemoryIndex indexDoc(ParseContext.Document d, MemoryIndex memoryIndex, PercolateContext context, ParsedDocument parsedDocument) {
for (IndexableField field : d.getFields()) {
+ Analyzer analyzer = context.analysisService().defaultIndexAnalyzer();
+ DocumentMapper documentMapper = context.mapperService().documentMapper(parsedDocument.type());
+ if (documentMapper != null && documentMapper.mappers().getMapper(field.name()) != null) {
+ analyzer = documentMapper.mappers().indexAnalyzer();
+ }
if (field.fieldType().indexOptions() == IndexOptions.NONE && field.name().equals(UidFieldMapper.NAME)) {
continue;
}
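
Both percolator index implementations (this one and SingleDocumentPercolatorIndex further down) now resolve the analyzer per field instead of once per document, falling back to the default index analyzer when no document mapper exists or the mapper does not know the field; that null-check is what makes mapping-less percolation safe. The shared logic, extracted here as a hypothetical helper using only the calls that appear in the diff:

    import org.apache.lucene.analysis.Analyzer;
    import org.elasticsearch.index.mapper.DocumentMapper;
    import org.elasticsearch.index.mapper.ParsedDocument;

    // hypothetical helper; the two classes currently inline this logic
    static Analyzer resolveAnalyzer(PercolateContext context, ParsedDocument parsedDocument, String fieldName) {
        Analyzer analyzer = context.analysisService().defaultIndexAnalyzer();
        DocumentMapper documentMapper = context.mapperService().documentMapper(parsedDocument.type());
        if (documentMapper != null && documentMapper.mappers().getMapper(fieldName) != null) {
            analyzer = documentMapper.mappers().indexAnalyzer();
        }
        return analyzer;
    }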
diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java b/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java
index 7d4e18c3d3..2ce3236a52 100644
--- a/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java
+++ b/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java
@@ -26,14 +26,11 @@ import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
-import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.util.Counter;
import org.elasticsearch.action.percolate.PercolateShardRequest;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
-import org.elasticsearch.common.HasContext;
-import org.elasticsearch.common.HasContextAndHeaders;
-import org.elasticsearch.common.HasHeaders;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.lease.Releasables;
@@ -48,7 +45,6 @@ import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.object.ObjectMapper;
-import org.elasticsearch.index.percolator.PercolatorQueriesRegistry;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
@@ -82,7 +78,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
-import java.util.concurrent.ConcurrentMap;
/**
*/
@@ -125,7 +120,7 @@ public class PercolateContext extends SearchContext {
public PercolateContext(PercolateShardRequest request, SearchShardTarget searchShardTarget, IndexShard indexShard,
IndexService indexService, PageCacheRecycler pageCacheRecycler,
BigArrays bigArrays, ScriptService scriptService, Query aliasFilter, ParseFieldMatcher parseFieldMatcher) {
- super(parseFieldMatcher, request);
+ super(parseFieldMatcher);
this.indexShard = indexShard;
this.indexService = indexService;
this.fieldDataService = indexService.fieldData();
@@ -146,7 +141,7 @@ public class PercolateContext extends SearchContext {
// for testing:
PercolateContext(PercolateShardRequest request, SearchShardTarget searchShardTarget, MapperService mapperService) {
- super(null, request);
+ super(null);
this.searchShardTarget = searchShardTarget;
this.mapperService = mapperService;
this.indexService = null;
@@ -519,6 +514,16 @@ public class PercolateContext extends SearchContext {
}
@Override
+ public SearchContext searchAfter(FieldDoc searchAfter) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public FieldDoc searchAfter() {
+ return null;
+ }
+
+ @Override
public SearchContext parsedPostFilter(ParsedQuery postFilter) {
throw new UnsupportedOperationException();
}
@@ -670,92 +675,11 @@ public class PercolateContext extends SearchContext {
}
@Override
- public void innerHits(InnerHitsContext innerHitsContext) {
- throw new UnsupportedOperationException();
- }
-
- @Override
public InnerHitsContext innerHits() {
throw new UnsupportedOperationException();
}
@Override
- public <V> V putInContext(Object key, Object value) {
- assert false : "percolatecontext does not support contexts & headers";
- return null;
- }
-
- @Override
- public void putAllInContext(ObjectObjectAssociativeContainer<Object, Object> map) {
- assert false : "percolatocontext does not support contexts & headers";
- }
-
- @Override
- public <V> V getFromContext(Object key) {
- return null;
- }
-
- @Override
- public <V> V getFromContext(Object key, V defaultValue) {
- return defaultValue;
- }
-
- @Override
- public boolean hasInContext(Object key) {
- return false;
- }
-
- @Override
- public int contextSize() {
- return 0;
- }
-
- @Override
- public boolean isContextEmpty() {
- return true;
- }
-
- @Override
- public ImmutableOpenMap<Object, Object> getContext() {
- return ImmutableOpenMap.of();
- }
-
- @Override
- public void copyContextFrom(HasContext other) {
- assert false : "percolatecontext does not support contexts & headers";
- }
-
- @Override
- public <V> void putHeader(String key, V value) {
- assert false : "percolatecontext does not support contexts & headers";
- }
-
- @Override
- public <V> V getHeader(String key) {
- return null;
- }
-
- @Override
- public boolean hasHeader(String key) {
- return false;
- }
-
- @Override
- public Set<String> getHeaders() {
- return Collections.emptySet();
- }
-
- @Override
- public void copyHeadersFrom(HasHeaders from) {
- assert false : "percolatecontext does not support contexts & headers";
- }
-
- @Override
- public void copyContextAndHeadersFrom(HasContextAndHeaders other) {
- assert false : "percolatecontext does not support contexts & headers";
- }
-
- @Override
public Map<Class<?>, Collector> queryCollectors() {
return queryCollectors;
}
diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolateDocumentParser.java b/core/src/main/java/org/elasticsearch/percolator/PercolateDocumentParser.java
index 6733ebd0b3..946d30edcc 100644
--- a/core/src/main/java/org/elasticsearch/percolator/PercolateDocumentParser.java
+++ b/core/src/main/java/org/elasticsearch/percolator/PercolateDocumentParser.java
@@ -24,6 +24,7 @@ import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.percolate.PercolateShardRequest;
import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
+import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
@@ -34,6 +35,7 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.DocumentMapperForType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
+import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.aggregations.AggregationPhase;
@@ -49,21 +51,20 @@ public class PercolateDocumentParser {
private final HighlightPhase highlightPhase;
private final SortParseElement sortParseElement;
private final AggregationPhase aggregationPhase;
- private final MappingUpdatedAction mappingUpdatedAction;
@Inject
- public PercolateDocumentParser(HighlightPhase highlightPhase, SortParseElement sortParseElement, AggregationPhase aggregationPhase, MappingUpdatedAction mappingUpdatedAction) {
+ public PercolateDocumentParser(HighlightPhase highlightPhase, SortParseElement sortParseElement,
+ AggregationPhase aggregationPhase) {
this.highlightPhase = highlightPhase;
this.sortParseElement = sortParseElement;
this.aggregationPhase = aggregationPhase;
- this.mappingUpdatedAction = mappingUpdatedAction;
}
public ParsedDocument parse(PercolateShardRequest request, PercolateContext context, MapperService mapperService, QueryShardContext queryShardContext) {
BytesReference source = request.source();
if (source == null || source.length() == 0) {
if (request.docSource() != null && request.docSource().length() != 0) {
- return parseFetchedDoc(context, request.docSource(), mapperService, request.shardId().getIndex(), request.documentType());
+ return parseFetchedDoc(context, request.docSource(), mapperService, request.shardId().getIndexName(), request.documentType());
} else {
return null;
}
@@ -94,13 +95,10 @@ public class PercolateDocumentParser {
DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(request.documentType());
String index = context.shardTarget().index();
- doc = docMapper.getDocumentMapper().parse(source(parser).index(index).type(request.documentType()).flyweight(true));
+ doc = docMapper.getDocumentMapper().parse(source(parser).index(index).type(request.documentType()).id("_id_for_percolate_api"));
if (docMapper.getMapping() != null) {
doc.addDynamicMappingsUpdate(docMapper.getMapping());
}
- if (doc.dynamicMappingsUpdate() != null) {
- mappingUpdatedAction.updateMappingOnMasterSynchronously(request.shardId().getIndex(), request.documentType(), doc.dynamicMappingsUpdate());
- }
// the document parsing exits the "doc" object, so we need to set the new current field.
currentFieldName = parser.currentName();
}
@@ -186,7 +184,7 @@ public class PercolateDocumentParser {
throw new IllegalArgumentException("Can't specify the document to percolate in the source of the request and as document id");
}
- doc = parseFetchedDoc(context, request.docSource(), mapperService, request.shardId().getIndex(), request.documentType());
+ doc = parseFetchedDoc(context, request.docSource(), mapperService, request.shardId().getIndexName(), request.documentType());
}
if (doc == null) {
@@ -206,19 +204,15 @@ public class PercolateDocumentParser {
}
private ParsedDocument parseFetchedDoc(PercolateContext context, BytesReference fetchedDoc, MapperService mapperService, String index, String type) {
- try (XContentParser parser = XContentFactory.xContent(fetchedDoc).createParser(fetchedDoc)) {
- DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(type);
- ParsedDocument doc = docMapper.getDocumentMapper().parse(source(parser).index(index).type(type).flyweight(true));
- if (doc == null) {
- throw new ElasticsearchParseException("No doc to percolate in the request");
- }
- if (context.highlight() != null) {
- doc.setSource(fetchedDoc);
- }
- return doc;
- } catch (Throwable e) {
- throw new ElasticsearchParseException("failed to parse request", e);
+ DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(type);
+ ParsedDocument doc = docMapper.getDocumentMapper().parse(source(fetchedDoc).index(index).type(type).id("_id_for_percolate_api"));
+ if (doc == null) {
+ throw new ElasticsearchParseException("No doc to percolate in the request");
}
+ if (context.highlight() != null) {
+ doc.setSource(fetchedDoc);
+ }
+ return doc;
}
}
diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java b/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java
index e6ffa313e8..95cdff02ad 100644
--- a/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java
+++ b/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java
@@ -39,11 +39,11 @@ import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
-import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
@@ -52,6 +52,7 @@ import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.fieldvisitor.SingleFieldsVisitor;
+import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.percolator.PercolatorFieldMapper;
@@ -85,7 +86,7 @@ import java.util.stream.StreamSupport;
import static org.apache.lucene.search.BooleanClause.Occur.FILTER;
import static org.apache.lucene.search.BooleanClause.Occur.MUST;
-public class PercolatorService extends AbstractComponent {
+public class PercolatorService extends AbstractComponent implements Releasable {
public final static float NO_SCORE = Float.NEGATIVE_INFINITY;
public final static String TYPE_NAME = ".percolator";
@@ -135,14 +136,14 @@ public class PercolatorService extends AbstractComponent {
multi = new MultiDocumentPercolatorIndex(cache);
}
- public ReduceResult reduce(boolean onlyCount, List<PercolateShardResponse> shardResponses, HasContextAndHeaders headersContext) throws IOException {
+ public ReduceResult reduce(boolean onlyCount, List<PercolateShardResponse> shardResponses) throws IOException {
if (onlyCount) {
long finalCount = 0;
for (PercolateShardResponse shardResponse : shardResponses) {
finalCount += shardResponse.topDocs().totalHits;
}
- InternalAggregations reducedAggregations = reduceAggregations(shardResponses, headersContext);
+ InternalAggregations reducedAggregations = reduceAggregations(shardResponses);
return new PercolatorService.ReduceResult(finalCount, reducedAggregations);
} else {
int requestedSize = shardResponses.get(0).requestedSize();
@@ -162,7 +163,7 @@ public class PercolatorService extends AbstractComponent {
Map<String, HighlightField> hl = shardResponse.hls().get(doc.doc);
matches[i] = new PercolateResponse.Match(new Text(shardResponse.getIndex()), new Text(id), doc.score, hl);
}
- InternalAggregations reducedAggregations = reduceAggregations(shardResponses, headersContext);
+ InternalAggregations reducedAggregations = reduceAggregations(shardResponses);
return new PercolatorService.ReduceResult(foundMatches, matches, reducedAggregations);
}
}
@@ -179,7 +180,7 @@ public class PercolatorService extends AbstractComponent {
// just like is done in other apis.
String[] filteringAliases = indexNameExpressionResolver.filteringAliases(
clusterService.state(),
- indexShard.shardId().index().name(),
+ indexShard.shardId().getIndex().getName(),
request.indices()
);
Query aliasFilter = percolateIndexService.aliasFilter(indexShard.getQueryShardContext(), filteringAliases);
@@ -201,7 +202,8 @@ public class PercolatorService extends AbstractComponent {
// parse the source either into one MemoryIndex, if it is a single document or index multiple docs if nested
PercolatorIndex percolatorIndex;
- boolean isNested = indexShard.mapperService().documentMapper(request.documentType()).hasNestedObjects();
+ DocumentMapper documentMapper = indexShard.mapperService().documentMapper(request.documentType());
+ boolean isNested = documentMapper != null && documentMapper.hasNestedObjects();
if (parsedDocument.docs().size() > 1) {
assert isNested;
percolatorIndex = multi;
@@ -303,11 +305,12 @@ public class PercolatorService extends AbstractComponent {
}
}
+ @Override
public void close() {
cache.close();
}
- private InternalAggregations reduceAggregations(List<PercolateShardResponse> shardResults, HasContextAndHeaders headersContext) {
+ private InternalAggregations reduceAggregations(List<PercolateShardResponse> shardResults) {
if (shardResults.get(0).aggregations() == null) {
return null;
}
@@ -316,7 +319,7 @@ public class PercolatorService extends AbstractComponent {
for (PercolateShardResponse shardResult : shardResults) {
aggregationsList.add(shardResult.aggregations());
}
- InternalAggregations aggregations = InternalAggregations.reduce(aggregationsList, new InternalAggregation.ReduceContext(bigArrays, scriptService, headersContext));
+ InternalAggregations aggregations = InternalAggregations.reduce(aggregationsList, new InternalAggregation.ReduceContext(bigArrays, scriptService));
if (aggregations != null) {
List<SiblingPipelineAggregator> pipelineAggregators = shardResults.get(0).pipelineAggregators();
if (pipelineAggregators != null) {
@@ -324,7 +327,7 @@ public class PercolatorService extends AbstractComponent {
return (InternalAggregation) p;
}).collect(Collectors.toList());
for (SiblingPipelineAggregator pipelineAggregator : pipelineAggregators) {
- InternalAggregation newAgg = pipelineAggregator.doReduce(new InternalAggregations(newAggs), new InternalAggregation.ReduceContext(bigArrays, scriptService, headersContext));
+ InternalAggregation newAgg = pipelineAggregator.doReduce(new InternalAggregations(newAggs), new InternalAggregation.ReduceContext(bigArrays, scriptService));
newAggs.add(newAgg);
}
aggregations = new InternalAggregations(newAggs);
diff --git a/core/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java b/core/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java
index 1271872cab..1d5268e379 100644
--- a/core/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java
+++ b/core/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java
@@ -28,6 +28,7 @@ import org.apache.lucene.index.memory.MemoryIndex;
import org.apache.lucene.util.CloseableThreadLocal;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.index.engine.Engine;
+import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
@@ -49,11 +50,15 @@ class SingleDocumentPercolatorIndex implements PercolatorIndex {
public void prepare(PercolateContext context, ParsedDocument parsedDocument) {
MemoryIndex memoryIndex = cache.get();
for (IndexableField field : parsedDocument.rootDoc().getFields()) {
+ Analyzer analyzer = context.analysisService().defaultIndexAnalyzer();
+ DocumentMapper documentMapper = context.mapperService().documentMapper(parsedDocument.type());
+ if (documentMapper != null && documentMapper.mappers().getMapper(field.name()) != null) {
+ analyzer = documentMapper.mappers().indexAnalyzer();
+ }
if (field.fieldType().indexOptions() == IndexOptions.NONE && field.name().equals(UidFieldMapper.NAME)) {
continue;
}
try {
- Analyzer analyzer = context.mapperService().documentMapper(parsedDocument.type()).mappers().indexAnalyzer();
// TODO: instead of passing null here, we can have a CTL<Map<String,TokenStream>> and pass previous,
// like the indexer does
try (TokenStream tokenStream = field.tokenStream(analyzer, null)) {
diff --git a/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java b/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java
index a57a96c631..a7d088ce21 100644
--- a/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java
+++ b/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java
@@ -20,9 +20,9 @@ package org.elasticsearch.plugins;
public class DummyPluginInfo extends PluginInfo {
- private DummyPluginInfo(String name, String description, boolean site, String version, boolean jvm, String classname, boolean isolated) {
- super(name, description, site, version, jvm, classname, isolated);
+ private DummyPluginInfo(String name, String description, String version, String classname, boolean isolated) {
+ super(name, description, version, classname, isolated);
}
- public static final DummyPluginInfo INSTANCE = new DummyPluginInfo("dummy_plugin_name", "dummy plugin description", true, "dummy_plugin_version", true, "DummyPluginName", true);
+ public static final DummyPluginInfo INSTANCE = new DummyPluginInfo("dummy_plugin_name", "dummy plugin description", "dummy_plugin_version", "DummyPluginName", true);
}
diff --git a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java
new file mode 100644
index 0000000000..656378b0f8
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java
@@ -0,0 +1,401 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.plugins;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.net.URL;
+import java.net.URLDecoder;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.DirectoryStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardCopyOption;
+import java.nio.file.attribute.PosixFileAttributeView;
+import java.nio.file.attribute.PosixFilePermission;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Set;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+
+import org.apache.lucene.util.IOUtils;
+import org.elasticsearch.Build;
+import org.elasticsearch.Version;
+import org.elasticsearch.bootstrap.JarHell;
+import org.elasticsearch.common.cli.CliTool;
+import org.elasticsearch.common.cli.Terminal;
+import org.elasticsearch.common.hash.MessageDigests;
+import org.elasticsearch.common.io.FileSystemUtils;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
+
+import static java.util.Collections.unmodifiableSet;
+import static org.elasticsearch.common.cli.Terminal.Verbosity.VERBOSE;
+import static org.elasticsearch.common.util.set.Sets.newHashSet;
+
+/**
+ * A command for the plugin cli to install a plugin into elasticsearch.
+ *
+ * The install command takes a plugin id, which may be any of the following:
+ * <ul>
+ * <li>An official elasticsearch plugin name</li>
+ * <li>Maven coordinates to a plugin zip</li>
+ * <li>A URL to a plugin zip</li>
+ * </ul>
+ *
+ * Plugins are packaged as zip files. Each packaged plugin must contain a
+ * plugin properties file. See {@link PluginInfo}.
+ * <p>
+ * The installation process first extracts the plugin files into a temporary
+ * directory in order to verify the plugin satisfies the following requirements:
+ * <ul>
+ * <li>Jar hell does not exist, either between the plugin's own jars, or with elasticsearch</li>
+ * <li>The plugin is not a module already provided with elasticsearch</li>
+ * <li>If the plugin contains extra security permissions, the policy file is validated</li>
+ * </ul>
+ * <p>
+ * A plugin may also contain an optional {@code bin} directory which contains scripts. The
+ * scripts will be installed into a subdirectory of the elasticsearch bin directory, using
+ * the name of the plugin, and the scripts will be marked executable.
+ * <p>
+ * A plugin may also contain an optional {@code config} directory which contains configuration
+ * files specific to the plugin. The config files will be installed into a subdirectory of the
+ * elasticsearch config directory, using the name of the plugin. If any files to be installed
+ * already exist, they will be skipped.
+ */
+class InstallPluginCommand extends CliTool.Command {
+
+ private static final String PROPERTY_SUPPORT_STAGING_URLS = "es.plugins.staging";
+
+ // TODO: make this a resource file generated by gradle
+ static final Set<String> MODULES = unmodifiableSet(newHashSet(
+ "lang-expression",
+ "lang-groovy"));
+
+ // TODO: make this a resource file generated by gradle
+ static final Set<String> OFFICIAL_PLUGINS = unmodifiableSet(newHashSet(
+ "analysis-icu",
+ "analysis-kuromoji",
+ "analysis-phonetic",
+ "analysis-smartcn",
+ "analysis-stempel",
+ "delete-by-query",
+ "discovery-azure",
+ "discovery-ec2",
+ "discovery-gce",
+ "lang-javascript",
+ "lang-painless",
+ "lang-python",
+ "mapper-attachments",
+ "mapper-murmur3",
+ "mapper-size",
+ "repository-azure",
+ "repository-hdfs",
+ "repository-s3",
+ "store-smb"));
+
+ private final String pluginId;
+ private final boolean batch;
+
+ InstallPluginCommand(Terminal terminal, String pluginId, boolean batch) {
+ super(terminal);
+ this.pluginId = pluginId;
+ this.batch = batch;
+ }
+
+ @Override
+ public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception {
+
+ // TODO: remove this leniency!! is it needed anymore?
+ if (Files.exists(env.pluginsFile()) == false) {
+ terminal.println("Plugins directory [%s] does not exist. Creating...", env.pluginsFile());
+ Files.createDirectory(env.pluginsFile());
+ }
+
+ if (Environment.isWritable(env.pluginsFile()) == false) {
+ throw new IOException("Plugins directory is read only: " + env.pluginsFile());
+ }
+
+ Path pluginZip = download(pluginId, env.tmpFile());
+ Path extractedZip = unzip(pluginZip, env.pluginsFile());
+ install(extractedZip, env);
+
+ return CliTool.ExitStatus.OK;
+ }
+
+ /** Downloads the plugin and returns the file it was downloaded to. */
+ private Path download(String pluginId, Path tmpDir) throws IOException {
+ if (OFFICIAL_PLUGINS.contains(pluginId)) {
+ final String version = Version.CURRENT.toString();
+ final String url;
+ if (System.getProperty(PROPERTY_SUPPORT_STAGING_URLS, "false").equals("true")) {
+ url = String.format(Locale.ROOT, "https://download.elastic.co/elasticsearch/staging/%1$s-%2$s/org/elasticsearch/plugin/%3$s/%1$s/%3$s-%1$s.zip",
+ version, Build.CURRENT.shortHash(), pluginId);
+ } else {
+ url = String.format(Locale.ROOT, "https://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/%1$s/%2$s/%1$s-%2$s.zip",
+ pluginId, version);
+ }
+ terminal.println("-> Downloading " + pluginId + " from elastic");
+ return downloadZipAndChecksum(url, tmpDir);
+ }
+
+ // now try as maven coordinates; a valid URL would only have a single colon
+ String[] coordinates = pluginId.split(":");
+ if (coordinates.length == 3) {
+ String mavenUrl = String.format(Locale.ROOT, "https://repo1.maven.org/maven2/%1$s/%2$s/%3$s/%2$s-%3$s.zip",
+ coordinates[0].replace(".", "/") /* groupId */, coordinates[1] /* artifactId */, coordinates[2] /* version */);
+ terminal.println("-> Downloading " + pluginId + " from maven central");
+ return downloadZipAndChecksum(mavenUrl, tmpDir);
+ }
+
+ // fall back to plain old URL
+ terminal.println("-> Downloading " + URLDecoder.decode(pluginId, "UTF-8"));
+ return downloadZip(pluginId, tmpDir);
+ }
+
+ /** Downloads a zip from the url, into a temp file under the given temp dir. */
+ private Path downloadZip(String urlString, Path tmpDir) throws IOException {
+ URL url = new URL(urlString);
+ Path zip = Files.createTempFile(tmpDir, null, ".zip");
+ try (InputStream in = url.openStream()) {
+ // must overwrite since creating the temp file above actually created the file
+ Files.copy(in, zip, StandardCopyOption.REPLACE_EXISTING);
+ }
+ return zip;
+ }
+
+ /** Downloads a zip from the url, as well as a SHA1 checksum, and checks the checksum. */
+ private Path downloadZipAndChecksum(String urlString, Path tmpDir) throws IOException {
+ Path zip = downloadZip(urlString, tmpDir);
+
+ URL checksumUrl = new URL(urlString + ".sha1");
+ final String expectedChecksum;
+ try (InputStream in = checksumUrl.openStream()) {
+ BufferedReader checksumReader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
+ expectedChecksum = checksumReader.readLine();
+ if (checksumReader.readLine() != null) {
+ throw new IllegalArgumentException("Invalid checksum file at " + urlString);
+ }
+ }
+
+ byte[] zipbytes = Files.readAllBytes(zip);
+ String gotChecksum = MessageDigests.toHexString(MessageDigests.sha1().digest(zipbytes));
+ if (expectedChecksum.equals(gotChecksum) == false) {
+ throw new IllegalStateException("SHA1 mismatch, expected " + expectedChecksum + " but got " + gotChecksum);
+ }
+
+ return zip;
+ }
+
+ private Path unzip(Path zip, Path pluginsDir) throws IOException {
+ // unzip plugin to a staging temp dir
+ Path target = Files.createTempDirectory(pluginsDir, ".installing-");
+ Files.createDirectories(target);
+
+ // TODO: we should wrap this in a try/catch and try deleting the target dir on failure?
+ try (ZipInputStream zipInput = new ZipInputStream(Files.newInputStream(zip))) {
+ ZipEntry entry;
+ byte[] buffer = new byte[8192];
+ while ((entry = zipInput.getNextEntry()) != null) {
+ Path targetFile = target.resolve(entry.getName());
+ // TODO: handle name being an absolute path
+
+ // be on the safe side: do not rely on directories always being extracted
+ // before their children (this makes sense, but is it guaranteed?)
+ Files.createDirectories(targetFile.getParent());
+ if (entry.isDirectory() == false) {
+ try (OutputStream out = Files.newOutputStream(targetFile)) {
+ int len;
+ while((len = zipInput.read(buffer)) >= 0) {
+ out.write(buffer, 0, len);
+ }
+ }
+ }
+ zipInput.closeEntry();
+ }
+ }
+ return target;
+ }
+
+ /** Load information about the plugin, and verify it can be installed with no errors. */
+ private PluginInfo verify(Path pluginRoot, Environment env) throws Exception {
+ // read and validate the plugin descriptor
+ PluginInfo info = PluginInfo.readFromProperties(pluginRoot);
+ terminal.println(VERBOSE, "%s", info);
+
+ // don't let the user install a plugin as a module...
+ // (they might unavoidably be in maven central and are packaged up the same way)
+ if (MODULES.contains(info.getName())) {
+ throw new IOException("plugin '" + info.getName() + "' cannot be installed like this, it is a system module");
+ }
+
+ // check for jar hell before any copying
+ jarHellCheck(pluginRoot, env.pluginsFile(), info.isIsolated());
+
+ // read optional security policy (extra permissions)
+ // if it exists, confirm or warn the user
+ Path policy = pluginRoot.resolve(PluginInfo.ES_PLUGIN_POLICY);
+ if (Files.exists(policy)) {
+ PluginSecurity.readPolicy(policy, terminal, env, batch);
+ }
+
+ return info;
+ }
+
+ /** check a candidate plugin for jar hell before installing it */
+ private void jarHellCheck(Path candidate, Path pluginsDir, boolean isolated) throws Exception {
+ // create list of current jars in classpath
+ final List<URL> jars = new ArrayList<>();
+ jars.addAll(Arrays.asList(JarHell.parseClassPath()));
+
+ // read existing bundles. this does some checks on the installation too.
+ List<PluginsService.Bundle> bundles = PluginsService.getPluginBundles(pluginsDir);
+
+ // if we aren't isolated, we need to jarhellcheck against any other non-isolated plugins
+ // that's always the first bundle
+ if (isolated == false) {
+ jars.addAll(bundles.get(0).urls);
+ }
+
+ // add plugin jars to the list
+ Path[] pluginJars = FileSystemUtils.files(candidate, "*.jar");
+ for (Path jar : pluginJars) {
+ jars.add(jar.toUri().toURL());
+ }
+ // TODO: no jars should be an error
+ // TODO: verify the classname exists in one of the jars!
+
+ // check combined (current classpath + new jars to-be-added)
+ JarHell.checkJarHell(jars.toArray(new URL[jars.size()]));
+ }
+
+ /**
+ * Installs the plugin from {@code tmpRoot} into the plugins dir.
+ * If the plugin has a bin dir and/or a config dir, those are copied.
+ */
+ private void install(Path tmpRoot, Environment env) throws Exception {
+ List<Path> deleteOnFailure = new ArrayList<>();
+ deleteOnFailure.add(tmpRoot);
+
+ try {
+ PluginInfo info = verify(tmpRoot, env);
+
+ final Path destination = env.pluginsFile().resolve(info.getName());
+ if (Files.exists(destination)) {
+ throw new IOException("plugin directory " + destination.toAbsolutePath() + " already exists. To update the plugin, uninstall it first using the 'remove " + info.getName() + "' command");
+ }
+
+ Path tmpBinDir = tmpRoot.resolve("bin");
+ if (Files.exists(tmpBinDir)) {
+ Path destBinDir = env.binFile().resolve(info.getName());
+ deleteOnFailure.add(destBinDir);
+ installBin(info, tmpBinDir, destBinDir);
+ }
+
+ Path tmpConfigDir = tmpRoot.resolve("config");
+ if (Files.exists(tmpConfigDir)) {
+ // some files may already exist, and we don't remove plugin config files on plugin removal,
+ // so any installed config files are left on failure too
+ installConfig(info, tmpConfigDir, env.configFile().resolve(info.getName()));
+ }
+
+ Files.move(tmpRoot, destination, StandardCopyOption.ATOMIC_MOVE);
+ terminal.println("-> Installed " + info.getName());
+
+ } catch (Exception installProblem) {
+ try {
+ IOUtils.rm(deleteOnFailure.toArray(new Path[0]));
+ } catch (IOException exceptionWhileRemovingFiles) {
+ installProblem.addSuppressed(exceptionWhileRemovingFiles);
+ }
+ throw installProblem;
+ }
+ }
+
+ /** Copies the files from {@code tmpBinDir} into {@code destBinDir}, along with permissions from dest dirs parent. */
+ private void installBin(PluginInfo info, Path tmpBinDir, Path destBinDir) throws IOException {
+ if (Files.isDirectory(tmpBinDir) == false) {
+ throw new IOException("bin in plugin " + info.getName() + " is not a directory");
+ }
+ Files.createDirectory(destBinDir);
+
+ // setup file attributes for the installed files to those of the parent dir
+ Set<PosixFilePermission> perms = new HashSet<>();
+ PosixFileAttributeView binAttrs = Files.getFileAttributeView(destBinDir.getParent(), PosixFileAttributeView.class);
+ if (binAttrs != null) {
+ perms = new HashSet<>(binAttrs.readAttributes().permissions());
+ // setting execute bits, since this just means "the file is executable", and actual execution requires read
+ perms.add(PosixFilePermission.OWNER_EXECUTE);
+ perms.add(PosixFilePermission.GROUP_EXECUTE);
+ perms.add(PosixFilePermission.OTHERS_EXECUTE);
+ }
+
+ try (DirectoryStream<Path> stream = Files.newDirectoryStream(tmpBinDir)) {
+ for (Path srcFile : stream) {
+ if (Files.isDirectory(srcFile)) {
+ throw new IOException("Directories not allowed in bin dir for plugin " + info.getName());
+ }
+
+ Path destFile = destBinDir.resolve(tmpBinDir.relativize(srcFile));
+ Files.copy(srcFile, destFile);
+
+ if (perms.isEmpty() == false) {
+ PosixFileAttributeView view = Files.getFileAttributeView(destFile, PosixFileAttributeView.class);
+ view.setPermissions(perms);
+ }
+ }
+ }
+ IOUtils.rm(tmpBinDir); // clean up what we just copied
+ }
+
+ /**
+ * Copies the files from {@code tmpConfigDir} into {@code destConfigDir}.
+ * Any files existing in both the source and destination will be skipped.
+ */
+ private void installConfig(PluginInfo info, Path tmpConfigDir, Path destConfigDir) throws IOException {
+ if (Files.isDirectory(tmpConfigDir) == false) {
+ throw new IOException("config in plugin " + info.getName() + " is not a directory");
+ }
+
+ // create the plugin's config dir "if necessary"
+ Files.createDirectories(destConfigDir);
+
+ try (DirectoryStream<Path> stream = Files.newDirectoryStream(tmpConfigDir)) {
+ for (Path srcFile : stream) {
+ if (Files.isDirectory(srcFile)) {
+ throw new IOException("Directories not allowed in config dir for plugin " + info.getName());
+ }
+
+ Path destFile = destConfigDir.resolve(tmpConfigDir.relativize(srcFile));
+ if (Files.exists(destFile) == false) {
+ Files.copy(srcFile, destFile);
+ }
+ }
+ }
+ IOUtils.rm(tmpConfigDir); // clean up what we just copied
+ }
+}
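
downloadZipAndChecksum() above fetches <url>.sha1 alongside the zip, requires the checksum file to contain exactly one line, and compares hex-encoded SHA-1 digests. A self-contained sketch of the same comparison using the JDK's MessageDigest directly (the real code goes through Elasticsearch's MessageDigests helper):

    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    public class Sha1CheckSketch {
        public static void main(String[] args) throws NoSuchAlgorithmException {
            byte[] zipbytes = "pretend this is the plugin zip".getBytes(StandardCharsets.UTF_8);
            String expectedChecksum = toHex(MessageDigest.getInstance("SHA-1").digest(zipbytes));
            // recompute and compare, exactly like downloadZipAndChecksum() above
            String gotChecksum = toHex(MessageDigest.getInstance("SHA-1").digest(zipbytes));
            if (expectedChecksum.equals(gotChecksum) == false) {
                throw new IllegalStateException("SHA1 mismatch, expected " + expectedChecksum + " but got " + gotChecksum);
            }
            System.out.println("checksum ok: " + gotChecksum);
        }

        // MessageDigests.toHexString in the real code; plain JDK version here
        static String toHex(byte[] digest) {
            StringBuilder sb = new StringBuilder(digest.length * 2);
            for (byte b : digest) {
                sb.append(String.format("%02x", b));
            }
            return sb.toString();
        }
    }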
diff --git a/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java b/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java
new file mode 100644
index 0000000000..6abed4e6bc
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.plugins;
+
+import java.io.IOException;
+import java.nio.file.DirectoryStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+
+import org.elasticsearch.common.cli.CliTool;
+import org.elasticsearch.common.cli.Terminal;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
+
+/**
+ * A command for the plugin cli to list plugins installed in elasticsearch.
+ */
+class ListPluginsCommand extends CliTool.Command {
+
+ ListPluginsCommand(Terminal terminal) {
+ super(terminal);
+ }
+
+ @Override
+ public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception {
+ if (Files.exists(env.pluginsFile()) == false) {
+ throw new IOException("Plugins directory missing: " + env.pluginsFile());
+ }
+
+ terminal.println(Terminal.Verbosity.VERBOSE, "Plugins directory: " + env.pluginsFile());
+ try (DirectoryStream<Path> stream = Files.newDirectoryStream(env.pluginsFile())) {
+ for (Path plugin : stream) {
+ terminal.println(plugin.getFileName().toString());
+ }
+ }
+
+ return CliTool.ExitStatus.OK;
+ }
+}
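
ListPluginsCommand is intentionally thin: it fails if the plugins directory is missing, then prints one line per top-level entry. A runnable sketch of the same traversal outside the CLI framework, with the directory path as a stand-in for env.pluginsFile():

    import java.io.IOException;
    import java.nio.file.DirectoryStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class ListDirSketch {
        public static void main(String[] args) throws IOException {
            Path pluginsDir = Paths.get(args.length > 0 ? args[0] : "plugins"); // stand-in for env.pluginsFile()
            if (Files.exists(pluginsDir) == false) {
                throw new IOException("Plugins directory missing: " + pluginsDir);
            }
            // one line per top-level entry, mirroring ListPluginsCommand.execute()
            try (DirectoryStream<Path> stream = Files.newDirectoryStream(pluginsDir)) {
                for (Path plugin : stream) {
                    System.out.println(plugin.getFileName());
                }
            }
        }
    }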
diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginCli.java b/core/src/main/java/org/elasticsearch/plugins/PluginCli.java
new file mode 100644
index 0000000000..c69c07b3d6
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/plugins/PluginCli.java
@@ -0,0 +1,124 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.plugins;
+
+import org.apache.commons.cli.CommandLine;
+import org.elasticsearch.common.SuppressForbidden;
+import org.elasticsearch.common.cli.CliTool;
+import org.elasticsearch.common.cli.CliToolConfig;
+import org.elasticsearch.common.cli.Terminal;
+import org.elasticsearch.common.logging.log4j.LogConfigurator;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.node.internal.InternalSettingsPreparer;
+
+import java.util.Locale;
+
+import static org.elasticsearch.common.cli.CliToolConfig.Builder.cmd;
+import static org.elasticsearch.common.cli.CliToolConfig.Builder.option;
+
+/**
+ * A cli tool for adding, removing and listing plugins for elasticsearch.
+ */
+public class PluginCli extends CliTool {
+
+ // commands
+ private static final String LIST_CMD_NAME = "list";
+ private static final String INSTALL_CMD_NAME = "install";
+ private static final String REMOVE_CMD_NAME = "remove";
+
+ // usage config
+ private static final CliToolConfig.Cmd LIST_CMD = cmd(LIST_CMD_NAME, ListPluginsCommand.class).build();
+ private static final CliToolConfig.Cmd INSTALL_CMD = cmd(INSTALL_CMD_NAME, InstallPluginCommand.class)
+ .options(option("b", "batch").required(false))
+ .build();
+ private static final CliToolConfig.Cmd REMOVE_CMD = cmd(REMOVE_CMD_NAME, RemovePluginCommand.class).build();
+
+ static final CliToolConfig CONFIG = CliToolConfig.config("plugin", PluginCli.class)
+ .cmds(LIST_CMD, INSTALL_CMD, REMOVE_CMD)
+ .build();
+
+ public static void main(String[] args) {
+ // initialize default for es.logger.level because we will not read the logging.yml
+ String loggerLevel = System.getProperty("es.logger.level", "INFO");
+ // Set the appender for all potential log files to terminal so that other components that use the logger print to the
+ // same terminal.
+ // The reason for this is that the plugin cli cannot be configured with a file appender because when the plugin command is
+ // executed there is no way of knowing where the logfiles should be placed. For example, if elasticsearch
+ // is run as service then the logs should be at /var/log/elasticsearch but when started from the tar they should be at es.home/logs.
+ // Therefore we print to Terminal.
+ Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.builder()
+ .put("appender.terminal.type", "terminal")
+ .put("rootLogger", "${es.logger.level}, terminal")
+ .put("es.logger.level", loggerLevel)
+ .build(), Terminal.DEFAULT);
+ // configure but do not read the logging conf file
+ LogConfigurator.configure(env.settings(), false);
+ int status = new PluginCli(Terminal.DEFAULT).execute(args).status();
+ exit(status);
+ }
+
+ @SuppressForbidden(reason = "Allowed to exit explicitly from #main()")
+ private static void exit(int status) {
+ System.exit(status);
+ }
+
+ PluginCli(Terminal terminal) {
+ super(CONFIG, terminal);
+ }
+
+ @Override
+ protected Command parse(String cmdName, CommandLine cli) throws Exception {
+ switch (cmdName.toLowerCase(Locale.ROOT)) {
+ case LIST_CMD_NAME:
+ return new ListPluginsCommand(terminal);
+ case INSTALL_CMD_NAME:
+ return parseInstallPluginCommand(cli);
+ case REMOVE_CMD_NAME:
+ return parseRemovePluginCommand(cli);
+ default:
+ assert false : "can't get here as cmd name is validated before this method is called";
+ return exitCmd(ExitStatus.USAGE);
+ }
+ }
+
+ private Command parseInstallPluginCommand(CommandLine cli) {
+ String[] args = cli.getArgs();
+ if (args.length != 1) {
+ return exitCmd(ExitStatus.USAGE, terminal, "Must supply a single plugin id argument");
+ }
+
+ boolean batch = System.console() == null;
+ if (cli.hasOption("b")) {
+ batch = true;
+ }
+
+ return new InstallPluginCommand(terminal, args[0], batch);
+ }
+
+ private Command parseRemovePluginCommand(CommandLine cli) {
+ String[] args = cli.getArgs();
+ if (args.length != 1) {
+ return exitCmd(ExitStatus.USAGE, terminal, "Must supply a single plugin name argument");
+ }
+
+ return new RemovePluginCommand(terminal, args[0]);
+ }
+}
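
For reference, the command surface defined by CONFIG above works out to the following (invocation through the distribution's plugin launcher script is assumed):

    plugin list
    plugin install <plugin-id> [-b|--batch]
    plugin remove <plugin-name>
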
diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java b/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java
index 3062f01697..73464d054d 100644
--- a/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java
+++ b/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java
@@ -42,19 +42,14 @@ public class PluginInfo implements Streamable, ToXContent {
static final XContentBuilderString NAME = new XContentBuilderString("name");
static final XContentBuilderString DESCRIPTION = new XContentBuilderString("description");
static final XContentBuilderString URL = new XContentBuilderString("url");
- static final XContentBuilderString SITE = new XContentBuilderString("site");
static final XContentBuilderString VERSION = new XContentBuilderString("version");
- static final XContentBuilderString JVM = new XContentBuilderString("jvm");
static final XContentBuilderString CLASSNAME = new XContentBuilderString("classname");
static final XContentBuilderString ISOLATED = new XContentBuilderString("isolated");
}
private String name;
private String description;
- private boolean site;
private String version;
-
- private boolean jvm;
private String classname;
private boolean isolated;
@@ -66,15 +61,11 @@ public class PluginInfo implements Streamable, ToXContent {
*
* @param name Its name
* @param description Its description
- * @param site true if it's a site plugin
- * @param jvm true if it's a jvm plugin
* @param version Version number
*/
- PluginInfo(String name, String description, boolean site, String version, boolean jvm, String classname, boolean isolated) {
+ PluginInfo(String name, String description, String version, String classname, boolean isolated) {
this.name = name;
this.description = description;
- this.site = site;
- this.jvm = jvm;
this.version = version;
this.classname = classname;
this.isolated = isolated;
@@ -91,7 +82,6 @@ public class PluginInfo implements Streamable, ToXContent {
if (name == null || name.isEmpty()) {
throw new IllegalArgumentException("Property [name] is missing in [" + descriptor + "]");
}
- PluginManager.checkForForbiddenName(name);
String description = props.getProperty("description");
if (description == null) {
throw new IllegalArgumentException("Property [description] is missing for plugin [" + name + "]");
@@ -101,43 +91,28 @@ public class PluginInfo implements Streamable, ToXContent {
throw new IllegalArgumentException("Property [version] is missing for plugin [" + name + "]");
}
- boolean jvm = Boolean.parseBoolean(props.getProperty("jvm"));
- boolean site = Boolean.parseBoolean(props.getProperty("site"));
- if (jvm == false && site == false) {
- throw new IllegalArgumentException("Plugin [" + name + "] must be at least a jvm or site plugin");
+ String esVersionString = props.getProperty("elasticsearch.version");
+ if (esVersionString == null) {
+ throw new IllegalArgumentException("Property [elasticsearch.version] is missing for plugin [" + name + "]");
}
- boolean isolated = true;
- String classname = "NA";
- if (jvm) {
- String esVersionString = props.getProperty("elasticsearch.version");
- if (esVersionString == null) {
- throw new IllegalArgumentException("Property [elasticsearch.version] is missing for jvm plugin [" + name + "]");
- }
- Version esVersion = Version.fromString(esVersionString);
- if (esVersion.equals(Version.CURRENT) == false) {
- throw new IllegalArgumentException("Plugin [" + name + "] is incompatible with Elasticsearch [" + Version.CURRENT.toString() +
- "]. Was designed for version [" + esVersionString + "]");
- }
- String javaVersionString = props.getProperty("java.version");
- if (javaVersionString == null) {
- throw new IllegalArgumentException("Property [java.version] is missing for jvm plugin [" + name + "]");
- }
- JarHell.checkVersionFormat(javaVersionString);
- JarHell.checkJavaVersion(name, javaVersionString);
- isolated = Boolean.parseBoolean(props.getProperty("isolated", "true"));
- classname = props.getProperty("classname");
- if (classname == null) {
- throw new IllegalArgumentException("Property [classname] is missing for jvm plugin [" + name + "]");
- }
+ Version esVersion = Version.fromString(esVersionString);
+ if (esVersion.equals(Version.CURRENT) == false) {
+ throw new IllegalArgumentException("Plugin [" + name + "] is incompatible with Elasticsearch [" + Version.CURRENT.toString() +
+ "]. Was designed for version [" + esVersionString + "]");
}
-
- if (site) {
- if (!Files.exists(dir.resolve("_site"))) {
- throw new IllegalArgumentException("Plugin [" + name + "] is a site plugin but has no '_site/' directory");
- }
+ String javaVersionString = props.getProperty("java.version");
+ if (javaVersionString == null) {
+ throw new IllegalArgumentException("Property [java.version] is missing for plugin [" + name + "]");
+ }
+ JarHell.checkVersionFormat(javaVersionString);
+ JarHell.checkJavaVersion(name, javaVersionString);
+ boolean isolated = Boolean.parseBoolean(props.getProperty("isolated", "true"));
+ String classname = props.getProperty("classname");
+ if (classname == null) {
+ throw new IllegalArgumentException("Property [classname] is missing for plugin [" + name + "]");
}
- return new PluginInfo(name, description, site, version, jvm, classname, isolated);
+ return new PluginInfo(name, description, version, classname, isolated);
}
/**
@@ -155,47 +130,20 @@ public class PluginInfo implements Streamable, ToXContent {
}
/**
- * @return true if it's a site plugin
- */
- public boolean isSite() {
- return site;
- }
-
- /**
- * @return true if it's a plugin running in the jvm
- */
- public boolean isJvm() {
- return jvm;
- }
-
- /**
- * @return true if jvm plugin has isolated classloader
+ * @return true if the plugin has an isolated classloader
*/
public boolean isIsolated() {
return isolated;
}
/**
- * @return jvm plugin's classname
+ * @return plugin's classname
*/
public String getClassname() {
return classname;
}
/**
- * We compute the URL for sites: "/_plugin/" + name + "/"
- *
- * @return relative URL for site plugin
- */
- public String getUrl() {
- if (site) {
- return ("/_plugin/" + name + "/");
- } else {
- return null;
- }
- }
-
- /**
* @return Version number for the plugin
*/
public String getVersion() {
@@ -212,8 +160,6 @@ public class PluginInfo implements Streamable, ToXContent {
public void readFrom(StreamInput in) throws IOException {
this.name = in.readString();
this.description = in.readString();
- this.site = in.readBoolean();
- this.jvm = in.readBoolean();
this.version = in.readString();
this.classname = in.readString();
this.isolated = in.readBoolean();
@@ -223,8 +169,6 @@ public class PluginInfo implements Streamable, ToXContent {
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
out.writeString(description);
- out.writeBoolean(site);
- out.writeBoolean(jvm);
out.writeString(version);
out.writeString(classname);
out.writeBoolean(isolated);
@@ -236,15 +180,8 @@ public class PluginInfo implements Streamable, ToXContent {
builder.field(Fields.NAME, name);
builder.field(Fields.VERSION, version);
builder.field(Fields.DESCRIPTION, description);
- if (site) {
- builder.field(Fields.URL, getUrl());
- }
- builder.field(Fields.JVM, jvm);
- if (jvm) {
- builder.field(Fields.CLASSNAME, classname);
- builder.field(Fields.ISOLATED, isolated);
- }
- builder.field(Fields.SITE, site);
+ builder.field(Fields.CLASSNAME, classname);
+ builder.field(Fields.ISOLATED, isolated);
builder.endObject();
return builder;
@@ -274,14 +211,9 @@ public class PluginInfo implements Streamable, ToXContent {
.append("- Plugin information:\n")
.append("Name: ").append(name).append("\n")
.append("Description: ").append(description).append("\n")
- .append("Site: ").append(site).append("\n")
.append("Version: ").append(version).append("\n")
- .append("JVM: ").append(jvm).append("\n");
-
- if (jvm) {
- information.append(" * Classname: ").append(classname).append("\n");
- information.append(" * Isolated: ").append(isolated);
- }
+ .append(" * Classname: ").append(classname).append("\n")
+ .append(" * Isolated: ").append(isolated);
return information.toString();
}
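
A minimal plugin-descriptor.properties satisfying the rewritten readFromProperties above would look roughly like this (a sketch with illustrative values; elasticsearch.version must exactly match Version.CURRENT of the target build, and isolated may be omitted since it defaults to true):

    name=my-plugin
    description=An example plugin
    version=1.0
    elasticsearch.version=3.0.0-SNAPSHOT
    java.version=1.8
    classname=org.example.MyPlugin
    isolated=true
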
diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginManager.java b/core/src/main/java/org/elasticsearch/plugins/PluginManager.java
deleted file mode 100644
index 3abd37a124..0000000000
--- a/core/src/main/java/org/elasticsearch/plugins/PluginManager.java
+++ /dev/null
@@ -1,688 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.plugins;
-
-import org.apache.lucene.util.IOUtils;
-import org.elasticsearch.Build;
-import org.elasticsearch.ElasticsearchCorruptionException;
-import org.elasticsearch.ElasticsearchTimeoutException;
-import org.elasticsearch.ExceptionsHelper;
-import org.elasticsearch.Version;
-import org.elasticsearch.bootstrap.JarHell;
-import org.elasticsearch.common.Randomness;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.cli.Terminal;
-import org.elasticsearch.common.collect.Tuple;
-import org.elasticsearch.common.http.client.HttpDownloadHelper;
-import org.elasticsearch.common.io.FileSystemUtils;
-import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.env.Environment;
-import org.elasticsearch.plugins.PluginsService.Bundle;
-
-import java.io.IOException;
-import java.io.OutputStream;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.nio.file.DirectoryStream;
-import java.nio.file.FileVisitResult;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.SimpleFileVisitor;
-import java.nio.file.attribute.BasicFileAttributes;
-import java.nio.file.attribute.GroupPrincipal;
-import java.nio.file.attribute.PosixFileAttributeView;
-import java.nio.file.attribute.PosixFileAttributes;
-import java.nio.file.attribute.PosixFilePermission;
-import java.nio.file.attribute.UserPrincipal;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Locale;
-import java.util.Set;
-import java.util.stream.StreamSupport;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipInputStream;
-
-import static java.util.Collections.unmodifiableSet;
-import static org.elasticsearch.common.Strings.hasLength;
-import static org.elasticsearch.common.cli.Terminal.Verbosity.VERBOSE;
-import static org.elasticsearch.common.io.FileSystemUtils.moveFilesWithoutOverwriting;
-import static org.elasticsearch.common.util.set.Sets.newHashSet;
-
-/**
- *
- */
-public class PluginManager {
-
- public static final String PROPERTY_SUPPORT_STAGING_URLS = "es.plugins.staging";
-
- public enum OutputMode {
- DEFAULT, SILENT, VERBOSE
- }
-
- private static final Set<String> BLACKLIST = unmodifiableSet(newHashSet(
- "elasticsearch",
- "elasticsearch.bat",
- "elasticsearch.in.sh",
- "plugin",
- "plugin.bat",
- "service.bat"));
-
- static final Set<String> MODULES = unmodifiableSet(newHashSet(
- "lang-expression",
- "lang-groovy"));
-
- static final Set<String> OFFICIAL_PLUGINS = unmodifiableSet(newHashSet(
- "analysis-icu",
- "analysis-kuromoji",
- "analysis-phonetic",
- "analysis-smartcn",
- "analysis-stempel",
- "delete-by-query",
- "discovery-azure",
- "discovery-ec2",
- "discovery-gce",
- "discovery-multicast",
- "lang-javascript",
- "lang-plan-a",
- "lang-python",
- "mapper-attachments",
- "mapper-murmur3",
- "mapper-size",
- "reindex",
- "repository-azure",
- "repository-hdfs",
- "repository-s3",
- "store-smb"));
-
- private final Environment environment;
- private URL url;
- private OutputMode outputMode;
- private TimeValue timeout;
-
- public PluginManager(Environment environment, URL url, OutputMode outputMode, TimeValue timeout) {
- this.environment = environment;
- this.url = url;
- this.outputMode = outputMode;
- this.timeout = timeout;
- }
-
- public void downloadAndExtract(String name, Terminal terminal, boolean batch) throws IOException {
- if (name == null && url == null) {
- throw new IllegalArgumentException("plugin name or url must be supplied with install.");
- }
-
- if (!Files.exists(environment.pluginsFile())) {
- terminal.println("Plugins directory [%s] does not exist. Creating...", environment.pluginsFile());
- Files.createDirectory(environment.pluginsFile());
- }
-
- if (!Environment.isWritable(environment.pluginsFile())) {
- throw new IOException("plugin directory " + environment.pluginsFile() + " is read only");
- }
-
- PluginHandle pluginHandle;
- if (name != null) {
- pluginHandle = PluginHandle.parse(name);
- checkForForbiddenName(pluginHandle.name);
- } else {
- // if we have no name but url, use temporary name that will be overwritten later
- pluginHandle = new PluginHandle("temp_name" + Randomness.get().nextInt(), null, null);
- }
-
- Path pluginFile = download(pluginHandle, terminal);
- extract(pluginHandle, terminal, pluginFile, batch);
- }
-
- private Path download(PluginHandle pluginHandle, Terminal terminal) throws IOException {
- Path pluginFile = pluginHandle.newDistroFile(environment);
-
- HttpDownloadHelper downloadHelper = new HttpDownloadHelper();
- boolean downloaded = false;
- boolean verified = false;
- HttpDownloadHelper.DownloadProgress progress;
- if (outputMode == OutputMode.SILENT) {
- progress = new HttpDownloadHelper.NullProgress();
- } else {
- progress = new HttpDownloadHelper.VerboseProgress(terminal.writer());
- }
-
- // first, try directly from the URL provided
- if (url != null) {
- URL pluginUrl = url;
- boolean isSecureProcotol = "https".equalsIgnoreCase(pluginUrl.getProtocol());
- boolean isAuthInfoSet = !Strings.isNullOrEmpty(pluginUrl.getUserInfo());
- if (isAuthInfoSet && !isSecureProcotol) {
- throw new IOException("Basic auth is only supported for HTTPS!");
- }
-
- terminal.println("Trying %s ...", pluginUrl.toExternalForm());
- try {
- downloadHelper.download(pluginUrl, pluginFile, progress, this.timeout);
- downloaded = true;
- terminal.println("Verifying %s checksums if available ...", pluginUrl.toExternalForm());
- Tuple<URL, Path> sha1Info = pluginHandle.newChecksumUrlAndFile(environment, pluginUrl, "sha1");
- verified = downloadHelper.downloadAndVerifyChecksum(sha1Info.v1(), pluginFile,
- sha1Info.v2(), progress, this.timeout, HttpDownloadHelper.SHA1_CHECKSUM);
- Tuple<URL, Path> md5Info = pluginHandle.newChecksumUrlAndFile(environment, pluginUrl, "md5");
- verified = verified || downloadHelper.downloadAndVerifyChecksum(md5Info.v1(), pluginFile,
- md5Info.v2(), progress, this.timeout, HttpDownloadHelper.MD5_CHECKSUM);
- } catch (ElasticsearchTimeoutException | ElasticsearchCorruptionException e) {
- throw e;
- } catch (Exception e) {
- // ignore
- terminal.println("Failed: %s", ExceptionsHelper.detailedMessage(e));
- }
- } else {
- if (PluginHandle.isOfficialPlugin(pluginHandle.name, pluginHandle.user, pluginHandle.version)) {
- checkForOfficialPlugins(pluginHandle.name);
- }
- }
-
- if (!downloaded && url == null) {
- // We try all possible locations
- for (URL url : pluginHandle.urls()) {
- terminal.println("Trying %s ...", url.toExternalForm());
- try {
- downloadHelper.download(url, pluginFile, progress, this.timeout);
- downloaded = true;
- terminal.println("Verifying %s checksums if available ...", url.toExternalForm());
- Tuple<URL, Path> sha1Info = pluginHandle.newChecksumUrlAndFile(environment, url, "sha1");
- verified = downloadHelper.downloadAndVerifyChecksum(sha1Info.v1(), pluginFile,
- sha1Info.v2(), progress, this.timeout, HttpDownloadHelper.SHA1_CHECKSUM);
- Tuple<URL, Path> md5Info = pluginHandle.newChecksumUrlAndFile(environment, url, "md5");
- verified = verified || downloadHelper.downloadAndVerifyChecksum(md5Info.v1(), pluginFile,
- md5Info.v2(), progress, this.timeout, HttpDownloadHelper.MD5_CHECKSUM);
- break;
- } catch (ElasticsearchTimeoutException | ElasticsearchCorruptionException e) {
- throw e;
- } catch (Exception e) {
- terminal.println(VERBOSE, "Failed: %s", ExceptionsHelper.detailedMessage(e));
- }
- }
- }
-
- if (!downloaded) {
- // try to cleanup what we downloaded
- IOUtils.deleteFilesIgnoringExceptions(pluginFile);
- throw new IOException("failed to download out of all possible locations..., use --verbose to get detailed information");
- }
-
- if (verified == false) {
- terminal.println("NOTE: Unable to verify checksum for downloaded plugin (unable to find .sha1 or .md5 file to verify)");
- }
- return pluginFile;
- }
-
- private void extract(PluginHandle pluginHandle, Terminal terminal, Path pluginFile, boolean batch) throws IOException {
- // unzip plugin to a staging temp dir, named for the plugin
- Path tmp = Files.createTempDirectory(environment.tmpFile(), null);
- Path root = tmp.resolve(pluginHandle.name);
- unzipPlugin(pluginFile, root);
-
- // find the actual root (in case its unzipped with extra directory wrapping)
- root = findPluginRoot(root);
-
- // read and validate the plugin descriptor
- PluginInfo info = PluginInfo.readFromProperties(root);
- terminal.println(VERBOSE, "%s", info);
-
- // don't let luser install plugin as a module...
- // they might be unavoidably in maven central and are packaged up the same way)
- if (MODULES.contains(info.getName())) {
- throw new IOException("plugin '" + info.getName() + "' cannot be installed like this, it is a system module");
- }
-
- // update name in handle based on 'name' property found in descriptor file
- pluginHandle = new PluginHandle(info.getName(), pluginHandle.version, pluginHandle.user);
- final Path extractLocation = pluginHandle.extractedDir(environment);
- if (Files.exists(extractLocation)) {
- throw new IOException("plugin directory " + extractLocation.toAbsolutePath() + " already exists. To update the plugin, uninstall it first using 'remove " + pluginHandle.name + "' command");
- }
-
- // check for jar hell before any copying
- if (info.isJvm()) {
- jarHellCheck(root, info.isIsolated());
- }
-
- // read optional security policy (extra permissions)
- // if it exists, confirm or warn the user
- Path policy = root.resolve(PluginInfo.ES_PLUGIN_POLICY);
- if (Files.exists(policy)) {
- PluginSecurity.readPolicy(policy, terminal, environment, batch);
- }
-
- // install plugin
- FileSystemUtils.copyDirectoryRecursively(root, extractLocation);
- terminal.println("Installed %s into %s", pluginHandle.name, extractLocation.toAbsolutePath());
-
- // cleanup
- tryToDeletePath(terminal, tmp, pluginFile);
-
- // take care of bin/ by moving and applying permissions if needed
- Path sourcePluginBinDirectory = extractLocation.resolve("bin");
- Path destPluginBinDirectory = pluginHandle.binDir(environment);
- boolean needToCopyBinDirectory = Files.exists(sourcePluginBinDirectory);
- if (needToCopyBinDirectory) {
- if (Files.exists(destPluginBinDirectory) && !Files.isDirectory(destPluginBinDirectory)) {
- tryToDeletePath(terminal, extractLocation);
- throw new IOException("plugin bin directory " + destPluginBinDirectory + " is not a directory");
- }
-
- try {
- copyBinDirectory(sourcePluginBinDirectory, destPluginBinDirectory, pluginHandle.name, terminal);
- } catch (IOException e) {
- // rollback and remove potentially before installed leftovers
- terminal.printError("Error copying bin directory [%s] to [%s], cleaning up, reason: %s", sourcePluginBinDirectory, destPluginBinDirectory, ExceptionsHelper.detailedMessage(e));
- tryToDeletePath(terminal, extractLocation, pluginHandle.binDir(environment));
- throw e;
- }
-
- }
-
- Path sourceConfigDirectory = extractLocation.resolve("config");
- Path destConfigDirectory = pluginHandle.configDir(environment);
- boolean needToCopyConfigDirectory = Files.exists(sourceConfigDirectory);
- if (needToCopyConfigDirectory) {
- if (Files.exists(destConfigDirectory) && !Files.isDirectory(destConfigDirectory)) {
- tryToDeletePath(terminal, extractLocation, destPluginBinDirectory);
- throw new IOException("plugin config directory " + destConfigDirectory + " is not a directory");
- }
-
- try {
- terminal.println(VERBOSE, "Found config, moving to %s", destConfigDirectory.toAbsolutePath());
- moveFilesWithoutOverwriting(sourceConfigDirectory, destConfigDirectory, ".new");
-
- if (Environment.getFileStore(destConfigDirectory).supportsFileAttributeView(PosixFileAttributeView.class)) {
- //We copy owner, group and permissions from the parent ES_CONFIG directory, assuming they were properly set depending
- // on how es was installed in the first place: can be root:elasticsearch (750) if es was installed from rpm/deb packages
- // or most likely elasticsearch:elasticsearch if installed from tar/zip. As for permissions we don't rely on umask.
- PosixFileAttributes parentDirAttributes = Files.getFileAttributeView(destConfigDirectory.getParent(), PosixFileAttributeView.class).readAttributes();
- //for files though, we make sure not to copy execute permissions from the parent dir and leave them untouched
- Set<PosixFilePermission> baseFilePermissions = new HashSet<>();
- for (PosixFilePermission posixFilePermission : parentDirAttributes.permissions()) {
- switch (posixFilePermission) {
- case OWNER_EXECUTE:
- case GROUP_EXECUTE:
- case OTHERS_EXECUTE:
- break;
- default:
- baseFilePermissions.add(posixFilePermission);
- }
- }
- Files.walkFileTree(destConfigDirectory, new SimpleFileVisitor<Path>() {
- @Override
- public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
- if (attrs.isRegularFile()) {
- Set<PosixFilePermission> newFilePermissions = new HashSet<>(baseFilePermissions);
- Set<PosixFilePermission> currentFilePermissions = Files.getPosixFilePermissions(file);
- for (PosixFilePermission posixFilePermission : currentFilePermissions) {
- switch (posixFilePermission) {
- case OWNER_EXECUTE:
- case GROUP_EXECUTE:
- case OTHERS_EXECUTE:
- newFilePermissions.add(posixFilePermission);
- }
- }
- setPosixFileAttributes(file, parentDirAttributes.owner(), parentDirAttributes.group(), newFilePermissions);
- }
- return FileVisitResult.CONTINUE;
- }
-
- @Override
- public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
- setPosixFileAttributes(dir, parentDirAttributes.owner(), parentDirAttributes.group(), parentDirAttributes.permissions());
- return FileVisitResult.CONTINUE;
- }
- });
- } else {
- terminal.println(VERBOSE, "Skipping posix permissions - filestore doesn't support posix permission");
- }
-
- terminal.println(VERBOSE, "Installed %s into %s", pluginHandle.name, destConfigDirectory.toAbsolutePath());
- } catch (IOException e) {
- terminal.printError("Error copying config directory [%s] to [%s], cleaning up, reason: %s", sourceConfigDirectory, destConfigDirectory, ExceptionsHelper.detailedMessage(e));
- tryToDeletePath(terminal, extractLocation, destPluginBinDirectory, destConfigDirectory);
- throw e;
- }
- }
- }
-
- private static void setPosixFileAttributes(Path path, UserPrincipal owner, GroupPrincipal group, Set<PosixFilePermission> permissions) throws IOException {
- PosixFileAttributeView fileAttributeView = Files.getFileAttributeView(path, PosixFileAttributeView.class);
- fileAttributeView.setOwner(owner);
- fileAttributeView.setGroup(group);
- fileAttributeView.setPermissions(permissions);
- }
-
- static void tryToDeletePath(Terminal terminal, Path ... paths) {
- for (Path path : paths) {
- try {
- IOUtils.rm(path);
- } catch (IOException e) {
- terminal.printError(e);
- }
- }
- }
-
- private void copyBinDirectory(Path sourcePluginBinDirectory, Path destPluginBinDirectory, String pluginName, Terminal terminal) throws IOException {
- boolean canCopyFromSource = Files.exists(sourcePluginBinDirectory) && Files.isReadable(sourcePluginBinDirectory) && Files.isDirectory(sourcePluginBinDirectory);
- if (canCopyFromSource) {
- terminal.println(VERBOSE, "Found bin, moving to %s", destPluginBinDirectory.toAbsolutePath());
- if (Files.exists(destPluginBinDirectory)) {
- IOUtils.rm(destPluginBinDirectory);
- }
- try {
- Files.createDirectories(destPluginBinDirectory.getParent());
- FileSystemUtils.move(sourcePluginBinDirectory, destPluginBinDirectory);
- } catch (IOException e) {
- throw new IOException("Could not move [" + sourcePluginBinDirectory + "] to [" + destPluginBinDirectory + "]", e);
- }
- if (Environment.getFileStore(destPluginBinDirectory).supportsFileAttributeView(PosixFileAttributeView.class)) {
- PosixFileAttributes parentDirAttributes = Files.getFileAttributeView(destPluginBinDirectory.getParent(), PosixFileAttributeView.class).readAttributes();
- //copy permissions from parent bin directory
- Set<PosixFilePermission> filePermissions = new HashSet<>();
- for (PosixFilePermission posixFilePermission : parentDirAttributes.permissions()) {
- switch (posixFilePermission) {
- case OWNER_EXECUTE:
- case GROUP_EXECUTE:
- case OTHERS_EXECUTE:
- break;
- default:
- filePermissions.add(posixFilePermission);
- }
- }
- // add file execute permissions to existing perms, so execution will work.
- filePermissions.add(PosixFilePermission.OWNER_EXECUTE);
- filePermissions.add(PosixFilePermission.GROUP_EXECUTE);
- filePermissions.add(PosixFilePermission.OTHERS_EXECUTE);
- Files.walkFileTree(destPluginBinDirectory, new SimpleFileVisitor<Path>() {
- @Override
- public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
- if (attrs.isRegularFile()) {
- setPosixFileAttributes(file, parentDirAttributes.owner(), parentDirAttributes.group(), filePermissions);
- }
- return FileVisitResult.CONTINUE;
- }
-
- @Override
- public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
- setPosixFileAttributes(dir, parentDirAttributes.owner(), parentDirAttributes.group(), parentDirAttributes.permissions());
- return FileVisitResult.CONTINUE;
- }
- });
- } else {
- terminal.println(VERBOSE, "Skipping posix permissions - filestore doesn't support posix permission");
- }
- terminal.println(VERBOSE, "Installed %s into %s", pluginName, destPluginBinDirectory.toAbsolutePath());
- }
- }
-
- /** we check whether we need to remove the top-level folder while extracting
- * sometimes (e.g. github) the downloaded archive contains a top-level folder which needs to be removed
- */
- private Path findPluginRoot(Path dir) throws IOException {
- if (Files.exists(dir.resolve(PluginInfo.ES_PLUGIN_PROPERTIES))) {
- return dir;
- } else {
- final Path[] topLevelFiles = FileSystemUtils.files(dir);
- if (topLevelFiles.length == 1 && Files.isDirectory(topLevelFiles[0])) {
- Path subdir = topLevelFiles[0];
- if (Files.exists(subdir.resolve(PluginInfo.ES_PLUGIN_PROPERTIES))) {
- return subdir;
- }
- }
- }
- throw new RuntimeException("Could not find plugin descriptor '" + PluginInfo.ES_PLUGIN_PROPERTIES + "' in plugin zip");
- }
-
- /** check a candidate plugin for jar hell before installing it */
- private void jarHellCheck(Path candidate, boolean isolated) throws IOException {
- // create list of current jars in classpath
- final List<URL> jars = new ArrayList<>();
- jars.addAll(Arrays.asList(JarHell.parseClassPath()));
-
- // read existing bundles. this does some checks on the installation too.
- List<Bundle> bundles = PluginsService.getPluginBundles(environment.pluginsFile());
-
- // if we aren't isolated, we need to jarhellcheck against any other non-isolated plugins
- // thats always the first bundle
- if (isolated == false) {
- jars.addAll(bundles.get(0).urls);
- }
-
- // add plugin jars to the list
- Path pluginJars[] = FileSystemUtils.files(candidate, "*.jar");
- for (Path jar : pluginJars) {
- jars.add(jar.toUri().toURL());
- }
-
- // check combined (current classpath + new jars to-be-added)
- try {
- JarHell.checkJarHell(jars.toArray(new URL[jars.size()]));
- } catch (Exception ex) {
- throw new RuntimeException(ex);
- }
- }
-
- private void unzipPlugin(Path zip, Path target) throws IOException {
- Files.createDirectories(target);
-
- try (ZipInputStream zipInput = new ZipInputStream(Files.newInputStream(zip))) {
- ZipEntry entry;
- byte[] buffer = new byte[8192];
- while ((entry = zipInput.getNextEntry()) != null) {
- Path targetFile = target.resolve(entry.getName());
-
- // be on the safe side: do not rely on that directories are always extracted
- // before their children (although this makes sense, but is it guaranteed?)
- Files.createDirectories(targetFile.getParent());
- if (entry.isDirectory() == false) {
- try (OutputStream out = Files.newOutputStream(targetFile)) {
- int len;
- while((len = zipInput.read(buffer)) >= 0) {
- out.write(buffer, 0, len);
- }
- }
- }
- zipInput.closeEntry();
- }
- }
- }
-
- public void removePlugin(String name, Terminal terminal) throws IOException {
- if (name == null) {
- throw new IllegalArgumentException("plugin name must be supplied with remove [name].");
- }
- PluginHandle pluginHandle = PluginHandle.parse(name);
- boolean removed = false;
-
- checkForForbiddenName(pluginHandle.name);
- Path pluginToDelete = pluginHandle.extractedDir(environment);
- if (Files.exists(pluginToDelete)) {
- terminal.println(VERBOSE, "Removing: %s", pluginToDelete);
- try {
- IOUtils.rm(pluginToDelete);
- } catch (IOException ex){
- throw new IOException("Unable to remove " + pluginHandle.name + ". Check file permissions on " +
- pluginToDelete.toString(), ex);
- }
- removed = true;
- }
- Path binLocation = pluginHandle.binDir(environment);
- if (Files.exists(binLocation)) {
- terminal.println(VERBOSE, "Removing: %s", binLocation);
- try {
- IOUtils.rm(binLocation);
- } catch (IOException ex){
- throw new IOException("Unable to remove " + pluginHandle.name + ". Check file permissions on " +
- binLocation.toString(), ex);
- }
- removed = true;
- }
-
- if (removed) {
- terminal.println("Removed %s", name);
- } else {
- terminal.println("Plugin %s not found. Run \"plugin list\" to get list of installed plugins.", name);
- }
- }
-
- static void checkForForbiddenName(String name) {
- if (!hasLength(name) || BLACKLIST.contains(name.toLowerCase(Locale.ROOT))) {
- throw new IllegalArgumentException("Illegal plugin name: " + name);
- }
- }
-
- protected static void checkForOfficialPlugins(String name) {
- // We make sure that users can use only new short naming for official plugins only
- if (!OFFICIAL_PLUGINS.contains(name)) {
- throw new IllegalArgumentException(name +
- " is not an official plugin so you should install it using elasticsearch/" +
- name + "/latest naming form.");
- }
- }
-
- public Path[] getListInstalledPlugins() throws IOException {
- if (!Files.exists(environment.pluginsFile())) {
- return new Path[0];
- }
-
- try (DirectoryStream<Path> stream = Files.newDirectoryStream(environment.pluginsFile())) {
- return StreamSupport.stream(stream.spliterator(), false).toArray(length -> new Path[length]);
- }
- }
-
- public void listInstalledPlugins(Terminal terminal) throws IOException {
- Path[] plugins = getListInstalledPlugins();
- terminal.println("Installed plugins in %s:", environment.pluginsFile().toAbsolutePath());
- if (plugins == null || plugins.length == 0) {
- terminal.println(" - No plugin detected");
- } else {
- for (Path plugin : plugins) {
- terminal.println(" - " + plugin.getFileName());
- }
- }
- }
-
- /**
- * Helper class to extract properly user name, repository name, version and plugin name
- * from plugin name given by a user.
- */
- static class PluginHandle {
-
- final String version;
- final String user;
- final String name;
-
- PluginHandle(String name, String version, String user) {
- this.version = version;
- this.user = user;
- this.name = name;
- }
-
- List<URL> urls() {
- List<URL> urls = new ArrayList<>();
- if (version != null) {
- // Elasticsearch new download service uses groupId org.elasticsearch.plugin from 2.0.0
- if (user == null) {
- if (!Strings.isNullOrEmpty(System.getProperty(PROPERTY_SUPPORT_STAGING_URLS))) {
- addUrl(urls, String.format(Locale.ROOT, "https://download.elastic.co/elasticsearch/staging/%s-%s/org/elasticsearch/plugin/%s/%s/%s-%s.zip", version, Build.CURRENT.shortHash(), name, version, name, version));
- }
- addUrl(urls, String.format(Locale.ROOT, "https://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/%s/%s/%s-%s.zip", name, version, name, version));
- } else {
- // Elasticsearch old download service
- addUrl(urls, String.format(Locale.ROOT, "https://download.elastic.co/%1$s/%2$s/%2$s-%3$s.zip", user, name, version));
- // Maven central repository
- addUrl(urls, String.format(Locale.ROOT, "https://search.maven.org/remotecontent?filepath=%1$s/%2$s/%3$s/%2$s-%3$s.zip", user.replace('.', '/'), name, version));
- // Sonatype repository
- addUrl(urls, String.format(Locale.ROOT, "https://oss.sonatype.org/service/local/repositories/releases/content/%1$s/%2$s/%3$s/%2$s-%3$s.zip", user.replace('.', '/'), name, version));
- // Github repository
- addUrl(urls, String.format(Locale.ROOT, "https://github.com/%1$s/%2$s/archive/%3$s.zip", user, name, version));
- }
- }
- if (user != null) {
- // Github repository for master branch (assume site)
- addUrl(urls, String.format(Locale.ROOT, "https://github.com/%1$s/%2$s/archive/master.zip", user, name));
- }
- return urls;
- }
-
- private static void addUrl(List<URL> urls, String url) {
- try {
- urls.add(new URL(url));
- } catch (MalformedURLException e) {
- // We simply ignore malformed URL
- }
- }
-
- Path newDistroFile(Environment env) throws IOException {
- return Files.createTempFile(env.tmpFile(), name, ".zip");
- }
-
- Tuple<URL, Path> newChecksumUrlAndFile(Environment env, URL originalUrl, String suffix) throws IOException {
- URL newUrl = new URL(originalUrl.toString() + "." + suffix);
- return new Tuple<>(newUrl, Files.createTempFile(env.tmpFile(), name, ".zip." + suffix));
- }
-
- Path extractedDir(Environment env) {
- return env.pluginsFile().resolve(name);
- }
-
- Path binDir(Environment env) {
- return env.binFile().resolve(name);
- }
-
- Path configDir(Environment env) {
- return env.configFile().resolve(name);
- }
-
- static PluginHandle parse(String name) {
- String[] elements = name.split("/");
- // We first consider the simplest form: pluginname
- String repo = elements[0];
- String user = null;
- String version = null;
-
- // We consider the form: username/pluginname
- if (elements.length > 1) {
- user = elements[0];
- repo = elements[1];
-
- // We consider the form: username/pluginname/version
- if (elements.length > 2) {
- version = elements[2];
- }
- }
-
- if (isOfficialPlugin(repo, user, version)) {
- return new PluginHandle(repo, Version.CURRENT.number(), null);
- }
-
- return new PluginHandle(repo, version, user);
- }
-
- static boolean isOfficialPlugin(String repo, String user, String version) {
- return version == null && user == null && !Strings.isNullOrEmpty(repo);
- }
- }
-
-}
diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginManagerCliParser.java b/core/src/main/java/org/elasticsearch/plugins/PluginManagerCliParser.java
deleted file mode 100644
index a8a51db971..0000000000
--- a/core/src/main/java/org/elasticsearch/plugins/PluginManagerCliParser.java
+++ /dev/null
@@ -1,256 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.plugins;
-
-import org.apache.commons.cli.CommandLine;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.SuppressForbidden;
-import org.elasticsearch.common.cli.CliTool;
-import org.elasticsearch.common.cli.CliToolConfig;
-import org.elasticsearch.common.cli.Terminal;
-import org.elasticsearch.common.logging.log4j.LogConfigurator;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.env.Environment;
-import org.elasticsearch.node.internal.InternalSettingsPreparer;
-import org.elasticsearch.plugins.PluginManager.OutputMode;
-
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.net.URLDecoder;
-import java.util.Locale;
-
-import static org.elasticsearch.common.cli.CliToolConfig.Builder.cmd;
-import static org.elasticsearch.common.cli.CliToolConfig.Builder.option;
-
-public class PluginManagerCliParser extends CliTool {
-
- // By default timeout is 0 which means no timeout
- public static final TimeValue DEFAULT_TIMEOUT = TimeValue.timeValueMillis(0);
-
- private static final CliToolConfig CONFIG = CliToolConfig.config("plugin", PluginManagerCliParser.class)
- .cmds(ListPlugins.CMD, Install.CMD, Remove.CMD)
- .build();
-
- public static void main(String[] args) {
- // initialize default for es.logger.level because we will not read the logging.yml
- String loggerLevel = System.getProperty("es.logger.level", "INFO");
- // Set the appender for all potential log files to terminal so that other components that use the logger print out the
- // same terminal.
- // The reason for this is that the plugin cli cannot be configured with a file appender because when the plugin command is
- // executed there is no way of knowing where the logfiles should be placed. For example, if elasticsearch
- // is run as service then the logs should be at /var/log/elasticsearch but when started from the tar they should be at es.home/logs.
- // Therefore we print to Terminal.
- Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.builder()
- .put("appender.terminal.type", "terminal")
- .put("rootLogger", "${es.logger.level}, terminal")
- .put("es.logger.level", loggerLevel)
- .build(), Terminal.DEFAULT);
- // configure but do not read the logging conf file
- LogConfigurator.configure(env.settings(), false);
- int status = new PluginManagerCliParser().execute(args).status();
- exit(status);
- }
-
- @SuppressForbidden(reason = "Allowed to exit explicitly from #main()")
- private static void exit(int status) {
- System.exit(status);
- }
-
- public PluginManagerCliParser() {
- super(CONFIG);
- }
-
- public PluginManagerCliParser(Terminal terminal) {
- super(CONFIG, terminal);
- }
-
- @Override
- protected Command parse(String cmdName, CommandLine cli) throws Exception {
- switch (cmdName.toLowerCase(Locale.ROOT)) {
- case Install.NAME:
- return Install.parse(terminal, cli);
- case ListPlugins.NAME:
- return ListPlugins.parse(terminal, cli);
- case Remove.NAME:
- return Remove.parse(terminal, cli);
- default:
- assert false : "can't get here as cmd name is validated before this method is called";
- return exitCmd(ExitStatus.USAGE);
- }
- }
-
- /**
- * List all installed plugins
- */
- static class ListPlugins extends CliTool.Command {
-
- private static final String NAME = "list";
-
- private static final CliToolConfig.Cmd CMD = cmd(NAME, ListPlugins.class).build();
- private final OutputMode outputMode;
-
- public static Command parse(Terminal terminal, CommandLine cli) {
- OutputMode outputMode = OutputMode.DEFAULT;
- if (cli.hasOption("s")) {
- outputMode = OutputMode.SILENT;
- }
- if (cli.hasOption("v")) {
- outputMode = OutputMode.VERBOSE;
- }
-
- return new ListPlugins(terminal, outputMode);
- }
-
- ListPlugins(Terminal terminal, OutputMode outputMode) {
- super(terminal);
- this.outputMode = outputMode;
- }
-
- @Override
- public ExitStatus execute(Settings settings, Environment env) throws Exception {
- PluginManager pluginManager = new PluginManager(env, null, outputMode, DEFAULT_TIMEOUT);
- pluginManager.listInstalledPlugins(terminal);
- return ExitStatus.OK;
- }
- }
-
- /**
- * Remove a plugin
- */
- static class Remove extends CliTool.Command {
-
- private static final String NAME = "remove";
-
- private static final CliToolConfig.Cmd CMD = cmd(NAME, Remove.class).build();
-
- public static Command parse(Terminal terminal, CommandLine cli) {
- String[] args = cli.getArgs();
- if (args.length == 0) {
- return exitCmd(ExitStatus.USAGE, terminal, "plugin name is missing (type -h for help)");
- }
-
- OutputMode outputMode = OutputMode.DEFAULT;
- if (cli.hasOption("s")) {
- outputMode = OutputMode.SILENT;
- }
- if (cli.hasOption("v")) {
- outputMode = OutputMode.VERBOSE;
- }
-
- return new Remove(terminal, outputMode, args[0]);
- }
-
- private OutputMode outputMode;
- final String pluginName;
-
- Remove(Terminal terminal, OutputMode outputMode, String pluginToRemove) {
- super(terminal);
- this.outputMode = outputMode;
- this.pluginName = pluginToRemove;
- }
-
- @Override
- public ExitStatus execute(Settings settings, Environment env) throws Exception {
-
- PluginManager pluginManager = new PluginManager(env, null, outputMode, DEFAULT_TIMEOUT);
- terminal.println("-> Removing " + Strings.coalesceToEmpty(pluginName) + "...");
- pluginManager.removePlugin(pluginName, terminal);
- return ExitStatus.OK;
- }
- }
-
- /**
- * Installs a plugin
- */
- static class Install extends Command {
-
- private static final String NAME = "install";
-
- private static final CliToolConfig.Cmd CMD = cmd(NAME, Install.class)
- .options(option("t", "timeout").required(false).hasArg(false))
- .options(option("b", "batch").required(false))
- .build();
-
- static Command parse(Terminal terminal, CommandLine cli) {
- String[] args = cli.getArgs();
-
- // install [plugin-name/url]
- if ((args == null) || (args.length == 0)) {
- return exitCmd(ExitStatus.USAGE, terminal, "plugin name or url is missing (type -h for help)");
- }
- String name = args[0];
-
- URL optionalPluginUrl = null;
- // try parsing cli argument as URL
- try {
- optionalPluginUrl = new URL(name);
- name = null;
- } catch (MalformedURLException e) {
- // we tried to parse the cli argument as url and failed
- // continue treating it as a symbolic plugin name like `analysis-icu` etc.
- }
-
- TimeValue timeout = TimeValue.parseTimeValue(cli.getOptionValue("t"), DEFAULT_TIMEOUT, "cli");
-
- OutputMode outputMode = OutputMode.DEFAULT;
- if (cli.hasOption("s")) {
- outputMode = OutputMode.SILENT;
- }
- if (cli.hasOption("v")) {
- outputMode = OutputMode.VERBOSE;
- }
-
- boolean batch = System.console() == null;
- if (cli.hasOption("b")) {
- batch = true;
- }
-
- return new Install(terminal, name, outputMode, optionalPluginUrl, timeout, batch);
- }
-
- final String name;
- private OutputMode outputMode;
- final URL url;
- final TimeValue timeout;
- final boolean batch;
-
- Install(Terminal terminal, String name, OutputMode outputMode, URL url, TimeValue timeout, boolean batch) {
- super(terminal);
- this.name = name;
- this.outputMode = outputMode;
- this.url = url;
- this.timeout = timeout;
- this.batch = batch;
- }
-
- @Override
- public ExitStatus execute(Settings settings, Environment env) throws Exception {
- PluginManager pluginManager = new PluginManager(env, url, outputMode, timeout);
- if (name != null) {
- terminal.println("-> Installing " + Strings.coalesceToEmpty(name) + "...");
- } else {
- terminal.println("-> Installing from " + URLDecoder.decode(url.toString(), "UTF-8") + "...");
- }
- pluginManager.downloadAndExtract(name, terminal, batch);
- return ExitStatus.OK;
- }
- }
-}
\ No newline at end of file
diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginSecurity.java b/core/src/main/java/org/elasticsearch/plugins/PluginSecurity.java
index fd7f2d84e2..4fd039cfaa 100644
--- a/core/src/main/java/org/elasticsearch/plugins/PluginSecurity.java
+++ b/core/src/main/java/org/elasticsearch/plugins/PluginSecurity.java
@@ -19,6 +19,7 @@
package org.elasticsearch.plugins;
+import org.apache.lucene.util.IOUtils;
import org.elasticsearch.common.cli.Terminal;
import org.elasticsearch.common.cli.Terminal.Verbosity;
import org.elasticsearch.env.Environment;
@@ -38,7 +39,7 @@ import java.util.Comparator;
import java.util.List;
class PluginSecurity {
-
+
/**
* Reads plugin policy, prints/confirms exceptions
*/
@@ -49,7 +50,7 @@ class PluginSecurity {
terminal.print(Verbosity.VERBOSE, "plugin has a policy file with no additional permissions");
return;
}
-
+
// sort permissions in a reasonable order
Collections.sort(requested, new Comparator<Permission>() {
@Override
@@ -80,7 +81,7 @@ class PluginSecurity {
return cmp;
}
});
-
+
terminal.println(Verbosity.NORMAL, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@");
terminal.println(Verbosity.NORMAL, "@ WARNING: plugin requires additional permissions @");
terminal.println(Verbosity.NORMAL, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@");
@@ -98,11 +99,11 @@ class PluginSecurity {
}
}
}
-
+
/** Format permission type, name, and actions into a string */
static String formatPermission(Permission permission) {
StringBuilder sb = new StringBuilder();
-
+
String clazz = null;
if (permission instanceof UnresolvedPermission) {
clazz = ((UnresolvedPermission) permission).getUnresolvedType();
@@ -110,7 +111,7 @@ class PluginSecurity {
clazz = permission.getClass().getName();
}
sb.append(clazz);
-
+
String name = null;
if (permission instanceof UnresolvedPermission) {
name = ((UnresolvedPermission) permission).getUnresolvedName();
@@ -121,7 +122,7 @@ class PluginSecurity {
sb.append(' ');
sb.append(name);
}
-
+
String actions = null;
if (permission instanceof UnresolvedPermission) {
actions = ((UnresolvedPermission) permission).getUnresolvedActions();
@@ -134,7 +135,7 @@ class PluginSecurity {
}
return sb.toString();
}
-
+
/**
* Parses plugin policy into a set of permissions
*/
@@ -151,8 +152,8 @@ class PluginSecurity {
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
- PluginManager.tryToDeletePath(terminal, emptyPolicyFile);
-
+ IOUtils.rm(emptyPolicyFile);
+
// parse the plugin's policy file into a set of permissions
final Policy policy;
try {
diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java
index 50938a1916..4e61185491 100644
--- a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java
+++ b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java
@@ -98,7 +98,7 @@ public class PluginsService extends AbstractComponent {
// first we load plugins that are on the classpath. this is for tests and transport clients
for (Class<? extends Plugin> pluginClass : classpathPlugins) {
Plugin plugin = loadPlugin(pluginClass, settings);
- PluginInfo pluginInfo = new PluginInfo(plugin.name(), plugin.description(), false, "NA", true, pluginClass.getName(), false);
+ PluginInfo pluginInfo = new PluginInfo(plugin.name(), plugin.description(), "NA", pluginClass.getName(), false);
if (logger.isTraceEnabled()) {
logger.trace("plugin loaded from classpath [{}]", pluginInfo);
}
@@ -136,18 +136,10 @@ public class PluginsService extends AbstractComponent {
plugins = Collections.unmodifiableList(pluginsLoaded);
- // We need to build a List of jvm and site plugins for checking mandatory plugins
- Map<String, Plugin> jvmPlugins = new HashMap<>();
- List<String> sitePlugins = new ArrayList<>();
-
+ // We need to build a List of plugins for checking mandatory plugins
+ Set<String> pluginsNames = new HashSet<>();
for (Tuple<PluginInfo, Plugin> tuple : plugins) {
- PluginInfo info = tuple.v1();
- if (info.isJvm()) {
- jvmPlugins.put(info.getName(), tuple.v2());
- }
- if (info.isSite()) {
- sitePlugins.add(info.getName());
- }
+ pluginsNames.add(tuple.v1().getName());
}
// Checking expected plugins
@@ -155,7 +147,7 @@ public class PluginsService extends AbstractComponent {
if (mandatoryPlugins != null) {
Set<String> missingPlugins = new HashSet<>();
for (String mandatoryPlugin : mandatoryPlugins) {
- if (!jvmPlugins.containsKey(mandatoryPlugin) && !sitePlugins.contains(mandatoryPlugin) && !missingPlugins.contains(mandatoryPlugin)) {
+ if (!pluginsNames.contains(mandatoryPlugin) && !missingPlugins.contains(mandatoryPlugin)) {
missingPlugins.add(mandatoryPlugin);
}
}
@@ -175,10 +167,11 @@ public class PluginsService extends AbstractComponent {
jvmPluginNames.add(pluginInfo.getName());
}
- logger.info("modules {}, plugins {}, sites {}", moduleNames, jvmPluginNames, sitePlugins);
+ logger.info("modules {}, plugins {}", moduleNames, jvmPluginNames);
Map<Plugin, List<OnModuleReference>> onModuleReferences = new HashMap<>();
- for (Plugin plugin : jvmPlugins.values()) {
+ for (Tuple<PluginInfo, Plugin> pluginEntry : plugins) {
+ Plugin plugin = pluginEntry.v2();
List<OnModuleReference> list = new ArrayList<>();
for (Method method : plugin.getClass().getMethods()) {
if (!method.getName().equals("onModule")) {
@@ -304,9 +297,6 @@ public class PluginsService extends AbstractComponent {
continue; // skip over .DS_Store etc
}
PluginInfo info = PluginInfo.readFromProperties(module);
- if (!info.isJvm()) {
- throw new IllegalStateException("modules must be jvm plugins: " + info);
- }
if (!info.isIsolated()) {
throw new IllegalStateException("modules must be isolated: " + info);
}
@@ -353,17 +343,14 @@ public class PluginsService extends AbstractComponent {
}
List<URL> urls = new ArrayList<>();
- if (info.isJvm()) {
- // a jvm plugin: gather urls for jar files
- try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(plugin, "*.jar")) {
- for (Path jar : jarStream) {
- // normalize with toRealPath to get symlinks out of our hair
- urls.add(jar.toRealPath().toUri().toURL());
- }
+ try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(plugin, "*.jar")) {
+ for (Path jar : jarStream) {
+ // normalize with toRealPath to get symlinks out of our hair
+ urls.add(jar.toRealPath().toUri().toURL());
}
}
final Bundle bundle;
- if (info.isJvm() && info.isIsolated() == false) {
+ if (info.isIsolated() == false) {
bundle = bundles.get(0); // purgatory
} else {
bundle = new Bundle();
@@ -395,15 +382,10 @@ public class PluginsService extends AbstractComponent {
// create a child to load the plugins in this bundle
ClassLoader loader = URLClassLoader.newInstance(bundle.urls.toArray(new URL[0]), getClass().getClassLoader());
for (PluginInfo pluginInfo : bundle.plugins) {
- final Plugin plugin;
- if (pluginInfo.isJvm()) {
- // reload lucene SPI with any new services from the plugin
- reloadLuceneSPI(loader);
- Class<? extends Plugin> pluginClass = loadPluginClass(pluginInfo.getClassname(), loader);
- plugin = loadPlugin(pluginClass, settings);
- } else {
- plugin = new SitePlugin(pluginInfo.getName(), pluginInfo.getDescription());
- }
+ // reload lucene SPI with any new services from the plugin
+ reloadLuceneSPI(loader);
+ final Class<? extends Plugin> pluginClass = loadPluginClass(pluginInfo.getClassname(), loader);
+ final Plugin plugin = loadPlugin(pluginClass, settings);
plugins.add(new Tuple<>(pluginInfo, plugin));
}
}
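
As context for the onModule scan above: plugins expose hooks by declaring public methods named onModule, which PluginsService discovers reflectively. A sketch of such a hook follows (MyPlugin and the choice of SettingsModule are illustrative; the method is expected to take a single Module-derived parameter):

    public class MyPlugin extends Plugin {
        @Override
        public String name() {
            return "my-plugin";
        }

        @Override
        public String description() {
            return "An example plugin";
        }

        // invoked by PluginsService for each matching module instance
        public void onModule(SettingsModule module) {
            // a hypothetical registration call would go here
        }
    }
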
diff --git a/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java
new file mode 100644
index 0000000000..f5e55ba2b2
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.plugins;
+
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardCopyOption;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.lucene.util.IOUtils;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.cli.CliTool;
+import org.elasticsearch.common.cli.Terminal;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
+
+import static org.elasticsearch.common.cli.Terminal.Verbosity.VERBOSE;
+
+/**
+ * A command for the plugin CLI to remove a plugin from Elasticsearch.
+ */
+class RemovePluginCommand extends CliTool.Command {
+ private final String pluginName;
+
+ public RemovePluginCommand(Terminal terminal, String pluginName) {
+ super(terminal);
+ this.pluginName = pluginName;
+ }
+
+ @Override
+ public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception {
+ terminal.println("-> Removing " + Strings.coalesceToEmpty(pluginName) + "...");
+
+ Path pluginDir = env.pluginsFile().resolve(pluginName);
+ if (Files.exists(pluginDir) == false) {
+ throw new IllegalArgumentException("Plugin " + pluginName + " not found. Run 'plugin list' to get list of installed plugins.");
+ }
+
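+ // collect every path that belongs to the plugin (the bin dir, then the renamed plugin dir) and delete them in one pass below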
+ List<Path> pluginPaths = new ArrayList<>();
+
+ Path pluginBinDir = env.binFile().resolve(pluginName);
+ if (Files.exists(pluginBinDir)) {
+ if (Files.isDirectory(pluginBinDir) == false) {
+ throw new IllegalStateException("Bin dir for " + pluginName + " is not a directory");
+ }
+ pluginPaths.add(pluginBinDir);
+ terminal.println(VERBOSE, "Removing: %s", pluginBinDir);
+ }
+
+ terminal.println(VERBOSE, "Removing: %s", pluginDir);
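+ // rename before deleting: ATOMIC_MOVE either fully succeeds or throws, so a failed removal never leaves a half-deleted plugin under its original name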
+ Path tmpPluginDir = env.pluginsFile().resolve(".removing-" + pluginName);
+ Files.move(pluginDir, tmpPluginDir, StandardCopyOption.ATOMIC_MOVE);
+ pluginPaths.add(tmpPluginDir);
+
+ IOUtils.rm(pluginPaths.toArray(new Path[pluginPaths.size()]));
+
+ return CliTool.ExitStatus.OK;
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
index e941492e7f..59812f908f 100644
--- a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
+++ b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
@@ -348,12 +348,11 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep
if (metaData != null) {
IndexMetaData indexMetaData = metaData.index(index);
if (indexMetaData != null) {
- for (int i = 0; i < indexMetaData.getNumberOfShards(); i++) {
- ShardId shardId = new ShardId(index, i);
+ for (int shardId = 0; shardId < indexMetaData.getNumberOfShards(); shardId++) {
try {
- indexShardRepository.delete(snapshotId, snapshot.version(), shardId);
+ indexShardRepository.delete(snapshotId, snapshot.version(), new ShardId(indexMetaData.getIndex(), shardId));
} catch (SnapshotException ex) {
- logger.warn("[{}] failed to delete shard data for shard [{}]", ex, snapshotId, shardId);
+ logger.warn("[{}] failed to delete shard data for shard [{}][{}]", ex, snapshotId, index, shardId);
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java b/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java
index 33f9d4e7c3..0aa6222547 100644
--- a/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java
+++ b/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java
@@ -23,6 +23,7 @@ import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.BlobStore;
import org.elasticsearch.common.blobstore.fs.FsBlobStore;
import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.snapshots.IndexShardRepository;
@@ -33,6 +34,7 @@ import org.elasticsearch.repositories.blobstore.BlobStoreRepository;
import java.io.IOException;
import java.nio.file.Path;
+import java.util.function.Function;
/**
* Shared file system implementation of the BlobStoreRepository
@@ -49,6 +51,13 @@ public class FsRepository extends BlobStoreRepository {
public final static String TYPE = "fs";
+ public static final Setting<String> LOCATION_SETTING = new Setting<>("location", "", Function.identity(), false, Setting.Scope.CLUSTER);
+ public static final Setting<String> REPOSITORIES_LOCATION_SETTING = new Setting<>("repositories.fs.location", LOCATION_SETTING, Function.identity(), false, Setting.Scope.CLUSTER);
+ public static final Setting<ByteSizeValue> CHUNK_SIZE_SETTING = Setting.byteSizeSetting("chunk_size", "-1", false, Setting.Scope.CLUSTER);
+ public static final Setting<ByteSizeValue> REPOSITORIES_CHUNK_SIZE_SETTING = Setting.byteSizeSetting("repositories.fs.chunk_size", "-1", false, Setting.Scope.CLUSTER);
+ public static final Setting<Boolean> COMPRESS_SETTING = Setting.boolSetting("compress", false, false, Setting.Scope.CLUSTER);
+ public static final Setting<Boolean> REPOSITORIES_COMPRESS_SETTING = Setting.boolSetting("repositories.fs.compress", false, false, Setting.Scope.CLUSTER);
+
private final FsBlobStore blobStore;
private ByteSizeValue chunkSize;
@@ -68,8 +77,8 @@ public class FsRepository extends BlobStoreRepository {
public FsRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository, Environment environment) throws IOException {
super(name.getName(), repositorySettings, indexShardRepository);
Path locationFile;
- String location = repositorySettings.settings().get("location", settings.get("repositories.fs.location"));
- if (location == null) {
+ String location = REPOSITORIES_LOCATION_SETTING.get(repositorySettings.settings());
+ if (location.isEmpty()) {
logger.warn("the repository location is missing, it should point to a shared file system location that is available on all master and data nodes");
throw new RepositoryException(name.name(), "missing location");
}
@@ -85,8 +94,14 @@ public class FsRepository extends BlobStoreRepository {
}
blobStore = new FsBlobStore(settings, locationFile);
- this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", settings.getAsBytesSize("repositories.fs.chunk_size", null));
- this.compress = repositorySettings.settings().getAsBoolean("compress", settings.getAsBoolean("repositories.fs.compress", false));
+ if (CHUNK_SIZE_SETTING.exists(repositorySettings.settings())) {
+ this.chunkSize = CHUNK_SIZE_SETTING.get(repositorySettings.settings());
+ } else if (REPOSITORIES_CHUNK_SIZE_SETTING.exists(settings)) {
+ this.chunkSize = REPOSITORIES_CHUNK_SIZE_SETTING.get(settings);
+ } else {
+ this.chunkSize = null;
+ }
+ this.compress = COMPRESS_SETTING.exists(repositorySettings.settings()) ? COMPRESS_SETTING.get(repositorySettings.settings()) : REPOSITORIES_COMPRESS_SETTING.get(settings);
this.basePath = BlobPath.cleanPath();
}
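
The conversion above keeps the old lookup precedence with typed settings: a value configured on the repository itself wins, otherwise the node-level repositories.fs.* key applies, otherwise the setting's default. Since that "exists? then get" cascade recurs for every repository setting, it can be factored into a small helper; a hedged sketch using only the Setting methods shown above (the helper itself is illustrative, not part of the Setting API):

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;

final class SettingFallback {
    /**
     * Resolve a repository setting with node-level fallback: the per-repository
     * key wins if present, then the node-wide key (which supplies the default).
     */
    static <T> T resolve(Setting<T> repoSetting, Settings repoSettings,
                         Setting<T> nodeSetting, Settings nodeSettings) {
        return repoSetting.exists(repoSettings)
                ? repoSetting.get(repoSettings)
                : nodeSetting.get(nodeSettings);
    }
}

With such a helper the compress assignment above would read: this.compress = SettingFallback.resolve(COMPRESS_SETTING, repositorySettings.settings(), REPOSITORIES_COMPRESS_SETTING, settings);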
diff --git a/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java b/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java
index 4d361683e5..2d15db245a 100644
--- a/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java
+++ b/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java
@@ -20,11 +20,11 @@
package org.elasticsearch.repositories.uri;
import org.elasticsearch.cluster.metadata.SnapshotId;
-import org.elasticsearch.common.Strings;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.BlobStore;
import org.elasticsearch.common.blobstore.url.URLBlobStore;
import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.util.URIPattern;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.snapshots.IndexShardRepository;
@@ -34,9 +34,13 @@ import org.elasticsearch.repositories.RepositorySettings;
import org.elasticsearch.repositories.blobstore.BlobStoreRepository;
import java.io.IOException;
+import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
+import java.util.Arrays;
+import java.util.Collections;
import java.util.List;
+import java.util.function.Function;
/**
* Read-only URL-based implementation of the BlobStoreRepository
@@ -51,13 +55,21 @@ public class URLRepository extends BlobStoreRepository {
public final static String TYPE = "url";
- public final static String[] DEFAULT_SUPPORTED_PROTOCOLS = {"http", "https", "ftp", "file", "jar"};
+ public static final Setting<List<String>> SUPPORTED_PROTOCOLS_SETTING = Setting.listSetting("repositories.url.supported_protocols",
+ Arrays.asList("http", "https", "ftp", "file", "jar"), Function.identity(), false, Setting.Scope.CLUSTER);
- public final static String SUPPORTED_PROTOCOLS_SETTING = "repositories.url.supported_protocols";
+ public static final Setting<List<URIPattern>> ALLOWED_URLS_SETTING = Setting.listSetting("repositories.url.allowed_urls",
+ Collections.emptyList(), URIPattern::new, false, Setting.Scope.CLUSTER);
- public final static String ALLOWED_URLS_SETTING = "repositories.url.allowed_urls";
+ public static final Setting<URL> URL_SETTING = new Setting<>("url", "http:", URLRepository::parseURL, false, Setting.Scope.CLUSTER);
+ public static final Setting<URL> REPOSITORIES_URL_SETTING = new Setting<>("repositories.url.url", (s) -> s.get("repositories.uri.url", "http:"),
+ URLRepository::parseURL, false, Setting.Scope.CLUSTER);
- private final String[] supportedProtocols;
+ public static final Setting<Boolean> LIST_DIRECTORIES_SETTING = Setting.boolSetting("list_directories", true, false, Setting.Scope.CLUSTER);
+ public static final Setting<Boolean> REPOSITORIES_LIST_DIRECTORIES_SETTING = Setting.boolSetting("repositories.uri.list_directories", true,
+ false, Setting.Scope.CLUSTER);
+
+ private final List<String> supportedProtocols;
private final URIPattern[] urlWhiteList;
@@ -79,21 +91,16 @@ public class URLRepository extends BlobStoreRepository {
@Inject
public URLRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository, Environment environment) throws IOException {
super(name.getName(), repositorySettings, indexShardRepository);
- URL url;
- String path = repositorySettings.settings().get("url", settings.get("repositories.url.url", settings.get("repositories.uri.url")));
- if (path == null) {
+
+ if (URL_SETTING.exists(repositorySettings.settings()) == false && REPOSITORIES_URL_SETTING.exists(settings) == false) {
throw new RepositoryException(name.name(), "missing url");
- } else {
- url = new URL(path);
- }
- supportedProtocols = settings.getAsArray(SUPPORTED_PROTOCOLS_SETTING, DEFAULT_SUPPORTED_PROTOCOLS);
- String[] urlWhiteList = settings.getAsArray(ALLOWED_URLS_SETTING, Strings.EMPTY_ARRAY);
- this.urlWhiteList = new URIPattern[urlWhiteList.length];
- for (int i = 0; i < urlWhiteList.length; i++) {
- this.urlWhiteList[i] = new URIPattern(urlWhiteList[i]);
}
+ supportedProtocols = SUPPORTED_PROTOCOLS_SETTING.get(settings);
+ urlWhiteList = ALLOWED_URLS_SETTING.get(settings).toArray(new URIPattern[]{});
this.environment = environment;
- listDirectories = repositorySettings.settings().getAsBoolean("list_directories", settings.getAsBoolean("repositories.uri.list_directories", true));
+ listDirectories = LIST_DIRECTORIES_SETTING.exists(repositorySettings.settings()) ? LIST_DIRECTORIES_SETTING.get(repositorySettings.settings()) : REPOSITORIES_LIST_DIRECTORIES_SETTING.get(settings);
+
+ URL url = URL_SETTING.exists(repositorySettings.settings()) ? URL_SETTING.get(repositorySettings.settings()) : REPOSITORIES_URL_SETTING.get(settings);
URL normalizedURL = checkURL(url);
blobStore = new URLBlobStore(settings, normalizedURL);
basePath = BlobPath.cleanPath();
@@ -147,8 +154,8 @@ public class URLRepository extends BlobStoreRepository {
// We didn't match white list - try to resolve against path.repo
URL normalizedUrl = environment.resolveRepoURL(url);
if (normalizedUrl == null) {
- logger.warn("The specified url [{}] doesn't start with any repository paths specified by the path.repo setting: [{}] or by repositories.url.allowed_urls setting: [{}] ", url, environment.repoFiles());
- throw new RepositoryException(repositoryName, "file url [" + url + "] doesn't match any of the locations specified by path.repo or repositories.url.allowed_urls");
+ logger.warn("The specified url [{}] doesn't start with any repository paths specified by the path.repo setting or by {} setting: [{}] ", url, ALLOWED_URLS_SETTING.getKey(), environment.repoFiles());
+ throw new RepositoryException(repositoryName, "file url [" + url + "] doesn't match any of the locations specified by path.repo or " + ALLOWED_URLS_SETTING.getKey());
}
return normalizedUrl;
}
@@ -161,4 +168,11 @@ public class URLRepository extends BlobStoreRepository {
return true;
}
+ private static URL parseURL(String s) {
+ try {
+ return new URL(s);
+ } catch (MalformedURLException e) {
+ throw new IllegalArgumentException("Unable to parse URL repository setting", e);
+ }
+ }
}
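
Two details of this conversion are easy to miss: Setting.listSetting takes a per-element parser (URIPattern::new above turns each configured string into a URIPattern), and a custom parser like parseURL must convert checked exceptions into IllegalArgumentException so that a malformed value is rejected when the setting is read. A hedged sketch of the same parser shape for a hypothetical URI-valued setting (the names and message are invented):

import java.net.URI;
import java.net.URISyntaxException;
import java.util.function.Function;

final class UriSettingParser {
    // A Setting parser is just a Function<String, T>; bad input must surface
    // as an unchecked IllegalArgumentException, never a checked exception.
    static final Function<String, URI> PARSE_URI = s -> {
        try {
            return new URI(s);
        } catch (URISyntaxException e) {
            throw new IllegalArgumentException("unable to parse [" + s + "] as a URI", e);
        }
    };
}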
diff --git a/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java
index bb99218855..60b3ccce93 100644
--- a/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java
+++ b/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java
@@ -19,19 +19,12 @@
package org.elasticsearch.rest;
-import org.elasticsearch.action.Action;
-import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.ActionRequest;
-import org.elasticsearch.action.ActionRequestBuilder;
-import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.client.Client;
-import org.elasticsearch.client.FilterClient;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.component.AbstractComponent;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
-import java.util.Set;
-
/**
* Base handler for REST requests.
* <p>
@@ -41,51 +34,20 @@ import java.util.Set;
* {@link org.elasticsearch.rest.RestController#registerRelevantHeaders(String...)}
*/
public abstract class BaseRestHandler extends AbstractComponent implements RestHandler {
-
- private final RestController controller;
+ public static final Setting<Boolean> MULTI_ALLOW_EXPLICIT_INDEX = Setting.boolSetting("rest.action.multi.allow_explicit_index", true, false, Setting.Scope.CLUSTER);
private final Client client;
protected final ParseFieldMatcher parseFieldMatcher;
- protected BaseRestHandler(Settings settings, RestController controller, Client client) {
+ protected BaseRestHandler(Settings settings, Client client) {
super(settings);
- this.controller = controller;
this.client = client;
this.parseFieldMatcher = new ParseFieldMatcher(settings);
}
@Override
public final void handleRequest(RestRequest request, RestChannel channel) throws Exception {
- handleRequest(request, channel, new HeadersAndContextCopyClient(client, request, controller.relevantHeaders()));
+ handleRequest(request, channel, client);
}
protected abstract void handleRequest(RestRequest request, RestChannel channel, Client client) throws Exception;
-
- static final class HeadersAndContextCopyClient extends FilterClient {
-
- private final RestRequest restRequest;
- private final Set<String> headers;
-
- HeadersAndContextCopyClient(Client in, RestRequest restRequest, Set<String> headers) {
- super(in);
- this.restRequest = restRequest;
- this.headers = headers;
- }
-
- private static void copyHeadersAndContext(ActionRequest<?> actionRequest, RestRequest restRequest, Set<String> headers) {
- for (String usefulHeader : headers) {
- String headerValue = restRequest.header(usefulHeader);
- if (headerValue != null) {
- actionRequest.putHeader(usefulHeader, headerValue);
- }
- }
- actionRequest.copyContextFrom(restRequest);
- }
-
- @Override
- protected <Request extends ActionRequest<Request>, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void doExecute(
- Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
- copyHeadersAndContext(request, restRequest, headers);
- super.doExecute(action, request, listener);
- }
- }
}
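
With the header-copying FilterClient gone, the only state left on the base class besides the client is a typed setting, declared with the Setting.boolSetting(key, defaultValue, dynamic, scope) shape this merge uses throughout. A hedged sketch of declaring and reading such a boolean cluster setting (the key below is invented for illustration):

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;

final class BoolSettingDemo {
    // key, default, dynamically updatable?, scope - the same signature as
    // MULTI_ALLOW_EXPLICIT_INDEX above; the key itself is made up.
    static final Setting<Boolean> STRICT_MODE =
            Setting.boolSetting("rest.action.demo.strict", false, false, Setting.Scope.CLUSTER);

    public static void main(String[] args) {
        System.out.println(STRICT_MODE.get(Settings.EMPTY));    // -> false (the default)
        System.out.println(STRICT_MODE.exists(Settings.EMPTY)); // -> false, nothing configured
    }
}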
diff --git a/core/src/main/java/org/elasticsearch/rest/RestController.java b/core/src/main/java/org/elasticsearch/rest/RestController.java
index d0a46d29f6..64e21002d8 100644
--- a/core/src/main/java/org/elasticsearch/rest/RestController.java
+++ b/core/src/main/java/org/elasticsearch/rest/RestController.java
@@ -24,13 +24,13 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.path.PathTrie;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.rest.support.RestUtils;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
-import java.util.Comparator;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
@@ -107,12 +107,7 @@ public class RestController extends AbstractLifecycleComponent<RestController> {
RestFilter[] copy = new RestFilter[filters.length + 1];
System.arraycopy(filters, 0, copy, 0, filters.length);
copy[filters.length] = preProcessor;
- Arrays.sort(copy, new Comparator<RestFilter>() {
- @Override
- public int compare(RestFilter o1, RestFilter o2) {
- return Integer.compare(o1.order(), o2.order());
- }
- });
+ Arrays.sort(copy, (o1, o2) -> Integer.compare(o1.order(), o2.order()));
filters = copy;
}
@@ -163,24 +158,31 @@ public class RestController extends AbstractLifecycleComponent<RestController> {
return new ControllerFilterChain(executionFilter);
}
- public void dispatchRequest(final RestRequest request, final RestChannel channel) {
+ public void dispatchRequest(final RestRequest request, final RestChannel channel, ThreadContext threadContext) {
if (!checkRequestParameters(request, channel)) {
return;
}
-
- if (filters.length == 0) {
- try {
- executeHandler(request, channel);
- } catch (Throwable e) {
+        try (ThreadContext.StoredContext t = threadContext.stashContext()) {
+ for (String key : relevantHeaders) {
+ String httpHeader = request.header(key);
+ if (httpHeader != null) {
+ threadContext.putHeader(key, httpHeader);
+ }
+ }
+ if (filters.length == 0) {
try {
- channel.sendResponse(new BytesRestResponse(channel, e));
- } catch (Throwable e1) {
- logger.error("failed to send failure response for uri [" + request.uri() + "]", e1);
+ executeHandler(request, channel);
+ } catch (Throwable e) {
+ try {
+ channel.sendResponse(new BytesRestResponse(channel, e));
+ } catch (Throwable e1) {
+ logger.error("failed to send failure response for uri [" + request.uri() + "]", e1);
+ }
}
+ } else {
+ ControllerFilterChain filterChain = new ControllerFilterChain(handlerFilter);
+ filterChain.continueProcessing(request, channel);
}
- } else {
- ControllerFilterChain filterChain = new ControllerFilterChain(handlerFilter);
- filterChain.continueProcessing(request, channel);
}
}
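
This is the other half of the BaseRestHandler change: dispatchRequest opens a fresh thread context, copies only the registered relevant headers from the HTTP request onto it, and try-with-resources restores the previous context when dispatch completes, so per-request headers reach transport actions without being copied onto each ActionRequest. A hedged, self-contained sketch of that lifecycle (the header name and value are invented):

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;

final class ThreadContextDemo {
    public static void main(String[] args) {
        ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
        // Dispatch time: stash whatever was on the thread and start clean.
        try (ThreadContext.StoredContext ignored = threadContext.stashContext()) {
            threadContext.putHeader("X-Opaque-Id", "req-42"); // a "relevant" header
            // ... later, deep inside an action on the same thread,
            // no request object is needed to read it back:
            System.out.println(threadContext.getHeader("X-Opaque-Id"));
        }
        // Leaving the try block restores the previous context.
    }
}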
diff --git a/core/src/main/java/org/elasticsearch/rest/RestRequest.java b/core/src/main/java/org/elasticsearch/rest/RestRequest.java
index 81f6052db5..8872484d58 100644
--- a/core/src/main/java/org/elasticsearch/rest/RestRequest.java
+++ b/core/src/main/java/org/elasticsearch/rest/RestRequest.java
@@ -20,7 +20,6 @@
package org.elasticsearch.rest;
import org.elasticsearch.common.Booleans;
-import org.elasticsearch.common.ContextAndHeaderHolder;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
@@ -38,7 +37,7 @@ import static org.elasticsearch.common.unit.TimeValue.parseTimeValue;
/**
*
*/
-public abstract class RestRequest extends ContextAndHeaderHolder implements ToXContent.Params {
+public abstract class RestRequest implements ToXContent.Params {
public enum Method {
GET, POST, PUT, DELETE, OPTIONS, HEAD
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/health/RestClusterHealthAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/health/RestClusterHealthAction.java
index badf6f6de5..ccd0f98259 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/health/RestClusterHealthAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/health/RestClusterHealthAction.java
@@ -43,7 +43,7 @@ public class RestClusterHealthAction extends BaseRestHandler {
@Inject
public RestClusterHealthAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(RestRequest.Method.GET, "/_cluster/health", this);
controller.registerHandler(RestRequest.Method.GET, "/_cluster/health/{index}", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/hotthreads/RestNodesHotThreadsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/hotthreads/RestNodesHotThreadsAction.java
index 24c4c44941..53bec14f96 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/hotthreads/RestNodesHotThreadsAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/hotthreads/RestNodesHotThreadsAction.java
@@ -43,7 +43,7 @@ public class RestNodesHotThreadsAction extends BaseRestHandler {
@Inject
public RestNodesHotThreadsAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(RestRequest.Method.GET, "/_cluster/nodes/hotthreads", this);
controller.registerHandler(RestRequest.Method.GET, "/_cluster/nodes/hot_threads", this);
controller.registerHandler(RestRequest.Method.GET, "/_cluster/nodes/{nodeId}/hotthreads", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java
index f2c5185000..ce1e7811da 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java
@@ -52,7 +52,7 @@ public class RestNodesInfoAction extends BaseRestHandler {
@Inject
public RestNodesInfoAction(Settings settings, RestController controller, Client client, SettingsFilter settingsFilter) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/_nodes", this);
// this endpoint is used for metrics, not for nodeIds, like /_nodes/fs
controller.registerHandler(GET, "/_nodes/{nodeId}", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java
index 786891d330..2b3f0518c3 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/stats/RestNodesStatsAction.java
@@ -45,7 +45,7 @@ public class RestNodesStatsAction extends BaseRestHandler {
@Inject
public RestNodesStatsAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/_nodes/stats", this);
controller.registerHandler(GET, "/_nodes/{nodeId}/stats", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java
index 813c782242..46fef04b85 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java
@@ -37,7 +37,7 @@ public class RestListTasksAction extends BaseRestHandler {
@Inject
public RestListTasksAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/_tasks", this);
controller.registerHandler(GET, "/_tasks/{nodeId}", this);
controller.registerHandler(GET, "/_tasks/{nodeId}/{actions}", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/delete/RestDeleteRepositoryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/delete/RestDeleteRepositoryAction.java
index 36e02ba459..136c1cfae3 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/delete/RestDeleteRepositoryAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/delete/RestDeleteRepositoryAction.java
@@ -40,7 +40,7 @@ public class RestDeleteRepositoryAction extends BaseRestHandler {
@Inject
public RestDeleteRepositoryAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(DELETE, "/_snapshot/{repository}", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/get/RestGetRepositoriesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/get/RestGetRepositoriesAction.java
index fd347ccd33..09422481cf 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/get/RestGetRepositoriesAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/get/RestGetRepositoriesAction.java
@@ -50,7 +50,7 @@ public class RestGetRepositoriesAction extends BaseRestHandler {
@Inject
public RestGetRepositoriesAction(Settings settings, RestController controller, Client client, SettingsFilter settingsFilter) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/_snapshot", this);
controller.registerHandler(GET, "/_snapshot/{repository}", this);
this.settingsFilter = settingsFilter;
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/put/RestPutRepositoryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/put/RestPutRepositoryAction.java
index feeeeb77ab..878eb2915b 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/put/RestPutRepositoryAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/put/RestPutRepositoryAction.java
@@ -41,7 +41,7 @@ public class RestPutRepositoryAction extends BaseRestHandler {
@Inject
public RestPutRepositoryAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(PUT, "/_snapshot/{repository}", this);
controller.registerHandler(POST, "/_snapshot/{repository}", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/verify/RestVerifyRepositoryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/verify/RestVerifyRepositoryAction.java
index c0c7ad5b95..306dcbb21b 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/verify/RestVerifyRepositoryAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/verify/RestVerifyRepositoryAction.java
@@ -36,7 +36,7 @@ public class RestVerifyRepositoryAction extends BaseRestHandler {
@Inject
public RestVerifyRepositoryAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(POST, "/_snapshot/{repository}/_verify", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java
index 387728918a..529d73d3e5 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java
@@ -49,7 +49,7 @@ public class RestClusterRerouteAction extends BaseRestHandler {
@Inject
public RestClusterRerouteAction(Settings settings, RestController controller, Client client, SettingsFilter settingsFilter) {
- super(settings, controller, client);
+ super(settings, client);
this.settingsFilter = settingsFilter;
controller.registerHandler(RestRequest.Method.POST, "/_cluster/reroute", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java
index 5acbfc48d2..e7c97abbbd 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java
@@ -48,7 +48,7 @@ public class RestClusterGetSettingsAction extends BaseRestHandler {
@Inject
public RestClusterGetSettingsAction(Settings settings, RestController controller, Client client, ClusterSettings clusterSettings) {
- super(settings, controller, client);
+ super(settings, client);
this.clusterSettings = clusterSettings;
controller.registerHandler(RestRequest.Method.GET, "/_cluster/settings", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterUpdateSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterUpdateSettingsAction.java
index aa84606b07..64083f1e80 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterUpdateSettingsAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterUpdateSettingsAction.java
@@ -43,7 +43,7 @@ public class RestClusterUpdateSettingsAction extends BaseRestHandler {
@Inject
public RestClusterUpdateSettingsAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(RestRequest.Method.PUT, "/_cluster/settings", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/shards/RestClusterSearchShardsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/shards/RestClusterSearchShardsAction.java
index ee68c1bbb7..860e110b2d 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/shards/RestClusterSearchShardsAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/shards/RestClusterSearchShardsAction.java
@@ -42,7 +42,7 @@ public class RestClusterSearchShardsAction extends BaseRestHandler {
@Inject
public RestClusterSearchShardsAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/_search_shards", this);
controller.registerHandler(POST, "/_search_shards", this);
controller.registerHandler(GET, "/{index}/_search_shards", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/create/RestCreateSnapshotAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/create/RestCreateSnapshotAction.java
index bf9dd4a011..9d6be664d4 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/create/RestCreateSnapshotAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/create/RestCreateSnapshotAction.java
@@ -41,7 +41,7 @@ public class RestCreateSnapshotAction extends BaseRestHandler {
@Inject
public RestCreateSnapshotAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(PUT, "/_snapshot/{repository}/{snapshot}", this);
controller.registerHandler(POST, "/_snapshot/{repository}/{snapshot}", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/delete/RestDeleteSnapshotAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/delete/RestDeleteSnapshotAction.java
index 66b5a4188c..38c78bd5d8 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/delete/RestDeleteSnapshotAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/delete/RestDeleteSnapshotAction.java
@@ -40,7 +40,7 @@ public class RestDeleteSnapshotAction extends BaseRestHandler {
@Inject
public RestDeleteSnapshotAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(DELETE, "/_snapshot/{repository}/{snapshot}", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/get/RestGetSnapshotsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/get/RestGetSnapshotsAction.java
index 123798cf99..1151fed8f2 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/get/RestGetSnapshotsAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/get/RestGetSnapshotsAction.java
@@ -41,7 +41,7 @@ public class RestGetSnapshotsAction extends BaseRestHandler {
@Inject
public RestGetSnapshotsAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/_snapshot/{repository}/{snapshot}", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/restore/RestRestoreSnapshotAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/restore/RestRestoreSnapshotAction.java
index 028285d306..e2a16bd4b4 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/restore/RestRestoreSnapshotAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/restore/RestRestoreSnapshotAction.java
@@ -40,7 +40,7 @@ public class RestRestoreSnapshotAction extends BaseRestHandler {
@Inject
public RestRestoreSnapshotAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(POST, "/_snapshot/{repository}/{snapshot}/_restore", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/status/RestSnapshotsStatusAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/status/RestSnapshotsStatusAction.java
index b60a740a15..2e8810e2ba 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/status/RestSnapshotsStatusAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/snapshots/status/RestSnapshotsStatusAction.java
@@ -41,7 +41,7 @@ public class RestSnapshotsStatusAction extends BaseRestHandler {
@Inject
public RestSnapshotsStatusAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/_snapshot/{repository}/{snapshot}/_status", this);
controller.registerHandler(GET, "/_snapshot/{repository}/_status", this);
controller.registerHandler(GET, "/_snapshot/_status", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/state/RestClusterStateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/state/RestClusterStateAction.java
index f28ecfe488..720d19a7fe 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/state/RestClusterStateAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/state/RestClusterStateAction.java
@@ -52,7 +52,7 @@ public class RestClusterStateAction extends BaseRestHandler {
@Inject
public RestClusterStateAction(Settings settings, RestController controller, Client client, SettingsFilter settingsFilter) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(RestRequest.Method.GET, "/_cluster/state", this);
controller.registerHandler(RestRequest.Method.GET, "/_cluster/state/{metric}", this);
controller.registerHandler(RestRequest.Method.GET, "/_cluster/state/{metric}/{indices}", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/stats/RestClusterStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/stats/RestClusterStatsAction.java
index b14293ba31..a09820e71b 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/stats/RestClusterStatsAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/stats/RestClusterStatsAction.java
@@ -38,7 +38,7 @@ public class RestClusterStatsAction extends BaseRestHandler {
@Inject
public RestClusterStatsAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(RestRequest.Method.GET, "/_cluster/stats", this);
controller.registerHandler(RestRequest.Method.GET, "/_cluster/stats/nodes/{nodeId}", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/tasks/RestPendingClusterTasksAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/tasks/RestPendingClusterTasksAction.java
index 5d9eac430b..333b6d6449 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/tasks/RestPendingClusterTasksAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/tasks/RestPendingClusterTasksAction.java
@@ -36,7 +36,7 @@ public class RestPendingClusterTasksAction extends BaseRestHandler {
@Inject
public RestPendingClusterTasksAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(RestRequest.Method.GET, "/_cluster/pending_tasks", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/RestIndicesAliasesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/RestIndicesAliasesAction.java
index f62d6febee..c60671f864 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/RestIndicesAliasesAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/RestIndicesAliasesAction.java
@@ -47,7 +47,7 @@ public class RestIndicesAliasesAction extends BaseRestHandler {
@Inject
public RestIndicesAliasesAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(POST, "/_aliases", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/delete/RestIndexDeleteAliasesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/delete/RestIndexDeleteAliasesAction.java
index 6748cc2509..7fcaadc3d8 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/delete/RestIndexDeleteAliasesAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/delete/RestIndexDeleteAliasesAction.java
@@ -38,7 +38,7 @@ public class RestIndexDeleteAliasesAction extends BaseRestHandler {
@Inject
public RestIndexDeleteAliasesAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(DELETE, "/{index}/_alias/{name}", this);
controller.registerHandler(DELETE, "/{index}/_aliases/{name}", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetAliasesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetAliasesAction.java
index aa62ee471d..da439c63d5 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetAliasesAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetAliasesAction.java
@@ -52,7 +52,7 @@ public class RestGetAliasesAction extends BaseRestHandler {
@Inject
public RestGetAliasesAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/_alias/{name}", this);
controller.registerHandler(GET, "/{index}/_alias/{name}", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetIndicesAliasesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetIndicesAliasesAction.java
index 4c774b5864..5a45a0a759 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetIndicesAliasesAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/get/RestGetIndicesAliasesAction.java
@@ -51,7 +51,7 @@ public class RestGetIndicesAliasesAction extends BaseRestHandler {
@Inject
public RestGetIndicesAliasesAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/{index}/_aliases/{name}", this);
controller.registerHandler(GET, "/_aliases/{name}", this);
}
@@ -75,7 +75,7 @@ public class RestGetIndicesAliasesAction extends BaseRestHandler {
final boolean isAllAliasesRequested = isAllOrWildcard(aliases);
for (IndexMetaData indexMetaData : metaData) {
- builder.startObject(indexMetaData.getIndex(), XContentBuilder.FieldCaseConversion.NONE);
+ builder.startObject(indexMetaData.getIndex().getName(), XContentBuilder.FieldCaseConversion.NONE);
builder.startObject("aliases");
for (ObjectCursor<AliasMetaData> cursor : indexMetaData.getAliases().values()) {
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/head/RestAliasesExistAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/head/RestAliasesExistAction.java
index fce40123b6..15ea664245 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/head/RestAliasesExistAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/head/RestAliasesExistAction.java
@@ -44,7 +44,7 @@ public class RestAliasesExistAction extends BaseRestHandler {
@Inject
public RestAliasesExistAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(HEAD, "/_alias/{name}", this);
controller.registerHandler(HEAD, "/{index}/_alias/{name}", this);
controller.registerHandler(HEAD, "/{index}/_alias", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/put/RestIndexPutAliasAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/put/RestIndexPutAliasAction.java
index 4965f6b218..7a0c2ad466 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/put/RestIndexPutAliasAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/put/RestIndexPutAliasAction.java
@@ -45,7 +45,7 @@ public class RestIndexPutAliasAction extends BaseRestHandler {
@Inject
public RestIndexPutAliasAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(PUT, "/{index}/_alias/{name}", this);
controller.registerHandler(PUT, "/_alias/{name}", this);
controller.registerHandler(PUT, "/{index}/_aliases/{name}", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java
index 3a86911f46..e440e1b95c 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java
@@ -61,7 +61,7 @@ public class RestAnalyzeAction extends BaseRestHandler {
@Inject
public RestAnalyzeAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/_analyze", this);
controller.registerHandler(GET, "/{index}/_analyze", this);
controller.registerHandler(POST, "/_analyze", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/cache/clear/RestClearIndicesCacheAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/cache/clear/RestClearIndicesCacheAction.java
index cc06a14b8d..7adb690953 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/cache/clear/RestClearIndicesCacheAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/cache/clear/RestClearIndicesCacheAction.java
@@ -51,7 +51,7 @@ public class RestClearIndicesCacheAction extends BaseRestHandler {
@Inject
public RestClearIndicesCacheAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(POST, "/_cache/clear", this);
controller.registerHandler(POST, "/{index}/_cache/clear", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/close/RestCloseIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/close/RestCloseIndexAction.java
index 091fbc1680..5f211b88d1 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/close/RestCloseIndexAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/close/RestCloseIndexAction.java
@@ -39,7 +39,7 @@ public class RestCloseIndexAction extends BaseRestHandler {
@Inject
public RestCloseIndexAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(RestRequest.Method.POST, "/_close", this);
controller.registerHandler(RestRequest.Method.POST, "/{index}/_close", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/create/RestCreateIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/create/RestCreateIndexAction.java
index 41a272cc8b..46bc938897 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/create/RestCreateIndexAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/create/RestCreateIndexAction.java
@@ -37,7 +37,7 @@ public class RestCreateIndexAction extends BaseRestHandler {
@Inject
public RestCreateIndexAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(RestRequest.Method.PUT, "/{index}", this);
controller.registerHandler(RestRequest.Method.POST, "/{index}", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/delete/RestDeleteIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/delete/RestDeleteIndexAction.java
index 0851fb867b..4953842c54 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/delete/RestDeleteIndexAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/delete/RestDeleteIndexAction.java
@@ -39,7 +39,7 @@ public class RestDeleteIndexAction extends BaseRestHandler {
@Inject
public RestDeleteIndexAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(RestRequest.Method.DELETE, "/", this);
controller.registerHandler(RestRequest.Method.DELETE, "/{index}", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/indices/RestIndicesExistsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/indices/RestIndicesExistsAction.java
index 6843f5c5ce..72dea18abd 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/indices/RestIndicesExistsAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/indices/RestIndicesExistsAction.java
@@ -45,7 +45,7 @@ public class RestIndicesExistsAction extends BaseRestHandler {
@Inject
public RestIndicesExistsAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(HEAD, "/{index}", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/types/RestTypesExistsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/types/RestTypesExistsAction.java
index f1f227edfd..dd206dcb63 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/types/RestTypesExistsAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/exists/types/RestTypesExistsAction.java
@@ -44,7 +44,7 @@ public class RestTypesExistsAction extends BaseRestHandler {
@Inject
public RestTypesExistsAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(HEAD, "/{index}/{type}", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestFlushAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestFlushAction.java
index 47c0451adf..f3b3304bcf 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestFlushAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestFlushAction.java
@@ -47,7 +47,7 @@ public class RestFlushAction extends BaseRestHandler {
@Inject
public RestFlushAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(POST, "/_flush", this);
controller.registerHandler(POST, "/{index}/_flush", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java
index 4fe893bd41..9bb36f03d6 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/flush/RestSyncedFlushAction.java
@@ -45,7 +45,7 @@ public class RestSyncedFlushAction extends BaseRestHandler {
@Inject
public RestSyncedFlushAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(POST, "/_flush/synced", this);
controller.registerHandler(POST, "/{index}/_flush/synced", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/forcemerge/RestForceMergeAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/forcemerge/RestForceMergeAction.java
index d8ef7bace3..8aa2683be5 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/forcemerge/RestForceMergeAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/forcemerge/RestForceMergeAction.java
@@ -46,7 +46,7 @@ public class RestForceMergeAction extends BaseRestHandler {
@Inject
public RestForceMergeAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(POST, "/_forcemerge", this);
controller.registerHandler(POST, "/{index}/_forcemerge", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java
index e23dec0f0b..e54b3d92cb 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java
@@ -57,7 +57,7 @@ public class RestGetIndicesAction extends BaseRestHandler {
@Inject
public RestGetIndicesAction(Settings settings, RestController controller, Client client, IndexScopedSettings indexScopedSettings) {
- super(settings, controller, client);
+ super(settings, client);
this.indexScopedSettings = indexScopedSettings;
controller.registerHandler(GET, "/{index}", this);
controller.registerHandler(GET, "/{index}/{type}", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetFieldMappingAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetFieldMappingAction.java
index 7594a097c9..0db931d0a7 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetFieldMappingAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetFieldMappingAction.java
@@ -51,7 +51,7 @@ public class RestGetFieldMappingAction extends BaseRestHandler {
@Inject
public RestGetFieldMappingAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/_mapping/field/{fields}", this);
controller.registerHandler(GET, "/_mapping/{type}/field/{fields}", this);
controller.registerHandler(GET, "/{index}/_mapping/field/{fields}", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetMappingAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetMappingAction.java
index 48fa60cb4b..12c29bb781 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetMappingAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/get/RestGetMappingAction.java
@@ -52,7 +52,7 @@ public class RestGetMappingAction extends BaseRestHandler {
@Inject
public RestGetMappingAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/{index}/{type}/_mapping", this);
controller.registerHandler(GET, "/{index}/_mappings/{type}", this);
controller.registerHandler(GET, "/{index}/_mapping/{type}", this);
@@ -78,7 +78,7 @@ public class RestGetMappingAction extends BaseRestHandler {
} else if (indices.length != 0) {
return new BytesRestResponse(channel, new IndexNotFoundException(indices[0]));
} else if (types.length != 0) {
- return new BytesRestResponse(channel, new TypeMissingException(new Index("_all"), types[0]));
+ return new BytesRestResponse(channel, new TypeMissingException("_all", types[0]));
} else {
return new BytesRestResponse(OK, builder.endObject());
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/put/RestPutMappingAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/put/RestPutMappingAction.java
index 3ceecbfd3a..fdb16d2fb8 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/put/RestPutMappingAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/mapping/put/RestPutMappingAction.java
@@ -44,7 +44,7 @@ public class RestPutMappingAction extends BaseRestHandler {
@Inject
public RestPutMappingAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(PUT, "/{index}/_mapping/", this);
controller.registerHandler(PUT, "/{index}/{type}/_mapping", this);
controller.registerHandler(PUT, "/{index}/_mapping/{type}", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/open/RestOpenIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/open/RestOpenIndexAction.java
index cb22f81ba4..58bda9d3a3 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/open/RestOpenIndexAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/open/RestOpenIndexAction.java
@@ -39,7 +39,7 @@ public class RestOpenIndexAction extends BaseRestHandler {
@Inject
public RestOpenIndexAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(RestRequest.Method.POST, "/_open", this);
controller.registerHandler(RestRequest.Method.POST, "/{index}/_open", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/recovery/RestRecoveryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/recovery/RestRecoveryAction.java
index e46831e81e..88bc9fb8c9 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/recovery/RestRecoveryAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/recovery/RestRecoveryAction.java
@@ -45,7 +45,7 @@ public class RestRecoveryAction extends BaseRestHandler {
@Inject
public RestRecoveryAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/_recovery", this);
controller.registerHandler(GET, "/{index}/_recovery", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/refresh/RestRefreshAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/refresh/RestRefreshAction.java
index e552b13316..fcc6d240b3 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/refresh/RestRefreshAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/refresh/RestRefreshAction.java
@@ -47,7 +47,7 @@ public class RestRefreshAction extends BaseRestHandler {
@Inject
public RestRefreshAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(POST, "/_refresh", this);
controller.registerHandler(POST, "/{index}/_refresh", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/segments/RestIndicesSegmentsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/segments/RestIndicesSegmentsAction.java
index a233c75da5..da76a769ce 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/segments/RestIndicesSegmentsAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/segments/RestIndicesSegmentsAction.java
@@ -45,7 +45,7 @@ public class RestIndicesSegmentsAction extends BaseRestHandler {
@Inject
public RestIndicesSegmentsAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/_segments", this);
controller.registerHandler(GET, "/{index}/_segments", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java
index b924acc5fb..7d8748955f 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java
@@ -46,7 +46,7 @@ public class RestGetSettingsAction extends BaseRestHandler {
@Inject
public RestGetSettingsAction(Settings settings, RestController controller, Client client, IndexScopedSettings indexScopedSettings) {
- super(settings, controller, client);
+ super(settings, client);
this.indexScopedSettings = indexScopedSettings;
controller.registerHandler(GET, "/{index}/_settings/{name}", this);
controller.registerHandler(GET, "/_settings/{name}", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestUpdateSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestUpdateSettingsAction.java
index 1a8ba58306..bcf43a4baa 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestUpdateSettingsAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestUpdateSettingsAction.java
@@ -53,7 +53,7 @@ public class RestUpdateSettingsAction extends BaseRestHandler {
@Inject
public RestUpdateSettingsAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(RestRequest.Method.PUT, "/{index}/_settings", this);
controller.registerHandler(RestRequest.Method.PUT, "/_settings", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/shards/RestIndicesShardStoresAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/shards/RestIndicesShardStoresAction.java
index e2dc64cc47..586599c1a1 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/shards/RestIndicesShardStoresAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/shards/RestIndicesShardStoresAction.java
@@ -46,7 +46,7 @@ public class RestIndicesShardStoresAction extends BaseRestHandler {
@Inject
public RestIndicesShardStoresAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/_shard_stores", this);
controller.registerHandler(GET, "/{index}/_shard_stores", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java
index 891afd6b8c..e75dfcc4dc 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java
@@ -47,7 +47,7 @@ public class RestIndicesStatsAction extends BaseRestHandler {
@Inject
public RestIndicesStatsAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/_stats", this);
controller.registerHandler(GET, "/_stats/{metric}", this);
controller.registerHandler(GET, "/_stats/{metric}/{indexMetric}", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/delete/RestDeleteIndexTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/delete/RestDeleteIndexTemplateAction.java
index a4c1869609..a59ab9ac70 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/delete/RestDeleteIndexTemplateAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/delete/RestDeleteIndexTemplateAction.java
@@ -36,7 +36,7 @@ public class RestDeleteIndexTemplateAction extends BaseRestHandler {
@Inject
public RestDeleteIndexTemplateAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(RestRequest.Method.DELETE, "/_template/{name}", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/get/RestGetIndexTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/get/RestGetIndexTemplateAction.java
index d5bfa0db90..d62d97400c 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/get/RestGetIndexTemplateAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/get/RestGetIndexTemplateAction.java
@@ -50,7 +50,7 @@ public class RestGetIndexTemplateAction extends BaseRestHandler {
@Inject
public RestGetIndexTemplateAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/_template", this);
controller.registerHandler(GET, "/_template/{name}", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/head/RestHeadIndexTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/head/RestHeadIndexTemplateAction.java
index 0838fa887e..648d083e76 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/head/RestHeadIndexTemplateAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/head/RestHeadIndexTemplateAction.java
@@ -42,7 +42,7 @@ public class RestHeadIndexTemplateAction extends BaseRestHandler {
@Inject
public RestHeadIndexTemplateAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(HEAD, "/_template/{name}", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/put/RestPutIndexTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/put/RestPutIndexTemplateAction.java
index 45f8a674dd..0b08b64e89 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/put/RestPutIndexTemplateAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/template/put/RestPutIndexTemplateAction.java
@@ -36,7 +36,7 @@ public class RestPutIndexTemplateAction extends BaseRestHandler {
@Inject
public RestPutIndexTemplateAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(RestRequest.Method.PUT, "/_template/{name}", this);
controller.registerHandler(RestRequest.Method.POST, "/_template/{name}", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java
index 6a554db60f..60a781f90b 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java
@@ -49,7 +49,7 @@ public class RestUpgradeAction extends BaseRestHandler {
@Inject
public RestUpgradeAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(POST, "/_upgrade", this);
controller.registerHandler(POST, "/{index}/_upgrade", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/query/RestValidateQueryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/query/RestValidateQueryAction.java
index 81bdaf7536..86d6e9d608 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/query/RestValidateQueryAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/query/RestValidateQueryAction.java
@@ -57,7 +57,7 @@ public class RestValidateQueryAction extends BaseRestHandler {
@Inject
public RestValidateQueryAction(Settings settings, RestController controller, Client client, IndicesQueriesRegistry indicesQueriesRegistry) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/_validate/query", this);
controller.registerHandler(POST, "/_validate/query", this);
controller.registerHandler(GET, "/{index}/_validate/query", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/template/RestRenderSearchTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/template/RestRenderSearchTemplateAction.java
index 5ebec7130d..f130865752 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/template/RestRenderSearchTemplateAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/template/RestRenderSearchTemplateAction.java
@@ -52,7 +52,7 @@ public class RestRenderSearchTemplateAction extends BaseRestHandler {
@Inject
public RestRenderSearchTemplateAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/_render/template", this);
controller.registerHandler(POST, "/_render/template", this);
controller.registerHandler(GET, "/_render/template/{id}", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java b/core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java
index 37ce03bac7..dbc413fcfc 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java
@@ -58,7 +58,7 @@ public class RestBulkAction extends BaseRestHandler {
@Inject
public RestBulkAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(POST, "/_bulk", this);
controller.registerHandler(PUT, "/_bulk", this);
@@ -67,7 +67,7 @@ public class RestBulkAction extends BaseRestHandler {
controller.registerHandler(POST, "/{index}/{type}/_bulk", this);
controller.registerHandler(PUT, "/{index}/{type}/_bulk", this);
- this.allowExplicitIndex = settings.getAsBoolean("rest.action.multi.allow_explicit_index", true);
+ this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings);
}
@Override
@@ -77,6 +77,7 @@ public class RestBulkAction extends BaseRestHandler {
String defaultType = request.param("type");
String defaultRouting = request.param("routing");
String fieldsParam = request.param("fields");
+ String defaultPipeline = request.param("pipeline");
String[] defaultFields = fieldsParam != null ? Strings.commaDelimitedListToStringArray(fieldsParam) : null;
String consistencyLevel = request.param("consistency");
@@ -85,7 +86,7 @@ public class RestBulkAction extends BaseRestHandler {
}
bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT));
bulkRequest.refresh(request.paramAsBoolean("refresh", bulkRequest.refresh()));
- bulkRequest.add(request.content(), defaultIndex, defaultType, defaultRouting, defaultFields, null, allowExplicitIndex);
+ bulkRequest.add(request.content(), defaultIndex, defaultType, defaultRouting, defaultFields, defaultPipeline, null, allowExplicitIndex);
client.bulk(bulkRequest, new RestBuilderListener<BulkResponse>(channel) {
@Override
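For context, a minimal sketch of the new default-pipeline plumbing, using the eight-argument BulkRequest.add overload visible in the hunk above; the index, type, and pipeline names are hypothetical, and the second-to-last null is the same payload slot the handler passes:

    BulkRequest bulkRequest = new BulkRequest();
    // "logs", "event", and "my-pipeline" are hypothetical; routing and fields
    // are left null, and explicit indices in the body are allowed.
    bulkRequest.add(content, "logs", "event", null, null, "my-pipeline", null, true);
    client.bulk(bulkRequest, listener);

Any bulk action that does not name its own pipeline is pre-processed by the default one taken from the URL parameter.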
diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/AbstractCatAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/AbstractCatAction.java
index 895211a097..12393f5800 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/cat/AbstractCatAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/cat/AbstractCatAction.java
@@ -39,7 +39,7 @@ import static org.elasticsearch.rest.action.support.RestTable.pad;
public abstract class AbstractCatAction extends BaseRestHandler {
public AbstractCatAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
}
protected abstract void doRequest(final RestRequest request, final RestChannel channel, final Client client);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java
index 337684769f..23229540b9 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java
@@ -41,7 +41,7 @@ public class RestCatAction extends BaseRestHandler {
@Inject
public RestCatAction(Settings settings, RestController controller, Set<AbstractCatAction> catActions, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/_cat", this);
StringBuilder sb = new StringBuilder();
sb.append(CAT_NL);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java
index 34e0522365..1a37ab6da3 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java
@@ -84,8 +84,6 @@ public class RestPluginsAction extends AbstractCatAction {
table.addCell("name", "alias:n;desc:node name");
table.addCell("component", "alias:c;desc:component");
table.addCell("version", "alias:v;desc:component version");
- table.addCell("type", "alias:t;desc:type (j for JVM, s for Site)");
- table.addCell("url", "alias:u;desc:url for site plugins");
table.addCell("description", "alias:d;default:false;desc:plugin details");
table.endHeaders();
return table;
@@ -104,22 +102,6 @@ public class RestPluginsAction extends AbstractCatAction {
table.addCell(node.name());
table.addCell(pluginInfo.getName());
table.addCell(pluginInfo.getVersion());
- String type;
- if (pluginInfo.isSite()) {
- if (pluginInfo.isJvm()) {
- type = "j/s";
- } else {
- type = "s";
- }
- } else {
- if (pluginInfo.isJvm()) {
- type = "j";
- } else {
- type = "";
- }
- }
- table.addCell(type);
- table.addCell(pluginInfo.getUrl());
table.addCell(pluginInfo.getDescription());
table.endRow();
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestSegmentsAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestSegmentsAction.java
index 734fb34009..e5a1b4b49a 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestSegmentsAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestSegmentsAction.java
@@ -127,7 +127,7 @@ public class RestSegmentsAction extends AbstractCatAction {
for (Segment segment : segments) {
table.startRow();
- table.addCell(shardSegment.getShardRouting().getIndex());
+ table.addCell(shardSegment.getShardRouting().getIndexName());
table.addCell(shardSegment.getShardRouting().getId());
table.addCell(shardSegment.getShardRouting().primary() ? "p" : "r");
table.addCell(nodes.get(shardSegment.getShardRouting().currentNodeId()).getHostAddress());
diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java
index 692d5bebbc..94a82e8e77 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java
@@ -188,7 +188,7 @@ public class RestShardsAction extends AbstractCatAction {
table.startRow();
- table.addCell(shard.index());
+ table.addCell(shard.getIndexName());
table.addCell(shard.id());
IndexMetaData indexMeta = state.getState().getMetaData().index(shard.index());
diff --git a/core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java b/core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java
index 834b3d391b..c423f7a853 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/count/RestCountAction.java
@@ -54,7 +54,7 @@ public class RestCountAction extends BaseRestHandler {
@Inject
public RestCountAction(Settings settings, RestController controller, Client client, IndicesQueriesRegistry indicesQueriesRegistry) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(POST, "/_count", this);
controller.registerHandler(GET, "/_count", this);
controller.registerHandler(POST, "/{index}/_count", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java b/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java
index 4336c9db2d..8e3449344c 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java
@@ -41,7 +41,7 @@ public class RestDeleteAction extends BaseRestHandler {
@Inject
public RestDeleteAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(DELETE, "/{index}/{type}/{id}", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java b/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java
index 0e472bb0bf..864cddc4ba 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java
@@ -58,7 +58,7 @@ public class RestExplainAction extends BaseRestHandler {
@Inject
public RestExplainAction(Settings settings, RestController controller, Client client, IndicesQueriesRegistry indicesQueriesRegistry) {
- super(settings, controller, client);
+ super(settings, client);
this.indicesQueriesRegistry = indicesQueriesRegistry;
controller.registerHandler(GET, "/{index}/{type}/{id}/_explain", this);
controller.registerHandler(POST, "/{index}/{type}/{id}/_explain", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/fieldstats/RestFieldStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/fieldstats/RestFieldStatsAction.java
index c314c4325d..17b406c71e 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/fieldstats/RestFieldStatsAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/fieldstats/RestFieldStatsAction.java
@@ -50,7 +50,7 @@ public class RestFieldStatsAction extends BaseRestHandler {
@Inject
public RestFieldStatsAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/_field_stats", this);
controller.registerHandler(POST, "/_field_stats", this);
controller.registerHandler(GET, "/{index}/_field_stats", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/get/RestGetAction.java b/core/src/main/java/org/elasticsearch/rest/action/get/RestGetAction.java
index e85eef4857..0f541bf7a9 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/get/RestGetAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/get/RestGetAction.java
@@ -48,7 +48,7 @@ public class RestGetAction extends BaseRestHandler {
@Inject
public RestGetAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/{index}/{type}/{id}", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java b/core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java
index ff6c04a6d1..d38ad458c4 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/get/RestGetSourceAction.java
@@ -48,7 +48,7 @@ public class RestGetSourceAction extends BaseRestHandler {
@Inject
public RestGetSourceAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/{index}/{type}/{id}/_source", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/get/RestHeadAction.java b/core/src/main/java/org/elasticsearch/rest/action/get/RestHeadAction.java
index f32c07f20f..31fd0cc9ea 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/get/RestHeadAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/get/RestHeadAction.java
@@ -44,7 +44,7 @@ public class RestHeadAction extends BaseRestHandler {
@Inject
public RestHeadAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(HEAD, "/{index}/{type}/{id}", this);
controller.registerHandler(HEAD, "/{index}/{type}/{id}/_source", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/get/RestMultiGetAction.java b/core/src/main/java/org/elasticsearch/rest/action/get/RestMultiGetAction.java
index 440312b7cb..5e3bb8eacf 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/get/RestMultiGetAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/get/RestMultiGetAction.java
@@ -42,7 +42,7 @@ public class RestMultiGetAction extends BaseRestHandler {
@Inject
public RestMultiGetAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/_mget", this);
controller.registerHandler(POST, "/_mget", this);
controller.registerHandler(GET, "/{index}/_mget", this);
@@ -50,7 +50,7 @@ public class RestMultiGetAction extends BaseRestHandler {
controller.registerHandler(GET, "/{index}/{type}/_mget", this);
controller.registerHandler(POST, "/{index}/{type}/_mget", this);
- this.allowExplicitIndex = settings.getAsBoolean("rest.action.multi.allow_explicit_index", true);
+ this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings);
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java
index 13a9329918..26dd1eca78 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java
@@ -47,7 +47,7 @@ public class RestIndexAction extends BaseRestHandler {
@Inject
public RestIndexAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(POST, "/{index}/{type}", this); // auto id creation
controller.registerHandler(PUT, "/{index}/{type}/{id}", this);
controller.registerHandler(POST, "/{index}/{type}/{id}", this);
@@ -58,7 +58,7 @@ public class RestIndexAction extends BaseRestHandler {
final class CreateHandler extends BaseRestHandler {
protected CreateHandler(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
}
@Override
@@ -77,6 +77,7 @@ public class RestIndexAction extends BaseRestHandler {
if (request.hasParam("ttl")) {
indexRequest.ttl(request.param("ttl"));
}
+ indexRequest.setPipeline(request.param("pipeline"));
indexRequest.source(request.content());
indexRequest.timeout(request.paramAsTime("timeout", IndexRequest.DEFAULT_TIMEOUT));
indexRequest.refresh(request.paramAsBoolean("refresh", indexRequest.refresh()));
diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java
new file mode 100644
index 0000000000..6559e32817
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.rest.action.ingest;
+
+import org.elasticsearch.action.ingest.DeletePipelineRequest;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.rest.BaseRestHandler;
+import org.elasticsearch.rest.RestChannel;
+import org.elasticsearch.rest.RestController;
+import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.rest.action.support.AcknowledgedRestListener;
+
+public class RestDeletePipelineAction extends BaseRestHandler {
+
+ @Inject
+ public RestDeletePipelineAction(Settings settings, RestController controller, Client client) {
+ super(settings, client);
+ controller.registerHandler(RestRequest.Method.DELETE, "/_ingest/pipeline/{id}", this);
+ }
+
+ @Override
+ protected void handleRequest(RestRequest restRequest, RestChannel channel, Client client) throws Exception {
+ DeletePipelineRequest request = new DeletePipelineRequest(restRequest.param("id"));
+ request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout()));
+ request.timeout(restRequest.paramAsTime("timeout", request.timeout()));
+ client.admin().cluster().deletePipeline(request, new AcknowledgedRestListener<>(channel));
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java
new file mode 100644
index 0000000000..c11290329b
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.rest.action.ingest;
+
+import org.elasticsearch.action.ingest.GetPipelineRequest;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.rest.BaseRestHandler;
+import org.elasticsearch.rest.RestChannel;
+import org.elasticsearch.rest.RestController;
+import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.rest.action.support.RestStatusToXContentListener;
+
+public class RestGetPipelineAction extends BaseRestHandler {
+
+ @Inject
+ public RestGetPipelineAction(Settings settings, RestController controller, Client client) {
+ super(settings, client);
+ controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/{id}", this);
+ }
+
+ @Override
+ protected void handleRequest(RestRequest restRequest, RestChannel channel, Client client) throws Exception {
+ GetPipelineRequest request = new GetPipelineRequest(Strings.splitStringByCommaToArray(restRequest.param("id")));
+ request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout()));
+ client.admin().cluster().getPipeline(request, new RestStatusToXContentListener<>(channel));
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java
new file mode 100644
index 0000000000..badccbb957
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.rest.action.ingest;
+
+import org.elasticsearch.action.ingest.PutPipelineRequest;
+import org.elasticsearch.action.ingest.WritePipelineResponse;
+import org.elasticsearch.action.ingest.WritePipelineResponseRestListener;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentBuilderString;
+import org.elasticsearch.rest.BaseRestHandler;
+import org.elasticsearch.rest.RestChannel;
+import org.elasticsearch.rest.RestController;
+import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.rest.action.support.AcknowledgedRestListener;
+import org.elasticsearch.rest.action.support.RestActions;
+
+import java.io.IOException;
+
+public class RestPutPipelineAction extends BaseRestHandler {
+
+ @Inject
+ public RestPutPipelineAction(Settings settings, RestController controller, Client client) {
+ super(settings, client);
+ controller.registerHandler(RestRequest.Method.PUT, "/_ingest/pipeline/{id}", this);
+ }
+
+ @Override
+ protected void handleRequest(RestRequest restRequest, RestChannel channel, Client client) throws Exception {
+ PutPipelineRequest request = new PutPipelineRequest(restRequest.param("id"), RestActions.getRestContent(restRequest));
+ request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout()));
+ request.timeout(restRequest.paramAsTime("timeout", request.timeout()));
+ client.admin().cluster().putPipeline(request, new WritePipelineResponseRestListener(channel));
+ }
+
+}
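A hedged sketch of driving the new endpoint through the same client call the handler makes; the pipeline id is hypothetical, pipelineSource stands in for a BytesReference holding the pipeline definition, and the listener is assumed to be an ActionListener<WritePipelineResponse>:

    PutPipelineRequest request = new PutPipelineRequest("my-pipeline", pipelineSource);
    request.timeout(TimeValue.timeValueSeconds(30)); // mirrors the "timeout" REST param
    client.admin().cluster().putPipeline(request, listener);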
diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java
new file mode 100644
index 0000000000..94f80a9b61
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.rest.action.ingest;
+
+import org.elasticsearch.action.ingest.SimulatePipelineRequest;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.rest.BaseRestHandler;
+import org.elasticsearch.rest.RestChannel;
+import org.elasticsearch.rest.RestController;
+import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.rest.action.support.RestActions;
+import org.elasticsearch.rest.action.support.RestStatusToXContentListener;
+import org.elasticsearch.rest.action.support.RestToXContentListener;
+
+public class RestSimulatePipelineAction extends BaseRestHandler {
+
+ @Inject
+ public RestSimulatePipelineAction(Settings settings, RestController controller, Client client) {
+ super(settings, client);
+ controller.registerHandler(RestRequest.Method.POST, "/_ingest/pipeline/{id}/_simulate", this);
+ controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/{id}/_simulate", this);
+ controller.registerHandler(RestRequest.Method.POST, "/_ingest/pipeline/_simulate", this);
+ controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/_simulate", this);
+ }
+
+ @Override
+ protected void handleRequest(RestRequest restRequest, RestChannel channel, Client client) throws Exception {
+ SimulatePipelineRequest request = new SimulatePipelineRequest(RestActions.getRestContent(restRequest));
+ request.setId(restRequest.param("id"));
+ request.setVerbose(restRequest.paramAsBoolean("verbose", false));
+ client.admin().cluster().simulatePipeline(request, new RestStatusToXContentListener<>(channel));
+ }
+}
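The simulate endpoint accepts either a stored pipeline id or an inline definition in the request body; a minimal sketch against the client call above, where content is a hypothetical BytesReference carrying the pipeline and sample documents:

    SimulatePipelineRequest request = new SimulatePipelineRequest(content);
    request.setId("my-pipeline");  // omit when the body defines the pipeline inline
    request.setVerbose(true);      // the verbose flag, false by default as above
    client.admin().cluster().simulatePipeline(request, listener);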
diff --git a/core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java b/core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java
index 42de9b898a..aaf0906b0f 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/main/RestMainAction.java
@@ -48,7 +48,7 @@ public class RestMainAction extends BaseRestHandler {
@Inject
public RestMainAction(Settings settings, Version version, RestController controller, ClusterName clusterName, Client client, ClusterService clusterService) {
- super(settings, controller, client);
+ super(settings, client);
this.version = version;
this.clusterName = clusterName;
this.clusterService = clusterService;
diff --git a/core/src/main/java/org/elasticsearch/rest/action/percolate/RestMultiPercolateAction.java b/core/src/main/java/org/elasticsearch/rest/action/percolate/RestMultiPercolateAction.java
index 879ec78d75..6a12ff1438 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/percolate/RestMultiPercolateAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/percolate/RestMultiPercolateAction.java
@@ -44,7 +44,7 @@ public class RestMultiPercolateAction extends BaseRestHandler {
@Inject
public RestMultiPercolateAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(POST, "/_mpercolate", this);
controller.registerHandler(POST, "/{index}/_mpercolate", this);
controller.registerHandler(POST, "/{index}/{type}/_mpercolate", this);
@@ -53,7 +53,7 @@ public class RestMultiPercolateAction extends BaseRestHandler {
controller.registerHandler(GET, "/{index}/_mpercolate", this);
controller.registerHandler(GET, "/{index}/{type}/_mpercolate", this);
- this.allowExplicitIndex = settings.getAsBoolean("rest.action.multi.allow_explicit_index", true);
+ this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings);
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/rest/action/percolate/RestPercolateAction.java b/core/src/main/java/org/elasticsearch/rest/action/percolate/RestPercolateAction.java
index 052fa42104..a7c66b245e 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/percolate/RestPercolateAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/percolate/RestPercolateAction.java
@@ -44,7 +44,7 @@ public class RestPercolateAction extends BaseRestHandler {
@Inject
public RestPercolateAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/{index}/{type}/_percolate", this);
controller.registerHandler(POST, "/{index}/{type}/_percolate", this);
@@ -109,7 +109,7 @@ public class RestPercolateAction extends BaseRestHandler {
final class RestCountPercolateDocHandler extends BaseRestHandler {
private RestCountPercolateDocHandler(Settings settings, final RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
}
@Override
@@ -123,7 +123,7 @@ public class RestPercolateAction extends BaseRestHandler {
final class RestPercolateExistingDocHandler extends BaseRestHandler {
protected RestPercolateExistingDocHandler(Settings settings, final RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
}
@Override
@@ -136,7 +136,7 @@ public class RestPercolateAction extends BaseRestHandler {
final class RestCountPercolateExistingDocHandler extends BaseRestHandler {
protected RestCountPercolateExistingDocHandler(Settings settings, final RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/rest/action/script/RestDeleteIndexedScriptAction.java b/core/src/main/java/org/elasticsearch/rest/action/script/RestDeleteIndexedScriptAction.java
index b492e7c513..9009025d3a 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/script/RestDeleteIndexedScriptAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/script/RestDeleteIndexedScriptAction.java
@@ -47,7 +47,7 @@ public class RestDeleteIndexedScriptAction extends BaseRestHandler {
}
protected RestDeleteIndexedScriptAction(Settings settings, RestController controller, boolean registerDefaultHandlers, Client client) {
- super(settings, controller, client);
+ super(settings, client);
if (registerDefaultHandlers) {
controller.registerHandler(DELETE, "/_scripts/{lang}/{id}", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/script/RestGetIndexedScriptAction.java b/core/src/main/java/org/elasticsearch/rest/action/script/RestGetIndexedScriptAction.java
index a4c6784d41..e2c4ff6373 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/script/RestGetIndexedScriptAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/script/RestGetIndexedScriptAction.java
@@ -48,7 +48,7 @@ public class RestGetIndexedScriptAction extends BaseRestHandler {
}
protected RestGetIndexedScriptAction(Settings settings, RestController controller, boolean registerDefaultHandlers, Client client) {
- super(settings, controller, client);
+ super(settings, client);
if (registerDefaultHandlers) {
controller.registerHandler(GET, "/_scripts/{lang}/{id}", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java b/core/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java
index ed440c2b9f..f5a6f67517 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/script/RestPutIndexedScriptAction.java
@@ -55,7 +55,7 @@ public class RestPutIndexedScriptAction extends BaseRestHandler {
}
protected RestPutIndexedScriptAction(Settings settings, RestController controller, boolean registerDefaultHandlers, Client client) {
- super(settings, controller, client);
+ super(settings, client);
if (registerDefaultHandlers) {
controller.registerHandler(POST, "/_scripts/{lang}/{id}", this);
controller.registerHandler(PUT, "/_scripts/{lang}/{id}", this);
@@ -67,7 +67,7 @@ public class RestPutIndexedScriptAction extends BaseRestHandler {
final class CreateHandler extends BaseRestHandler {
protected CreateHandler(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java
index b2a2905585..0dce23bf3b 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java
@@ -47,7 +47,7 @@ public class RestClearScrollAction extends BaseRestHandler {
@Inject
public RestClearScrollAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(DELETE, "/_search/scroll", this);
controller.registerHandler(DELETE, "/_search/scroll/{scroll_id}", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java
index ff51263e08..540dd260f8 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java
@@ -46,7 +46,7 @@ import org.elasticsearch.search.builder.SearchSourceBuilder;
import java.util.Map;
-import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringArrayValue;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringValue;
import static org.elasticsearch.rest.RestRequest.Method.GET;
@@ -62,7 +62,7 @@ public class RestMultiSearchAction extends BaseRestHandler {
@Inject
public RestMultiSearchAction(Settings settings, RestController controller, Client client, IndicesQueriesRegistry indicesQueriesRegistry) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/_msearch", this);
controller.registerHandler(POST, "/_msearch", this);
@@ -78,7 +78,7 @@ public class RestMultiSearchAction extends BaseRestHandler {
controller.registerHandler(GET, "/{index}/{type}/_msearch/template", this);
controller.registerHandler(POST, "/{index}/{type}/_msearch/template", this);
- this.allowExplicitIndex = settings.getAsBoolean("rest.action.multi.allow_explicit_index", true);
+ this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings);
this.indicesQueriesRegistry = indicesQueriesRegistry;
}
@@ -159,7 +159,7 @@ public class RestMultiSearchAction extends BaseRestHandler {
} else if ("search_type".equals(entry.getKey()) || "searchType".equals(entry.getKey())) {
searchRequest.searchType(nodeStringValue(value, null));
} else if ("request_cache".equals(entry.getKey()) || "requestCache".equals(entry.getKey())) {
- searchRequest.requestCache(nodeBooleanValue(value));
+ searchRequest.requestCache(lenientNodeBooleanValue(value));
} else if ("preference".equals(entry.getKey())) {
searchRequest.preference(nodeStringValue(value, null));
} else if ("routing".equals(entry.getKey())) {
diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java
index 6b8416a872..4f4924693d 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java
@@ -65,7 +65,7 @@ public class RestSearchAction extends BaseRestHandler {
@Inject
public RestSearchAction(Settings settings, RestController controller, Client client, IndicesQueriesRegistry queryRegistry) {
- super(settings, controller, client);
+ super(settings, client);
this.queryRegistry = queryRegistry;
controller.registerHandler(GET, "/_search", this);
controller.registerHandler(POST, "/_search", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java
index eb7e046590..9e9964245e 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java
@@ -51,7 +51,7 @@ public class RestSearchScrollAction extends BaseRestHandler {
@Inject
public RestSearchScrollAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/_search/scroll", this);
controller.registerHandler(POST, "/_search/scroll", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java b/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java
index 2841bbe1fe..4e6b88b68b 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/suggest/RestSuggestAction.java
@@ -49,7 +49,7 @@ public class RestSuggestAction extends BaseRestHandler {
@Inject
public RestSuggestAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(POST, "/_suggest", this);
controller.registerHandler(GET, "/_suggest", this);
controller.registerHandler(POST, "/{index}/_suggest", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/template/RestPutSearchTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/template/RestPutSearchTemplateAction.java
index 1523d299f0..4d0da8f0d1 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/template/RestPutSearchTemplateAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/template/RestPutSearchTemplateAction.java
@@ -50,7 +50,7 @@ public class RestPutSearchTemplateAction extends RestPutIndexedScriptAction {
final class CreateHandler extends BaseRestHandler {
protected CreateHandler(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/rest/action/termvectors/RestMultiTermVectorsAction.java b/core/src/main/java/org/elasticsearch/rest/action/termvectors/RestMultiTermVectorsAction.java
index fe897f9b09..dfcbeef171 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/termvectors/RestMultiTermVectorsAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/termvectors/RestMultiTermVectorsAction.java
@@ -40,7 +40,7 @@ public class RestMultiTermVectorsAction extends BaseRestHandler {
@Inject
public RestMultiTermVectorsAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/_mtermvectors", this);
controller.registerHandler(POST, "/_mtermvectors", this);
controller.registerHandler(GET, "/{index}/_mtermvectors", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/termvectors/RestTermVectorsAction.java b/core/src/main/java/org/elasticsearch/rest/action/termvectors/RestTermVectorsAction.java
index af81dfcd0a..dbbd885fe6 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/termvectors/RestTermVectorsAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/termvectors/RestTermVectorsAction.java
@@ -49,7 +49,7 @@ public class RestTermVectorsAction extends BaseRestHandler {
@Inject
public RestTermVectorsAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(GET, "/{index}/{type}/_termvectors", this);
controller.registerHandler(POST, "/{index}/{type}/_termvectors", this);
controller.registerHandler(GET, "/{index}/{type}/{id}/_termvectors", this);
diff --git a/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java b/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java
index 24264ca292..88f9037452 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java
@@ -48,7 +48,7 @@ public class RestUpdateAction extends BaseRestHandler {
@Inject
public RestUpdateAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(POST, "/{index}/{type}/{id}/_update", this);
}
diff --git a/core/src/main/java/org/elasticsearch/rest/support/RestUtils.java b/core/src/main/java/org/elasticsearch/rest/support/RestUtils.java
index 56bb18d5e6..167e858c1d 100644
--- a/core/src/main/java/org/elasticsearch/rest/support/RestUtils.java
+++ b/core/src/main/java/org/elasticsearch/rest/support/RestUtils.java
@@ -57,7 +57,7 @@ public class RestUtils {
if (fromIndex >= s.length()) {
return;
}
-
+
int queryStringLength = s.contains("#") ? s.indexOf("#") : s.length();
String name = null;
diff --git a/core/src/main/java/org/elasticsearch/script/AbstractExecutableScript.java b/core/src/main/java/org/elasticsearch/script/AbstractExecutableScript.java
index 41ed875e09..a42ef54812 100644
--- a/core/src/main/java/org/elasticsearch/script/AbstractExecutableScript.java
+++ b/core/src/main/java/org/elasticsearch/script/AbstractExecutableScript.java
@@ -24,9 +24,4 @@ public abstract class AbstractExecutableScript implements ExecutableScript {
@Override
public void setNextVar(String name, Object value) {
}
-
- @Override
- public Object unwrap(Object value) {
- return value;
- }
}
\ No newline at end of file
diff --git a/core/src/main/java/org/elasticsearch/script/ExecutableScript.java b/core/src/main/java/org/elasticsearch/script/ExecutableScript.java
index 43c990201f..bdf93acc4d 100644
--- a/core/src/main/java/org/elasticsearch/script/ExecutableScript.java
+++ b/core/src/main/java/org/elasticsearch/script/ExecutableScript.java
@@ -32,8 +32,12 @@ public interface ExecutableScript {
Object run();
/**
- * Unwraps a possible script value. For example, when passing vars and expecting the returned value to
- * be part of the vars.
+ * Unwraps a possible script value. For example, when passing vars and
+ * expecting the returned value to be part of the vars. JavaScript and
+ * Python need this, but other scripting engines just return the values
+ * passed in.
*/
- Object unwrap(Object value);
+ default Object unwrap(Object value) {
+ return value;
+ }
}
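Since unwrap is now a default method, engines that perform no value conversion can drop their boilerplate overrides, as AbstractExecutableScript does above. A minimal sketch (the constant return value is arbitrary):

    ExecutableScript script = new AbstractExecutableScript() {
        @Override
        public Object run() {
            return 42;
        }
    };
    assert script.unwrap("x").equals("x"); // identity via the new default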
diff --git a/core/src/main/java/org/elasticsearch/script/NativeScriptEngineService.java b/core/src/main/java/org/elasticsearch/script/NativeScriptEngineService.java
index 19bc447888..6b9a8733f5 100644
--- a/core/src/main/java/org/elasticsearch/script/NativeScriptEngineService.java
+++ b/core/src/main/java/org/elasticsearch/script/NativeScriptEngineService.java
@@ -27,6 +27,8 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
import java.util.Map;
import static java.util.Collections.unmodifiableMap;
@@ -38,6 +40,8 @@ public class NativeScriptEngineService extends AbstractComponent implements Scri
public static final String NAME = "native";
+ public static final List<String> TYPES = Collections.singletonList(NAME);
+
private final Map<String, NativeScriptFactory> scripts;
@Inject
@@ -47,17 +51,17 @@ public class NativeScriptEngineService extends AbstractComponent implements Scri
}
@Override
- public String[] types() {
- return new String[]{NAME};
+ public List<String> getTypes() {
+ return TYPES;
}
@Override
- public String[] extensions() {
- return new String[0];
+ public List<String> getExtensions() {
+ return Collections.emptyList();
}
@Override
- public boolean sandboxed() {
+ public boolean isSandboxed() {
return false;
}
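The array-returning methods become List-returning getters; a sketch of the renamed accessors, assuming a service instance constructed with its injected scripts map:

    assert service.getTypes().equals(Collections.singletonList("native"));
    assert service.getExtensions().isEmpty();
    assert service.isSandboxed() == false;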
diff --git a/core/src/main/java/org/elasticsearch/script/ScriptContext.java b/core/src/main/java/org/elasticsearch/script/ScriptContext.java
index 4b1b6de63f..3ab2bb52c9 100644
--- a/core/src/main/java/org/elasticsearch/script/ScriptContext.java
+++ b/core/src/main/java/org/elasticsearch/script/ScriptContext.java
@@ -37,7 +37,7 @@ public interface ScriptContext {
*/
enum Standard implements ScriptContext {
- AGGS("aggs"), SEARCH("search"), UPDATE("update");
+ AGGS("aggs"), SEARCH("search"), UPDATE("update"), INGEST("ingest");
private final String key;
diff --git a/core/src/main/java/org/elasticsearch/script/ScriptEngineRegistry.java b/core/src/main/java/org/elasticsearch/script/ScriptEngineRegistry.java
new file mode 100644
index 0000000000..4bc9a23eba
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/script/ScriptEngineRegistry.java
@@ -0,0 +1,93 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.script;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+public class ScriptEngineRegistry {
+
+ private final Map<Class<? extends ScriptEngineService>, List<String>> registeredScriptEngineServices;
+ private final Map<String, Class<? extends ScriptEngineService>> registeredLanguages;
+
+ public ScriptEngineRegistry(Iterable<ScriptEngineRegistration> registrations) {
+ Objects.requireNonNull(registrations);
+ Map<Class<? extends ScriptEngineService>, List<String>> registeredScriptEngineServices = new HashMap<>();
+ Map<String, Class<? extends ScriptEngineService>> registeredLanguages = new HashMap<>();
+ for (ScriptEngineRegistration registration : registrations) {
+ List<String> languages =
+ registeredScriptEngineServices.putIfAbsent(registration.getScriptEngineService(), Collections.unmodifiableList(registration.getScriptEngineLanguages()));
+ if (languages != null) {
+ throw new IllegalArgumentException("script engine service [" + registration.getScriptEngineService() + "] already registered for languages [" + String.join(",", languages) + "]");
+ }
+
+ for (String language : registration.getScriptEngineLanguages()) {
+ Class<? extends ScriptEngineService> scriptEngineServiceClazz =
+ registeredLanguages.putIfAbsent(language, registration.getScriptEngineService());
+ if (scriptEngineServiceClazz != null) {
+ throw new IllegalArgumentException("scripting language [" + language + "] already registered for script engine service [" + scriptEngineServiceClazz.getCanonicalName() + "]");
+ }
+ }
+ }
+
+ this.registeredScriptEngineServices = Collections.unmodifiableMap(registeredScriptEngineServices);
+ this.registeredLanguages = Collections.unmodifiableMap(registeredLanguages);
+ }
+
+ Iterable<Class<? extends ScriptEngineService>> getRegisteredScriptEngineServices() {
+ return registeredScriptEngineServices.keySet();
+ }
+
+ List<String> getLanguages(Class<? extends ScriptEngineService> scriptEngineService) {
+ Objects.requireNonNull(scriptEngineService);
+ return registeredScriptEngineServices.get(scriptEngineService);
+ }
+
+ Map<String, Class<? extends ScriptEngineService>> getRegisteredLanguages() {
+ return registeredLanguages;
+ }
+
+ public static class ScriptEngineRegistration {
+ private final Class<? extends ScriptEngineService> scriptEngineService;
+ private final List<String> scriptEngineLanguages;
+
+ public ScriptEngineRegistration(Class<? extends ScriptEngineService> scriptEngineService, List<String> scriptEngineLanguages) {
+ Objects.requireNonNull(scriptEngineService);
+ Objects.requireNonNull(scriptEngineLanguages);
+ if (scriptEngineLanguages.isEmpty()) {
+ throw new IllegalArgumentException("languages for script engine service [" + scriptEngineService.getCanonicalName() + "] should be non-empty");
+ }
+ this.scriptEngineService = scriptEngineService;
+ this.scriptEngineLanguages = scriptEngineLanguages;
+ }
+
+ Class<? extends ScriptEngineService> getScriptEngineService() {
+ return scriptEngineService;
+ }
+
+ List<String> getScriptEngineLanguages() {
+ return scriptEngineLanguages;
+ }
+ }
+
+}
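
A minimal usage sketch for the new registry (MyEngine and OtherEngine are hypothetical ScriptEngineService implementations; only the registry calls come from the class above):

    // assumes: import java.util.Arrays;
    ScriptEngineRegistry registry = new ScriptEngineRegistry(Arrays.asList(
            new ScriptEngineRegistry.ScriptEngineRegistration(MyEngine.class, Arrays.asList("mylang", "ml"))));

    // a second engine claiming an already-registered language fails fast:
    // new ScriptEngineRegistry(Arrays.asList(
    //         new ScriptEngineRegistry.ScriptEngineRegistration(MyEngine.class, Arrays.asList("mylang")),
    //         new ScriptEngineRegistry.ScriptEngineRegistration(OtherEngine.class, Arrays.asList("mylang"))));
    // -> IllegalArgumentException: scripting language [mylang] already registered for script engine service [...]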
diff --git a/core/src/main/java/org/elasticsearch/script/ScriptEngineService.java b/core/src/main/java/org/elasticsearch/script/ScriptEngineService.java
index 41befc9406..1c5d0f1e9e 100644
--- a/core/src/main/java/org/elasticsearch/script/ScriptEngineService.java
+++ b/core/src/main/java/org/elasticsearch/script/ScriptEngineService.java
@@ -23,6 +23,7 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.Closeable;
+import java.util.List;
import java.util.Map;
/**
@@ -30,11 +31,11 @@ import java.util.Map;
*/
public interface ScriptEngineService extends Closeable {
- String[] types();
+ List<String> getTypes();
- String[] extensions();
+ List<String> getExtensions();
- boolean sandboxed();
+ boolean isSandboxed();
Object compile(String script, Map<String, String> params);
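
Engines migrating to the renamed interface swap the array-returning methods for List-returning ones. A sketch of the new methods, assuming a hypothetical single-language engine:

    // inside a class implementing ScriptEngineService
    // assumes: import java.util.Collections; import java.util.List;
    @Override
    public List<String> getTypes() {
        return Collections.singletonList("mylang");   // was: String[] types()
    }

    @Override
    public List<String> getExtensions() {
        return Collections.singletonList("ml");       // was: String[] extensions()
    }

    @Override
    public boolean isSandboxed() {
        return true;                                  // was: boolean sandboxed()
    }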
diff --git a/core/src/main/java/org/elasticsearch/script/ScriptMode.java b/core/src/main/java/org/elasticsearch/script/ScriptMode.java
index b35dda716e..aa9d100d43 100644
--- a/core/src/main/java/org/elasticsearch/script/ScriptMode.java
+++ b/core/src/main/java/org/elasticsearch/script/ScriptMode.java
@@ -19,37 +19,48 @@
package org.elasticsearch.script;
-import org.elasticsearch.common.Booleans;
-
-import java.util.Locale;
+import java.util.HashMap;
+import java.util.Map;
/**
* Mode for a specific script, used for script settings.
- * Defines whether a certain script or catefory of scripts can be executed or not, or whether it can
+ * Defines whether a certain script or category of scripts can be executed or not, or whether it can
* only be executed by a sandboxed scripting language.
*/
enum ScriptMode {
- ON,
- OFF,
- SANDBOX;
+ ON("true"),
+ OFF("false"),
+ SANDBOX("sandbox");
- static ScriptMode parse(String input) {
- input = input.toLowerCase(Locale.ROOT);
- if (Booleans.isExplicitTrue(input)) {
- return ON;
- }
- if (Booleans.isExplicitFalse(input)) {
- return OFF;
+ private final String mode;
+
+ ScriptMode(String mode) {
+ this.mode = mode;
+ }
+
+ private static final Map<String, ScriptMode> SCRIPT_MODES;
+
+ static {
+ SCRIPT_MODES = new HashMap<>();
+ for (ScriptMode scriptMode : ScriptMode.values()) {
+ SCRIPT_MODES.put(scriptMode.mode, scriptMode);
}
- if (SANDBOX.toString().equals(input)) {
- return SANDBOX;
+ }
+
+ static ScriptMode parse(String input) {
+ ScriptMode scriptMode = SCRIPT_MODES.get(input);
+ if (scriptMode == null) {
+ throw new IllegalArgumentException("script mode [" + input + "] not supported");
}
- throw new IllegalArgumentException("script mode [" + input + "] not supported");
+ return scriptMode;
}
+ public String getMode() {
+ return mode;
+ }
@Override
public String toString() {
- return name().toLowerCase(Locale.ROOT);
+ return mode;
}
}
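
With the table-driven parse above, only the exact strings "true", "false", and "sandbox" are accepted. A quick behavior sketch:

    ScriptMode on = ScriptMode.parse("true");         // ScriptMode.ON
    ScriptMode sandbox = ScriptMode.parse("sandbox"); // ScriptMode.SANDBOX
    assert on.getMode().equals("true") && on.toString().equals("true");
    // ScriptMode.parse("TRUE") now throws: the old parser lowercased its input
    // first, while the new lookup is an exact map hit.
    // -> IllegalArgumentException: script mode [TRUE] not supported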
diff --git a/core/src/main/java/org/elasticsearch/script/ScriptModes.java b/core/src/main/java/org/elasticsearch/script/ScriptModes.java
index cfa3a592d8..baaec00935 100644
--- a/core/src/main/java/org/elasticsearch/script/ScriptModes.java
+++ b/core/src/main/java/org/elasticsearch/script/ScriptModes.java
@@ -19,127 +19,32 @@
package org.elasticsearch.script;
-import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.ScriptService.ScriptType;
+import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
-import java.util.Set;
import java.util.TreeMap;
-import static java.util.Collections.unmodifiableMap;
-
/**
* Holds the {@link org.elasticsearch.script.ScriptMode}s for each of the different scripting languages available,
* each script source and each scripted operation.
*/
public class ScriptModes {
- static final String SCRIPT_SETTINGS_PREFIX = "script.";
- static final String ENGINE_SETTINGS_PREFIX = "script.engine";
+ private static final String SCRIPT_SETTINGS_PREFIX = "script";
+ private static final String ENGINE_SETTINGS_PREFIX = "script.engine";
final Map<String, ScriptMode> scriptModes;
- ScriptModes(Map<String, ScriptEngineService> scriptEngines, ScriptContextRegistry scriptContextRegistry, Settings settings) {
- //filter out the native engine as we don't want to apply fine grained settings to it.
- //native scripts are always on as they are static by definition.
- Map<String, ScriptEngineService> filteredEngines = new HashMap<>(scriptEngines);
- filteredEngines.remove(NativeScriptEngineService.NAME);
- this.scriptModes = buildScriptModeSettingsMap(settings, filteredEngines, scriptContextRegistry);
- }
-
- private static Map<String, ScriptMode> buildScriptModeSettingsMap(Settings settings, Map<String, ScriptEngineService> scriptEngines, ScriptContextRegistry scriptContextRegistry) {
- HashMap<String, ScriptMode> scriptModesMap = new HashMap<>();
-
- //file scripts are enabled by default, for any language
- addGlobalScriptTypeModes(scriptEngines.keySet(), scriptContextRegistry, ScriptType.FILE, ScriptMode.ON, scriptModesMap);
- //indexed scripts are enabled by default only for sandboxed languages
- addGlobalScriptTypeModes(scriptEngines.keySet(), scriptContextRegistry, ScriptType.INDEXED, ScriptMode.SANDBOX, scriptModesMap);
- //dynamic scripts are enabled by default only for sandboxed languages
- addGlobalScriptTypeModes(scriptEngines.keySet(), scriptContextRegistry, ScriptType.INLINE, ScriptMode.SANDBOX, scriptModesMap);
-
- processSourceBasedGlobalSettings(settings, scriptEngines, scriptContextRegistry, scriptModesMap);
- processOperationBasedGlobalSettings(settings, scriptEngines, scriptContextRegistry, scriptModesMap);
- processEngineSpecificSettings(settings, scriptEngines, scriptContextRegistry, scriptModesMap);
- return unmodifiableMap(scriptModesMap);
- }
-
- private static void processSourceBasedGlobalSettings(Settings settings, Map<String, ScriptEngineService> scriptEngines, ScriptContextRegistry scriptContextRegistry, Map<String, ScriptMode> scriptModes) {
- //read custom source based settings for all operations (e.g. script.indexed: on)
- for (ScriptType scriptType : ScriptType.values()) {
- String scriptTypeSetting = settings.get(SCRIPT_SETTINGS_PREFIX + scriptType);
- if (Strings.hasLength(scriptTypeSetting)) {
- ScriptMode scriptTypeMode = ScriptMode.parse(scriptTypeSetting);
- addGlobalScriptTypeModes(scriptEngines.keySet(), scriptContextRegistry, scriptType, scriptTypeMode, scriptModes);
- }
+ ScriptModes(ScriptSettings scriptSettings, Settings settings) {
+ HashMap<String, ScriptMode> scriptModes = new HashMap<>();
+ for (Setting<ScriptMode> scriptModeSetting : scriptSettings.getScriptLanguageSettings()) {
+ scriptModes.put(scriptModeSetting.getKey(), scriptModeSetting.get(settings));
}
- }
-
- private static void processOperationBasedGlobalSettings(Settings settings, Map<String, ScriptEngineService> scriptEngines, ScriptContextRegistry scriptContextRegistry, Map<String, ScriptMode> scriptModes) {
- //read custom op based settings for all sources (e.g. script.aggs: off)
- //op based settings take precedence over source based settings, hence they get expanded later
- for (ScriptContext scriptContext : scriptContextRegistry.scriptContexts()) {
- ScriptMode scriptMode = getScriptContextMode(settings, SCRIPT_SETTINGS_PREFIX, scriptContext);
- if (scriptMode != null) {
- addGlobalScriptContextModes(scriptEngines.keySet(), scriptContext, scriptMode, scriptModes);
- }
- }
- }
-
- private static void processEngineSpecificSettings(Settings settings, Map<String, ScriptEngineService> scriptEngines, ScriptContextRegistry scriptContextRegistry, Map<String, ScriptMode> scriptModes) {
- Map<String, Settings> langGroupedSettings = settings.getGroups(ENGINE_SETTINGS_PREFIX, true);
- for (Map.Entry<String, Settings> langSettings : langGroupedSettings.entrySet()) {
- //read engine specific settings that refer to a non existing script lang will be ignored
- ScriptEngineService scriptEngineService = scriptEngines.get(langSettings.getKey());
- if (scriptEngineService != null) {
- for (ScriptType scriptType : ScriptType.values()) {
- String scriptTypePrefix = scriptType + ".";
- for (ScriptContext scriptContext : scriptContextRegistry.scriptContexts()) {
- ScriptMode scriptMode = getScriptContextMode(langSettings.getValue(), scriptTypePrefix, scriptContext);
- if (scriptMode != null) {
- addScriptMode(scriptEngineService, scriptType, scriptContext, scriptMode, scriptModes);
- }
- }
- }
- }
- }
- }
-
- private static ScriptMode getScriptContextMode(Settings settings, String prefix, ScriptContext scriptContext) {
- String settingValue = settings.get(prefix + scriptContext.getKey());
- if (Strings.hasLength(settingValue)) {
- return ScriptMode.parse(settingValue);
- }
- return null;
- }
-
- private static void addGlobalScriptTypeModes(Set<String> langs, ScriptContextRegistry scriptContextRegistry, ScriptType scriptType, ScriptMode scriptMode, Map<String, ScriptMode> scriptModes) {
- for (String lang : langs) {
- for (ScriptContext scriptContext : scriptContextRegistry.scriptContexts()) {
- addScriptMode(lang, scriptType, scriptContext, scriptMode, scriptModes);
- }
- }
- }
-
- private static void addGlobalScriptContextModes(Set<String> langs, ScriptContext scriptContext, ScriptMode scriptMode, Map<String, ScriptMode> scriptModes) {
- for (String lang : langs) {
- for (ScriptType scriptType : ScriptType.values()) {
- addScriptMode(lang, scriptType, scriptContext, scriptMode, scriptModes);
- }
- }
- }
-
- private static void addScriptMode(ScriptEngineService scriptEngineService, ScriptType scriptType, ScriptContext scriptContext,
- ScriptMode scriptMode, Map<String, ScriptMode> scriptModes) {
- //expand the lang specific settings to all of the different names given to each scripting language
- for (String scriptEngineName : scriptEngineService.types()) {
- addScriptMode(scriptEngineName, scriptType, scriptContext, scriptMode, scriptModes);
- }
- }
-
- private static void addScriptMode(String lang, ScriptType scriptType, ScriptContext scriptContext, ScriptMode scriptMode, Map<String, ScriptMode> scriptModes) {
- scriptModes.put(ENGINE_SETTINGS_PREFIX + "." + lang + "." + scriptType + "." + scriptContext.getKey(), scriptMode);
+ this.scriptModes = Collections.unmodifiableMap(scriptModes);
}
/**
@@ -156,13 +61,25 @@ public class ScriptModes {
if (NativeScriptEngineService.NAME.equals(lang)) {
return ScriptMode.ON;
}
- ScriptMode scriptMode = scriptModes.get(ENGINE_SETTINGS_PREFIX + "." + lang + "." + scriptType + "." + scriptContext.getKey());
+ ScriptMode scriptMode = scriptModes.get(getKey(lang, scriptType, scriptContext));
if (scriptMode == null) {
throw new IllegalArgumentException("script mode not found for lang [" + lang + "], script_type [" + scriptType + "], operation [" + scriptContext.getKey() + "]");
}
return scriptMode;
}
+ static String operationKey(ScriptContext scriptContext) {
+ return SCRIPT_SETTINGS_PREFIX + "." + scriptContext.getKey();
+ }
+
+ static String sourceKey(ScriptType scriptType) {
+ return SCRIPT_SETTINGS_PREFIX + "." + scriptType.getScriptType();
+ }
+
+ static String getKey(String lang, ScriptType scriptType, ScriptContext scriptContext) {
+ return ENGINE_SETTINGS_PREFIX + "." + lang + "." + scriptType + "." + scriptContext.getKey();
+ }
+
@Override
public String toString() {
//order settings by key before printing them out, for readability
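
The three package-private helpers centralize the setting-key shapes that used to be assembled inline. A sketch of the keys they produce ("aggs" stands in for some registered script context):

    String source = ScriptModes.sourceKey(ScriptType.INLINE);          // "script.inline"
    String op = ScriptModes.operationKey(aggsContext);                 // "script.aggs"
    String full = ScriptModes.getKey("groovy", ScriptType.INLINE, aggsContext);
    // "script.engine.groovy.inline.aggs"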
diff --git a/core/src/main/java/org/elasticsearch/script/ScriptModule.java b/core/src/main/java/org/elasticsearch/script/ScriptModule.java
index f3bdad64b6..1ccc7eb956 100644
--- a/core/src/main/java/org/elasticsearch/script/ScriptModule.java
+++ b/core/src/main/java/org/elasticsearch/script/ScriptModule.java
@@ -22,12 +22,14 @@ package org.elasticsearch.script;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.multibindings.MapBinder;
import org.elasticsearch.common.inject.multibindings.Multibinder;
-import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.SettingsModule;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
/**
* An {@link org.elasticsearch.common.inject.Module} which manages {@link ScriptEngineService}s, as well
@@ -35,20 +37,25 @@ import java.util.Map;
*/
public class ScriptModule extends AbstractModule {
- private final Settings settings;
+ private final SettingsModule settingsModule;
- private final List<Class<? extends ScriptEngineService>> scriptEngines = new ArrayList<>();
+ private final List<ScriptEngineRegistry.ScriptEngineRegistration> scriptEngineRegistrations = new ArrayList<>();
+
+ {
+ scriptEngineRegistrations.add(new ScriptEngineRegistry.ScriptEngineRegistration(NativeScriptEngineService.class, NativeScriptEngineService.TYPES));
+ }
private final Map<String, Class<? extends NativeScriptFactory>> scripts = new HashMap<>();
private final List<ScriptContext.Plugin> customScriptContexts = new ArrayList<>();
- public ScriptModule(Settings settings) {
- this.settings = settings;
+ public ScriptModule(SettingsModule settingsModule) {
+ this.settingsModule = settingsModule;
}
- public void addScriptEngine(Class<? extends ScriptEngineService> scriptEngine) {
- scriptEngines.add(scriptEngine);
+ public void addScriptEngine(ScriptEngineRegistry.ScriptEngineRegistration scriptEngineRegistration) {
+ Objects.requireNonNull(scriptEngineRegistration);
+ scriptEngineRegistrations.add(scriptEngineRegistration);
}
public void registerScript(String name, Class<? extends NativeScriptFactory> script) {
@@ -74,11 +81,23 @@ public class ScriptModule extends AbstractModule {
Multibinder<ScriptEngineService> multibinder = Multibinder.newSetBinder(binder(), ScriptEngineService.class);
multibinder.addBinding().to(NativeScriptEngineService.class);
- for (Class<? extends ScriptEngineService> scriptEngine : scriptEngines) {
- multibinder.addBinding().to(scriptEngine).asEagerSingleton();
+ for (ScriptEngineRegistry.ScriptEngineRegistration scriptEngineRegistration : scriptEngineRegistrations) {
+ multibinder.addBinding().to(scriptEngineRegistration.getScriptEngineService()).asEagerSingleton();
}
- bind(ScriptContextRegistry.class).toInstance(new ScriptContextRegistry(customScriptContexts));
+ ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(customScriptContexts);
+ ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(scriptEngineRegistrations);
+
+ ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
+
+ scriptSettings.getScriptTypeSettings().forEach(settingsModule::registerSetting);
+ scriptSettings.getScriptContextSettings().forEach(settingsModule::registerSetting);
+ scriptSettings.getScriptLanguageSettings().forEach(settingsModule::registerSetting);
+ settingsModule.registerSetting(scriptSettings.getDefaultScriptLanguageSetting());
+
+ bind(ScriptContextRegistry.class).toInstance(scriptContextRegistry);
+ bind(ScriptEngineRegistry.class).toInstance(scriptEngineRegistry);
+ bind(ScriptSettings.class).toInstance(scriptSettings);
bind(ScriptService.class).asEagerSingleton();
}
}
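
Plugins now hand the module a full registration (engine class plus its language names) instead of a bare class. A plugin-side sketch, assuming the conventional onModule hook and a hypothetical MyEngine:

    // assumes: import java.util.Arrays;
    public void onModule(ScriptModule module) {
        module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(
                MyEngine.class, Arrays.asList("mylang")));
    }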
diff --git a/core/src/main/java/org/elasticsearch/script/ScriptService.java b/core/src/main/java/org/elasticsearch/script/ScriptService.java
index c9e9f9a873..d0104656d5 100644
--- a/core/src/main/java/org/elasticsearch/script/ScriptService.java
+++ b/core/src/main/java/org/elasticsearch/script/ScriptService.java
@@ -31,7 +31,6 @@ import org.elasticsearch.action.indexedscripts.delete.DeleteIndexedScriptRequest
import org.elasticsearch.action.indexedscripts.get.GetIndexedScriptRequest;
import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequest;
import org.elasticsearch.client.Client;
-import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.Strings;
@@ -46,6 +45,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
@@ -71,6 +71,7 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
+import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
@@ -83,13 +84,10 @@ public class ScriptService extends AbstractComponent implements Closeable {
static final String DISABLE_DYNAMIC_SCRIPTING_SETTING = "script.disable_dynamic";
- public static final String DEFAULT_SCRIPTING_LANGUAGE_SETTING = "script.default_lang";
- public static final String SCRIPT_CACHE_SIZE_SETTING = "script.cache.max_size";
- public static final int SCRIPT_CACHE_SIZE_DEFAULT = 100;
- public static final String SCRIPT_CACHE_EXPIRE_SETTING = "script.cache.expire";
+ public static final Setting<Integer> SCRIPT_CACHE_SIZE_SETTING = Setting.intSetting("script.cache.max_size", 100, 0, false, Setting.Scope.CLUSTER);
+ public static final Setting<TimeValue> SCRIPT_CACHE_EXPIRE_SETTING = Setting.positiveTimeSetting("script.cache.expire", TimeValue.timeValueMillis(0), false, Setting.Scope.CLUSTER);
public static final String SCRIPT_INDEX = ".scripts";
- public static final String DEFAULT_LANG = "groovy";
- public static final String SCRIPT_AUTO_RELOAD_ENABLED_SETTING = "script.auto_reload_enabled";
+ public static final Setting<Boolean> SCRIPT_AUTO_RELOAD_ENABLED_SETTING = Setting.boolSetting("script.auto_reload_enabled", true, false, Setting.Scope.CLUSTER);
private final String defaultLang;
@@ -138,45 +136,50 @@ public class ScriptService extends AbstractComponent implements Closeable {
@Inject
public ScriptService(Settings settings, Environment env, Set<ScriptEngineService> scriptEngines,
- ResourceWatcherService resourceWatcherService, ScriptContextRegistry scriptContextRegistry) throws IOException {
+ ResourceWatcherService resourceWatcherService, ScriptEngineRegistry scriptEngineRegistry, ScriptContextRegistry scriptContextRegistry, ScriptSettings scriptSettings) throws IOException {
super(settings);
+ Objects.requireNonNull(scriptEngineRegistry);
+ Objects.requireNonNull(scriptContextRegistry);
+ Objects.requireNonNull(scriptSettings);
this.parseFieldMatcher = new ParseFieldMatcher(settings);
if (Strings.hasLength(settings.get(DISABLE_DYNAMIC_SCRIPTING_SETTING))) {
throw new IllegalArgumentException(DISABLE_DYNAMIC_SCRIPTING_SETTING + " is not a supported setting, replace with fine-grained script settings. \n" +
- "Dynamic scripts can be enabled for all languages and all operations by replacing `script.disable_dynamic: false` with `script.inline: on` and `script.indexed: on` in elasticsearch.yml");
+ "Dynamic scripts can be enabled for all languages and all operations by replacing `script.disable_dynamic: false` with `script.inline: true` and `script.indexed: true` in elasticsearch.yml");
}
this.scriptEngines = scriptEngines;
this.scriptContextRegistry = scriptContextRegistry;
- int cacheMaxSize = settings.getAsInt(SCRIPT_CACHE_SIZE_SETTING, SCRIPT_CACHE_SIZE_DEFAULT);
- TimeValue cacheExpire = settings.getAsTime(SCRIPT_CACHE_EXPIRE_SETTING, null);
- logger.debug("using script cache with max_size [{}], expire [{}]", cacheMaxSize, cacheExpire);
+ int cacheMaxSize = SCRIPT_CACHE_SIZE_SETTING.get(settings);
- this.defaultLang = settings.get(DEFAULT_SCRIPTING_LANGUAGE_SETTING, DEFAULT_LANG);
+ this.defaultLang = scriptSettings.getDefaultScriptLanguageSetting().get(settings);
CacheBuilder<CacheKey, CompiledScript> cacheBuilder = CacheBuilder.builder();
if (cacheMaxSize >= 0) {
cacheBuilder.setMaximumWeight(cacheMaxSize);
}
- if (cacheExpire != null) {
+
+ TimeValue cacheExpire = SCRIPT_CACHE_EXPIRE_SETTING.get(settings);
+ if (cacheExpire.getNanos() != 0) {
cacheBuilder.setExpireAfterAccess(cacheExpire.nanos());
}
+
+ logger.debug("using script cache with max_size [{}], expire [{}]", cacheMaxSize, cacheExpire);
this.cache = cacheBuilder.removalListener(new ScriptCacheRemovalListener()).build();
Map<String, ScriptEngineService> enginesByLangBuilder = new HashMap<>();
Map<String, ScriptEngineService> enginesByExtBuilder = new HashMap<>();
for (ScriptEngineService scriptEngine : scriptEngines) {
- for (String type : scriptEngine.types()) {
- enginesByLangBuilder.put(type, scriptEngine);
+ for (String language : scriptEngineRegistry.getLanguages(scriptEngine.getClass())) {
+ enginesByLangBuilder.put(language, scriptEngine);
}
- for (String ext : scriptEngine.extensions()) {
+ for (String ext : scriptEngine.getExtensions()) {
enginesByExtBuilder.put(ext, scriptEngine);
}
}
this.scriptEnginesByLang = unmodifiableMap(enginesByLangBuilder);
this.scriptEnginesByExt = unmodifiableMap(enginesByExtBuilder);
- this.scriptModes = new ScriptModes(this.scriptEnginesByLang, scriptContextRegistry, settings);
+ this.scriptModes = new ScriptModes(scriptSettings, settings);
// add file watcher for static scripts
scriptsDirectory = env.scriptsFile();
@@ -186,7 +189,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
FileWatcher fileWatcher = new FileWatcher(scriptsDirectory);
fileWatcher.addListener(new ScriptChangesListener());
- if (settings.getAsBoolean(SCRIPT_AUTO_RELOAD_ENABLED_SETTING, true)) {
+ if (SCRIPT_AUTO_RELOAD_ENABLED_SETTING.get(settings)) {
// automatic reload is enabled - register scripts
resourceWatcherService.add(fileWatcher);
} else {
@@ -225,7 +228,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
/**
* Checks if a script can be executed and compiles it if needed, or returns the previously compiled and cached script.
*/
- public CompiledScript compile(Script script, ScriptContext scriptContext, HasContextAndHeaders headersContext, Map<String, String> params) {
+ public CompiledScript compile(Script script, ScriptContext scriptContext, Map<String, String> params) {
if (script == null) {
throw new IllegalArgumentException("The parameter script (Script) must not be null.");
}
@@ -253,14 +256,14 @@ public class ScriptService extends AbstractComponent implements Closeable {
" operation [" + scriptContext.getKey() + "] and lang [" + lang + "] are not supported");
}
- return compileInternal(script, headersContext, params);
+ return compileInternal(script, params);
}
/**
* Compiles a script straight-away, or returns the previously compiled and cached script,
* without checking if it can be executed based on settings.
*/
- public CompiledScript compileInternal(Script script, HasContextAndHeaders context, Map<String, String> params) {
+ public CompiledScript compileInternal(Script script, Map<String, String> params) {
if (script == null) {
throw new IllegalArgumentException("The parameter script (Script) must not be null.");
}
@@ -297,7 +300,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
//the script has been updated in the index since the last look up.
final IndexedScript indexedScript = new IndexedScript(lang, name);
name = indexedScript.id;
- code = getScriptFromIndex(indexedScript.lang, indexedScript.id, context);
+ code = getScriptFromIndex(indexedScript.lang, indexedScript.id);
}
CacheKey cacheKey = new CacheKey(scriptEngineService, type == ScriptType.INLINE ? null : name, code, params);
@@ -323,7 +326,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
public void queryScriptIndex(GetIndexedScriptRequest request, final ActionListener<GetResponse> listener) {
String scriptLang = validateScriptLanguage(request.scriptLang());
- GetRequest getRequest = new GetRequest(request, SCRIPT_INDEX).type(scriptLang).id(request.id())
+ GetRequest getRequest = new GetRequest(SCRIPT_INDEX).type(scriptLang).id(request.id())
.version(request.version()).versionType(request.versionType())
.preference("_local"); //Set preference for no forking
client.get(getRequest, listener);
@@ -338,13 +341,12 @@ public class ScriptService extends AbstractComponent implements Closeable {
return scriptLang;
}
- String getScriptFromIndex(String scriptLang, String id, HasContextAndHeaders context) {
+ String getScriptFromIndex(String scriptLang, String id) {
if (client == null) {
throw new IllegalArgumentException("Got an indexed script with no Client registered.");
}
scriptLang = validateScriptLanguage(scriptLang);
GetRequest getRequest = new GetRequest(SCRIPT_INDEX, scriptLang, id);
- getRequest.copyContextAndHeadersFrom(context);
GetResponse responseFields = client.get(getRequest).actionGet();
if (responseFields.isExists()) {
return getScriptFromResponse(responseFields);
@@ -392,7 +394,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
//verify that the script compiles
validate(request.source(), scriptLang);
- IndexRequest indexRequest = new IndexRequest(request).index(SCRIPT_INDEX).type(scriptLang).id(request.id())
+ IndexRequest indexRequest = new IndexRequest().index(SCRIPT_INDEX).type(scriptLang).id(request.id())
.version(request.version()).versionType(request.versionType())
.source(request.source()).opType(request.opType()).refresh(true); //Always refresh after indexing a template
client.index(indexRequest, listener);
@@ -400,7 +402,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
public void deleteScriptFromIndex(DeleteIndexedScriptRequest request, ActionListener<DeleteResponse> listener) {
String scriptLang = validateScriptLanguage(request.scriptLang());
- DeleteRequest deleteRequest = new DeleteRequest(request).index(SCRIPT_INDEX).type(scriptLang).id(request.id())
+ DeleteRequest deleteRequest = new DeleteRequest().index(SCRIPT_INDEX).type(scriptLang).id(request.id())
.refresh(true).version(request.version()).versionType(request.versionType());
client.delete(deleteRequest, listener);
}
@@ -437,8 +439,8 @@ public class ScriptService extends AbstractComponent implements Closeable {
/**
* Compiles (or retrieves from cache) and executes the provided script
*/
- public ExecutableScript executable(Script script, ScriptContext scriptContext, HasContextAndHeaders headersContext, Map<String, String> params) {
- return executable(compile(script, scriptContext, headersContext, params), script.getParams());
+ public ExecutableScript executable(Script script, ScriptContext scriptContext, Map<String, String> params) {
+ return executable(compile(script, scriptContext, params), script.getParams());
}
/**
@@ -452,7 +454,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
* Compiles (or retrieves from cache) and executes the provided search script
*/
public SearchScript search(SearchLookup lookup, Script script, ScriptContext scriptContext, Map<String, String> params) {
- CompiledScript compiledScript = compile(script, scriptContext, SearchContext.current(), params);
+ CompiledScript compiledScript = compile(script, scriptContext, params);
return getScriptEngineServiceForLang(compiledScript.lang()).search(compiledScript, lookup, script.getParams());
}
@@ -477,7 +479,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
case OFF:
return false;
case SANDBOX:
- return scriptEngineService.sandboxed();
+ return scriptEngineService.isSandboxed();
default:
throw new IllegalArgumentException("script mode [" + mode + "] not supported");
}
@@ -536,12 +538,12 @@ public class ScriptService extends AbstractComponent implements Closeable {
try {
//we don't know yet what the script will be used for, but if all of the operations for this lang
// with file scripts are disabled, it makes no sense to even compile it and cache it.
- if (isAnyScriptContextEnabled(engineService.types()[0], engineService, ScriptType.FILE)) {
+ if (isAnyScriptContextEnabled(engineService.getTypes().get(0), engineService, ScriptType.FILE)) {
logger.info("compiling script file [{}]", file.toAbsolutePath());
try(InputStreamReader reader = new InputStreamReader(Files.newInputStream(file), StandardCharsets.UTF_8)) {
String script = Streams.copyToString(reader);
CacheKey cacheKey = new CacheKey(engineService, scriptNameExt.v1(), null, Collections.emptyMap());
- staticCache.put(cacheKey, new CompiledScript(ScriptType.FILE, scriptNameExt.v1(), engineService.types()[0], engineService.compile(script, Collections.emptyMap())));
+ staticCache.put(cacheKey, new CompiledScript(ScriptType.FILE, scriptNameExt.v1(), engineService.getTypes().get(0), engineService.compile(script, Collections.emptyMap())));
scriptMetrics.onCompilation();
}
} else {
@@ -583,14 +585,16 @@ public class ScriptService extends AbstractComponent implements Closeable {
* - loaded from an index
* - loaded from file
*/
- public static enum ScriptType {
+ public enum ScriptType {
- INLINE(0, "inline"),
- INDEXED(1, "id"),
- FILE(2, "file");
+ INLINE(0, "inline", "inline", ScriptMode.SANDBOX),
+ INDEXED(1, "id", "indexed", ScriptMode.SANDBOX),
+ FILE(2, "file", "file", ScriptMode.ON);
private final int val;
private final ParseField parseField;
+ private final String scriptType;
+ private final ScriptMode defaultScriptMode;
public static ScriptType readFrom(StreamInput in) throws IOException {
int scriptTypeVal = in.readVInt();
@@ -611,19 +615,30 @@ public class ScriptService extends AbstractComponent implements Closeable {
}
}
- private ScriptType(int val, String name) {
+ ScriptType(int val, String name, String scriptType, ScriptMode defaultScriptMode) {
this.val = val;
this.parseField = new ParseField(name);
+ this.scriptType = scriptType;
+ this.defaultScriptMode = defaultScriptMode;
}
public ParseField getParseField() {
return parseField;
}
+ public ScriptMode getDefaultScriptMode() {
+ return defaultScriptMode;
+ }
+
+ public String getScriptType() {
+ return scriptType;
+ }
+
@Override
public String toString() {
return name().toLowerCase(Locale.ROOT);
}
+
}
private static final class CacheKey {
@@ -633,7 +648,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
final Map<String, String> params;
private CacheKey(final ScriptEngineService service, final String name, final String code, final Map<String, String> params) {
- this.lang = service.types()[0];
+ this.lang = service.getTypes().get(0);
this.name = name;
this.code = code;
this.params = params;
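
The cache knobs are now typed Setting constants rather than raw string keys, so callers get parsing and defaults for free. A sketch of reading them:

    Settings settings = Settings.builder()
            .put("script.cache.max_size", 500)
            .put("script.cache.expire", "10m")
            .build();

    int maxSize = ScriptService.SCRIPT_CACHE_SIZE_SETTING.get(settings);        // 500 (default 100)
    TimeValue expire = ScriptService.SCRIPT_CACHE_EXPIRE_SETTING.get(settings);  // 10m (default 0 = disabled)
    boolean autoReload = ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.get(settings); // default true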
diff --git a/core/src/main/java/org/elasticsearch/script/ScriptSettings.java b/core/src/main/java/org/elasticsearch/script/ScriptSettings.java
new file mode 100644
index 0000000000..8ececfe25b
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/script/ScriptSettings.java
@@ -0,0 +1,165 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.script;
+
+import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Settings;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+public class ScriptSettings {
+
+ public static final String DEFAULT_LANG = "groovy";
+
+ private static final Map<ScriptService.ScriptType, Setting<ScriptMode>> SCRIPT_TYPE_SETTING_MAP;
+
+ static {
+ Map<ScriptService.ScriptType, Setting<ScriptMode>> scriptTypeSettingMap = new HashMap<>();
+ for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) {
+ scriptTypeSettingMap.put(scriptType, new Setting<>(
+ ScriptModes.sourceKey(scriptType),
+ scriptType.getDefaultScriptMode().getMode(),
+ ScriptMode::parse,
+ false,
+ Setting.Scope.CLUSTER));
+ }
+ SCRIPT_TYPE_SETTING_MAP = Collections.unmodifiableMap(scriptTypeSettingMap);
+ }
+
+ private final Map<ScriptContext, Setting<ScriptMode>> scriptContextSettingMap;
+ private final List<Setting<ScriptMode>> scriptLanguageSettings;
+ private final Setting<String> defaultScriptLanguageSetting;
+
+ public ScriptSettings(ScriptEngineRegistry scriptEngineRegistry, ScriptContextRegistry scriptContextRegistry) {
+ Map<ScriptContext, Setting<ScriptMode>> scriptContextSettingMap = contextSettings(scriptContextRegistry);
+ this.scriptContextSettingMap = Collections.unmodifiableMap(scriptContextSettingMap);
+
+ List<Setting<ScriptMode>> scriptLanguageSettings = languageSettings(SCRIPT_TYPE_SETTING_MAP, scriptContextSettingMap, scriptEngineRegistry, scriptContextRegistry);
+ this.scriptLanguageSettings = Collections.unmodifiableList(scriptLanguageSettings);
+
+ this.defaultScriptLanguageSetting = new Setting<>("script.default_lang", DEFAULT_LANG, setting -> {
+ if (!"groovy".equals(setting) && !scriptEngineRegistry.getRegisteredLanguages().containsKey(setting)) {
+ throw new IllegalArgumentException("unregistered default language [" + setting + "]");
+ }
+ return setting;
+ }, false, Setting.Scope.CLUSTER);
+ }
+
+ private static Map<ScriptContext, Setting<ScriptMode>> contextSettings(ScriptContextRegistry scriptContextRegistry) {
+ Map<ScriptContext, Setting<ScriptMode>> scriptContextSettingMap = new HashMap<>();
+ for (ScriptContext scriptContext : scriptContextRegistry.scriptContexts()) {
+ scriptContextSettingMap.put(scriptContext, new Setting<>(
+ ScriptModes.operationKey(scriptContext),
+ ScriptMode.OFF.getMode(),
+ ScriptMode::parse,
+ false,
+ Setting.Scope.CLUSTER
+ ));
+ }
+ return scriptContextSettingMap;
+ }
+
+ private static List<Setting<ScriptMode>> languageSettings(
+ Map<ScriptService.ScriptType, Setting<ScriptMode>> scriptTypeSettingMap,
+ Map<ScriptContext, Setting<ScriptMode>> scriptContextSettingMap,
+ ScriptEngineRegistry scriptEngineRegistry,
+ ScriptContextRegistry scriptContextRegistry) {
+ List<Setting<ScriptMode>> scriptModeSettings = new ArrayList<>();
+ for (Class<? extends ScriptEngineService> scriptEngineService : scriptEngineRegistry.getRegisteredScriptEngineServices()) {
+ List<String> languages = scriptEngineRegistry.getLanguages(scriptEngineService);
+
+ for (String language : languages) {
+ if (NativeScriptEngineService.TYPES.contains(language)) {
+ // native scripts are always enabled, and their settings cannot be changed
+ continue;
+ }
+ for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) {
+ for (ScriptContext scriptContext : scriptContextRegistry.scriptContexts()) {
+ Function<Settings, String> defaultSetting = settings -> {
+ // fallback logic for script mode settings
+
+ // the first fallback is the other language names registered by the same script engine service
+ // e.g., "script.engine.py.inline.aggs" is in the settings but a script with lang "python" is executed
+ Map<String, List<String>> languageSettings =
+ languages
+ .stream()
+ .map(lang -> Tuple.tuple(lang, settings.get(ScriptModes.getKey(lang, scriptType, scriptContext))))
+ .filter(tuple -> tuple.v2() != null)
+ .collect(Collectors.groupingBy(Tuple::v2, Collectors.mapping(Tuple::v1, Collectors.toList())));
+ if (!languageSettings.isEmpty()) {
+ if (languageSettings.size() > 1) {
+ throw new IllegalArgumentException("conflicting settings [" + languageSettings.toString() + "] for language [" + language + "]");
+ }
+ return languageSettings.keySet().iterator().next();
+ }
+
+ // the next fallback is global operation-based settings (e.g., "script.aggs: false")
+ Setting<ScriptMode> setting = scriptContextSettingMap.get(scriptContext);
+ if (setting.exists(settings)) {
+ return setting.get(settings).getMode();
+ }
+
+ // the next fallback is global source-based settings (e.g., "script.inline: false")
+ Setting<ScriptMode> scriptTypeSetting = scriptTypeSettingMap.get(scriptType);
+ if (scriptTypeSetting.exists(settings)) {
+ return scriptTypeSetting.get(settings).getMode();
+ }
+
+ // the final fallback is the default for the type
+ return scriptType.getDefaultScriptMode().toString();
+ };
+ Setting<ScriptMode> setting =
+ new Setting<>(
+ ScriptModes.getKey(language, scriptType, scriptContext),
+ defaultSetting,
+ ScriptMode::parse,
+ false,
+ Setting.Scope.CLUSTER);
+ scriptModeSettings.add(setting);
+ }
+ }
+ }
+ }
+ return scriptModeSettings;
+ }
+
+ public Iterable<Setting<ScriptMode>> getScriptTypeSettings() {
+ return Collections.unmodifiableCollection(SCRIPT_TYPE_SETTING_MAP.values());
+ }
+
+ public Iterable<Setting<ScriptMode>> getScriptContextSettings() {
+ return Collections.unmodifiableCollection(scriptContextSettingMap.values());
+ }
+
+ public Iterable<Setting<ScriptMode>> getScriptLanguageSettings() {
+ return scriptLanguageSettings;
+ }
+
+ public Setting<String> getDefaultScriptLanguageSetting() {
+ return defaultScriptLanguageSetting;
+ }
+}
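
Each per-language setting resolves through the fallback chain coded above: an explicit engine key (for any alias of the same engine) wins, then the global operation key, then the global source key, then the type's default. A resolution sketch, with "aggs" standing in for a registered context:

    Settings settings = Settings.builder()
            .put("script.inline", "false")   // global source-based setting
            .build();
    // Resolution for (lang "groovy", type INLINE, context "aggs"):
    //   1. "script.engine.groovy.inline.aggs" (or an alias's key) -- not set
    //   2. "script.aggs"                                          -- not set
    //   3. "script.inline"                                        -- "false" -> OFF
    //   4. otherwise INLINE's default, SANDBOX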
diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java
index e5c3e90739..739b97034b 100644
--- a/core/src/main/java/org/elasticsearch/search/SearchModule.java
+++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java
@@ -19,14 +19,6 @@
package org.elasticsearch.search;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.function.Supplier;
-
import org.apache.lucene.search.BooleanQuery;
import org.elasticsearch.common.geo.ShapesAvailability;
import org.elasticsearch.common.geo.builders.CircleBuilder;
@@ -68,6 +60,7 @@ import org.elasticsearch.index.query.MatchQueryParser;
import org.elasticsearch.index.query.MoreLikeThisQueryParser;
import org.elasticsearch.index.query.MultiMatchQueryParser;
import org.elasticsearch.index.query.NestedQueryParser;
+import org.elasticsearch.index.query.ParentIdQueryParser;
import org.elasticsearch.index.query.PrefixQueryParser;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParser;
@@ -218,6 +211,7 @@ import org.elasticsearch.search.fetch.explain.ExplainFetchSubPhase;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsFetchSubPhase;
import org.elasticsearch.search.fetch.innerhits.InnerHitsFetchSubPhase;
import org.elasticsearch.search.fetch.matchedqueries.MatchedQueriesFetchSubPhase;
+import org.elasticsearch.search.fetch.parent.ParentFieldSubFetchPhase;
import org.elasticsearch.search.fetch.script.ScriptFieldsFetchSubPhase;
import org.elasticsearch.search.fetch.source.FetchSourceSubPhase;
import org.elasticsearch.search.fetch.version.VersionFetchSubPhase;
@@ -225,9 +219,19 @@ import org.elasticsearch.search.highlight.HighlightPhase;
import org.elasticsearch.search.highlight.Highlighter;
import org.elasticsearch.search.highlight.Highlighters;
import org.elasticsearch.search.query.QueryPhase;
+import org.elasticsearch.search.rescore.QueryRescorerBuilder;
+import org.elasticsearch.search.rescore.RescoreBuilder;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.search.suggest.Suggesters;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.Supplier;
+
/**
*
*/
@@ -325,6 +329,7 @@ public class SearchModule extends AbstractModule {
bind(IndicesQueriesRegistry.class).toInstance(buildQueryParserRegistry());
configureFetchSubPhase();
configureShapes();
+ configureRescorers();
}
protected void configureFetchSubPhase() {
@@ -336,6 +341,7 @@ public class SearchModule extends AbstractModule {
fetchSubPhaseMultibinder.addBinding().to(VersionFetchSubPhase.class);
fetchSubPhaseMultibinder.addBinding().to(MatchedQueriesFetchSubPhase.class);
fetchSubPhaseMultibinder.addBinding().to(HighlightPhase.class);
+ fetchSubPhaseMultibinder.addBinding().to(ParentFieldSubFetchPhase.class);
for (Class<? extends FetchSubPhase> clazz : fetchSubPhases) {
fetchSubPhaseMultibinder.addBinding().to(clazz);
}
@@ -464,6 +470,10 @@ public class SearchModule extends AbstractModule {
}
}
+ private void configureRescorers() {
+ namedWriteableRegistry.registerPrototype(RescoreBuilder.class, QueryRescorerBuilder.PROTOTYPE);
+ }
+
private void registerBuiltinFunctionScoreParsers() {
registerFunctionScoreParser(new ScriptScoreFunctionParser());
registerFunctionScoreParser(new GaussDecayFunctionParser());
@@ -523,6 +533,7 @@ public class SearchModule extends AbstractModule {
registerQueryParser(GeoPolygonQueryParser::new);
registerQueryParser(ExistsQueryParser::new);
registerQueryParser(MatchNoneQueryParser::new);
+ registerQueryParser(ParentIdQueryParser::new);
if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) {
registerQueryParser(GeoShapeQueryParser::new);
}
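
The new parent_id parser is wired in through the same registerQueryParser hook the built-ins use above; a plugin-side sketch (MyQueryParser is hypothetical, and the onModule hook is assumed):

    public void onModule(SearchModule module) {
        module.registerQueryParser(MyQueryParser::new);
    }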
diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java
index 84d00cb54b..4c47de2c9b 100644
--- a/core/src/main/java/org/elasticsearch/search/SearchService.java
+++ b/core/src/main/java/org/elasticsearch/search/SearchService.java
@@ -23,9 +23,11 @@ import com.carrotsearch.hppc.ObjectFloatHashMap;
import com.carrotsearch.hppc.ObjectHashSet;
import com.carrotsearch.hppc.ObjectSet;
import com.carrotsearch.hppc.cursors.ObjectCursor;
+
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.TopDocs;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
@@ -60,7 +62,6 @@ import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MappedFieldType.Loading;
import org.elasticsearch.index.mapper.MapperService;
-import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.search.stats.ShardSearchStats;
@@ -100,6 +101,8 @@ import org.elasticsearch.search.query.QuerySearchRequest;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.query.QuerySearchResultProvider;
import org.elasticsearch.search.query.ScrollQuerySearchResult;
+import org.elasticsearch.search.rescore.RescoreBuilder;
+import org.elasticsearch.search.searchafter.SearchAfterBuilder;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.IOException;
@@ -122,8 +125,9 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueMinutes;
public class SearchService extends AbstractLifecycleComponent<SearchService> implements IndexEventListener {
public static final Setting<Loading> INDEX_NORMS_LOADING_SETTING = new Setting<>("index.norms.loading", Loading.LAZY.toString(), (s) -> Loading.parse(s, Loading.LAZY), false, Setting.Scope.INDEX);
- public static final String DEFAULT_KEEPALIVE_KEY = "search.default_keep_alive";
- public static final String KEEPALIVE_INTERVAL_KEY = "search.keep_alive_interval";
+ // 5 minutes is a safe default here, since contexts are cleaned up both on search requests and when a shard/index closes
+ public static final Setting<TimeValue> DEFAULT_KEEPALIVE_SETTING = Setting.positiveTimeSetting("search.default_keep_alive", timeValueMinutes(5), false, Setting.Scope.CLUSTER);
+ public static final Setting<TimeValue> KEEPALIVE_INTERVAL_SETTING = Setting.positiveTimeSetting("search.keep_alive_interval", timeValueMinutes(1), false, Setting.Scope.CLUSTER);
public static final TimeValue NO_TIMEOUT = timeValueMillis(-1);
public static final Setting<TimeValue> DEFAULT_SEARCH_TIMEOUT_SETTING = Setting.timeSetting("search.default_search_timeout", NO_TIMEOUT, true, Setting.Scope.CLUSTER);
@@ -183,9 +187,8 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
this.fetchPhase = fetchPhase;
this.indicesQueryCache = indicesQueryCache;
- TimeValue keepAliveInterval = settings.getAsTime(KEEPALIVE_INTERVAL_KEY, timeValueMinutes(1));
- // we can have 5 minutes here, since we make sure to clean with search requests and when shard/index closes
- this.defaultKeepAlive = settings.getAsTime(DEFAULT_KEEPALIVE_KEY, timeValueMinutes(5)).millis();
+ TimeValue keepAliveInterval = KEEPALIVE_INTERVAL_SETTING.get(settings);
+ this.defaultKeepAlive = DEFAULT_KEEPALIVE_SETTING.get(settings).millis();
Map<String, SearchParseElement> elementParsers = new HashMap<>();
elementParsers.putAll(dfsPhase.parseElements());
@@ -553,7 +556,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
IndexService indexService = indicesService.indexServiceSafe(request.index());
IndexShard indexShard = indexService.getShard(request.shardId());
- SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().id(), request.index(), request.shardId());
+ SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().id(), indexShard.shardId().getIndex(), request.shardId());
Engine.Searcher engineSearcher = searcher == null ? indexShard.acquireSearcher("search") : searcher;
@@ -566,7 +569,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
context.scrollContext().scroll = request.scroll();
}
if (request.template() != null) {
- ExecutableScript executable = this.scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, context, Collections.emptyMap());
+ ExecutableScript executable = this.scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, Collections.emptyMap());
BytesReference run = (BytesReference) executable.run();
try (XContentParser parser = XContentFactory.xContent(run).createParser(run)) {
QueryParseContext queryParseContext = new QueryParseContext(indicesService.getIndicesQueryRegistry());
@@ -607,7 +610,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
private void freeAllContextForIndex(Index index) {
assert index != null;
for (SearchContext ctx : activeContexts.values()) {
- if (index.equals(ctx.indexShard().shardId().index())) {
+ if (index.equals(ctx.indexShard().shardId().getIndex())) {
freeContext(ctx.id());
}
}
@@ -772,33 +775,12 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
}
}
if (source.rescores() != null) {
- XContentParser completeRescoreParser = null;
try {
- XContentBuilder completeRescoreBuilder = XContentFactory.jsonBuilder();
- completeRescoreBuilder.startObject();
- completeRescoreBuilder.startArray("rescore");
- for (BytesReference rescore : source.rescores()) {
- XContentParser parser = XContentFactory.xContent(rescore).createParser(rescore);
- parser.nextToken();
- completeRescoreBuilder.copyCurrentStructure(parser);
- }
- completeRescoreBuilder.endArray();
- completeRescoreBuilder.endObject();
- BytesReference completeRescoreBytes = completeRescoreBuilder.bytes();
- completeRescoreParser = XContentFactory.xContent(completeRescoreBytes).createParser(completeRescoreBytes);
- completeRescoreParser.nextToken();
- completeRescoreParser.nextToken();
- completeRescoreParser.nextToken();
- this.elementParsers.get("rescore").parse(completeRescoreParser, context);
- } catch (Exception e) {
- String sSource = "_na_";
- try {
- sSource = source.toString();
- } catch (Throwable e1) {
- // ignore
+ for (RescoreBuilder<?> rescore : source.rescores()) {
+ context.addRescore(rescore.build(context.indexShard().getQueryShardContext()));
}
- XContentLocation location = completeRescoreParser != null ? completeRescoreParser.getTokenLocation() : null;
- throw new SearchParseException(context, "failed to parse rescore source [" + sSource + "]", location, e);
+ } catch (IOException e) {
+ throw new SearchContextException(context, "failed to create RescoreSearchContext", e);
}
}
if (source.fields() != null) {
@@ -884,6 +866,16 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
if (source.stats() != null) {
context.groupStats(source.stats());
}
+ if (source.searchAfter() != null && source.searchAfter().length > 0) {
+ if (context.scrollContext() != null) {
+ throw new SearchContextException(context, "`search_after` cannot be used in a scroll context.");
+ }
+ if (context.from() > 0) {
+ throw new SearchContextException(context, "`from` parameter must be set to 0 when `search_after` is used.");
+ }
+ FieldDoc fieldDoc = SearchAfterBuilder.buildFieldDoc(context.sort(), source.searchAfter());
+ context.searchAfter(fieldDoc);
+ }
}
private static final int[] EMPTY_DOC_IDS = new int[0];
@@ -1033,22 +1025,8 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
final Map<String, MappedFieldType> warmUp = new HashMap<>();
for (DocumentMapper docMapper : mapperService.docMappers(false)) {
for (FieldMapper fieldMapper : docMapper.mappers()) {
- final FieldDataType fieldDataType;
- final String indexName;
- if (fieldMapper instanceof ParentFieldMapper) {
- MappedFieldType joinFieldType = ((ParentFieldMapper) fieldMapper).getChildJoinFieldType();
- if (joinFieldType == null) {
- continue;
- }
- fieldDataType = joinFieldType.fieldDataType();
- // TODO: this can be removed in 3.0 when the old parent/child impl is removed:
- // related to: https://github.com/elastic/elasticsearch/pull/12418
- indexName = fieldMapper.fieldType().name();
- } else {
- fieldDataType = fieldMapper.fieldType().fieldDataType();
- indexName = fieldMapper.fieldType().name();
- }
-
+ final FieldDataType fieldDataType = fieldMapper.fieldType().fieldDataType();
+ final String indexName = fieldMapper.fieldType().name();
if (fieldDataType == null) {
continue;
}
@@ -1101,21 +1079,8 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
final Map<String, MappedFieldType> warmUpGlobalOrdinals = new HashMap<>();
for (DocumentMapper docMapper : mapperService.docMappers(false)) {
for (FieldMapper fieldMapper : docMapper.mappers()) {
- final FieldDataType fieldDataType;
- final String indexName;
- if (fieldMapper instanceof ParentFieldMapper) {
- MappedFieldType joinFieldType = ((ParentFieldMapper) fieldMapper).getChildJoinFieldType();
- if (joinFieldType == null) {
- continue;
- }
- fieldDataType = joinFieldType.fieldDataType();
- // TODO: this can be removed in 3.0 when the old parent/child impl is removed:
- // related to: https://github.com/elastic/elasticsearch/pull/12418
- indexName = fieldMapper.fieldType().name();
- } else {
- fieldDataType = fieldMapper.fieldType().fieldDataType();
- indexName = fieldMapper.fieldType().name();
- }
+ final FieldDataType fieldDataType = fieldMapper.fieldType().fieldDataType();
+ final String indexName = fieldMapper.fieldType().name();
if (fieldDataType == null) {
continue;
}
@@ -1167,7 +1132,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
// Use the same value for both checks since lastAccessTime can
// be modified by another thread between checks!
final long lastAccessTime = context.lastAccessTime();
- if (lastAccessTime == -1l) { // its being processed or timeout is disabled
+ if (lastAccessTime == -1L) { // its being processed or timeout is disabled
continue;
}
if ((time - lastAccessTime > context.keepAlive())) {
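
Both keepalive knobs are now typed settings with their defaults baked in, and search_after arrives with two hard constraints (no scroll context, from must be 0). A sketch of reading the new settings:

    Settings settings = Settings.EMPTY;
    TimeValue defaultKeepAlive = SearchService.DEFAULT_KEEPALIVE_SETTING.get(settings); // 5m
    TimeValue interval = SearchService.KEEPALIVE_INTERVAL_SETTING.get(settings);        // 1m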
diff --git a/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java b/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java
index 87a2114a78..d3958505d7 100644
--- a/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java
+++ b/core/src/main/java/org/elasticsearch/search/SearchShardTarget.java
@@ -24,6 +24,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.text.Text;
+import org.elasticsearch.index.Index;
import java.io.IOException;
@@ -40,9 +41,9 @@ public class SearchShardTarget implements Streamable, Comparable<SearchShardTarg
}
- public SearchShardTarget(String nodeId, String index, int shardId) {
+ public SearchShardTarget(String nodeId, Index index, int shardId) {
this.nodeId = nodeId == null ? null : new Text(nodeId);
- this.index = new Text(index);
+ this.index = new Text(index.getName());
this.shardId = shardId;
}
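
Call sites now pass the strongly typed Index instead of its name; the SearchService change above shows the pattern. A sketch:

    Index index = indexShard.shardId().getIndex();   // org.elasticsearch.index.Index
    SearchShardTarget target = new SearchShardTarget(nodeId, index, shardId);
    // stored internally as new Text(index.getName())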
diff --git a/core/src/main/java/org/elasticsearch/search/action/SearchServiceTransportAction.java b/core/src/main/java/org/elasticsearch/search/action/SearchServiceTransportAction.java
index 6e2bdf932f..138b215e68 100644
--- a/core/src/main/java/org/elasticsearch/search/action/SearchServiceTransportAction.java
+++ b/core/src/main/java/org/elasticsearch/search/action/SearchServiceTransportAction.java
@@ -125,7 +125,7 @@ public class SearchServiceTransportAction extends AbstractComponent {
}
public void sendClearAllScrollContexts(DiscoveryNode node, ClearScrollRequest request, final ActionListener<TransportResponse> listener) {
- transportService.sendRequest(node, CLEAR_SCROLL_CONTEXTS_ACTION_NAME, new ClearScrollContextsRequest(request), new ActionListenerResponseHandler<TransportResponse>(listener) {
+ transportService.sendRequest(node, CLEAR_SCROLL_CONTEXTS_ACTION_NAME, new ClearScrollContextsRequest(), new ActionListenerResponseHandler<TransportResponse>(listener) {
@Override
public TransportResponse newInstance() {
return TransportResponse.Empty.INSTANCE;
@@ -220,11 +220,10 @@ public class SearchServiceTransportAction extends AbstractComponent {
}
ScrollFreeContextRequest(ClearScrollRequest request, long id) {
- this((TransportRequest) request, id);
+ this(id);
}
- private ScrollFreeContextRequest(TransportRequest request, long id) {
- super(request);
+ private ScrollFreeContextRequest(long id) {
this.id = id;
}
@@ -252,7 +251,7 @@ public class SearchServiceTransportAction extends AbstractComponent {
}
SearchFreeContextRequest(SearchRequest request, long id) {
- super(request, id);
+ super(id);
this.originalIndices = new OriginalIndices(request);
}
@@ -322,14 +321,6 @@ public class SearchServiceTransportAction extends AbstractComponent {
}
public static class ClearScrollContextsRequest extends TransportRequest {
-
- public ClearScrollContextsRequest() {
- }
-
- ClearScrollContextsRequest(TransportRequest request) {
- super(request);
- }
-
}
class ClearScrollContextsTransportHandler implements TransportRequestHandler<ClearScrollContextsRequest> {
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java b/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java
index 1c67a941da..04b1026446 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java
@@ -18,8 +18,6 @@
*/
package org.elasticsearch.search.aggregations;
-import org.elasticsearch.common.DelegatingHasContextAndHeaders;
-import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
@@ -92,13 +90,12 @@ public abstract class InternalAggregation implements Aggregation, ToXContent, St
}
}
- public static class ReduceContext extends DelegatingHasContextAndHeaders {
+ public static class ReduceContext {
private final BigArrays bigArrays;
private ScriptService scriptService;
- public ReduceContext(BigArrays bigArrays, ScriptService scriptService, HasContextAndHeaders headersContext) {
- super(headersContext);
+ public ReduceContext(BigArrays bigArrays, ScriptService scriptService) {
this.bigArrays = bigArrays;
this.scriptService = scriptService;
}
@@ -106,7 +103,7 @@ public abstract class InternalAggregation implements Aggregation, ToXContent, St
public BigArrays bigArrays() {
return bigArrays;
}
-
+
public ScriptService scriptService() {
return scriptService;
}
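
A ReduceContext is now just a pair of services, with no header or context delegation threaded through reductions. A sketch of constructing one for a manual reduce (both arguments assumed injected elsewhere):

    InternalAggregation.ReduceContext reduceContext =
            new InternalAggregation.ReduceContext(bigArrays, scriptService);
    InternalAggregations reduced = InternalAggregations.reduce(aggregationsList, reduceContext);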
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java
index 60302f2da4..6473b5ae7f 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java
@@ -62,8 +62,7 @@ public class GeoHashGridParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
- ValuesSourceParser<ValuesSource.GeoPoint> vsParser = ValuesSourceParser.geoPoint(aggregationName, InternalGeoHashGrid.TYPE, context)
- .build();
+ ValuesSourceParser vsParser = ValuesSourceParser.geoPoint(aggregationName, InternalGeoHashGrid.TYPE, context).build();
int precision = GeoHashGridParams.DEFAULT_PRECISION;
int requiredSize = GeoHashGridParams.DEFAULT_MAX_NUM_CELLS;
@@ -132,7 +131,6 @@ public class GeoHashGridParser implements Aggregator.Parser {
final InternalAggregation aggregation = new InternalGeoHashGrid(name, requiredSize,
Collections.<InternalGeoHashGrid.Bucket> emptyList(), pipelineAggregators, metaData);
return new NonCollectingAggregator(name, aggregationContext, parent, pipelineAggregators, metaData) {
- @Override
public InternalAggregation buildEmptyAggregation() {
return aggregation;
}
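
The same generics stripping repeats across the parsers below: ValuesSourceParser and ValuesSourceConfig become raw types, so the parse/config pair looks like this everywhere (names illustrative):

    ValuesSourceParser vsParser = ValuesSourceParser.geoPoint(aggregationName, InternalGeoHashGrid.TYPE, context).build();
    // ... consume the aggregation-specific tokens from the XContentParser ...
    ValuesSourceConfig config = vsParser.config();            // raw config handed to the factory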
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramParser.java
index 52d77e1594..694abf26e6 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramParser.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramParser.java
@@ -28,7 +28,6 @@ import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValueType;
-import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.internal.SearchContext;
@@ -79,7 +78,7 @@ public class DateHistogramParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
- ValuesSourceParser<Numeric> vsParser = ValuesSourceParser.numeric(aggregationName, InternalDateHistogram.TYPE, context)
+ ValuesSourceParser vsParser = ValuesSourceParser.numeric(aggregationName, InternalDateHistogram.TYPE, context)
.targetValueType(ValueType.DATE)
.formattable(true)
.timezoneAware(true)
@@ -191,7 +190,7 @@ public class DateHistogramParser implements Aggregator.Parser {
.timeZone(vsParser.input().timezone())
.offset(offset).build();
- ValuesSourceConfig<Numeric> config = vsParser.config();
+ ValuesSourceConfig config = vsParser.config();
return new HistogramAggregator.Factory(aggregationName, config, rounding, order, keyed, minDocCount, extendedBounds,
new InternalDateHistogram.Factory());
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramParser.java
index 31ee6681d4..c738251d0e 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramParser.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramParser.java
@@ -25,7 +25,6 @@ import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValueType;
-import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
import org.elasticsearch.search.internal.SearchContext;
@@ -47,7 +46,7 @@ public class HistogramParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
- ValuesSourceParser<Numeric> vsParser = ValuesSourceParser.numeric(aggregationName, InternalHistogram.TYPE, context)
+ ValuesSourceParser vsParser = ValuesSourceParser.numeric(aggregationName, InternalHistogram.TYPE, context)
.targetValueType(ValueType.NUMERIC)
.formattable(true)
.build();
@@ -128,7 +127,7 @@ public class HistogramParser implements Aggregator.Parser {
Rounding rounding = new Rounding.Interval(interval);
if (offset != 0) {
- rounding = new Rounding.OffsetRounding(rounding, offset);
+ rounding = new Rounding.OffsetRounding((Rounding.Interval) rounding, offset);
}
if (extendedBounds != null) {
@@ -137,7 +136,7 @@ public class HistogramParser implements Aggregator.Parser {
}
return new HistogramAggregator.Factory(aggregationName, vsParser.config(), rounding, order, keyed, minDocCount, extendedBounds,
- new InternalHistogram.Factory<>());
+ new InternalHistogram.Factory());
}
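
The cast is needed because Rounding.OffsetRounding now requires the concrete Rounding.Interval instead of any Rounding. A sketch of the offset semantics, assuming the usual shift-round-shift behavior (values illustrative):

    Rounding rounding = new Rounding.OffsetRounding(new Rounding.Interval(10), 3);
    long key = rounding.round(17);   // (17 - 3) rounds down to 10, plus the offset 3 -> bucket key 13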
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java
index 38e15e216c..1ae7341f61 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java
@@ -81,9 +81,9 @@ public class MissingAggregator extends SingleBucketAggregator {
return new InternalMissing(name, 0, buildEmptySubAggregations(), pipelineAggregators(), metaData());
}
- public static class Factory extends ValuesSourceAggregatorFactory<ValuesSource> {
+ public static class Factory extends ValuesSourceAggregatorFactory<ValuesSource> {
- public Factory(String name, ValuesSourceConfig<ValuesSource> valueSourceConfig) {
+ public Factory(String name, ValuesSourceConfig valueSourceConfig) {
super(name, InternalMissing.TYPE.name(), valueSourceConfig);
}
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingParser.java
index 4210e020d8..6ecdc129dd 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingParser.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingParser.java
@@ -22,7 +22,6 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.internal.SearchContext;
@@ -40,7 +39,8 @@ public class MissingParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
- ValuesSourceParser<ValuesSource> vsParser = ValuesSourceParser.any(aggregationName, InternalMissing.TYPE, context)
+
+ ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, InternalMissing.TYPE, context)
.scriptable(false)
.build();
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java
index 4541aa9d14..8cb980954c 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java
@@ -203,8 +203,7 @@ public class SamplerAggregator extends SingleBucketAggregator {
private int maxDocsPerValue;
private String executionHint;
- public DiversifiedFactory(String name, int shardSize, String executionHint, ValuesSourceConfig<ValuesSource> vsConfig,
- int maxDocsPerValue) {
+ public DiversifiedFactory(String name, int shardSize, String executionHint, ValuesSourceConfig vsConfig, int maxDocsPerValue) {
super(name, InternalSampler.TYPE.name(), vsConfig);
this.shardSize = shardSize;
this.maxDocsPerValue = maxDocsPerValue;
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerParser.java
index d51f43657f..498a7cb5c6 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerParser.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerParser.java
@@ -23,7 +23,6 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.internal.SearchContext;
@@ -56,10 +55,10 @@ public class SamplerParser implements Aggregator.Parser {
String executionHint = null;
int shardSize = DEFAULT_SHARD_SAMPLE_SIZE;
int maxDocsPerValue = MAX_DOCS_PER_VALUE_DEFAULT;
+ ValuesSourceParser vsParser = null;
boolean diversityChoiceMade = false;
- ValuesSourceParser<ValuesSource> vsParser = ValuesSourceParser.any(aggregationName, InternalSampler.TYPE, context).scriptable(true)
- .formattable(false).build();
+ vsParser = ValuesSourceParser.any(aggregationName, InternalSampler.TYPE, context).scriptable(true).formattable(false).build();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
@@ -89,7 +88,7 @@ public class SamplerParser implements Aggregator.Parser {
}
}
- ValuesSourceConfig<ValuesSource> vsConfig = vsParser.config();
+ ValuesSourceConfig vsConfig = vsParser.config();
if (vsConfig.valid()) {
return new SamplerAggregator.DiversifiedFactory(aggregationName, shardSize, executionHint, vsConfig, maxDocsPerValue);
} else {
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java
index 9b66fe01b6..399e85728a 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java
@@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations.bucket.significant;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.PostingsEnum;
+import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchException;
@@ -79,6 +80,8 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggregatorFactory,
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
+ ValuesSource.Bytes.WithOrdinals valueSourceWithOrdinals = (ValuesSource.Bytes.WithOrdinals) valuesSource;
+ IndexSearcher indexSearcher = aggregationContext.searchContext().searcher();
final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter();
return new GlobalOrdinalsSignificantTermsAggregator(name, factories,
(ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, bucketCountThresholds, filter, aggregationContext,
@@ -95,8 +98,9 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter();
return new GlobalOrdinalsSignificantTermsAggregator.WithHash(name, factories,
- (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, bucketCountThresholds, filter, aggregationContext, parent,
- termsAggregatorFactory, pipelineAggregators, metaData);
+ (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, bucketCountThresholds, filter,
+ aggregationContext,
+ parent, termsAggregatorFactory, pipelineAggregators, metaData);
}
};
@@ -139,7 +143,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
return new TermsAggregator.BucketCountThresholds(bucketCountThresholds);
}
- public SignificantTermsAggregatorFactory(String name, ValuesSourceConfig<ValuesSource> valueSourceConfig, TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
+ public SignificantTermsAggregatorFactory(String name, ValuesSourceConfig valueSourceConfig, TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
String executionHint, Query filter, SignificanceHeuristic significanceHeuristic) {
super(name, SignificantStringTerms.TYPE.name(), valueSourceConfig);
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java
index b4b89c29c3..28e0fb5a81 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java
@@ -28,7 +28,6 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.Signi
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParserMapper;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
-import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.internal.SearchContext;
@@ -54,7 +53,7 @@ public class SignificantTermsParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
SignificantTermsParametersParser aggParser = new SignificantTermsParametersParser(significanceHeuristicParserMapper);
- ValuesSourceParser<ValuesSource> vsParser = ValuesSourceParser.any(aggregationName, SignificantStringTerms.TYPE, context)
+ ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, SignificantStringTerms.TYPE, context)
.scriptable(false)
.formattable(true)
.build();
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java
index 9efea00051..a160451265 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java
@@ -87,7 +87,7 @@ public class ScriptHeuristic extends SignificanceHeuristic {
@Override
public void initialize(InternalAggregation.ReduceContext context) {
- searchScript = context.scriptService().executable(script, ScriptContext.Standard.AGGS, context, Collections.emptyMap());
+ searchScript = context.scriptService().executable(script, ScriptContext.Standard.AGGS, Collections.emptyMap());
searchScript.setNextVar("_subset_freq", subsetDfHolder);
searchScript.setNextVar("_subset_size", subsetSizeHolder);
searchScript.setNextVar("_superset_freq", supersetDfHolder);
@@ -175,7 +175,7 @@ public class ScriptHeuristic extends SignificanceHeuristic {
}
ExecutableScript searchScript;
try {
- searchScript = scriptService.executable(script, ScriptContext.Standard.AGGS, context, Collections.emptyMap());
+ searchScript = scriptService.executable(script, ScriptContext.Standard.AGGS, Collections.emptyMap());
} catch (Exception e) {
throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. the script [{}] could not be loaded", e, script, heuristicName);
}
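
Both call sites show the trimmed ScriptService surface: executable and compile no longer take a HasContextAndHeaders argument. A consolidated sketch (the script assumed validated elsewhere):

    ExecutableScript exec = scriptService.executable(script, ScriptContext.Standard.AGGS, Collections.emptyMap());
    exec.setNextVar("_subset_freq", 1L);
    Object score = exec.run();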
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractTermsParametersParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractTermsParametersParser.java
index ecd9d3bc69..891526c33c 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractTermsParametersParser.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractTermsParametersParser.java
@@ -36,13 +36,13 @@ public abstract class AbstractTermsParametersParser {
public static final ParseField SHARD_MIN_DOC_COUNT_FIELD_NAME = new ParseField("shard_min_doc_count");
public static final ParseField REQUIRED_SIZE_FIELD_NAME = new ParseField("size");
public static final ParseField SHOW_TERM_DOC_COUNT_ERROR = new ParseField("show_term_doc_count_error");
-
+
//These are the results of the parsing.
private TermsAggregator.BucketCountThresholds bucketCountThresholds = new TermsAggregator.BucketCountThresholds();
private String executionHint = null;
-
+
private SubAggCollectionMode collectMode = SubAggCollectionMode.DEPTH_FIRST;
@@ -59,12 +59,12 @@ public abstract class AbstractTermsParametersParser {
public IncludeExclude getIncludeExclude() {
return includeExclude;
}
-
+
public SubAggCollectionMode getCollectionMode() {
return collectMode;
}
- public void parse(String aggregationName, XContentParser parser, SearchContext context, ValuesSourceParser<?> vsParser, IncludeExclude.Parser incExcParser) throws IOException {
+ public void parse(String aggregationName, XContentParser parser, SearchContext context, ValuesSourceParser vsParser, IncludeExclude.Parser incExcParser) throws IOException {
bucketCountThresholds = getDefaultBucketCountThresholds();
XContentParser.Token token;
String currentFieldName = null;
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java
index 04f7adf27b..270dc009af 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java
@@ -165,7 +165,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
private final TermsAggregator.BucketCountThresholds bucketCountThresholds;
private final boolean showTermDocCountError;
- public TermsAggregatorFactory(String name, ValuesSourceConfig<ValuesSource> config, Terms.Order order,
+ public TermsAggregatorFactory(String name, ValuesSourceConfig config, Terms.Order order,
TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude, String executionHint,
SubAggCollectionMode executionMode, boolean showTermDocCountError) {
super(name, StringTerms.TYPE.name(), config);
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java
index a7b60e930f..478309d1bc 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java
@@ -25,7 +25,6 @@ import org.elasticsearch.search.aggregations.bucket.BucketUtils;
import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order;
import org.elasticsearch.search.aggregations.bucket.terms.TermsParametersParser.OrderElement;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
-import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.internal.SearchContext;
@@ -46,8 +45,7 @@ public class TermsParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
TermsParametersParser aggParser = new TermsParametersParser();
- ValuesSourceParser<ValuesSource> vsParser = ValuesSourceParser.any(aggregationName, StringTerms.TYPE, context).scriptable(true)
- .formattable(true).build();
+ ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, StringTerms.TYPE, context).scriptable(true).formattable(true).build();
IncludeExclude.Parser incExcParser = new IncludeExclude.Parser();
aggParser.parse(aggregationName, parser, context, vsParser, incExcParser);
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/ValuesSourceMetricsAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/ValuesSourceMetricsAggregationBuilder.java
index 81933144a5..e675548622 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/ValuesSourceMetricsAggregationBuilder.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/ValuesSourceMetricsAggregationBuilder.java
@@ -62,7 +62,6 @@ public abstract class ValuesSourceMetricsAggregationBuilder<B extends ValuesSour
/**
* Configure the value to use when documents miss a value.
*/
- @SuppressWarnings("unchecked")
public B missing(Object missingValue) {
this.missing = missingValue;
return (B) this;
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregator.java
index 67a6f19f72..17506b79e2 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregator.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregator.java
@@ -110,7 +110,7 @@ public class AvgAggregator extends NumericMetricsAggregator.SingleValue {
@Override
public InternalAggregation buildEmptyAggregation() {
- return new InternalAvg(name, 0.0, 0l, formatter, pipelineAggregators(), metaData());
+ return new InternalAvg(name, 0.0, 0L, formatter, pipelineAggregators(), metaData());
}
public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly<ValuesSource.Numeric> {
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregatorFactory.java
index 4df8dc7260..1b2d5fc101 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregatorFactory.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregatorFactory.java
@@ -35,7 +35,7 @@ final class CardinalityAggregatorFactory extends ValuesSourceAggregatorFactory.L
private final long precisionThreshold;
- CardinalityAggregatorFactory(String name, ValuesSourceConfig<ValuesSource> config, long precisionThreshold) {
+ CardinalityAggregatorFactory(String name, ValuesSourceConfig config, long precisionThreshold) {
super(name, InternalCardinality.TYPE.name(), config);
this.precisionThreshold = precisionThreshold;
}
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java
index 3155232645..68339457fe 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java
@@ -24,7 +24,6 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.internal.SearchContext;
@@ -44,7 +43,7 @@ public class CardinalityParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String name, XContentParser parser, SearchContext context) throws IOException {
- ValuesSourceParser<ValuesSource> vsParser = ValuesSourceParser.any(name, InternalCardinality.TYPE, context).formattable(false).build();
+ ValuesSourceParser<?> vsParser = ValuesSourceParser.any(name, InternalCardinality.TYPE, context).formattable(false).build();
long precisionThreshold = -1;
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregator.java
index b99db25a65..b5739f53f4 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregator.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregator.java
@@ -114,7 +114,7 @@ public final class GeoCentroidAggregator extends MetricsAggregator {
@Override
public InternalAggregation buildEmptyAggregation() {
- return new InternalGeoCentroid(name, null, 0l, pipelineAggregators(), metaData());
+ return new InternalGeoCentroid(name, null, 0L, pipelineAggregators(), metaData());
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java
index 00c6b6b49b..3a516c690e 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java
@@ -92,7 +92,7 @@ public class InternalScriptedMetric extends InternalMetricsAggregation implement
vars.putAll(firstAggregation.reduceScript.getParams());
}
CompiledScript compiledScript = reduceContext.scriptService().compile(firstAggregation.reduceScript,
- ScriptContext.Standard.AGGS, reduceContext, Collections.emptyMap());
+ ScriptContext.Standard.AGGS, Collections.emptyMap());
ExecutableScript script = reduceContext.scriptService().executable(compiledScript, vars);
aggregation = script.run();
} else {
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java
index 6603c6289b..68d886a7bf 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java
@@ -59,11 +59,11 @@ public class ScriptedMetricAggregator extends MetricsAggregator {
this.params = params;
ScriptService scriptService = context.searchContext().scriptService();
if (initScript != null) {
- scriptService.executable(initScript, ScriptContext.Standard.AGGS, context.searchContext(), Collections.emptyMap()).run();
+ scriptService.executable(initScript, ScriptContext.Standard.AGGS, Collections.emptyMap()).run();
}
this.mapScript = scriptService.search(context.searchContext().lookup(), mapScript, ScriptContext.Standard.AGGS, Collections.emptyMap());
if (combineScript != null) {
- this.combineScript = scriptService.executable(combineScript, ScriptContext.Standard.AGGS, context.searchContext(), Collections.emptyMap());
+ this.combineScript = scriptService.executable(combineScript, ScriptContext.Standard.AGGS, Collections.emptyMap());
} else {
this.combineScript = null;
}
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregator.java
index 867d87633b..2d1d9dbb91 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregator.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregator.java
@@ -100,7 +100,7 @@ public class ValueCountAggregator extends NumericMetricsAggregator.SingleValue {
@Override
public InternalAggregation buildEmptyAggregation() {
- return new InternalValueCount(name, 0l, formatter, pipelineAggregators(), metaData());
+ return new InternalValueCount(name, 0L, formatter, pipelineAggregators(), metaData());
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java
index 0a9ea4a907..764f6ce938 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java
@@ -40,7 +40,7 @@ public class ValueCountParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
- ValuesSourceParser<?> vsParser = ValuesSourceParser.any(aggregationName, InternalValueCount.TYPE, context)
+ ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, InternalValueCount.TYPE, context)
.build();
XContentParser.Token token;
@@ -54,6 +54,6 @@ public class ValueCountParser implements Aggregator.Parser {
}
}
- return new ValueCountAggregator.Factory<>(aggregationName, vsParser.config());
+ return new ValueCountAggregator.Factory(aggregationName, vsParser.config());
}
}
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java
index 76cb15ed46..4da355f4f8 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java
@@ -94,7 +94,7 @@ public class BucketScriptPipelineAggregator extends PipelineAggregator {
InternalMultiBucketAggregation<InternalMultiBucketAggregation, InternalMultiBucketAggregation.InternalBucket> originalAgg = (InternalMultiBucketAggregation<InternalMultiBucketAggregation, InternalMultiBucketAggregation.InternalBucket>) aggregation;
List<? extends Bucket> buckets = originalAgg.getBuckets();
- CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS, reduceContext, Collections.emptyMap());
+ CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS, Collections.emptyMap());
List newBuckets = new ArrayList<>();
for (Bucket bucket : buckets) {
Map<String, Object> vars = new HashMap<>();
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorPipelineAggregator.java
index edc3b4e87c..1032d0f017 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorPipelineAggregator.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorPipelineAggregator.java
@@ -89,7 +89,7 @@ public class BucketSelectorPipelineAggregator extends PipelineAggregator {
InternalMultiBucketAggregation<InternalMultiBucketAggregation, InternalMultiBucketAggregation.InternalBucket> originalAgg = (InternalMultiBucketAggregation<InternalMultiBucketAggregation, InternalMultiBucketAggregation.InternalBucket>) aggregation;
List<? extends Bucket> buckets = originalAgg.getBuckets();
- CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS, reduceContext, Collections.emptyMap());
+ CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS, Collections.emptyMap());
List newBuckets = new ArrayList<>();
for (Bucket bucket : buckets) {
Map<String, Object> vars = new HashMap<>();
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java
index d9fe3ad66c..b03bc8d683 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java
@@ -53,9 +53,6 @@ import org.elasticsearch.search.aggregations.support.values.ScriptLongValues;
import java.io.IOException;
-/**
- * How to load values for an aggregation.
- */
public abstract class ValuesSource {
/**
@@ -531,7 +528,6 @@ public abstract class ValuesSource {
return indexFieldData.load(context).getBytesValues();
}
- @Override
public org.elasticsearch.index.fielddata.MultiGeoPointValues geoPointValues(LeafReaderContext context) {
return indexFieldData.load(context).getGeoPointValues();
}
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java
index 3f56162a2f..d0eaec2d8b 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java
@@ -78,20 +78,19 @@ public abstract class ValuesSourceAggregatorFactory<VS extends ValuesSource> ext
boolean collectsFromSingleBucket, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
throws IOException;
- @SuppressWarnings("unchecked") // Safe because we check the types with isAssignableFrom
private void resolveValuesSourceConfigFromAncestors(String aggName, AggregatorFactory parent, Class<VS> requiredValuesSourceType) {
- ValuesSourceConfig<?> config;
+ ValuesSourceConfig config;
while (parent != null) {
if (parent instanceof ValuesSourceAggregatorFactory) {
- config = ((ValuesSourceAggregatorFactory<?>) parent).config;
+ config = ((ValuesSourceAggregatorFactory) parent).config;
if (config != null && config.valid()) {
if (requiredValuesSourceType == null || requiredValuesSourceType.isAssignableFrom(config.valueSourceType)) {
ValueFormat format = config.format;
- this.config = (ValuesSourceConfig<VS>) config;
+ this.config = config;
// if the user explicitly defined a format pattern, we'll do our best to keep it even when we inherit the
// value source from one of the ancestor aggregations
if (this.config.formatPattern != null && format != null && format instanceof ValueFormat.Patternable) {
- this.config.format = ((ValueFormat.Patternable<?>) format).create(this.config.formatPattern);
+ this.config.format = ((ValueFormat.Patternable) format).create(this.config.formatPattern);
}
return;
}
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java
index 7c26061917..fced5fdc91 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java
@@ -48,16 +48,13 @@ import java.util.HashMap;
import java.util.Map;
/**
- * Parses a description of where to load the value sent by a user into a
- * ValuesSourceConfig which can be used to work with the values in various ways,
- * one of which is to create an actual ValueSource (done with the help of
- * AggregationContext).
+ *
*/
public class ValuesSourceParser<VS extends ValuesSource> {
static final ParseField TIME_ZONE = new ParseField("time_zone");
- public static Builder<ValuesSource> any(String aggName, InternalAggregation.Type aggType, SearchContext context) {
+ public static Builder any(String aggName, InternalAggregation.Type aggType, SearchContext context) {
return new Builder<>(aggName, aggType, context, ValuesSource.class);
}
diff --git a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java
index 9f3c2dda12..bc15198337 100644
--- a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java
+++ b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java
@@ -41,12 +41,14 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.script.Script;
+import org.elasticsearch.search.searchafter.SearchAfterBuilder;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
import org.elasticsearch.search.highlight.HighlightBuilder;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.rescore.RescoreBuilder;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
@@ -93,6 +95,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
public static final ParseField STATS_FIELD = new ParseField("stats");
public static final ParseField EXT_FIELD = new ParseField("ext");
public static final ParseField PROFILE_FIELD = new ParseField("profile");
+ public static final ParseField SEARCH_AFTER = new ParseField("search_after");
private static final SearchSourceBuilder PROTOTYPE = new SearchSourceBuilder();
@@ -134,6 +137,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
private boolean trackScores = false;
+ private SearchAfterBuilder searchAfterBuilder;
+
private Float minScore;
private long timeoutInMillis = -1;
@@ -152,7 +157,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
private BytesReference innerHitsBuilder;
- private List<BytesReference> rescoreBuilders;
+ private List<RescoreBuilder<?>> rescoreBuilders;
private ObjectFloatHashMap<String> indexBoost = null;
@@ -380,6 +385,28 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
return trackScores;
}
+
+ /**
+ * The sort values that indicate which docs this request should "search after".
+ * The number of sort values provided must be equal to the number of sort fields in the query and they should be
+ * of the same type (or parsable as such).
+ * Defaults to <tt>null</tt>.
+ */
+ public Object[] searchAfter() {
+ if (searchAfterBuilder == null) {
+ return null;
+ }
+ return searchAfterBuilder.getSortValues();
+ }
+
+ /**
+ * Set the sort values that indicate which docs this request should "search after".
+ */
+ public SearchSourceBuilder searchAfter(Object[] values) {
+ this.searchAfterBuilder = new SearchAfterBuilder().setSortValues(values);
+ return this;
+ }
+
/**
* Add an aggregation to perform as part of the search.
*/
@@ -459,20 +486,12 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
return suggestBuilder;
}
- public SearchSourceBuilder addRescorer(RescoreBuilder rescoreBuilder) {
- try {
+ public SearchSourceBuilder addRescorer(RescoreBuilder<?> rescoreBuilder) {
if (rescoreBuilders == null) {
rescoreBuilders = new ArrayList<>();
}
- XContentBuilder builder = XContentFactory.jsonBuilder();
- builder.startObject();
- rescoreBuilder.toXContent(builder, EMPTY_PARAMS);
- builder.endObject();
- rescoreBuilders.add(builder.bytes());
+ rescoreBuilders.add(rescoreBuilder);
return this;
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
}
public SearchSourceBuilder clearRescorers() {
@@ -499,7 +518,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
/**
* Gets the bytes representing the rescore builders for this request.
*/
- public List<BytesReference> rescores() {
+ public List<RescoreBuilder<?>> rescores() {
return rescoreBuilders;
}
@@ -883,8 +902,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
} else if (context.parseFieldMatcher().match(currentFieldName, RESCORE_FIELD)) {
rescoreBuilders = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
- XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().copyCurrentStructure(parser);
- rescoreBuilders.add(xContentBuilder.bytes());
+ rescoreBuilders.add(RescoreBuilder.parseFromXContent(context));
}
} else if (context.parseFieldMatcher().match(currentFieldName, STATS_FIELD)) {
stats = new ArrayList<>();
@@ -898,6 +916,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
}
} else if (context.parseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) {
fetchSourceContext = FetchSourceContext.parse(parser, context);
+ } else if (context.parseFieldMatcher().match(currentFieldName, SEARCH_AFTER)) {
+ searchAfterBuilder = SearchAfterBuilder.PROTOTYPE.fromXContent(parser, context.parseFieldMatcher());
} else {
throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
parser.getTokenLocation());
@@ -1003,6 +1023,10 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
builder.field(TRACK_SCORES_FIELD.getPreferredName(), true);
}
+ if (searchAfterBuilder != null) {
+ builder.field(SEARCH_AFTER.getPreferredName(), searchAfterBuilder.getSortValues());
+ }
+
if (indexBoost != null) {
builder.startObject(INDICES_BOOST_FIELD.getPreferredName());
assert !indexBoost.containsKey(null);
@@ -1048,10 +1072,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
if (rescoreBuilders != null) {
builder.startArray(RESCORE_FIELD.getPreferredName());
- for (BytesReference rescoreBuilder : rescoreBuilders) {
- XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(rescoreBuilder);
- parser.nextToken();
- builder.copyCurrentStructure(parser);
+ for (RescoreBuilder<?> rescoreBuilder : rescoreBuilders) {
+ rescoreBuilder.toXContent(builder, params);
}
builder.endArray();
}
@@ -1197,9 +1219,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
}
if (in.readBoolean()) {
int size = in.readVInt();
- List<BytesReference> rescoreBuilders = new ArrayList<>();
+ List<RescoreBuilder<?>> rescoreBuilders = new ArrayList<>();
for (int i = 0; i < size; i++) {
- rescoreBuilders.add(in.readBytesReference());
+ rescoreBuilders.add(in.readRescorer());
}
builder.rescoreBuilders = rescoreBuilders;
}
@@ -1243,6 +1265,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
} else {
builder.profile = false;
}
+ if (in.readBoolean()) {
+ builder.searchAfterBuilder = SearchAfterBuilder.PROTOTYPE.readFrom(in);
+ }
return builder;
}
@@ -1313,8 +1338,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
out.writeBoolean(hasRescoreBuilders);
if (hasRescoreBuilders) {
out.writeVInt(rescoreBuilders.size());
- for (BytesReference rescoreBuilder : rescoreBuilders) {
- out.writeBytesReference(rescoreBuilder);
+ for (RescoreBuilder<?> rescoreBuilder : rescoreBuilders) {
+ out.writeRescorer(rescoreBuilder);
}
}
boolean hasScriptFields = scriptFields != null;
@@ -1359,13 +1384,18 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
if (out.getVersion().onOrAfter(Version.V_2_2_0)) {
out.writeBoolean(profile);
}
+ boolean hasSearchAfter = searchAfterBuilder != null;
+ out.writeBoolean(hasSearchAfter);
+ if (hasSearchAfter) {
+ searchAfterBuilder.writeTo(out);
+ }
}
@Override
public int hashCode() {
return Objects.hash(aggregations, explain, fetchSourceContext, fieldDataFields, fieldNames, from,
highlightBuilder, indexBoost, innerHitsBuilder, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields,
- size, sorts, stats, suggestBuilder, terminateAfter, timeoutInMillis, trackScores, version, profile);
+ size, sorts, searchAfterBuilder, stats, suggestBuilder, terminateAfter, timeoutInMillis, trackScores, version, profile);
}
@Override
@@ -1393,6 +1423,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
&& Objects.equals(scriptFields, other.scriptFields)
&& Objects.equals(size, other.size)
&& Objects.equals(sorts, other.sorts)
+ && Objects.equals(searchAfterBuilder, other.searchAfterBuilder)
&& Objects.equals(stats, other.stats)
&& Objects.equals(suggestBuilder, other.suggestBuilder)
&& Objects.equals(terminateAfter, other.terminateAfter)
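
The search_after plumbing added above is symmetric: the builder is parsed from x-content, rendered back in toXContent, carried over the wire, and folded into hashCode/equals. A hedged usage sketch (field names and values illustrative):

    SearchSourceBuilder source = new SearchSourceBuilder()
            .sort("timestamp", SortOrder.DESC)
            .sort("_uid", SortOrder.ASC)
            .searchAfter(new Object[] { 1451606400000L, "log#42" })  // one value per sort field, same types
            .size(10);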
diff --git a/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java b/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java
index ef16a03831..d79b1f59a6 100644
--- a/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java
+++ b/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java
@@ -31,7 +31,6 @@ import org.apache.lucene.search.TermStatistics;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldDocs;
import org.elasticsearch.action.search.SearchRequest;
-import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.collect.HppcMaps;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
@@ -299,7 +298,7 @@ public class SearchPhaseController extends AbstractComponent {
}
public InternalSearchResponse merge(ScoreDoc[] sortedDocs, AtomicArray<? extends QuerySearchResultProvider> queryResultsArr,
- AtomicArray<? extends FetchSearchResultProvider> fetchResultsArr, HasContextAndHeaders headersContext) {
+ AtomicArray<? extends FetchSearchResultProvider> fetchResultsArr) {
List<? extends AtomicArray.Entry<? extends QuerySearchResultProvider>> queryResults = queryResultsArr.asList();
List<? extends AtomicArray.Entry<? extends FetchSearchResultProvider>> fetchResults = fetchResultsArr.asList();
@@ -407,7 +406,7 @@ public class SearchPhaseController extends AbstractComponent {
for (AtomicArray.Entry<? extends QuerySearchResultProvider> entry : queryResults) {
aggregationsList.add((InternalAggregations) entry.value.queryResult().aggregations());
}
- aggregations = InternalAggregations.reduce(aggregationsList, new ReduceContext(bigArrays, scriptService, headersContext));
+ aggregations = InternalAggregations.reduce(aggregationsList, new ReduceContext(bigArrays, scriptService));
}
}
@@ -430,7 +429,7 @@ public class SearchPhaseController extends AbstractComponent {
}).collect(Collectors.toList());
for (SiblingPipelineAggregator pipelineAggregator : pipelineAggregators) {
InternalAggregation newAgg = pipelineAggregator.doReduce(new InternalAggregations(newAggs), new ReduceContext(
- bigArrays, scriptService, headersContext));
+ bigArrays, scriptService));
newAggs.add(newAgg);
}
aggregations = new InternalAggregations(newAggs);
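
Callers of merge simply drop the headers argument; a sketch of the new signature in use (arguments assumed built by the calling search phase):

    InternalSearchResponse response = searchPhaseController.merge(sortedDocs, queryResultsArr, fetchResultsArr);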
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java b/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java
index 0d524ed3e3..4087eb9a01 100644
--- a/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java
+++ b/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchRequest.java
@@ -22,7 +22,6 @@ package org.elasticsearch.search.fetch;
import com.carrotsearch.hppc.IntArrayList;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.ScoreDoc;
-import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.Lucene;
@@ -47,16 +46,7 @@ public class ShardFetchRequest extends TransportRequest {
public ShardFetchRequest() {
}
- public ShardFetchRequest(SearchScrollRequest request, long id, IntArrayList list, ScoreDoc lastEmittedDoc) {
- super(request);
- this.id = id;
- this.docIds = list.buffer;
- this.size = list.size();
- this.lastEmittedDoc = lastEmittedDoc;
- }
-
- protected ShardFetchRequest(TransportRequest originalRequest, long id, IntArrayList list, ScoreDoc lastEmittedDoc) {
- super(originalRequest);
+ public ShardFetchRequest(long id, IntArrayList list, ScoreDoc lastEmittedDoc) {
this.id = id;
this.docIds = list.buffer;
this.size = list.size();
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java b/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java
index cc53b48f13..d908aca0fc 100644
--- a/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java
+++ b/core/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java
@@ -46,7 +46,7 @@ public class ShardFetchSearchRequest extends ShardFetchRequest implements Indice
}
public ShardFetchSearchRequest(SearchRequest request, long id, IntArrayList list, ScoreDoc lastEmittedDoc) {
- super(request, id, list, lastEmittedDoc);
+ super(id, list, lastEmittedDoc);
this.originalIndices = new OriginalIndices(request);
}
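
The fetch requests mirror the scroll-request change above: ShardFetchRequest keeps only the search-context id, doc ids and last emitted doc, and ShardFetchSearchRequest layers OriginalIndices on top. Sketch (identifiers illustrative):

    IntArrayList docIds = new IntArrayList();
    docIds.add(3);
    ShardFetchRequest fetch = new ShardFetchRequest(contextId, docIds, lastEmittedDoc);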
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java
index 125563cd09..be4dd7cdad 100644
--- a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java
+++ b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java
@@ -27,6 +27,7 @@ import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.ConstantScoreWeight;
import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.DocValuesTermsQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
@@ -55,6 +56,7 @@ import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
+import java.util.HashMap;
import java.util.Map;
/**
@@ -63,6 +65,10 @@ public final class InnerHitsContext {
private final Map<String, BaseInnerHits> innerHits;
+ public InnerHitsContext() {
+ this.innerHits = new HashMap<>();
+ }
+
public InnerHitsContext(Map<String, BaseInnerHits> innerHits) {
this.innerHits = innerHits;
}
@@ -72,9 +78,19 @@ public final class InnerHitsContext {
}
public void addInnerHitDefinition(String name, BaseInnerHits innerHit) {
+ if (innerHits.containsKey(name)) {
+ throw new IllegalArgumentException("inner_hit definition with the name [" + name + "] already exists. Use a different inner_hit name");
+ }
+
innerHits.put(name, innerHit);
}
+ public void addInnerHitDefinitions(Map<String, BaseInnerHits> innerHits) {
+ for (Map.Entry<String, BaseInnerHits> entry : innerHits.entrySet()) {
+ addInnerHitDefinition(entry.getKey(), entry.getValue());
+ }
+ }
+
public static abstract class BaseInnerHits extends FilteredSearchContext {
protected final ParsedQuery query;
@@ -284,20 +300,18 @@ public final class InnerHitsContext {
@Override
public TopDocs topDocs(SearchContext context, FetchSubPhase.HitContext hitContext) throws IOException {
- final String field;
- final String term;
+ final Query hitQuery;
if (isParentHit(hitContext.hit())) {
- field = ParentFieldMapper.NAME;
- term = Uid.createUid(hitContext.hit().type(), hitContext.hit().id());
+ String field = ParentFieldMapper.joinField(hitContext.hit().type());
+ hitQuery = new DocValuesTermsQuery(field, hitContext.hit().id());
} else if (isChildHit(hitContext.hit())) {
DocumentMapper hitDocumentMapper = mapperService.documentMapper(hitContext.hit().type());
final String parentType = hitDocumentMapper.parentFieldMapper().type();
- field = UidFieldMapper.NAME;
SearchHitField parentField = hitContext.hit().field(ParentFieldMapper.NAME);
if (parentField == null) {
throw new IllegalStateException("All children must have a _parent");
}
- term = Uid.createUid(parentType, (String) parentField.getValue());
+ hitQuery = new TermQuery(new Term(UidFieldMapper.NAME, Uid.createUid(parentType, parentField.getValue())));
} else {
return Lucene.EMPTY_TOP_DOCS;
}
@@ -305,9 +319,9 @@ public final class InnerHitsContext {
BooleanQuery q = new BooleanQuery.Builder()
.add(query.query(), Occur.MUST)
// Only include docs that have the current hit as parent
- .add(new TermQuery(new Term(field, term)), Occur.MUST)
+ .add(hitQuery, Occur.FILTER)
// Only include docs that have this inner hits type
- .add(documentMapper.typeFilter(), Occur.MUST)
+ .add(documentMapper.typeFilter(), Occur.FILTER)
.build();
if (size() == 0) {
final int count = context.searcher().count(q);
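
InnerHitsContext gains a no-arg constructor and a fail-fast guard against duplicate names, so top-level and query-level inner hits can be merged into one registry. Sketch (the BaseInnerHits value is assumed built elsewhere):

    InnerHitsContext innerHitsContext = new InnerHitsContext();
    innerHitsContext.addInnerHitDefinition("comments", commentsInnerHits);
    // registering "comments" again now throws IllegalArgumentException instead of silently replacing it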
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsFetchSubPhase.java
index 3557e55570..39c432f9cc 100644
--- a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsFetchSubPhase.java
+++ b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsFetchSubPhase.java
@@ -63,7 +63,7 @@ public class InnerHitsFetchSubPhase implements FetchSubPhase {
@Override
public boolean hitExecutionNeeded(SearchContext context) {
- return context.innerHits() != null;
+ return context.innerHits() != null && context.innerHits().getInnerHits().size() > 0;
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsParseElement.java b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsParseElement.java
index 6ed635e8c8..d813dea1de 100644
--- a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsParseElement.java
+++ b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsParseElement.java
@@ -61,9 +61,10 @@ public class InnerHitsParseElement implements SearchParseElement {
public void parse(XContentParser parser, SearchContext searchContext) throws Exception {
QueryShardContext context = searchContext.indexShard().getQueryShardContext();
context.reset(parser);
- Map<String, InnerHitsContext.BaseInnerHits> innerHitsMap = parseInnerHits(parser, context, searchContext);
- if (innerHitsMap != null) {
- searchContext.innerHits(new InnerHitsContext(innerHitsMap));
+ Map<String, InnerHitsContext.BaseInnerHits> topLevelInnerHits = parseInnerHits(parser, context, searchContext);
+ if (topLevelInnerHits != null) {
+ InnerHitsContext innerHitsContext = searchContext.innerHits();
+ innerHitsContext.addInnerHitDefinitions(topLevelInnerHits);
}
}
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/parent/ParentFieldSubFetchPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/parent/ParentFieldSubFetchPhase.java
new file mode 100644
index 0000000000..41fe717642
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/search/fetch/parent/ParentFieldSubFetchPhase.java
@@ -0,0 +1,88 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.fetch.parent;
+
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.SortedDocValues;
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
+import org.elasticsearch.search.SearchHitField;
+import org.elasticsearch.search.SearchParseElement;
+import org.elasticsearch.search.fetch.FetchSubPhase;
+import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
+import org.elasticsearch.search.internal.InternalSearchHit;
+import org.elasticsearch.search.internal.InternalSearchHitField;
+import org.elasticsearch.search.internal.SearchContext;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+public class ParentFieldSubFetchPhase implements FetchSubPhase {
+
+ @Override
+ public Map<String, ? extends SearchParseElement> parseElements() {
+ return Collections.emptyMap();
+ }
+
+ @Override
+ public boolean hitExecutionNeeded(SearchContext context) {
+ return true;
+ }
+
+ @Override
+ public void hitExecute(SearchContext context, HitContext hitContext) {
+ ParentFieldMapper parentFieldMapper = context.mapperService().documentMapper(hitContext.hit().type()).parentFieldMapper();
+ if (parentFieldMapper.active() == false) {
+ return;
+ }
+
+ String parentId = getParentId(parentFieldMapper, hitContext.reader(), hitContext.docId());
+ Map<String, SearchHitField> fields = hitContext.hit().fieldsOrNull();
+ if (fields == null) {
+ fields = new HashMap<>();
+ hitContext.hit().fields(fields);
+ }
+ fields.put(ParentFieldMapper.NAME, new InternalSearchHitField(ParentFieldMapper.NAME, Collections.singletonList(parentId)));
+ }
+
+ @Override
+ public boolean hitsExecutionNeeded(SearchContext context) {
+ return false;
+ }
+
+ @Override
+ public void hitsExecute(SearchContext context, InternalSearchHit[] hits) {
+ }
+
+ public static String getParentId(ParentFieldMapper fieldMapper, LeafReader reader, int docId) {
+ try {
+ SortedDocValues docValues = reader.getSortedDocValues(fieldMapper.name());
+ BytesRef parentId = docValues.get(docId);
+ assert parentId.length > 0;
+ return parentId.utf8ToString();
+ } catch (IOException e) {
+ throw ExceptionsHelper.convertToElastic(e);
+ }
+ }
+
+}
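getParentId above reads the parent id straight from sorted doc values on the join field. A hedged sketch of the same lookup against plain Lucene, with an explicit null guard for segments that lack the field (the guard is an addition for illustration; the fetch phase above only runs when the _parent mapper is active):

import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.util.BytesRef;

import java.io.IOException;

class ParentIdLookupSketch {
    static String parentIdOrNull(LeafReader reader, String joinField, int docId) throws IOException {
        SortedDocValues dv = reader.getSortedDocValues(joinField);
        if (dv == null) {
            return null; // no doc values for this field in this segment
        }
        BytesRef id = dv.get(docId); // Lucene 5.x accessor, as used above
        return id.length == 0 ? null : id.utf8ToString();
    }
}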
diff --git a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java
index 01ab53f6d6..51e9ea4f5d 100644
--- a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java
+++ b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java
@@ -19,6 +19,7 @@
package org.elasticsearch.search.internal;
+import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
@@ -26,6 +27,9 @@ import org.apache.lucene.search.Collector;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.FieldDoc;
+import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
@@ -45,6 +49,7 @@ import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.ParsedQuery;
@@ -112,6 +117,7 @@ public class DefaultSearchContext extends SearchContext {
private Sort sort;
private Float minimumScore;
private boolean trackScores = false; // when sorting, track scores as well...
+ private FieldDoc searchAfter;
/**
* The original query as sent by the user without the types and aliases
* applied. Putting things in here leaks them into highlighting so don't add
@@ -140,7 +146,6 @@ public class DefaultSearchContext extends SearchContext {
private volatile long keepAlive;
private final long originNanoTime = System.nanoTime();
private volatile long lastAccessTime = -1;
- private InnerHitsContext innerHitsContext;
private Profilers profilers;
private final Map<String, FetchSubPhaseContext> subPhaseContexts = new HashMap<>();
@@ -152,7 +157,7 @@ public class DefaultSearchContext extends SearchContext {
BigArrays bigArrays, Counter timeEstimateCounter, ParseFieldMatcher parseFieldMatcher,
TimeValue timeout
) {
- super(parseFieldMatcher, request);
+ super(parseFieldMatcher);
this.id = id;
this.request = request;
this.searchType = request.searchType();
@@ -238,19 +243,37 @@ public class DefaultSearchContext extends SearchContext {
}
@Override
+ @Nullable
public Query searchFilter(String[] types) {
- Query filter = mapperService().searchFilter(types);
- if (filter == null && aliasFilter == null) {
+ return createSearchFilter(types, aliasFilter, mapperService().hasNested());
+ }
+
+ // extracted to static helper method to make writing unit tests easier:
+ static Query createSearchFilter(String[] types, Query aliasFilter, boolean hasNestedFields) {
+ Query typesFilter = null;
+ if (types != null && types.length >= 1) {
+ BytesRef[] typesBytes = new BytesRef[types.length];
+ for (int i = 0; i < typesBytes.length; i++) {
+ typesBytes[i] = new BytesRef(types[i]);
+ }
+ typesFilter = new TermsQuery(TypeFieldMapper.NAME, typesBytes);
+ }
+
+ if (typesFilter == null && aliasFilter == null && hasNestedFields == false) {
return null;
}
+
BooleanQuery.Builder bq = new BooleanQuery.Builder();
- if (filter != null) {
- bq.add(filter, Occur.MUST);
+ if (typesFilter != null) {
+ bq.add(typesFilter, Occur.FILTER);
+ } else if (hasNestedFields) {
+ bq.add(Queries.newNonNestedFilter(), Occur.FILTER);
}
if (aliasFilter != null) {
- bq.add(aliasFilter, Occur.MUST);
+ bq.add(aliasFilter, Occur.FILTER);
}
- return new ConstantScoreQuery(bq.build());
+
+ return bq.build();
}
@Override
@@ -529,6 +552,17 @@ public class DefaultSearchContext extends SearchContext {
}
@Override
+ public SearchContext searchAfter(FieldDoc searchAfter) {
+ this.searchAfter = searchAfter;
+ return this;
+ }
+
+ @Override
+ public FieldDoc searchAfter() {
+ return searchAfter;
+ }
+
+ @Override
public SearchContext parsedPostFilter(ParsedQuery postFilter) {
this.postFilter = postFilter;
return this;
@@ -727,16 +761,6 @@ public class DefaultSearchContext extends SearchContext {
}
@Override
- public void innerHits(InnerHitsContext innerHitsContext) {
- this.innerHitsContext = innerHitsContext;
- }
-
- @Override
- public InnerHitsContext innerHits() {
- return innerHitsContext;
- }
-
- @Override
public Map<Class<?>, Collector> queryCollectors() {
return queryCollectors;
}
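Since createSearchFilter was extracted expressly to ease unit testing, a test along these lines becomes possible (a sketch only; the method is package-private, so a real test would live in org.elasticsearch.search.internal):

import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;

class CreateSearchFilterSketch {
    static void demo() {
        Query filter = DefaultSearchContext.createSearchFilter(
                new String[] { "my_type" }, /*aliasFilter=*/ null, /*hasNestedFields=*/ false);
        // One FILTER clause is expected: a TermsQuery over the _type field.
        BooleanQuery bq = (BooleanQuery) filter;
        assert bq.clauses().size() == 1;
        assert bq.clauses().get(0).getOccur() == Occur.FILTER;
    }
}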
diff --git a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java
index eaa14933b3..449e3207ad 100644
--- a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java
+++ b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java
@@ -20,6 +20,7 @@
package org.elasticsearch.search.internal;
import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.util.Counter;
@@ -62,7 +63,7 @@ public abstract class FilteredSearchContext extends SearchContext {
public FilteredSearchContext(SearchContext in) {
//inner_hits in percolator ends up with null inner search context
- super(in == null ? ParseFieldMatcher.EMPTY : in.parseFieldMatcher(), in);
+ super(in == null ? ParseFieldMatcher.EMPTY : in.parseFieldMatcher());
this.in = in;
}
@@ -177,11 +178,6 @@ public abstract class FilteredSearchContext extends SearchContext {
}
@Override
- public void innerHits(InnerHitsContext innerHitsContext) {
- in.innerHits(innerHitsContext);
- }
-
- @Override
public InnerHitsContext innerHits() {
return in.innerHits();
}
@@ -337,6 +333,16 @@ public abstract class FilteredSearchContext extends SearchContext {
}
@Override
+ public SearchContext searchAfter(FieldDoc searchAfter) {
+ return in.searchAfter(searchAfter);
+ }
+
+ @Override
+ public FieldDoc searchAfter() {
+ return in.searchAfter();
+ }
+
+ @Override
public SearchContext parsedPostFilter(ParsedQuery postFilter) {
return in.parsedPostFilter(postFilter);
}
diff --git a/core/src/main/java/org/elasticsearch/search/internal/InternalScrollSearchRequest.java b/core/src/main/java/org/elasticsearch/search/internal/InternalScrollSearchRequest.java
index 77a490af38..7f91813804 100644
--- a/core/src/main/java/org/elasticsearch/search/internal/InternalScrollSearchRequest.java
+++ b/core/src/main/java/org/elasticsearch/search/internal/InternalScrollSearchRequest.java
@@ -42,7 +42,6 @@ public class InternalScrollSearchRequest extends TransportRequest {
}
public InternalScrollSearchRequest(SearchScrollRequest request, long id) {
- super(request);
this.id = id;
this.scroll = request.scroll();
}
diff --git a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java
index 76164b5c0f..a93cde1a8a 100644
--- a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java
+++ b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java
@@ -20,13 +20,12 @@ package org.elasticsearch.search.internal;
import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
-import org.elasticsearch.common.DelegatingHasContextAndHeaders;
-import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.lease.Releasable;
@@ -66,7 +65,7 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
-public abstract class SearchContext extends DelegatingHasContextAndHeaders implements Releasable {
+public abstract class SearchContext implements Releasable {
private static ThreadLocal<SearchContext> current = new ThreadLocal<>();
public final static int DEFAULT_TERMINATE_AFTER = 0;
@@ -87,11 +86,11 @@ public abstract class SearchContext extends DelegatingHasContextAndHeaders imple
private Map<Lifetime, List<Releasable>> clearables = null;
private final AtomicBoolean closed = new AtomicBoolean(false);
+ private InnerHitsContext innerHitsContext;
protected final ParseFieldMatcher parseFieldMatcher;
- protected SearchContext(ParseFieldMatcher parseFieldMatcher, HasContextAndHeaders contextHeaders) {
- super(contextHeaders);
+ protected SearchContext(ParseFieldMatcher parseFieldMatcher) {
this.parseFieldMatcher = parseFieldMatcher;
}
@@ -170,9 +169,12 @@ public abstract class SearchContext extends DelegatingHasContextAndHeaders imple
public abstract void highlight(SearchContextHighlight highlight);
- public abstract void innerHits(InnerHitsContext innerHitsContext);
-
- public abstract InnerHitsContext innerHits();
+ public InnerHitsContext innerHits() {
+ if (innerHitsContext == null) {
+ innerHitsContext = new InnerHitsContext();
+ }
+ return innerHitsContext;
+ }
public abstract SuggestionSearchContext suggest();
@@ -240,6 +242,10 @@ public abstract class SearchContext extends DelegatingHasContextAndHeaders imple
public abstract boolean trackScores();
+ public abstract SearchContext searchAfter(FieldDoc searchAfter);
+
+ public abstract FieldDoc searchAfter();
+
public abstract SearchContext parsedPostFilter(ParsedQuery postFilter);
public abstract ParsedQuery parsedPostFilter();
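With innerHits() now lazily creating its context, parse elements no longer need null checks before registering definitions, which is what the InnerHitsParseElement hunk above relies on. A small sketch of that call pattern (assuming the addInnerHitDefinitions signature used there):

import java.util.Collections;

import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.internal.SearchContext;

class InnerHitsMergeSketch {
    static void register(SearchContext context, String name, InnerHitsContext.BaseInnerHits hits) {
        // innerHits() never returns null, so top-level and query-level
        // definitions can both merge into the same context.
        context.innerHits().addInnerHitDefinitions(Collections.singletonMap(name, hits));
    }
}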
diff --git a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java
index 9d15dfd579..0f46461f4a 100644
--- a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java
+++ b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java
@@ -22,7 +22,6 @@ package org.elasticsearch.search.internal;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cluster.routing.ShardRouting;
-import org.elasticsearch.common.ContextAndHeaderHolder;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
@@ -57,7 +56,7 @@ import static org.elasticsearch.search.Scroll.readScroll;
* </pre>
*/
-public class ShardSearchLocalRequest extends ContextAndHeaderHolder implements ShardSearchRequest {
+public class ShardSearchLocalRequest implements ShardSearchRequest {
private String index;
private int shardId;
@@ -84,7 +83,6 @@ public class ShardSearchLocalRequest extends ContextAndHeaderHolder implements S
this.scroll = searchRequest.scroll();
this.filteringAliases = filteringAliases;
this.nowInMillis = nowInMillis;
- copyContextAndHeadersFrom(searchRequest);
}
public ShardSearchLocalRequest(String[] types, long nowInMillis) {
@@ -99,7 +97,7 @@ public class ShardSearchLocalRequest extends ContextAndHeaderHolder implements S
public ShardSearchLocalRequest(ShardId shardId, int numberOfShards, SearchType searchType, SearchSourceBuilder source, String[] types,
Boolean requestCache) {
- this.index = shardId.getIndex();
+ this.index = shardId.getIndexName();
this.shardId = shardId.id();
this.numberOfShards = numberOfShards;
this.searchType = searchType;
diff --git a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java
index b1730b6a14..1f0b3d1f18 100644
--- a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java
+++ b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java
@@ -20,7 +20,6 @@
package org.elasticsearch.search.internal;
import org.elasticsearch.action.search.SearchType;
-import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.script.Template;
import org.elasticsearch.search.Scroll;
@@ -33,7 +32,7 @@ import java.io.IOException;
* It provides all the methods that the {@link org.elasticsearch.search.internal.SearchContext} needs.
* Provides a cache key based on its content that can be used to cache shard level response.
*/
-public interface ShardSearchRequest extends HasContextAndHeaders {
+public interface ShardSearchRequest {
String index();
diff --git a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java
index 0f9c0ced41..48ea31c170 100644
--- a/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java
+++ b/core/src/main/java/org/elasticsearch/search/internal/ShardSearchTransportRequest.java
@@ -51,7 +51,6 @@ public class ShardSearchTransportRequest extends TransportRequest implements Sha
public ShardSearchTransportRequest(SearchRequest searchRequest, ShardRouting shardRouting, int numberOfShards,
String[] filteringAliases, long nowInMillis) {
- super(searchRequest);
this.shardSearchLocalRequest = new ShardSearchLocalRequest(searchRequest, shardRouting, numberOfShards, filteringAliases, nowInMillis);
this.originalIndices = new OriginalIndices(searchRequest);
}
diff --git a/core/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java
index 5feabad792..c4880814fc 100644
--- a/core/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java
+++ b/core/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java
@@ -66,8 +66,6 @@ public class SubSearchContext extends FilteredSearchContext {
private boolean trackScores;
private boolean version;
- private InnerHitsContext innerHitsContext;
-
public SubSearchContext(SearchContext context) {
super(context);
this.fetchSearchResult = new FetchSearchResult();
@@ -326,13 +324,4 @@ public class SubSearchContext extends FilteredSearchContext {
throw new UnsupportedOperationException("Not supported");
}
- @Override
- public void innerHits(InnerHitsContext innerHitsContext) {
- this.innerHitsContext = innerHitsContext;
- }
-
- @Override
- public InnerHitsContext innerHits() {
- return innerHitsContext;
- }
}
diff --git a/core/src/main/java/org/elasticsearch/search/profile/CollectorResult.java b/core/src/main/java/org/elasticsearch/search/profile/CollectorResult.java
index d0c006eb5d..8da14d23d9 100644
--- a/core/src/main/java/org/elasticsearch/search/profile/CollectorResult.java
+++ b/core/src/main/java/org/elasticsearch/search/profile/CollectorResult.java
@@ -36,7 +36,7 @@ import java.util.Locale;
* Collectors used in the search. Children CollectorResult's may be
* embedded inside of a parent CollectorResult
*/
-public class CollectorResult implements ToXContent, Writeable<CollectorResult> {
+public class CollectorResult implements ToXContent, Writeable {
public static final String REASON_SEARCH_COUNT = "search_count";
public static final String REASON_SEARCH_TOP_HITS = "search_top_hits";
@@ -125,7 +125,7 @@ public class CollectorResult implements ToXContent, Writeable<CollectorResult> {
builder = builder.startObject()
.field(NAME.getPreferredName(), getName())
.field(REASON.getPreferredName(), getReason())
- .field(TIME.getPreferredName(), String.format(Locale.US, "%.10gms", getTime() / 1000000.0));
+ .field(TIME.getPreferredName(), String.format(Locale.US, "%.10gms", (double) (getTime() / 1000000.0)));
if (!children.isEmpty()) {
builder = builder.startArray(CHILDREN.getPreferredName());
@@ -150,7 +150,7 @@ public class CollectorResult implements ToXContent, Writeable<CollectorResult> {
}
@Override
- public CollectorResult readFrom(StreamInput in) throws IOException {
+ public Object readFrom(StreamInput in) throws IOException {
return new CollectorResult(in);
}
}
diff --git a/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java
index 5352fb0289..5a98744505 100644
--- a/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java
+++ b/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java
@@ -192,10 +192,10 @@ public class QueryPhase implements SearchPhase {
final ScrollContext scrollContext = searchContext.scrollContext();
assert (scrollContext != null) == (searchContext.request().scroll() != null);
final TopDocsCollector<?> topDocsCollector;
- ScoreDoc lastEmittedDoc;
+ ScoreDoc after = null;
if (searchContext.request().scroll() != null) {
numDocs = Math.min(searchContext.size(), totalNumDocs);
- lastEmittedDoc = scrollContext.lastEmittedDoc;
+ after = scrollContext.lastEmittedDoc;
if (returnsDocsInOrder(query, searchContext.sort())) {
if (scrollContext.totalHits == -1) {
@@ -209,7 +209,7 @@ public class QueryPhase implements SearchPhase {
if (scrollContext.lastEmittedDoc != null) {
BooleanQuery bq = new BooleanQuery.Builder()
.add(query, BooleanClause.Occur.MUST)
- .add(new MinDocQuery(lastEmittedDoc.doc + 1), BooleanClause.Occur.FILTER)
+ .add(new MinDocQuery(after.doc + 1), BooleanClause.Occur.FILTER)
.build();
query = bq;
}
@@ -217,7 +217,7 @@ public class QueryPhase implements SearchPhase {
}
}
} else {
- lastEmittedDoc = null;
+ after = searchContext.searchAfter();
}
if (totalNumDocs == 0) {
// top collectors don't like a size of 0
@@ -226,13 +226,13 @@ public class QueryPhase implements SearchPhase {
assert numDocs > 0;
if (searchContext.sort() != null) {
topDocsCollector = TopFieldCollector.create(searchContext.sort(), numDocs,
- (FieldDoc) lastEmittedDoc, true, searchContext.trackScores(), searchContext.trackScores());
+ (FieldDoc) after, true, searchContext.trackScores(), searchContext.trackScores());
} else {
rescore = !searchContext.rescore().isEmpty();
for (RescoreSearchContext rescoreContext : searchContext.rescore()) {
numDocs = Math.max(rescoreContext.window(), numDocs);
}
- topDocsCollector = TopScoreDocCollector.create(numDocs, lastEmittedDoc);
+ topDocsCollector = TopScoreDocCollector.create(numDocs, after);
}
collector = topDocsCollector;
if (doProfile) {
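The hunk above renames lastEmittedDoc to after because the same slot now serves two sources: the scroll's last emitted doc, or the request's search_after values. Either way it seeds the top-docs collector, roughly as in this sketch (the boolean flag values are illustrative):

import java.io.IOException;

import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TopFieldCollector;

class SearchAfterCollectorSketch {
    static TopFieldCollector collector(Sort sort, FieldDoc after, int numDocs) throws IOException {
        // fillFields=true; doc score / max score tracking follow trackScores()
        return TopFieldCollector.create(sort, numDocs, after, true, false, false);
    }
}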
diff --git a/core/src/main/java/org/elasticsearch/search/query/QuerySearchRequest.java b/core/src/main/java/org/elasticsearch/search/query/QuerySearchRequest.java
index a1395bd73d..15593abf0d 100644
--- a/core/src/main/java/org/elasticsearch/search/query/QuerySearchRequest.java
+++ b/core/src/main/java/org/elasticsearch/search/query/QuerySearchRequest.java
@@ -47,7 +47,6 @@ public class QuerySearchRequest extends TransportRequest implements IndicesReque
}
public QuerySearchRequest(SearchRequest request, long id, AggregatedDfs dfs) {
- super(request);
this.id = id;
this.dfs = dfs;
this.originalIndices = new OriginalIndices(request);
diff --git a/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java b/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java
index 7f95ff1082..319055639a 100644
--- a/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java
+++ b/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java
@@ -27,7 +27,7 @@ import org.apache.lucene.search.TopDocs;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.query.ParsedQuery;
+import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.search.internal.SearchContext;
@@ -120,17 +120,17 @@ public final class QueryRescorer implements Rescorer {
}
}
- private static final ObjectParser<QueryRescoreContext, SearchContext> RESCORE_PARSER = new ObjectParser<>("query", null);
+ private static final ObjectParser<QueryRescoreContext, QueryShardContext> RESCORE_PARSER = new ObjectParser<>("query", null);
static {
- RESCORE_PARSER.declareObject(QueryRescoreContext::setParsedQuery, (p, c) -> c.indexShard().getQueryShardContext().parse(p), new ParseField("rescore_query"));
+ RESCORE_PARSER.declareObject(QueryRescoreContext::setQuery, (p, c) -> c.parse(p).query(), new ParseField("rescore_query"));
RESCORE_PARSER.declareFloat(QueryRescoreContext::setQueryWeight, new ParseField("query_weight"));
RESCORE_PARSER.declareFloat(QueryRescoreContext::setRescoreQueryWeight, new ParseField("rescore_query_weight"));
RESCORE_PARSER.declareString(QueryRescoreContext::setScoreMode, new ParseField("score_mode"));
}
@Override
- public RescoreSearchContext parse(XContentParser parser, SearchContext context) throws IOException {
+ public RescoreSearchContext parse(XContentParser parser, QueryShardContext context) throws IOException {
return RESCORE_PARSER.parse(parser, new QueryRescoreContext(this), context);
}
@@ -178,22 +178,24 @@ public final class QueryRescorer implements Rescorer {
public static class QueryRescoreContext extends RescoreSearchContext {
+ static final int DEFAULT_WINDOW_SIZE = 10;
+
public QueryRescoreContext(QueryRescorer rescorer) {
- super(NAME, 10, rescorer);
+ super(NAME, DEFAULT_WINDOW_SIZE, rescorer);
this.scoreMode = QueryRescoreMode.Total;
}
- private ParsedQuery parsedQuery;
+ private Query query;
private float queryWeight = 1.0f;
private float rescoreQueryWeight = 1.0f;
private QueryRescoreMode scoreMode;
- public void setParsedQuery(ParsedQuery parsedQuery) {
- this.parsedQuery = parsedQuery;
+ public void setQuery(Query query) {
+ this.query = query;
}
public Query query() {
- return parsedQuery.query();
+ return query;
}
public float queryWeight() {
diff --git a/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorerBuilder.java b/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorerBuilder.java
new file mode 100644
index 0000000000..10c727a902
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/search/rescore/QueryRescorerBuilder.java
@@ -0,0 +1,242 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.rescore;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.index.query.MatchAllQueryBuilder;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.QueryParseContext;
+import org.elasticsearch.index.query.QueryShardContext;
+import org.elasticsearch.search.rescore.QueryRescorer.QueryRescoreContext;
+
+import java.io.IOException;
+import java.util.Locale;
+import java.util.Objects;
+
+public class QueryRescorerBuilder extends RescoreBuilder<QueryRescorerBuilder> {
+
+ public static final String NAME = "query";
+
+ public static final QueryRescorerBuilder PROTOTYPE = new QueryRescorerBuilder(new MatchAllQueryBuilder());
+
+ public static final float DEFAULT_RESCORE_QUERYWEIGHT = 1.0f;
+ public static final float DEFAULT_QUERYWEIGHT = 1.0f;
+ public static final QueryRescoreMode DEFAULT_SCORE_MODE = QueryRescoreMode.Total;
+ private final QueryBuilder<?> queryBuilder;
+ private float rescoreQueryWeight = DEFAULT_RESCORE_QUERYWEIGHT;
+ private float queryWeight = DEFAULT_QUERYWEIGHT;
+ private QueryRescoreMode scoreMode = DEFAULT_SCORE_MODE;
+
+ private static ParseField RESCORE_QUERY_FIELD = new ParseField("rescore_query");
+ private static ParseField QUERY_WEIGHT_FIELD = new ParseField("query_weight");
+ private static ParseField RESCORE_QUERY_WEIGHT_FIELD = new ParseField("rescore_query_weight");
+ private static ParseField SCORE_MODE_FIELD = new ParseField("score_mode");
+
+ private static final ObjectParser<InnerBuilder, QueryParseContext> QUERY_RESCORE_PARSER = new ObjectParser<>(NAME, null);
+
+ static {
+ QUERY_RESCORE_PARSER.declareObject(InnerBuilder::setQueryBuilder, (p, c) -> {
+ try {
+ return c.parseInnerQueryBuilder();
+ } catch (IOException e) {
+ throw new ParsingException(p.getTokenLocation(), "Could not parse inner query", e);
+ }
+ }, RESCORE_QUERY_FIELD);
+ QUERY_RESCORE_PARSER.declareFloat(InnerBuilder::setQueryWeight, QUERY_WEIGHT_FIELD);
+ QUERY_RESCORE_PARSER.declareFloat(InnerBuilder::setRescoreQueryWeight, RESCORE_QUERY_WEIGHT_FIELD);
+ QUERY_RESCORE_PARSER.declareString((struct, value) -> struct.setScoreMode(QueryRescoreMode.fromString(value)), SCORE_MODE_FIELD);
+ }
+
+ /**
+ * Creates a new {@link QueryRescorerBuilder} instance
+ * @param builder the query builder to build the rescore query from
+ */
+ public QueryRescorerBuilder(QueryBuilder<?> builder) {
+ this.queryBuilder = builder;
+ }
+
+ /**
+ * @return the query used for this rescore query
+ */
+ public QueryBuilder<?> getRescoreQuery() {
+ return this.queryBuilder;
+ }
+
+ /**
+ * Sets the original query weight for rescoring. The default is <tt>1.0</tt>
+ */
+ public QueryRescorerBuilder setQueryWeight(float queryWeight) {
+ this.queryWeight = queryWeight;
+ return this;
+ }
+
+
+ /**
+ * Gets the original query weight for rescoring. The default is <tt>1.0</tt>
+ */
+ public float getQueryWeight() {
+ return this.queryWeight;
+ }
+
+ /**
+ * Sets the rescore query weight for rescoring. The default is <tt>1.0</tt>
+ */
+ public QueryRescorerBuilder setRescoreQueryWeight(float rescoreQueryWeight) {
+ this.rescoreQueryWeight = rescoreQueryWeight;
+ return this;
+ }
+
+ /**
+ * Gets the rescore query weight for rescoring. The default is <tt>1.0</tt>
+ */
+ public float getRescoreQueryWeight() {
+ return this.rescoreQueryWeight;
+ }
+
+ /**
+ * Sets the original query score mode. The default is {@link QueryRescoreMode#Total}.
+ */
+ public QueryRescorerBuilder setScoreMode(QueryRescoreMode scoreMode) {
+ this.scoreMode = scoreMode;
+ return this;
+ }
+
+ /**
+ * Gets the original query score mode. The default is <tt>total</tt>
+ */
+ public QueryRescoreMode getScoreMode() {
+ return this.scoreMode;
+ }
+
+ @Override
+ public void doXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject(NAME);
+ builder.field(RESCORE_QUERY_FIELD.getPreferredName(), queryBuilder);
+ builder.field(QUERY_WEIGHT_FIELD.getPreferredName(), queryWeight);
+ builder.field(RESCORE_QUERY_WEIGHT_FIELD.getPreferredName(), rescoreQueryWeight);
+ builder.field(SCORE_MODE_FIELD.getPreferredName(), scoreMode.name().toLowerCase(Locale.ROOT));
+ builder.endObject();
+ }
+
+ public QueryRescorerBuilder fromXContent(QueryParseContext parseContext) throws IOException {
+ InnerBuilder innerBuilder = QUERY_RESCORE_PARSER.parse(parseContext.parser(), new InnerBuilder(), parseContext);
+ return innerBuilder.build();
+ }
+
+ @Override
+ public QueryRescoreContext build(QueryShardContext context) throws IOException {
+ org.elasticsearch.search.rescore.QueryRescorer rescorer = new org.elasticsearch.search.rescore.QueryRescorer();
+ QueryRescoreContext queryRescoreContext = new QueryRescoreContext(rescorer);
+ queryRescoreContext.setQuery(this.queryBuilder.toQuery(context));
+ queryRescoreContext.setQueryWeight(this.queryWeight);
+ queryRescoreContext.setRescoreQueryWeight(this.rescoreQueryWeight);
+ queryRescoreContext.setScoreMode(this.scoreMode);
+ if (this.windowSize != null) {
+ queryRescoreContext.setWindowSize(this.windowSize);
+ }
+ return queryRescoreContext;
+ }
+
+ @Override
+ public final int hashCode() {
+ int result = super.hashCode();
+ return 31 * result + Objects.hash(scoreMode, queryWeight, rescoreQueryWeight, queryBuilder);
+ }
+
+ @Override
+ public final boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+ QueryRescorerBuilder other = (QueryRescorerBuilder) obj;
+ return super.equals(obj) &&
+ Objects.equals(scoreMode, other.scoreMode) &&
+ Objects.equals(queryWeight, other.queryWeight) &&
+ Objects.equals(rescoreQueryWeight, other.rescoreQueryWeight) &&
+ Objects.equals(queryBuilder, other.queryBuilder);
+ }
+
+ @Override
+ public QueryRescorerBuilder doReadFrom(StreamInput in) throws IOException {
+ QueryRescorerBuilder rescorer = new QueryRescorerBuilder(in.readQuery());
+ rescorer.setScoreMode(QueryRescoreMode.PROTOTYPE.readFrom(in));
+ rescorer.setRescoreQueryWeight(in.readFloat());
+ rescorer.setQueryWeight(in.readFloat());
+ return rescorer;
+ }
+
+ @Override
+ public void doWriteTo(StreamOutput out) throws IOException {
+ out.writeQuery(queryBuilder);
+ scoreMode.writeTo(out);
+ out.writeFloat(rescoreQueryWeight);
+ out.writeFloat(queryWeight);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return NAME;
+ }
+
+ /**
+ * Helper to be able to use {@link ObjectParser}, since we need the inner query builder
+ * for the constructor of {@link QueryRescorerBuilder}, but {@link ObjectParser} only
+ * allows filling properties of an already constructed value.
+ */
+ private class InnerBuilder {
+
+ private QueryBuilder<?> queryBuilder;
+ private float rescoreQueryWeight = DEFAULT_RESCORE_QUERYWEIGHT;
+ private float queryWeight = DEFAULT_QUERYWEIGHT;
+ private QueryRescoreMode scoreMode = DEFAULT_SCORE_MODE;
+
+ void setQueryBuilder(QueryBuilder<?> builder) {
+ this.queryBuilder = builder;
+ }
+
+ QueryRescorerBuilder build() {
+ QueryRescorerBuilder queryRescoreBuilder = new QueryRescorerBuilder(queryBuilder);
+ queryRescoreBuilder.setQueryWeight(queryWeight);
+ queryRescoreBuilder.setRescoreQueryWeight(rescoreQueryWeight);
+ queryRescoreBuilder.setScoreMode(scoreMode);
+ return queryRescoreBuilder;
+ }
+
+ void setQueryWeight(float queryWeight) {
+ this.queryWeight = queryWeight;
+ }
+
+ void setRescoreQueryWeight(float rescoreQueryWeight) {
+ this.rescoreQueryWeight = rescoreQueryWeight;
+ }
+
+ void setScoreMode(QueryRescoreMode scoreMode) {
+ this.scoreMode = scoreMode;
+ }
+ }
+}
\ No newline at end of file
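A hedged usage sketch for the new builder: a rescorer that blends the original query score with a rescore query over the top 50 documents per shard (the field name and weights are illustrative):

import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.rescore.QueryRescoreMode;
import org.elasticsearch.search.rescore.QueryRescorerBuilder;
import org.elasticsearch.search.rescore.RescoreBuilder;

class RescoreUsageSketch {
    static QueryRescorerBuilder example() {
        return RescoreBuilder.queryRescorer(QueryBuilders.matchQuery("title", "quick brown fox"))
                .setQueryWeight(0.7f)         // weight of the original query score
                .setRescoreQueryWeight(1.2f)  // weight of the rescore query score
                .setScoreMode(QueryRescoreMode.Total)
                .windowSize(50);              // rescore only the top 50 docs per shard
    }
}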
diff --git a/core/src/main/java/org/elasticsearch/search/rescore/RescoreBuilder.java b/core/src/main/java/org/elasticsearch/search/rescore/RescoreBuilder.java
index 7510d24f82..8dad07a543 100644
--- a/core/src/main/java/org/elasticsearch/search/rescore/RescoreBuilder.java
+++ b/core/src/main/java/org/elasticsearch/search/rescore/RescoreBuilder.java
@@ -20,255 +20,140 @@
package org.elasticsearch.search.rescore;
import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.index.query.MatchAllQueryBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.QueryParseContext;
+import org.elasticsearch.index.query.QueryShardContext;
+import org.elasticsearch.search.rescore.QueryRescorer.QueryRescoreContext;
import java.io.IOException;
-import java.util.Locale;
import java.util.Objects;
-public class RescoreBuilder implements ToXContent, Writeable<RescoreBuilder> {
-
- private Rescorer rescorer;
- private Integer windowSize;
- public static final RescoreBuilder PROTOYPE = new RescoreBuilder(new QueryRescorer(new MatchAllQueryBuilder()));
+/**
+ * The abstract base builder for instances of {@link RescoreBuilder}.
+ */
+public abstract class RescoreBuilder<RB extends RescoreBuilder<RB>> implements ToXContent, NamedWriteable<RB> {
- public RescoreBuilder(Rescorer rescorer) {
- if (rescorer == null) {
- throw new IllegalArgumentException("rescorer cannot be null");
- }
- this.rescorer = rescorer;
- }
+ protected Integer windowSize;
- public Rescorer rescorer() {
- return this.rescorer;
- }
+ private static ParseField WINDOW_SIZE_FIELD = new ParseField("window_size");
- public RescoreBuilder windowSize(int windowSize) {
+ @SuppressWarnings("unchecked")
+ public RB windowSize(int windowSize) {
this.windowSize = windowSize;
- return this;
+ return (RB) this;
}
public Integer windowSize() {
return windowSize;
}
+ public static RescoreBuilder<?> parseFromXContent(QueryParseContext parseContext) throws IOException {
+ XContentParser parser = parseContext.parser();
+ String fieldName = null;
+ RescoreBuilder<?> rescorer = null;
+ Integer windowSize = null;
+ XContentParser.Token token;
+ while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+ if (token == XContentParser.Token.FIELD_NAME) {
+ fieldName = parser.currentName();
+ } else if (token.isValue()) {
+ if (parseContext.parseFieldMatcher().match(fieldName, WINDOW_SIZE_FIELD)) {
+ windowSize = parser.intValue();
+ } else {
+ throw new ParsingException(parser.getTokenLocation(), "rescore doesn't support [" + fieldName + "]");
+ }
+ } else if (token == XContentParser.Token.START_OBJECT) {
+ // we only have QueryRescorer at this point
+ if (QueryRescorerBuilder.NAME.equals(fieldName)) {
+ rescorer = QueryRescorerBuilder.PROTOTYPE.fromXContent(parseContext);
+ } else {
+ throw new ParsingException(parser.getTokenLocation(), "rescore doesn't support rescorer with name [" + fieldName + "]");
+ }
+ } else {
+ throw new ParsingException(parser.getTokenLocation(), "unexpected token [" + token + "] after [" + fieldName + "]");
+ }
+ }
+ if (rescorer == null) {
+ throw new ParsingException(parser.getTokenLocation(), "missing rescore type");
+ }
+ if (windowSize != null) {
+ rescorer.windowSize(windowSize.intValue());
+ }
+ return rescorer;
+ }
+
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
if (windowSize != null) {
builder.field("window_size", windowSize);
}
- rescorer.toXContent(builder, params);
+ doXContent(builder, params);
+ builder.endObject();
return builder;
}
- public static QueryRescorer queryRescorer(QueryBuilder<?> queryBuilder) {
- return new QueryRescorer(queryBuilder);
+ protected abstract void doXContent(XContentBuilder builder, Params params) throws IOException;
+
+ public abstract QueryRescoreContext build(QueryShardContext context) throws IOException;
+
+ public static QueryRescorerBuilder queryRescorer(QueryBuilder<?> queryBuilder) {
+ return new QueryRescorerBuilder(queryBuilder);
}
@Override
- public final int hashCode() {
- return Objects.hash(windowSize, rescorer);
+ public int hashCode() {
+ return Objects.hash(windowSize);
}
@Override
- public final boolean equals(Object obj) {
+ public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
+ @SuppressWarnings("rawtypes")
RescoreBuilder other = (RescoreBuilder) obj;
- return Objects.equals(windowSize, other.windowSize) &&
- Objects.equals(rescorer, other.rescorer);
+ return Objects.equals(windowSize, other.windowSize);
}
@Override
- public RescoreBuilder readFrom(StreamInput in) throws IOException {
- RescoreBuilder builder = new RescoreBuilder(in.readRescorer());
- Integer windowSize = in.readOptionalVInt();
- if (windowSize != null) {
- builder.windowSize(windowSize);
- }
+ public RB readFrom(StreamInput in) throws IOException {
+ RB builder = doReadFrom(in);
+ builder.windowSize = in.readOptionalVInt();
return builder;
}
+ protected abstract RB doReadFrom(StreamInput in) throws IOException;
+
@Override
public void writeTo(StreamOutput out) throws IOException {
- out.writeRescorer(rescorer);
+ doWriteTo(out);
out.writeOptionalVInt(this.windowSize);
}
+ protected abstract void doWriteTo(StreamOutput out) throws IOException;
+
@Override
public final String toString() {
try {
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.prettyPrint();
- builder.startObject();
toXContent(builder, EMPTY_PARAMS);
- builder.endObject();
return builder.string();
} catch (Exception e) {
return "{ \"error\" : \"" + ExceptionsHelper.detailedMessage(e) + "\"}";
}
}
-
- public static abstract class Rescorer implements ToXContent, NamedWriteable<Rescorer> {
-
- private String name;
-
- public Rescorer(String name) {
- this.name = name;
- }
-
- @Override
- public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
- builder.startObject(name);
- builder = innerToXContent(builder, params);
- builder.endObject();
- return builder;
- }
-
- protected abstract XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException;
-
- @Override
- public abstract int hashCode();
-
- @Override
- public abstract boolean equals(Object obj);
- }
-
- public static class QueryRescorer extends Rescorer {
-
- private static final String NAME = "query";
- public static final QueryRescorer PROTOTYPE = new QueryRescorer(new MatchAllQueryBuilder());
- public static final float DEFAULT_RESCORE_QUERYWEIGHT = 1.0f;
- public static final float DEFAULT_QUERYWEIGHT = 1.0f;
- public static final QueryRescoreMode DEFAULT_SCORE_MODE = QueryRescoreMode.Total;
- private final QueryBuilder<?> queryBuilder;
- private float rescoreQueryWeight = DEFAULT_RESCORE_QUERYWEIGHT;
- private float queryWeight = DEFAULT_QUERYWEIGHT;
- private QueryRescoreMode scoreMode = DEFAULT_SCORE_MODE;
-
- /**
- * Creates a new {@link QueryRescorer} instance
- * @param builder the query builder to build the rescore query from
- */
- public QueryRescorer(QueryBuilder<?> builder) {
- super(NAME);
- this.queryBuilder = builder;
- }
-
- /**
- * @return the query used for this rescore query
- */
- public QueryBuilder<?> getRescoreQuery() {
- return this.queryBuilder;
- }
-
- /**
- * Sets the original query weight for rescoring. The default is <tt>1.0</tt>
- */
- public QueryRescorer setQueryWeight(float queryWeight) {
- this.queryWeight = queryWeight;
- return this;
- }
-
-
- /**
- * Gets the original query weight for rescoring. The default is <tt>1.0</tt>
- */
- public float getQueryWeight() {
- return this.queryWeight;
- }
-
- /**
- * Sets the original query weight for rescoring. The default is <tt>1.0</tt>
- */
- public QueryRescorer setRescoreQueryWeight(float rescoreQueryWeight) {
- this.rescoreQueryWeight = rescoreQueryWeight;
- return this;
- }
-
- /**
- * Gets the original query weight for rescoring. The default is <tt>1.0</tt>
- */
- public float getRescoreQueryWeight() {
- return this.rescoreQueryWeight;
- }
-
- /**
- * Sets the original query score mode. The default is {@link QueryRescoreMode#Total}.
- */
- public QueryRescorer setScoreMode(QueryRescoreMode scoreMode) {
- this.scoreMode = scoreMode;
- return this;
- }
-
- /**
- * Gets the original query score mode. The default is <tt>total</tt>
- */
- public QueryRescoreMode getScoreMode() {
- return this.scoreMode;
- }
-
- @Override
- protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
- builder.field("rescore_query", queryBuilder);
- builder.field("query_weight", queryWeight);
- builder.field("rescore_query_weight", rescoreQueryWeight);
- builder.field("score_mode", scoreMode.name().toLowerCase(Locale.ROOT));
- return builder;
- }
-
- @Override
- public final int hashCode() {
- return Objects.hash(getClass(), scoreMode, queryWeight, rescoreQueryWeight, queryBuilder);
- }
-
- @Override
- public final boolean equals(Object obj) {
- if (this == obj) {
- return true;
- }
- if (obj == null || getClass() != obj.getClass()) {
- return false;
- }
- QueryRescorer other = (QueryRescorer) obj;
- return Objects.equals(scoreMode, other.scoreMode) &&
- Objects.equals(queryWeight, other.queryWeight) &&
- Objects.equals(rescoreQueryWeight, other.rescoreQueryWeight) &&
- Objects.equals(queryBuilder, other.queryBuilder);
- }
-
- @Override
- public QueryRescorer readFrom(StreamInput in) throws IOException {
- QueryRescorer rescorer = new QueryRescorer(in.readQuery());
- rescorer.setScoreMode(QueryRescoreMode.PROTOTYPE.readFrom(in));
- rescorer.setRescoreQueryWeight(in.readFloat());
- rescorer.setQueryWeight(in.readFloat());
- return rescorer;
- }
-
- @Override
- public void writeTo(StreamOutput out) throws IOException {
- out.writeQuery(queryBuilder);
- scoreMode.writeTo(out);
- out.writeFloat(rescoreQueryWeight);
- out.writeFloat(queryWeight);
- }
-
- @Override
- public String getWriteableName() {
- return NAME;
- }
- }
}
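Round-trip sketch: with toXContent now emitting the enclosing object itself, a rescore section serializes roughly as {"window_size": 50, "query": {"rescore_query": {...}, "query_weight": ..., "rescore_query_weight": ..., "score_mode": "total"}}, which is the shape parseFromXContent accepts. The snippet below exercises only the builder side; parsing back needs a wired-up QueryParseContext, elided here:

import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.rescore.RescoreBuilder;

class RescoreXContentSketch {
    static String asJson() {
        RescoreBuilder<?> rescore = RescoreBuilder.queryRescorer(QueryBuilders.matchAllQuery()).windowSize(50);
        return rescore.toString(); // pretty-printed JSON via toXContent above
    }
}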
diff --git a/core/src/main/java/org/elasticsearch/search/rescore/RescoreParseElement.java b/core/src/main/java/org/elasticsearch/search/rescore/RescoreParseElement.java
index 7f9f2725fb..149db6cec2 100644
--- a/core/src/main/java/org/elasticsearch/search/rescore/RescoreParseElement.java
+++ b/core/src/main/java/org/elasticsearch/search/rescore/RescoreParseElement.java
@@ -21,9 +21,12 @@ package org.elasticsearch.search.rescore;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.internal.SearchContext;
+import java.io.IOException;
+
/**
*
*/
@@ -33,14 +36,14 @@ public class RescoreParseElement implements SearchParseElement {
public void parse(XContentParser parser, SearchContext context) throws Exception {
if (parser.currentToken() == XContentParser.Token.START_ARRAY) {
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
- parseSingleRescoreContext(parser, context);
+ context.addRescore(parseSingleRescoreContext(parser, context.indexShard().getQueryShardContext()));
}
} else {
- parseSingleRescoreContext(parser, context);
+ context.addRescore(parseSingleRescoreContext(parser, context.indexShard().getQueryShardContext()));
}
}
- public void parseSingleRescoreContext(XContentParser parser, SearchContext context) throws Exception {
+ public RescoreSearchContext parseSingleRescoreContext(XContentParser parser, QueryShardContext context) throws ElasticsearchParseException, IOException {
String fieldName = null;
RescoreSearchContext rescoreContext = null;
Integer windowSize = null;
@@ -71,7 +74,7 @@ public class RescoreParseElement implements SearchParseElement {
if (windowSize != null) {
rescoreContext.setWindowSize(windowSize.intValue());
}
- context.addRescore(rescoreContext);
+ return rescoreContext;
}
}
diff --git a/core/src/main/java/org/elasticsearch/search/rescore/Rescorer.java b/core/src/main/java/org/elasticsearch/search/rescore/Rescorer.java
index 3c90289fde..e3465a4df8 100644
--- a/core/src/main/java/org/elasticsearch/search/rescore/Rescorer.java
+++ b/core/src/main/java/org/elasticsearch/search/rescore/Rescorer.java
@@ -24,6 +24,7 @@ import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.TopDocs;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@@ -68,11 +69,11 @@ public interface Rescorer {
* Parses the {@link RescoreSearchContext} for this implementation
*
* @param parser the parser to read the context from
- * @param context the current search context
+ * @param context the current shard context
* @return the parsed {@link RescoreSearchContext}
* @throws IOException if an {@link IOException} occurs while parsing the context
*/
- public RescoreSearchContext parse(XContentParser parser, SearchContext context) throws IOException;
+ public RescoreSearchContext parse(XContentParser parser, QueryShardContext context) throws IOException;
/**
* Extracts all terms needed to execute this {@link Rescorer}. This method
@@ -81,7 +82,7 @@ public interface Rescorer {
* {@link SearchType#DFS_QUERY_THEN_FETCH}
*/
public void extractTerms(SearchContext context, RescoreSearchContext rescoreContext, Set<Term> termsSet);
-
+
/*
* TODO: At this point we only have one implementation which modifies the
* TopDocs given. Future implementations might return actual results that
diff --git a/core/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java b/core/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java
new file mode 100644
index 0000000000..13fae70174
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java
@@ -0,0 +1,312 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.searchafter;
+
+import org.apache.lucene.search.FieldDoc;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.ParseFieldMatcher;
+import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.text.Text;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.FromXContentBuilder;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.index.fielddata.IndexFieldData;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * Builder for the "search_after" parameter, which lets a sorted search
+ * resume after the sort values of a previously returned hit.
+ */
+public class SearchAfterBuilder implements ToXContent, FromXContentBuilder<SearchAfterBuilder>, Writeable<SearchAfterBuilder> {
+ public static final SearchAfterBuilder PROTOTYPE = new SearchAfterBuilder();
+ public static final ParseField SEARCH_AFTER = new ParseField("search_after");
+ private static final Object[] EMPTY_SORT_VALUES = new Object[0];
+
+ private Object[] sortValues = EMPTY_SORT_VALUES;
+
+ public SearchAfterBuilder setSortValues(Object[] values) {
+ if (values == null) {
+ throw new NullPointerException("Values cannot be null.");
+ }
+ if (values.length == 0) {
+ throw new IllegalArgumentException("Values must contains at least one value.");
+ }
+ sortValues = new Object[values.length];
+ System.arraycopy(values, 0, sortValues, 0, values.length);
+ return this;
+ }
+
+ public Object[] getSortValues() {
+ return sortValues;
+ }
+
+ public static FieldDoc buildFieldDoc(Sort sort, Object[] values) {
+ if (sort == null || sort.getSort() == null || sort.getSort().length == 0) {
+ throw new IllegalArgumentException("Sort must contain at least one field.");
+ }
+
+ SortField[] sortFields = sort.getSort();
+ if (sortFields.length != values.length) {
+ throw new IllegalArgumentException(SEARCH_AFTER.getPreferredName() + " has " + values.length + " value(s) but sort has " + sort.getSort().length + ".");
+ }
+ Object[] fieldValues = new Object[sortFields.length];
+ for (int i = 0; i < sortFields.length; i++) {
+ SortField sortField = sortFields[i];
+ if (values[i] != null) {
+ fieldValues[i] = convertValueFromSortField(values[i], sortField);
+ } else {
+ fieldValues[i] = null;
+ }
+ }
+ // We set the doc id to Integer.MAX_VALUE in order to make sure that the search starts "after" the first document that is equal to the field values.
+ return new FieldDoc(Integer.MAX_VALUE, 0, fieldValues);
+ }
+
+ private static Object convertValueFromSortField(Object value, SortField sortField) {
+ if (sortField.getComparatorSource() instanceof IndexFieldData.XFieldComparatorSource) {
+ IndexFieldData.XFieldComparatorSource cmpSource = (IndexFieldData.XFieldComparatorSource) sortField.getComparatorSource();
+ return convertValueFromSortType(sortField.getField(), cmpSource.reducedType(), value);
+ }
+ return convertValueFromSortType(sortField.getField(), sortField.getType(), value);
+ }
+
+ private static Object convertValueFromSortType(String fieldName, SortField.Type sortType, Object value) {
+ try {
+ switch (sortType) {
+ case DOC:
+ if (value instanceof Number) {
+ return ((Number) value).intValue();
+ }
+ return Integer.parseInt(value.toString());
+
+ case SCORE:
+ if (value instanceof Number) {
+ return ((Number) value).floatValue();
+ }
+ return Float.parseFloat(value.toString());
+
+ case INT:
+ if (value instanceof Number) {
+ return ((Number) value).intValue();
+ }
+ return Integer.parseInt(value.toString());
+
+ case DOUBLE:
+ if (value instanceof Number) {
+ return ((Number) value).doubleValue();
+ }
+ return Double.parseDouble(value.toString());
+
+ case LONG:
+ if (value instanceof Number) {
+ return ((Number) value).longValue();
+ }
+ return Long.parseLong(value.toString());
+
+ case FLOAT:
+ if (value instanceof Number) {
+ return ((Number) value).floatValue();
+ }
+ return Float.parseFloat(value.toString());
+
+ case STRING_VAL:
+ case STRING:
+ return new BytesRef(value.toString());
+
+ default:
+ throw new IllegalArgumentException("Comparator type [" + sortType.name() + "] for field [" + fieldName + "] is not supported.");
+ }
+ } catch(NumberFormatException e) {
+ throw new IllegalArgumentException("Failed to parse " + SEARCH_AFTER.getPreferredName() + " value for field [" + fieldName + "].", e);
+ }
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ innerToXContent(builder);
+ builder.endObject();
+ return builder;
+ }
+
+ void innerToXContent(XContentBuilder builder) throws IOException {
+ builder.field(SEARCH_AFTER.getPreferredName(), sortValues);
+ }
+
+ @Override
+ public SearchAfterBuilder fromXContent(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
+ SearchAfterBuilder builder = new SearchAfterBuilder();
+ XContentParser.Token token = parser.currentToken();
+ List<Object> values = new ArrayList<>();
+ if (token == XContentParser.Token.START_ARRAY) {
+ while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+ if (token == XContentParser.Token.VALUE_NUMBER) {
+ switch (parser.numberType()) {
+ case INT:
+ values.add(parser.intValue());
+ break;
+
+ case LONG:
+ values.add(parser.longValue());
+ break;
+
+ case DOUBLE:
+ values.add(parser.doubleValue());
+ break;
+
+ case FLOAT:
+ values.add(parser.floatValue());
+ break;
+
+ default:
+ throw new AssertionError("Unknown number type []" + parser.numberType());
+ }
+ } else if (token == XContentParser.Token.VALUE_STRING) {
+ values.add(parser.text());
+ } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
+ values.add(parser.booleanValue());
+ } else if (token == XContentParser.Token.VALUE_NULL) {
+ values.add(null);
+ } else {
+ throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING + "] or [" + XContentParser.Token.VALUE_NUMBER + "] or [" + XContentParser.Token.VALUE_BOOLEAN + "] or [" + XContentParser.Token.VALUE_NULL + "] but found [" + token + "] inside search_after.", parser.getTokenLocation());
+ }
+ }
+ } else {
+ throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.START_ARRAY + "] in [" + SEARCH_AFTER.getPreferredName() + "] but found [" + token + "] inside search_after", parser.getTokenLocation());
+ }
+ builder.setSortValues(values.toArray());
+ return builder;
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeVInt(sortValues.length);
+ for (Object fieldValue : sortValues) {
+ if (fieldValue == null) {
+ out.writeByte((byte) 0);
+ } else {
+ Class<?> type = fieldValue.getClass();
+ if (type == String.class) {
+ out.writeByte((byte) 1);
+ out.writeString((String) fieldValue);
+ } else if (type == Integer.class) {
+ out.writeByte((byte) 2);
+ out.writeInt((Integer) fieldValue);
+ } else if (type == Long.class) {
+ out.writeByte((byte) 3);
+ out.writeLong((Long) fieldValue);
+ } else if (type == Float.class) {
+ out.writeByte((byte) 4);
+ out.writeFloat((Float) fieldValue);
+ } else if (type == Double.class) {
+ out.writeByte((byte) 5);
+ out.writeDouble((Double) fieldValue);
+ } else if (type == Byte.class) {
+ out.writeByte((byte) 6);
+ out.writeByte((Byte) fieldValue);
+ } else if (type == Short.class) {
+ out.writeByte((byte) 7);
+ out.writeShort((Short) fieldValue);
+ } else if (type == Boolean.class) {
+ out.writeByte((byte) 8);
+ out.writeBoolean((Boolean) fieldValue);
+ } else if (fieldValue instanceof Text) {
+ out.writeByte((byte) 9);
+ out.writeText((Text) fieldValue);
+ } else {
+ throw new IOException("Can't handle " + SEARCH_AFTER.getPreferredName() + " field value of type [" + type + "]");
+ }
+ }
+ }
+ }
+
+ @Override
+ public SearchAfterBuilder readFrom(StreamInput in) throws IOException {
+ SearchAfterBuilder builder = new SearchAfterBuilder();
+ int size = in.readVInt();
+ Object[] values = new Object[size];
+ for (int i = 0; i < size; i++) {
+ byte type = in.readByte();
+ if (type == 0) {
+ values[i] = null;
+ } else if (type == 1) {
+ values[i] = in.readString();
+ } else if (type == 2) {
+ values[i] = in.readInt();
+ } else if (type == 3) {
+ values[i] = in.readLong();
+ } else if (type == 4) {
+ values[i] = in.readFloat();
+ } else if (type == 5) {
+ values[i] = in.readDouble();
+ } else if (type == 6) {
+ values[i] = in.readByte();
+ } else if (type == 7) {
+ values[i] = in.readShort();
+ } else if (type == 8) {
+ values[i] = in.readBoolean();
+ } else if (type == 9) {
+ values[i] = in.readText();
+ } else {
+ throw new IOException("Can't match type [" + type + "]");
+ }
+ }
+ builder.setSortValues(values);
+ return builder;
+ }
+
+ @Override
+ public boolean equals(Object other) {
+        if (!(other instanceof SearchAfterBuilder)) {
+ return false;
+ }
+ return Arrays.equals(sortValues, ((SearchAfterBuilder) other).sortValues);
+ }
+
+ @Override
+ public int hashCode() {
+        return Arrays.hashCode(this.sortValues);
+ }
+
+ @Override
+ public String toString() {
+ try {
+ XContentBuilder builder = XContentFactory.jsonBuilder();
+ builder.prettyPrint();
+ toXContent(builder, EMPTY_PARAMS);
+ return builder.string();
+ } catch (Exception e) {
+ throw new ElasticsearchException("Failed to build xcontent.", e);
+ }
+ }
+}
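
The writeTo/readFrom pair above serializes each sort value as a one-byte type tag followed by the value itself, so heterogeneous arrays survive the wire format and nulls stay representable. A minimal, self-contained sketch of the same technique, with plain java.io streams standing in for StreamInput/StreamOutput and an illustrative (not Elasticsearch's) tag table:

    import java.io.*;

    final class TaggedValues {
        // tag 0 = null, 1 = String, 2 = Integer, 3 = Long
        static void write(DataOutputStream out, Object[] values) throws IOException {
            out.writeInt(values.length);
            for (Object v : values) {
                if (v == null) {
                    out.writeByte(0);
                } else if (v instanceof String) {
                    out.writeByte(1);
                    out.writeUTF((String) v);
                } else if (v instanceof Integer) {
                    out.writeByte(2);
                    out.writeInt((Integer) v);
                } else if (v instanceof Long) {
                    out.writeByte(3);
                    out.writeLong((Long) v);
                } else {
                    throw new IOException("Can't handle value of type [" + v.getClass() + "]");
                }
            }
        }

        static Object[] read(DataInputStream in) throws IOException {
            Object[] values = new Object[in.readInt()];
            for (int i = 0; i < values.length; i++) {
                byte tag = in.readByte();
                switch (tag) {
                    case 0: values[i] = null; break;
                    case 1: values[i] = in.readUTF(); break;
                    case 2: values[i] = in.readInt(); break;
                    case 3: values[i] = in.readLong(); break;
                    default: throw new IOException("Can't match type [" + tag + "]");
                }
            }
            return values;
        }
    }
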
diff --git a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java
index 36b651a222..e9a9c8df57 100644
--- a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java
+++ b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java
@@ -22,11 +22,8 @@ package org.elasticsearch.search.sort;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.script.Script;
-import org.elasticsearch.script.ScriptService.ScriptType;
import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
/**
* Script sort builder allows to sort based on a custom script expression.
@@ -35,17 +32,8 @@ public class ScriptSortBuilder extends SortBuilder {
private Script script;
- @Deprecated
- private String scriptString;
-
private final String type;
- @Deprecated
- private String lang;
-
- @Deprecated
- private Map<String, Object> params;
-
private SortOrder order;
private String sortMode;
@@ -66,66 +54,6 @@ public class ScriptSortBuilder extends SortBuilder {
}
/**
- * Constructs a script sort builder with the script and the type.
- *
- * @param script
- * The script to use.
- * @param type
- * The type, can either be "string" or "number".
- *
- * @deprecated Use {@link #ScriptSortBuilder(Script, String)} instead.
- */
- @Deprecated
- public ScriptSortBuilder(String script, String type) {
- this.scriptString = script;
- this.type = type;
- }
-
- /**
- * Adds a parameter to the script.
- *
- * @param name
- * The name of the parameter.
- * @param value
- * The value of the parameter.
- *
- * @deprecated Use {@link #ScriptSortBuilder(Script, String)} instead.
- */
- @Deprecated
- public ScriptSortBuilder param(String name, Object value) {
- if (params == null) {
- params = new HashMap<>();
- }
- params.put(name, value);
- return this;
- }
-
- /**
- * Sets parameters for the script.
- *
- * @param params
- * The script parameters
- *
- * @deprecated Use {@link #ScriptSortBuilder(Script, String)} instead.
- */
- @Deprecated
- public ScriptSortBuilder setParams(Map<String, Object> params) {
- this.params = params;
- return this;
- }
-
- /**
- * The language of the script.
- *
- * @deprecated Use {@link #ScriptSortBuilder(Script, String)} instead.
- */
- @Deprecated
- public ScriptSortBuilder lang(String lang) {
- this.lang = lang;
- return this;
- }
-
- /**
* Sets the sort order.
*/
@Override
@@ -172,12 +100,7 @@ public class ScriptSortBuilder extends SortBuilder {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params builderParams) throws IOException {
builder.startObject("_script");
- if (script == null) {
-
- builder.field("script", new Script(scriptString, ScriptType.INLINE, lang, params));
- } else {
- builder.field("script", script);
- }
+ builder.field("script", script);
builder.field("type", type);
if (order == SortOrder.DESC) {
builder.field("reverse", true);
@@ -189,7 +112,7 @@ public class ScriptSortBuilder extends SortBuilder {
builder.field("nested_path", nestedPath);
}
if (nestedFilter != null) {
- builder.field("nested_filter", nestedFilter, params);
+ builder.field("nested_filter", nestedFilter, builderParams);
}
builder.endObject();
return builder;
diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortBuilders.java b/core/src/main/java/org/elasticsearch/search/sort/SortBuilders.java
index 01134ca264..9a843c43f7 100644
--- a/core/src/main/java/org/elasticsearch/search/sort/SortBuilders.java
+++ b/core/src/main/java/org/elasticsearch/search/sort/SortBuilders.java
@@ -55,20 +55,6 @@ public class SortBuilders {
}
/**
- * Constructs a new script based sort.
- *
- * @param script
- * The script to use.
- * @param type
- * The type, can either be "string" or "number".
- * @deprecated Use {@link #scriptSort(Script, String)} instead.
- */
- @Deprecated
- public static ScriptSortBuilder scriptSort(String script, String type) {
- return new ScriptSortBuilder(script, type);
- }
-
- /**
* A geo distance based sort.
*
* @param fieldName The geo point like field name.
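
With the deprecated String-based overload removed, callers build a Script object first and pass it to the remaining factory method. A usage sketch; the single-argument Script constructor and the inline script body are assumptions for illustration, not taken from this diff:

    ScriptSortBuilder sort = SortBuilders.scriptSort(new Script("doc['price'].value * 0.9"), "number");
    sort.order(SortOrder.DESC);
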
diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortOrder.java b/core/src/main/java/org/elasticsearch/search/sort/SortOrder.java
index cb2bca28fb..001924d1bd 100644
--- a/core/src/main/java/org/elasticsearch/search/sort/SortOrder.java
+++ b/core/src/main/java/org/elasticsearch/search/sort/SortOrder.java
@@ -19,12 +19,19 @@
package org.elasticsearch.search.sort;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
+
+import java.io.IOException;
+import java.util.Locale;
+
/**
* A sorting order.
*
*
*/
-public enum SortOrder {
+public enum SortOrder implements Writeable<SortOrder> {
/**
* Ascending order.
*/
@@ -42,5 +49,30 @@ public enum SortOrder {
public String toString() {
return "desc";
}
+ };
+
+ public static final SortOrder DEFAULT = DESC;
+ private static final SortOrder PROTOTYPE = DEFAULT;
+
+ @Override
+ public SortOrder readFrom(StreamInput in) throws IOException {
+ int ordinal = in.readVInt();
+ if (ordinal < 0 || ordinal >= values().length) {
+ throw new IOException("Unknown SortOrder ordinal [" + ordinal + "]");
+ }
+ return values()[ordinal];
+ }
+
+ public static SortOrder readOrderFrom(StreamInput in) throws IOException {
+ return PROTOTYPE.readFrom(in);
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeVInt(this.ordinal());
+ }
+
+ public static SortOrder fromString(String op) {
+ return valueOf(op.toUpperCase(Locale.ROOT));
}
}
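
SortOrder now implements Writeable and travels over the wire as its ordinal; the range check in readFrom turns a corrupt or out-of-range ordinal into a descriptive IOException instead of an ArrayIndexOutOfBoundsException. The same guard pattern in self-contained form (DataInput/DataOutput stand in for StreamInput/StreamOutput, and a single byte stands in for the vInt):

    import java.io.*;

    enum Order { ASC, DESC }

    final class OrderCodec {
        static void write(DataOutputStream out, Order order) throws IOException {
            out.writeByte(order.ordinal());
        }

        static Order read(DataInputStream in) throws IOException {
            int ordinal = in.readByte();
            if (ordinal < 0 || ordinal >= Order.values().length) {
                throw new IOException("Unknown Order ordinal [" + ordinal + "]");
            }
            return Order.values()[ordinal];
        }
    }
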
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestContextParser.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestContextParser.java
index a8050d1aca..a7aa3fd60b 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestContextParser.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestContextParser.java
@@ -18,7 +18,6 @@
*/
package org.elasticsearch.search.suggest;
-import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
@@ -26,6 +25,6 @@ import org.elasticsearch.index.mapper.MapperService;
import java.io.IOException;
public interface SuggestContextParser {
- SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexFieldDataService indexFieldDataService, HasContextAndHeaders headersContext) throws IOException;
+ SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexFieldDataService indexFieldDataService) throws IOException;
-} \ No newline at end of file
+}
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java
index 650eb76b1c..a8a4e9ec26 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java
@@ -19,7 +19,6 @@
package org.elasticsearch.search.suggest;
import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
@@ -46,12 +45,12 @@ public final class SuggestParseElement implements SearchParseElement {
@Override
public void parse(XContentParser parser, SearchContext context) throws Exception {
SuggestionSearchContext suggestionSearchContext = parseInternal(parser, context.mapperService(), context.fieldData(),
- context.shardTarget().index(), context.shardTarget().shardId(), context);
+ context.shardTarget().index(), context.shardTarget().shardId());
context.suggest(suggestionSearchContext);
}
public SuggestionSearchContext parseInternal(XContentParser parser, MapperService mapperService, IndexFieldDataService fieldDataService,
- String index, int shardId, HasContextAndHeaders headersContext) throws IOException {
+ String index, int shardId) throws IOException {
SuggestionSearchContext suggestionSearchContext = new SuggestionSearchContext();
BytesRef globalText = null;
@@ -96,7 +95,7 @@ public final class SuggestParseElement implements SearchParseElement {
throw new IllegalArgumentException("Suggester[" + fieldName + "] not supported");
}
final SuggestContextParser contextParser = suggesters.get(fieldName).getContextParser();
- suggestionContext = contextParser.parse(parser, mapperService, fieldDataService, headersContext);
+ suggestionContext = contextParser.parse(parser, mapperService, fieldDataService);
}
}
if (suggestionContext != null) {
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java
index 62689e6583..2509f792ec 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java
@@ -210,15 +210,20 @@ public final class SuggestUtils {
public static final ParseField MIN_WORD_LENGTH = new ParseField("min_word_length", "min_word_len");
public static final ParseField MIN_DOC_FREQ = new ParseField("min_doc_freq");
public static final ParseField SHARD_SIZE = new ParseField("shard_size");
+ public static final ParseField ANALYZER = new ParseField("analyzer");
+ public static final ParseField FIELD = new ParseField("field");
+ public static final ParseField SIZE = new ParseField("size");
+ public static final ParseField SORT = new ParseField("sort");
+ public static final ParseField ACCURACY = new ParseField("accuracy");
}
public static boolean parseDirectSpellcheckerSettings(XContentParser parser, String fieldName,
DirectSpellcheckerSettings suggestion, ParseFieldMatcher parseFieldMatcher) throws IOException {
- if ("accuracy".equals(fieldName)) {
+ if (parseFieldMatcher.match(fieldName, Fields.ACCURACY)) {
suggestion.accuracy(parser.floatValue());
} else if (parseFieldMatcher.match(fieldName, Fields.SUGGEST_MODE)) {
suggestion.suggestMode(SuggestUtils.resolveSuggestMode(parser.text()));
- } else if ("sort".equals(fieldName)) {
+ } else if (parseFieldMatcher.match(fieldName, Fields.SORT)) {
suggestion.sort(SuggestUtils.resolveSort(parser.text()));
} else if (parseFieldMatcher.match(fieldName, Fields.STRING_DISTANCE)) {
suggestion.stringDistance(SuggestUtils.resolveDistance(parser.text()));
@@ -246,16 +251,16 @@ public final class SuggestUtils {
public static boolean parseSuggestContext(XContentParser parser, MapperService mapperService, String fieldName,
SuggestionSearchContext.SuggestionContext suggestion, ParseFieldMatcher parseFieldMatcher) throws IOException {
- if ("analyzer".equals(fieldName)) {
+ if (parseFieldMatcher.match(fieldName, Fields.ANALYZER)) {
String analyzerName = parser.text();
Analyzer analyzer = mapperService.analysisService().analyzer(analyzerName);
if (analyzer == null) {
throw new IllegalArgumentException("Analyzer [" + analyzerName + "] doesn't exists");
}
suggestion.setAnalyzer(analyzer);
- } else if ("field".equals(fieldName)) {
+ } else if (parseFieldMatcher.match(fieldName, Fields.FIELD)) {
suggestion.setField(parser.text());
- } else if ("size".equals(fieldName)) {
+ } else if (parseFieldMatcher.match(fieldName, Fields.SIZE)) {
suggestion.setSize(parser.intValue());
} else if (parseFieldMatcher.match(fieldName, Fields.SHARD_SIZE)) {
suggestion.setShardSize(parser.intValue());
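
The hunks above replace raw string comparison of field names with ParseFieldMatcher.match against ParseField constants, which centralizes each field's preferred name and any deprecated aliases (compare "min_word_length"/"min_word_len" above). A stripped-down sketch of that idea, not the real ParseField implementation:

    // Hypothetical re-implementation of the alias matching behind ParseField.
    final class FieldName {
        private final String preferred;
        private final String[] deprecatedNames;

        FieldName(String preferred, String... deprecatedNames) {
            this.preferred = preferred;
            this.deprecatedNames = deprecatedNames;
        }

        boolean match(String candidate) {
            if (preferred.equals(candidate)) {
                return true;
            }
            for (String name : deprecatedNames) {
                if (name.equals(candidate)) {
                    return true; // a strict matcher could warn or reject here
                }
            }
            return false;
        }
    }

    // e.g. new FieldName("min_word_length", "min_word_len").match("min_word_len") == true
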
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java
index a2e5f743c5..702b03f359 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java
@@ -20,7 +20,6 @@ package org.elasticsearch.search.suggest.completion;
import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.unit.Fuzziness;
@@ -34,7 +33,10 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
import org.elasticsearch.index.query.RegexpFlag;
import org.elasticsearch.search.suggest.SuggestContextParser;
+import org.elasticsearch.search.suggest.SuggestUtils.Fields;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
+import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder.FuzzyOptionsBuilder;
+import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder.RegexOptionsBuilder;
import org.elasticsearch.search.suggest.completion.context.ContextMapping;
import org.elasticsearch.search.suggest.completion.context.ContextMappings;
@@ -74,29 +76,29 @@ import java.util.Map;
*/
public class CompletionSuggestParser implements SuggestContextParser {
- private static ObjectParser<CompletionSuggestionContext, ContextAndSuggest> TLP_PARSER = new ObjectParser<>("completion", null);
- private static ObjectParser<CompletionSuggestionBuilder.RegexOptionsBuilder, ContextAndSuggest> REGEXP_PARSER = new ObjectParser<>("regexp", CompletionSuggestionBuilder.RegexOptionsBuilder::new);
- private static ObjectParser<CompletionSuggestionBuilder.FuzzyOptionsBuilder, ContextAndSuggest> FUZZY_PARSER = new ObjectParser<>("fuzzy", CompletionSuggestionBuilder.FuzzyOptionsBuilder::new);
+ private static ObjectParser<CompletionSuggestionContext, ContextAndSuggest> TLP_PARSER = new ObjectParser<>(CompletionSuggestionBuilder.SUGGESTION_NAME, null);
+ private static ObjectParser<CompletionSuggestionBuilder.RegexOptionsBuilder, ContextAndSuggest> REGEXP_PARSER = new ObjectParser<>(RegexOptionsBuilder.REGEX_OPTIONS.getPreferredName(), CompletionSuggestionBuilder.RegexOptionsBuilder::new);
+ private static ObjectParser<CompletionSuggestionBuilder.FuzzyOptionsBuilder, ContextAndSuggest> FUZZY_PARSER = new ObjectParser<>(FuzzyOptionsBuilder.FUZZY_OPTIONS.getPreferredName(), CompletionSuggestionBuilder.FuzzyOptionsBuilder::new);
static {
- FUZZY_PARSER.declareInt(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setFuzzyMinLength, new ParseField("min_length"));
- FUZZY_PARSER.declareInt(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setMaxDeterminizedStates, new ParseField("max_determinized_states"));
- FUZZY_PARSER.declareBoolean(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setUnicodeAware, new ParseField("unicode_aware"));
- FUZZY_PARSER.declareInt(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setFuzzyPrefixLength, new ParseField("prefix_length"));
- FUZZY_PARSER.declareBoolean(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setTranspositions, new ParseField("transpositions"));
+ FUZZY_PARSER.declareInt(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setFuzzyMinLength, FuzzyOptionsBuilder.MIN_LENGTH_FIELD);
+ FUZZY_PARSER.declareInt(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setMaxDeterminizedStates, FuzzyOptionsBuilder.MAX_DETERMINIZED_STATES_FIELD);
+ FUZZY_PARSER.declareBoolean(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setUnicodeAware, FuzzyOptionsBuilder.UNICODE_AWARE_FIELD);
+ FUZZY_PARSER.declareInt(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setFuzzyPrefixLength, FuzzyOptionsBuilder.PREFIX_LENGTH_FIELD);
+ FUZZY_PARSER.declareBoolean(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setTranspositions, FuzzyOptionsBuilder.TRANSPOSITION_FIELD);
FUZZY_PARSER.declareValue((a, b) -> {
try {
a.setFuzziness(Fuzziness.parse(b).asDistance());
} catch (IOException e) {
throw new ElasticsearchException(e);
}
- }, new ParseField("fuzziness"));
- REGEXP_PARSER.declareInt(CompletionSuggestionBuilder.RegexOptionsBuilder::setMaxDeterminizedStates, new ParseField("max_determinized_states"));
- REGEXP_PARSER.declareStringOrNull(CompletionSuggestionBuilder.RegexOptionsBuilder::setFlags, new ParseField("flags"));
+ }, Fuzziness.FIELD);
+ REGEXP_PARSER.declareInt(CompletionSuggestionBuilder.RegexOptionsBuilder::setMaxDeterminizedStates, RegexOptionsBuilder.MAX_DETERMINIZED_STATES);
+ REGEXP_PARSER.declareStringOrNull(CompletionSuggestionBuilder.RegexOptionsBuilder::setFlags, RegexOptionsBuilder.FLAGS_VALUE);
- TLP_PARSER.declareStringArray(CompletionSuggestionContext::setPayloadFields, new ParseField("payload"));
- TLP_PARSER.declareObjectOrDefault(CompletionSuggestionContext::setFuzzyOptionsBuilder, FUZZY_PARSER, CompletionSuggestionBuilder.FuzzyOptionsBuilder::new, new ParseField("fuzzy"));
- TLP_PARSER.declareObject(CompletionSuggestionContext::setRegexOptionsBuilder, REGEXP_PARSER, new ParseField("regexp"));
- TLP_PARSER.declareString(SuggestionSearchContext.SuggestionContext::setField, new ParseField("field"));
+ TLP_PARSER.declareStringArray(CompletionSuggestionContext::setPayloadFields, CompletionSuggestionBuilder.PAYLOAD_FIELD);
+ TLP_PARSER.declareObjectOrDefault(CompletionSuggestionContext::setFuzzyOptionsBuilder, FUZZY_PARSER, CompletionSuggestionBuilder.FuzzyOptionsBuilder::new, FuzzyOptionsBuilder.FUZZY_OPTIONS);
+ TLP_PARSER.declareObject(CompletionSuggestionContext::setRegexOptionsBuilder, REGEXP_PARSER, RegexOptionsBuilder.REGEX_OPTIONS);
+ TLP_PARSER.declareString(SuggestionSearchContext.SuggestionContext::setField, Fields.FIELD);
TLP_PARSER.declareField((p, v, c) -> {
String analyzerName = p.text();
Analyzer analyzer = c.mapperService.analysisService().analyzer(analyzerName);
@@ -104,10 +106,9 @@ public class CompletionSuggestParser implements SuggestContextParser {
throw new IllegalArgumentException("Analyzer [" + analyzerName + "] doesn't exists");
}
v.setAnalyzer(analyzer);
- }, new ParseField("analyzer"), ObjectParser.ValueType.STRING);
- TLP_PARSER.declareString(SuggestionSearchContext.SuggestionContext::setField, new ParseField("analyzer"));
- TLP_PARSER.declareInt(SuggestionSearchContext.SuggestionContext::setSize, new ParseField("size"));
- TLP_PARSER.declareInt(SuggestionSearchContext.SuggestionContext::setShardSize, new ParseField("size"));
+ }, Fields.ANALYZER, ObjectParser.ValueType.STRING);
+ TLP_PARSER.declareInt(SuggestionSearchContext.SuggestionContext::setSize, Fields.SIZE);
+ TLP_PARSER.declareInt(SuggestionSearchContext.SuggestionContext::setShardSize, Fields.SHARD_SIZE);
TLP_PARSER.declareField((p, v, c) -> {
// Copy the current structure. We will parse, once the mapping is provided
XContentBuilder builder = XContentFactory.contentBuilder(p.contentType());
@@ -115,7 +116,7 @@ public class CompletionSuggestParser implements SuggestContextParser {
BytesReference bytes = builder.bytes();
c.contextParser = XContentFactory.xContent(bytes).createParser(bytes);
p.skipChildren();
- }, new ParseField("contexts", "context"), ObjectParser.ValueType.OBJECT); // context is deprecated
+ }, CompletionSuggestionBuilder.CONTEXTS_FIELD, ObjectParser.ValueType.OBJECT); // context is deprecated
}
private static class ContextAndSuggest {
@@ -134,8 +135,7 @@ public class CompletionSuggestParser implements SuggestContextParser {
}
@Override
- public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexFieldDataService fieldDataService,
- HasContextAndHeaders headersContext) throws IOException {
+ public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexFieldDataService fieldDataService) throws IOException {
final CompletionSuggestionContext suggestion = new CompletionSuggestionContext(completionSuggester, mapperService, fieldDataService);
final ContextAndSuggest contextAndSuggest = new ContextAndSuggest(mapperService);
TLP_PARSER.parse(parser, suggestion, contextAndSuggest);
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java
index 100e701c03..9cf78ea667 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java
@@ -21,6 +21,7 @@ package org.elasticsearch.search.suggest.completion;
import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.RegExp;
+import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -45,19 +46,30 @@ import java.util.Set;
* indexing.
*/
public class CompletionSuggestionBuilder extends SuggestBuilder.SuggestionBuilder<CompletionSuggestionBuilder> {
+
+    static final String SUGGESTION_NAME = "completion";
+ static final ParseField PAYLOAD_FIELD = new ParseField("payload");
+ static final ParseField CONTEXTS_FIELD = new ParseField("contexts", "context");
private FuzzyOptionsBuilder fuzzyOptionsBuilder;
private RegexOptionsBuilder regexOptionsBuilder;
private final Map<String, List<ToXContent>> queryContexts = new HashMap<>();
private final Set<String> payloadFields = new HashSet<>();
public CompletionSuggestionBuilder(String name) {
- super(name, "completion");
+ super(name, SUGGESTION_NAME);
}
/**
* Options for fuzzy queries
*/
public static class FuzzyOptionsBuilder implements ToXContent {
+ static final ParseField FUZZY_OPTIONS = new ParseField("fuzzy");
+ static final ParseField TRANSPOSITION_FIELD = new ParseField("transpositions");
+ static final ParseField MIN_LENGTH_FIELD = new ParseField("min_length");
+ static final ParseField PREFIX_LENGTH_FIELD = new ParseField("prefix_length");
+ static final ParseField UNICODE_AWARE_FIELD = new ParseField("unicode_aware");
+ static final ParseField MAX_DETERMINIZED_STATES_FIELD = new ParseField("max_determinized_states");
+
private int editDistance = FuzzyCompletionQuery.DEFAULT_MAX_EDITS;
private boolean transpositions = FuzzyCompletionQuery.DEFAULT_TRANSPOSITIONS;
private int fuzzyMinLength = FuzzyCompletionQuery.DEFAULT_MIN_FUZZY_LENGTH;
@@ -179,13 +191,13 @@ public class CompletionSuggestionBuilder extends SuggestBuilder.SuggestionBuilde
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
- builder.startObject("fuzzy");
+ builder.startObject(FUZZY_OPTIONS.getPreferredName());
builder.field(Fuzziness.FIELD.getPreferredName(), editDistance);
- builder.field("transpositions", transpositions);
- builder.field("min_length", fuzzyMinLength);
- builder.field("prefix_length", fuzzyPrefixLength);
- builder.field("unicode_aware", unicodeAware);
- builder.field("max_determinized_states", maxDeterminizedStates);
+ builder.field(TRANSPOSITION_FIELD.getPreferredName(), transpositions);
+ builder.field(MIN_LENGTH_FIELD.getPreferredName(), fuzzyMinLength);
+ builder.field(PREFIX_LENGTH_FIELD.getPreferredName(), fuzzyPrefixLength);
+ builder.field(UNICODE_AWARE_FIELD.getPreferredName(), unicodeAware);
+ builder.field(MAX_DETERMINIZED_STATES_FIELD.getPreferredName(), maxDeterminizedStates);
builder.endObject();
return builder;
}
@@ -195,6 +207,9 @@ public class CompletionSuggestionBuilder extends SuggestBuilder.SuggestionBuilde
* Options for regular expression queries
*/
public static class RegexOptionsBuilder implements ToXContent {
+ static final ParseField REGEX_OPTIONS = new ParseField("regex");
+ static final ParseField FLAGS_VALUE = new ParseField("flags", "flags_value");
+ static final ParseField MAX_DETERMINIZED_STATES = new ParseField("max_determinized_states");
private int flagsValue = RegExp.ALL;
private int maxDeterminizedStates = Operations.DEFAULT_MAX_DETERMINIZED_STATES;
@@ -228,9 +243,9 @@ public class CompletionSuggestionBuilder extends SuggestBuilder.SuggestionBuilde
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
- builder.startObject("regex");
- builder.field("flags_value", flagsValue);
- builder.field("max_determinized_states", maxDeterminizedStates);
+ builder.startObject(REGEX_OPTIONS.getPreferredName());
+ builder.field(FLAGS_VALUE.getPreferredName(), flagsValue);
+ builder.field(MAX_DETERMINIZED_STATES.getPreferredName(), maxDeterminizedStates);
builder.endObject();
return builder;
}
@@ -322,7 +337,7 @@ public class CompletionSuggestionBuilder extends SuggestBuilder.SuggestionBuilde
@Override
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
if (payloadFields != null) {
- builder.startArray("payload");
+ builder.startArray(PAYLOAD_FIELD.getPreferredName());
for (String field : payloadFields) {
builder.value(field);
}
@@ -335,7 +350,7 @@ public class CompletionSuggestionBuilder extends SuggestBuilder.SuggestionBuilde
regexOptionsBuilder.toXContent(builder, params);
}
if (queryContexts.isEmpty() == false) {
- builder.startObject("contexts");
+ builder.startObject(CONTEXTS_FIELD.getPreferredName());
for (Map.Entry<String, List<ToXContent>> entry : this.queryContexts.entrySet()) {
builder.startArray(entry.getKey());
for (ToXContent queryContext : entry.getValue()) {
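
The ParseField constants introduced here are shared between the parser declarations and toXContent, so the accepted and emitted field names can no longer drift apart. Based on the setters referenced by FUZZY_PARSER above, configuring fuzzy options might look like the following; whether these setters chain is not shown in this hunk, so they are called one statement at a time:

    CompletionSuggestionBuilder.FuzzyOptionsBuilder fuzzyOptions =
            new CompletionSuggestionBuilder.FuzzyOptionsBuilder();
    fuzzyOptions.setFuzzyMinLength(4);    // only apply fuzziness to inputs of at least 4 chars
    fuzzyOptions.setFuzzyPrefixLength(2); // require the first 2 chars to match exactly
    fuzzyOptions.setTranspositions(true); // count a swap of adjacent chars as a single edit
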
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java
new file mode 100644
index 0000000000..90ec2845b8
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java
@@ -0,0 +1,493 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.suggest.phrase;
+
+import org.apache.lucene.util.automaton.LevenshteinAutomata;
+import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.query.QueryParseContext;
+import org.elasticsearch.index.query.QueryShardContext;
+import org.elasticsearch.search.suggest.SuggestUtils;
+import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.CandidateGenerator;
+
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.Objects;
+import java.util.Set;
+import java.util.function.Consumer;
+
+public final class DirectCandidateGeneratorBuilder
+ implements Writeable<DirectCandidateGeneratorBuilder>, CandidateGenerator {
+
+ private static final String TYPE = "direct_generator";
+ static final DirectCandidateGeneratorBuilder PROTOTYPE = new DirectCandidateGeneratorBuilder("_na_");
+
+ static final ParseField DIRECT_GENERATOR_FIELD = new ParseField(TYPE);
+ static final ParseField FIELDNAME_FIELD = new ParseField("field");
+ static final ParseField PREFILTER_FIELD = new ParseField("pre_filter");
+ static final ParseField POSTFILTER_FIELD = new ParseField("post_filter");
+ static final ParseField SUGGESTMODE_FIELD = new ParseField("suggest_mode");
+ static final ParseField MIN_DOC_FREQ_FIELD = new ParseField("min_doc_freq");
+ static final ParseField ACCURACY_FIELD = new ParseField("accuracy");
+ static final ParseField SIZE_FIELD = new ParseField("size");
+ static final ParseField SORT_FIELD = new ParseField("sort");
+ static final ParseField STRING_DISTANCE_FIELD = new ParseField("string_distance");
+ static final ParseField MAX_EDITS_FIELD = new ParseField("max_edits");
+ static final ParseField MAX_INSPECTIONS_FIELD = new ParseField("max_inspections");
+ static final ParseField MAX_TERM_FREQ_FIELD = new ParseField("max_term_freq");
+ static final ParseField PREFIX_LENGTH_FIELD = new ParseField("prefix_length");
+ static final ParseField MIN_WORD_LENGTH_FIELD = new ParseField("min_word_length");
+
+ private final String field;
+ private String preFilter;
+ private String postFilter;
+ private String suggestMode;
+ private Float accuracy;
+ private Integer size;
+ private String sort;
+ private String stringDistance;
+ private Integer maxEdits;
+ private Integer maxInspections;
+ private Float maxTermFreq;
+ private Integer prefixLength;
+ private Integer minWordLength;
+ private Float minDocFreq;
+
+ /**
+     * @param field the field to fetch the candidate suggestions from.
+ */
+ public DirectCandidateGeneratorBuilder(String field) {
+ this.field = field;
+ }
+
+ /**
+     * Quasi copy-constructor that takes all values from the generator
+     * passed in, but uses a different field name. Needed by the parser
+     * because we need to buffer the field name but read all other
+     * properties into a temporary object.
+ */
+ private static DirectCandidateGeneratorBuilder replaceField(String field, DirectCandidateGeneratorBuilder other) {
+ DirectCandidateGeneratorBuilder generator = new DirectCandidateGeneratorBuilder(field);
+ generator.preFilter = other.preFilter;
+ generator.postFilter = other.postFilter;
+ generator.suggestMode = other.suggestMode;
+ generator.accuracy = other.accuracy;
+ generator.size = other.size;
+ generator.sort = other.sort;
+ generator.stringDistance = other.stringDistance;
+ generator.maxEdits = other.maxEdits;
+ generator.maxInspections = other.maxInspections;
+ generator.maxTermFreq = other.maxTermFreq;
+ generator.prefixLength = other.prefixLength;
+ generator.minWordLength = other.minWordLength;
+ generator.minDocFreq = other.minDocFreq;
+ return generator;
+ }
+
+ /**
+     * The global suggest mode controls which suggested terms are included,
+     * i.e. for which suggest text tokens terms should be suggested.
+ * Three possible values can be specified:
+ * <ol>
+ * <li><code>missing</code> - Only suggest terms in the suggest text
+ * that aren't in the index. This is the default.
+ * <li><code>popular</code> - Only suggest terms that occur in more docs
+     * than the original suggest text term.
+ * <li><code>always</code> - Suggest any matching suggest terms based on
+ * tokens in the suggest text.
+ * </ol>
+ */
+ public DirectCandidateGeneratorBuilder suggestMode(String suggestMode) {
+ this.suggestMode = suggestMode;
+ return this;
+ }
+
+ /**
+     * Sets how similar the suggested terms at least need to be to the
+     * original suggest text tokens. A value between 0 and 1 can be
+ * specified. This value will be compared to the string distance result
+ * of each candidate spelling correction.
+ * <p>
+ * Default is <tt>0.5</tt>
+ */
+ public DirectCandidateGeneratorBuilder accuracy(float accuracy) {
+ this.accuracy = accuracy;
+ return this;
+ }
+
+ /**
+ * Sets the maximum suggestions to be returned per suggest text term.
+ */
+ public DirectCandidateGeneratorBuilder size(int size) {
+ if (size <= 0) {
+ throw new IllegalArgumentException("Size must be positive");
+ }
+ this.size = size;
+ return this;
+ }
+
+ /**
+ * Sets how to sort the suggest terms per suggest text token. Two
+ * possible values:
+ * <ol>
+ * <li><code>score</code> - Sort should first be based on score, then
+ * document frequency and then the term itself.
+ * <li><code>frequency</code> - Sort should first be based on document
+ * frequency, then score and then the term itself.
+ * </ol>
+ * <p>
+ * What the score is depends on the suggester being used.
+ */
+ public DirectCandidateGeneratorBuilder sort(String sort) {
+ this.sort = sort;
+ return this;
+ }
+
+ /**
+ * Sets what string distance implementation to use for comparing how
+     * similar suggested terms are. Five possible values can be specified:
+ * <ol>
+ * <li><code>internal</code> - This is the default and is based on
+ * <code>damerau_levenshtein</code>, but highly optimized for comparing
+ * string distance for terms inside the index.
+ * <li><code>damerau_levenshtein</code> - String distance algorithm
+     * based on the Damerau-Levenshtein algorithm.
+     * <li><code>levenstein</code> - String distance algorithm based on
+     * the Levenshtein edit distance algorithm.
+ * <li><code>jarowinkler</code> - String distance algorithm based on
+ * Jaro-Winkler algorithm.
+ * <li><code>ngram</code> - String distance algorithm based on character
+ * n-grams.
+ * </ol>
+ */
+ public DirectCandidateGeneratorBuilder stringDistance(String stringDistance) {
+ this.stringDistance = stringDistance;
+ return this;
+ }
+
+ /**
+ * Sets the maximum edit distance candidate suggestions can have in
+ * order to be considered as a suggestion. Can only be a value between 1
+     * and 2. Any other value results in a bad request error being thrown.
+ * Defaults to <tt>2</tt>.
+ */
+ public DirectCandidateGeneratorBuilder maxEdits(Integer maxEdits) {
+ if (maxEdits < 1 || maxEdits > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE) {
+ throw new IllegalArgumentException("Illegal max_edits value " + maxEdits);
+ }
+ this.maxEdits = maxEdits;
+ return this;
+ }
+
+ /**
+     * A factor that is multiplied with the size in order to inspect
+ * more candidate suggestions. Can improve accuracy at the cost of
+ * performance. Defaults to <tt>5</tt>.
+ */
+ public DirectCandidateGeneratorBuilder maxInspections(Integer maxInspections) {
+ this.maxInspections = maxInspections;
+ return this;
+ }
+
+ /**
+     * Sets a maximum threshold in number of documents in which a suggest
+     * text token can exist in order to be corrected. Can be a relative
+     * percentage number (e.g. 0.4) or an absolute number to represent
+     * document frequencies. If a value higher than 1 is specified then it
+     * cannot be fractional. Defaults to <tt>0.01</tt>.
+     * <p>
+     * This can be used to exclude high frequency terms from being
+     * suggested. High frequency terms are usually spelled correctly; on
+     * top of that, excluding them also improves the suggest performance.
+ */
+ public DirectCandidateGeneratorBuilder maxTermFreq(float maxTermFreq) {
+ this.maxTermFreq = maxTermFreq;
+ return this;
+ }
+
+ /**
+     * Sets the minimal number of prefix characters that must match in order
+     * to be a candidate suggestion. Defaults to 1. Increasing this number
+ * improves suggest performance. Usually misspellings don't occur in the
+ * beginning of terms.
+ */
+ public DirectCandidateGeneratorBuilder prefixLength(int prefixLength) {
+ this.prefixLength = prefixLength;
+ return this;
+ }
+
+ /**
+ * The minimum length a suggest text term must have in order to be
+ * corrected. Defaults to <tt>4</tt>.
+ */
+ public DirectCandidateGeneratorBuilder minWordLength(int minWordLength) {
+ this.minWordLength = minWordLength;
+ return this;
+ }
+
+ /**
+     * Sets a minimal threshold in number of documents a suggested term
+     * should appear in. This can be specified as an absolute number or as a
+     * relative percentage of the number of documents. This can improve
+     * quality by only suggesting high frequency terms. Defaults to
+     * <tt>0</tt>, which means it is not enabled. If a value higher than 1
+     * is specified then the number cannot be fractional.
+ */
+ public DirectCandidateGeneratorBuilder minDocFreq(float minDocFreq) {
+ this.minDocFreq = minDocFreq;
+ return this;
+ }
+
+ /**
+ * Sets a filter (analyzer) that is applied to each of the tokens passed to this candidate generator.
+ * This filter is applied to the original token before candidates are generated.
+ */
+ public DirectCandidateGeneratorBuilder preFilter(String preFilter) {
+ this.preFilter = preFilter;
+ return this;
+ }
+
+ /**
+ * Sets a filter (analyzer) that is applied to each of the generated tokens
+ * before they are passed to the actual phrase scorer.
+ */
+ public DirectCandidateGeneratorBuilder postFilter(String postFilter) {
+ this.postFilter = postFilter;
+ return this;
+ }
+
+ /**
+ * gets the type identifier of this {@link CandidateGenerator}
+ */
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ outputFieldIfNotNull(field, FIELDNAME_FIELD, builder);
+ outputFieldIfNotNull(accuracy, ACCURACY_FIELD, builder);
+ outputFieldIfNotNull(maxEdits, MAX_EDITS_FIELD, builder);
+ outputFieldIfNotNull(maxInspections, MAX_INSPECTIONS_FIELD, builder);
+ outputFieldIfNotNull(maxTermFreq, MAX_TERM_FREQ_FIELD, builder);
+ outputFieldIfNotNull(minWordLength, MIN_WORD_LENGTH_FIELD, builder);
+ outputFieldIfNotNull(minDocFreq, MIN_DOC_FREQ_FIELD, builder);
+ outputFieldIfNotNull(preFilter, PREFILTER_FIELD, builder);
+ outputFieldIfNotNull(prefixLength, PREFIX_LENGTH_FIELD, builder);
+ outputFieldIfNotNull(postFilter, POSTFILTER_FIELD, builder);
+ outputFieldIfNotNull(suggestMode, SUGGESTMODE_FIELD, builder);
+ outputFieldIfNotNull(size, SIZE_FIELD, builder);
+ outputFieldIfNotNull(sort, SORT_FIELD, builder);
+ outputFieldIfNotNull(stringDistance, STRING_DISTANCE_FIELD, builder);
+ builder.endObject();
+ return builder;
+ }
+
+ private static <T> void outputFieldIfNotNull(T value, ParseField field, XContentBuilder builder) throws IOException {
+ if (value != null) {
+ builder.field(field.getPreferredName(), value);
+ }
+ }
+
+    private static final ObjectParser<Tuple<Set<String>, DirectCandidateGeneratorBuilder>, QueryParseContext> PARSER = new ObjectParser<>(TYPE);
+
+ static {
+ PARSER.declareString((tp, s) -> tp.v1().add(s), FIELDNAME_FIELD);
+ PARSER.declareString((tp, s) -> tp.v2().preFilter(s), PREFILTER_FIELD);
+ PARSER.declareString((tp, s) -> tp.v2().postFilter(s), POSTFILTER_FIELD);
+ PARSER.declareString((tp, s) -> tp.v2().suggestMode(s), SUGGESTMODE_FIELD);
+ PARSER.declareFloat((tp, f) -> tp.v2().minDocFreq(f), MIN_DOC_FREQ_FIELD);
+ PARSER.declareFloat((tp, f) -> tp.v2().accuracy(f), ACCURACY_FIELD);
+ PARSER.declareInt((tp, i) -> tp.v2().size(i), SIZE_FIELD);
+ PARSER.declareString((tp, s) -> tp.v2().sort(s), SORT_FIELD);
+ PARSER.declareString((tp, s) -> tp.v2().stringDistance(s), STRING_DISTANCE_FIELD);
+ PARSER.declareInt((tp, i) -> tp.v2().maxInspections(i), MAX_INSPECTIONS_FIELD);
+ PARSER.declareFloat((tp, f) -> tp.v2().maxTermFreq(f), MAX_TERM_FREQ_FIELD);
+ PARSER.declareInt((tp, i) -> tp.v2().maxEdits(i), MAX_EDITS_FIELD);
+ PARSER.declareInt((tp, i) -> tp.v2().minWordLength(i), MIN_WORD_LENGTH_FIELD);
+ PARSER.declareInt((tp, i) -> tp.v2().prefixLength(i), PREFIX_LENGTH_FIELD);
+ }
+
+ @Override
+ public DirectCandidateGeneratorBuilder fromXContent(QueryParseContext parseContext) throws IOException {
+ DirectCandidateGeneratorBuilder tempGenerator = new DirectCandidateGeneratorBuilder("_na_");
+        // bucket for the field name, needed as constructor arg later
+        Set<String> tmpFieldName = new HashSet<>(1);
+ PARSER.parse(parseContext.parser(),
+ new Tuple<Set<String>, DirectCandidateGeneratorBuilder>(tmpFieldName, tempGenerator));
+ if (tmpFieldName.size() != 1) {
+ throw new IllegalArgumentException("[" + TYPE + "] expects exactly one field parameter, but found " + tmpFieldName);
+ }
+ return replaceField(tmpFieldName.iterator().next(), tempGenerator);
+ }
+
+ public PhraseSuggestionContext.DirectCandidateGenerator build(QueryShardContext context) throws IOException {
+ MapperService mapperService = context.getMapperService();
+ PhraseSuggestionContext.DirectCandidateGenerator generator = new PhraseSuggestionContext.DirectCandidateGenerator();
+ generator.setField(this.field);
+ transferIfNotNull(this.size, generator::size);
+ if (this.preFilter != null) {
+ generator.preFilter(mapperService.analysisService().analyzer(this.preFilter));
+ if (generator.preFilter() == null) {
+ throw new IllegalArgumentException("Analyzer [" + this.preFilter + "] doesn't exists");
+ }
+ }
+ if (this.postFilter != null) {
+ generator.postFilter(mapperService.analysisService().analyzer(this.postFilter));
+ if (generator.postFilter() == null) {
+ throw new IllegalArgumentException("Analyzer [" + this.postFilter + "] doesn't exists");
+ }
+ }
+ transferIfNotNull(this.accuracy, generator::accuracy);
+ if (this.suggestMode != null) {
+ generator.suggestMode(SuggestUtils.resolveSuggestMode(this.suggestMode));
+ }
+ if (this.sort != null) {
+ generator.sort(SuggestUtils.resolveSort(this.sort));
+ }
+ if (this.stringDistance != null) {
+ generator.stringDistance(SuggestUtils.resolveDistance(this.stringDistance));
+ }
+ transferIfNotNull(this.maxEdits, generator::maxEdits);
+ if (generator.maxEdits() < 1 || generator.maxEdits() > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE) {
+ throw new IllegalArgumentException("Illegal max_edits value " + generator.maxEdits());
+ }
+ transferIfNotNull(this.maxInspections, generator::maxInspections);
+ transferIfNotNull(this.maxTermFreq, generator::maxTermFreq);
+ transferIfNotNull(this.prefixLength, generator::prefixLength);
+ transferIfNotNull(this.minWordLength, generator::minQueryLength);
+ transferIfNotNull(this.minDocFreq, generator::minDocFreq);
+ return generator;
+ }
+
+ private static <T> void transferIfNotNull(T value, Consumer<T> consumer) {
+ if (value != null) {
+ consumer.accept(value);
+ }
+ }
+
+ @Override
+ public final String toString() {
+ try {
+ XContentBuilder builder = XContentFactory.jsonBuilder();
+ builder.prettyPrint();
+ toXContent(builder, EMPTY_PARAMS);
+ return builder.string();
+ } catch (Exception e) {
+ return "{ \"error\" : \"" + ExceptionsHelper.detailedMessage(e) + "\"}";
+ }
+ }
+
+ @Override
+ public DirectCandidateGeneratorBuilder readFrom(StreamInput in) throws IOException {
+ DirectCandidateGeneratorBuilder cg = new DirectCandidateGeneratorBuilder(in.readString());
+ cg.suggestMode = in.readOptionalString();
+ if (in.readBoolean()) {
+ cg.accuracy = in.readFloat();
+ }
+ cg.size = in.readOptionalVInt();
+ cg.sort = in.readOptionalString();
+ cg.stringDistance = in.readOptionalString();
+ cg.maxEdits = in.readOptionalVInt();
+ cg.maxInspections = in.readOptionalVInt();
+ if (in.readBoolean()) {
+ cg.maxTermFreq = in.readFloat();
+ }
+ cg.prefixLength = in.readOptionalVInt();
+ cg.minWordLength = in.readOptionalVInt();
+ if (in.readBoolean()) {
+ cg.minDocFreq = in.readFloat();
+ }
+ cg.preFilter = in.readOptionalString();
+ cg.postFilter = in.readOptionalString();
+ return cg;
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeString(field);
+ out.writeOptionalString(suggestMode);
+ out.writeBoolean(accuracy != null);
+ if (accuracy != null) {
+ out.writeFloat(accuracy);
+ }
+ out.writeOptionalVInt(size);
+ out.writeOptionalString(sort);
+ out.writeOptionalString(stringDistance);
+ out.writeOptionalVInt(maxEdits);
+ out.writeOptionalVInt(maxInspections);
+ out.writeBoolean(maxTermFreq != null);
+ if (maxTermFreq != null) {
+ out.writeFloat(maxTermFreq);
+ }
+ out.writeOptionalVInt(prefixLength);
+ out.writeOptionalVInt(minWordLength);
+ out.writeBoolean(minDocFreq != null);
+ if (minDocFreq != null) {
+ out.writeFloat(minDocFreq);
+ }
+ out.writeOptionalString(preFilter);
+ out.writeOptionalString(postFilter);
+ }
+
+ @Override
+ public final int hashCode() {
+ return Objects.hash(field, preFilter, postFilter, suggestMode, accuracy,
+ size, sort, stringDistance, maxEdits, maxInspections,
+ maxTermFreq, prefixLength, minWordLength, minDocFreq);
+ }
+
+ @Override
+ public final boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+ DirectCandidateGeneratorBuilder other = (DirectCandidateGeneratorBuilder) obj;
+ return Objects.equals(field, other.field) &&
+ Objects.equals(preFilter, other.preFilter) &&
+ Objects.equals(postFilter, other.postFilter) &&
+ Objects.equals(suggestMode, other.suggestMode) &&
+ Objects.equals(accuracy, other.accuracy) &&
+ Objects.equals(size, other.size) &&
+ Objects.equals(sort, other.sort) &&
+ Objects.equals(stringDistance, other.stringDistance) &&
+ Objects.equals(maxEdits, other.maxEdits) &&
+ Objects.equals(maxInspections, other.maxInspections) &&
+ Objects.equals(maxTermFreq, other.maxTermFreq) &&
+ Objects.equals(prefixLength, other.prefixLength) &&
+ Objects.equals(minWordLength, other.minWordLength) &&
+ Objects.equals(minDocFreq, other.minDocFreq);
+ }
+} \ No newline at end of file
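
A usage sketch for the new builder, grounded in the chainable setters above; the field name and option values are illustrative:

    DirectCandidateGeneratorBuilder generator = new DirectCandidateGeneratorBuilder("body")
            .suggestMode("always")   // consider all tokens, not only the missing ones
            .accuracy(0.7f)          // require at least 0.7 string-distance similarity
            .maxEdits(2)             // the maximum LevenshteinAutomata supports
            .size(10);

PhraseSuggestionBuilder.candidateGenerator(String), shown later in this diff, is a convenience factory for the same builder.
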
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java
index 04d98c3827..678f3082ba 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java
@@ -27,14 +27,14 @@ import org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator.Candidat
import java.io.IOException;
//TODO public for tests
public final class LaplaceScorer extends WordScorer {
-
+
public static final WordScorerFactory FACTORY = new WordScorer.WordScorerFactory() {
@Override
public WordScorer newScorer(IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator) throws IOException {
return new LaplaceScorer(reader, terms, field, realWordLikelyhood, separator, 0.5);
}
};
-
+
private double alpha;
public LaplaceScorer(IndexReader reader, Terms terms, String field,
@@ -42,7 +42,11 @@ public final class LaplaceScorer extends WordScorer {
super(reader, terms, field, realWordLikelyhood, separator);
this.alpha = alpha;
}
-
+
+ double alpha() {
+ return this.alpha;
+ }
+
@Override
protected double scoreBigram(Candidate word, Candidate w_1) throws IOException {
SuggestUtils.join(separator, spare, w_1.term, word.term);
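
For background on the constant exposed by the new alpha() getter: Laplace (additive) smoothing of a bigram score has the usual form

    P(w_i | w_{i-1}) = (count(w_{i-1} w_i) + alpha) / (count(w_{i-1}) + alpha * |V|)

where |V| is the vocabulary size; the FACTORY above fixes alpha = 0.5. This is stated as standard background, not quoted from the implementation.
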
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpoatingScorer.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpoatingScorer.java
index d2b1ba48b1..368d461fc5 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpoatingScorer.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpoatingScorer.java
@@ -41,7 +41,19 @@ public final class LinearInterpoatingScorer extends WordScorer {
this.bigramLambda = bigramLambda / sum;
this.trigramLambda = trigramLambda / sum;
}
-
+
+ double trigramLambda() {
+ return this.trigramLambda;
+ }
+
+ double bigramLambda() {
+ return this.bigramLambda;
+ }
+
+ double unigramLambda() {
+ return this.unigramLambda;
+ }
+
@Override
protected double scoreBigram(Candidate word, Candidate w_1) throws IOException {
SuggestUtils.join(separator, spare, w_1.term, word.term);
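
The new getters expose the three normalized weights. Linear interpolation smoothing scores a word as a convex combination of trigram, bigram and unigram estimates,

    P(w_i | w_{i-2} w_{i-1}) = lambda_3 * P_tri + lambda_2 * P_bi + lambda_1 * P_uni

and the constructor's division of each lambda by their sum (visible in the context above) is what guarantees lambda_1 + lambda_2 + lambda_3 = 1.
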
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java
index 0b904a9572..fc60fc6fc8 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java
@@ -22,7 +22,6 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Terms;
import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
@@ -36,6 +35,8 @@ import org.elasticsearch.script.Template;
import org.elasticsearch.search.suggest.SuggestContextParser;
import org.elasticsearch.search.suggest.SuggestUtils;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
+import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.Laplace;
+import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.StupidBackoff;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator;
import java.io.IOException;
@@ -50,8 +51,7 @@ public final class PhraseSuggestParser implements SuggestContextParser {
}
@Override
- public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexFieldDataService fieldDataService,
- HasContextAndHeaders headersContext) throws IOException {
+ public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexFieldDataService fieldDataService) throws IOException {
PhraseSuggestionContext suggestion = new PhraseSuggestionContext(suggester);
ParseFieldMatcher parseFieldMatcher = mapperService.getIndexSettings().getParseFieldMatcher();
XContentParser.Token token;
@@ -98,18 +98,10 @@ public final class PhraseSuggestParser implements SuggestContextParser {
}
}
} else if (token == Token.START_ARRAY) {
- if ("direct_generator".equals(fieldName) || "directGenerator".equals(fieldName)) {
+ if (parseFieldMatcher.match(fieldName, DirectCandidateGeneratorBuilder.DIRECT_GENERATOR_FIELD)) {
// for now we only have a single type of generators
while ((token = parser.nextToken()) == Token.START_OBJECT) {
- PhraseSuggestionContext.DirectCandidateGenerator generator = new PhraseSuggestionContext.DirectCandidateGenerator();
- while ((token = parser.nextToken()) != Token.END_OBJECT) {
- if (token == XContentParser.Token.FIELD_NAME) {
- fieldName = parser.currentName();
- }
- if (token.isValue()) {
- parseCandidateGenerator(parser, mapperService, fieldName, generator, parseFieldMatcher);
- }
- }
+ PhraseSuggestionContext.DirectCandidateGenerator generator = parseCandidateGenerator(parser, mapperService, parseFieldMatcher);
verifyGenerator(generator);
suggestion.addGenerator(generator);
}
@@ -143,8 +135,7 @@ public final class PhraseSuggestParser implements SuggestContextParser {
throw new IllegalArgumentException("suggester[phrase][collate] query already set, doesn't support additional [" + fieldName + "]");
}
Template template = Template.parse(parser, parseFieldMatcher);
- CompiledScript compiledScript = suggester.scriptService().compile(template, ScriptContext.Standard.SEARCH,
- headersContext, Collections.emptyMap());
+ CompiledScript compiledScript = suggester.scriptService().compile(template, ScriptContext.Standard.SEARCH, Collections.emptyMap());
suggestion.setCollateQueryScript(compiledScript);
} else if ("params".equals(fieldName)) {
suggestion.setCollateScriptParams(parser.map());
@@ -265,7 +256,7 @@ public final class PhraseSuggestParser implements SuggestContextParser {
});
} else if ("laplace".equals(fieldName)) {
ensureNoSmoothing(suggestion);
- double theAlpha = 0.5;
+ double theAlpha = Laplace.DEFAULT_LAPLACE_ALPHA;
while ((token = parser.nextToken()) != Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
@@ -286,7 +277,7 @@ public final class PhraseSuggestParser implements SuggestContextParser {
} else if ("stupid_backoff".equals(fieldName) || "stupidBackoff".equals(fieldName)) {
ensureNoSmoothing(suggestion);
- double theDiscount = 0.4;
+ double theDiscount = StupidBackoff.DEFAULT_BACKOFF_DISCOUNT;
while ((token = parser.nextToken()) != Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
@@ -324,34 +315,44 @@ public final class PhraseSuggestParser implements SuggestContextParser {
}
}
- private void parseCandidateGenerator(XContentParser parser, MapperService mapperService, String fieldName,
- PhraseSuggestionContext.DirectCandidateGenerator generator, ParseFieldMatcher parseFieldMatcher) throws IOException {
- if (!SuggestUtils.parseDirectSpellcheckerSettings(parser, fieldName, generator, parseFieldMatcher)) {
- if ("field".equals(fieldName)) {
- generator.setField(parser.text());
- if (mapperService.fullName(generator.field()) == null) {
- throw new IllegalArgumentException("No mapping found for field [" + generator.field() + "]");
- }
- } else if ("size".equals(fieldName)) {
- generator.size(parser.intValue());
- } else if ("pre_filter".equals(fieldName) || "preFilter".equals(fieldName)) {
- String analyzerName = parser.text();
- Analyzer analyzer = mapperService.analysisService().analyzer(analyzerName);
- if (analyzer == null) {
- throw new IllegalArgumentException("Analyzer [" + analyzerName + "] doesn't exists");
- }
- generator.preFilter(analyzer);
- } else if ("post_filter".equals(fieldName) || "postFilter".equals(fieldName)) {
- String analyzerName = parser.text();
- Analyzer analyzer = mapperService.analysisService().analyzer(analyzerName);
- if (analyzer == null) {
- throw new IllegalArgumentException("Analyzer [" + analyzerName + "] doesn't exists");
+ static PhraseSuggestionContext.DirectCandidateGenerator parseCandidateGenerator(XContentParser parser, MapperService mapperService,
+ ParseFieldMatcher parseFieldMatcher) throws IOException {
+ XContentParser.Token token;
+ String fieldName = null;
+ PhraseSuggestionContext.DirectCandidateGenerator generator = new PhraseSuggestionContext.DirectCandidateGenerator();
+ while ((token = parser.nextToken()) != Token.END_OBJECT) {
+ if (token == XContentParser.Token.FIELD_NAME) {
+ fieldName = parser.currentName();
+ }
+ if (token.isValue()) {
+ if (!SuggestUtils.parseDirectSpellcheckerSettings(parser, fieldName, generator, parseFieldMatcher)) {
+ if ("field".equals(fieldName)) {
+ generator.setField(parser.text());
+ if (mapperService.fullName(generator.field()) == null) {
+ throw new IllegalArgumentException("No mapping found for field [" + generator.field() + "]");
+ }
+ } else if ("size".equals(fieldName)) {
+ generator.size(parser.intValue());
+ } else if ("pre_filter".equals(fieldName) || "preFilter".equals(fieldName)) {
+ String analyzerName = parser.text();
+ Analyzer analyzer = mapperService.analysisService().analyzer(analyzerName);
+ if (analyzer == null) {
+ throw new IllegalArgumentException("Analyzer [" + analyzerName + "] doesn't exists");
+ }
+ generator.preFilter(analyzer);
+ } else if ("post_filter".equals(fieldName) || "postFilter".equals(fieldName)) {
+ String analyzerName = parser.text();
+ Analyzer analyzer = mapperService.analysisService().analyzer(analyzerName);
+ if (analyzer == null) {
+ throw new IllegalArgumentException("Analyzer [" + analyzerName + "] doesn't exists");
+ }
+ generator.postFilter(analyzer);
+ } else {
+ throw new IllegalArgumentException("CandidateGenerator doesn't support [" + fieldName + "]");
+ }
}
- generator.postFilter(analyzer);
- } else {
- throw new IllegalArgumentException("CandidateGenerator doesn't support [" + fieldName + "]");
}
}
+ return generator;
}
-
}
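The generator parser is now a self-contained static factory: it owns the token loop and returns the populated generator. A usage sketch, assuming the 2.x JsonXContent factory and a caller-supplied mapperService and parseFieldMatcher:

    // Illustrative input; the parser must sit on the START_OBJECT before handing off.
    String json = "{\"field\": \"title\", \"size\": 5, \"suggest_mode\": \"always\"}";
    XContentParser parser = JsonXContent.jsonXContent.createParser(json);
    parser.nextToken();   // advance onto START_OBJECT
    PhraseSuggestionContext.DirectCandidateGenerator generator =
            parseCandidateGenerator(parser, mapperService, parseFieldMatcher);
    assert "title".equals(generator.field());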
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java
index 1055fbe83f..b72cd41ea7 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java
@@ -18,10 +18,23 @@
*/
package org.elasticsearch.search.suggest.phrase;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.Terms;
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.ParseFieldMatcher;
+import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.io.stream.NamedWriteable;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentParser.Token;
+import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.script.Template;
import org.elasticsearch.search.suggest.SuggestBuilder.SuggestionBuilder;
+import org.elasticsearch.search.suggest.phrase.WordScorer.WordScorerFactory;
import java.io.IOException;
import java.util.ArrayList;
@@ -29,6 +42,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
+import java.util.Objects;
import java.util.Set;
/**
@@ -264,13 +278,13 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge
}
/**
- * Creates a new {@link DirectCandidateGenerator}
+ * Creates a new {@link DirectCandidateGeneratorBuilder}
*
* @param field
* the field this candidate generator operates on.
*/
- public static DirectCandidateGenerator candidateGenerator(String field) {
- return new DirectCandidateGenerator(field);
+ public static DirectCandidateGeneratorBuilder candidateGenerator(String field) {
+ return new DirectCandidateGeneratorBuilder(field);
}
/**
@@ -284,7 +298,14 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge
* </p>
*/
public static final class StupidBackoff extends SmoothingModel {
- private final double discount;
+ /**
+ * Default discount parameter for {@link StupidBackoff} smoothing
+ */
+ public static final double DEFAULT_BACKOFF_DISCOUNT = 0.4;
+ private double discount = DEFAULT_BACKOFF_DISCOUNT;
+ static final StupidBackoff PROTOTYPE = new StupidBackoff(DEFAULT_BACKOFF_DISCOUNT);
+ private static final String NAME = "stupid_backoff";
+ private static final ParseField DISCOUNT_FIELD = new ParseField("discount");
/**
* Creates a Stupid-Backoff smoothing model.
@@ -293,15 +314,70 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge
* the discount given to lower order ngrams if the higher order ngram doesn't exist
*/
public StupidBackoff(double discount) {
- super("stupid_backoff");
this.discount = discount;
}
+ /**
+ * @return the discount parameter of the model
+ */
+ public double getDiscount() {
+ return this.discount;
+ }
+
@Override
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
- builder.field("discount", discount);
+ builder.field(DISCOUNT_FIELD.getPreferredName(), discount);
return builder;
}
+
+ @Override
+ public String getWriteableName() {
+ return NAME;
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeDouble(discount);
+ }
+
+ @Override
+ public StupidBackoff readFrom(StreamInput in) throws IOException {
+ return new StupidBackoff(in.readDouble());
+ }
+
+ @Override
+ protected boolean doEquals(SmoothingModel other) {
+ StupidBackoff otherModel = (StupidBackoff) other;
+ return Objects.equals(discount, otherModel.discount);
+ }
+
+ @Override
+ public final int hashCode() {
+ return Objects.hash(discount);
+ }
+
+ @Override
+ public SmoothingModel fromXContent(QueryParseContext parseContext) throws IOException {
+ XContentParser parser = parseContext.parser();
+ XContentParser.Token token;
+ String fieldName = null;
+ double discount = DEFAULT_BACKOFF_DISCOUNT;
+ while ((token = parser.nextToken()) != Token.END_OBJECT) {
+ if (token == XContentParser.Token.FIELD_NAME) {
+ fieldName = parser.currentName();
+ }
+ if (token.isValue() && parseContext.parseFieldMatcher().match(fieldName, DISCOUNT_FIELD)) {
+ discount = parser.doubleValue();
+ }
+ }
+ return new StupidBackoff(discount);
+ }
+
+ @Override
+ public WordScorerFactory buildWordScorerFactory() {
+ return (IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator)
+ -> new StupidBackoffScorer(reader, terms, field, realWordLikelyhood, separator, discount);
+ }
}
/**
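StupidBackoff now participates in NamedWriteable serialization through the PROTOTYPE/readFrom pair, and the new equals/hashCode make the round-trip checkable. A sketch, assuming the usual BytesStreamOutput test helper and the 2.x-era BytesReference.toBytes() accessor:

    StupidBackoff model = new StupidBackoff(0.3);
    BytesStreamOutput out = new BytesStreamOutput();
    model.writeTo(out);                                     // just the discount double
    StreamInput in = StreamInput.wrap(out.bytes().toBytes());
    StupidBackoff copy = StupidBackoff.PROTOTYPE.readFrom(in);
    assert model.equals(copy) && model.hashCode() == copy.hashCode();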
@@ -314,39 +390,119 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge
* </p>
*/
public static final class Laplace extends SmoothingModel {
- private final double alpha;
+ private double alpha = DEFAULT_LAPLACE_ALPHA;
+ private static final String NAME = "laplace";
+ private static final ParseField ALPHA_FIELD = new ParseField("alpha");
+ /**
+ * Default alpha parameter for laplace smoothing
+ */
+ public static final double DEFAULT_LAPLACE_ALPHA = 0.5;
+ static final Laplace PROTOTYPE = new Laplace(DEFAULT_LAPLACE_ALPHA);
+
/**
* Creates a Laplace smoothing model.
*
*/
public Laplace(double alpha) {
- super("laplace");
this.alpha = alpha;
}
+ /**
+ * @return the laplace model alpha parameter
+ */
+ public double getAlpha() {
+ return this.alpha;
+ }
+
@Override
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
- builder.field("alpha", alpha);
+ builder.field(ALPHA_FIELD.getPreferredName(), alpha);
return builder;
}
- }
+ @Override
+ public String getWriteableName() {
+ return NAME;
+ }
- public static abstract class SmoothingModel implements ToXContent {
- private final String type;
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeDouble(alpha);
+ }
+
+ @Override
+ public SmoothingModel readFrom(StreamInput in) throws IOException {
+ return new Laplace(in.readDouble());
+ }
+
+ @Override
+ protected boolean doEquals(SmoothingModel other) {
+ Laplace otherModel = (Laplace) other;
+ return Objects.equals(alpha, otherModel.alpha);
+ }
+
+ @Override
+ public final int hashCode() {
+ return Objects.hash(alpha);
+ }
- protected SmoothingModel(String type) {
- this.type = type;
+ @Override
+ public SmoothingModel fromXContent(QueryParseContext parseContext) throws IOException {
+ XContentParser parser = parseContext.parser();
+ XContentParser.Token token;
+ String fieldName = null;
+ double alpha = DEFAULT_LAPLACE_ALPHA;
+ while ((token = parser.nextToken()) != Token.END_OBJECT) {
+ if (token == XContentParser.Token.FIELD_NAME) {
+ fieldName = parser.currentName();
+ }
+ if (token.isValue() && parseContext.parseFieldMatcher().match(fieldName, ALPHA_FIELD)) {
+ alpha = parser.doubleValue();
+ }
+ }
+ return new Laplace(alpha);
+ }
+
+ @Override
+ public WordScorerFactory buildWordScorerFactory() {
+ return (IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator)
+ -> new LaplaceScorer(reader, terms, field, realWordLikelyhood, separator, alpha);
}
+ }
+
+
+ public static abstract class SmoothingModel implements NamedWriteable<SmoothingModel>, ToXContent {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
- builder.startObject(type);
+ builder.startObject(getWriteableName());
innerToXContent(builder,params);
builder.endObject();
return builder;
}
+ @Override
+ public final boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+ @SuppressWarnings("unchecked")
+ SmoothingModel other = (SmoothingModel) obj;
+ return doEquals(other);
+ }
+
+ public abstract SmoothingModel fromXContent(QueryParseContext parseContext) throws IOException;
+
+ public abstract WordScorerFactory buildWordScorerFactory();
+
+ /**
+ * subtype specific implementation of "equals".
+ */
+ protected abstract boolean doEquals(SmoothingModel other);
+
protected abstract XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException;
}
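SmoothingModel now centralizes equality: the final equals() gates on class identity and delegates the field comparison to the subtype's doEquals(), while toXContent keys the body by getWriteableName(). A quick sketch of the resulting behavior:

    SmoothingModel a = new Laplace(0.7);
    SmoothingModel b = new Laplace(0.7);
    SmoothingModel c = new StupidBackoff(0.7);
    assert a.equals(b);          // same class, same alpha
    assert !a.equals(c);         // class check fails before doEquals() runs
    // toXContent renders, e.g. (illustrative): "laplace": { "alpha": 0.7 }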
@@ -359,9 +515,14 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge
* </p>
*/
public static final class LinearInterpolation extends SmoothingModel {
+ private static final String NAME = "linear";
+ static final LinearInterpolation PROTOTYPE = new LinearInterpolation(0.8, 0.1, 0.1);
private final double trigramLambda;
private final double bigramLambda;
private final double unigramLambda;
+ private static final ParseField TRIGRAM_FIELD = new ParseField("trigram_lambda");
+ private static final ParseField BIGRAM_FIELD = new ParseField("bigram_lambda");
+ private static final ParseField UNIGRAM_FIELD = new ParseField("unigram_lambda");
/**
* Creates a linear interpolation smoothing model.
@@ -376,283 +537,118 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge
* the unigram lambda
*/
public LinearInterpolation(double trigramLambda, double bigramLambda, double unigramLambda) {
- super("linear");
+ double sum = trigramLambda + bigramLambda + unigramLambda;
+ if (Math.abs(sum - 1.0) > 0.001) {
+ throw new IllegalArgumentException("linear smoothing lambdas must sum to 1");
+ }
this.trigramLambda = trigramLambda;
this.bigramLambda = bigramLambda;
this.unigramLambda = unigramLambda;
}
- @Override
- protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
- builder.field("trigram_lambda", trigramLambda);
- builder.field("bigram_lambda", bigramLambda);
- builder.field("unigram_lambda", unigramLambda);
- return builder;
- }
- }
-
- /**
- * {@link CandidateGenerator} base class.
- */
- public static abstract class CandidateGenerator implements ToXContent {
- private final String type;
-
- public CandidateGenerator(String type) {
- this.type = type;
- }
-
- public String getType() {
- return type;
+ public double getTrigramLambda() {
+ return this.trigramLambda;
}
- }
-
- /**
- *
- *
- */
- public static final class DirectCandidateGenerator extends CandidateGenerator {
- private final String field;
- private String preFilter;
- private String postFilter;
- private String suggestMode;
- private Float accuracy;
- private Integer size;
- private String sort;
- private String stringDistance;
- private Integer maxEdits;
- private Integer maxInspections;
- private Float maxTermFreq;
- private Integer prefixLength;
- private Integer minWordLength;
- private Float minDocFreq;
-
- /**
- * @param field Sets from what field to fetch the candidate suggestions from.
- */
- public DirectCandidateGenerator(String field) {
- super("direct_generator");
- this.field = field;
+ public double getBigramLambda() {
+ return this.bigramLambda;
}
- /**
- * The global suggest mode controls what suggested terms are included or
- * controls for what suggest text tokens, terms should be suggested for.
- * Three possible values can be specified:
- * <ol>
- * <li><code>missing</code> - Only suggest terms in the suggest text
- * that aren't in the index. This is the default.
- * <li><code>popular</code> - Only suggest terms that occur in more docs
- * then the original suggest text term.
- * <li><code>always</code> - Suggest any matching suggest terms based on
- * tokens in the suggest text.
- * </ol>
- */
- public DirectCandidateGenerator suggestMode(String suggestMode) {
- this.suggestMode = suggestMode;
- return this;
- }
-
- /**
- * Sets how similar the suggested terms at least need to be compared to
- * the original suggest text tokens. A value between 0 and 1 can be
- * specified. This value will be compared to the string distance result
- * of each candidate spelling correction.
- * <p>
- * Default is <tt>0.5</tt>
- */
- public DirectCandidateGenerator accuracy(float accuracy) {
- this.accuracy = accuracy;
- return this;
+ public double getUnigramLambda() {
+ return this.unigramLambda;
}
- /**
- * Sets the maximum suggestions to be returned per suggest text term.
- */
- public DirectCandidateGenerator size(int size) {
- if (size <= 0) {
- throw new IllegalArgumentException("Size must be positive");
- }
- this.size = size;
- return this;
- }
-
- /**
- * Sets how to sort the suggest terms per suggest text token. Two
- * possible values:
- * <ol>
- * <li><code>score</code> - Sort should first be based on score, then
- * document frequency and then the term itself.
- * <li><code>frequency</code> - Sort should first be based on document
- * frequency, then scotr and then the term itself.
- * </ol>
- * <p>
- * What the score is depends on the suggester being used.
- */
- public DirectCandidateGenerator sort(String sort) {
- this.sort = sort;
- return this;
- }
-
- /**
- * Sets what string distance implementation to use for comparing how
- * similar suggested terms are. Four possible values can be specified:
- * <ol>
- * <li><code>internal</code> - This is the default and is based on
- * <code>damerau_levenshtein</code>, but highly optimized for comparing
- * string distance for terms inside the index.
- * <li><code>damerau_levenshtein</code> - String distance algorithm
- * based on Damerau-Levenshtein algorithm.
- * <li><code>levenstein</code> - String distance algorithm based on
- * Levenstein edit distance algorithm.
- * <li><code>jarowinkler</code> - String distance algorithm based on
- * Jaro-Winkler algorithm.
- * <li><code>ngram</code> - String distance algorithm based on character
- * n-grams.
- * </ol>
- */
- public DirectCandidateGenerator stringDistance(String stringDistance) {
- this.stringDistance = stringDistance;
- return this;
- }
-
- /**
- * Sets the maximum edit distance candidate suggestions can have in
- * order to be considered as a suggestion. Can only be a value between 1
- * and 2. Any other value result in an bad request error being thrown.
- * Defaults to <tt>2</tt>.
- */
- public DirectCandidateGenerator maxEdits(Integer maxEdits) {
- this.maxEdits = maxEdits;
- return this;
- }
-
- /**
- * A factor that is used to multiply with the size in order to inspect
- * more candidate suggestions. Can improve accuracy at the cost of
- * performance. Defaults to <tt>5</tt>.
- */
- public DirectCandidateGenerator maxInspections(Integer maxInspections) {
- this.maxInspections = maxInspections;
- return this;
- }
-
- /**
- * Sets a maximum threshold in number of documents a suggest text token
- * can exist in order to be corrected. Can be a relative percentage
- * number (e.g 0.4) or an absolute number to represent document
- * frequencies. If an value higher than 1 is specified then fractional
- * can not be specified. Defaults to <tt>0.01</tt>.
- * <p>
- * This can be used to exclude high frequency terms from being
- * suggested. High frequency terms are usually spelled correctly on top
- * of this this also improves the suggest performance.
- */
- public DirectCandidateGenerator maxTermFreq(float maxTermFreq) {
- this.maxTermFreq = maxTermFreq;
- return this;
+ @Override
+ protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.field(TRIGRAM_FIELD.getPreferredName(), trigramLambda);
+ builder.field(BIGRAM_FIELD.getPreferredName(), bigramLambda);
+ builder.field(UNIGRAM_FIELD.getPreferredName(), unigramLambda);
+ return builder;
}
- /**
- * Sets the number of minimal prefix characters that must match in order
- * be a candidate suggestion. Defaults to 1. Increasing this number
- * improves suggest performance. Usually misspellings don't occur in the
- * beginning of terms.
- */
- public DirectCandidateGenerator prefixLength(int prefixLength) {
- this.prefixLength = prefixLength;
- return this;
+ @Override
+ public String getWriteableName() {
+ return NAME;
}
- /**
- * The minimum length a suggest text term must have in order to be
- * corrected. Defaults to <tt>4</tt>.
- */
- public DirectCandidateGenerator minWordLength(int minWordLength) {
- this.minWordLength = minWordLength;
- return this;
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeDouble(trigramLambda);
+ out.writeDouble(bigramLambda);
+ out.writeDouble(unigramLambda);
}
- /**
- * Sets a minimal threshold in number of documents a suggested term
- * should appear in. This can be specified as an absolute number or as a
- * relative percentage of number of documents. This can improve quality
- * by only suggesting high frequency terms. Defaults to 0f and is not
- * enabled. If a value higher than 1 is specified then the number cannot
- * be fractional.
- */
- public DirectCandidateGenerator minDocFreq(float minDocFreq) {
- this.minDocFreq = minDocFreq;
- return this;
+ @Override
+ public LinearInterpolation readFrom(StreamInput in) throws IOException {
+ return new LinearInterpolation(in.readDouble(), in.readDouble(), in.readDouble());
}
- /**
- * Sets a filter (analyzer) that is applied to each of the tokens passed to this candidate generator.
- * This filter is applied to the original token before candidates are generated.
- */
- public DirectCandidateGenerator preFilter(String preFilter) {
- this.preFilter = preFilter;
- return this;
+ @Override
+ protected boolean doEquals(SmoothingModel other) {
+ final LinearInterpolation otherModel = (LinearInterpolation) other;
+ return Objects.equals(trigramLambda, otherModel.trigramLambda) &&
+ Objects.equals(bigramLambda, otherModel.bigramLambda) &&
+ Objects.equals(unigramLambda, otherModel.unigramLambda);
}
- /**
- * Sets a filter (analyzer) that is applied to each of the generated tokens
- * before they are passed to the actual phrase scorer.
- */
- public DirectCandidateGenerator postFilter(String postFilter) {
- this.postFilter = postFilter;
- return this;
+ @Override
+ public final int hashCode() {
+ return Objects.hash(trigramLambda, bigramLambda, unigramLambda);
}
@Override
- public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
- builder.startObject();
- if (field != null) {
- builder.field("field", field);
- }
- if (suggestMode != null) {
- builder.field("suggest_mode", suggestMode);
- }
- if (accuracy != null) {
- builder.field("accuracy", accuracy);
- }
- if (size != null) {
- builder.field("size", size);
- }
- if (sort != null) {
- builder.field("sort", sort);
- }
- if (stringDistance != null) {
- builder.field("string_distance", stringDistance);
- }
- if (maxEdits != null) {
- builder.field("max_edits", maxEdits);
- }
- if (maxInspections != null) {
- builder.field("max_inspections", maxInspections);
- }
- if (maxTermFreq != null) {
- builder.field("max_term_freq", maxTermFreq);
- }
- if (prefixLength != null) {
- builder.field("prefix_length", prefixLength);
- }
- if (minWordLength != null) {
- builder.field("min_word_length", minWordLength);
- }
- if (minDocFreq != null) {
- builder.field("min_doc_freq", minDocFreq);
- }
- if (preFilter != null) {
- builder.field("pre_filter", preFilter);
- }
- if (postFilter != null) {
- builder.field("post_filter", postFilter);
+ public LinearInterpolation fromXContent(QueryParseContext parseContext) throws IOException {
+ XContentParser parser = parseContext.parser();
+ XContentParser.Token token;
+ String fieldName = null;
+ double trigramLambda = 0.0;
+ double bigramLambda = 0.0;
+ double unigramLambda = 0.0;
+ ParseFieldMatcher matcher = parseContext.parseFieldMatcher();
+ while ((token = parser.nextToken()) != Token.END_OBJECT) {
+ if (token == XContentParser.Token.FIELD_NAME) {
+ fieldName = parser.currentName();
+ } else if (token.isValue()) {
+ if (matcher.match(fieldName, TRIGRAM_FIELD)) {
+ trigramLambda = parser.doubleValue();
+ if (trigramLambda < 0) {
+ throw new IllegalArgumentException("trigram_lambda must be positive");
+ }
+ } else if (matcher.match(fieldName, BIGRAM_FIELD)) {
+ bigramLambda = parser.doubleValue();
+ if (bigramLambda < 0) {
+ throw new IllegalArgumentException("bigram_lambda must be positive");
+ }
+ } else if (matcher.match(fieldName, UNIGRAM_FIELD)) {
+ unigramLambda = parser.doubleValue();
+ if (unigramLambda < 0) {
+ throw new IllegalArgumentException("unigram_lambda must be positive");
+ }
+ } else {
+ throw new IllegalArgumentException(
+ "suggester[phrase][smoothing][linear] doesn't support field [" + fieldName + "]");
+ }
+ } else {
+ throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] unknown token [" + token + "] after [" + fieldName + "]");
+ }
}
- builder.endObject();
- return builder;
+ return new LinearInterpolation(trigramLambda, bigramLambda, unigramLambda);
}
+ @Override
+ public WordScorerFactory buildWordScorerFactory() {
+ return (IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator) ->
+ new LinearInterpoatingScorer(reader, terms, field, realWordLikelyhood, separator, trigramLambda, bigramLambda,
+ unigramLambda);
+ }
}
+ /**
+ * {@link CandidateGenerator} interface.
+ */
+ public interface CandidateGenerator extends ToXContent {
+ String getType();
+
+ CandidateGenerator fromXContent(QueryParseContext parseContext) throws IOException;
+ }
}
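LinearInterpolation's constructor now validates its inputs: the three lambdas must sum to 1 within a 0.001 tolerance. A sketch:

    new LinearInterpolation(0.7, 0.2, 0.1);       // ok: sums to 1.0
    try {
        new LinearInterpolation(0.5, 0.2, 0.1);   // sums to 0.8
    } catch (IllegalArgumentException e) {
        // "linear smoothing lambdas must sum to 1"
    }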
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java
index fcf6064d22..5bd3d942b1 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java
@@ -42,6 +42,10 @@ public class StupidBackoffScorer extends WordScorer {
this.discount = discount;
}
+ double discount() {
+ return this.discount;
+ }
+
@Override
protected double scoreBigram(Candidate word, Candidate w_1) throws IOException {
SuggestUtils.join(separator, spare, w_1.term, word.term);
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestParser.java b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestParser.java
index a0e0e28954..a2fd680c21 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestParser.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestParser.java
@@ -18,7 +18,6 @@
*/
package org.elasticsearch.search.suggest.term;
-import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
@@ -39,8 +38,7 @@ public final class TermSuggestParser implements SuggestContextParser {
}
@Override
- public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexFieldDataService fieldDataService,
- HasContextAndHeaders headersContext) throws IOException {
+ public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexFieldDataService fieldDataService) throws IOException {
XContentParser.Token token;
String fieldName = null;
TermSuggestionContext suggestion = new TermSuggestionContext(suggester);
diff --git a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java
index e167a03f8f..c6f189ea8a 100644
--- a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java
+++ b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java
@@ -62,6 +62,7 @@ import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
+import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.snapshots.IndexShardRepository;
@@ -236,7 +237,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis
String index = indexEntry.getValue();
boolean partial = checkPartial(index);
RestoreSource restoreSource = new RestoreSource(snapshotId, snapshot.version(), index);
- String renamedIndex = indexEntry.getKey();
+ String renamedIndexName = indexEntry.getKey();
IndexMetaData snapshotIndexMetaData = metaData.index(index);
snapshotIndexMetaData = updateIndexSettings(snapshotIndexMetaData, request.indexSettings, request.ignoreIndexSettings);
try {
@@ -245,14 +246,15 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis
throw new SnapshotRestoreException(snapshotId, "cannot restore index [" + index + "] because it cannot be upgraded", ex);
}
// Check that the index is closed or doesn't exist
- IndexMetaData currentIndexMetaData = currentState.metaData().index(renamedIndex);
+ IndexMetaData currentIndexMetaData = currentState.metaData().index(renamedIndexName);
IntSet ignoreShards = new IntHashSet();
+ final Index renamedIndex;
if (currentIndexMetaData == null) {
// Index doesn't exist - create it and start recovery
// Make sure that the index we are about to create has a valid name
- createIndexService.validateIndexName(renamedIndex, currentState);
- createIndexService.validateIndexSettings(renamedIndex, snapshotIndexMetaData.getSettings());
- IndexMetaData.Builder indexMdBuilder = IndexMetaData.builder(snapshotIndexMetaData).state(IndexMetaData.State.OPEN).index(renamedIndex);
+ createIndexService.validateIndexName(renamedIndexName, currentState);
+ createIndexService.validateIndexSettings(renamedIndexName, snapshotIndexMetaData.getSettings());
+ IndexMetaData.Builder indexMdBuilder = IndexMetaData.builder(snapshotIndexMetaData).state(IndexMetaData.State.OPEN).index(renamedIndexName);
indexMdBuilder.settings(Settings.settingsBuilder().put(snapshotIndexMetaData.getSettings()).put(IndexMetaData.SETTING_INDEX_UUID, Strings.randomBase64UUID()));
if (!request.includeAliases() && !snapshotIndexMetaData.getAliases().isEmpty()) {
// Remove all aliases - they shouldn't be restored
@@ -269,8 +271,9 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis
rtBuilder.addAsNewRestore(updatedIndexMetaData, restoreSource, ignoreShards);
blocks.addBlocks(updatedIndexMetaData);
mdBuilder.put(updatedIndexMetaData, true);
+ renamedIndex = updatedIndexMetaData.getIndex();
} else {
- validateExistingIndex(currentIndexMetaData, snapshotIndexMetaData, renamedIndex, partial);
+ validateExistingIndex(currentIndexMetaData, snapshotIndexMetaData, renamedIndexName, partial);
// Index exists and it's closed - open it in metadata and start recovery
IndexMetaData.Builder indexMdBuilder = IndexMetaData.builder(snapshotIndexMetaData).state(IndexMetaData.State.OPEN);
indexMdBuilder.version(Math.max(snapshotIndexMetaData.getVersion(), currentIndexMetaData.getVersion() + 1));
@@ -289,10 +292,11 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis
}
}
indexMdBuilder.settings(Settings.settingsBuilder().put(snapshotIndexMetaData.getSettings()).put(IndexMetaData.SETTING_INDEX_UUID, currentIndexMetaData.getIndexUUID()));
- IndexMetaData updatedIndexMetaData = indexMdBuilder.index(renamedIndex).build();
+ IndexMetaData updatedIndexMetaData = indexMdBuilder.index(renamedIndexName).build();
rtBuilder.addAsRestore(updatedIndexMetaData, restoreSource);
blocks.updateBlocks(updatedIndexMetaData);
mdBuilder.put(updatedIndexMetaData, true);
+ renamedIndex = updatedIndexMetaData.getIndex();
}
for (int shard = 0; shard < snapshotIndexMetaData.getNumberOfShards(); shard++) {
@@ -732,7 +736,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis
List<ShardId> shardsToFail = null;
for (ObjectObjectCursor<ShardId, ShardRestoreStatus> shard : entry.shards()) {
if (!shard.value.state().completed()) {
- if (!event.state().metaData().hasIndex(shard.key.getIndex())) {
+ if (!event.state().metaData().hasIndex(shard.key.getIndex().getName())) {
if (shardsToFail == null) {
shardsToFail = new ArrayList<>();
}
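The restore path now distinguishes the rename target's name (a request-level string) from the concrete Index, which carries the UUID that the shard-level bookkeeping keys on. A condensed sketch of the two steps, using the names from the hunks above:

    String renamedIndexName = indexEntry.getKey();             // from the restore request
    IndexMetaData updatedIndexMetaData =
            indexMdBuilder.index(renamedIndexName).build();    // create-or-open path above
    Index renamedIndex = updatedIndexMetaData.getIndex();      // name + UUID
    ShardId shard0 = new ShardId(renamedIndex, 0);             // UUID-aware shard id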
diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardFailure.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardFailure.java
index 60bd25542c..7facd49088 100644
--- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardFailure.java
+++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardFailure.java
@@ -21,6 +21,7 @@ package org.elasticsearch.snapshots;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.ShardOperationFailedException;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@@ -37,9 +38,7 @@ import java.io.IOException;
* Stores information about failures that occurred during shard snapshotting process
*/
public class SnapshotShardFailure implements ShardOperationFailedException {
- private String index;
-
- private int shardId;
+ private ShardId shardId;
private String reason;
@@ -56,13 +55,11 @@ public class SnapshotShardFailure implements ShardOperationFailedException {
* Constructs new snapshot shard failure object
*
* @param nodeId node where failure occurred
- * @param index index which the shard belongs to
* @param shardId shard id
* @param reason failure reason
*/
- public SnapshotShardFailure(@Nullable String nodeId, String index, int shardId, String reason) {
+ public SnapshotShardFailure(@Nullable String nodeId, ShardId shardId, String reason) {
this.nodeId = nodeId;
- this.index = index;
this.shardId = shardId;
this.reason = reason;
status = RestStatus.INTERNAL_SERVER_ERROR;
@@ -75,7 +72,7 @@ public class SnapshotShardFailure implements ShardOperationFailedException {
*/
@Override
public String index() {
- return this.index;
+ return this.shardId.getIndexName();
}
/**
@@ -85,7 +82,7 @@ public class SnapshotShardFailure implements ShardOperationFailedException {
*/
@Override
public int shardId() {
- return this.shardId;
+ return this.shardId.id();
}
/**
@@ -110,7 +107,7 @@ public class SnapshotShardFailure implements ShardOperationFailedException {
@Override
public Throwable getCause() {
- return new IndexShardSnapshotFailedException(new ShardId(index, shardId), reason);
+ return new IndexShardSnapshotFailedException(shardId, reason);
}
/**
@@ -138,8 +135,7 @@ public class SnapshotShardFailure implements ShardOperationFailedException {
@Override
public void readFrom(StreamInput in) throws IOException {
nodeId = in.readOptionalString();
- index = in.readString();
- shardId = in.readVInt();
+ shardId = ShardId.readShardId(in);
reason = in.readString();
status = RestStatus.readFrom(in);
}
@@ -147,15 +143,14 @@ public class SnapshotShardFailure implements ShardOperationFailedException {
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalString(nodeId);
- out.writeString(index);
- out.writeVInt(shardId);
+ shardId.writeTo(out);
out.writeString(reason);
RestStatus.writeTo(out, status);
}
@Override
public String toString() {
- return "[" + index + "][" + shardId + "] failed, reason [" + reason + "]";
+ return shardId + " failed, reason [" + reason + "]";
}
/**
@@ -181,6 +176,9 @@ public class SnapshotShardFailure implements ShardOperationFailedException {
SnapshotShardFailure snapshotShardFailure = new SnapshotShardFailure();
XContentParser.Token token = parser.currentToken();
+ String index = null;
+ String indexUuid = IndexMetaData.INDEX_UUID_NA_VALUE;
+ int shardId = -1;
if (token == XContentParser.Token.START_OBJECT) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
@@ -188,13 +186,15 @@ public class SnapshotShardFailure implements ShardOperationFailedException {
token = parser.nextToken();
if (token.isValue()) {
if ("index".equals(currentFieldName)) {
- snapshotShardFailure.index = parser.text();
+ index = parser.text();
+ } else if ("index_uuid".equals(currentFieldName)) {
+ indexUuid = parser.text();
} else if ("node_id".equals(currentFieldName)) {
snapshotShardFailure.nodeId = parser.text();
} else if ("reason".equals(currentFieldName)) {
snapshotShardFailure.reason = parser.text();
} else if ("shard_id".equals(currentFieldName)) {
- snapshotShardFailure.shardId = parser.intValue();
+ shardId = parser.intValue();
} else if ("status".equals(currentFieldName)) {
snapshotShardFailure.status = RestStatus.valueOf(parser.text());
} else {
@@ -208,13 +208,21 @@ public class SnapshotShardFailure implements ShardOperationFailedException {
} else {
throw new ElasticsearchParseException("unexpected token [{}]", token);
}
+ if (index == null) {
+ throw new ElasticsearchParseException("index name was not set");
+ }
+ if (shardId == -1) {
+ throw new ElasticsearchParseException("index shard was not set");
+ }
+ snapshotShardFailure.shardId = new ShardId(index, indexUuid, shardId);
return snapshotShardFailure;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
- builder.field("index", index);
- builder.field("shard_id", shardId);
+ builder.field("index", shardId.getIndexName());
+ builder.field("index_uuid", shardId.getIndexName());
+ builder.field("shard_id", shardId.id());
builder.field("reason", reason);
if (nodeId != null) {
builder.field("node_id", nodeId);
diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java
index b1d16a04ea..909fb4a156 100644
--- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java
+++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java
@@ -488,7 +488,7 @@ public class SnapshotsService extends AbstractLifecycleComponent<SnapshotsServic
if (indexMetaData != null) {
int numberOfShards = indexMetaData.getNumberOfShards();
for (int i = 0; i < numberOfShards; i++) {
- ShardId shardId = new ShardId(index, i);
+ ShardId shardId = new ShardId(indexMetaData.getIndex(), i);
SnapshotShardFailure shardFailure = findShardFailure(snapshot.shardFailures(), shardId);
if (shardFailure != null) {
IndexShardSnapshotStatus shardSnapshotStatus = new IndexShardSnapshotStatus();
@@ -508,7 +508,7 @@ public class SnapshotsService extends AbstractLifecycleComponent<SnapshotsServic
private SnapshotShardFailure findShardFailure(List<SnapshotShardFailure> shardFailures, ShardId shardId) {
for (SnapshotShardFailure shardFailure : shardFailures) {
- if (shardId.getIndex().equals(shardFailure.index()) && shardId.getId() == shardFailure.shardId()) {
+ if (shardId.getIndexName().equals(shardFailure.index()) && shardId.getId() == shardFailure.shardId()) {
return shardFailure;
}
}
@@ -751,10 +751,10 @@ public class SnapshotsService extends AbstractLifecycleComponent<SnapshotsServic
Set<String> closed = new HashSet<>();
for (ObjectObjectCursor<ShardId, SnapshotsInProgress.ShardSnapshotStatus> entry : shards) {
if (entry.value.state() == State.MISSING) {
- if (metaData.hasIndex(entry.key.getIndex()) && metaData.index(entry.key.getIndex()).getState() == IndexMetaData.State.CLOSE) {
- closed.add(entry.key.getIndex());
+ if (metaData.hasIndex(entry.key.getIndex().getName()) && metaData.index(entry.key.getIndex()).getState() == IndexMetaData.State.CLOSE) {
+ closed.add(entry.key.getIndex().getName());
} else {
- missing.add(entry.key.getIndex());
+ missing.add(entry.key.getIndex().getName());
}
}
}
@@ -796,7 +796,7 @@ public class SnapshotsService extends AbstractLifecycleComponent<SnapshotsServic
ShardSnapshotStatus status = shardStatus.value;
if (status.state().failed()) {
failures.add(new ShardSearchFailure(status.reason(), new SearchShardTarget(status.nodeId(), shardId.getIndex(), shardId.id())));
- shardFailures.add(new SnapshotShardFailure(status.nodeId(), shardId.getIndex(), shardId.id(), status.reason()));
+ shardFailures.add(new SnapshotShardFailure(status.nodeId(), shardId, status.reason()));
}
}
Snapshot snapshot = repository.finalizeSnapshot(snapshotId, entry.indices(), entry.startTime(), failure, entry.shards().size(), Collections.unmodifiableList(shardFailures));
@@ -1029,16 +1029,16 @@ public class SnapshotsService extends AbstractLifecycleComponent<SnapshotsServic
IndexMetaData indexMetaData = metaData.index(index);
if (indexMetaData == null) {
// The index was deleted before we managed to start the snapshot - mark it as missing.
- builder.put(new ShardId(index, 0), new SnapshotsInProgress.ShardSnapshotStatus(null, State.MISSING, "missing index"));
+ builder.put(new ShardId(index, IndexMetaData.INDEX_UUID_NA_VALUE, 0), new SnapshotsInProgress.ShardSnapshotStatus(null, State.MISSING, "missing index"));
} else if (indexMetaData.getState() == IndexMetaData.State.CLOSE) {
for (int i = 0; i < indexMetaData.getNumberOfShards(); i++) {
- ShardId shardId = new ShardId(index, i);
+ ShardId shardId = new ShardId(indexMetaData.getIndex(), i);
builder.put(shardId, new SnapshotsInProgress.ShardSnapshotStatus(null, State.MISSING, "index is closed"));
}
} else {
IndexRoutingTable indexRoutingTable = clusterState.getRoutingTable().index(index);
for (int i = 0; i < indexMetaData.getNumberOfShards(); i++) {
- ShardId shardId = new ShardId(index, i);
+ ShardId shardId = new ShardId(indexMetaData.getIndex(), i);
if (indexRoutingTable != null) {
ShardRouting primary = indexRoutingTable.shard(i).primaryShard();
if (primary == null || !primary.assignedToNode()) {
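Shard ids in the snapshot bookkeeping are now built from index metadata when it exists, so they carry the real UUID; only the "index deleted before the snapshot started" path falls back to the NA placeholder. The two construction paths side by side:

    ShardId live    = new ShardId(indexMetaData.getIndex(), 0);                  // real UUID
    ShardId missing = new ShardId(index, IndexMetaData.INDEX_UUID_NA_VALUE, 0);  // index already gone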
diff --git a/core/src/main/java/org/elasticsearch/tasks/ChildTask.java b/core/src/main/java/org/elasticsearch/tasks/ChildTask.java
deleted file mode 100644
index 14d49baf39..0000000000
--- a/core/src/main/java/org/elasticsearch/tasks/ChildTask.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.tasks;
-
-import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo;
-import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.common.inject.Provider;
-
-/**
- * Child task
- */
-public class ChildTask extends Task {
-
- private final String parentNode;
-
- private final long parentId;
-
- public ChildTask(long id, String type, String action, Provider<String> description, String parentNode, long parentId) {
- super(id, type, action, description);
- this.parentNode = parentNode;
- this.parentId = parentId;
- }
-
- /**
- * Returns parent node of the task or null if task doesn't have any parent tasks
- */
- public String getParentNode() {
- return parentNode;
- }
-
- /**
- * Returns id of the parent task or -1L if task doesn't have any parent tasks
- */
- public long getParentId() {
- return parentId;
- }
-
- public TaskInfo taskInfo(DiscoveryNode node, boolean detailed) {
- return new TaskInfo(node, getId(), getType(), getAction(), detailed ? getDescription() : null, parentNode, parentId);
- }
-}
diff --git a/core/src/main/java/org/elasticsearch/tasks/Task.java b/core/src/main/java/org/elasticsearch/tasks/Task.java
index 9e925b09d1..9e02bc7c5e 100644
--- a/core/src/main/java/org/elasticsearch/tasks/Task.java
+++ b/core/src/main/java/org/elasticsearch/tasks/Task.java
@@ -29,6 +29,8 @@ import org.elasticsearch.common.inject.Provider;
*/
public class Task {
+ public static final long NO_PARENT_ID = 0;
+
private final long id;
private final String type;
@@ -37,15 +39,27 @@ public class Task {
private final Provider<String> description;
+ private final String parentNode;
+
+ private final long parentId;
+
+
public Task(long id, String type, String action, Provider<String> description) {
+ this(id, type, action, description, null, NO_PARENT_ID);
+ }
+
+ public Task(long id, String type, String action, Provider<String> description, String parentNode, long parentId) {
this.id = id;
this.type = type;
this.action = action;
this.description = description;
+ this.parentNode = parentNode;
+ this.parentId = parentId;
}
+
public TaskInfo taskInfo(DiscoveryNode node, boolean detailed) {
- return new TaskInfo(node, id, type, action, detailed ? getDescription() : null);
+ return new TaskInfo(node, getId(), getType(), getAction(), detailed ? getDescription() : null, parentNode, parentId);
}
/**
@@ -76,4 +90,18 @@ public class Task {
return description.get();
}
+ /**
+ * Returns the parent node of the task or null if the task doesn't have any parent tasks
+ */
+ public String getParentNode() {
+ return parentNode;
+ }
+
+ /**
+ * Returns the id of the parent task, or NO_PARENT_ID if the task doesn't have a parent task
+ */
+ public long getParentId() {
+ return parentId;
+ }
+
}
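With parentNode/parentId folded into Task, ChildTask's role is covered by the five-argument constructor. A sketch with hypothetical ids and actions:

    Task root  = new Task(1, "transport", "indices:data/read/search", () -> "search request");
    Task child = new Task(2, "transport", "indices:data/read/search[query]",
            () -> "query phase", "node-1", root.getId());
    assert root.getParentId() == Task.NO_PARENT_ID && root.getParentNode() == null;
    assert child.getParentId() == 1 && "node-1".equals(child.getParentNode());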
diff --git a/core/src/main/java/org/elasticsearch/tasks/TaskManager.java b/core/src/main/java/org/elasticsearch/tasks/TaskManager.java
index 68e2dcbe9a..ef05f91190 100644
--- a/core/src/main/java/org/elasticsearch/tasks/TaskManager.java
+++ b/core/src/main/java/org/elasticsearch/tasks/TaskManager.java
@@ -25,9 +25,11 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.util.concurrent.ConcurrentMapLong;
import org.elasticsearch.transport.TransportRequest;
+import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
+import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicLong;
/**
@@ -61,9 +63,9 @@ public class TaskManager extends AbstractComponent {
/**
* Unregister the task
*/
- public void unregister(Task task) {
+ public Task unregister(Task task) {
logger.trace("unregister task for id: {}", task.getId());
- tasks.remove(task.getId());
+ return tasks.remove(task.getId());
}
/**
@@ -72,5 +74,4 @@ public class TaskManager extends AbstractComponent {
public Map<Long, Task> getTasks() {
return Collections.unmodifiableMap(new HashMap<>(tasks));
}
-
}
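unregister() now returns the task it removed, or null if it was already gone, so callers can react to the completed task. For instance:

    Task removed = taskManager.unregister(task);
    if (removed != null) {
        // the task was still registered; e.g. notify any listeners it finished
    }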
diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
index 0e6204ddd1..378a849115 100644
--- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
+++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
@@ -20,6 +20,7 @@
package org.elasticsearch.threadpool;
import org.apache.lucene.util.Counter;
+import org.apache.lucene.util.IOUtils;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.io.stream.StreamInput;
@@ -34,11 +35,13 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsAbortPolicy;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.util.concurrent.XRejectedExecutionHandler;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
+import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
@@ -67,7 +70,7 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueMinutes;
/**
*
*/
-public class ThreadPool extends AbstractComponent {
+public class ThreadPool extends AbstractComponent implements Closeable {
public static class Names {
public static final String SAME = "same";
@@ -200,6 +203,8 @@ public class ThreadPool extends AbstractComponent {
static final Executor DIRECT_EXECUTOR = command -> command.run();
+ private final ThreadContext threadContext;
+
public ThreadPool(String name) {
this(Settings.builder().put("name", name).build());
}
@@ -208,7 +213,7 @@ public class ThreadPool extends AbstractComponent {
super(settings);
assert settings.get("name") != null : "ThreadPool's settings should contain a name";
-
+ threadContext = new ThreadContext(settings);
Map<String, Settings> groupSettings = THREADPOOL_GROUP_SETTING.get(settings).getAsGroups();
validate(groupSettings);
@@ -448,7 +453,7 @@ public class ThreadPool extends AbstractComponent {
} else {
logger.debug("creating thread_pool [{}], type [{}], keep_alive [{}]", name, type, keepAlive);
}
- Executor executor = EsExecutors.newCached(name, keepAlive.millis(), TimeUnit.MILLISECONDS, threadFactory);
+ Executor executor = EsExecutors.newCached(name, keepAlive.millis(), TimeUnit.MILLISECONDS, threadFactory, threadContext);
return new ExecutorHolder(executor, new Info(name, threadPoolType, -1, -1, keepAlive, null));
} else if (ThreadPoolType.FIXED == threadPoolType) {
int defaultSize = defaultSettings.getAsInt("size", EsExecutors.boundedNumberOfProcessors(settings));
@@ -483,7 +488,7 @@ public class ThreadPool extends AbstractComponent {
int size = applyHardSizeLimit(name, settings.getAsInt("size", defaultSize));
SizeValue queueSize = getAsSizeOrUnbounded(settings, "capacity", getAsSizeOrUnbounded(settings, "queue", getAsSizeOrUnbounded(settings, "queue_size", defaultQueueSize)));
logger.debug("creating thread_pool [{}], type [{}], size [{}], queue_size [{}]", name, type, size, queueSize);
- Executor executor = EsExecutors.newFixed(name, size, queueSize == null ? -1 : (int) queueSize.singles(), threadFactory);
+ Executor executor = EsExecutors.newFixed(name, size, queueSize == null ? -1 : (int) queueSize.singles(), threadFactory, threadContext);
return new ExecutorHolder(executor, new Info(name, threadPoolType, size, size, null, queueSize));
} else if (ThreadPoolType.SCALING == threadPoolType) {
TimeValue defaultKeepAlive = defaultSettings.getAsTime("keep_alive", timeValueMinutes(5));
@@ -527,7 +532,7 @@ public class ThreadPool extends AbstractComponent {
} else {
logger.debug("creating thread_pool [{}], type [{}], min [{}], size [{}], keep_alive [{}]", name, type, min, size, keepAlive);
}
- Executor executor = EsExecutors.newScaling(name, min, size, keepAlive.millis(), TimeUnit.MILLISECONDS, threadFactory);
+ Executor executor = EsExecutors.newScaling(name, min, size, keepAlive.millis(), TimeUnit.MILLISECONDS, threadFactory, threadContext);
return new ExecutorHolder(executor, new Info(name, threadPoolType, min, size, keepAlive, null));
}
throw new IllegalArgumentException("No type found [" + type + "], for [" + name + "]");
@@ -914,17 +919,30 @@ public class ThreadPool extends AbstractComponent {
*/
public static boolean terminate(ThreadPool pool, long timeout, TimeUnit timeUnit) {
if (pool != null) {
- pool.shutdown();
try {
- if (pool.awaitTermination(timeout, timeUnit)) {
- return true;
+ pool.shutdown();
+ try {
+ if (pool.awaitTermination(timeout, timeUnit)) {
+ return true;
+ }
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
}
- } catch (InterruptedException e) {
- Thread.currentThread().interrupt();
+ // last resort
+ pool.shutdownNow();
+ } finally {
+ IOUtils.closeWhileHandlingException(pool);
}
- // last resort
- pool.shutdownNow();
}
return false;
}
+
+ @Override
+ public void close() throws IOException {
+ threadContext.close();
+ }
+
+ public ThreadContext getThreadContext() {
+ return threadContext;
+ }
}
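ThreadPool now owns a ThreadContext and is Closeable; terminate() closes it in the finally block even if shutdown stalls. Callers keep the same one-liner (the pool name below is illustrative):

    ThreadPool pool = new ThreadPool("example");
    try {
        pool.executor(ThreadPool.Names.GENERIC).execute(() -> {
            // runs with the pool's ThreadContext installed
        });
    } finally {
        // shutdown, await up to 10s, shutdownNow if needed, then close()
        ThreadPool.terminate(pool, 10, TimeUnit.SECONDS);
    }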
diff --git a/core/src/main/java/org/elasticsearch/transport/Transport.java b/core/src/main/java/org/elasticsearch/transport/Transport.java
index 78b07e3aae..c930773f39 100644
--- a/core/src/main/java/org/elasticsearch/transport/Transport.java
+++ b/core/src/main/java/org/elasticsearch/transport/Transport.java
@@ -22,7 +22,6 @@ package org.elasticsearch.transport;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.settings.Setting;
-import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
@@ -36,7 +35,6 @@ import java.util.Map;
public interface Transport extends LifecycleComponent<Transport> {
- Setting<Settings> TRANSPORT_PROFILES_SETTING = Setting.groupSetting("transport.profiles.", true, Setting.Scope.CLUSTER);
Setting<Boolean> TRANSPORT_TCP_COMPRESS = Setting.boolSetting("transport.tcp.compress", false, false, Setting.Scope.CLUSTER);
void transportServiceAdapter(TransportServiceAdapter service);
diff --git a/core/src/main/java/org/elasticsearch/transport/TransportMessage.java b/core/src/main/java/org/elasticsearch/transport/TransportMessage.java
index f52f9179dd..1434a6eaa7 100644
--- a/core/src/main/java/org/elasticsearch/transport/TransportMessage.java
+++ b/core/src/main/java/org/elasticsearch/transport/TransportMessage.java
@@ -19,29 +19,20 @@
package org.elasticsearch.transport;
-import org.elasticsearch.common.ContextAndHeaderHolder;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.transport.TransportAddress;
import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
-/**
- * The transport message is also a {@link ContextAndHeaderHolder context holder} that holds <b>transient</b> context, that is,
- * the context is not serialized with message.
- */
-public abstract class TransportMessage<TM extends TransportMessage<TM>> extends ContextAndHeaderHolder implements Streamable {
+public abstract class TransportMessage<TM extends TransportMessage<TM>> implements Streamable {
private TransportAddress remoteAddress;
- protected TransportMessage() {
- }
-
- protected TransportMessage(TM message) {
- copyContextAndHeadersFrom(message);
- }
-
public void remoteAddress(TransportAddress remoteAddress) {
this.remoteAddress = remoteAddress;
}
@@ -52,16 +43,11 @@ public abstract class TransportMessage<TM extends TransportMessage<TM>> extends
@Override
public void readFrom(StreamInput in) throws IOException {
- headers = in.readBoolean() ? in.readMap() : null;
+
}
@Override
public void writeTo(StreamOutput out) throws IOException {
- if (headers == null) {
- out.writeBoolean(false);
- } else {
- out.writeBoolean(true);
- out.writeMap(headers);
- }
+
}
}
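readFrom/writeTo are intentionally left as empty hooks: subclasses keep calling super first; there is simply no header map on the wire anymore. The subclass pattern is unchanged:

    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);       // now a no-op; no serialized headers/context
        // ... read subclass fields as before ...
    }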
diff --git a/core/src/main/java/org/elasticsearch/transport/TransportRequest.java b/core/src/main/java/org/elasticsearch/transport/TransportRequest.java
index d5c1491f1a..7db7f0726d 100644
--- a/core/src/main/java/org/elasticsearch/transport/TransportRequest.java
+++ b/core/src/main/java/org/elasticsearch/transport/TransportRequest.java
@@ -26,24 +26,12 @@ import org.elasticsearch.tasks.Task;
public abstract class TransportRequest extends TransportMessage<TransportRequest> {
public static class Empty extends TransportRequest {
-
public static final Empty INSTANCE = new Empty();
-
- public Empty() {
- super();
- }
-
- public Empty(TransportRequest request) {
- super(request);
- }
}
public TransportRequest() {
}
- protected TransportRequest(TransportRequest request) {
- super(request);
- }
public Task createTask(long id, String type, String action) {
return new Task(id, type, action, this::getDescription);
diff --git a/core/src/main/java/org/elasticsearch/transport/TransportResponse.java b/core/src/main/java/org/elasticsearch/transport/TransportResponse.java
index 8ea7cd60d2..28dcd12ca5 100644
--- a/core/src/main/java/org/elasticsearch/transport/TransportResponse.java
+++ b/core/src/main/java/org/elasticsearch/transport/TransportResponse.java
@@ -24,23 +24,6 @@ package org.elasticsearch.transport;
public abstract class TransportResponse extends TransportMessage<TransportResponse> {
public static class Empty extends TransportResponse {
-
public static final Empty INSTANCE = new Empty();
-
- public Empty() {
- super();
- }
-
- public Empty(TransportResponse request) {
- super(request);
- }
- }
-
- protected TransportResponse() {
- }
-
- protected TransportResponse(TransportResponse response) {
- super(response);
}
-
}
diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java
index 5d74c4a408..a6a1cab4f0 100644
--- a/core/src/main/java/org/elasticsearch/transport/TransportService.java
+++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java
@@ -40,6 +40,7 @@ import org.elasticsearch.common.util.concurrent.ConcurrentMapLong;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.tasks.TaskManager;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.IOException;
@@ -116,7 +117,7 @@ public class TransportService extends AbstractLifecycleComponent<TransportServic
setTracerLogExclude(TRACE_LOG_EXCLUDE_SETTING.get(settings));
tracerLog = Loggers.getLogger(logger, ".tracer");
adapter = createAdapter();
- taskManager = new TaskManager(settings);
+ taskManager = createTaskManager();
}
/**
@@ -140,6 +141,10 @@ public class TransportService extends AbstractLifecycleComponent<TransportServic
return new Adapter();
}
+ protected TaskManager createTaskManager() {
+ return new TaskManager(settings);
+ }
+
// These need to be optional as they don't exist in the context of a transport client
@Inject(optional = true)
public void setDynamicSettings(ClusterSettings clusterSettings) {
@@ -288,7 +293,7 @@ public class TransportService extends AbstractLifecycleComponent<TransportServic
} else {
timeoutHandler = new TimeoutHandler(requestId);
}
- clientHandlers.put(requestId, new RequestHolder<>(handler, node, action, timeoutHandler));
+ clientHandlers.put(requestId, new RequestHolder<>(new ContextRestoreResponseHandler<T>(threadPool.getThreadContext().newStoredContext(), handler), node, action, timeoutHandler));
if (started.get() == false) {
// if we are not started the exception handling will remove the RequestHolder again and calls the handler to notify the caller.
// it will only notify if the toStop code hasn't done the work yet.
@@ -494,6 +499,7 @@ public class TransportService extends AbstractLifecycleComponent<TransportServic
@Override
public TransportResponseHandler onResponseReceived(final long requestId) {
RequestHolder holder = clientHandlers.remove(requestId);
+
if (holder == null) {
checkForTimeout(requestId);
return null;
@@ -708,6 +714,41 @@ public class TransportService extends AbstractLifecycleComponent<TransportServic
}
}
+ /**
+ * This handler wrapper ensures that the response thread executes with the correct thread context. Before any of the handle methods
+ * are invoked, we restore the context.
+ */
+ private static final class ContextRestoreResponseHandler<T extends TransportResponse> implements TransportResponseHandler<T> {
+ private final TransportResponseHandler<T> delegate;
+ private final ThreadContext.StoredContext threadContext;
+ private ContextRestoreResponseHandler(ThreadContext.StoredContext threadContext, TransportResponseHandler<T> delegate) {
+ this.delegate = delegate;
+ this.threadContext = threadContext;
+ }
+
+ @Override
+ public T newInstance() {
+ return delegate.newInstance();
+ }
+
+ @Override
+ public void handleResponse(T response) {
+ threadContext.restore();
+ delegate.handleResponse(response);
+ }
+
+ @Override
+ public void handleException(TransportException exp) {
+ threadContext.restore();
+ delegate.handleException(exp);
+ }
+
+ @Override
+ public String executor() {
+ return delegate.executor();
+ }
+ }
+
static class DirectResponseChannel implements TransportChannel {
final ESLogger logger;
final DiscoveryNode localNode;
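
The new ContextRestoreResponseHandler closes a subtle gap: a response can arrive on a network thread whose ThreadContext has nothing to do with the caller that sent the request. Capturing the context with newStoredContext() at send time and calling restore() before delegating makes the caller's headers visible again. A hedged sketch of that round trip (the header key and value are illustrative, and putHeader is assumed to be part of the ThreadContext API):

    ThreadContext ctx = threadPool.getThreadContext();
    ctx.putHeader("_user", "alice");                             // caller's request context
    ThreadContext.StoredContext stored = ctx.newStoredContext(); // captured when the request is sent
    // ... later, possibly on a different thread:
    stored.restore(); // before handleResponse/handleException, the caller's headers apply again
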
diff --git a/core/src/main/java/org/elasticsearch/transport/TransportSettings.java b/core/src/main/java/org/elasticsearch/transport/TransportSettings.java
new file mode 100644
index 0000000000..affb7e535f
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/transport/TransportSettings.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.transport;
+
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Settings;
+
+import java.util.List;
+
+import static java.util.Collections.emptyList;
+
+/**
+ * A collection of settings related to transport components, which are also needed in org.elasticsearch.bootstrap.Security.
+ * This class should only contain static code which is *safe* to load before the security manager is enforced.
+ */
+public final class TransportSettings {
+
+ public static final Setting<List<String>> HOST = Setting.listSetting("transport.host", emptyList(), s -> s, false, Setting.Scope.CLUSTER);
+ public static final Setting<List<String>> PUBLISH_HOST = Setting.listSetting("transport.publish_host", HOST, s -> s, false, Setting.Scope.CLUSTER);
+ public static final Setting<List<String>> BIND_HOST = Setting.listSetting("transport.bind_host", HOST, s -> s, false, Setting.Scope.CLUSTER);
+ public static final Setting<String> PORT = new Setting<>("transport.tcp.port", "9300-9400", s -> s, false, Setting.Scope.CLUSTER);
+ public static final Setting<Integer> PUBLISH_PORT = Setting.intSetting("transport.publish_port", -1, -1, false, Setting.Scope.CLUSTER);
+ public static final String DEFAULT_PROFILE = "default";
+ public static final Setting<Settings> TRANSPORT_PROFILES_SETTING = Setting.groupSetting("transport.profiles.", true, Setting.Scope.CLUSTER);
+
+ private TransportSettings() {
+
+ }
+}
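
Each constant above replaces a raw string key plus scattered defaults with a typed Setting that knows its own default and fallback (BIND_HOST and PUBLISH_HOST fall back to HOST). A minimal sketch of reading them (values are illustrative):

    Settings settings = Settings.builder()
            .put("transport.tcp.port", "9500-9600")
            .build();
    String portRange = TransportSettings.PORT.get(settings);            // "9500-9600"
    List<String> bindHosts = TransportSettings.BIND_HOST.get(settings); // empty: HOST's default applies
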
diff --git a/core/src/main/java/org/elasticsearch/transport/local/LocalTransport.java b/core/src/main/java/org/elasticsearch/transport/local/LocalTransport.java
index ba067fdabd..7a41bf626c 100644
--- a/core/src/main/java/org/elasticsearch/transport/local/LocalTransport.java
+++ b/core/src/main/java/org/elasticsearch/transport/local/LocalTransport.java
@@ -36,6 +36,7 @@ import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.EsExecutors;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.ActionNotFoundTransportException;
import org.elasticsearch.transport.ConnectTransportException;
@@ -72,7 +73,7 @@ import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.new
public class LocalTransport extends AbstractLifecycleComponent<Transport> implements Transport {
public static final String LOCAL_TRANSPORT_THREAD_NAME_PREFIX = "local_transport";
- private final ThreadPool threadPool;
+ final ThreadPool threadPool;
private final ThreadPoolExecutor workers;
private final Version version;
private volatile TransportServiceAdapter transportServiceAdapter;
@@ -96,7 +97,7 @@ public class LocalTransport extends AbstractLifecycleComponent<Transport> implem
int queueSize = this.settings.getAsInt(TRANSPORT_LOCAL_QUEUE, -1);
logger.debug("creating [{}] workers, queue_size [{}]", workerCount, queueSize);
final ThreadFactory threadFactory = EsExecutors.daemonThreadFactory(this.settings, LOCAL_TRANSPORT_THREAD_NAME_PREFIX);
- this.workers = EsExecutors.newFixed(LOCAL_TRANSPORT_THREAD_NAME_PREFIX, workerCount, queueSize, threadFactory);
+ this.workers = EsExecutors.newFixed(LOCAL_TRANSPORT_THREAD_NAME_PREFIX, workerCount, queueSize, threadFactory, threadPool.getThreadContext());
this.namedWriteableRegistry = namedWriteableRegistry;
}
@@ -209,6 +210,7 @@ public class LocalTransport extends AbstractLifecycleComponent<Transport> implem
status = TransportStatus.setRequest(status);
stream.writeByte(status); // 0 for request, 1 for response.
+ threadPool.getThreadContext().writeTo(stream);
stream.writeString(action);
request.writeTo(stream);
@@ -220,12 +222,11 @@ public class LocalTransport extends AbstractLifecycleComponent<Transport> implem
}
final byte[] data = stream.bytes().toBytes();
-
transportServiceAdapter.sent(data.length);
transportServiceAdapter.onRequestSent(node, requestId, action, request, options);
- targetTransport.workers().execute(new Runnable() {
- @Override
- public void run() {
+ targetTransport.workers().execute(() -> {
+ ThreadContext threadContext = targetTransport.threadPool.getThreadContext();
+ try (ThreadContext.StoredContext context = threadContext.stashContext()) {
targetTransport.messageReceived(data, action, LocalTransport.this, version, requestId);
}
});
@@ -246,8 +247,9 @@ public class LocalTransport extends AbstractLifecycleComponent<Transport> implem
long requestId = stream.readLong();
byte status = stream.readByte();
boolean isRequest = TransportStatus.isRequest(status);
-
if (isRequest) {
+ ThreadContext threadContext = threadPool.getThreadContext();
+ threadContext.readHeaders(stream);
handleRequest(stream, requestId, sourceTransport, version);
} else {
final TransportResponseHandler handler = transportServiceAdapter.onResponseReceived(requestId);
@@ -322,6 +324,7 @@ public class LocalTransport extends AbstractLifecycleComponent<Transport> implem
logger.warn("Failed to send error message back to client for action [" + action + "]", e);
logger.warn("Actual Exception", e1);
}
+
}
}
@@ -338,15 +341,11 @@ public class LocalTransport extends AbstractLifecycleComponent<Transport> implem
}
protected void handleParsedResponse(final TransportResponse response, final TransportResponseHandler handler) {
- threadPool.executor(handler.executor()).execute(new Runnable() {
- @SuppressWarnings({"unchecked"})
- @Override
- public void run() {
- try {
- handler.handleResponse(response);
- } catch (Throwable e) {
- handleException(handler, new ResponseHandlerFailureTransportException(e));
- }
+ threadPool.executor(handler.executor()).execute(() -> {
+ try {
+ handler.handleResponse(response);
+ } catch (Throwable e) {
+ handleException(handler, new ResponseHandlerFailureTransportException(e));
}
});
}
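
Both the sending and receiving paths now bracket message handling with stashContext(), so a worker thread never processes a message under whatever context the previous task left behind. The idiom in isolation (a sketch; StoredContext is AutoCloseable, which is what makes the try-with-resources form work):

    ThreadContext threadContext = threadPool.getThreadContext();
    try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
        // runs with a clean context; incoming headers are applied separately
        // via threadContext.readHeaders(stream) during deserialization
    }
    // on close, the thread's previous context is restored
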
diff --git a/core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java b/core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java
index e1e85e9a12..aad31fd8cc 100644
--- a/core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java
+++ b/core/src/main/java/org/elasticsearch/transport/local/LocalTransportChannel.java
@@ -21,6 +21,7 @@ package org.elasticsearch.transport.local;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.transport.RemoteTransportException;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportResponse;
@@ -79,9 +80,9 @@ public class LocalTransportChannel implements TransportChannel {
stream.writeByte(status); // 0 for request, 1 for response.
response.writeTo(stream);
final byte[] data = stream.bytes().toBytes();
- targetTransport.workers().execute(new Runnable() {
- @Override
- public void run() {
+ targetTransport.workers().execute(() -> {
+ ThreadContext threadContext = targetTransport.threadPool.getThreadContext();
+ try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
targetTransport.messageReceived(data, action, sourceTransport, version, null);
}
});
@@ -97,9 +98,9 @@ public class LocalTransportChannel implements TransportChannel {
stream.writeThrowable(tx);
final byte[] data = stream.bytes().toBytes();
- targetTransport.workers().execute(new Runnable() {
- @Override
- public void run() {
+ targetTransport.workers().execute(() -> {
+ ThreadContext threadContext = targetTransport.threadPool.getThreadContext();
+ try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
targetTransport.messageReceived(data, action, sourceTransport, version, null);
}
});
diff --git a/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java b/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java
index 8df17f7323..6732b26ddb 100644
--- a/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java
+++ b/core/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java
@@ -30,6 +30,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.ActionNotFoundTransportException;
import org.elasticsearch.transport.RemoteTransportException;
@@ -64,9 +65,11 @@ public class MessageChannelHandler extends SimpleChannelUpstreamHandler {
protected final TransportServiceAdapter transportServiceAdapter;
protected final NettyTransport transport;
protected final String profileName;
+ private final ThreadContext threadContext;
public MessageChannelHandler(NettyTransport transport, ESLogger logger, String profileName) {
this.threadPool = transport.threadPool();
+ this.threadContext = threadPool.getThreadContext();
this.transportServiceAdapter = transport.transportServiceAdapter();
this.transport = transport;
this.logger = logger;
@@ -101,7 +104,7 @@ public class MessageChannelHandler extends SimpleChannelUpstreamHandler {
// buffer, or in the cumulation buffer, which is cleaned each time
StreamInput streamIn = ChannelBufferStreamInputFactory.create(buffer, size);
boolean success = false;
- try {
+ try (ThreadContext.StoredContext tCtx = threadContext.stashContext()) {
long requestId = streamIn.readLong();
byte status = streamIn.readByte();
Version version = Version.fromId(streamIn.readInt());
@@ -123,8 +126,8 @@ public class MessageChannelHandler extends SimpleChannelUpstreamHandler {
streamIn = compressor.streamInput(streamIn);
}
streamIn.setVersion(version);
-
if (TransportStatus.isRequest(status)) {
+ threadContext.readHeaders(streamIn);
String action = handleRequest(ctx.getChannel(), streamIn, requestId, version);
// Check that the entire message has been read
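
On the wire, the context travels as a header block written right after the status byte: writeTo(stream) on the sending side, readHeaders(streamIn) on the receiving side before the request is dispatched. A hedged round-trip sketch (StreamInput.wrap taking a byte[] is assumed to exist in this codebase):

    BytesStreamOutput out = new BytesStreamOutput();
    threadContext.writeTo(out);                                // sender: serialize request headers
    StreamInput in = StreamInput.wrap(out.bytes().toBytes());
    threadContext.readHeaders(in);                             // receiver: apply them locally
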
diff --git a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java
index 6a6a6c3801..99fbac17b6 100644
--- a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java
+++ b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java
@@ -24,7 +24,6 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.bytes.ReleasablePagedBytesReference;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.compress.CompressorFactory;
@@ -40,7 +39,9 @@ import org.elasticsearch.common.netty.OpenChannelsHandler;
import org.elasticsearch.common.netty.ReleaseChannelFutureListener;
import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.common.network.NetworkService;
+import org.elasticsearch.common.network.NetworkService.TcpSettings;
import org.elasticsearch.common.network.NetworkUtils;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
@@ -63,6 +64,7 @@ import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportServiceAdapter;
+import org.elasticsearch.transport.TransportSettings;
import org.elasticsearch.transport.support.TransportStatus;
import org.jboss.netty.bootstrap.ClientBootstrap;
import org.jboss.netty.bootstrap.ServerBootstrap;
@@ -117,18 +119,6 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static java.util.Collections.unmodifiableMap;
-import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_BLOCKING;
-import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_BLOCKING_CLIENT;
-import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_BLOCKING_SERVER;
-import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_CONNECT_TIMEOUT;
-import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_DEFAULT_CONNECT_TIMEOUT;
-import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_DEFAULT_RECEIVE_BUFFER_SIZE;
-import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_DEFAULT_SEND_BUFFER_SIZE;
-import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_KEEP_ALIVE;
-import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_NO_DELAY;
-import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_RECEIVE_BUFFER_SIZE;
-import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_REUSE_ADDRESS;
-import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_SEND_BUFFER_SIZE;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.common.transport.NetworkExceptionHelper.isCloseConnectionException;
import static org.elasticsearch.common.transport.NetworkExceptionHelper.isConnectException;
@@ -152,16 +142,45 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
public static final String TRANSPORT_CLIENT_WORKER_THREAD_NAME_PREFIX = "transport_client_worker";
public static final String TRANSPORT_CLIENT_BOSS_THREAD_NAME_PREFIX = "transport_client_boss";
- public static final String WORKER_COUNT = "transport.netty.worker_count";
- public static final String CONNECTIONS_PER_NODE_RECOVERY = "transport.connections_per_node.recovery";
- public static final String CONNECTIONS_PER_NODE_BULK = "transport.connections_per_node.bulk";
- public static final String CONNECTIONS_PER_NODE_REG = "transport.connections_per_node.reg";
- public static final String CONNECTIONS_PER_NODE_STATE = "transport.connections_per_node.state";
- public static final String CONNECTIONS_PER_NODE_PING = "transport.connections_per_node.ping";
- public static final String PING_SCHEDULE = "transport.ping_schedule"; // the scheduled internal ping interval setting
- public static final TimeValue DEFAULT_PING_SCHEDULE = TimeValue.timeValueMillis(-1); // the default ping schedule, defaults to disabled (-1)
- public static final String DEFAULT_PORT_RANGE = "9300-9400";
- public static final String DEFAULT_PROFILE = "default";
+ public static final Setting<Integer> WORKER_COUNT = new Setting<>("transport.netty.worker_count",
+ (s) -> Integer.toString(EsExecutors.boundedNumberOfProcessors(s) * 2), (s) -> Setting.parseInt(s, 1, "transport.netty.worker_count"),
+ false, Setting.Scope.CLUSTER);
+ public static final Setting<Integer> CONNECTIONS_PER_NODE_RECOVERY = Setting.intSetting("transport.connections_per_node.recovery", 2, 1, false, Setting.Scope.CLUSTER);
+ public static final Setting<Integer> CONNECTIONS_PER_NODE_BULK = Setting.intSetting("transport.connections_per_node.bulk", 3, 1, false, Setting.Scope.CLUSTER);
+ public static final Setting<Integer> CONNECTIONS_PER_NODE_REG = Setting.intSetting("transport.connections_per_node.reg", 6, 1, false, Setting.Scope.CLUSTER);
+ public static final Setting<Integer> CONNECTIONS_PER_NODE_STATE = Setting.intSetting("transport.connections_per_node.state", 1, 1, false, Setting.Scope.CLUSTER);
+ public static final Setting<Integer> CONNECTIONS_PER_NODE_PING = Setting.intSetting("transport.connections_per_node.ping", 1, 1, false, Setting.Scope.CLUSTER);
+ // the scheduled internal ping interval setting, defaults to disabled (-1)
+ public static final Setting<TimeValue> PING_SCHEDULE = Setting.timeSetting("transport.ping_schedule", TimeValue.timeValueSeconds(-1), false, Setting.Scope.CLUSTER);
+ public static final Setting<Boolean> TCP_BLOCKING_CLIENT = Setting.boolSetting("transport.tcp.blocking_client", TcpSettings.TCP_BLOCKING_CLIENT, false, Setting.Scope.CLUSTER);
+ public static final Setting<TimeValue> TCP_CONNECT_TIMEOUT = Setting.timeSetting("transport.tcp.connect_timeout", TcpSettings.TCP_CONNECT_TIMEOUT, false, Setting.Scope.CLUSTER);
+ public static final Setting<Boolean> TCP_NO_DELAY = Setting.boolSetting("transport.tcp_no_delay", TcpSettings.TCP_NO_DELAY, false, Setting.Scope.CLUSTER);
+ public static final Setting<Boolean> TCP_KEEP_ALIVE = Setting.boolSetting("transport.tcp.keep_alive", TcpSettings.TCP_KEEP_ALIVE, false, Setting.Scope.CLUSTER);
+ public static final Setting<Boolean> TCP_BLOCKING_SERVER = Setting.boolSetting("transport.tcp.blocking_server", TcpSettings.TCP_BLOCKING_SERVER, false, Setting.Scope.CLUSTER);
+ public static final Setting<Boolean> TCP_REUSE_ADDRESS = Setting.boolSetting("transport.tcp.reuse_address", TcpSettings.TCP_REUSE_ADDRESS, false, Setting.Scope.CLUSTER);
+
+ public static final Setting<ByteSizeValue> TCP_SEND_BUFFER_SIZE = Setting.byteSizeSetting("transport.tcp.send_buffer_size", TcpSettings.TCP_SEND_BUFFER_SIZE, false, Setting.Scope.CLUSTER);
+ public static final Setting<ByteSizeValue> TCP_RECEIVE_BUFFER_SIZE = Setting.byteSizeSetting("transport.tcp.receive_buffer_size", TcpSettings.TCP_RECEIVE_BUFFER_SIZE, false, Setting.Scope.CLUSTER);
+
+ public static final Setting<ByteSizeValue> NETTY_MAX_CUMULATION_BUFFER_CAPACITY = Setting.byteSizeSetting("transport.netty.max_cumulation_buffer_capacity", new ByteSizeValue(-1), false, Setting.Scope.CLUSTER);
+ public static final Setting<Integer> NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS = Setting.intSetting("transport.netty.max_composite_buffer_components", -1, -1, false, Setting.Scope.CLUSTER);
+
+
+ // See AdaptiveReceiveBufferSizePredictor#DEFAULT_XXX for netty's defaults; we can use higher values, even a fixed one
+ public static final Setting<ByteSizeValue> NETTY_RECEIVE_PREDICTOR_SIZE = Setting.byteSizeSetting("transport.netty.receive_predictor_size",
+ settings -> {
+ long defaultReceiverPredictor = 512 * 1024;
+ if (JvmInfo.jvmInfo().getMem().getDirectMemoryMax().bytes() > 0) {
+ // we can guess a better default...
+ long l = (long) ((0.3 * JvmInfo.jvmInfo().getMem().getDirectMemoryMax().bytes()) / WORKER_COUNT.get(settings));
+ defaultReceiverPredictor = Math.min(defaultReceiverPredictor, Math.max(l, 64 * 1024));
+ }
+ return new ByteSizeValue(defaultReceiverPredictor).toString();
+ }, false, Setting.Scope.CLUSTER);
+ public static final Setting<ByteSizeValue> NETTY_RECEIVE_PREDICTOR_MIN = Setting.byteSizeSetting("transport.netty.receive_predictor_min", NETTY_RECEIVE_PREDICTOR_SIZE, false, Setting.Scope.CLUSTER);
+ public static final Setting<ByteSizeValue> NETTY_RECEIVE_PREDICTOR_MAX = Setting.byteSizeSetting("transport.netty.receive_predictor_max", NETTY_RECEIVE_PREDICTOR_SIZE, false, Setting.Scope.CLUSTER);
+ public static final Setting<Integer> NETTY_BOSS_COUNT = Setting.intSetting("transport.netty.boss_count", 1, 1, false, Setting.Scope.CLUSTER);
+
protected final NetworkService networkService;
protected final Version version;
@@ -207,8 +226,6 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
final ScheduledPing scheduledPing;
@Inject
- @SuppressForbidden(reason = "sets org.jboss.netty.epollBugWorkaround based on netty.epollBugWorkaround")
- // TODO: why be confusing like this? just let the user do it with the netty parameter instead!
public NettyTransport(Settings settings, ThreadPool threadPool, NetworkService networkService, BigArrays bigArrays, Version version, NamedWriteableRegistry namedWriteableRegistry) {
super(settings);
this.threadPool = threadPool;
@@ -216,44 +233,22 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
this.bigArrays = bigArrays;
this.version = version;
- if (settings.getAsBoolean("netty.epollBugWorkaround", false)) {
- System.setProperty("org.jboss.netty.epollBugWorkaround", "true");
- }
-
- this.workerCount = settings.getAsInt(WORKER_COUNT, EsExecutors.boundedNumberOfProcessors(settings) * 2);
- this.blockingClient = settings.getAsBoolean("transport.netty.transport.tcp.blocking_client", settings.getAsBoolean(TCP_BLOCKING_CLIENT, settings.getAsBoolean(TCP_BLOCKING, false)));
- this.connectTimeout = this.settings.getAsTime("transport.netty.connect_timeout", settings.getAsTime("transport.tcp.connect_timeout", settings.getAsTime(TCP_CONNECT_TIMEOUT, TCP_DEFAULT_CONNECT_TIMEOUT)));
- this.maxCumulationBufferCapacity = this.settings.getAsBytesSize("transport.netty.max_cumulation_buffer_capacity", null);
- this.maxCompositeBufferComponents = this.settings.getAsInt("transport.netty.max_composite_buffer_components", -1);
+ this.workerCount = WORKER_COUNT.get(settings);
+ this.blockingClient = TCP_BLOCKING_CLIENT.get(settings);
+ this.connectTimeout = TCP_CONNECT_TIMEOUT.get(settings);
+ this.maxCumulationBufferCapacity = NETTY_MAX_CUMULATION_BUFFER_CAPACITY.get(settings);
+ this.maxCompositeBufferComponents = NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS.get(settings);
this.compress = Transport.TRANSPORT_TCP_COMPRESS.get(settings);
- this.connectionsPerNodeRecovery = this.settings.getAsInt("transport.netty.connections_per_node.recovery", settings.getAsInt(CONNECTIONS_PER_NODE_RECOVERY, 2));
- this.connectionsPerNodeBulk = this.settings.getAsInt("transport.netty.connections_per_node.bulk", settings.getAsInt(CONNECTIONS_PER_NODE_BULK, 3));
- this.connectionsPerNodeReg = this.settings.getAsInt("transport.netty.connections_per_node.reg", settings.getAsInt(CONNECTIONS_PER_NODE_REG, 6));
- this.connectionsPerNodeState = this.settings.getAsInt("transport.netty.connections_per_node.high", settings.getAsInt(CONNECTIONS_PER_NODE_STATE, 1));
- this.connectionsPerNodePing = this.settings.getAsInt("transport.netty.connections_per_node.ping", settings.getAsInt(CONNECTIONS_PER_NODE_PING, 1));
-
- // we want to have at least 1 for reg/state/ping
- if (this.connectionsPerNodeReg == 0) {
- throw new IllegalArgumentException("can't set [connection_per_node.reg] to 0");
- }
- if (this.connectionsPerNodePing == 0) {
- throw new IllegalArgumentException("can't set [connection_per_node.ping] to 0");
- }
- if (this.connectionsPerNodeState == 0) {
- throw new IllegalArgumentException("can't set [connection_per_node.state] to 0");
- }
-
- long defaultReceiverPredictor = 512 * 1024;
- if (JvmInfo.jvmInfo().getMem().getDirectMemoryMax().bytes() > 0) {
- // we can guess a better default...
- long l = (long) ((0.3 * JvmInfo.jvmInfo().getMem().getDirectMemoryMax().bytes()) / workerCount);
- defaultReceiverPredictor = Math.min(defaultReceiverPredictor, Math.max(l, 64 * 1024));
- }
+ this.connectionsPerNodeRecovery = CONNECTIONS_PER_NODE_RECOVERY.get(settings);
+ this.connectionsPerNodeBulk = CONNECTIONS_PER_NODE_BULK.get(settings);
+ this.connectionsPerNodeReg = CONNECTIONS_PER_NODE_REG.get(settings);
+ this.connectionsPerNodeState = CONNECTIONS_PER_NODE_STATE.get(settings);
+ this.connectionsPerNodePing = CONNECTIONS_PER_NODE_PING.get(settings);
// See AdaptiveReceiveBufferSizePredictor#DEFAULT_XXX for netty's defaults; we can use higher values, even a fixed one
- this.receivePredictorMin = this.settings.getAsBytesSize("transport.netty.receive_predictor_min", this.settings.getAsBytesSize("transport.netty.receive_predictor_size", new ByteSizeValue(defaultReceiverPredictor)));
- this.receivePredictorMax = this.settings.getAsBytesSize("transport.netty.receive_predictor_max", this.settings.getAsBytesSize("transport.netty.receive_predictor_size", new ByteSizeValue(defaultReceiverPredictor)));
+ this.receivePredictorMin = NETTY_RECEIVE_PREDICTOR_MIN.get(settings);
+ this.receivePredictorMax = NETTY_RECEIVE_PREDICTOR_MAX.get(settings);
if (receivePredictorMax.bytes() == receivePredictorMin.bytes()) {
receiveBufferSizePredictorFactory = new FixedReceiveBufferSizePredictorFactory((int) receivePredictorMax.bytes());
} else {
@@ -261,7 +256,7 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
}
this.scheduledPing = new ScheduledPing();
- this.pingSchedule = settings.getAsTime(PING_SCHEDULE, DEFAULT_PING_SCHEDULE);
+ this.pingSchedule = PING_SCHEDULE.get(settings);
if (pingSchedule.millis() > 0) {
threadPool.schedule(pingSchedule, ThreadPool.Names.GENERIC, scheduledPing);
}
@@ -290,19 +285,19 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
boolean success = false;
try {
clientBootstrap = createClientBootstrap();
- if (settings.getAsBoolean("network.server", true)) {
+ if (NetworkService.NETWORK_SERVER.get(settings)) {
final OpenChannelsHandler openChannels = new OpenChannelsHandler(logger);
this.serverOpenChannels = openChannels;
// extract default profile first and create standard bootstrap
- Map<String, Settings> profiles = TRANSPORT_PROFILES_SETTING.get(settings()).getAsGroups(true);
- if (!profiles.containsKey(DEFAULT_PROFILE)) {
+ Map<String, Settings> profiles = TransportSettings.TRANSPORT_PROFILES_SETTING.get(settings()).getAsGroups(true);
+ if (!profiles.containsKey(TransportSettings.DEFAULT_PROFILE)) {
profiles = new HashMap<>(profiles);
- profiles.put(DEFAULT_PROFILE, Settings.EMPTY);
+ profiles.put(TransportSettings.DEFAULT_PROFILE, Settings.EMPTY);
}
Settings fallbackSettings = createFallbackSettings();
- Settings defaultSettings = profiles.get(DEFAULT_PROFILE);
+ Settings defaultSettings = profiles.get(TransportSettings.DEFAULT_PROFILE);
// loop through all profiles and start them up, special handling for default one
for (Map.Entry<String, Settings> entry : profiles.entrySet()) {
@@ -312,10 +307,10 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
if (!Strings.hasLength(name)) {
logger.info("transport profile configured without a name. skipping profile with settings [{}]", profileSettings.toDelimitedString(','));
continue;
- } else if (DEFAULT_PROFILE.equals(name)) {
+ } else if (TransportSettings.DEFAULT_PROFILE.equals(name)) {
profileSettings = settingsBuilder()
.put(profileSettings)
- .put("port", profileSettings.get("port", this.settings.get("transport.tcp.port", DEFAULT_PORT_RANGE)))
+ .put("port", profileSettings.get("port", TransportSettings.PORT.get(this.settings)))
.build();
} else if (profileSettings.get("port") == null) {
// if profile does not have a port, skip it
@@ -352,7 +347,7 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
if (blockingClient) {
clientBootstrap = new ClientBootstrap(new OioClientSocketChannelFactory(Executors.newCachedThreadPool(daemonThreadFactory(settings, TRANSPORT_CLIENT_WORKER_THREAD_NAME_PREFIX))));
} else {
- int bossCount = settings.getAsInt("transport.netty.boss_count", 1);
+ int bossCount = NETTY_BOSS_COUNT.get(settings);
clientBootstrap = new ClientBootstrap(new NioClientSocketChannelFactory(
Executors.newCachedThreadPool(daemonThreadFactory(settings, TRANSPORT_CLIENT_BOSS_THREAD_NAME_PREFIX)),
bossCount,
@@ -362,29 +357,25 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
clientBootstrap.setPipelineFactory(configureClientChannelPipelineFactory());
clientBootstrap.setOption("connectTimeoutMillis", connectTimeout.millis());
- String tcpNoDelay = settings.get("transport.netty.tcp_no_delay", settings.get(TCP_NO_DELAY, "true"));
- if (!"default".equals(tcpNoDelay)) {
- clientBootstrap.setOption("tcpNoDelay", Booleans.parseBoolean(tcpNoDelay, null));
- }
+ boolean tcpNoDelay = TCP_NO_DELAY.get(settings);
+ clientBootstrap.setOption("tcpNoDelay", tcpNoDelay);
- String tcpKeepAlive = settings.get("transport.netty.tcp_keep_alive", settings.get(TCP_KEEP_ALIVE, "true"));
- if (!"default".equals(tcpKeepAlive)) {
- clientBootstrap.setOption("keepAlive", Booleans.parseBoolean(tcpKeepAlive, null));
- }
+ boolean tcpKeepAlive = TCP_KEEP_ALIVE.get(settings);
+ clientBootstrap.setOption("keepAlive", tcpKeepAlive);
- ByteSizeValue tcpSendBufferSize = settings.getAsBytesSize("transport.netty.tcp_send_buffer_size", settings.getAsBytesSize(TCP_SEND_BUFFER_SIZE, TCP_DEFAULT_SEND_BUFFER_SIZE));
- if (tcpSendBufferSize != null && tcpSendBufferSize.bytes() > 0) {
+ ByteSizeValue tcpSendBufferSize = TCP_SEND_BUFFER_SIZE.get(settings);
+ if (tcpSendBufferSize.bytes() > 0) {
clientBootstrap.setOption("sendBufferSize", tcpSendBufferSize.bytes());
}
- ByteSizeValue tcpReceiveBufferSize = settings.getAsBytesSize("transport.netty.tcp_receive_buffer_size", settings.getAsBytesSize(TCP_RECEIVE_BUFFER_SIZE, TCP_DEFAULT_RECEIVE_BUFFER_SIZE));
- if (tcpReceiveBufferSize != null && tcpReceiveBufferSize.bytes() > 0) {
+ ByteSizeValue tcpReceiveBufferSize = TCP_RECEIVE_BUFFER_SIZE.get(settings);
+ if (tcpReceiveBufferSize.bytes() > 0) {
clientBootstrap.setOption("receiveBufferSize", tcpReceiveBufferSize.bytes());
}
clientBootstrap.setOption("receiveBufferSizePredictorFactory", receiveBufferSizePredictorFactory);
- boolean reuseAddress = settings.getAsBoolean("transport.netty.reuse_address", settings.getAsBoolean(TCP_REUSE_ADDRESS, NetworkUtils.defaultReuseAddress()));
+ boolean reuseAddress = TCP_REUSE_ADDRESS.get(settings);
clientBootstrap.setOption("reuseAddress", reuseAddress);
return clientBootstrap;
@@ -393,36 +384,32 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
private Settings createFallbackSettings() {
Settings.Builder fallbackSettingsBuilder = settingsBuilder();
- String fallbackBindHost = settings.get("transport.netty.bind_host", settings.get("transport.bind_host", settings.get("transport.host")));
- if (fallbackBindHost != null) {
- fallbackSettingsBuilder.put("bind_host", fallbackBindHost);
+ List<String> fallbackBindHost = TransportSettings.BIND_HOST.get(settings);
+ if (fallbackBindHost.isEmpty() == false) {
+ fallbackSettingsBuilder.putArray("bind_host", fallbackBindHost);
}
- String fallbackPublishHost = settings.get("transport.netty.publish_host", settings.get("transport.publish_host", settings.get("transport.host")));
- if (fallbackPublishHost != null) {
- fallbackSettingsBuilder.put("publish_host", fallbackPublishHost);
+ List<String> fallbackPublishHost = TransportSettings.PUBLISH_HOST.get(settings);
+ if (fallbackPublishHost.isEmpty() == false) {
+ fallbackSettingsBuilder.putArray("publish_host", fallbackPublishHost);
}
- String fallbackTcpNoDelay = settings.get("transport.netty.tcp_no_delay", settings.get(TCP_NO_DELAY, "true"));
- if (fallbackTcpNoDelay != null) {
- fallbackSettingsBuilder.put("tcp_no_delay", fallbackTcpNoDelay);
- }
+ boolean fallbackTcpNoDelay = settings.getAsBoolean("transport.netty.tcp_no_delay", TcpSettings.TCP_NO_DELAY.get(settings));
+ fallbackSettingsBuilder.put("tcp_no_delay", fallbackTcpNoDelay);
- String fallbackTcpKeepAlive = settings.get("transport.netty.tcp_keep_alive", settings.get(TCP_KEEP_ALIVE, "true"));
- if (fallbackTcpKeepAlive != null) {
+ boolean fallbackTcpKeepAlive = settings.getAsBoolean("transport.netty.tcp_keep_alive", TcpSettings.TCP_KEEP_ALIVE.get(settings));
fallbackSettingsBuilder.put("tcp_keep_alive", fallbackTcpKeepAlive);
- }
- boolean fallbackReuseAddress = settings.getAsBoolean("transport.netty.reuse_address", settings.getAsBoolean(TCP_REUSE_ADDRESS, NetworkUtils.defaultReuseAddress()));
+ boolean fallbackReuseAddress = settings.getAsBoolean("transport.netty.reuse_address", TcpSettings.TCP_REUSE_ADDRESS.get(settings));
fallbackSettingsBuilder.put("reuse_address", fallbackReuseAddress);
- ByteSizeValue fallbackTcpSendBufferSize = settings.getAsBytesSize("transport.netty.tcp_send_buffer_size", settings.getAsBytesSize(TCP_SEND_BUFFER_SIZE, TCP_DEFAULT_SEND_BUFFER_SIZE));
- if (fallbackTcpSendBufferSize != null) {
+ ByteSizeValue fallbackTcpSendBufferSize = settings.getAsBytesSize("transport.netty.tcp_send_buffer_size", TcpSettings.TCP_SEND_BUFFER_SIZE.get(settings));
+ if (fallbackTcpSendBufferSize.bytes() >= 0) {
fallbackSettingsBuilder.put("tcp_send_buffer_size", fallbackTcpSendBufferSize);
}
- ByteSizeValue fallbackTcpBufferSize = settings.getAsBytesSize("transport.netty.tcp_receive_buffer_size", settings.getAsBytesSize(TCP_RECEIVE_BUFFER_SIZE, TCP_DEFAULT_RECEIVE_BUFFER_SIZE));
- if (fallbackTcpBufferSize != null) {
+ ByteSizeValue fallbackTcpBufferSize = settings.getAsBytesSize("transport.netty.tcp_receive_buffer_size", TcpSettings.TCP_RECEIVE_BUFFER_SIZE.get(settings));
+ if (fallbackTcpBufferSize.bytes() >= 0) {
fallbackSettingsBuilder.put("tcp_receive_buffer_size", fallbackTcpBufferSize);
}
@@ -455,7 +442,7 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
final BoundTransportAddress boundTransportAddress = createBoundTransportAddress(name, settings, boundAddresses);
- if (DEFAULT_PROFILE.equals(name)) {
+ if (TransportSettings.DEFAULT_PROFILE.equals(name)) {
this.boundAddress = boundTransportAddress;
} else {
profileBoundAddresses.put(name, boundTransportAddress);
@@ -508,8 +495,8 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
}
final String[] publishHosts;
- if (DEFAULT_PROFILE.equals(name)) {
- publishHosts = settings.getAsArray("transport.netty.publish_host", settings.getAsArray("transport.publish_host", settings.getAsArray("transport.host", null)));
+ if (TransportSettings.DEFAULT_PROFILE.equals(name)) {
+ publishHosts = TransportSettings.PUBLISH_HOST.get(settings).toArray(Strings.EMPTY_ARRAY);
} else {
publishHosts = profileSettings.getAsArray("publish_host", boundAddressesHostStrings);
}
@@ -521,15 +508,15 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
throw new BindTransportException("Failed to resolve publish address", e);
}
- Integer publishPort;
- if (DEFAULT_PROFILE.equals(name)) {
- publishPort = settings.getAsInt("transport.netty.publish_port", settings.getAsInt("transport.publish_port", null));
+ int publishPort;
+ if (TransportSettings.DEFAULT_PROFILE.equals(name)) {
+ publishPort = TransportSettings.PUBLISH_PORT.get(settings);
} else {
- publishPort = profileSettings.getAsInt("publish_port", null);
+ publishPort = profileSettings.getAsInt("publish_port", -1);
}
// if the port was not explicitly provided, search boundAddresses for the port of an address that matches publishInetAddress
- if (publishPort == null) {
+ if (publishPort < 0) {
for (InetSocketAddress boundAddress : boundAddresses) {
InetAddress boundInetAddress = boundAddress.getAddress();
if (boundInetAddress.isAnyLocalAddress() || boundInetAddress.equals(publishInetAddress)) {
@@ -540,7 +527,7 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
}
// if the port still doesn't match, just take the port of the first bound address
- if (publishPort == null) {
+ if (publishPort < 0) {
// TODO: In case of DEFAULT_PROFILE we should probably fail here, as publish address does not match any bound address
// In case of a custom profile, we might use the publish address of the default profile
publishPort = boundAddresses.get(0).getPort();
@@ -552,15 +539,15 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
}
private void createServerBootstrap(String name, Settings settings) {
- boolean blockingServer = settings.getAsBoolean("transport.tcp.blocking_server", this.settings.getAsBoolean(TCP_BLOCKING_SERVER, this.settings.getAsBoolean(TCP_BLOCKING, false)));
+ boolean blockingServer = TCP_BLOCKING_SERVER.get(settings);
String port = settings.get("port");
String bindHost = settings.get("bind_host");
String publishHost = settings.get("publish_host");
String tcpNoDelay = settings.get("tcp_no_delay");
String tcpKeepAlive = settings.get("tcp_keep_alive");
boolean reuseAddress = settings.getAsBoolean("reuse_address", NetworkUtils.defaultReuseAddress());
- ByteSizeValue tcpSendBufferSize = settings.getAsBytesSize("tcp_send_buffer_size", TCP_DEFAULT_SEND_BUFFER_SIZE);
- ByteSizeValue tcpReceiveBufferSize = settings.getAsBytesSize("tcp_receive_buffer_size", TCP_DEFAULT_RECEIVE_BUFFER_SIZE);
+ ByteSizeValue tcpSendBufferSize = TCP_SEND_BUFFER_SIZE.getDefault(settings);
+ ByteSizeValue tcpReceiveBufferSize = TCP_RECEIVE_BUFFER_SIZE.getDefault(settings);
logger.debug("using profile[{}], worker_count[{}], port[{}], bind_host[{}], publish_host[{}], compress[{}], connect_timeout[{}], connections_per_node[{}/{}/{}/{}/{}], receive_predictor[{}->{}]",
name, workerCount, port, bindHost, publishHost, compress, connectTimeout, connectionsPerNodeRecovery, connectionsPerNodeBulk, connectionsPerNodeReg, connectionsPerNodeState, connectionsPerNodePing, receivePredictorMin, receivePredictorMax);
@@ -679,10 +666,7 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
@Override
public TransportAddress[] addressesFromString(String address, int perAddressLimit) throws Exception {
- return parse(address, settings.get("transport.profiles.default.port",
- settings.get("transport.netty.port",
- settings.get("transport.tcp.port",
- DEFAULT_PORT_RANGE))), perAddressLimit);
+ return parse(address, settings.get("transport.profiles.default.port", TransportSettings.PORT.get(settings)), perAddressLimit);
}
// this code is a take on guava's HostAndPort, like a HostAndPortRange
@@ -845,6 +829,7 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
Version version = Version.smallest(this.version, node.version());
stream.setVersion(version);
+ threadPool.getThreadContext().writeTo(stream);
stream.writeString(action);
ReleasablePagedBytesReference bytes;
@@ -1151,7 +1136,7 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
public ChannelPipeline getPipeline() throws Exception {
ChannelPipeline channelPipeline = Channels.pipeline();
SizeHeaderFrameDecoder sizeHeader = new SizeHeaderFrameDecoder();
- if (nettyTransport.maxCumulationBufferCapacity != null) {
+ if (nettyTransport.maxCumulationBufferCapacity.bytes() >= 0) {
if (nettyTransport.maxCumulationBufferCapacity.bytes() > Integer.MAX_VALUE) {
sizeHeader.setMaxCumulationBufferCapacity(Integer.MAX_VALUE);
} else {
@@ -1189,7 +1174,7 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
ChannelPipeline channelPipeline = Channels.pipeline();
channelPipeline.addLast("openChannels", nettyTransport.serverOpenChannels);
SizeHeaderFrameDecoder sizeHeader = new SizeHeaderFrameDecoder();
- if (nettyTransport.maxCumulationBufferCapacity != null) {
+ if (nettyTransport.maxCumulationBufferCapacity.bytes() > 0) {
if (nettyTransport.maxCumulationBufferCapacity.bytes() > Integer.MAX_VALUE) {
sizeHeader.setMaxCumulationBufferCapacity(Integer.MAX_VALUE);
} else {
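
The constructor migration above is mechanical but not cosmetic: the removed hand-rolled checks ("can't set [connection_per_node.reg] to 0" and friends) are now enforced by the settings themselves, since intSetting declares a minimum value. A sketch of the effect (behavior inferred from the minimum argument, not shown in this diff):

    Settings s = Settings.builder()
            .put("transport.connections_per_node.reg", 0)
            .build();
    // CONNECTIONS_PER_NODE_REG is declared with a minimum of 1, so the value
    // is rejected at read time rather than by an ad-hoc constructor check:
    int reg = NettyTransport.CONNECTIONS_PER_NODE_REG.get(s); // throws IllegalArgumentException
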
diff --git a/core/src/main/java/org/elasticsearch/tribe/TribeService.java b/core/src/main/java/org/elasticsearch/tribe/TribeService.java
index 78453c9eac..44d35305a6 100644
--- a/core/src/main/java/org/elasticsearch/tribe/TribeService.java
+++ b/core/src/main/java/org/elasticsearch/tribe/TribeService.java
@@ -42,19 +42,24 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.regex.Regex;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
+import org.elasticsearch.discovery.DiscoveryModule;
import org.elasticsearch.discovery.DiscoveryService;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.node.Node;
import org.elasticsearch.rest.RestStatus;
+import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.function.Function;
import static java.util.Collections.unmodifiableMap;
@@ -82,12 +87,12 @@ public class TribeService extends AbstractLifecycleComponent<TribeService> {
public static final ClusterBlock TRIBE_WRITE_BLOCK = new ClusterBlock(11, "tribe node, write not allowed", false, false, RestStatus.BAD_REQUEST, EnumSet.of(ClusterBlockLevel.WRITE));
public static Settings processSettings(Settings settings) {
- if (settings.get(TRIBE_NAME) != null) {
+ if (TRIBE_NAME_SETTING.exists(settings)) {
// if it's a node client started by this service as a tribe, remove any tribe group setting
// to avoid recursive configuration
Settings.Builder sb = Settings.builder().put(settings);
for (String s : settings.getAsMap().keySet()) {
- if (s.startsWith("tribe.") && !s.equals(TRIBE_NAME)) {
+ if (s.startsWith("tribe.") && !s.equals(TRIBE_NAME_SETTING.getKey())) {
sb.remove(s);
}
}
@@ -99,24 +104,36 @@ public class TribeService extends AbstractLifecycleComponent<TribeService> {
}
// it's a tribe-configured node, force settings
Settings.Builder sb = Settings.builder().put(settings);
- sb.put("node.client", true); // this node should just act as a node client
- sb.put("discovery.type", "local"); // a tribe node should not use zen discovery
- sb.put("discovery.initial_state_timeout", 0); // nothing is going to be discovered, since no master will be elected
+ sb.put(Node.NODE_CLIENT_SETTING.getKey(), true); // this node should just act as a node client
+ sb.put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "local"); // a tribe node should not use zen discovery
+ sb.put(DiscoveryService.INITIAL_STATE_TIMEOUT_SETTING.getKey(), 0); // nothing is going to be discovered, since no master will be elected
if (sb.get("cluster.name") == null) {
sb.put("cluster.name", "tribe_" + Strings.randomBase64UUID()); // make sure it won't join other tribe nodes in the same JVM
}
- sb.put(TransportMasterNodeReadAction.FORCE_LOCAL_SETTING, true);
+ sb.put(TransportMasterNodeReadAction.FORCE_LOCAL_SETTING.getKey(), true);
return sb.build();
}
- public static final String TRIBE_NAME = "tribe.name";
-
+ private static final Setting<String> TRIBE_NAME_SETTING = Setting.simpleString("tribe.name", false, Setting.Scope.CLUSTER); // internal settings only
private final ClusterService clusterService;
private final String[] blockIndicesWrite;
private final String[] blockIndicesRead;
private final String[] blockIndicesMetadata;
-
private static final String ON_CONFLICT_ANY = "any", ON_CONFLICT_DROP = "drop", ON_CONFLICT_PREFER = "prefer_";
+
+ public static final Setting<String> ON_CONFLICT_SETTING = new Setting<>("tribe.on_conflict", ON_CONFLICT_ANY, (s) -> {
+ if (ON_CONFLICT_ANY.equals(s) || ON_CONFLICT_DROP.equals(s) || s.startsWith(ON_CONFLICT_PREFER)) {
+ return s;
+ }
+ throw new IllegalArgumentException("Invalid value for [tribe.on_conflict] must be either [any, drop or start with prefer_] but was: " +s);
+ }, false, Setting.Scope.CLUSTER);
+
+ public static final Setting<Boolean> BLOCKS_METADATA_SETTING = Setting.boolSetting("tribe.blocks.metadata", false, false, Setting.Scope.CLUSTER);
+ public static final Setting<Boolean> BLOCKS_WRITE_SETTING = Setting.boolSetting("tribe.blocks.write", false, false, Setting.Scope.CLUSTER);
+ public static final Setting<List<String>> BLOCKS_WRITE_INDICES_SETTING = Setting.listSetting("tribe.blocks.write.indices", Collections.emptyList(), Function.identity(), false, Setting.Scope.CLUSTER);
+ public static final Setting<List<String>> BLOCKS_READ_INDICES_SETTING = Setting.listSetting("tribe.blocks.read.indices", Collections.emptyList(), Function.identity(), false, Setting.Scope.CLUSTER);
+ public static final Setting<List<String>> BLOCKS_METADATA_INDICES_SETTING = Setting.listSetting("tribe.blocks.metadata.indices", Collections.emptyList(), Function.identity(), false, Setting.Scope.CLUSTER);
+
private final String onConflict;
private final Set<String> droppedIndices = ConcurrentCollections.newConcurrentSet();
@@ -132,12 +149,15 @@ public class TribeService extends AbstractLifecycleComponent<TribeService> {
for (Map.Entry<String, Settings> entry : nodesSettings.entrySet()) {
Settings.Builder sb = Settings.builder().put(entry.getValue());
sb.put("name", settings.get("name") + "/" + entry.getKey());
- sb.put("path.home", settings.get("path.home")); // pass through ES home dir
- sb.put(TRIBE_NAME, entry.getKey());
+ sb.put(Environment.PATH_HOME_SETTING.getKey(), Environment.PATH_HOME_SETTING.get(settings)); // pass through ES home dir
+ if (Environment.PATH_CONF_SETTING.exists(settings)) {
+ sb.put(Environment.PATH_CONF_SETTING.getKey(), Environment.PATH_CONF_SETTING.get(settings));
+ }
+ sb.put(TRIBE_NAME_SETTING.getKey(), entry.getKey());
if (sb.get("http.enabled") == null) {
sb.put("http.enabled", false);
}
- sb.put("node.client", true);
+ sb.put(Node.NODE_CLIENT_SETTING.getKey(), true);
nodes.add(new TribeClientNode(sb.build()));
}
@@ -149,15 +169,15 @@ public class TribeService extends AbstractLifecycleComponent<TribeService> {
// master elected in this single tribe node local "cluster"
clusterService.removeInitialStateBlock(discoveryService.getNoMasterBlock());
clusterService.removeInitialStateBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK);
- if (settings.getAsBoolean("tribe.blocks.write", false)) {
+ if (BLOCKS_WRITE_SETTING.get(settings)) {
clusterService.addInitialStateBlock(TRIBE_WRITE_BLOCK);
}
- blockIndicesWrite = settings.getAsArray("tribe.blocks.write.indices", Strings.EMPTY_ARRAY);
- if (settings.getAsBoolean("tribe.blocks.metadata", false)) {
+ blockIndicesWrite = BLOCKS_WRITE_INDICES_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY);
+ if (BLOCKS_METADATA_SETTING.get(settings)) {
clusterService.addInitialStateBlock(TRIBE_METADATA_BLOCK);
}
- blockIndicesMetadata = settings.getAsArray("tribe.blocks.metadata.indices", Strings.EMPTY_ARRAY);
- blockIndicesRead = settings.getAsArray("tribe.blocks.read.indices", Strings.EMPTY_ARRAY);
+ blockIndicesMetadata = BLOCKS_METADATA_INDICES_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY);
+ blockIndicesRead = BLOCKS_READ_INDICES_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY);
for (Node node : nodes) {
node.injector().getInstance(ClusterService.class).add(new TribeClusterStateListener(node));
}
@@ -166,7 +186,7 @@ public class TribeService extends AbstractLifecycleComponent<TribeService> {
this.blockIndicesRead = blockIndicesRead;
this.blockIndicesWrite = blockIndicesWrite;
- this.onConflict = settings.get("tribe.on_conflict", ON_CONFLICT_ANY);
+ this.onConflict = ON_CONFLICT_SETTING.get(settings);
}
@Override
@@ -213,7 +233,7 @@ public class TribeService extends AbstractLifecycleComponent<TribeService> {
private final TribeNodeClusterStateTaskExecutor executor;
TribeClusterStateListener(Node tribeNode) {
- String tribeName = tribeNode.settings().get(TRIBE_NAME);
+ String tribeName = TRIBE_NAME_SETTING.get(tribeNode.settings());
this.tribeName = tribeName;
executor = new TribeNodeClusterStateTaskExecutor(tribeName);
}
@@ -266,7 +286,7 @@ public class TribeService extends AbstractLifecycleComponent<TribeService> {
// -- merge nodes
// go over existing nodes, and see if they need to be removed
for (DiscoveryNode discoNode : currentState.nodes()) {
- String markedTribeName = discoNode.attributes().get(TRIBE_NAME);
+ String markedTribeName = discoNode.attributes().get(TRIBE_NAME_SETTING.getKey());
if (markedTribeName != null && markedTribeName.equals(tribeName)) {
if (tribeState.nodes().get(discoNode.id()) == null) {
clusterStateChanged = true;
@@ -283,7 +303,7 @@ public class TribeService extends AbstractLifecycleComponent<TribeService> {
for (ObjectObjectCursor<String, String> attr : tribe.attributes()) {
tribeAttr.put(attr.key, attr.value);
}
- tribeAttr.put(TRIBE_NAME, tribeName);
+ tribeAttr.put(TRIBE_NAME_SETTING.getKey(), tribeName);
DiscoveryNode discoNode = new DiscoveryNode(tribe.name(), tribe.id(), tribe.getHostName(), tribe.getHostAddress(), tribe.address(), unmodifiableMap(tribeAttr), tribe.version());
clusterStateChanged = true;
logger.info("[{}] adding node [{}]", tribeName, discoNode);
@@ -297,18 +317,18 @@ public class TribeService extends AbstractLifecycleComponent<TribeService> {
RoutingTable.Builder routingTable = RoutingTable.builder(currentState.routingTable());
// go over existing indices, and see if they need to be removed
for (IndexMetaData index : currentState.metaData()) {
- String markedTribeName = index.getSettings().get(TRIBE_NAME);
+ String markedTribeName = TRIBE_NAME_SETTING.get(index.getSettings());
if (markedTribeName != null && markedTribeName.equals(tribeName)) {
IndexMetaData tribeIndex = tribeState.metaData().index(index.getIndex());
clusterStateChanged = true;
if (tribeIndex == null || tribeIndex.getState() == IndexMetaData.State.CLOSE) {
- logger.info("[{}] removing index [{}]", tribeName, index.getIndex());
+ logger.info("[{}] removing index {}", tribeName, index.getIndex());
removeIndex(blocks, metaData, routingTable, index);
} else {
// always make sure to update the metadata and routing table, in case
// there are changes in them (new mapping, shards moving from initializing to started)
routingTable.add(tribeState.routingTable().index(index.getIndex()));
- Settings tribeSettings = Settings.builder().put(tribeIndex.getSettings()).put(TRIBE_NAME, tribeName).build();
+ Settings tribeSettings = Settings.builder().put(tribeIndex.getSettings()).put(TRIBE_NAME_SETTING.getKey(), tribeName).build();
metaData.put(IndexMetaData.builder(tribeIndex).settings(tribeSettings));
}
}
@@ -322,14 +342,14 @@ public class TribeService extends AbstractLifecycleComponent<TribeService> {
}
final IndexMetaData indexMetaData = currentState.metaData().index(tribeIndex.getIndex());
if (indexMetaData == null) {
- if (!droppedIndices.contains(tribeIndex.getIndex())) {
+ if (!droppedIndices.contains(tribeIndex.getIndex().getName())) {
// a new index, add it, and add the tribe name as a setting
clusterStateChanged = true;
- logger.info("[{}] adding index [{}]", tribeName, tribeIndex.getIndex());
+ logger.info("[{}] adding index {}", tribeName, tribeIndex.getIndex());
addNewIndex(tribeState, blocks, metaData, routingTable, tribeIndex);
}
} else {
- String existingFromTribe = indexMetaData.getSettings().get(TRIBE_NAME);
+ String existingFromTribe = TRIBE_NAME_SETTING.get(indexMetaData.getSettings());
if (!tribeName.equals(existingFromTribe)) {
// we have a potential conflict on index names, decide what to do...
if (ON_CONFLICT_ANY.equals(onConflict)) {
@@ -337,16 +357,16 @@ public class TribeService extends AbstractLifecycleComponent<TribeService> {
} else if (ON_CONFLICT_DROP.equals(onConflict)) {
// drop the indices, there is a conflict
clusterStateChanged = true;
- logger.info("[{}] dropping index [{}] due to conflict with [{}]", tribeName, tribeIndex.getIndex(), existingFromTribe);
+ logger.info("[{}] dropping index {} due to conflict with [{}]", tribeName, tribeIndex.getIndex(), existingFromTribe);
removeIndex(blocks, metaData, routingTable, tribeIndex);
- droppedIndices.add(tribeIndex.getIndex());
+ droppedIndices.add(tribeIndex.getIndex().getName());
} else if (onConflict.startsWith(ON_CONFLICT_PREFER)) {
// on conflict, prefer a tribe...
String preferredTribeName = onConflict.substring(ON_CONFLICT_PREFER.length());
if (tribeName.equals(preferredTribeName)) {
// the new one is the preferred one, replace...
clusterStateChanged = true;
- logger.info("[{}] adding index [{}], preferred over [{}]", tribeName, tribeIndex.getIndex(), existingFromTribe);
+ logger.info("[{}] adding index {}, preferred over [{}]", tribeName, tribeIndex.getIndex(), existingFromTribe);
removeIndex(blocks, metaData, routingTable, tribeIndex);
addNewIndex(tribeState, blocks, metaData, routingTable, tribeIndex);
} // else: either the existing one is the preferred one, or we haven't seen one, carry on
@@ -363,23 +383,23 @@ public class TribeService extends AbstractLifecycleComponent<TribeService> {
}
private void removeIndex(ClusterBlocks.Builder blocks, MetaData.Builder metaData, RoutingTable.Builder routingTable, IndexMetaData index) {
- metaData.remove(index.getIndex());
- routingTable.remove(index.getIndex());
- blocks.removeIndexBlocks(index.getIndex());
+ metaData.remove(index.getIndex().getName());
+ routingTable.remove(index.getIndex().getName());
+ blocks.removeIndexBlocks(index.getIndex().getName());
}
private void addNewIndex(ClusterState tribeState, ClusterBlocks.Builder blocks, MetaData.Builder metaData, RoutingTable.Builder routingTable, IndexMetaData tribeIndex) {
- Settings tribeSettings = Settings.builder().put(tribeIndex.getSettings()).put(TRIBE_NAME, tribeName).build();
+ Settings tribeSettings = Settings.builder().put(tribeIndex.getSettings()).put(TRIBE_NAME_SETTING.getKey(), tribeName).build();
metaData.put(IndexMetaData.builder(tribeIndex).settings(tribeSettings));
routingTable.add(tribeState.routingTable().index(tribeIndex.getIndex()));
- if (Regex.simpleMatch(blockIndicesMetadata, tribeIndex.getIndex())) {
- blocks.addIndexBlock(tribeIndex.getIndex(), IndexMetaData.INDEX_METADATA_BLOCK);
+ if (Regex.simpleMatch(blockIndicesMetadata, tribeIndex.getIndex().getName())) {
+ blocks.addIndexBlock(tribeIndex.getIndex().getName(), IndexMetaData.INDEX_METADATA_BLOCK);
}
- if (Regex.simpleMatch(blockIndicesRead, tribeIndex.getIndex())) {
- blocks.addIndexBlock(tribeIndex.getIndex(), IndexMetaData.INDEX_READ_BLOCK);
+ if (Regex.simpleMatch(blockIndicesRead, tribeIndex.getIndex().getName())) {
+ blocks.addIndexBlock(tribeIndex.getIndex().getName(), IndexMetaData.INDEX_READ_BLOCK);
}
- if (Regex.simpleMatch(blockIndicesWrite, tribeIndex.getIndex())) {
- blocks.addIndexBlock(tribeIndex.getIndex(), IndexMetaData.INDEX_WRITE_BLOCK);
+ if (Regex.simpleMatch(blockIndicesWrite, tribeIndex.getIndex().getName())) {
+ blocks.addIndexBlock(tribeIndex.getIndex().getName(), IndexMetaData.INDEX_WRITE_BLOCK);
}
}
}
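
ON_CONFLICT_SETTING bakes the allowed vocabulary into the setting's parser, so an invalid value now fails during settings validation instead of silently falling back. A sketch of the accept/reject behavior implied by the parser above (values are illustrative):

    // accepted:
    ON_CONFLICT_SETTING.get(Settings.builder().put("tribe.on_conflict", "drop").build());
    ON_CONFLICT_SETTING.get(Settings.builder().put("tribe.on_conflict", "prefer_t1").build());
    // rejected (the parser throws IllegalArgumentException):
    ON_CONFLICT_SETTING.get(Settings.builder().put("tribe.on_conflict", "merge").build());
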
diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy
index 8db3aca8dc..2228d0365f 100644
--- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy
+++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy
@@ -31,7 +31,7 @@ grant codeBase "${codebase.securesm-1.0.jar}" {
//// Very special jar permissions:
//// These are dangerous permissions that we don't want to grant to everything.
-grant codeBase "${codebase.lucene-core-5.5.0-snapshot-1721183.jar}" {
+grant codeBase "${codebase.lucene-core-5.5.0-snapshot-1725675.jar}" {
// needed to allow MMapDirectory's "unmap hack"
permission java.lang.RuntimePermission "accessClassInPackage.sun.misc";
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
index 419c666d55..408fdcd855 100644
--- a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
+++ b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
@@ -31,7 +31,7 @@ grant codeBase "${codebase.securemock-1.2.jar}" {
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
};
-grant codeBase "${codebase.lucene-test-framework-5.5.0-snapshot-1721183.jar}" {
+grant codeBase "${codebase.lucene-test-framework-5.5.0-snapshot-1725675.jar}" {
// needed by RamUsageTester
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
};
diff --git a/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help b/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help
index 3cb31e22a9..c001f8c6cf 100644
--- a/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help
+++ b/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help
@@ -13,16 +13,11 @@ DESCRIPTION
Officially supported or commercial plugins require just the plugin name:
plugin install analysis-icu
- plugin install shield
+ plugin install x-pack
- Plugins from GitHub require 'username/repository' or 'username/repository/version':
+ Plugins from Maven Central require 'groupId:artifactId:version':
- plugin install lmenezes/elasticsearch-kopf
- plugin install lmenezes/elasticsearch-kopf/1.5.7
-
- Plugins from Maven Central or Sonatype require 'groupId/artifactId/version':
-
- plugin install org.elasticsearch/elasticsearch-mapper-attachments/2.6.0
+ plugin install org.elasticsearch:mapper-attachments:3.0.0
Plugins can be installed from a custom URL or file location as follows:
@@ -42,9 +37,9 @@ OFFICIAL PLUGINS
- discovery-azure
- discovery-ec2
- discovery-gce
- - discovery-multicast
+ - ingest-geoip
- lang-javascript
- - lang-plan-a
+ - lang-painless
- lang-python
- mapper-attachments
- mapper-murmur3
@@ -58,8 +53,6 @@ OFFICIAL PLUGINS
OPTIONS
- -t,--timeout Timeout until the plugin download is abort
-
-v,--verbose Verbose output
-h,--help Shows this message
diff --git a/core/src/test/java/org/elasticsearch/ESExceptionTests.java b/core/src/test/java/org/elasticsearch/ESExceptionTests.java
index a547e18de5..75a69cd3e5 100644
--- a/core/src/test/java/org/elasticsearch/ESExceptionTests.java
+++ b/core/src/test/java/org/elasticsearch/ESExceptionTests.java
@@ -85,9 +85,9 @@ public class ESExceptionTests extends ESTestCase {
assertEquals(ElasticsearchException.getExceptionName(rootCauses[0]), "index_not_found_exception");
assertEquals(rootCauses[0].getMessage(), "no such index");
ShardSearchFailure failure = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null),
- new SearchShardTarget("node_1", "foo", 1));
+ new SearchShardTarget("node_1", new Index("foo", "_na_"), 1));
ShardSearchFailure failure1 = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null),
- new SearchShardTarget("node_1", "foo", 2));
+ new SearchShardTarget("node_1", new Index("foo", "_na_"), 2));
SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[]{failure, failure1});
if (randomBoolean()) {
rootCauses = (randomBoolean() ? new RemoteTransportException("remoteboom", ex) : ex).guessRootCauses();
@@ -105,11 +105,11 @@ public class ESExceptionTests extends ESTestCase {
{
ShardSearchFailure failure = new ShardSearchFailure(
new ParsingException(1, 2, "foobar", null),
- new SearchShardTarget("node_1", "foo", 1));
- ShardSearchFailure failure1 = new ShardSearchFailure(new QueryShardException(new Index("foo1"), "foobar", null),
- new SearchShardTarget("node_1", "foo1", 1));
- ShardSearchFailure failure2 = new ShardSearchFailure(new QueryShardException(new Index("foo1"), "foobar", null),
- new SearchShardTarget("node_1", "foo1", 2));
+ new SearchShardTarget("node_1", new Index("foo", "_na_"), 1));
+ ShardSearchFailure failure1 = new ShardSearchFailure(new QueryShardException(new Index("foo1", "_na_"), "foobar", null),
+ new SearchShardTarget("node_1", new Index("foo1", "_na_"), 1));
+ ShardSearchFailure failure2 = new ShardSearchFailure(new QueryShardException(new Index("foo1", "_na_"), "foobar", null),
+ new SearchShardTarget("node_1", new Index("foo1", "_na_"), 2));
SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[]{failure, failure1, failure2});
final ElasticsearchException[] rootCauses = ex.guessRootCauses();
assertEquals(rootCauses.length, 2);
@@ -118,7 +118,7 @@ public class ESExceptionTests extends ESTestCase {
assertEquals(((ParsingException) rootCauses[0]).getLineNumber(), 1);
assertEquals(((ParsingException) rootCauses[0]).getColumnNumber(), 2);
assertEquals(ElasticsearchException.getExceptionName(rootCauses[1]), "query_shard_exception");
- assertEquals((rootCauses[1]).getIndex(), "foo1");
+ assertEquals((rootCauses[1]).getIndex().getName(), "foo1");
assertEquals(rootCauses[1].getMessage(), "foobar");
}
@@ -136,9 +136,9 @@ public class ESExceptionTests extends ESTestCase {
public void testDeduplicate() throws IOException {
{
ShardSearchFailure failure = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null),
- new SearchShardTarget("node_1", "foo", 1));
+ new SearchShardTarget("node_1", new Index("foo", "_na_"), 1));
ShardSearchFailure failure1 = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null),
- new SearchShardTarget("node_1", "foo", 2));
+ new SearchShardTarget("node_1", new Index("foo", "_na_"), 2));
SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", randomBoolean() ? failure1.getCause() : failure.getCause(), new ShardSearchFailure[]{failure, failure1});
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.startObject();
@@ -149,24 +149,24 @@ public class ESExceptionTests extends ESTestCase {
}
{
ShardSearchFailure failure = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null),
- new SearchShardTarget("node_1", "foo", 1));
- ShardSearchFailure failure1 = new ShardSearchFailure(new QueryShardException(new Index("foo1"), "foobar", null),
- new SearchShardTarget("node_1", "foo1", 1));
- ShardSearchFailure failure2 = new ShardSearchFailure(new QueryShardException(new Index("foo1"), "foobar", null),
- new SearchShardTarget("node_1", "foo1", 2));
+ new SearchShardTarget("node_1", new Index("foo", "_na_"), 1));
+ ShardSearchFailure failure1 = new ShardSearchFailure(new QueryShardException(new Index("foo1", "_na_"), "foobar", null),
+ new SearchShardTarget("node_1", new Index("foo1", "_na_"), 1));
+ ShardSearchFailure failure2 = new ShardSearchFailure(new QueryShardException(new Index("foo1", "_na_"), "foobar", null),
+ new SearchShardTarget("node_1", new Index("foo1", "_na_"), 2));
SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[]{failure, failure1, failure2});
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.startObject();
ex.toXContent(builder, PARAMS);
builder.endObject();
- String expected = "{\"type\":\"search_phase_execution_exception\",\"reason\":\"all shards failed\",\"phase\":\"search\",\"grouped\":true,\"failed_shards\":[{\"shard\":1,\"index\":\"foo\",\"node\":\"node_1\",\"reason\":{\"type\":\"parsing_exception\",\"reason\":\"foobar\",\"line\":1,\"col\":2}},{\"shard\":1,\"index\":\"foo1\",\"node\":\"node_1\",\"reason\":{\"type\":\"query_shard_exception\",\"reason\":\"foobar\",\"index\":\"foo1\"}}]}";
+ String expected = "{\"type\":\"search_phase_execution_exception\",\"reason\":\"all shards failed\",\"phase\":\"search\",\"grouped\":true,\"failed_shards\":[{\"shard\":1,\"index\":\"foo\",\"node\":\"node_1\",\"reason\":{\"type\":\"parsing_exception\",\"reason\":\"foobar\",\"line\":1,\"col\":2}},{\"shard\":1,\"index\":\"foo1\",\"node\":\"node_1\",\"reason\":{\"type\":\"query_shard_exception\",\"reason\":\"foobar\",\"index_uuid\":\"_na_\",\"index\":\"foo1\"}}]}";
assertEquals(expected, builder.string());
}
{
ShardSearchFailure failure = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null),
- new SearchShardTarget("node_1", "foo", 1));
+ new SearchShardTarget("node_1", new Index("foo", "_na_"), 1));
ShardSearchFailure failure1 = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null),
- new SearchShardTarget("node_1", "foo", 2));
+ new SearchShardTarget("node_1", new Index("foo", "_na_"), 2));
NullPointerException nullPointerException = new NullPointerException();
SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", nullPointerException, new ShardSearchFailure[]{failure, failure1});
assertEquals(nullPointerException, ex.getCause());
@@ -286,12 +286,12 @@ public class ESExceptionTests extends ESTestCase {
public void testSerializeUnknownException() throws IOException {
BytesStreamOutput out = new BytesStreamOutput();
ParsingException ParsingException = new ParsingException(1, 2, "foobar", null);
- Throwable ex = new Throwable("wtf", ParsingException);
+ Throwable ex = new Throwable("eggplant", ParsingException);
out.writeThrowable(ex);
StreamInput in = StreamInput.wrap(out.bytes());
Throwable throwable = in.readThrowable();
- assertEquals("wtf", throwable.getMessage());
+ assertEquals("throwable: eggplant", throwable.getMessage());
assertTrue(throwable instanceof ElasticsearchException);
ParsingException e = (ParsingException)throwable.getCause();
assertEquals(ParsingException.getIndex(), e.getIndex());
@@ -320,7 +320,7 @@ public class ESExceptionTests extends ESTestCase {
new OutOfMemoryError("no memory left"),
new AlreadyClosedException("closed!!", new NullPointerException()),
new LockObtainFailedException("can't lock directory", new NullPointerException()),
- new Throwable("this exception is unknown", new QueryShardException(new Index("foo"), "foobar", null) ), // somethin unknown
+ new Throwable("this exception is unknown", new QueryShardException(new Index("foo", "_na_"), "foobar", null) ), // somethin unknown
};
for (Throwable t : causes) {
BytesStreamOutput out = new BytesStreamOutput();
@@ -329,7 +329,9 @@ public class ESExceptionTests extends ESTestCase {
StreamInput in = StreamInput.wrap(out.bytes());
ElasticsearchException e = in.readThrowable();
assertEquals(e.getMessage(), ex.getMessage());
- assertEquals(ex.getCause().getClass().getName(), e.getCause().getMessage(), ex.getCause().getMessage());
+ assertTrue("Expected: " + e.getCause().getMessage() + " to contain: " +
+ ex.getCause().getClass().getName() + " but it didn't",
+ e.getCause().getMessage().contains(ex.getCause().getMessage()));
if (ex.getCause().getClass() != Throwable.class) { // throwable is not directly mapped
assertEquals(e.getCause().getClass(), ex.getCause().getClass());
} else {
diff --git a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java
index db7b5df666..50764eef65 100644
--- a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java
+++ b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java
@@ -245,8 +245,8 @@ public class ExceptionSerializationTests extends ESTestCase {
}
public void testQueryShardException() throws IOException {
- QueryShardException ex = serialize(new QueryShardException(new Index("foo"), "fobar", null));
- assertEquals(ex.getIndex(), "foo");
+ QueryShardException ex = serialize(new QueryShardException(new Index("foo", "_na_"), "fobar", null));
+ assertEquals(ex.getIndex().getName(), "foo");
assertEquals(ex.getMessage(), "fobar");
ex = serialize(new QueryShardException((Index) null, null, null));
@@ -255,7 +255,7 @@ public class ExceptionSerializationTests extends ESTestCase {
}
public void testSearchException() throws IOException {
- SearchShardTarget target = new SearchShardTarget("foo", "bar", 1);
+ SearchShardTarget target = new SearchShardTarget("foo", new Index("bar", "_na_"), 1);
SearchException ex = serialize(new SearchException(target, "hello world"));
assertEquals(target, ex.shard());
assertEquals(ex.getMessage(), "hello world");
@@ -268,7 +268,7 @@ public class ExceptionSerializationTests extends ESTestCase {
public void testAlreadyExpiredException() throws IOException {
AlreadyExpiredException alreadyExpiredException = serialize(new AlreadyExpiredException("index", "type", "id", 1, 2, 3));
- assertEquals("index", alreadyExpiredException.getIndex());
+ assertEquals("index", alreadyExpiredException.getIndex().getName());
assertEquals("type", alreadyExpiredException.type());
assertEquals("id", alreadyExpiredException.id());
assertEquals(2, alreadyExpiredException.ttl());
@@ -303,7 +303,7 @@ public class ExceptionSerializationTests extends ESTestCase {
}
public void testRecoverFilesRecoveryException() throws IOException {
- ShardId id = new ShardId("foo", 1);
+ ShardId id = new ShardId("foo", "_na_", 1);
ByteSizeValue bytes = new ByteSizeValue(randomIntBetween(0, 10000));
RecoverFilesRecoveryException ex = serialize(new RecoverFilesRecoveryException(id, 10, bytes, null));
assertEquals(ex.getShardId(), id);
@@ -331,7 +331,7 @@ public class ExceptionSerializationTests extends ESTestCase {
}
public void testBatchOperationException() throws IOException {
- ShardId id = new ShardId("foo", 1);
+ ShardId id = new ShardId("foo", "_na_", 1);
TranslogRecoveryPerformer.BatchOperationException ex = serialize(new TranslogRecoveryPerformer.BatchOperationException(id, "batched the fucker", 666, null));
assertEquals(ex.getShardId(), id);
assertEquals(666, ex.completedOperations());
@@ -368,7 +368,7 @@ public class ExceptionSerializationTests extends ESTestCase {
}
public void testPercolateException() throws IOException {
- ShardId id = new ShardId("foo", 1);
+ ShardId id = new ShardId("foo", "_na_", 1);
PercolateException ex = serialize(new PercolateException(id, "percolate my ass", null));
assertEquals(id, ex.getShardId());
assertEquals("percolate my ass", ex.getMessage());
@@ -402,9 +402,9 @@ public class ExceptionSerializationTests extends ESTestCase {
}
public void testIndexFailedEngineException() throws IOException {
- ShardId id = new ShardId("foo", 1);
+ ShardId id = new ShardId("foo", "_na_", 1);
IndexFailedEngineException ex = serialize(new IndexFailedEngineException(id, "type", "id", null));
- assertEquals(ex.getShardId(), new ShardId("foo", 1));
+ assertEquals(ex.getShardId(), new ShardId("foo", "_na_", 1));
assertEquals("type", ex.type());
assertEquals("id", ex.id());
assertNull(ex.getCause());
@@ -433,7 +433,7 @@ public class ExceptionSerializationTests extends ESTestCase {
}
public void testIllegalIndexShardStateException() throws IOException {
- ShardId id = new ShardId("foo", 1);
+ ShardId id = new ShardId("foo", "_na_", 1);
IndexShardState state = randomFrom(IndexShardState.values());
IllegalIndexShardStateException ex = serialize(new IllegalIndexShardStateException(id, state, "come back later buddy"));
assertEquals(id, ex.getShardId());
@@ -477,7 +477,7 @@ public class ExceptionSerializationTests extends ESTestCase {
public void testRoutingMissingException() throws IOException {
RoutingMissingException ex = serialize(new RoutingMissingException("idx", "type", "id"));
- assertEquals("idx", ex.getIndex());
+ assertEquals("idx", ex.getIndex().getName());
assertEquals("type", ex.getType());
assertEquals("id", ex.getId());
assertEquals("routing is required for [idx]/[type]/[id]", ex.getMessage());
@@ -505,7 +505,7 @@ public class ExceptionSerializationTests extends ESTestCase {
public void testRecoveryEngineException() throws IOException {
- ShardId id = new ShardId("foo", 1);
+ ShardId id = new ShardId("foo", "_na_", 1);
RecoveryEngineException ex = serialize(new RecoveryEngineException(id, 10, "total failure", new NullPointerException()));
assertEquals(id, ex.getShardId());
assertEquals("Phase[10] total failure", ex.getMessage());
@@ -543,9 +543,9 @@ public class ExceptionSerializationTests extends ESTestCase {
public void testNotSerializableExceptionWrapper() throws IOException {
NotSerializableExceptionWrapper ex = serialize(new NotSerializableExceptionWrapper(new NullPointerException()));
- assertEquals("{\"type\":\"null_pointer_exception\",\"reason\":null}", toXContent(ex));
+ assertEquals("{\"type\":\"null_pointer_exception\",\"reason\":\"null_pointer_exception: null\"}", toXContent(ex));
ex = serialize(new NotSerializableExceptionWrapper(new IllegalArgumentException("nono!")));
- assertEquals("{\"type\":\"illegal_argument_exception\",\"reason\":\"nono!\"}", toXContent(ex));
+ assertEquals("{\"type\":\"illegal_argument_exception\",\"reason\":\"illegal_argument_exception: nono!\"}", toXContent(ex));
Throwable[] unknowns = new Throwable[]{
new Exception("foobar"),
@@ -586,7 +586,7 @@ public class ExceptionSerializationTests extends ESTestCase {
ElasticsearchException serialize = serialize((ElasticsearchException) uhe);
assertTrue(serialize instanceof NotSerializableExceptionWrapper);
NotSerializableExceptionWrapper e = (NotSerializableExceptionWrapper) serialize;
- assertEquals("msg", e.getMessage());
+ assertEquals("unknown_header_exception: msg", e.getMessage());
assertEquals(2, e.getHeader("foo").size());
assertEquals("foo", e.getHeader("foo").get(0));
assertEquals("bar", e.getHeader("foo").get(1));
diff --git a/core/src/test/java/org/elasticsearch/action/ListenerActionIT.java b/core/src/test/java/org/elasticsearch/action/ListenerActionIT.java
index f68cb76c95..a9bb96a0a9 100644
--- a/core/src/test/java/org/elasticsearch/action/ListenerActionIT.java
+++ b/core/src/test/java/org/elasticsearch/action/ListenerActionIT.java
@@ -61,7 +61,7 @@ public class ListenerActionIT extends ESIntegTestCase {
latch.await();
- boolean shouldBeThreaded = DiscoveryNode.clientNode(client.settings()) || TransportClient.CLIENT_TYPE.equals(client.settings().get(Client.CLIENT_TYPE_SETTING));
+ boolean shouldBeThreaded = DiscoveryNode.clientNode(client.settings()) || TransportClient.CLIENT_TYPE.equals(Client.CLIENT_TYPE_SETTING_S.get(client.settings()));
if (shouldBeThreaded) {
assertTrue(threadName.get().contains("listener"));
} else {
diff --git a/core/src/test/java/org/elasticsearch/action/admin/HotThreadsIT.java b/core/src/test/java/org/elasticsearch/action/admin/HotThreadsIT.java
index 6c11bc35de..47823307ff 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/HotThreadsIT.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/HotThreadsIT.java
@@ -119,7 +119,7 @@ public class HotThreadsIT extends ESIntegTestCase {
.setQuery(matchAllQuery())
.setPostFilter(boolQuery().must(matchAllQuery()).mustNot(boolQuery().must(termQuery("field1", "value1")).must(termQuery("field1", "value2"))))
.get(),
- 3l);
+ 3L);
}
latch.await();
assertThat(hasErrors.get(), is(false));
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/RecordingTaskManagerListener.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/RecordingTaskManagerListener.java
new file mode 100644
index 0000000000..025d2b39f6
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/RecordingTaskManagerListener.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.admin.cluster.node.tasks;
+
+import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.regex.Regex;
+import org.elasticsearch.tasks.Task;
+import org.elasticsearch.test.tasks.MockTaskManagerListener;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * MockTaskManagerListener that records all task registration/unregistration events
+ */
+public class RecordingTaskManagerListener implements MockTaskManagerListener {
+
+ private String[] actionMasks;
+ private DiscoveryNode localNode;
+
+ private List<Tuple<Boolean, TaskInfo>> events = new ArrayList<>();
+
+ public RecordingTaskManagerListener(DiscoveryNode localNode, String... actionMasks) {
+ this.actionMasks = actionMasks;
+ this.localNode = localNode;
+ }
+
+ @Override
+ public synchronized void onTaskRegistered(Task task) {
+ if (Regex.simpleMatch(actionMasks, task.getAction())) {
+ events.add(new Tuple<>(true, task.taskInfo(localNode, true)));
+ }
+ }
+
+ @Override
+ public synchronized void onTaskUnregistered(Task task) {
+ if (Regex.simpleMatch(actionMasks, task.getAction())) {
+ events.add(new Tuple<>(false, task.taskInfo(localNode, true)));
+ }
+ }
+
+ public synchronized List<Tuple<Boolean, TaskInfo>> getEvents() {
+ return Collections.unmodifiableList(new ArrayList<>(events));
+ }
+
+ public synchronized List<TaskInfo> getRegistrationEvents() {
+ List<TaskInfo> events = this.events.stream().filter(Tuple::v1).map(Tuple::v2).collect(Collectors.toList());
+ return Collections.unmodifiableList(events);
+ }
+
+ public synchronized List<TaskInfo> getUnregistrationEvents() {
+ List<TaskInfo> events = this.events.stream().filter(event -> event.v1() == false).map(Tuple::v2).collect(Collectors.toList());
+ return Collections.unmodifiableList(events);
+ }
+
+ public synchronized void reset() {
+ events.clear();
+ }
+
+}
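
A typical use of this listener, mirrored by the tests below, is to attach it to a node's MockTaskManager, run the action under test, and inspect what was recorded. A sketch, where the action mask string and the mockTaskManager reference are illustrative:

    RecordingTaskManagerListener listener =
            new RecordingTaskManagerListener(localNode, "cluster:monitor/health*");
    mockTaskManager.addListener(listener);
    // ... run the action under test ...
    List<TaskInfo> registered = listener.getRegistrationEvents();
    mockTaskManager.removeListener(listener);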
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java
index 4228c9fa69..fbb93202fc 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java
@@ -18,21 +18,277 @@
*/
package org.elasticsearch.action.admin.cluster.node.tasks;
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo;
+import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
+import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeAction;
+import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction;
+import org.elasticsearch.action.percolate.PercolateAction;
+import org.elasticsearch.cluster.ClusterService;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
+import org.elasticsearch.test.tasks.MockTaskManager;
+import org.elasticsearch.test.transport.MockTransportService;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.lessThanOrEqualTo;
/**
* Integration tests for task management API
+ * <p>
+ * We need at least 2 nodes so we have a master node and a non-master node
*/
-@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE)
+@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE, minNumDataNodes = 2)
public class TasksIT extends ESIntegTestCase {
+ private Map<Tuple<String, String>, RecordingTaskManagerListener> listeners = new HashMap<>();
+
+ @Override
+ protected Collection<Class<? extends Plugin>> nodePlugins() {
+ return pluginList(MockTransportService.TestPlugin.class);
+ }
+
+ @Override
+ protected Settings nodeSettings(int nodeOrdinal) {
+ return Settings.builder()
+ .put(super.nodeSettings(nodeOrdinal))
+ .put(MockTaskManager.USE_MOCK_TASK_MANAGER, true)
+ .build();
+ }
+
public void testTaskCounts() {
// Run only on data nodes
ListTasksResponse response = client().admin().cluster().prepareListTasks("data:true").setActions(ListTasksAction.NAME + "[n]").get();
assertThat(response.getTasks().size(), greaterThanOrEqualTo(cluster().numDataNodes()));
}
+
+ public void testMasterNodeOperationTasks() {
+ registerTaskManageListeners(ClusterHealthAction.NAME);
+
+ // First run the health on the master node - should produce only one task on the master node
+ internalCluster().masterClient().admin().cluster().prepareHealth().get();
+ assertEquals(1, numberOfEvents(ClusterHealthAction.NAME, Tuple::v1)); // counting only registration events
+ assertEquals(1, numberOfEvents(ClusterHealthAction.NAME, event -> event.v1() == false)); // counting only unregistration events
+
+ resetTaskManageListeners(ClusterHealthAction.NAME);
+
+ // Now run the health on a non-master node - should produce one task on master and one task on another node
+ internalCluster().nonMasterClient().admin().cluster().prepareHealth().get();
+ assertEquals(2, numberOfEvents(ClusterHealthAction.NAME, Tuple::v1)); // counting only registration events
+ assertEquals(2, numberOfEvents(ClusterHealthAction.NAME, event -> event.v1() == false)); // counting only unregistration events
+ List<TaskInfo> tasks = findEvents(ClusterHealthAction.NAME, Tuple::v1);
+
+ // Verify that one of these tasks is a parent of another task
+ if (tasks.get(0).getParentNode() == null) {
+ assertParentTask(Collections.singletonList(tasks.get(1)), tasks.get(0));
+ } else {
+ assertParentTask(Collections.singletonList(tasks.get(0)), tasks.get(1));
+ }
+ }
+
+ public void testTransportReplicationAllShardsTasks() {
+ registerTaskManageListeners(PercolateAction.NAME); // main task
+ registerTaskManageListeners(PercolateAction.NAME + "[s]"); // shard level tasks
+ createIndex("test");
+ ensureGreen("test"); // Make sure all shards are allocated
+ client().preparePercolate().setIndices("test").setDocumentType("foo").setSource("{}").get();
+
+ // the percolate operation should produce one main task
+ NumShards numberOfShards = getNumShards("test");
+ assertEquals(1, numberOfEvents(PercolateAction.NAME, Tuple::v1));
+ // and then one operation per shard
+ assertEquals(numberOfShards.totalNumShards, numberOfEvents(PercolateAction.NAME + "[s]", Tuple::v1));
+
+ // the shard level tasks should have the main task as a parent
+ assertParentTask(findEvents(PercolateAction.NAME + "[s]", Tuple::v1), findEvents(PercolateAction.NAME, Tuple::v1).get(0));
+ }
+
+ public void testTransportBroadcastByNodeTasks() {
+ registerTaskManageListeners(UpgradeAction.NAME); // main task
+ registerTaskManageListeners(UpgradeAction.NAME + "[n]"); // node level tasks
+ createIndex("test");
+ ensureGreen("test"); // Make sure all shards are allocated
+ client().admin().indices().prepareUpgrade("test").get();
+
+ // the upgrade operation should produce one main task
+ assertEquals(1, numberOfEvents(UpgradeAction.NAME, Tuple::v1));
+ // and then one operation on each node where shards are located
+ assertEquals(internalCluster().nodesInclude("test").size(), numberOfEvents(UpgradeAction.NAME + "[n]", Tuple::v1));
+
+ // all node level tasks should have the main task as a parent
+ assertParentTask(findEvents(UpgradeAction.NAME + "[n]", Tuple::v1), findEvents(UpgradeAction.NAME, Tuple::v1).get(0));
+ }
+
+ public void testTransportReplicationSingleShardTasks() {
+ registerTaskManageListeners(ValidateQueryAction.NAME); // main task
+ registerTaskManageListeners(ValidateQueryAction.NAME + "[s]"); // shard level tasks
+ createIndex("test");
+ ensureGreen("test"); // Make sure all shards are allocated
+ client().admin().indices().prepareValidateQuery("test").get();
+
+ // the validate operation should produce one main task
+ assertEquals(1, numberOfEvents(ValidateQueryAction.NAME, Tuple::v1));
+ // and then one shard level operation
+ assertEquals(1, numberOfEvents(ValidateQueryAction.NAME + "[s]", Tuple::v1));
+ // the shard level operation should have the main task as its parent
+ assertParentTask(findEvents(ValidateQueryAction.NAME + "[s]", Tuple::v1), findEvents(ValidateQueryAction.NAME, Tuple::v1).get(0));
+ }
+
+
+ public void testTransportBroadcastReplicationTasks() {
+ registerTaskManageListeners(RefreshAction.NAME); // main task
+ registerTaskManageListeners(RefreshAction.NAME + "[s]"); // shard level tasks
+ registerTaskManageListeners(RefreshAction.NAME + "[s][*]"); // primary and replica shard tasks
+ createIndex("test");
+ ensureGreen("test"); // Make sure all shards are allocated
+ client().admin().indices().prepareRefresh("test").get();
+
+ // the refresh operation should produce one main task
+ NumShards numberOfShards = getNumShards("test");
+
+ logger.debug("number of shards, total: [{}], primaries: [{}] ", numberOfShards.totalNumShards, numberOfShards.numPrimaries);
+ logger.debug("main events {}", numberOfEvents(RefreshAction.NAME, Tuple::v1));
+ logger.debug("main event node {}", findEvents(RefreshAction.NAME, Tuple::v1).get(0).getNode().name());
+ logger.debug("[s] events {}", numberOfEvents(RefreshAction.NAME + "[s]", Tuple::v1));
+ logger.debug("[s][*] events {}", numberOfEvents(RefreshAction.NAME + "[s][*]", Tuple::v1));
+ logger.debug("nodes with the index {}", internalCluster().nodesInclude("test"));
+
+ assertEquals(1, numberOfEvents(RefreshAction.NAME, Tuple::v1));
+ // Because it's a broadcast replication action we will have as many [s] level requests
+ // on the coordinating node as we have primary shards, plus one additional task for each
+ // primary located outside the coordinating node, due to replication.
+ // If all primaries are on the coordinating node, the number of tasks should be equal to the number of primaries
+ // If no primaries are on the coordinating node, the number of tasks should be equal to the number of primaries times 2
+ assertThat(numberOfEvents(RefreshAction.NAME + "[s]", Tuple::v1), greaterThanOrEqualTo(numberOfShards.numPrimaries));
+ assertThat(numberOfEvents(RefreshAction.NAME + "[s]", Tuple::v1), lessThanOrEqualTo(numberOfShards.numPrimaries * 2));
+
+ // Verify that all [s] events have the proper parent
+ // This is complicated because if the shard task runs on the same node it has the main task as a parent,
+ // but if it runs on a non-coordinating node it has another intermediate [s] task on the coordinating node as a parent
+ TaskInfo mainTask = findEvents(RefreshAction.NAME, Tuple::v1).get(0);
+ List<TaskInfo> sTasks = findEvents(RefreshAction.NAME + "[s]", Tuple::v1);
+ for (TaskInfo taskInfo : sTasks) {
+ if (mainTask.getNode().equals(taskInfo.getNode())) {
+ // This shard level task runs on the same node as the main task - it should have the main task as a direct parent
+ assertParentTask(Collections.singletonList(taskInfo), mainTask);
+ } else {
+ String description = taskInfo.getDescription();
+ // This shard level task runs on another node - it should have a corresponding shard level task on the node where the main task is running
+ List<TaskInfo> sTasksOnRequestingNode = findEvents(RefreshAction.NAME + "[s]",
+ event -> event.v1() && mainTask.getNode().equals(event.v2().getNode()) && description.equals(event.v2().getDescription()));
+ // There should be only one parent task
+ assertEquals(1, sTasksOnRequestingNode.size());
+ assertParentTask(Collections.singletonList(taskInfo), sTasksOnRequestingNode.get(0));
+ }
+ }
+
+ // we will have as many [s][p] and [s][r] tasks as we have primary and replica shards
+ assertEquals(numberOfShards.totalNumShards, numberOfEvents(RefreshAction.NAME + "[s][*]", Tuple::v1));
+
+ // the [s][p] and [s][r] tasks should have a corresponding [s] task on the same node as a parent
+ List<TaskInfo> spEvents = findEvents(RefreshAction.NAME + "[s][*]", Tuple::v1);
+ for (TaskInfo taskInfo : spEvents) {
+ List<TaskInfo> sTask;
+ if (taskInfo.getAction().endsWith("[s][p]")) {
+ // An [s][p] level task should have a corresponding [s] level task on the same node
+ sTask = findEvents(RefreshAction.NAME + "[s]",
+ event -> event.v1() && taskInfo.getNode().equals(event.v2().getNode()) && taskInfo.getDescription().equals(event.v2().getDescription()));
+ } else {
+ // An [s][r] level task should have a corresponding [s] level task on a different node (where the primary is located)
+ sTask = findEvents(RefreshAction.NAME + "[s]",
+ event -> event.v1() && taskInfo.getParentNode().equals(event.v2().getNode().getId()) && taskInfo.getDescription().equals(event.v2().getDescription()));
+ }
+ // There should be only one parent task
+ assertEquals(1, sTask.size());
+ assertParentTask(Collections.singletonList(taskInfo), sTask.get(0));
+ }
+ }
+
+ @Override
+ public void tearDown() throws Exception {
+ for (Map.Entry<Tuple<String, String>, RecordingTaskManagerListener> entry : listeners.entrySet()) {
+ ((MockTaskManager)internalCluster().getInstance(ClusterService.class, entry.getKey().v1()).getTaskManager()).removeListener(entry.getValue());
+ }
+ listeners.clear();
+ super.tearDown();
+ }
+
+ /**
+ * Registers recording task event listeners with the given action mask on all nodes
+ */
+ private void registerTaskManageListeners(String actionMasks) {
+ for (ClusterService clusterService : internalCluster().getInstances(ClusterService.class)) {
+ DiscoveryNode node = clusterService.localNode();
+ RecordingTaskManagerListener listener = new RecordingTaskManagerListener(node, Strings.splitStringToArray(actionMasks, ','));
+ ((MockTaskManager)clusterService.getTaskManager()).addListener(listener);
+ RecordingTaskManagerListener oldListener = listeners.put(new Tuple<>(node.name(), actionMasks), listener);
+ assertNull(oldListener);
+ }
+ }
+
+ /**
+ * Resets all recording task event listeners with the given action mask on all nodes
+ */
+ private void resetTaskManageListeners(String actionMasks) {
+ for (Map.Entry<Tuple<String, String>, RecordingTaskManagerListener> entry : listeners.entrySet()) {
+ if (actionMasks == null || entry.getKey().v2().equals(actionMasks)) {
+ entry.getValue().reset();
+ }
+ }
+ }
+
+ /**
+ * Returns the number of events that satisfy the criteria across all nodes
+ *
+ * @param actionMasks action masks to match
+ * @return number of events that satisfy the criteria
+ */
+ private int numberOfEvents(String actionMasks, Function<Tuple<Boolean, TaskInfo>, Boolean> criteria) {
+ return findEvents(actionMasks, criteria).size();
+ }
+
+ /**
+ * Returns all events that satisfy the criteria across all nodes
+ *
+ * @param actionMasks action masks to match
+ * @return all events that satisfy the criteria
+ */
+ private List<TaskInfo> findEvents(String actionMasks, Function<Tuple<Boolean, TaskInfo>, Boolean> criteria) {
+ List<TaskInfo> events = new ArrayList<>();
+ for (Map.Entry<Tuple<String, String>, RecordingTaskManagerListener> entry : listeners.entrySet()) {
+ if (actionMasks == null || entry.getKey().v2().equals(actionMasks)) {
+ for (Tuple<Boolean, TaskInfo> taskEvent : entry.getValue().getEvents()) {
+ if (criteria.apply(taskEvent)) {
+ events.add(taskEvent.v2());
+ }
+ }
+ }
+ }
+ return events;
+ }
+
+ /**
+ * Asserts that all tasks in the tasks list have the same parentTask
+ */
+ private void assertParentTask(List<TaskInfo> tasks, TaskInfo parentTask) {
+ for (TaskInfo task : tasks) {
+ assertNotNull(task.getParentNode());
+ assertEquals(parentTask.getNode().getId(), task.getParentNode());
+ assertEquals(parentTask.getId(), task.getParentId());
+ }
+ }
}
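
Throughout these tests the recorded events are Tuple<Boolean, TaskInfo> pairs, with v1 true for registration and false for unregistration, which is why the criteria predicates take the two forms seen above:

    numberOfEvents(action, Tuple::v1);                      // registrations only
    numberOfEvents(action, event -> event.v1() == false);   // unregistrations only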
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java
index d8d4f2656c..4e1c08261a 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java
@@ -48,10 +48,11 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.tasks.ChildTask;
import org.elasticsearch.tasks.Task;
+import org.elasticsearch.tasks.TaskManager;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.cluster.TestClusterService;
+import org.elasticsearch.test.tasks.MockTaskManager;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.transport.local.LocalTransport;
@@ -95,12 +96,11 @@ public class TransportTasksActionTests extends ESTestCase {
threadPool = null;
}
- @Before
- public final void setupTestNodes() throws Exception {
+ public void setupTestNodes(Settings settings) {
nodesCount = randomIntBetween(2, 10);
testNodes = new TestNode[nodesCount];
for (int i = 0; i < testNodes.length; i++) {
- testNodes[i] = new TestNode("node" + i, threadPool, Settings.EMPTY);
+ testNodes[i] = new TestNode("node" + i, threadPool, settings);
}
}
@@ -113,11 +113,20 @@ public class TransportTasksActionTests extends ESTestCase {
private static class TestNode implements Releasable {
public TestNode(String name, ThreadPool threadPool, Settings settings) {
- clusterService = new TestClusterService(threadPool);
- transportService = new TransportService(Settings.EMPTY,
- new LocalTransport(Settings.EMPTY, threadPool, Version.CURRENT, new NamedWriteableRegistry()),
- threadPool);
+ transportService = new TransportService(settings,
+ new LocalTransport(settings, threadPool, Version.CURRENT, new NamedWriteableRegistry()),
+ threadPool){
+ @Override
+ protected TaskManager createTaskManager() {
+ if (settings.getAsBoolean(MockTaskManager.USE_MOCK_TASK_MANAGER, false)) {
+ return new MockTaskManager(settings);
+ } else {
+ return super.createTaskManager();
+ }
+ }
+ };
transportService.start();
+ clusterService = new TestClusterService(threadPool, transportService);
discoveryNode = new DiscoveryNode(name, transportService.boundAddress().publishAddress(), Version.CURRENT);
transportListTasksAction = new TransportListTasksAction(settings, clusterName, threadPool, clusterService, transportService,
new ActionFilters(Collections.emptySet()), new IndexNameExpressionResolver(settings));
@@ -150,6 +159,15 @@ public class TransportTasksActionTests extends ESTestCase {
}
}
+ public static RecordingTaskManagerListener[] setupListeners(TestNode[] nodes, String... actionMasks) {
+ RecordingTaskManagerListener[] listeners = new RecordingTaskManagerListener[nodes.length];
+ for (int i = 0; i < nodes.length; i++) {
+ listeners[i] = new RecordingTaskManagerListener(nodes[i].discoveryNode, actionMasks);
+ ((MockTaskManager)(nodes[i].clusterService.getTaskManager())).addListener(listeners[i]);
+ }
+ return listeners;
+ }
+
public static class NodeRequest extends BaseNodeRequest {
protected String requestName;
private boolean enableTaskManager;
@@ -159,7 +177,7 @@ public class TransportTasksActionTests extends ESTestCase {
}
public NodeRequest(NodesRequest request, String nodeId) {
- super(request, nodeId);
+ super(nodeId);
requestName = request.requestName;
enableTaskManager = request.enableTaskManager;
}
@@ -180,7 +198,7 @@ public class TransportTasksActionTests extends ESTestCase {
@Override
public String getDescription() {
- return "NodeRequest[" + requestName + ", " + enableTaskManager + "]";
+ return "NodeRequest[" + requestName + ", " + enableTaskManager + "]";
}
@Override
@@ -464,6 +482,7 @@ public class TransportTasksActionTests extends ESTestCase {
}
public void testRunningTasksCount() throws Exception {
+ setupTestNodes(Settings.EMPTY);
connectNodes(testNodes);
CountDownLatch checkLatch = new CountDownLatch(1);
CountDownLatch responseLatch = new CountDownLatch(1);
@@ -553,6 +572,7 @@ public class TransportTasksActionTests extends ESTestCase {
}
public void testFindChildTasks() throws Exception {
+ setupTestNodes(Settings.EMPTY);
connectNodes(testNodes);
CountDownLatch checkLatch = new CountDownLatch(1);
ActionFuture<NodesResponse> future = startBlockingTestNodesAction(checkLatch);
@@ -586,10 +606,11 @@ public class TransportTasksActionTests extends ESTestCase {
}
public void testTaskManagementOptOut() throws Exception {
+ setupTestNodes(Settings.EMPTY);
connectNodes(testNodes);
CountDownLatch checkLatch = new CountDownLatch(1);
// Starting actions that disable task manager
- ActionFuture<NodesResponse> future = startBlockingTestNodesAction(checkLatch, new NodesRequest("Test Request", false));
+ ActionFuture<NodesResponse> future = startBlockingTestNodesAction(checkLatch, new NodesRequest("Test Request", false));
TestNode testNode = testNodes[randomIntBetween(0, testNodes.length - 1)];
@@ -606,6 +627,7 @@ public class TransportTasksActionTests extends ESTestCase {
}
public void testTasksDescriptions() throws Exception {
+ setupTestNodes(Settings.EMPTY);
connectNodes(testNodes);
CountDownLatch checkLatch = new CountDownLatch(1);
ActionFuture<NodesResponse> future = startBlockingTestNodesAction(checkLatch);
@@ -637,8 +659,11 @@ public class TransportTasksActionTests extends ESTestCase {
}
public void testFailedTasksCount() throws ExecutionException, InterruptedException, IOException {
+ Settings settings = Settings.builder().put(MockTaskManager.USE_MOCK_TASK_MANAGER, true).build();
+ setupTestNodes(settings);
connectNodes(testNodes);
TestNodesAction[] actions = new TestNodesAction[nodesCount];
+ RecordingTaskManagerListener[] listeners = setupListeners(testNodes, "testAction*");
for (int i = 0; i < testNodes.length; i++) {
final int node = i;
actions[i] = new TestNodesAction(Settings.EMPTY, "testAction", clusterName, threadPool, testNodes[i].clusterService, testNodes[i].transportService) {
@@ -656,9 +681,21 @@ public class TransportTasksActionTests extends ESTestCase {
NodesRequest request = new NodesRequest("Test Request");
NodesResponse responses = actions[0].execute(request).get();
assertEquals(nodesCount, responses.failureCount());
+
+ // Make sure that actions are still registered in the task manager on all nodes
+ // Twice on the coordinating node and once on all other nodes.
+ assertEquals(4, listeners[0].getEvents().size());
+ assertEquals(2, listeners[0].getRegistrationEvents().size());
+ assertEquals(2, listeners[0].getUnregistrationEvents().size());
+ for (int i = 1; i < listeners.length; i++) {
+ assertEquals(2, listeners[i].getEvents().size());
+ assertEquals(1, listeners[i].getRegistrationEvents().size());
+ assertEquals(1, listeners[i].getUnregistrationEvents().size());
+ }
}
public void testTaskLevelActionFailures() throws ExecutionException, InterruptedException, IOException {
+ setupTestNodes(Settings.EMPTY);
connectNodes(testNodes);
CountDownLatch checkLatch = new CountDownLatch(1);
ActionFuture<NodesResponse> future = startBlockingTestNodesAction(checkLatch);
@@ -672,7 +709,7 @@ public class TransportTasksActionTests extends ESTestCase {
@Override
protected TestTaskResponse taskOperation(TestTasksRequest request, Task task) {
logger.info("Task action on node " + node);
- if (failTaskOnNode == node && ((ChildTask) task).getParentNode() != null) {
+ if (failTaskOnNode == node && task.getParentNode() != null) {
logger.info("Failing on node " + node);
throw new RuntimeException("Task level failure");
}
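
With setupTestNodes no longer a @Before method, each test supplies its own settings; a test that needs task-event recording opts in to the mock task manager explicitly, as testFailedTasksCount does above:

    Settings settings = Settings.builder()
            .put(MockTaskManager.USE_MOCK_TASK_MANAGER, true)
            .build();
    setupTestNodes(settings);
    connectNodes(testNodes);
    RecordingTaskManagerListener[] listeners = setupListeners(testNodes, "testAction*");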
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java
index 56eaad11f9..20e66361ab 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java
@@ -27,6 +27,7 @@ import org.elasticsearch.common.Priority;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.index.store.Store;
+import org.elasticsearch.node.Node;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
@@ -60,17 +61,17 @@ public class ClusterStatsIT extends ESIntegTestCase {
ClusterStatsResponse response = client().admin().cluster().prepareClusterStats().get();
assertCounts(response.getNodesStats().getCounts(), 1, 0, 0, 1, 0);
- internalCluster().startNode(Settings.builder().put("node.data", false));
+ internalCluster().startNode(Settings.builder().put(Node.NODE_DATA_SETTING.getKey(), false));
waitForNodes(2);
response = client().admin().cluster().prepareClusterStats().get();
assertCounts(response.getNodesStats().getCounts(), 2, 1, 0, 1, 0);
- internalCluster().startNode(Settings.builder().put("node.master", false));
+ internalCluster().startNode(Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false));
waitForNodes(3);
response = client().admin().cluster().prepareClusterStats().get();
assertCounts(response.getNodesStats().getCounts(), 3, 1, 1, 1, 0);
- internalCluster().startNode(Settings.builder().put("node.client", true));
+ internalCluster().startNode(Settings.builder().put(Node.NODE_CLIENT_SETTING.getKey(), true));
waitForNodes(4);
response = client().admin().cluster().prepareClusterStats().get();
assertCounts(response.getNodesStats().getCounts(), 4, 1, 1, 1, 1);
@@ -92,7 +93,7 @@ public class ClusterStatsIT extends ESIntegTestCase {
ensureYellow();
response = client().admin().cluster().prepareClusterStats().get();
assertThat(response.getStatus(), Matchers.equalTo(ClusterHealthStatus.YELLOW));
- assertThat(response.indicesStats.getDocs().getCount(), Matchers.equalTo(0l));
+ assertThat(response.indicesStats.getDocs().getCount(), Matchers.equalTo(0L));
assertThat(response.indicesStats.getIndexCount(), Matchers.equalTo(1));
assertShardStats(response.getIndicesStats().getShards(), 1, 2, 2, 0.0);
@@ -103,7 +104,7 @@ public class ClusterStatsIT extends ESIntegTestCase {
refresh(); // make the doc visible
response = client().admin().cluster().prepareClusterStats().get();
assertThat(response.getStatus(), Matchers.equalTo(ClusterHealthStatus.GREEN));
- assertThat(response.indicesStats.getDocs().getCount(), Matchers.equalTo(1l));
+ assertThat(response.indicesStats.getDocs().getCount(), Matchers.equalTo(1L));
assertShardStats(response.getIndicesStats().getShards(), 1, 4, 2, 1.0);
prepareCreate("test2").setSettings("number_of_shards", 3, "number_of_replicas", 0).get();
@@ -140,10 +141,10 @@ public class ClusterStatsIT extends ESIntegTestCase {
ensureYellow("test1");
ClusterStatsResponse response = client().admin().cluster().prepareClusterStats().get();
String msg = response.toString();
- assertThat(msg, response.getTimestamp(), Matchers.greaterThan(946681200000l)); // 1 Jan 2000
- assertThat(msg, response.indicesStats.getStore().getSizeInBytes(), Matchers.greaterThan(0l));
+ assertThat(msg, response.getTimestamp(), Matchers.greaterThan(946681200000L)); // 1 Jan 2000
+ assertThat(msg, response.indicesStats.getStore().getSizeInBytes(), Matchers.greaterThan(0L));
- assertThat(msg, response.nodesStats.getFs().getTotal().bytes(), Matchers.greaterThan(0l));
+ assertThat(msg, response.nodesStats.getFs().getTotal().bytes(), Matchers.greaterThan(0L));
assertThat(msg, response.nodesStats.getJvm().getVersions().size(), Matchers.greaterThan(0));
assertThat(msg, response.nodesStats.getVersions().size(), Matchers.greaterThan(0));
@@ -164,7 +165,7 @@ public class ClusterStatsIT extends ESIntegTestCase {
internalCluster().ensureAtMostNumDataNodes(0);
// start one node with 7 processors.
- internalCluster().startNodesAsync(Settings.builder().put(EsExecutors.PROCESSORS, 7).build()).get();
+ internalCluster().startNodesAsync(Settings.builder().put(EsExecutors.PROCESSORS_SETTING.getKey(), 7).build()).get();
waitForNodes(1);
ClusterStatsResponse response = client().admin().cluster().prepareClusterStats().get();
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java
index cb0e0fa0f7..ed4e8cd387 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java
@@ -47,7 +47,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
@Override
public void setUp() throws Exception {
super.setUp();
- Settings settings = Settings.builder().put("path.home", createTempDir().toString()).build();
+ Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
Settings indexSettings = settingsBuilder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
@@ -57,7 +57,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
.putArray("index.analysis.analyzer.custom_analyzer.filter", "lowercase", "wordDelimiter")
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "whitespace")
.putArray("index.analysis.analyzer.custom_analyzer.filter", "lowercase", "wordDelimiter").build();
- IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("index"), indexSettings);
+ IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
environment = new Environment(settings);
registry = new AnalysisRegistry(null, environment);
analysisService = registry.build(idxSettings);
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java
index 292705305c..5cf9d85801 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java
@@ -54,7 +54,7 @@ import static org.hamcrest.core.IsNull.notNullValue;
public class CreateIndexIT extends ESIntegTestCase {
public void testCreationDateGivenFails() {
try {
- prepareCreate("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_CREATION_DATE, 4l)).get();
+ prepareCreate("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_CREATION_DATE, 4L)).get();
fail();
} catch (IllegalArgumentException ex) {
assertEquals("unknown setting [index.creation_date]", ex.getMessage());
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java
index 8439e98f0e..18b22b95cb 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java
@@ -148,7 +148,7 @@ public class SyncedFlushUnitTests extends ESTestCase {
int failures = 0;
List<ShardsSyncedFlushResult> shardsResults = new ArrayList<>();
for (int shard = 0; shard < shards; shard++) {
- final ShardId shardId = new ShardId(index, shard);
+ final ShardId shardId = new ShardId(index, "_na_", shard);
if (randomInt(5) < 2) {
// total shard failure
failed += replicas + 1;
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java
index d7f85ec565..118560b7ab 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java
@@ -37,11 +37,8 @@ import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.InternalSettingsPlugin;
-import org.elasticsearch.test.MockIndexEventListener;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.test.store.MockFSIndexStore;
-import org.elasticsearch.test.transport.MockTransportService;
import java.util.Collection;
import java.util.HashMap;
@@ -61,6 +58,7 @@ import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST)
+@TestLogging("_root:DEBUG,action.admin.indices.shards:TRACE,cluster.service:TRACE")
public class IndicesShardStoreRequestIT extends ESIntegTestCase {
@Override
@@ -74,7 +72,6 @@ public class IndicesShardStoreRequestIT extends ESIntegTestCase {
assertThat(rsp.getStoreStatuses().size(), equalTo(0));
}
- @TestLogging("action.admin.indices.shards:TRACE,cluster.service:TRACE")
public void testBasic() throws Exception {
String index = "test";
internalCluster().ensureAtLeastNumDataNodes(2);
@@ -96,7 +93,7 @@ public class IndicesShardStoreRequestIT extends ESIntegTestCase {
assertThat(shardStores.values().size(), equalTo(2));
for (ObjectCursor<List<IndicesShardStoresResponse.StoreStatus>> shardStoreStatuses : shardStores.values()) {
for (IndicesShardStoresResponse.StoreStatus storeStatus : shardStoreStatuses.value) {
- assertThat(storeStatus.getVersion(), greaterThan(-1l));
+ assertThat(storeStatus.getVersion(), greaterThan(-1L));
assertThat(storeStatus.getAllocationId(), notNullValue());
assertThat(storeStatus.getNode(), notNullValue());
assertThat(storeStatus.getStoreException(), nullValue());
@@ -194,10 +191,10 @@ public class IndicesShardStoreRequestIT extends ESIntegTestCase {
for (IndicesShardStoresResponse.StoreStatus status : shardStatus.value) {
if (corruptedShardIDMap.containsKey(shardStatus.key)
&& corruptedShardIDMap.get(shardStatus.key).contains(status.getNode().name())) {
- assertThat(status.getVersion(), greaterThanOrEqualTo(0l));
+ assertThat(status.getVersion(), greaterThanOrEqualTo(0L));
assertThat(status.getStoreException(), notNullValue());
} else {
- assertThat(status.getVersion(), greaterThanOrEqualTo(0l));
+ assertThat(status.getVersion(), greaterThanOrEqualTo(0L));
assertNull(status.getStoreException());
}
}
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java
index 4d7e9aa216..1f18da5820 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java
@@ -66,11 +66,11 @@ public class IndicesStatsTests extends ESSingleNodeTestCase {
IndicesStatsResponse rsp = client().admin().indices().prepareStats("test").get();
SegmentsStats stats = rsp.getIndex("test").getTotal().getSegments();
- assertThat(stats.getTermsMemoryInBytes(), greaterThan(0l));
- assertThat(stats.getStoredFieldsMemoryInBytes(), greaterThan(0l));
- assertThat(stats.getTermVectorsMemoryInBytes(), greaterThan(0l));
- assertThat(stats.getNormsMemoryInBytes(), greaterThan(0l));
- assertThat(stats.getDocValuesMemoryInBytes(), greaterThan(0l));
+ assertThat(stats.getTermsMemoryInBytes(), greaterThan(0L));
+ assertThat(stats.getStoredFieldsMemoryInBytes(), greaterThan(0L));
+ assertThat(stats.getTermVectorsMemoryInBytes(), greaterThan(0L));
+ assertThat(stats.getNormsMemoryInBytes(), greaterThan(0L));
+ assertThat(stats.getDocValuesMemoryInBytes(), greaterThan(0L));
// now check multiple segments stats are merged together
client().prepareIndex("test", "doc", "2").setSource("foo", "bar").get();
@@ -93,7 +93,7 @@ public class IndicesStatsTests extends ESSingleNodeTestCase {
for (ShardStats shardStats : rsp.getIndex("test").getShards()) {
final CommitStats commitStats = shardStats.getCommitStats();
assertNotNull(commitStats);
- assertThat(commitStats.getGeneration(), greaterThan(0l));
+ assertThat(commitStats.getGeneration(), greaterThan(0L));
assertThat(commitStats.getId(), notNullValue());
assertThat(commitStats.getUserData(), hasKey(Translog.TRANSLOG_GENERATION_KEY));
assertThat(commitStats.getUserData(), hasKey(Translog.TRANSLOG_UUID_KEY));
diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java
index 04b58e6b9f..70d78e78f2 100644
--- a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java
+++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java
@@ -31,6 +31,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.Arrays;
@@ -156,7 +157,7 @@ public class BulkProcessorIT extends ESIntegTestCase {
public void testBulkProcessorConcurrentRequestsNoNodeAvailableException() throws Exception {
//we create a transport client with no nodes to make sure it throws NoNodeAvailableException
Settings settings = Settings.builder()
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
Client transportClient = TransportClient.builder().settings(settings).build();
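
Aside: the hunk above replaces the raw "path.home" string with the typed setting constant, so the key is defined in exactly one place. A minimal sketch of the pattern, assuming only the Environment.PATH_HOME_SETTING constant that the new import brings in (the path value is made up):

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.env.Environment;

    public class PathHomeDemo {
        public static void main(String[] args) {
            // Resolving the key through the constant avoids typos in the literal.
            Settings settings = Settings.builder()
                    .put(Environment.PATH_HOME_SETTING.getKey(), "/tmp/es-home")
                    .build();
            System.out.println(settings.get("path.home")); // prints: /tmp/es-home
        }
    }
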
diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkShardRequestTests.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkShardRequestTests.java
index d218d4d62f..3ad343e246 100644
--- a/core/src/test/java/org/elasticsearch/action/bulk/BulkShardRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkShardRequestTests.java
@@ -28,9 +28,9 @@ public class BulkShardRequestTests extends ESTestCase {
public void testToString() {
String index = randomSimpleString(getRandom(), 10);
int count = between(1, 100);
- BulkShardRequest r = new BulkShardRequest(null, new ShardId(index, 0), false, new BulkItemRequest[count]);
+ BulkShardRequest r = new BulkShardRequest(null, new ShardId(index, "ignored", 0), false, new BulkItemRequest[count]);
assertEquals("BulkShardRequest to [" + index + "] containing [" + count + "] requests", r.toString());
- r = new BulkShardRequest(null, new ShardId(index, 0), true, new BulkItemRequest[count]);
+ r = new BulkShardRequest(null, new ShardId(index, "ignored", 0), true, new BulkItemRequest[count]);
assertEquals("BulkShardRequest to [" + index + "] containing [" + count + "] requests and a refresh", r.toString());
}
}
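
Aside: this hunk tracks a ShardId constructor that now takes an index UUID as its middle argument. A small sketch of the constructor as used above; because only toString() output is under test, a placeholder UUID is enough:

    import org.elasticsearch.index.shard.ShardId;

    public class ShardIdDemo {
        public static void main(String[] args) {
            // index name, index UUID (placeholder here), shard number
            ShardId shardId = new ShardId("my-index", "ignored", 0);
            System.out.println(shardId); // e.g. [my-index][0]
        }
    }
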
diff --git a/core/src/test/java/org/elasticsearch/action/ingest/BulkRequestModifierTests.java b/core/src/test/java/org/elasticsearch/action/ingest/BulkRequestModifierTests.java
new file mode 100644
index 0000000000..66c2a0183e
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/action/ingest/BulkRequestModifierTests.java
@@ -0,0 +1,165 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.bulk.BulkItemResponse;
+import org.elasticsearch.action.bulk.BulkRequest;
+import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.index.IndexResponse;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.test.ESTestCase;
+import org.hamcrest.Matchers;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.nullValue;
+import static org.mockito.Mockito.mock;
+
+public class BulkRequestModifierTests extends ESTestCase {
+
+ public void testBulkRequestModifier() {
+ int numRequests = scaledRandomIntBetween(8, 64);
+ BulkRequest bulkRequest = new BulkRequest();
+ for (int i = 0; i < numRequests; i++) {
+ bulkRequest.add(new IndexRequest("_index", "_type", String.valueOf(i)).source("{}"));
+ }
+ CaptureActionListener actionListener = new CaptureActionListener();
+ IngestActionFilter.BulkRequestModifier bulkRequestModifier = new IngestActionFilter.BulkRequestModifier(bulkRequest);
+
+ int i = 0;
+ Set<Integer> failedSlots = new HashSet<>();
+ while (bulkRequestModifier.hasNext()) {
+ bulkRequestModifier.next();
+ if (randomBoolean()) {
+ bulkRequestModifier.markCurrentItemAsFailed(new RuntimeException());
+ failedSlots.add(i);
+ }
+ i++;
+ }
+
+ assertThat(bulkRequestModifier.getBulkRequest().requests().size(), equalTo(numRequests - failedSlots.size()));
+ // simulate that we actually executed the modified bulk request:
+ ActionListener<BulkResponse> result = bulkRequestModifier.wrapActionListenerIfNeeded(actionListener);
+ result.onResponse(new BulkResponse(new BulkItemResponse[numRequests - failedSlots.size()], 0));
+
+ BulkResponse bulkResponse = actionListener.getResponse();
+ for (int j = 0; j < bulkResponse.getItems().length; j++) {
+ if (failedSlots.contains(j)) {
+ BulkItemResponse item = bulkResponse.getItems()[j];
+ assertThat(item.isFailed(), is(true));
+ assertThat(item.getFailure().getIndex(), equalTo("_index"));
+ assertThat(item.getFailure().getType(), equalTo("_type"));
+ assertThat(item.getFailure().getId(), equalTo(String.valueOf(j)));
+ assertThat(item.getFailure().getMessage(), equalTo("java.lang.RuntimeException"));
+ } else {
+ assertThat(bulkResponse.getItems()[j], nullValue());
+ }
+ }
+ }
+
+ public void testPipelineFailures() {
+ BulkRequest originalBulkRequest = new BulkRequest();
+ for (int i = 0; i < 32; i++) {
+ originalBulkRequest.add(new IndexRequest("index", "type", String.valueOf(i)));
+ }
+
+ IngestActionFilter.BulkRequestModifier modifier = new IngestActionFilter.BulkRequestModifier(originalBulkRequest);
+ for (int i = 0; modifier.hasNext(); i++) {
+ modifier.next();
+ if (i % 2 == 0) {
+ modifier.markCurrentItemAsFailed(new RuntimeException());
+ }
+ }
+
+ // Half of the requests have "failed", so only the successful requests are left:
+ BulkRequest bulkRequest = modifier.getBulkRequest();
+ assertThat(bulkRequest.requests().size(), Matchers.equalTo(16));
+
+ List<BulkItemResponse> responses = new ArrayList<>();
+ ActionListener<BulkResponse> bulkResponseListener = modifier.wrapActionListenerIfNeeded(new ActionListener<BulkResponse>() {
+ @Override
+ public void onResponse(BulkResponse bulkItemResponses) {
+ responses.addAll(Arrays.asList(bulkItemResponses.getItems()));
+ }
+
+ @Override
+ public void onFailure(Throwable e) {
+ }
+ });
+
+ List<BulkItemResponse> originalResponses = new ArrayList<>();
+ for (ActionRequest actionRequest : bulkRequest.requests()) {
+ IndexRequest indexRequest = (IndexRequest) actionRequest;
+ IndexResponse indexResponse = new IndexResponse(new ShardId("index", "_na_", 0), indexRequest.type(), indexRequest.id(), 1, true);
+ originalResponses.add(new BulkItemResponse(Integer.parseInt(indexRequest.id()), indexRequest.opType().lowercase(), indexResponse));
+ }
+ bulkResponseListener.onResponse(new BulkResponse(originalResponses.toArray(new BulkItemResponse[originalResponses.size()]), 0));
+
+ assertThat(responses.size(), Matchers.equalTo(32));
+ for (int i = 0; i < 32; i++) {
+ assertThat(responses.get(i).getId(), Matchers.equalTo(String.valueOf(i)));
+ }
+ }
+
+ public void testNoFailures() {
+ BulkRequest originalBulkRequest = new BulkRequest();
+ for (int i = 0; i < 32; i++) {
+ originalBulkRequest.add(new IndexRequest("index", "type", String.valueOf(i)));
+ }
+
+ IngestActionFilter.BulkRequestModifier modifier = new IngestActionFilter.BulkRequestModifier(originalBulkRequest);
+ while (modifier.hasNext()) {
+ modifier.next();
+ }
+
+ BulkRequest bulkRequest = modifier.getBulkRequest();
+ assertThat(bulkRequest, Matchers.sameInstance(originalBulkRequest));
+ @SuppressWarnings("unchecked")
+ ActionListener<BulkResponse> actionListener = mock(ActionListener.class);
+ assertThat(modifier.wrapActionListenerIfNeeded(actionListener), Matchers.sameInstance(actionListener));
+ }
+
+ private static class CaptureActionListener implements ActionListener<BulkResponse> {
+
+ private BulkResponse response;
+
+ @Override
+ public void onResponse(BulkResponse bulkItemResponses) {
+ this.response = bulkItemResponses;
+ }
+
+ @Override
+ public void onFailure(Throwable e) {
+ }
+
+ public BulkResponse getResponse() {
+ return response;
+ }
+ }
+}
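
Aside: a condensed sketch of the flow these tests exercise. BulkRequestModifier is an inner class of IngestActionFilter, so real callers sit in the same package; the listener parameter here is a stand-in:

    package org.elasticsearch.action.ingest;

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.action.bulk.BulkRequest;
    import org.elasticsearch.action.bulk.BulkResponse;
    import org.elasticsearch.action.index.IndexRequest;

    public class BulkRequestModifierSketch {
        static void demo(ActionListener<BulkResponse> originalListener) {
            BulkRequest original = new BulkRequest();
            original.add(new IndexRequest("idx", "type", "1").source("{}"));
            original.add(new IndexRequest("idx", "type", "2").source("{}"));

            IngestActionFilter.BulkRequestModifier modifier =
                    new IngestActionFilter.BulkRequestModifier(original);
            while (modifier.hasNext()) {
                modifier.next();
                // items whose pipeline failed would be dropped here:
                // modifier.markCurrentItemAsFailed(new RuntimeException("pipeline failed"));
            }
            // Only the surviving items remain in the slimmed-down request.
            BulkRequest slimmed = modifier.getBulkRequest();
            // The wrapped listener splices synthetic failures back into their
            // original slots when the real bulk response arrives.
            ActionListener<BulkResponse> wrapped =
                    modifier.wrapActionListenerIfNeeded(originalListener);
        }
    }
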
diff --git a/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java b/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java
new file mode 100644
index 0000000000..e1ffe94e63
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/action/ingest/IngestActionFilterTests.java
@@ -0,0 +1,249 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.bulk.BulkAction;
+import org.elasticsearch.action.bulk.BulkRequest;
+import org.elasticsearch.action.delete.DeleteRequest;
+import org.elasticsearch.action.index.IndexAction;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.support.ActionFilterChain;
+import org.elasticsearch.action.update.UpdateRequest;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.ingest.IngestService;
+import org.elasticsearch.ingest.PipelineExecutionService;
+import org.elasticsearch.ingest.PipelineStore;
+import org.elasticsearch.ingest.core.CompoundProcessor;
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.core.Pipeline;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.node.service.NodeService;
+import org.elasticsearch.tasks.Task;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.junit.Before;
+import org.mockito.stubbing.Answer;
+
+import java.util.function.Consumer;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.nullValue;
+import static org.mockito.Matchers.same;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyZeroInteractions;
+import static org.mockito.Mockito.when;
+
+public class IngestActionFilterTests extends ESTestCase {
+
+ private IngestActionFilter filter;
+ private PipelineExecutionService executionService;
+
+ @Before
+ public void setup() {
+ executionService = mock(PipelineExecutionService.class);
+ IngestService ingestService = mock(IngestService.class);
+ when(ingestService.getPipelineExecutionService()).thenReturn(executionService);
+ NodeService nodeService = mock(NodeService.class);
+ when(nodeService.getIngestService()).thenReturn(ingestService);
+ filter = new IngestActionFilter(Settings.EMPTY, nodeService);
+ }
+
+ public void testApplyNoPipelineId() throws Exception {
+ IndexRequest indexRequest = new IndexRequest();
+ Task task = mock(Task.class);
+ ActionListener actionListener = mock(ActionListener.class);
+ ActionFilterChain actionFilterChain = mock(ActionFilterChain.class);
+
+ filter.apply(task, IndexAction.NAME, indexRequest, actionListener, actionFilterChain);
+
+ verify(actionFilterChain).proceed(task, IndexAction.NAME, indexRequest, actionListener);
+ verifyZeroInteractions(executionService, actionFilterChain);
+ }
+
+ public void testApplyBulkNoPipelineId() throws Exception {
+ BulkRequest bulkRequest = new BulkRequest();
+ bulkRequest.add(new IndexRequest());
+ Task task = mock(Task.class);
+ ActionListener actionListener = mock(ActionListener.class);
+ ActionFilterChain actionFilterChain = mock(ActionFilterChain.class);
+
+ filter.apply(task, BulkAction.NAME, bulkRequest, actionListener, actionFilterChain);
+
+ verify(actionFilterChain).proceed(task, BulkAction.NAME, bulkRequest, actionListener);
+ verifyZeroInteractions(executionService, actionFilterChain);
+ }
+
+ @SuppressWarnings("unchecked")
+ public void testApplyIngestIdViaRequestParam() throws Exception {
+ Task task = mock(Task.class);
+ IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").setPipeline("_id");
+ indexRequest.source("field", "value");
+ ActionListener actionListener = mock(ActionListener.class);
+ ActionFilterChain actionFilterChain = mock(ActionFilterChain.class);
+
+ filter.apply(task, IndexAction.NAME, indexRequest, actionListener, actionFilterChain);
+
+ verify(executionService).execute(same(indexRequest), any(Consumer.class), any(Consumer.class));
+ verifyZeroInteractions(actionFilterChain);
+ }
+
+ @SuppressWarnings("unchecked")
+ public void testApplyExecuted() throws Exception {
+ Task task = mock(Task.class);
+ IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").setPipeline("_id");
+ indexRequest.source("field", "value");
+ ActionListener actionListener = mock(ActionListener.class);
+ ActionFilterChain actionFilterChain = mock(ActionFilterChain.class);
+
+ Answer answer = invocationOnMock -> {
+ @SuppressWarnings("unchecked")
+ Consumer<Boolean> listener = (Consumer) invocationOnMock.getArguments()[2];
+ listener.accept(true);
+ return null;
+ };
+ doAnswer(answer).when(executionService).execute(any(IndexRequest.class), any(Consumer.class), any(Consumer.class));
+ filter.apply(task, IndexAction.NAME, indexRequest, actionListener, actionFilterChain);
+
+ verify(executionService).execute(same(indexRequest), any(Consumer.class), any(Consumer.class));
+ verify(actionFilterChain).proceed(task, IndexAction.NAME, indexRequest, actionListener);
+ verifyZeroInteractions(actionListener);
+ }
+
+ @SuppressWarnings("unchecked")
+ public void testApplyFailed() throws Exception {
+ Task task = mock(Task.class);
+ IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").setPipeline("_id");
+ indexRequest.source("field", "value");
+ ActionListener actionListener = mock(ActionListener.class);
+ ActionFilterChain actionFilterChain = mock(ActionFilterChain.class);
+
+ RuntimeException exception = new RuntimeException();
+ Answer answer = invocationOnMock -> {
+ Consumer<Throwable> handler = (Consumer) invocationOnMock.getArguments()[1];
+ handler.accept(exception);
+ return null;
+ };
+ doAnswer(answer).when(executionService).execute(same(indexRequest), any(Consumer.class), any(Consumer.class));
+ filter.apply(task, IndexAction.NAME, indexRequest, actionListener, actionFilterChain);
+
+ verify(executionService).execute(same(indexRequest), any(Consumer.class), any(Consumer.class));
+ verify(actionListener).onFailure(exception);
+ verifyZeroInteractions(actionFilterChain);
+ }
+
+ public void testApplyWithBulkRequest() throws Exception {
+ Task task = mock(Task.class);
+ ThreadPool threadPool = mock(ThreadPool.class);
+ when(threadPool.executor(any())).thenReturn(Runnable::run);
+ PipelineStore store = mock(PipelineStore.class);
+
+ Processor processor = new Processor() {
+ @Override
+ public void execute(IngestDocument ingestDocument) {
+ ingestDocument.setFieldValue("field2", "value2");
+ }
+
+ @Override
+ public String getType() {
+ return null;
+ }
+
+ @Override
+ public String getTag() {
+ return null;
+ }
+ };
+ when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", new CompoundProcessor(processor)));
+ executionService = new PipelineExecutionService(store, threadPool);
+ IngestService ingestService = mock(IngestService.class);
+ when(ingestService.getPipelineExecutionService()).thenReturn(executionService);
+ NodeService nodeService = mock(NodeService.class);
+ when(nodeService.getIngestService()).thenReturn(ingestService);
+ filter = new IngestActionFilter(Settings.EMPTY, nodeService);
+
+ BulkRequest bulkRequest = new BulkRequest();
+ int numRequest = scaledRandomIntBetween(8, 64);
+ for (int i = 0; i < numRequest; i++) {
+ if (rarely()) {
+ ActionRequest request;
+ if (randomBoolean()) {
+ request = new DeleteRequest("_index", "_type", "_id");
+ } else {
+ request = new UpdateRequest("_index", "_type", "_id");
+ }
+ bulkRequest.add(request);
+ } else {
+ IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").setPipeline("_id");
+ indexRequest.source("field1", "value1");
+ bulkRequest.add(indexRequest);
+ }
+ }
+
+ ActionListener actionListener = mock(ActionListener.class);
+ ActionFilterChain actionFilterChain = mock(ActionFilterChain.class);
+
+ filter.apply(task, BulkAction.NAME, bulkRequest, actionListener, actionFilterChain);
+
+ assertBusy(() -> {
+ verify(actionFilterChain).proceed(task, BulkAction.NAME, bulkRequest, actionListener);
+ verifyZeroInteractions(actionListener);
+
+ int assertedRequests = 0;
+ for (ActionRequest actionRequest : bulkRequest.requests()) {
+ if (actionRequest instanceof IndexRequest) {
+ IndexRequest indexRequest = (IndexRequest) actionRequest;
+ assertThat(indexRequest.sourceAsMap().size(), equalTo(2));
+ assertThat(indexRequest.sourceAsMap().get("field1"), equalTo("value1"));
+ assertThat(indexRequest.sourceAsMap().get("field2"), equalTo("value2"));
+ }
+ assertedRequests++;
+ }
+ assertThat(assertedRequests, equalTo(numRequest));
+ });
+ }
+
+ @SuppressWarnings("unchecked")
+ public void testIndexApiSinglePipelineExecution() {
+ Answer answer = invocationOnMock -> {
+ @SuppressWarnings("unchecked")
+ Consumer<Boolean> listener = (Consumer) invocationOnMock.getArguments()[2];
+ listener.accept(true);
+ return null;
+ };
+ doAnswer(answer).when(executionService).execute(any(IndexRequest.class), any(Consumer.class), any(Consumer.class));
+
+ Task task = mock(Task.class);
+ ActionListener actionListener = mock(ActionListener.class);
+ ActionFilterChain actionFilterChain = mock(ActionFilterChain.class);
+
+ IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").setPipeline("_id").source("field", "value");
+ filter.apply(task, IndexAction.NAME, indexRequest, actionListener, actionFilterChain);
+ assertThat(indexRequest.getPipeline(), nullValue());
+ filter.apply(task, IndexAction.NAME, indexRequest, actionListener, actionFilterChain);
+ verify(executionService, times(1)).execute(same(indexRequest), any(Consumer.class), any(Consumer.class));
+ verify(actionFilterChain, times(2)).proceed(task, IndexAction.NAME, indexRequest, actionListener);
+ }
+}
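
Aside: several tests above drive the mocked PipelineExecutionService via Mockito's doAnswer, pulling the callback out of the invocation and firing it synchronously so no threads are needed. A self-contained sketch of that pattern against a hypothetical AsyncService interface:

    import java.util.function.Consumer;
    import org.mockito.stubbing.Answer;
    import static org.mockito.Mockito.any;
    import static org.mockito.Mockito.anyString;
    import static org.mockito.Mockito.doAnswer;
    import static org.mockito.Mockito.mock;

    public class DoAnswerSketch {
        // Hypothetical async API standing in for PipelineExecutionService.
        interface AsyncService {
            void execute(String request, Consumer<Throwable> onFailure, Consumer<Boolean> onSuccess);
        }

        public static void main(String[] args) {
            AsyncService service = mock(AsyncService.class);
            Answer<Void> answer = invocation -> {
                @SuppressWarnings("unchecked")
                Consumer<Boolean> onSuccess = (Consumer<Boolean>) invocation.getArguments()[2];
                onSuccess.accept(true); // fire the success callback synchronously
                return null;
            };
            doAnswer(answer).when(service).execute(anyString(), any(Consumer.class), any(Consumer.class));
            service.execute("req", t -> {}, ok -> System.out.println("completed: " + ok));
        }
    }
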
diff --git a/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java b/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java
new file mode 100644
index 0000000000..fa9728c4cd
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java
@@ -0,0 +1,251 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.bulk.BulkAction;
+import org.elasticsearch.action.bulk.BulkRequest;
+import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.index.IndexAction;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.index.IndexResponse;
+import org.elasticsearch.action.support.ActionFilterChain;
+import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.cluster.ClusterService;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.tasks.Task;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.VersionUtils;
+import org.elasticsearch.transport.TransportException;
+import org.elasticsearch.transport.TransportRequest;
+import org.elasticsearch.transport.TransportResponse;
+import org.elasticsearch.transport.TransportResponseHandler;
+import org.elasticsearch.transport.TransportService;
+import org.hamcrest.CustomTypeSafeMatcher;
+import org.mockito.stubbing.Answer;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.argThat;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Matchers.same;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyZeroInteractions;
+import static org.mockito.Mockito.when;
+
+public class IngestProxyActionFilterTests extends ESTestCase {
+
+ private TransportService transportService;
+
+ @SuppressWarnings("unchecked")
+ private IngestProxyActionFilter buildFilter(int ingestNodes, int totalNodes) {
+ ClusterState.Builder clusterState = new ClusterState.Builder(new ClusterName("_name"));
+ DiscoveryNodes.Builder builder = new DiscoveryNodes.Builder();
+ DiscoveryNode localNode = null;
+ for (int i = 0; i < totalNodes; i++) {
+ String nodeId = "node" + i;
+ Map<String, String> attributes = new HashMap<>();
+ if (i >= ingestNodes) {
+ attributes.put("ingest", "false");
+ } else if (randomBoolean()) {
+ attributes.put("ingest", "true");
+ }
+ DiscoveryNode node = new DiscoveryNode(nodeId, nodeId, DummyTransportAddress.INSTANCE, attributes, VersionUtils.randomVersion(random()));
+ builder.put(node);
+ if (i == totalNodes - 1) {
+ localNode = node;
+ }
+ }
+ clusterState.nodes(builder);
+ ClusterService clusterService = mock(ClusterService.class);
+ when(clusterService.localNode()).thenReturn(localNode);
+ when(clusterService.state()).thenReturn(clusterState.build());
+ transportService = mock(TransportService.class);
+ return new IngestProxyActionFilter(clusterService, transportService);
+ }
+
+ public void testApplyNoIngestNodes() {
+ Task task = mock(Task.class);
+ ActionListener actionListener = mock(ActionListener.class);
+ ActionFilterChain actionFilterChain = mock(ActionFilterChain.class);
+ int totalNodes = randomIntBetween(1, 5);
+ IngestProxyActionFilter filter = buildFilter(0, totalNodes);
+
+ String action;
+ ActionRequest request;
+ if (randomBoolean()) {
+ action = IndexAction.NAME;
+ request = new IndexRequest().setPipeline("_id");
+ } else {
+ action = BulkAction.NAME;
+ request = new BulkRequest().add(new IndexRequest().setPipeline("_id"));
+ }
+ try {
+ filter.apply(task, action, request, actionListener, actionFilterChain);
+ fail("should have failed because there are no ingest nodes");
+ } catch(IllegalStateException e) {
+ assertThat(e.getMessage(), equalTo("There are no ingest nodes in this cluster, unable to forward request to an ingest node."));
+ }
+ verifyZeroInteractions(transportService);
+ verifyZeroInteractions(actionFilterChain);
+ verifyZeroInteractions(actionListener);
+ }
+
+ public void testApplyNoPipelineId() {
+ Task task = mock(Task.class);
+ ActionListener actionListener = mock(ActionListener.class);
+ ActionFilterChain actionFilterChain = mock(ActionFilterChain.class);
+ int totalNodes = randomIntBetween(1, 5);
+ IngestProxyActionFilter filter = buildFilter(randomIntBetween(0, totalNodes - 1), totalNodes);
+
+ String action;
+ ActionRequest request;
+ if (randomBoolean()) {
+ action = IndexAction.NAME;
+ request = new IndexRequest();
+ } else {
+ action = BulkAction.NAME;
+ request = new BulkRequest().add(new IndexRequest());
+ }
+ filter.apply(task, action, request, actionListener, actionFilterChain);
+ verifyZeroInteractions(transportService);
+ verify(actionFilterChain).proceed(any(Task.class), eq(action), same(request), same(actionListener));
+ verifyZeroInteractions(actionListener);
+ }
+
+ public void testApplyAnyAction() {
+ Task task = mock(Task.class);
+ ActionListener actionListener = mock(ActionListener.class);
+ ActionFilterChain actionFilterChain = mock(ActionFilterChain.class);
+ ActionRequest request = mock(ActionRequest.class);
+ int totalNodes = randomIntBetween(1, 5);
+ IngestProxyActionFilter filter = buildFilter(randomIntBetween(0, totalNodes - 1), totalNodes);
+
+ String action = randomAsciiOfLengthBetween(1, 20);
+ filter.apply(task, action, request, actionListener, actionFilterChain);
+ verifyZeroInteractions(transportService);
+ verify(actionFilterChain).proceed(any(Task.class), eq(action), same(request), same(actionListener));
+ verifyZeroInteractions(actionListener);
+ }
+
+ @SuppressWarnings("unchecked")
+ public void testApplyIndexRedirect() {
+ Task task = mock(Task.class);
+ ActionListener actionListener = mock(ActionListener.class);
+ ActionFilterChain actionFilterChain = mock(ActionFilterChain.class);
+ int totalNodes = randomIntBetween(2, 5);
+ IngestProxyActionFilter filter = buildFilter(randomIntBetween(1, totalNodes - 1), totalNodes);
+ Answer<Void> answer = invocationOnMock -> {
+ TransportResponseHandler transportResponseHandler = (TransportResponseHandler) invocationOnMock.getArguments()[3];
+ transportResponseHandler.handleResponse(new IndexResponse());
+ return null;
+ };
+ doAnswer(answer).when(transportService).sendRequest(any(DiscoveryNode.class), any(String.class), any(TransportRequest.class), any(TransportResponseHandler.class));
+
+ IndexRequest indexRequest = new IndexRequest().setPipeline("_id");
+ filter.apply(task, IndexAction.NAME, indexRequest, actionListener, actionFilterChain);
+
+ verify(transportService).sendRequest(argThat(new IngestNodeMatcher()), eq(IndexAction.NAME), same(indexRequest), any(TransportResponseHandler.class));
+ verifyZeroInteractions(actionFilterChain);
+ verify(actionListener).onResponse(any(IndexResponse.class));
+ verify(actionListener, never()).onFailure(any(TransportException.class));
+ }
+
+ @SuppressWarnings("unchecked")
+ public void testApplyBulkRedirect() {
+ Task task = mock(Task.class);
+ ActionListener actionListener = mock(ActionListener.class);
+ ActionFilterChain actionFilterChain = mock(ActionFilterChain.class);
+ int totalNodes = randomIntBetween(2, 5);
+ IngestProxyActionFilter filter = buildFilter(randomIntBetween(1, totalNodes - 1), totalNodes);
+ Answer<Void> answer = invocationOnMock -> {
+ TransportResponseHandler transportResponseHandler = (TransportResponseHandler) invocationOnMock.getArguments()[3];
+ transportResponseHandler.handleResponse(new BulkResponse(null, -1));
+ return null;
+ };
+ doAnswer(answer).when(transportService).sendRequest(any(DiscoveryNode.class), any(String.class), any(TransportRequest.class), any(TransportResponseHandler.class));
+
+ BulkRequest bulkRequest = new BulkRequest();
+ bulkRequest.add(new IndexRequest().setPipeline("_id"));
+ int numNoPipelineRequests = randomIntBetween(0, 10);
+ for (int i = 0; i < numNoPipelineRequests; i++) {
+ bulkRequest.add(new IndexRequest());
+ }
+ filter.apply(task, BulkAction.NAME, bulkRequest, actionListener, actionFilterChain);
+
+ verify(transportService).sendRequest(argThat(new IngestNodeMatcher()), eq(BulkAction.NAME), same(bulkRequest), any(TransportResponseHandler.class));
+ verifyZeroInteractions(actionFilterChain);
+ verify(actionListener).onResponse(any(BulkResponse.class));
+ verify(actionListener, never()).onFailure(any(TransportException.class));
+ }
+
+ @SuppressWarnings("unchecked")
+ public void testApplyFailures() {
+ Task task = mock(Task.class);
+ ActionListener actionListener = mock(ActionListener.class);
+ ActionFilterChain actionFilterChain = mock(ActionFilterChain.class);
+ int totalNodes = randomIntBetween(2, 5);
+ IngestProxyActionFilter filter = buildFilter(randomIntBetween(1, totalNodes - 1), totalNodes);
+ Answer<Void> answer = invocationOnMock -> {
+ TransportResponseHandler transportResponseHandler = (TransportResponseHandler) invocationOnMock.getArguments()[3];
+ transportResponseHandler.handleException(new TransportException(new IllegalArgumentException()));
+ return null;
+ };
+ doAnswer(answer).when(transportService).sendRequest(any(DiscoveryNode.class), any(String.class), any(TransportRequest.class), any(TransportResponseHandler.class));
+
+ String action;
+ ActionRequest request;
+ if (randomBoolean()) {
+ action = IndexAction.NAME;
+ request = new IndexRequest().setPipeline("_id");
+ } else {
+ action = BulkAction.NAME;
+ request = new BulkRequest().add(new IndexRequest().setPipeline("_id"));
+ }
+
+ filter.apply(task, action, request, actionListener, actionFilterChain);
+
+ verify(transportService).sendRequest(argThat(new IngestNodeMatcher()), eq(action), same(request), any(TransportResponseHandler.class));
+ verifyZeroInteractions(actionFilterChain);
+ verify(actionListener).onFailure(any(TransportException.class));
+ verify(actionListener, never()).onResponse(any(TransportResponse.class));
+ }
+
+ private static class IngestNodeMatcher extends CustomTypeSafeMatcher<DiscoveryNode> {
+ private IngestNodeMatcher() {
+ super("discovery node should be an ingest node");
+ }
+
+ @Override
+ protected boolean matchesSafely(DiscoveryNode node) {
+ return node.isIngestNode();
+ }
+ }
+}
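
Aside: IngestNodeMatcher above is an instance of Hamcrest's CustomTypeSafeMatcher, which gives verify(...) a null-safe predicate with a readable mismatch description. The same pattern in miniature:

    import org.hamcrest.CustomTypeSafeMatcher;

    public class EvenNumberMatcher extends CustomTypeSafeMatcher<Integer> {
        public EvenNumberMatcher() {
            super("an even number"); // used in the mismatch description
        }

        @Override
        protected boolean matchesSafely(Integer n) {
            return n % 2 == 0; // null handling is done by the base class
        }
    }

It is used as argThat(new EvenNumberMatcher()) inside a Mockito verify call, exactly as the tests above use argThat(new IngestNodeMatcher()).
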
diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java
new file mode 100644
index 0000000000..882fca7215
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.RandomDocumentPicks;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.IOException;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.instanceOf;
+
+public class SimulateDocumentSimpleResultTests extends ESTestCase {
+
+ public void testSerialization() throws IOException {
+ boolean isFailure = randomBoolean();
+ SimulateDocumentBaseResult simulateDocumentBaseResult;
+ if (isFailure) {
+ simulateDocumentBaseResult = new SimulateDocumentBaseResult(new IllegalArgumentException("test"));
+ } else {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ simulateDocumentBaseResult = new SimulateDocumentBaseResult(ingestDocument);
+ }
+
+ BytesStreamOutput out = new BytesStreamOutput();
+ simulateDocumentBaseResult.writeTo(out);
+ StreamInput streamInput = StreamInput.wrap(out.bytes());
+ SimulateDocumentBaseResult otherSimulateDocumentBaseResult = SimulateDocumentBaseResult.readSimulateDocumentSimpleResult(streamInput);
+
+ assertThat(otherSimulateDocumentBaseResult.getIngestDocument(), equalTo(simulateDocumentBaseResult.getIngestDocument()));
+ if (isFailure) {
+ assertThat(otherSimulateDocumentBaseResult.getFailure(), instanceOf(IllegalArgumentException.class));
+ IllegalArgumentException e = (IllegalArgumentException) otherSimulateDocumentBaseResult.getFailure();
+ assertThat(e.getMessage(), equalTo("test"));
+ }
+ }
+}
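
Aside: this test follows the usual write/read round-trip pattern for stream-serializable classes. A minimal sketch with the same stream helpers the test uses:

    import org.elasticsearch.common.io.stream.BytesStreamOutput;
    import org.elasticsearch.common.io.stream.StreamInput;

    public class RoundTripSketch {
        public static void main(String[] args) throws Exception {
            BytesStreamOutput out = new BytesStreamOutput();
            out.writeString("hello");
            out.writeVInt(42);

            // Wrap the written bytes and read them back in the same order.
            StreamInput in = StreamInput.wrap(out.bytes());
            System.out.println(in.readString()); // hello
            System.out.println(in.readVInt());   // 42
        }
    }
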
diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java
new file mode 100644
index 0000000000..d58b9bf850
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java
@@ -0,0 +1,206 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.ingest.RandomDocumentPicks;
+import org.elasticsearch.ingest.TestProcessor;
+import org.elasticsearch.ingest.core.CompoundProcessor;
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.core.Pipeline;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.junit.After;
+import org.junit.Before;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.nullValue;
+import static org.hamcrest.Matchers.sameInstance;
+
+public class SimulateExecutionServiceTests extends ESTestCase {
+
+ private ThreadPool threadPool;
+ private SimulateExecutionService executionService;
+ private Pipeline pipeline;
+ private Processor processor;
+ private IngestDocument ingestDocument;
+
+ @Before
+ public void setup() {
+ threadPool = new ThreadPool(
+ Settings.builder()
+ .put("name", getClass().getName())
+ .build()
+ );
+ executionService = new SimulateExecutionService(threadPool);
+ processor = new TestProcessor("id", "mock", ingestDocument -> {});
+ pipeline = new Pipeline("_id", "_description", new CompoundProcessor(processor, processor));
+ ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ }
+
+ @After
+ public void destroy() {
+ threadPool.shutdown();
+ }
+
+ public void testExecuteVerboseDocumentSimple() throws Exception {
+ List<SimulateProcessorResult> processorResultList = new ArrayList<>();
+ executionService.executeVerboseDocument(processor, ingestDocument, processorResultList);
+ SimulateProcessorResult result = new SimulateProcessorResult("id", ingestDocument);
+ assertThat(processorResultList.size(), equalTo(1));
+ assertThat(processorResultList.get(0).getProcessorTag(), equalTo(result.getProcessorTag()));
+ assertThat(processorResultList.get(0).getIngestDocument(), equalTo(result.getIngestDocument()));
+ assertThat(processorResultList.get(0).getFailure(), nullValue());
+ }
+
+ public void testExecuteVerboseDocumentSimpleException() throws Exception {
+ RuntimeException exception = new RuntimeException("mock_exception");
+ TestProcessor processor = new TestProcessor("id", "mock", ingestDocument -> { throw exception; });
+ List<SimulateProcessorResult> processorResultList = new ArrayList<>();
+ try {
+ executionService.executeVerboseDocument(processor, ingestDocument, processorResultList);
+ fail("should throw exception");
+ } catch (RuntimeException e) {
+ assertThat(e.getMessage(), equalTo("mock_exception"));
+ }
+ SimulateProcessorResult result = new SimulateProcessorResult("id", exception);
+ assertThat(processorResultList.size(), equalTo(1));
+ assertThat(processorResultList.get(0).getProcessorTag(), equalTo(result.getProcessorTag()));
+ assertThat(processorResultList.get(0).getFailure(), equalTo(result.getFailure()));
+ }
+
+ public void testExecuteVerboseDocumentCompoundSuccess() throws Exception {
+ TestProcessor processor1 = new TestProcessor("p1", "mock", ingestDocument -> { });
+ TestProcessor processor2 = new TestProcessor("p2", "mock", ingestDocument -> { });
+
+ Processor compoundProcessor = new CompoundProcessor(processor1, processor2);
+ List<SimulateProcessorResult> processorResultList = new ArrayList<>();
+ executionService.executeVerboseDocument(compoundProcessor, ingestDocument, processorResultList);
+ assertThat(processor1.getInvokedCounter(), equalTo(1));
+ assertThat(processor2.getInvokedCounter(), equalTo(1));
+ assertThat(processorResultList.size(), equalTo(2));
+ assertThat(processorResultList.get(0).getProcessorTag(), equalTo("p1"));
+ assertThat(processorResultList.get(0).getIngestDocument(), equalTo(ingestDocument));
+ assertThat(processorResultList.get(0).getFailure(), nullValue());
+ assertThat(processorResultList.get(1).getProcessorTag(), equalTo("p2"));
+ assertThat(processorResultList.get(1).getIngestDocument(), equalTo(ingestDocument));
+ assertThat(processorResultList.get(1).getFailure(), nullValue());
+ }
+
+ public void testExecuteVerboseDocumentCompoundOnFailure() throws Exception {
+ TestProcessor processor1 = new TestProcessor("p1", "mock", ingestDocument -> { });
+ TestProcessor processor2 = new TestProcessor("p2", "mock", ingestDocument -> { throw new RuntimeException("p2_exception"); });
+ TestProcessor onFailureProcessor1 = new TestProcessor("fail_p1", "mock", ingestDocument -> { });
+ TestProcessor onFailureProcessor2 = new TestProcessor("fail_p2", "mock", ingestDocument -> { throw new RuntimeException("fail_p2_exception"); });
+ TestProcessor onFailureProcessor3 = new TestProcessor("fail_p3", "mock", ingestDocument -> { });
+ CompoundProcessor onFailureCompoundProcessor = new CompoundProcessor(Collections.singletonList(onFailureProcessor2), Collections.singletonList(onFailureProcessor3));
+
+ Processor compoundProcessor = new CompoundProcessor(Arrays.asList(processor1, processor2), Arrays.asList(onFailureProcessor1, onFailureCompoundProcessor));
+ List<SimulateProcessorResult> processorResultList = new ArrayList<>();
+ executionService.executeVerboseDocument(compoundProcessor, ingestDocument, processorResultList);
+ assertThat(processor1.getInvokedCounter(), equalTo(1));
+ assertThat(processor2.getInvokedCounter(), equalTo(1));
+ assertThat(onFailureProcessor1.getInvokedCounter(), equalTo(1));
+ assertThat(onFailureProcessor2.getInvokedCounter(), equalTo(1));
+ assertThat(onFailureProcessor3.getInvokedCounter(), equalTo(1));
+ assertThat(processorResultList.size(), equalTo(5));
+ assertThat(processorResultList.get(0).getProcessorTag(), equalTo("p1"));
+ assertThat(processorResultList.get(1).getProcessorTag(), equalTo("p2"));
+ assertThat(processorResultList.get(2).getProcessorTag(), equalTo("fail_p1"));
+ assertThat(processorResultList.get(3).getProcessorTag(), equalTo("fail_p2"));
+ assertThat(processorResultList.get(4).getProcessorTag(), equalTo("fail_p3"));
+ }
+
+ public void testExecuteVerboseItem() throws Exception {
+ TestProcessor processor = new TestProcessor("test-id", "mock", ingestDocument -> {});
+ Pipeline pipeline = new Pipeline("_id", "_description", new CompoundProcessor(processor, processor));
+ SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, true);
+ assertThat(processor.getInvokedCounter(), equalTo(2));
+ assertThat(actualItemResponse, instanceOf(SimulateDocumentVerboseResult.class));
+ SimulateDocumentVerboseResult simulateDocumentVerboseResult = (SimulateDocumentVerboseResult) actualItemResponse;
+ assertThat(simulateDocumentVerboseResult.getProcessorResults().size(), equalTo(2));
+ assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getProcessorTag(), equalTo("test-id"));
+ assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument(), not(sameInstance(ingestDocument)));
+ assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument(), equalTo(ingestDocument));
+ assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument().getSourceAndMetadata(), not(sameInstance(ingestDocument.getSourceAndMetadata())));
+
+ assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getFailure(), nullValue());
+ assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getProcessorTag(), equalTo("test-id"));
+ assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument(), not(sameInstance(ingestDocument)));
+ assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument(), equalTo(ingestDocument));
+ assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument().getSourceAndMetadata(), not(sameInstance(ingestDocument.getSourceAndMetadata())));
+ assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument().getSourceAndMetadata(),
+ not(sameInstance(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument().getSourceAndMetadata())));
+ assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getFailure(), nullValue());
+ }
+
+ public void testExecuteItem() throws Exception {
+ TestProcessor processor = new TestProcessor("processor_0", "mock", ingestDocument -> {});
+ Pipeline pipeline = new Pipeline("_id", "_description", new CompoundProcessor(processor, processor));
+ SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, false);
+ assertThat(processor.getInvokedCounter(), equalTo(2));
+ assertThat(actualItemResponse, instanceOf(SimulateDocumentBaseResult.class));
+ SimulateDocumentBaseResult simulateDocumentBaseResult = (SimulateDocumentBaseResult) actualItemResponse;
+ assertThat(simulateDocumentBaseResult.getIngestDocument(), equalTo(ingestDocument));
+ assertThat(simulateDocumentBaseResult.getFailure(), nullValue());
+ }
+
+ public void testExecuteVerboseItemWithFailure() throws Exception {
+ TestProcessor processor1 = new TestProcessor("processor_0", "mock", ingestDocument -> { throw new RuntimeException("processor failed"); });
+ TestProcessor processor2 = new TestProcessor("processor_1", "mock", ingestDocument -> {});
+ Pipeline pipeline = new Pipeline("_id", "_description", new CompoundProcessor(Collections.singletonList(processor1), Collections.singletonList(processor2)));
+ SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, true);
+ assertThat(processor1.getInvokedCounter(), equalTo(1));
+ assertThat(processor2.getInvokedCounter(), equalTo(1));
+ assertThat(actualItemResponse, instanceOf(SimulateDocumentVerboseResult.class));
+ SimulateDocumentVerboseResult simulateDocumentVerboseResult = (SimulateDocumentVerboseResult) actualItemResponse;
+ assertThat(simulateDocumentVerboseResult.getProcessorResults().size(), equalTo(2));
+ assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getProcessorTag(), equalTo("processor_0"));
+ assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument(), nullValue());
+ assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getFailure(), instanceOf(RuntimeException.class));
+ RuntimeException runtimeException = (RuntimeException) simulateDocumentVerboseResult.getProcessorResults().get(0).getFailure();
+ assertThat(runtimeException.getMessage(), equalTo("processor failed"));
+ assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getProcessorTag(), equalTo("processor_1"));
+ assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument(), not(sameInstance(ingestDocument)));
+ assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument(), equalTo(ingestDocument));
+ assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getFailure(), nullValue());
+ }
+
+ public void testExecuteItemWithFailure() throws Exception {
+ TestProcessor processor = new TestProcessor(ingestDocument -> { throw new RuntimeException("processor failed"); });
+ Pipeline pipeline = new Pipeline("_id", "_description", new CompoundProcessor(processor, processor));
+ SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, false);
+ assertThat(processor.getInvokedCounter(), equalTo(1));
+ assertThat(actualItemResponse, instanceOf(SimulateDocumentBaseResult.class));
+ SimulateDocumentBaseResult simulateDocumentBaseResult = (SimulateDocumentBaseResult) actualItemResponse;
+ assertThat(simulateDocumentBaseResult.getIngestDocument(), nullValue());
+ assertThat(simulateDocumentBaseResult.getFailure(), instanceOf(RuntimeException.class));
+ RuntimeException runtimeException = (RuntimeException) simulateDocumentBaseResult.getFailure();
+ assertThat(runtimeException.getMessage(), equalTo("processor failed"));
+ }
+}
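
Aside: the verbose failure tests above hinge on CompoundProcessor's on-failure semantics: when a processor in the main chain throws, the on-failure chain runs instead of the exception escaping. A construction-only sketch with the same TestProcessor helper (execution is omitted because building an IngestDocument by hand is beside the point here):

    import java.util.Arrays;
    import java.util.Collections;
    import org.elasticsearch.ingest.TestProcessor;
    import org.elasticsearch.ingest.core.CompoundProcessor;

    public class CompoundProcessorSketch {
        public static void main(String[] args) {
            TestProcessor ok = new TestProcessor("p1", "mock", doc -> {});
            TestProcessor failing = new TestProcessor("p2", "mock", doc -> {
                throw new RuntimeException("boom");
            });
            TestProcessor recovery = new TestProcessor("fail_p1", "mock", doc -> {});

            // The main chain runs p1, then p2; when p2 throws, the on-failure
            // chain (fail_p1) executes instead of the exception propagating.
            CompoundProcessor compound = new CompoundProcessor(
                    Arrays.asList(ok, failing),
                    Collections.singletonList(recovery));
        }
    }
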
diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java
new file mode 100644
index 0000000000..c0e7d6921a
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java
@@ -0,0 +1,181 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.ingest.PipelineStore;
+import org.elasticsearch.ingest.TestProcessor;
+import org.elasticsearch.ingest.core.CompoundProcessor;
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.core.Pipeline;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.Before;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import static org.elasticsearch.action.ingest.SimulatePipelineRequest.Fields;
+import static org.elasticsearch.ingest.core.IngestDocument.MetaData.ID;
+import static org.elasticsearch.ingest.core.IngestDocument.MetaData.INDEX;
+import static org.elasticsearch.ingest.core.IngestDocument.MetaData.TYPE;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.nullValue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class SimulatePipelineRequestParsingTests extends ESTestCase {
+
+ private PipelineStore store;
+
+ @Before
+ public void init() throws IOException {
+ TestProcessor processor = new TestProcessor(ingestDocument -> {});
+ CompoundProcessor pipelineCompoundProcessor = new CompoundProcessor(processor);
+ Pipeline pipeline = new Pipeline(SimulatePipelineRequest.SIMULATED_PIPELINE_ID, null, pipelineCompoundProcessor);
+ Map<String, Processor.Factory> processorRegistry = new HashMap<>();
+ processorRegistry.put("mock_processor", mock(Processor.Factory.class));
+ store = mock(PipelineStore.class);
+ when(store.get(SimulatePipelineRequest.SIMULATED_PIPELINE_ID)).thenReturn(pipeline);
+ when(store.getProcessorFactoryRegistry()).thenReturn(processorRegistry);
+ }
+
+ public void testParseUsingPipelineStore() throws Exception {
+ int numDocs = randomIntBetween(1, 10);
+
+ Map<String, Object> requestContent = new HashMap<>();
+ List<Map<String, Object>> docs = new ArrayList<>();
+ List<Map<String, Object>> expectedDocs = new ArrayList<>();
+ requestContent.put(Fields.DOCS, docs);
+ for (int i = 0; i < numDocs; i++) {
+ Map<String, Object> doc = new HashMap<>();
+ String index = randomAsciiOfLengthBetween(1, 10);
+ String type = randomAsciiOfLengthBetween(1, 10);
+ String id = randomAsciiOfLengthBetween(1, 10);
+ doc.put(INDEX.getFieldName(), index);
+ doc.put(TYPE.getFieldName(), type);
+ doc.put(ID.getFieldName(), id);
+ String fieldName = randomAsciiOfLengthBetween(1, 10);
+ String fieldValue = randomAsciiOfLengthBetween(1, 10);
+ doc.put(Fields.SOURCE, Collections.singletonMap(fieldName, fieldValue));
+ docs.add(doc);
+ Map<String, Object> expectedDoc = new HashMap<>();
+ expectedDoc.put(INDEX.getFieldName(), index);
+ expectedDoc.put(TYPE.getFieldName(), type);
+ expectedDoc.put(ID.getFieldName(), id);
+ expectedDoc.put(Fields.SOURCE, Collections.singletonMap(fieldName, fieldValue));
+ expectedDocs.add(expectedDoc);
+ }
+
+ SimulatePipelineRequest.Parsed actualRequest = SimulatePipelineRequest.parseWithPipelineId(SimulatePipelineRequest.SIMULATED_PIPELINE_ID, requestContent, false, store);
+ assertThat(actualRequest.isVerbose(), equalTo(false));
+ assertThat(actualRequest.getDocuments().size(), equalTo(numDocs));
+ Iterator<Map<String, Object>> expectedDocsIterator = expectedDocs.iterator();
+ for (IngestDocument ingestDocument : actualRequest.getDocuments()) {
+ Map<String, Object> expectedDocument = expectedDocsIterator.next();
+ Map<IngestDocument.MetaData, String> metadataMap = ingestDocument.extractMetadata();
+ assertThat(metadataMap.get(INDEX), equalTo(expectedDocument.get(INDEX.getFieldName())));
+ assertThat(metadataMap.get(TYPE), equalTo(expectedDocument.get(TYPE.getFieldName())));
+ assertThat(metadataMap.get(ID), equalTo(expectedDocument.get(ID.getFieldName())));
+ assertThat(ingestDocument.getSourceAndMetadata(), equalTo(expectedDocument.get(Fields.SOURCE)));
+ }
+
+ assertThat(actualRequest.getPipeline().getId(), equalTo(SimulatePipelineRequest.SIMULATED_PIPELINE_ID));
+ assertThat(actualRequest.getPipeline().getDescription(), nullValue());
+ assertThat(actualRequest.getPipeline().getProcessors().size(), equalTo(1));
+ }
+
+ public void testParseWithProvidedPipeline() throws Exception {
+ int numDocs = randomIntBetween(1, 10);
+
+ Map<String, Object> requestContent = new HashMap<>();
+ List<Map<String, Object>> docs = new ArrayList<>();
+ List<Map<String, Object>> expectedDocs = new ArrayList<>();
+ requestContent.put(Fields.DOCS, docs);
+ for (int i = 0; i < numDocs; i++) {
+ Map<String, Object> doc = new HashMap<>();
+ String index = randomAsciiOfLengthBetween(1, 10);
+ String type = randomAsciiOfLengthBetween(1, 10);
+ String id = randomAsciiOfLengthBetween(1, 10);
+ doc.put(INDEX.getFieldName(), index);
+ doc.put(TYPE.getFieldName(), type);
+ doc.put(ID.getFieldName(), id);
+ String fieldName = randomAsciiOfLengthBetween(1, 10);
+ String fieldValue = randomAsciiOfLengthBetween(1, 10);
+ doc.put(Fields.SOURCE, Collections.singletonMap(fieldName, fieldValue));
+ docs.add(doc);
+ Map<String, Object> expectedDoc = new HashMap<>();
+ expectedDoc.put(INDEX.getFieldName(), index);
+ expectedDoc.put(TYPE.getFieldName(), type);
+ expectedDoc.put(ID.getFieldName(), id);
+ expectedDoc.put(Fields.SOURCE, Collections.singletonMap(fieldName, fieldValue));
+ expectedDocs.add(expectedDoc);
+ }
+
+ Map<String, Object> pipelineConfig = new HashMap<>();
+ List<Map<String, Object>> processors = new ArrayList<>();
+ int numProcessors = randomIntBetween(1, 10);
+ for (int i = 0; i < numProcessors; i++) {
+ Map<String, Object> processorConfig = new HashMap<>();
+ List<Map<String, Object>> onFailureProcessors = new ArrayList<>();
+ int numOnFailureProcessors = randomIntBetween(0, 1);
+ for (int j = 0; j < numOnFailureProcessors; j++) {
+ onFailureProcessors.add(Collections.singletonMap("mock_processor", Collections.emptyMap()));
+ }
+ if (numOnFailureProcessors > 0) {
+ processorConfig.put("on_failure", onFailureProcessors);
+ }
+ processors.add(Collections.singletonMap("mock_processor", processorConfig));
+ }
+ pipelineConfig.put("processors", processors);
+
+ List<Map<String, Object>> onFailureProcessors = new ArrayList<>();
+ int numOnFailureProcessors = randomIntBetween(0, 1);
+ for (int i = 0; i < numOnFailureProcessors; i++) {
+ onFailureProcessors.add(Collections.singletonMap("mock_processor", Collections.emptyMap()));
+ }
+ if (numOnFailureProcessors > 0) {
+ pipelineConfig.put("on_failure", onFailureProcessors);
+ }
+
+ requestContent.put(Fields.PIPELINE, pipelineConfig);
+
+ SimulatePipelineRequest.Parsed actualRequest = SimulatePipelineRequest.parse(requestContent, false, store);
+ assertThat(actualRequest.isVerbose(), equalTo(false));
+ assertThat(actualRequest.getDocuments().size(), equalTo(numDocs));
+ Iterator<Map<String, Object>> expectedDocsIterator = expectedDocs.iterator();
+ for (IngestDocument ingestDocument : actualRequest.getDocuments()) {
+ Map<String, Object> expectedDocument = expectedDocsIterator.next();
+ Map<IngestDocument.MetaData, String> metadataMap = ingestDocument.extractMetadata();
+ assertThat(metadataMap.get(INDEX), equalTo(expectedDocument.get(INDEX.getFieldName())));
+ assertThat(metadataMap.get(TYPE), equalTo(expectedDocument.get(TYPE.getFieldName())));
+ assertThat(metadataMap.get(ID), equalTo(expectedDocument.get(ID.getFieldName())));
+ assertThat(ingestDocument.getSourceAndMetadata(), equalTo(expectedDocument.get(Fields.SOURCE)));
+ }
+
+ assertThat(actualRequest.getPipeline().getId(), equalTo(SimulatePipelineRequest.SIMULATED_PIPELINE_ID));
+ assertThat(actualRequest.getPipeline().getDescription(), nullValue());
+ assertThat(actualRequest.getPipeline().getProcessors().size(), equalTo(numProcessors));
+ }
+}
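
Aside: stripped of randomization, the request body these parsing tests assemble has a simple shape: a "docs" array of documents plus, when no pipeline id comes in via the URL, an inline "pipeline" definition. A plain-Java sketch of that structure; the underscore metadata keys are the conventional names, assumed here rather than read from the test's constants:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    public class SimulateBodySketch {
        public static void main(String[] args) {
            Map<String, Object> doc = new HashMap<>();
            doc.put("_index", "my-index");
            doc.put("_type", "my-type");
            doc.put("_id", "1");
            doc.put("_source", Collections.singletonMap("field", "value"));

            Map<String, Object> body = new HashMap<>();
            body.put("docs", Collections.singletonList(doc));
            // Inline pipeline definition, mirroring testParseWithProvidedPipeline:
            body.put("pipeline", Collections.singletonMap("processors",
                    Collections.singletonList(
                            Collections.singletonMap("mock_processor", Collections.emptyMap()))));
            System.out.println(body);
        }
    }
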
diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java
new file mode 100644
index 0000000000..12a62f0684
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.RandomDocumentPicks;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.instanceOf;
+import static org.hamcrest.CoreMatchers.nullValue;
+
+public class SimulatePipelineResponseTests extends ESTestCase {
+
+ public void testSerialization() throws IOException {
+ boolean isVerbose = randomBoolean();
+ int numResults = randomIntBetween(1, 10);
+ List<SimulateDocumentResult> results = new ArrayList<>(numResults);
+ for (int i = 0; i < numResults; i++) {
+ boolean isFailure = randomBoolean();
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
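+ // verbose results record one entry per processor, base results only the final document or failure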
+ if (isVerbose) {
+ int numProcessors = randomIntBetween(1, 10);
+ List<SimulateProcessorResult> processorResults = new ArrayList<>(numProcessors);
+ for (int j = 0; j < numProcessors; j++) {
+ String processorTag = randomAsciiOfLengthBetween(1, 10);
+ SimulateProcessorResult processorResult;
+ if (isFailure) {
+ processorResult = new SimulateProcessorResult(processorTag, new IllegalArgumentException("test"));
+ } else {
+ processorResult = new SimulateProcessorResult(processorTag, ingestDocument);
+ }
+ processorResults.add(processorResult);
+ }
+ results.add(new SimulateDocumentVerboseResult(processorResults));
+ } else {
+ SimulateDocumentBaseResult simulateDocumentBaseResult;
+ if (isFailure) {
+ simulateDocumentBaseResult = new SimulateDocumentBaseResult(new IllegalArgumentException("test"));
+ } else {
+ simulateDocumentBaseResult = new SimulateDocumentBaseResult(ingestDocument);
+ }
+ results.add(simulateDocumentBaseResult);
+ }
+ }
+
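+ // round-trip the response through the transport serialization layer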
+ SimulatePipelineResponse response = new SimulatePipelineResponse(randomAsciiOfLengthBetween(1, 10), isVerbose, results);
+ BytesStreamOutput out = new BytesStreamOutput();
+ response.writeTo(out);
+ StreamInput streamInput = StreamInput.wrap(out.bytes());
+ SimulatePipelineResponse otherResponse = new SimulatePipelineResponse();
+ otherResponse.readFrom(streamInput);
+
+ assertThat(otherResponse.getPipelineId(), equalTo(response.getPipelineId()));
+ assertThat(otherResponse.getResults().size(), equalTo(response.getResults().size()));
+
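+ // walk the original and deserialized result lists in lockstep and compare them field by field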
+ Iterator<SimulateDocumentResult> expectedResultIterator = response.getResults().iterator();
+ for (SimulateDocumentResult result : otherResponse.getResults()) {
+ if (isVerbose) {
+ SimulateDocumentVerboseResult expectedSimulateDocumentVerboseResult = (SimulateDocumentVerboseResult) expectedResultIterator.next();
+ assertThat(result, instanceOf(SimulateDocumentVerboseResult.class));
+ SimulateDocumentVerboseResult simulateDocumentVerboseResult = (SimulateDocumentVerboseResult) result;
+ assertThat(simulateDocumentVerboseResult.getProcessorResults().size(), equalTo(expectedSimulateDocumentVerboseResult.getProcessorResults().size()));
+ Iterator<SimulateProcessorResult> expectedProcessorResultIterator = expectedSimulateDocumentVerboseResult.getProcessorResults().iterator();
+ for (SimulateProcessorResult simulateProcessorResult : simulateDocumentVerboseResult.getProcessorResults()) {
+ SimulateProcessorResult expectedProcessorResult = expectedProcessorResultIterator.next();
+ assertThat(simulateProcessorResult.getProcessorTag(), equalTo(expectedProcessorResult.getProcessorTag()));
+ assertThat(simulateProcessorResult.getIngestDocument(), equalTo(expectedProcessorResult.getIngestDocument()));
+ if (expectedProcessorResult.getFailure() == null) {
+ assertThat(simulateProcessorResult.getFailure(), nullValue());
+ } else {
+ assertThat(simulateProcessorResult.getFailure(), instanceOf(IllegalArgumentException.class));
+ IllegalArgumentException e = (IllegalArgumentException) simulateProcessorResult.getFailure();
+ assertThat(e.getMessage(), equalTo("test"));
+ }
+ }
+ } else {
+ SimulateDocumentBaseResult expectedSimulateDocumentBaseResult = (SimulateDocumentBaseResult) expectedResultIterator.next();
+ assertThat(result, instanceOf(SimulateDocumentBaseResult.class));
+ SimulateDocumentBaseResult simulateDocumentBaseResult = (SimulateDocumentBaseResult) result;
+ assertThat(simulateDocumentBaseResult.getIngestDocument(), equalTo(expectedSimulateDocumentBaseResult.getIngestDocument()));
+ if (expectedSimulateDocumentBaseResult.getFailure() == null) {
+ assertThat(simulateDocumentBaseResult.getFailure(), nullValue());
+ } else {
+ assertThat(simulateDocumentBaseResult.getFailure(), instanceOf(IllegalArgumentException.class));
+ IllegalArgumentException e = (IllegalArgumentException) simulateDocumentBaseResult.getFailure();
+ assertThat(e.getMessage(), equalTo("test"));
+ }
+ }
+ }
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java
new file mode 100644
index 0000000000..0885475adc
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.RandomDocumentPicks;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.IOException;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+
+public class SimulateProcessorResultTests extends ESTestCase {
+
+ public void testSerialization() throws IOException {
+ String processorTag = randomAsciiOfLengthBetween(1, 10);
+ boolean isFailure = randomBoolean();
+ SimulateProcessorResult simulateProcessorResult;
+ if (isFailure) {
+ simulateProcessorResult = new SimulateProcessorResult(processorTag, new IllegalArgumentException("test"));
+ } else {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ simulateProcessorResult = new SimulateProcessorResult(processorTag, ingestDocument);
+ }
+
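+ // serialize and deserialize, then check that tag, document and failure survive the round trip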
+ BytesStreamOutput out = new BytesStreamOutput();
+ simulateProcessorResult.writeTo(out);
+ StreamInput streamInput = StreamInput.wrap(out.bytes());
+ SimulateProcessorResult otherSimulateProcessorResult = new SimulateProcessorResult(streamInput);
+ assertThat(otherSimulateProcessorResult.getProcessorTag(), equalTo(simulateProcessorResult.getProcessorTag()));
+ assertThat(otherSimulateProcessorResult.getIngestDocument(), equalTo(simulateProcessorResult.getIngestDocument()));
+ if (isFailure) {
+ assertThat(otherSimulateProcessorResult.getFailure(), instanceOf(IllegalArgumentException.class));
+ IllegalArgumentException e = (IllegalArgumentException) otherSimulateProcessorResult.getFailure();
+ assertThat(e.getMessage(), equalTo("test"));
+ }
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/action/ingest/WritePipelineResponseTests.java b/core/src/test/java/org/elasticsearch/action/ingest/WritePipelineResponseTests.java
new file mode 100644
index 0000000000..8eb3f4ece7
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/action/ingest/WritePipelineResponseTests.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.ingest.core.PipelineFactoryError;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.IOException;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.nullValue;
+
+public class WritePipelineResponseTests extends ESTestCase {
+
+ public void testSerializationWithoutError() throws IOException {
+ boolean isAcknowledged = randomBoolean();
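+ // without a factory error the response carries only the acknowledged flag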
+ WritePipelineResponse response = new WritePipelineResponse(isAcknowledged);
+ BytesStreamOutput out = new BytesStreamOutput();
+ response.writeTo(out);
+ StreamInput streamInput = StreamInput.wrap(out.bytes());
+ WritePipelineResponse otherResponse = new WritePipelineResponse();
+ otherResponse.readFrom(streamInput);
+
+ assertThat(otherResponse.isAcknowledged(), equalTo(response.isAcknowledged()));
+ }
+
+ public void testSerializationWithError() throws IOException {
+ PipelineFactoryError error = new PipelineFactoryError("error");
+ WritePipelineResponse response = new WritePipelineResponse(error);
+ BytesStreamOutput out = new BytesStreamOutput();
+ response.writeTo(out);
+ StreamInput streamInput = StreamInput.wrap(out.bytes());
+ WritePipelineResponse otherResponse = new WritePipelineResponse();
+ otherResponse.readFrom(streamInput);
+
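+ // every field of the factory error must survive the round trip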
+ assertThat(otherResponse.getError().getReason(), equalTo(response.getError().getReason()));
+ assertThat(otherResponse.getError().getProcessorType(), equalTo(response.getError().getProcessorType()));
+ assertThat(otherResponse.getError().getProcessorTag(), equalTo(response.getError().getProcessorTag()));
+ assertThat(otherResponse.getError().getProcessorPropertyName(), equalTo(response.getError().getProcessorPropertyName()));
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java b/core/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java
new file mode 100644
index 0000000000..8d3c812f4c
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.ingest;
+
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.ingest.RandomDocumentPicks;
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.not;
+
+public class WriteableIngestDocumentTests extends ESTestCase {
+
+ public void testEqualsAndHashcode() throws Exception {
+ Map<String, Object> sourceAndMetadata = RandomDocumentPicks.randomSource(random());
+ int numFields = randomIntBetween(1, IngestDocument.MetaData.values().length);
+ for (int i = 0; i < numFields; i++) {
+ sourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10));
+ }
+ Map<String, String> ingestMetadata = new HashMap<>();
+ numFields = randomIntBetween(1, 5);
+ for (int i = 0; i < numFields; i++) {
+ ingestMetadata.put(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10));
+ }
+ WriteableIngestDocument ingestDocument = new WriteableIngestDocument(new IngestDocument(sourceAndMetadata, ingestMetadata));
+
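+ // randomly mutate the source/metadata and the ingest metadata, tracking whether anything actually changed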
+ boolean changed = false;
+ Map<String, Object> otherSourceAndMetadata;
+ if (randomBoolean()) {
+ otherSourceAndMetadata = RandomDocumentPicks.randomSource(random());
+ changed = true;
+ } else {
+ otherSourceAndMetadata = new HashMap<>(sourceAndMetadata);
+ }
+ if (randomBoolean()) {
+ numFields = randomIntBetween(1, IngestDocument.MetaData.values().length);
+ for (int i = 0; i < numFields; i++) {
+ otherSourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10));
+ }
+ changed = true;
+ }
+
+ Map<String, String> otherIngestMetadata;
+ if (randomBoolean()) {
+ otherIngestMetadata = new HashMap<>();
+ numFields = randomIntBetween(1, 5);
+ for (int i = 0; i < numFields; i++) {
+ otherIngestMetadata.put(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10));
+ }
+ changed = true;
+ } else {
+ otherIngestMetadata = Collections.unmodifiableMap(ingestMetadata);
+ }
+
+ WriteableIngestDocument otherIngestDocument = new WriteableIngestDocument(new IngestDocument(otherSourceAndMetadata, otherIngestMetadata));
+ if (changed) {
+ assertThat(ingestDocument, not(equalTo(otherIngestDocument)));
+ assertThat(otherIngestDocument, not(equalTo(ingestDocument)));
+ } else {
+ assertThat(ingestDocument, equalTo(otherIngestDocument));
+ assertThat(otherIngestDocument, equalTo(ingestDocument));
+ assertThat(ingestDocument.hashCode(), equalTo(otherIngestDocument.hashCode()));
+ WriteableIngestDocument thirdIngestDocument = new WriteableIngestDocument(new IngestDocument(Collections.unmodifiableMap(sourceAndMetadata), Collections.unmodifiableMap(ingestMetadata)));
+ assertThat(thirdIngestDocument, equalTo(ingestDocument));
+ assertThat(ingestDocument, equalTo(thirdIngestDocument));
+ assertThat(ingestDocument.hashCode(), equalTo(thirdIngestDocument.hashCode()));
+ }
+ }
+
+ public void testSerialization() throws IOException {
+ Map<String, Object> sourceAndMetadata = RandomDocumentPicks.randomSource(random());
+ int numFields = randomIntBetween(1, IngestDocument.MetaData.values().length);
+ for (int i = 0; i < numFields; i++) {
+ sourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10));
+ }
+ Map<String, String> ingestMetadata = new HashMap<>();
+ numFields = randomIntBetween(1, 5);
+ for (int i = 0; i < numFields; i++) {
+ ingestMetadata.put(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10));
+ }
+ WriteableIngestDocument writeableIngestDocument = new WriteableIngestDocument(new IngestDocument(sourceAndMetadata, ingestMetadata));
+
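+ // round trip through the stream layer; the comparison relies on WriteableIngestDocument equality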
+ BytesStreamOutput out = new BytesStreamOutput();
+ writeableIngestDocument.writeTo(out);
+ StreamInput streamInput = StreamInput.wrap(out.bytes());
+ WriteableIngestDocument otherWriteableIngestDocument = new WriteableIngestDocument(streamInput);
+ assertThat(otherWriteableIngestDocument, equalTo(writeableIngestDocument));
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/action/search/SearchRequestBuilderTests.java b/core/src/test/java/org/elasticsearch/action/search/SearchRequestBuilderTests.java
index fc6453318c..9cef4d46e8 100644
--- a/core/src/test/java/org/elasticsearch/action/search/SearchRequestBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/action/search/SearchRequestBuilderTests.java
@@ -22,6 +22,7 @@ package org.elasticsearch.action.search;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.test.ESTestCase;
@@ -38,7 +39,7 @@ public class SearchRequestBuilderTests extends ESTestCase {
//this client will not be hit by any request, but it needs to be a non null proper client
//that is why we create it but we don't add any transport address to it
Settings settings = Settings.builder()
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
client = TransportClient.builder().settings(settings).build();
}
diff --git a/core/src/test/java/org/elasticsearch/action/support/AutoCreateIndexTests.java b/core/src/test/java/org/elasticsearch/action/support/AutoCreateIndexTests.java
new file mode 100644
index 0000000000..40359e1c74
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/action/support/AutoCreateIndexTests.java
@@ -0,0 +1,147 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.support;
+
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.cluster.metadata.MetaData;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.test.ESTestCase;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+
+public class AutoCreateIndexTests extends ESTestCase {
+
+ public void testParseFailed() {
+ try {
+ new AutoCreateIndex(Settings.builder().put("action.auto_create_index", ",,,").build(), new IndexNameExpressionResolver(Settings.EMPTY));
+ fail("initialization should have failed");
+ } catch (IllegalArgumentException ex) {
+ assertEquals("Can't parse [,,,] for setting [action.auto_create_index] must be either [true, false, or a comma separated list of index patterns]", ex.getMessage());
+ }
+ }
+
+ public void testParseFailedMissingIndex() {
+ String prefix = randomFrom("+", "-");
+ Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), prefix).build();
+ try {
+ new AutoCreateIndex(settings, new IndexNameExpressionResolver(settings));
+ fail("initialization should have failed");
+ } catch(IllegalArgumentException ex) {
+ assertEquals("Can't parse [" + prefix + "] for setting [action.auto_create_index] must contain an index name after [" + prefix + "]", ex.getMessage());
+ }
+ }
+
+ public void testAutoCreationDisabled() {
+ Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), false).build();
+ AutoCreateIndex autoCreateIndex = new AutoCreateIndex(settings, new IndexNameExpressionResolver(Settings.EMPTY));
+ assertThat(autoCreateIndex.shouldAutoCreate(randomAsciiOfLengthBetween(1, 10), buildClusterState()), equalTo(false));
+ }
+
+ public void testAutoCreationEnabled() {
+ Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), true).build();
+ AutoCreateIndex autoCreateIndex = new AutoCreateIndex(settings, new IndexNameExpressionResolver(Settings.EMPTY));
+ assertThat(autoCreateIndex.shouldAutoCreate(randomAsciiOfLengthBetween(1, 10), buildClusterState()), equalTo(true));
+ }
+
+ public void testDefaultAutoCreation() {
+ AutoCreateIndex autoCreateIndex = new AutoCreateIndex(Settings.EMPTY, new IndexNameExpressionResolver(Settings.EMPTY));
+ assertThat(autoCreateIndex.shouldAutoCreate(randomAsciiOfLengthBetween(1, 10), buildClusterState()), equalTo(true));
+ }
+
+ public void testExistingIndex() {
+ Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), randomFrom(true, false, randomAsciiOfLengthBetween(7, 10))).build();
+ AutoCreateIndex autoCreateIndex = new AutoCreateIndex(settings, new IndexNameExpressionResolver(Settings.EMPTY));
+ assertThat(autoCreateIndex.shouldAutoCreate(randomFrom("index1", "index2", "index3"), buildClusterState("index1", "index2", "index3")), equalTo(false));
+ }
+
+ public void testDynamicMappingDisabled() {
+ Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), randomFrom(true, randomAsciiOfLengthBetween(1, 10)))
+ .put(MapperService.INDEX_MAPPER_DYNAMIC_SETTING.getKey(), false).build();
+ AutoCreateIndex autoCreateIndex = new AutoCreateIndex(settings, new IndexNameExpressionResolver(Settings.EMPTY));
+ assertThat(autoCreateIndex.shouldAutoCreate(randomAsciiOfLengthBetween(1, 10), buildClusterState()), equalTo(false));
+ }
+
+ public void testAutoCreationPatternEnabled() {
+ Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), randomFrom("+index*", "index*")).build();
+ AutoCreateIndex autoCreateIndex = new AutoCreateIndex(settings, new IndexNameExpressionResolver(settings));
+ ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder()).build();
+ assertThat(autoCreateIndex.shouldAutoCreate("index" + randomAsciiOfLengthBetween(1, 5), clusterState), equalTo(true));
+ assertThat(autoCreateIndex.shouldAutoCreate("does_not_match" + randomAsciiOfLengthBetween(1, 5), clusterState), equalTo(false));
+ }
+
+ public void testAutoCreationPatternDisabled() {
+ Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), "-index*").build();
+ AutoCreateIndex autoCreateIndex = new AutoCreateIndex(settings, new IndexNameExpressionResolver(settings));
+ ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder()).build();
+ assertThat(autoCreateIndex.shouldAutoCreate("index" + randomAsciiOfLengthBetween(1, 5), clusterState), equalTo(false));
+ //default is false when patterns are specified
+ assertThat(autoCreateIndex.shouldAutoCreate("does_not_match" + randomAsciiOfLengthBetween(1, 5), clusterState), equalTo(false));
+ }
+
+ public void testAutoCreationMultiplePatternsWithWildcards() {
+ Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), randomFrom("+test*,-index*", "test*,-index*")).build();
+ AutoCreateIndex autoCreateIndex = new AutoCreateIndex(settings, new IndexNameExpressionResolver(settings));
+ ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder()).build();
+ assertThat(autoCreateIndex.shouldAutoCreate("index" + randomAsciiOfLengthBetween(1, 5), clusterState), equalTo(false));
+ assertThat(autoCreateIndex.shouldAutoCreate("test" + randomAsciiOfLengthBetween(1, 5), clusterState), equalTo(true));
+ assertThat(autoCreateIndex.shouldAutoCreate("does_not_match" + randomAsciiOfLengthBetween(1, 5), clusterState), equalTo(false));
+ }
+
+ public void testAutoCreationMultiplePatternsNoWildcards() {
+ Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), "+test1,-index1").build();
+ AutoCreateIndex autoCreateIndex = new AutoCreateIndex(settings, new IndexNameExpressionResolver(settings));
+ ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder()).build();
+ assertThat(autoCreateIndex.shouldAutoCreate("test1", clusterState), equalTo(true));
+ assertThat(autoCreateIndex.shouldAutoCreate("index" + randomAsciiOfLengthBetween(1, 5), clusterState), equalTo(false));
+ assertThat(autoCreateIndex.shouldAutoCreate("test" + randomAsciiOfLengthBetween(2, 5), clusterState), equalTo(false));
+ assertThat(autoCreateIndex.shouldAutoCreate("does_not_match" + randomAsciiOfLengthBetween(1, 5), clusterState), equalTo(false));
+ }
+
+ public void testAutoCreationMultipleIndexNames() {
+ Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), "test1,test2").build();
+ AutoCreateIndex autoCreateIndex = new AutoCreateIndex(settings, new IndexNameExpressionResolver(settings));
+ ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder()).build();
+ assertThat(autoCreateIndex.shouldAutoCreate("test1", clusterState), equalTo(true));
+ assertThat(autoCreateIndex.shouldAutoCreate("test2", clusterState), equalTo(true));
+ assertThat(autoCreateIndex.shouldAutoCreate("does_not_match" + randomAsciiOfLengthBetween(1, 5), clusterState), equalTo(false));
+ }
+
+ public void testAutoCreationConflictingPatternsFirstWins() {
+ Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), "+test1,-test1,-test2,+test2").build();
+ AutoCreateIndex autoCreateIndex = new AutoCreateIndex(settings, new IndexNameExpressionResolver(settings));
+ ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder()).build();
+ assertThat(autoCreateIndex.shouldAutoCreate("test1", clusterState), equalTo(true));
+ assertThat(autoCreateIndex.shouldAutoCreate("test2", clusterState), equalTo(false));
+ assertThat(autoCreateIndex.shouldAutoCreate("does_not_match" + randomAsciiOfLengthBetween(1, 5), clusterState), equalTo(false));
+ }
+
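+ // builds a cluster state containing the given indices, each with one shard and one replica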
+ private static ClusterState buildClusterState(String... indices) {
+ MetaData.Builder metaData = MetaData.builder();
+ for (String index : indices) {
+ metaData.put(IndexMetaData.builder(index).settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1));
+ }
+ return ClusterState.builder(ClusterName.DEFAULT).metaData(metaData).build();
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java b/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java
index fed4e1d638..00068c05ef 100644
--- a/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java
+++ b/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java
@@ -220,9 +220,10 @@ public class TransportActionFilterChainTests extends ESTestCase {
RequestTestFilter testFilter = new RequestTestFilter(randomInt(), new RequestCallback() {
@Override
- public void execute(Task task, final String action, final ActionRequest actionRequest, final ActionListener actionListener, final ActionFilterChain actionFilterChain) {
+ public <Request extends ActionRequest<Request>, Response extends ActionResponse> void execute(Task task, String action, Request request,
+ ActionListener<Response> listener, ActionFilterChain<Request, Response> actionFilterChain) {
for (int i = 0; i <= additionalContinueCount; i++) {
- actionFilterChain.proceed(task, action, actionRequest, actionListener);
+ actionFilterChain.proceed(task, action, request, listener);
}
}
});
@@ -276,7 +277,8 @@ public class TransportActionFilterChainTests extends ESTestCase {
ResponseTestFilter testFilter = new ResponseTestFilter(randomInt(), new ResponseCallback() {
@Override
- public void execute(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain) {
+ public <Response extends ActionResponse> void execute(String action, Response response, ActionListener<Response> listener,
+ ActionFilterChain<?, Response> chain) {
for (int i = 0; i <= additionalContinueCount; i++) {
chain.proceed(action, response, listener);
}
@@ -344,17 +346,18 @@ public class TransportActionFilterChainTests extends ESTestCase {
return order;
}
- @SuppressWarnings("unchecked")
@Override
- public void apply(Task task, String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) {
+ public <Request extends ActionRequest<Request>, Response extends ActionResponse> void apply(Task task, String action, Request request,
+ ActionListener<Response> listener, ActionFilterChain<Request, Response> chain) {
this.runs.incrementAndGet();
this.lastActionName = action;
this.executionToken = counter.incrementAndGet();
- this.callback.execute(task, action, actionRequest, actionListener, actionFilterChain);
+ this.callback.execute(task, action, request, listener, chain);
}
@Override
- public void apply(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain) {
+ public <Response extends ActionResponse> void apply(String action, Response response, ActionListener<Response> listener,
+ ActionFilterChain<?, Response> chain) {
chain.proceed(action, response, listener);
}
}
@@ -377,12 +380,14 @@ public class TransportActionFilterChainTests extends ESTestCase {
}
@Override
- public void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) {
+ public <Request extends ActionRequest<Request>, Response extends ActionResponse> void apply(Task task, String action, Request request,
+ ActionListener<Response> listener, ActionFilterChain<Request, Response> chain) {
chain.proceed(task, action, request, listener);
}
@Override
- public void apply(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain) {
+ public <Response extends ActionResponse> void apply(String action, Response response, ActionListener<Response> listener,
+ ActionFilterChain<?, Response> chain) {
this.runs.incrementAndGet();
this.lastActionName = action;
this.executionToken = counter.incrementAndGet();
@@ -393,21 +398,24 @@ public class TransportActionFilterChainTests extends ESTestCase {
private static enum RequestOperation implements RequestCallback {
CONTINUE_PROCESSING {
@Override
- public void execute(Task task, String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) {
- actionFilterChain.proceed(task, action, actionRequest, actionListener);
+ public <Request extends ActionRequest<Request>, Response extends ActionResponse> void execute(Task task, String action, Request request,
+ ActionListener<Response> listener, ActionFilterChain<Request, Response> actionFilterChain) {
+ actionFilterChain.proceed(task, action, request, listener);
}
},
LISTENER_RESPONSE {
@Override
- @SuppressWarnings("unchecked")
- public void execute(Task task, String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) {
- actionListener.onResponse(new TestResponse());
+ @SuppressWarnings("unchecked") // Safe because its all we test with
+ public <Request extends ActionRequest<Request>, Response extends ActionResponse> void execute(Task task, String action, Request request,
+ ActionListener<Response> listener, ActionFilterChain<Request, Response> actionFilterChain) {
+ ((ActionListener<TestResponse>) listener).onResponse(new TestResponse());
}
},
LISTENER_FAILURE {
@Override
- public void execute(Task task, String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) {
- actionListener.onFailure(new ElasticsearchTimeoutException(""));
+ public <Request extends ActionRequest<Request>, Response extends ActionResponse> void execute(Task task, String action, Request request,
+ ActionListener<Response> listener, ActionFilterChain<Request, Response> actionFilterChain) {
+ listener.onFailure(new ElasticsearchTimeoutException(""));
}
}
}
@@ -415,31 +423,36 @@ public class TransportActionFilterChainTests extends ESTestCase {
private static enum ResponseOperation implements ResponseCallback {
CONTINUE_PROCESSING {
@Override
- public void execute(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain) {
+ public <Response extends ActionResponse> void execute(String action, Response response, ActionListener<Response> listener,
+ ActionFilterChain<?, Response> chain) {
chain.proceed(action, response, listener);
}
},
LISTENER_RESPONSE {
@Override
- @SuppressWarnings("unchecked")
- public void execute(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain) {
- listener.onResponse(new TestResponse());
+ @SuppressWarnings("unchecked") // Safe because its all we test with
+ public <Response extends ActionResponse> void execute(String action, Response response, ActionListener<Response> listener,
+ ActionFilterChain<?, Response> chain) {
+ ((ActionListener<TestResponse>) listener).onResponse(new TestResponse());
}
},
LISTENER_FAILURE {
@Override
- public void execute(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain) {
+ public <Response extends ActionResponse> void execute(String action, Response response, ActionListener<Response> listener,
+ ActionFilterChain<?, Response> chain) {
listener.onFailure(new ElasticsearchTimeoutException(""));
}
}
}
private static interface RequestCallback {
- void execute(Task task, String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain);
+ <Request extends ActionRequest<Request>, Response extends ActionResponse> void execute(Task task, String action, Request request,
+ ActionListener<Response> listener, ActionFilterChain<Request, Response> actionFilterChain);
}
private static interface ResponseCallback {
- void execute(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain);
+ <Response extends ActionResponse> void execute(String action, Response response, ActionListener<Response> listener,
+ ActionFilterChain<?, Response> chain);
}
public static class TestRequest extends ActionRequest<TestRequest> {
diff --git a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java
index d94049c036..a408ccc5bf 100644
--- a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java
@@ -47,6 +47,7 @@ import org.elasticsearch.cluster.routing.TestShardRouting;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.ESTestCase;
@@ -198,7 +199,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase {
void setClusterState(TestClusterService clusterService, String index) {
int numberOfNodes = randomIntBetween(3, 5);
DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder();
- IndexRoutingTable.Builder indexRoutingTable = IndexRoutingTable.builder(index);
+ IndexRoutingTable.Builder indexRoutingTable = IndexRoutingTable.builder(new Index(index, "_na_"));
int shardIndex = -1;
for (int i = 0; i < numberOfNodes; i++) {
@@ -206,7 +207,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase {
discoBuilder = discoBuilder.put(node);
int numberOfShards = randomIntBetween(1, 10);
for (int j = 0; j < numberOfShards; j++) {
- final ShardId shardId = new ShardId(index, ++shardIndex);
+ final ShardId shardId = new ShardId(index, "_na_", ++shardIndex);
ShardRouting shard = TestShardRouting.newShardRouting(index, shardId.getId(), node.id(), true, ShardRoutingState.STARTED, 1);
IndexShardRoutingTable.Builder indexShard = new IndexShardRoutingTable.Builder(shardId);
indexShard.addShard(shard);
@@ -241,7 +242,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase {
.addGlobalBlock(new ClusterBlock(1, "test-block", false, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
clusterService.setState(ClusterState.builder(clusterService.state()).blocks(block));
try {
- action.new AsyncAction(request, listener).start();
+ action.new AsyncAction(null, request, listener).start();
fail("expected ClusterBlockException");
} catch (ClusterBlockException expected) {
assertEquals("blocked by: [SERVICE_UNAVAILABLE/1/test-block];", expected.getMessage());
@@ -256,7 +257,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase {
.addIndexBlock(TEST_INDEX, new ClusterBlock(1, "test-block", false, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
clusterService.setState(ClusterState.builder(clusterService.state()).blocks(block));
try {
- action.new AsyncAction(request, listener).start();
+ action.new AsyncAction(null, request, listener).start();
fail("expected ClusterBlockException");
} catch (ClusterBlockException expected) {
assertEquals("blocked by: [SERVICE_UNAVAILABLE/1/test-block];", expected.getMessage());
@@ -267,7 +268,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase {
Request request = new Request(new String[]{TEST_INDEX});
PlainActionFuture<Response> listener = new PlainActionFuture<>();
- action.new AsyncAction(request, listener).start();
+ action.new AsyncAction(null, request, listener).start();
Map<String, List<CapturingTransport.CapturedRequest>> capturedRequests = transport.getCapturedRequestsByTargetNodeAndClear();
ShardsIterator shardIt = clusterService.state().routingTable().allShards(new String[]{TEST_INDEX});
@@ -301,7 +302,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase {
clusterService.setState(ClusterState.builder(clusterService.state()).nodes(builder));
- action.new AsyncAction(request, listener).start();
+ action.new AsyncAction(null, request, listener).start();
Map<String, List<CapturingTransport.CapturedRequest>> capturedRequests = transport.getCapturedRequestsByTargetNodeAndClear();
@@ -388,7 +389,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase {
clusterService.setState(ClusterState.builder(clusterService.state()).nodes(builder));
}
- action.new AsyncAction(request, listener).start();
+ action.new AsyncAction(null, request, listener).start();
Map<String, List<CapturingTransport.CapturedRequest>> capturedRequests = transport.getCapturedRequestsByTargetNodeAndClear();
ShardsIterator shardIt = clusterService.state().getRoutingTable().allShards(new String[]{TEST_INDEX});
diff --git a/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java b/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java
index f20e54050c..6d0ae3d2e7 100644
--- a/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java
+++ b/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java
@@ -60,8 +60,8 @@ public class IndexingMasterFailoverIT extends ESIntegTestCase {
logger.info("--> start 4 nodes, 3 master, 1 data");
final Settings sharedSettings = Settings.builder()
- .put(FaultDetection.SETTING_PING_TIMEOUT, "1s") // for hitting simulated network failures quickly
- .put(FaultDetection.SETTING_PING_RETRIES, "1") // for hitting simulated network failures quickly
+ .put(FaultDetection.PING_TIMEOUT_SETTING.getKey(), "1s") // for hitting simulated network failures quickly
+ .put(FaultDetection.PING_RETRIES_SETTING.getKey(), "1") // for hitting simulated network failures quickly
.put("discovery.zen.join_timeout", "10s") // still long to induce failures but to long so test won't time out
.put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") // <-- for hitting simulated network failures quickly
.put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2)
diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java
index c3084b93eb..7b9fd91a56 100644
--- a/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java
+++ b/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java
@@ -41,6 +41,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.tasks.Task;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.cluster.TestClusterService;
import org.elasticsearch.threadpool.ThreadPool;
@@ -147,7 +148,7 @@ public class BroadcastReplicationTests extends ESTestCase {
if (shardsSucceeded == 1 && randomBoolean()) {
//sometimes add failure (no failure means shard unavailable)
failures = new ReplicationResponse.ShardInfo.Failure[1];
- failures[0] = new ReplicationResponse.ShardInfo.Failure(index, shardRequests.v1().id(), null, new Exception("pretend shard failed"), RestStatus.GATEWAY_TIMEOUT, false);
+ failures[0] = new ReplicationResponse.ShardInfo.Failure(shardRequests.v1(), null, new Exception("pretend shard failed"), RestStatus.GATEWAY_TIMEOUT, false);
failed++;
}
replicationResponse.setShardInfo(new ReplicationResponse.ShardInfo(2, shardsSucceeded, failures));
@@ -171,11 +172,11 @@ public class BroadcastReplicationTests extends ESTestCase {
public void testShardsList() throws InterruptedException, ExecutionException {
final String index = "test";
- final ShardId shardId = new ShardId(index, 0);
+ final ShardId shardId = new ShardId(index, "_na_", 0);
ClusterState clusterState = state(index, randomBoolean(),
randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.UNASSIGNED, ShardRoutingState.UNASSIGNED);
logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
- List<ShardId> shards = broadcastReplicationAction.shards(new DummyBroadcastRequest().indices(shardId.index().name()), clusterState);
+ List<ShardId> shards = broadcastReplicationAction.shards(new DummyBroadcastRequest().indices(shardId.getIndexName()), clusterState);
assertThat(shards.size(), equalTo(1));
assertThat(shards.get(0), equalTo(shardId));
}
@@ -207,7 +208,7 @@ public class BroadcastReplicationTests extends ESTestCase {
}
@Override
- protected void shardExecute(DummyBroadcastRequest request, ShardId shardId, ActionListener<ReplicationResponse> shardActionListener) {
+ protected void shardExecute(Task task, DummyBroadcastRequest request, ShardId shardId, ActionListener<ReplicationResponse> shardActionListener) {
capturedShardRequests.add(new Tuple<>(shardId, shardActionListener));
}
}
diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java b/core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java
index 913d52d5b1..49a5e072e1 100644
--- a/core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java
+++ b/core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java
@@ -74,7 +74,7 @@ public class ClusterStateCreationUtils {
}
}
numberOfNodes = Math.max(2, numberOfNodes); // we need a non-local master to test shard failures
- final ShardId shardId = new ShardId(index, 0);
+ final ShardId shardId = new ShardId(index, "_na_", 0);
DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder();
Set<String> unassignedNodes = new HashSet<>();
for (int i = 0; i < numberOfNodes + 1; i++) {
@@ -131,7 +131,7 @@ public class ClusterStateCreationUtils {
ClusterState.Builder state = ClusterState.builder(new ClusterName("test"));
state.nodes(discoBuilder);
state.metaData(MetaData.builder().put(indexMetaData, false).generateClusterUuidIfNeeded());
- state.routingTable(RoutingTable.builder().add(IndexRoutingTable.builder(index).addIndexShard(indexShardRoutingBuilder.build())).build());
+ state.routingTable(RoutingTable.builder().add(IndexRoutingTable.builder(indexMetaData.getIndex()).addIndexShard(indexShardRoutingBuilder.build())).build());
return state.build();
}
@@ -155,11 +155,11 @@ public class ClusterStateCreationUtils {
ClusterState.Builder state = ClusterState.builder(new ClusterName("test"));
state.nodes(discoBuilder);
state.metaData(MetaData.builder().put(indexMetaData, false).generateClusterUuidIfNeeded());
- IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(index);
+ IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(indexMetaData.getIndex());
for (int i = 0; i < numberOfShards; i++) {
RoutingTable.Builder routing = new RoutingTable.Builder();
routing.addAsNew(indexMetaData);
- final ShardId shardId = new ShardId(index, i);
+ final ShardId shardId = new ShardId(index, "_na_", i);
IndexShardRoutingTable.Builder indexShardRoutingBuilder = new IndexShardRoutingTable.Builder(shardId);
indexShardRoutingBuilder.addShard(TestShardRouting.newShardRouting(index, i, newNode(0).id(), null, null, true, ShardRoutingState.STARTED, 0, null));
indexShardRoutingBuilder.addShard(TestShardRouting.newShardRouting(index, i, newNode(1).id(), null, null, false, ShardRoutingState.STARTED, 0, null));
diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java
index 9fdbdf1cb3..402a454649 100644
--- a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java
@@ -140,7 +140,7 @@ public class TransportReplicationActionTests extends ESTestCase {
ClusterBlocks.Builder block = ClusterBlocks.builder()
.addGlobalBlock(new ClusterBlock(1, "non retryable", false, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
clusterService.setState(ClusterState.builder(clusterService.state()).blocks(block));
- TransportReplicationAction.ReroutePhase reroutePhase = action.new ReroutePhase(request, listener);
+ TransportReplicationAction.ReroutePhase reroutePhase = action.new ReroutePhase(null, request, listener);
reroutePhase.run();
assertListenerThrows("primary phase should fail operation", listener, ClusterBlockException.class);
@@ -148,13 +148,13 @@ public class TransportReplicationActionTests extends ESTestCase {
.addGlobalBlock(new ClusterBlock(1, "retryable", true, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
clusterService.setState(ClusterState.builder(clusterService.state()).blocks(block));
listener = new PlainActionFuture<>();
- reroutePhase = action.new ReroutePhase(new Request().timeout("5ms"), listener);
+ reroutePhase = action.new ReroutePhase(null, new Request().timeout("5ms"), listener);
reroutePhase.run();
assertListenerThrows("failed to timeout on retryable block", listener, ClusterBlockException.class);
listener = new PlainActionFuture<>();
- reroutePhase = action.new ReroutePhase(new Request(), listener);
+ reroutePhase = action.new ReroutePhase(null, new Request(), listener);
reroutePhase.run();
assertFalse("primary phase should wait on retryable block", listener.isDone());
@@ -171,7 +171,7 @@ public class TransportReplicationActionTests extends ESTestCase {
public void testNotStartedPrimary() throws InterruptedException, ExecutionException {
final String index = "test";
- final ShardId shardId = new ShardId(index, 0);
+ final ShardId shardId = new ShardId(index, "_na_", 0);
 // no replicas in order to skip the replication part
clusterService.setState(state(index, true,
randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.UNASSIGNED));
@@ -180,13 +180,13 @@ public class TransportReplicationActionTests extends ESTestCase {
Request request = new Request(shardId).timeout("1ms");
PlainActionFuture<Response> listener = new PlainActionFuture<>();
- TransportReplicationAction.ReroutePhase reroutePhase = action.new ReroutePhase(request, listener);
+ TransportReplicationAction.ReroutePhase reroutePhase = action.new ReroutePhase(null, request, listener);
reroutePhase.run();
assertListenerThrows("unassigned primary didn't cause a timeout", listener, UnavailableShardsException.class);
request = new Request(shardId);
listener = new PlainActionFuture<>();
- reroutePhase = action.new ReroutePhase(request, listener);
+ reroutePhase = action.new ReroutePhase(null, request, listener);
reroutePhase.run();
assertFalse("unassigned primary didn't cause a retry", listener.isDone());
@@ -209,21 +209,21 @@ public class TransportReplicationActionTests extends ESTestCase {
clusterService.setState(state(index, true,
randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.UNASSIGNED));
logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
- Request request = new Request(new ShardId("unknown_index", 0)).timeout("1ms");
+ Request request = new Request(new ShardId("unknown_index", "_na_", 0)).timeout("1ms");
PlainActionFuture<Response> listener = new PlainActionFuture<>();
- TransportReplicationAction.ReroutePhase reroutePhase = action.new ReroutePhase(request, listener);
+ TransportReplicationAction.ReroutePhase reroutePhase = action.new ReroutePhase(null, request, listener);
reroutePhase.run();
assertListenerThrows("must throw index not found exception", listener, IndexNotFoundException.class);
- request = new Request(new ShardId(index, 10)).timeout("1ms");
+ request = new Request(new ShardId(index, "_na_", 10)).timeout("1ms");
listener = new PlainActionFuture<>();
- reroutePhase = action.new ReroutePhase(request, listener);
+ reroutePhase = action.new ReroutePhase(null, request, listener);
reroutePhase.run();
assertListenerThrows("must throw shard not found exception", listener, ShardNotFoundException.class);
}
public void testRoutePhaseExecutesRequest() {
final String index = "test";
- final ShardId shardId = new ShardId(index, 0);
+ final ShardId shardId = new ShardId(index, "_na_", 0);
clusterService.setState(stateWithStartedPrimary(index, randomBoolean(), 3));
@@ -234,7 +234,7 @@ public class TransportReplicationActionTests extends ESTestCase {
Request request = new Request(shardId);
PlainActionFuture<Response> listener = new PlainActionFuture<>();
- TransportReplicationAction.ReroutePhase reroutePhase = action.new ReroutePhase(request, listener);
+ TransportReplicationAction.ReroutePhase reroutePhase = action.new ReroutePhase(null, request, listener);
reroutePhase.run();
assertThat(request.shardId(), equalTo(shardId));
logger.info("--> primary is assigned to [{}], checking request forwarded", primaryNodeId);
@@ -251,7 +251,7 @@ public class TransportReplicationActionTests extends ESTestCase {
public void testPrimaryPhaseExecutesRequest() throws InterruptedException, ExecutionException {
final String index = "test";
- final ShardId shardId = new ShardId(index, 0);
+ final ShardId shardId = new ShardId(index, "_na_", 0);
clusterService.setState(state(index, true, ShardRoutingState.STARTED, ShardRoutingState.STARTED));
Request request = new Request(shardId).timeout("1ms");
PlainActionFuture<Response> listener = new PlainActionFuture<>();
@@ -267,7 +267,7 @@ public class TransportReplicationActionTests extends ESTestCase {
public void testAddedReplicaAfterPrimaryOperation() {
final String index = "test";
- final ShardId shardId = new ShardId(index, 0);
+ final ShardId shardId = new ShardId(index, "_na_", 0);
// start with no replicas
clusterService.setState(stateWithStartedPrimary(index, true, 0));
logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
@@ -300,7 +300,7 @@ public class TransportReplicationActionTests extends ESTestCase {
public void testRelocatingReplicaAfterPrimaryOperation() {
final String index = "test";
- final ShardId shardId = new ShardId(index, 0);
+ final ShardId shardId = new ShardId(index, "_na_", 0);
// start with a replica
clusterService.setState(state(index, true, ShardRoutingState.STARTED, randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.STARTED));
logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
@@ -334,7 +334,7 @@ public class TransportReplicationActionTests extends ESTestCase {
public void testIndexDeletedAfterPrimaryOperation() {
final String index = "test";
- final ShardId shardId = new ShardId(index, 0);
+ final ShardId shardId = new ShardId(index, "_na_", 0);
clusterService.setState(state(index, true, ShardRoutingState.STARTED, ShardRoutingState.STARTED));
logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
final ClusterState stateWithDeletedIndex = state(index + "_new", true, ShardRoutingState.STARTED, ShardRoutingState.RELOCATING);
@@ -361,7 +361,7 @@ public class TransportReplicationActionTests extends ESTestCase {
public void testWriteConsistency() throws ExecutionException, InterruptedException {
action = new ActionWithConsistency(Settings.EMPTY, "testActionWithConsistency", transportService, clusterService, threadPool);
final String index = "test";
- final ShardId shardId = new ShardId(index, 0);
+ final ShardId shardId = new ShardId(index, "_na_", 0);
final int assignedReplicas = randomInt(2);
final int unassignedReplicas = randomInt(2);
final int totalShards = 1 + assignedReplicas + unassignedReplicas;
@@ -430,7 +430,7 @@ public class TransportReplicationActionTests extends ESTestCase {
public void testReplication() throws ExecutionException, InterruptedException {
final String index = "test";
- final ShardId shardId = new ShardId(index, 0);
+ final ShardId shardId = new ShardId(index, "_na_", 0);
clusterService.setState(stateWithStartedPrimary(index, true, randomInt(5)));
@@ -453,7 +453,7 @@ public class TransportReplicationActionTests extends ESTestCase {
public void testReplicationWithShadowIndex() throws ExecutionException, InterruptedException {
final String index = "test";
- final ShardId shardId = new ShardId(index, 0);
+ final ShardId shardId = new ShardId(index, "_na_", 0);
ClusterState state = stateWithStartedPrimary(index, true, randomInt(5));
MetaData.Builder metaData = MetaData.builder(state.metaData());
@@ -511,7 +511,7 @@ public class TransportReplicationActionTests extends ESTestCase {
assertThat(nodesSentTo.keySet(), not(hasItem(clusterService.state().getNodes().localNodeId())));
// requests were sent to the correct shard copies
- for (ShardRouting shard : clusterService.state().getRoutingTable().shardRoutingTable(shardId.getIndex(), shardId.id())) {
+ for (ShardRouting shard : clusterService.state().getRoutingTable().shardRoutingTable(shardId)) {
if (shard.primary() == false && executeOnReplica == false) {
continue;
}
@@ -602,7 +602,7 @@ public class TransportReplicationActionTests extends ESTestCase {
public void testCounterOnPrimary() throws Exception {
final String index = "test";
- final ShardId shardId = new ShardId(index, 0);
+ final ShardId shardId = new ShardId(index, "_na_", 0);
// no replica, we only want to test on primary
clusterService.setState(state(index, true,
ShardRoutingState.STARTED));
@@ -640,7 +640,7 @@ public class TransportReplicationActionTests extends ESTestCase {
public void testCounterIncrementedWhileReplicationOngoing() throws InterruptedException, ExecutionException, IOException {
final String index = "test";
- final ShardId shardId = new ShardId(index, 0);
+ final ShardId shardId = new ShardId(index, "_na_", 0);
// one replica to make sure replication is attempted
clusterService.setState(state(index, true,
ShardRoutingState.STARTED, ShardRoutingState.STARTED));
@@ -670,8 +670,8 @@ public class TransportReplicationActionTests extends ESTestCase {
}
public void testReplicasCounter() throws Exception {
- final ShardId shardId = new ShardId("test", 0);
- clusterService.setState(state(shardId.index().getName(), true,
+ final ShardId shardId = new ShardId("test", "_na_", 0);
+ clusterService.setState(state(shardId.getIndexName(), true,
ShardRoutingState.STARTED, ShardRoutingState.STARTED));
action = new ActionWithDelay(Settings.EMPTY, "testActionWithExceptions", transportService, clusterService, threadPool);
final Action.ReplicaOperationTransportHandler replicaOperationTransportHandler = action.new ReplicaOperationTransportHandler();
@@ -706,7 +706,7 @@ public class TransportReplicationActionTests extends ESTestCase {
public void testCounterDecrementedIfShardOperationThrowsException() throws InterruptedException, ExecutionException, IOException {
action = new ActionWithExceptions(Settings.EMPTY, "testActionWithExceptions", transportService, clusterService, threadPool);
final String index = "test";
- final ShardId shardId = new ShardId(index, 0);
+ final ShardId shardId = new ShardId(index, "_na_", 0);
clusterService.setState(state(index, true,
ShardRoutingState.STARTED, ShardRoutingState.STARTED));
logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint());
@@ -749,7 +749,7 @@ public class TransportReplicationActionTests extends ESTestCase {
Request(ShardId shardId) {
this();
this.shardId = shardId;
- this.index = shardId.getIndex();
+ this.index = shardId.getIndexName();
// keep things simple
}
@@ -773,7 +773,7 @@ public class TransportReplicationActionTests extends ESTestCase {
ClusterService clusterService,
ThreadPool threadPool) {
super(settings, actionName, transportService, clusterService, null, threadPool,
- new ShardStateAction(settings, clusterService, transportService, null, null), null,
+ new ShardStateAction(settings, clusterService, transportService, null, null, threadPool), null,
new ActionFilters(new HashSet<ActionFilter>()), new IndexNameExpressionResolver(Settings.EMPTY), Request::new, Request::new, ThreadPool.Names.SAME);
}
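
Every hunk above swaps the two-argument new ShardId(index, 0) for the three-argument form that also carries the index UUID, passing the "_na_" placeholder because these unit tests build no real IndexMetaData; the same change renames the old String-returning getIndex() to getIndexName() and lets a routing table be addressed by the ShardId directly (shardRoutingTable(shardId)). A minimal sketch of the new shape, using only names that appear in the hunks above (the helper itself is illustrative):

    import org.elasticsearch.index.shard.ShardId;

    class ShardIdSketch {
        // "_na_" stands in for the index UUID when, as in these unit tests,
        // no IndexMetaData exists to supply a real one.
        static ShardId placeholderShardId(String indexName, int shard) {
            return new ShardId(indexName, "_na_", shard);
        }
    }
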
diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java
index 60fa0e9d68..4928cc6608 100644
--- a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java
+++ b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java
@@ -99,7 +99,7 @@ public class GetTermVectorsCheckDocFreqIT extends ESIntegTestCase {
Fields fields = response.getFields();
assertThat(fields.size(), equalTo(1));
Terms terms = fields.terms("field");
- assertThat(terms.size(), equalTo(8l));
+ assertThat(terms.size(), equalTo(8L));
assertThat(terms.getSumTotalTermFreq(), Matchers.equalTo((long) -1));
assertThat(terms.getDocCount(), Matchers.equalTo(-1));
assertThat(terms.getSumDocFreq(), equalTo((long) -1));
@@ -158,7 +158,7 @@ public class GetTermVectorsCheckDocFreqIT extends ESIntegTestCase {
Fields fields = response.getFields();
assertThat(fields.size(), equalTo(1));
Terms terms = fields.terms("field");
- assertThat(terms.size(), equalTo(8l));
+ assertThat(terms.size(), equalTo(8L));
assertThat(terms.getSumTotalTermFreq(), Matchers.equalTo((long) (9 * numDocs)));
assertThat(terms.getDocCount(), Matchers.equalTo(numDocs));
assertThat(terms.getSumDocFreq(), equalTo((long) numDocs * values.length));
@@ -214,7 +214,7 @@ public class GetTermVectorsCheckDocFreqIT extends ESIntegTestCase {
Fields fields = response.getFields();
assertThat(fields.size(), equalTo(1));
Terms terms = fields.terms("field");
- assertThat(terms.size(), equalTo(8l));
+ assertThat(terms.size(), equalTo(8L));
assertThat(terms.getSumTotalTermFreq(), Matchers.equalTo((long) (9 * numDocs)));
assertThat(terms.getDocCount(), Matchers.equalTo(numDocs));
assertThat(terms.getSumDocFreq(), equalTo((long) numDocs * values.length));
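
The 8l → 8L substitutions are purely cosmetic: both suffixes denote a long literal in Java, but a lowercase l is easily misread as the digit 1, so the uppercase form is the conventional fix. For example:

    long ambiguous = 8l; // legal Java, but reads like 81
    long clear = 8L;     // same value, unmistakably a long
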
diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java
index 0c542698b5..21114804cb 100644
--- a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java
+++ b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java
@@ -152,7 +152,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
.addMapping("type1",
"field0", "type=integer,", // no tvs
"field1", "type=string,index=no", // no tvs
- "field2", "type=string,index=no,store=yes", // no tvs
+ "field2", "type=string,index=no,store=true", // no tvs
"field3", "type=string,index=no,term_vector=yes", // no tvs
"field4", "type=string,index=not_analyzed", // yes tvs
"field5", "type=string,index=analyzed")); // yes tvs
@@ -317,7 +317,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
assertThat(fields.size(), equalTo(ft.storeTermVectors() ? 1 : 0));
if (ft.storeTermVectors()) {
Terms terms = fields.terms("field");
- assertThat(terms.size(), equalTo(8l));
+ assertThat(terms.size(), equalTo(8L));
TermsEnum iterator = terms.iterator();
for (int j = 0; j < values.length; j++) {
String string = values[j];
@@ -637,7 +637,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
int[][] endOffset = {{15}, {43}, {19}, {25}, {39}, {30}, {9}, {3, 34}};
Terms terms = fields.terms(fieldName);
- assertThat(terms.size(), equalTo(8l));
+ assertThat(terms.size(), equalTo(8L));
TermsEnum iterator = terms.iterator();
for (int j = 0; j < values.length; j++) {
String string = values[j];
@@ -870,53 +870,6 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
checkBrownFoxTermVector(resp.getFields(), "field1", false);
}
- public void testArtificialNonExistingField() throws Exception {
- // setup indices
- Settings.Builder settings = settingsBuilder()
- .put(indexSettings())
- .put("index.analysis.analyzer", "standard");
- assertAcked(prepareCreate("test")
- .setSettings(settings)
- .addMapping("type1", "field1", "type=string"));
- ensureGreen();
-
- // index just one doc
- List<IndexRequestBuilder> indexBuilders = new ArrayList<>();
- indexBuilders.add(client().prepareIndex()
- .setIndex("test")
- .setType("type1")
- .setId("1")
- .setRouting("1")
- .setSource("field1", "some text"));
- indexRandom(true, indexBuilders);
-
- // request tvs from artificial document
- XContentBuilder doc = jsonBuilder()
- .startObject()
- .field("field1", "the quick brown fox jumps over the lazy dog")
- .field("non_existing", "the quick brown fox jumps over the lazy dog")
- .endObject();
-
- for (int i = 0; i < 2; i++) {
- TermVectorsResponse resp = client().prepareTermVectors()
- .setIndex("test")
- .setType("type1")
- .setDoc(doc)
- .setRouting("" + i)
- .setOffsets(true)
- .setPositions(true)
- .setFieldStatistics(true)
- .setTermStatistics(true)
- .get();
- assertThat(resp.isExists(), equalTo(true));
- checkBrownFoxTermVector(resp.getFields(), "field1", false);
- // we should have created a mapping for this field
- assertMappingOnMaster("test", "type1", "non_existing");
- // and return the generated term vectors
- checkBrownFoxTermVector(resp.getFields(), "non_existing", false);
- }
- }
-
public void testPerFieldAnalyzer() throws IOException {
int numFields = 25;
@@ -1134,12 +1087,12 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
- assertThat(response.getVersion(), equalTo(1l));
+ assertThat(response.getVersion(), equalTo(1L));
response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(1).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
- assertThat(response.getVersion(), equalTo(1l));
+ assertThat(response.getVersion(), equalTo(1L));
try {
client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).get();
@@ -1156,13 +1109,13 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
- assertThat(response.getVersion(), equalTo(1l));
+ assertThat(response.getVersion(), equalTo(1L));
response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(1).setRealtime(false).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
- assertThat(response.getVersion(), equalTo(1l));
+ assertThat(response.getVersion(), equalTo(1L));
try {
client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).setRealtime(false).get();
@@ -1181,7 +1134,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
- assertThat(response.getVersion(), equalTo(2l));
+ assertThat(response.getVersion(), equalTo(2L));
try {
client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).get();
@@ -1194,7 +1147,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
- assertThat(response.getVersion(), equalTo(2l));
+ assertThat(response.getVersion(), equalTo(2L));
// From Lucene index:
refresh();
@@ -1204,7 +1157,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
- assertThat(response.getVersion(), equalTo(2l));
+ assertThat(response.getVersion(), equalTo(2L));
try {
client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).setRealtime(false).get();
@@ -1217,7 +1170,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
- assertThat(response.getVersion(), equalTo(2l));
+ assertThat(response.getVersion(), equalTo(2L));
}
public void testFilterLength() throws ExecutionException, InterruptedException, IOException {
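
Besides the literal cleanups, the mapping shorthand in this file now spells the store option as a boolean: store=yes is the older spelling that these tests drop in favor of store=true. A fragment of the updated shorthand, reusing the test helpers shown above (the exact deprecation timeline is not visible in this diff):

    // boolean spelling of the store option in the test mapping shorthand
    prepareCreate("test").addMapping("type1", "field2", "type=string,index=no,store=true");
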
diff --git a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java
index 5526095710..bcb2661338 100644
--- a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java
@@ -28,6 +28,7 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.index.get.GetResult;
+import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.test.ESTestCase;
@@ -144,7 +145,7 @@ public class UpdateRequestTests extends ESTestCase {
// We simulate that the document is not existing yet
GetResult getResult = new GetResult("test", "type1", "1", 0, false, null, null);
- UpdateHelper.Result result = updateHelper.prepare(updateRequest, getResult);
+	UpdateHelper.Result result = updateHelper.prepare(new ShardId("test", "_na_", 0), updateRequest, getResult);
Streamable action = result.action();
assertThat(action, instanceOf(IndexRequest.class));
IndexRequest indexAction = (IndexRequest) action;
@@ -161,7 +162,7 @@ public class UpdateRequestTests extends ESTestCase {
// We simulate that the document is not existing yet
getResult = new GetResult("test", "type1", "2", 0, false, null, null);
- result = updateHelper.prepare(updateRequest, getResult);
+ result = updateHelper.prepare(new ShardId("test", "_na_", 0), updateRequest, getResult);
action = result.action();
assertThat(action, instanceOf(IndexRequest.class));
indexAction = (IndexRequest) action;
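
UpdateHelper.prepare now takes the target ShardId as its first argument (the diff does not show the motivation; plausibly so a prepared operation can be attributed to a concrete shard). The new call shape, sketched with the test's placeholder id:

    // the first argument identifies the shard the update will run against
    UpdateHelper.Result result = updateHelper.prepare(new ShardId("test", "_na_", 0), updateRequest, getResult);
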
diff --git a/core/src/test/java/org/elasticsearch/broadcast/BroadcastActionsIT.java b/core/src/test/java/org/elasticsearch/broadcast/BroadcastActionsIT.java
index 18c1572e86..f60709d6da 100644
--- a/core/src/test/java/org/elasticsearch/broadcast/BroadcastActionsIT.java
+++ b/core/src/test/java/org/elasticsearch/broadcast/BroadcastActionsIT.java
@@ -58,7 +58,7 @@ public class BroadcastActionsIT extends ESIntegTestCase {
SearchResponse countResponse = client().prepareSearch("test").setSize(0)
.setQuery(termQuery("_type", "type1"))
.get();
- assertThat(countResponse.getHits().totalHits(), equalTo(2l));
+ assertThat(countResponse.getHits().totalHits(), equalTo(2L));
assertThat(countResponse.getTotalShards(), equalTo(numShards.numPrimaries));
assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries));
assertThat(countResponse.getFailedShards(), equalTo(0));
diff --git a/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java
index 43633fe6f2..d5dcd14200 100644
--- a/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java
+++ b/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java
@@ -122,11 +122,11 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase {
assertThat(id, client().prepareIndex("test", "type1", id).setRouting(routingKey).setSource("field1", English.intToEnglish(i)).get().isCreated(), is(true));
GetResponse get = client().prepareGet("test", "type1", id).setRouting(routingKey).setVersion(1).get();
assertThat("Document with ID " + id + " should exist but doesn't", get.isExists(), is(true));
- assertThat(get.getVersion(), equalTo(1l));
+ assertThat(get.getVersion(), equalTo(1L));
client().prepareIndex("test", "type1", id).setRouting(routingKey).setSource("field1", English.intToEnglish(i)).execute().actionGet();
get = client().prepareGet("test", "type1", id).setRouting(routingKey).setVersion(2).get();
assertThat("Document with ID " + id + " should exist but doesn't", get.isExists(), is(true));
- assertThat(get.getVersion(), equalTo(2l));
+ assertThat(get.getVersion(), equalTo(2L));
}
assertVersionCreated(compatibilityVersion(), "test");
@@ -262,7 +262,7 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase {
for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) {
for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
for (ShardRouting shardRouting : indexShardRoutingTable) {
- if (shardRouting.currentNodeId() != null && index.equals(shardRouting.getIndex())) {
+ if (shardRouting.currentNodeId() != null && index.equals(shardRouting.getIndexName())) {
String name = clusterState.nodes().get(shardRouting.currentNodeId()).name();
assertThat("Allocated on new node: " + name, Regex.simpleMatch(pattern, name), is(true));
}
@@ -416,30 +416,30 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase {
client().prepareIndex(indexName, "type1", "4").setSource(jsonBuilder().startObject().startObject("obj2").field("obj2_val", "1").endObject().field("y2", "y_2").field("field3", "value3_4").endObject()));
SearchResponse countResponse = client().prepareSearch().setSize(0).setQuery(existsQuery("field1")).get();
- assertHitCount(countResponse, 2l);
+ assertHitCount(countResponse, 2L);
countResponse = client().prepareSearch().setSize(0).setQuery(constantScoreQuery(existsQuery("field1"))).get();
- assertHitCount(countResponse, 2l);
+ assertHitCount(countResponse, 2L);
countResponse = client().prepareSearch().setSize(0).setQuery(queryStringQuery("_exists_:field1")).get();
- assertHitCount(countResponse, 2l);
+ assertHitCount(countResponse, 2L);
countResponse = client().prepareSearch().setSize(0).setQuery(existsQuery("field2")).get();
- assertHitCount(countResponse, 2l);
+ assertHitCount(countResponse, 2L);
countResponse = client().prepareSearch().setSize(0).setQuery(existsQuery("field3")).get();
- assertHitCount(countResponse, 1l);
+ assertHitCount(countResponse, 1L);
// wildcard check
countResponse = client().prepareSearch().setSize(0).setQuery(existsQuery("x*")).get();
- assertHitCount(countResponse, 2l);
+ assertHitCount(countResponse, 2L);
// object check
countResponse = client().prepareSearch().setSize(0).setQuery(existsQuery("obj1")).get();
- assertHitCount(countResponse, 2l);
+ assertHitCount(countResponse, 2L);
countResponse = client().prepareSearch().setSize(0).setQuery(queryStringQuery("_missing_:field1")).get();
- assertHitCount(countResponse, 2l);
+ assertHitCount(countResponse, 2L);
if (!backwardsCluster().upgradeOneNode()) {
break;
@@ -598,7 +598,7 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase {
assertThat(termVectorsResponse.isExists(), equalTo(true));
Fields fields = termVectorsResponse.getFields();
assertThat(fields.size(), equalTo(1));
- assertThat(fields.terms("field").size(), equalTo(8l));
+ assertThat(fields.terms("field").size(), equalTo(8L));
}
public void testIndicesStats() {
diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java
index 8a18b72820..b5b1f955ae 100644
--- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java
+++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java
@@ -39,18 +39,19 @@ import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.util.MultiDataPathUpgrader;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.gateway.MetaDataStateFormat;
import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.MergePolicyConfig;
import org.elasticsearch.index.engine.Segment;
import org.elasticsearch.index.mapper.string.StringFieldMapperPositionIncrementGapTests;
import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.index.MergePolicyConfig;
+import org.elasticsearch.node.Node;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.AggregationBuilders;
@@ -143,14 +144,14 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase {
Path baseTempDir = createTempDir();
// start single data path node
Settings.Builder nodeSettings = Settings.builder()
- .put("path.data", baseTempDir.resolve("single-path").toAbsolutePath())
- .put("node.master", false); // workaround for dangling index loading issue when node is master
+ .put(Environment.PATH_DATA_SETTING.getKey(), baseTempDir.resolve("single-path").toAbsolutePath())
+ .put(Node.NODE_MASTER_SETTING.getKey(), false); // workaround for dangling index loading issue when node is master
InternalTestCluster.Async<String> singleDataPathNode = internalCluster().startNodeAsync(nodeSettings.build());
// start multi data path node
nodeSettings = Settings.builder()
- .put("path.data", baseTempDir.resolve("multi-path1").toAbsolutePath() + "," + baseTempDir.resolve("multi-path2").toAbsolutePath())
- .put("node.master", false); // workaround for dangling index loading issue when node is master
+ .put(Environment.PATH_DATA_SETTING.getKey(), baseTempDir.resolve("multi-path1").toAbsolutePath() + "," + baseTempDir.resolve("multi-path2").toAbsolutePath())
+ .put(Node.NODE_MASTER_SETTING.getKey(), false); // workaround for dangling index loading issue when node is master
InternalTestCluster.Async<String> multiDataPathNode = internalCluster().startNodeAsync(nodeSettings.build());
// find single data path dir
@@ -208,10 +209,6 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase {
}
void importIndex(String indexName) throws IOException {
- final Iterable<NodeEnvironment> instances = internalCluster().getInstances(NodeEnvironment.class);
- for (NodeEnvironment nodeEnv : instances) { // upgrade multidata path
- MultiDataPathUpgrader.upgradeMultiDataPath(nodeEnv, logger);
- }
// force reloading dangling indices with a cluster state republish
client().admin().cluster().prepareReroute().get();
ensureGreen(indexName);
@@ -219,6 +216,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase {
// randomly distribute the files from src over dests paths
public static void copyIndex(final ESLogger logger, final Path src, final String indexName, final Path... dests) throws IOException {
+ Path destinationDataPath = dests[randomInt(dests.length - 1)];
for (Path dest : dests) {
Path indexDir = dest.resolve(indexName);
assertFalse(Files.exists(indexDir));
@@ -244,7 +242,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase {
}
Path relativeFile = src.relativize(file);
- Path destFile = dests[randomInt(dests.length - 1)].resolve(indexName).resolve(relativeFile);
+ Path destFile = destinationDataPath.resolve(indexName).resolve(relativeFile);
logger.trace("--> Moving " + relativeFile.toString() + " to " + destFile.toString());
Files.move(file, destFile);
assertFalse(Files.exists(file));
@@ -334,7 +332,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase {
}
}
SearchResponse test = client().prepareSearch(indexName).get();
- assertThat(test.getHits().getTotalHits(), greaterThanOrEqualTo(1l));
+ assertThat(test.getHits().getTotalHits(), greaterThanOrEqualTo(1L));
}
void assertBasicSearchWorks(String indexName) {
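
This file begins a pattern repeated through the rest of the diff: raw settings keys such as "path.data", "node.master", "path.repo", "transport.tcp.port", and "cluster.name" give way to typed Setting constants, with getKey() used wherever a builder still needs the string form. A minimal sketch of both sides, assuming the constants named in the hunks:

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.env.Environment;
    import org.elasticsearch.node.Node;

    class SettingsSketch {
        static Settings exampleNodeSettings(String dataPath) {
            return Settings.builder()
                .put(Environment.PATH_DATA_SETTING.getKey(), dataPath) // was "path.data"
                .put(Node.NODE_MASTER_SETTING.getKey(), false)         // was "node.master"
                .build();
        }
        // Reads go through the Setting itself, e.g. Node.NODE_MASTER_SETTING.get(settings),
        // mirroring the Client.CLIENT_TYPE_SETTING_S.get(settings) calls later in this diff.
    }
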
diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java
index a573a8374e..23163b8611 100644
--- a/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java
+++ b/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.bwcompat;
+import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;
@@ -28,7 +29,7 @@ public class RecoveryWithUnsupportedIndicesIT extends StaticIndexBackwardCompati
String indexName = "unsupported-0.20.6";
logger.info("Checking static index " + indexName);
- Settings nodeSettings = prepareBackwardsDataDir(getBwcIndicesPath().resolve(indexName + ".zip"), Node.HTTP_ENABLED, true);
+ Settings nodeSettings = prepareBackwardsDataDir(getBwcIndicesPath().resolve(indexName + ".zip"), NetworkModule.HTTP_ENABLED.getKey(), true);
try {
internalCluster().startNode(nodeSettings);
fail();
diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java
index 6ad05b3ff8..eabb954c2c 100644
--- a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java
+++ b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java
@@ -1,4 +1,5 @@
/*
+/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
@@ -27,6 +28,8 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexTemplateMetaData;
import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.repositories.uri.URLRepository;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase;
import org.elasticsearch.snapshots.RestoreInfo;
@@ -64,7 +67,7 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase {
// Configure using path.repo
return settingsBuilder()
.put(super.nodeSettings(nodeOrdinal))
- .put("path.repo", getBwcIndicesPath())
+ .put(Environment.PATH_REPO_SETTING.getKey(), getBwcIndicesPath())
.build();
} else {
// Configure using url white list
@@ -72,7 +75,7 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase {
URI repoJarPatternUri = new URI("jar:" + getBwcIndicesPath().toUri().toString() + "*.zip!/repo/");
return settingsBuilder()
.put(super.nodeSettings(nodeOrdinal))
- .putArray("repositories.url.allowed_urls", repoJarPatternUri.toString())
+ .putArray(URLRepository.ALLOWED_URLS_SETTING.getKey(), repoJarPatternUri.toString())
.build();
} catch (URISyntaxException ex) {
throw new IllegalArgumentException(ex);
diff --git a/core/src/test/java/org/elasticsearch/bwcompat/StaticIndexBackwardCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/StaticIndexBackwardCompatibilityIT.java
index 37961c3d35..794aea8548 100644
--- a/core/src/test/java/org/elasticsearch/bwcompat/StaticIndexBackwardCompatibilityIT.java
+++ b/core/src/test/java/org/elasticsearch/bwcompat/StaticIndexBackwardCompatibilityIT.java
@@ -49,7 +49,7 @@ public class StaticIndexBackwardCompatibilityIT extends ESIntegTestCase {
assertEquals(index, getIndexResponse.indices()[0]);
ensureYellow(index);
SearchResponse test = client().prepareSearch(index).get();
- assertThat(test.getHits().getTotalHits(), greaterThanOrEqualTo(1l));
+ assertThat(test.getHits().getTotalHits(), greaterThanOrEqualTo(1L));
}
}
diff --git a/core/src/test/java/org/elasticsearch/bwcompat/TransportClientBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/TransportClientBackwardsCompatibilityIT.java
index 0e2f94e3cf..b1fc1d45dc 100644
--- a/core/src/test/java/org/elasticsearch/bwcompat/TransportClientBackwardsCompatibilityIT.java
+++ b/core/src/test/java/org/elasticsearch/bwcompat/TransportClientBackwardsCompatibilityIT.java
@@ -39,7 +39,7 @@ import static org.hamcrest.CoreMatchers.equalTo;
public class TransportClientBackwardsCompatibilityIT extends ESBackcompatTestCase {
public void testSniffMode() throws ExecutionException, InterruptedException {
Settings settings = Settings.builder().put(requiredSettings()).put("client.transport.nodes_sampler_interval", "1s")
- .put("name", "transport_client_sniff_mode").put(ClusterName.SETTING, cluster().getClusterName())
+ .put("name", "transport_client_sniff_mode").put(ClusterName.CLUSTER_NAME_SETTING.getKey(), cluster().getClusterName())
.put("client.transport.sniff", true).build();
CompositeTestCluster compositeTestCluster = backwardsCluster();
diff --git a/core/src/test/java/org/elasticsearch/bwcompat/UnicastBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/UnicastBackwardsCompatibilityIT.java
index ab7e95812f..d6eea58cb7 100644
--- a/core/src/test/java/org/elasticsearch/bwcompat/UnicastBackwardsCompatibilityIT.java
+++ b/core/src/test/java/org/elasticsearch/bwcompat/UnicastBackwardsCompatibilityIT.java
@@ -22,6 +22,7 @@ package org.elasticsearch.bwcompat;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESBackcompatTestCase;
+import org.elasticsearch.transport.TransportSettings;
import static org.hamcrest.Matchers.equalTo;
@@ -30,7 +31,7 @@ public class UnicastBackwardsCompatibilityIT extends ESBackcompatTestCase {
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal))
- .put("transport.tcp.port", 9380 + nodeOrdinal)
+ .put(TransportSettings.PORT.getKey(), 9380 + nodeOrdinal)
.put("discovery.zen.ping.unicast.hosts", "localhost:9380,localhost:9381,localhost:9390,localhost:9391")
.build();
}
@@ -39,7 +40,7 @@ public class UnicastBackwardsCompatibilityIT extends ESBackcompatTestCase {
protected Settings externalNodeSettings(int nodeOrdinal) {
return Settings.settingsBuilder()
.put(super.externalNodeSettings(nodeOrdinal))
- .put("transport.tcp.port", 9390 + nodeOrdinal)
+ .put(TransportSettings.PORT.getKey(), 9390 + nodeOrdinal)
.put("discovery.zen.ping.unicast.hosts", "localhost:9380,localhost:9381,localhost:9390,localhost:9391")
.build();
}
diff --git a/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java b/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java
index b814cff520..25a1e7005a 100644
--- a/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java
+++ b/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java
@@ -23,34 +23,22 @@ import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.GenericAction;
import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction;
-import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction;
-import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction;
-import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction;
-import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
import org.elasticsearch.action.admin.indices.create.CreateIndexAction;
-import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.flush.FlushAction;
-import org.elasticsearch.action.admin.indices.flush.FlushResponse;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction;
-import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.delete.DeleteAction;
-import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.get.GetAction;
-import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexAction;
-import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.indexedscripts.delete.DeleteIndexedScriptAction;
-import org.elasticsearch.action.indexedscripts.delete.DeleteIndexedScriptResponse;
import org.elasticsearch.action.search.SearchAction;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.client.support.Headers;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.transport.TransportMessage;
import org.junit.After;
import org.junit.Before;
@@ -58,7 +46,6 @@ import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
/**
@@ -67,8 +54,8 @@ import static org.hamcrest.Matchers.notNullValue;
public abstract class AbstractClientHeadersTestCase extends ESTestCase {
protected static final Settings HEADER_SETTINGS = Settings.builder()
- .put(Headers.PREFIX + ".key1", "val1")
- .put(Headers.PREFIX + ".key2", "val 2")
+ .put(ThreadContext.PREFIX + ".key1", "val1")
+ .put(ThreadContext.PREFIX + ".key2", "val 2")
.build();
private static final GenericAction[] ACTIONS = new GenericAction[] {
@@ -91,8 +78,10 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase {
Settings settings = Settings.builder()
.put(HEADER_SETTINGS)
.put("path.home", createTempDir().toString())
+ .put("name", "test-" + getTestName())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- threadPool = new ThreadPool("test-" + getTestName());
+ threadPool = new ThreadPool(settings);
client = buildClient(settings, ACTIONS);
}
@@ -113,89 +102,75 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase {
// validation in the settings??? - ugly and conceptually wrong)
// choosing arbitrary top level actions to test
- client.prepareGet("idx", "type", "id").execute().addListener(new AssertingActionListener<GetResponse>(GetAction.NAME));
- client.prepareSearch().execute().addListener(new AssertingActionListener<SearchResponse>(SearchAction.NAME));
- client.prepareDelete("idx", "type", "id").execute().addListener(new AssertingActionListener<DeleteResponse>(DeleteAction.NAME));
- client.prepareDeleteIndexedScript("lang", "id").execute().addListener(new AssertingActionListener<DeleteIndexedScriptResponse>(DeleteIndexedScriptAction.NAME));
- client.prepareIndex("idx", "type", "id").setSource("source").execute().addListener(new AssertingActionListener<IndexResponse>(IndexAction.NAME));
+ client.prepareGet("idx", "type", "id").execute().addListener(new AssertingActionListener<>(GetAction.NAME, client.threadPool()));
+ client.prepareSearch().execute().addListener(new AssertingActionListener<>(SearchAction.NAME, client.threadPool()));
+ client.prepareDelete("idx", "type", "id").execute().addListener(new AssertingActionListener<>(DeleteAction.NAME, client.threadPool()));
+ client.prepareDeleteIndexedScript("lang", "id").execute().addListener(new AssertingActionListener<>(DeleteIndexedScriptAction.NAME, client.threadPool()));
+ client.prepareIndex("idx", "type", "id").setSource("source").execute().addListener(new AssertingActionListener<>(IndexAction.NAME, client.threadPool()));
// choosing arbitrary cluster admin actions to test
- client.admin().cluster().prepareClusterStats().execute().addListener(new AssertingActionListener<ClusterStatsResponse>(ClusterStatsAction.NAME));
- client.admin().cluster().prepareCreateSnapshot("repo", "bck").execute().addListener(new AssertingActionListener<CreateSnapshotResponse>(CreateSnapshotAction.NAME));
- client.admin().cluster().prepareReroute().execute().addListener(new AssertingActionListener<ClusterRerouteResponse>(ClusterRerouteAction.NAME));
+ client.admin().cluster().prepareClusterStats().execute().addListener(new AssertingActionListener<>(ClusterStatsAction.NAME, client.threadPool()));
+ client.admin().cluster().prepareCreateSnapshot("repo", "bck").execute().addListener(new AssertingActionListener<>(CreateSnapshotAction.NAME, client.threadPool()));
+ client.admin().cluster().prepareReroute().execute().addListener(new AssertingActionListener<>(ClusterRerouteAction.NAME, client.threadPool()));
// choosing arbitrary indices admin actions to test
- client.admin().indices().prepareCreate("idx").execute().addListener(new AssertingActionListener<CreateIndexResponse>(CreateIndexAction.NAME));
- client.admin().indices().prepareStats().execute().addListener(new AssertingActionListener<IndicesStatsResponse>(IndicesStatsAction.NAME));
- client.admin().indices().prepareClearCache("idx1", "idx2").execute().addListener(new AssertingActionListener<ClearIndicesCacheResponse>(ClearIndicesCacheAction.NAME));
- client.admin().indices().prepareFlush().execute().addListener(new AssertingActionListener<FlushResponse>(FlushAction.NAME));
+ client.admin().indices().prepareCreate("idx").execute().addListener(new AssertingActionListener<>(CreateIndexAction.NAME, client.threadPool()));
+ client.admin().indices().prepareStats().execute().addListener(new AssertingActionListener<>(IndicesStatsAction.NAME, client.threadPool()));
+ client.admin().indices().prepareClearCache("idx1", "idx2").execute().addListener(new AssertingActionListener<>(ClearIndicesCacheAction.NAME, client.threadPool()));
+ client.admin().indices().prepareFlush().execute().addListener(new AssertingActionListener<>(FlushAction.NAME, client.threadPool()));
}
public void testOverideHeader() throws Exception {
String key1Val = randomAsciiOfLength(5);
- Map<String, Object> expected = new HashMap<>();
+ Map<String, String> expected = new HashMap<>();
expected.put("key1", key1Val);
expected.put("key2", "val 2");
-
+ client.threadPool().getThreadContext().putHeader("key1", key1Val);
client.prepareGet("idx", "type", "id")
- .putHeader("key1", key1Val)
- .execute().addListener(new AssertingActionListener<GetResponse>(GetAction.NAME, expected));
+ .execute().addListener(new AssertingActionListener<>(GetAction.NAME, expected, client.threadPool()));
client.admin().cluster().prepareClusterStats()
- .putHeader("key1", key1Val)
- .execute().addListener(new AssertingActionListener<ClusterStatsResponse>(ClusterStatsAction.NAME, expected));
+ .execute().addListener(new AssertingActionListener<>(ClusterStatsAction.NAME, expected, client.threadPool()));
client.admin().indices().prepareCreate("idx")
- .putHeader("key1", key1Val)
- .execute().addListener(new AssertingActionListener<CreateIndexResponse>(CreateIndexAction.NAME, expected));
+ .execute().addListener(new AssertingActionListener<>(CreateIndexAction.NAME, expected, client.threadPool()));
}
- protected static void assertHeaders(Map<String, Object> headers, Map<String, Object> expected) {
- assertThat(headers, notNullValue());
- assertThat(headers.size(), is(expected.size()));
- for (Map.Entry<String, Object> expectedEntry : expected.entrySet()) {
- assertThat(headers.get(expectedEntry.getKey()), equalTo(expectedEntry.getValue()));
+ protected static void assertHeaders(Map<String, String> headers, Map<String, String> expected) {
+ assertNotNull(headers);
+ assertEquals(expected.size(), headers.size());
+ for (Map.Entry<String, String> expectedEntry : expected.entrySet()) {
+ assertEquals(headers.get(expectedEntry.getKey()), expectedEntry.getValue());
}
}
- protected static void assertHeaders(TransportMessage<?> message) {
- assertHeaders(message, HEADER_SETTINGS.getAsSettings(Headers.PREFIX).getAsStructuredMap());
- }
-
- protected static void assertHeaders(TransportMessage<?> message, Map<String, Object> expected) {
- assertThat(message.getHeaders(), notNullValue());
- assertThat(message.getHeaders().size(), is(expected.size()));
- for (Map.Entry<String, Object> expectedEntry : expected.entrySet()) {
- assertThat(message.getHeader(expectedEntry.getKey()), equalTo(expectedEntry.getValue()));
- }
+ protected static void assertHeaders(ThreadPool pool) {
+ assertHeaders(pool.getThreadContext().getHeaders(), (Map)HEADER_SETTINGS.getAsSettings(ThreadContext.PREFIX).getAsStructuredMap());
}
public static class InternalException extends Exception {
private final String action;
- private final Map<String, Object> headers;
- public InternalException(String action, TransportMessage<?> message) {
+ public InternalException(String action) {
this.action = action;
- this.headers = new HashMap<>();
- for (String key : message.getHeaders()) {
- headers.put(key, message.getHeader(key));
- }
}
}
protected static class AssertingActionListener<T> implements ActionListener<T> {
private final String action;
- private final Map<String, Object> expectedHeaders;
+ private final Map<String, String> expectedHeaders;
+ private final ThreadPool pool;
- public AssertingActionListener(String action) {
- this(action, HEADER_SETTINGS.getAsSettings(Headers.PREFIX).getAsStructuredMap());
+ public AssertingActionListener(String action, ThreadPool pool) {
+ this(action, (Map)HEADER_SETTINGS.getAsSettings(ThreadContext.PREFIX).getAsStructuredMap(), pool);
}
- public AssertingActionListener(String action, Map<String, Object> expectedHeaders) {
+ public AssertingActionListener(String action, Map<String, String> expectedHeaders, ThreadPool pool) {
this.action = action;
this.expectedHeaders = expectedHeaders;
+ this.pool = pool;
}
@Override
@@ -208,7 +183,7 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase {
Throwable e = unwrap(t, InternalException.class);
assertThat("expected action [" + action + "] to throw an internal exception", e, notNullValue());
assertThat(action, equalTo(((InternalException) e).action));
- Map<String, Object> headers = ((InternalException) e).headers;
+ Map<String, String> headers = pool.getThreadContext().getHeaders();
assertHeaders(headers, expectedHeaders);
}
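
This file tracks the larger migration from per-message headers (the deleted Headers class and TransportMessage#putHeader) to a single ThreadContext owned by the ThreadPool: default headers come from settings under the ThreadContext.PREFIX namespace, ad-hoc headers are put on the context, and the assertions read them back from the pool rather than off the request. A sketch of the new pattern as the test drives it:

    import org.elasticsearch.common.util.concurrent.ThreadContext;
    import org.elasticsearch.threadpool.ThreadPool;

    class ThreadContextSketch {
        // headers now live on the calling thread's context, not on each request object
        static void tagAndRead(ThreadPool threadPool) {
            ThreadContext ctx = threadPool.getThreadContext();
            ctx.putHeader("key1", "val1");              // replaces request.putHeader(...)
            String seen = ctx.getHeaders().get("key1"); // what AssertingActionListener inspects
        }
    }
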
diff --git a/core/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java b/core/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java
index e7ba8de0f9..f69c8f2da0 100644
--- a/core/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java
+++ b/core/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java
@@ -27,7 +27,6 @@ import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.TransportAction;
import org.elasticsearch.client.AbstractClientHeadersTestCase;
import org.elasticsearch.client.Client;
-import org.elasticsearch.client.support.Headers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskManager;
@@ -46,9 +45,8 @@ public class NodeClientHeadersTests extends AbstractClientHeadersTestCase {
@Override
protected Client buildClient(Settings headersSettings, GenericAction[] testedActions) {
Settings settings = HEADER_SETTINGS;
- Headers headers = new Headers(settings);
Actions actions = new Actions(settings, threadPool, testedActions);
- return new NodeClient(settings, threadPool, headers, actions);
+ return new NodeClient(settings, threadPool, actions);
}
private static class Actions extends HashMap<GenericAction, TransportAction> {
@@ -68,7 +66,7 @@ public class NodeClientHeadersTests extends AbstractClientHeadersTestCase {
@Override
protected void doExecute(ActionRequest request, ActionListener listener) {
- listener.onFailure(new InternalException(actionName, request));
+ listener.onFailure(new InternalException(actionName));
}
}
diff --git a/core/src/test/java/org/elasticsearch/client/node/NodeClientIT.java b/core/src/test/java/org/elasticsearch/client/node/NodeClientIT.java
index 966553b8f3..ae839e8837 100644
--- a/core/src/test/java/org/elasticsearch/client/node/NodeClientIT.java
+++ b/core/src/test/java/org/elasticsearch/client/node/NodeClientIT.java
@@ -34,12 +34,12 @@ import static org.hamcrest.Matchers.is;
public class NodeClientIT extends ESIntegTestCase {
@Override
protected Settings nodeSettings(int nodeOrdinal) {
- return settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put(Client.CLIENT_TYPE_SETTING, "anything").build();
+ return settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put(Client.CLIENT_TYPE_SETTING_S.getKey(), "anything").build();
}
public void testThatClientTypeSettingCannotBeChanged() {
for (Settings settings : internalCluster().getInstances(Settings.class)) {
- assertThat(settings.get(Client.CLIENT_TYPE_SETTING), is("node"));
+ assertThat(Client.CLIENT_TYPE_SETTING_S.get(settings), is("node"));
}
}
}
diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java
index f127ae2837..221b425cae 100644
--- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java
+++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java
@@ -36,6 +36,7 @@ import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.tasks.TaskManager;
import org.elasticsearch.threadpool.ThreadPool;
@@ -83,7 +84,7 @@ public class TransportClientHeadersTests extends AbstractClientHeadersTestCase {
.put("node.name", "transport_client_" + this.getTestName() + "_1")
.put("client.transport.nodes_sampler_interval", "1s")
.put(HEADER_SETTINGS)
- .put("path.home", createTempDir().toString()).build())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build())
.addPlugin(InternalTransportService.TestPlugin.class)
.build();
@@ -134,30 +135,30 @@ public class TransportClientHeadersTests extends AbstractClientHeadersTestCase {
@Override @SuppressWarnings("unchecked")
public <T extends TransportResponse> void sendRequest(DiscoveryNode node, String action, TransportRequest request, TransportRequestOptions options, TransportResponseHandler<T> handler) {
if (TransportLivenessAction.NAME.equals(action)) {
- assertHeaders(request);
+ assertHeaders(threadPool);
((TransportResponseHandler<LivenessResponse>) handler).handleResponse(new LivenessResponse(ClusterName.DEFAULT, node));
return;
}
if (ClusterStateAction.NAME.equals(action)) {
- assertHeaders(request);
+ assertHeaders(threadPool);
ClusterName cluster1 = new ClusterName("cluster1");
((TransportResponseHandler<ClusterStateResponse>) handler).handleResponse(new ClusterStateResponse(cluster1, state(cluster1)));
clusterStateLatch.countDown();
return;
}
- handler.handleException(new TransportException("", new InternalException(action, request)));
+ handler.handleException(new TransportException("", new InternalException(action)));
}
@Override
public boolean nodeConnected(DiscoveryNode node) {
- assertThat((LocalTransportAddress) node.getAddress(), equalTo(address));
+ assertThat(node.getAddress(), equalTo(address));
return true;
}
@Override
public void connectToNode(DiscoveryNode node) throws ConnectTransportException {
- assertThat((LocalTransportAddress) node.getAddress(), equalTo(address));
+ assertThat(node.getAddress(), equalTo(address));
}
}
diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java
index f01fdffd14..bd1bd83ef8 100644
--- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java
+++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java
@@ -24,6 +24,7 @@ import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
import org.elasticsearch.test.ESIntegTestCase;
@@ -31,6 +32,8 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import org.elasticsearch.transport.TransportService;
+import java.io.IOException;
+
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
@@ -40,24 +43,24 @@ import static org.hamcrest.Matchers.startsWith;
@ClusterScope(scope = Scope.TEST, numDataNodes = 0, transportClientRatio = 1.0)
public class TransportClientIT extends ESIntegTestCase {
public void testPickingUpChangesInDiscoveryNode() {
- String nodeName = internalCluster().startNode(Settings.builder().put("node.data", false));
+ String nodeName = internalCluster().startNode(Settings.builder().put(Node.NODE_DATA_SETTING.getKey(), false));
TransportClient client = (TransportClient) internalCluster().client(nodeName);
assertThat(client.connectedNodes().get(0).dataNode(), equalTo(false));
}
- public void testNodeVersionIsUpdated() {
+ public void testNodeVersionIsUpdated() throws IOException {
TransportClient client = (TransportClient) internalCluster().client();
TransportClientNodesService nodeService = client.nodeService();
Node node = new Node(Settings.builder()
.put(internalCluster().getDefaultSettings())
- .put("path.home", createTempDir())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put("node.name", "testNodeVersionIsUpdated")
.put("http.enabled", false)
- .put("node.data", false)
+ .put(Node.NODE_DATA_SETTING.getKey(), false)
.put("cluster.name", "foobar")
- .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) // make sure we get what we set :)
+ .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) // make sure we get what we set :)
.build());
node.start();
try {
@@ -85,14 +88,16 @@ public class TransportClientIT extends ESIntegTestCase {
public void testThatTransportClientSettingIsSet() {
TransportClient client = (TransportClient) internalCluster().client();
Settings settings = client.injector.getInstance(Settings.class);
- assertThat(settings.get(Client.CLIENT_TYPE_SETTING), is("transport"));
+ assertThat(Client.CLIENT_TYPE_SETTING_S.get(settings), is("transport"));
}
public void testThatTransportClientSettingCannotBeChanged() {
- Settings baseSettings = settingsBuilder().put(Client.CLIENT_TYPE_SETTING, "anything").put("path.home", createTempDir()).build();
+ Settings baseSettings = settingsBuilder()
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
+ .build();
try (TransportClient client = TransportClient.builder().settings(baseSettings).build()) {
Settings settings = client.injector.getInstance(Settings.class);
- assertThat(settings.get(Client.CLIENT_TYPE_SETTING), is("transport"));
+ assertThat(Client.CLIENT_TYPE_SETTING_S.get(settings), is("transport"));
}
}
}
diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java
index 72ace64d9e..e6ea0410a5 100644
--- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java
@@ -21,7 +21,6 @@ package org.elasticsearch.client.transport;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.client.support.Headers;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.settings.Settings;
@@ -74,7 +73,7 @@ public class TransportClientNodesServiceTests extends ESTestCase {
};
transportService = new TransportService(Settings.EMPTY, transport, threadPool);
transportService.start();
- transportClientNodesService = new TransportClientNodesService(Settings.EMPTY, ClusterName.DEFAULT, transportService, threadPool, Headers.EMPTY, Version.CURRENT);
+ transportClientNodesService = new TransportClientNodesService(Settings.EMPTY, ClusterName.DEFAULT, transportService, threadPool, Version.CURRENT);
nodesCount = randomIntBetween(1, 10);
for (int i = 0; i < nodesCount; i++) {
diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java
index b28fdba8c7..dcb5ac4694 100644
--- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java
+++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java
@@ -27,11 +27,12 @@ import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.node.Node;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
-import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
@@ -42,7 +43,6 @@ import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
@ClusterScope(scope = Scope.TEST, numClientNodes = 0)
-@TestLogging("discovery.zen:TRACE")
public class TransportClientRetryIT extends ESIntegTestCase {
public void testRetry() throws IOException, ExecutionException, InterruptedException {
Iterable<TransportService> instances = internalCluster().getInstances(TransportService.class);
@@ -54,10 +54,10 @@ public class TransportClientRetryIT extends ESIntegTestCase {
Settings.Builder builder = settingsBuilder().put("client.transport.nodes_sampler_interval", "1s")
.put("name", "transport_client_retry_test")
- .put("node.mode", internalCluster().getNodeMode())
- .put(ClusterName.SETTING, internalCluster().getClusterName())
- .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true)
- .put("path.home", createTempDir());
+ .put(Node.NODE_MODE_SETTING.getKey(), internalCluster().getNodeMode())
+ .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), internalCluster().getClusterName())
+ .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true)
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir());
try (TransportClient transportClient = TransportClient.builder().settings(builder.build()).build()) {
transportClient.addTransportAddresses(addresses);
diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java
index ac2845c86a..9a8e8fb726 100644
--- a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java
@@ -20,6 +20,7 @@
package org.elasticsearch.cluster;
import com.carrotsearch.hppc.cursors.ObjectCursor;
+
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionModule;
@@ -100,7 +101,7 @@ public class ClusterInfoServiceIT extends ESIntegTestCase {
}
@Override
- protected boolean apply(String action, ActionRequest request, ActionListener listener) {
+ protected boolean apply(String action, ActionRequest<?> request, ActionListener<?> listener) {
if (blockedActions.contains(action)) {
throw new ElasticsearchException("force exception on [" + action + "]");
}
@@ -108,7 +109,7 @@ public class ClusterInfoServiceIT extends ESIntegTestCase {
}
@Override
- protected boolean apply(String action, ActionResponse response, ActionListener listener) {
+ protected boolean apply(String action, ActionResponse response, ActionListener<?> listener) {
return true;
}
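
The filter overrides pick up wildcard generics to match the revised ActionFilter callbacks; leaving the raw ActionRequest and ActionListener types would now draw unchecked warnings. A sketch of the request-side override under that assumption:

    @Override
    protected boolean apply(String action, ActionRequest<?> request, ActionListener<?> listener) {
        // returning true lets the action proceed; the test throws instead for blocked actions
        return true;
    }
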
diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java
index 7cfa0eeaaa..7af4e375fd 100644
--- a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java
@@ -122,7 +122,7 @@ public class ClusterModuleTests extends ModuleTestCase {
}
public void testRegisterShardsAllocator() {
- Settings settings = Settings.builder().put(ClusterModule.SHARDS_ALLOCATOR_TYPE_KEY, "custom").build();
+ Settings settings = Settings.builder().put(ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING.getKey(), "custom").build();
ClusterModule module = new ClusterModule(settings);
module.registerShardsAllocator("custom", FakeShardsAllocator.class);
assertBinding(module, ShardsAllocator.class, FakeShardsAllocator.class);
@@ -138,14 +138,14 @@ public class ClusterModuleTests extends ModuleTestCase {
}
public void testUnknownShardsAllocator() {
- Settings settings = Settings.builder().put(ClusterModule.SHARDS_ALLOCATOR_TYPE_KEY, "dne").build();
+ Settings settings = Settings.builder().put(ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING.getKey(), "dne").build();
ClusterModule module = new ClusterModule(settings);
assertBindingFailure(module, "Unknown [shards_allocator]");
}
public void testEvenShardsAllocatorBackcompat() {
Settings settings = Settings.builder()
- .put(ClusterModule.SHARDS_ALLOCATOR_TYPE_KEY, ClusterModule.EVEN_SHARD_COUNT_ALLOCATOR).build();
+ .put(ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING.getKey(), ClusterModule.EVEN_SHARD_COUNT_ALLOCATOR).build();
ClusterModule module = new ClusterModule(settings);
assertBinding(module, ShardsAllocator.class, BalancedShardsAllocator.class);
}
diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java
index 2d781c866d..cc3cae8606 100644
--- a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java
@@ -35,6 +35,7 @@ import org.elasticsearch.common.inject.Singleton;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.discovery.zen.ZenDiscovery;
+import org.elasticsearch.node.Node;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
@@ -305,7 +306,7 @@ public class ClusterServiceIT extends ESIntegTestCase {
.build();
InternalTestCluster.Async<String> master = internalCluster().startNodeAsync(settings);
- InternalTestCluster.Async<String> nonMaster = internalCluster().startNodeAsync(settingsBuilder().put(settings).put("node.master", false).build());
+ InternalTestCluster.Async<String> nonMaster = internalCluster().startNodeAsync(settingsBuilder().put(settings).put(Node.NODE_MASTER_SETTING.getKey(), false).build());
master.get();
ensureGreen(); // make sure we have a cluster
@@ -631,7 +632,7 @@ public class ClusterServiceIT extends ESIntegTestCase {
controlSources = new HashSet<>(Arrays.asList("1", "2", "3", "4", "5"));
for (PendingClusterTask task : response) {
if (controlSources.remove(task.getSource().string())) {
- assertThat(task.getTimeInQueueInMillis(), greaterThan(0l));
+ assertThat(task.getTimeInQueueInMillis(), greaterThan(0L));
}
}
assertTrue(controlSources.isEmpty());
@@ -642,7 +643,7 @@ public class ClusterServiceIT extends ESIntegTestCase {
Settings settings = settingsBuilder()
.put("discovery.type", "zen")
.put("discovery.zen.minimum_master_nodes", 1)
- .put(ZenDiscovery.SETTING_PING_TIMEOUT, "400ms")
+ .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "400ms")
.put("discovery.initial_state_timeout", "500ms")
.build();
diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java
index 8b79b78397..6f3fdee36d 100644
--- a/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java
@@ -46,6 +46,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.discovery.DiscoverySettings;
import org.elasticsearch.gateway.GatewayService;
+import org.elasticsearch.index.Index;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESIntegTestCase;
@@ -223,11 +224,11 @@ public class ClusterStateDiffIT extends ESIntegTestCase {
* Randomly updates index routing table in the cluster state
*/
private IndexRoutingTable randomIndexRoutingTable(String index, String[] nodeIds) {
- IndexRoutingTable.Builder builder = IndexRoutingTable.builder(index);
+ IndexRoutingTable.Builder builder = IndexRoutingTable.builder(new Index(index, "_na_"));
int shardCount = randomInt(10);
for (int i = 0; i < shardCount; i++) {
- IndexShardRoutingTable.Builder indexShard = new IndexShardRoutingTable.Builder(new ShardId(index, i));
+ IndexShardRoutingTable.Builder indexShard = new IndexShardRoutingTable.Builder(new ShardId(index, "_na_", i));
int replicaCount = randomIntBetween(1, 10);
for (int j = 0; j < replicaCount; j++) {
UnassignedInfo unassignedInfo = null;
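(Sketch: the "_na_" literal above is a placeholder index UUID. Index and ShardId now pair the index name with its UUID, and tests that have no real index metadata pass the placeholder. Names below are illustrative.)

    import org.elasticsearch.index.Index;
    import org.elasticsearch.index.shard.ShardId;

    class ShardIdSketch {
        static ShardId placeholderShardId() {
            // An Index is now (name, uuid); "_na_" stands in when no metadata exists.
            Index index = new Index("test", "_na_");
            ShardId viaIndex = new ShardId(index, 0);            // (Index, shard number)
            ShardId viaStrings = new ShardId("test", "_na_", 0); // (name, uuid, shard)
            assert viaIndex.equals(viaStrings);
            return viaIndex;
        }
    }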
diff --git a/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java b/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java
index f581e4c91f..a74102f696 100644
--- a/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java
@@ -29,6 +29,7 @@ import org.elasticsearch.cluster.routing.ShardRoutingHelper;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardPath;
import org.elasticsearch.index.store.StoreStats;
import org.elasticsearch.monitor.fs.FsInfo;
@@ -91,13 +92,14 @@ public class DiskUsageTests extends ESTestCase {
}
public void testFillShardLevelInfo() {
- ShardRouting test_0 = ShardRouting.newUnassigned("test", 0, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
+ final Index index = new Index("test", "_na_");
+ ShardRouting test_0 = ShardRouting.newUnassigned(index, 0, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
ShardRoutingHelper.initialize(test_0, "node1");
ShardRoutingHelper.moveToStarted(test_0);
Path test0Path = createTempDir().resolve("indices").resolve("test").resolve("0");
CommonStats commonStats0 = new CommonStats();
commonStats0.store = new StoreStats(100, 1);
- ShardRouting test_1 = ShardRouting.newUnassigned("test", 1, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
+ ShardRouting test_1 = ShardRouting.newUnassigned(index, 1, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
ShardRoutingHelper.initialize(test_1, "node2");
ShardRoutingHelper.moveToStarted(test_1);
Path test1Path = createTempDir().resolve("indices").resolve("test").resolve("1");
@@ -113,8 +115,8 @@ public class DiskUsageTests extends ESTestCase {
assertEquals(2, shardSizes.size());
assertTrue(shardSizes.containsKey(ClusterInfo.shardIdentifierFromRouting(test_0)));
assertTrue(shardSizes.containsKey(ClusterInfo.shardIdentifierFromRouting(test_1)));
- assertEquals(100l, shardSizes.get(ClusterInfo.shardIdentifierFromRouting(test_0)).longValue());
- assertEquals(1000l, shardSizes.get(ClusterInfo.shardIdentifierFromRouting(test_1)).longValue());
+ assertEquals(100L, shardSizes.get(ClusterInfo.shardIdentifierFromRouting(test_0)).longValue());
+ assertEquals(1000L, shardSizes.get(ClusterInfo.shardIdentifierFromRouting(test_1)).longValue());
assertEquals(2, routingToPath.size());
assertTrue(routingToPath.containsKey(test_0));
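(Sketch: ShardRouting.newUnassigned makes the same move and now takes the Index object instead of a bare name. The snippet mirrors the test above; the node name is illustrative.)

    import org.elasticsearch.cluster.routing.ShardRouting;
    import org.elasticsearch.cluster.routing.ShardRoutingHelper;
    import org.elasticsearch.cluster.routing.UnassignedInfo;
    import org.elasticsearch.index.Index;

    class UnassignedShardSketch {
        static ShardRouting startedShard() {
            Index index = new Index("test", "_na_");
            ShardRouting shard = ShardRouting.newUnassigned(
                    index, 0, null /* no restore source */, false /* replica */,
                    new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "sketch"));
            ShardRoutingHelper.initialize(shard, "node1"); // assign to a node
            ShardRoutingHelper.moveToStarted(shard);       // mark it active
            return shard;
        }
    }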
diff --git a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java
index 2d726d9742..78128fe30f 100644
--- a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java
@@ -62,6 +62,7 @@ import static org.hamcrest.Matchers.nullValue;
@ClusterScope(scope = Scope.TEST, numDataNodes = 0)
@ESIntegTestCase.SuppressLocalMode
+@TestLogging("_root:DEBUG,cluster.service:TRACE,discovery.zen:TRACE")
public class MinimumMasterNodesIT extends ESIntegTestCase {
@Override
@@ -71,13 +72,12 @@ public class MinimumMasterNodesIT extends ESIntegTestCase {
return classes;
}
- @TestLogging("cluster.service:TRACE,discovery.zen:TRACE,gateway:TRACE,transport.tracer:TRACE")
public void testSimpleMinimumMasterNodes() throws Exception {
Settings settings = settingsBuilder()
.put("discovery.type", "zen")
.put("discovery.zen.minimum_master_nodes", 2)
- .put(ZenDiscovery.SETTING_PING_TIMEOUT, "200ms")
+ .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "200ms")
.put("discovery.initial_state_timeout", "500ms")
.build();
@@ -117,7 +117,7 @@ public class MinimumMasterNodesIT extends ESIntegTestCase {
logger.info("--> verify we the data back");
for (int i = 0; i < 10; i++) {
- assertThat(client().prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(100l));
+ assertThat(client().prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(100L));
}
internalCluster().stopCurrentMasterNode();
@@ -189,7 +189,7 @@ public class MinimumMasterNodesIT extends ESIntegTestCase {
Settings settings = settingsBuilder()
.put("discovery.type", "zen")
.put("discovery.zen.minimum_master_nodes", 3)
- .put(ZenDiscovery.SETTING_PING_TIMEOUT, "1s")
+ .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "1s")
.put("discovery.initial_state_timeout", "500ms")
.build();
@@ -264,7 +264,7 @@ public class MinimumMasterNodesIT extends ESIntegTestCase {
public void testDynamicUpdateMinimumMasterNodes() throws Exception {
Settings settings = settingsBuilder()
.put("discovery.type", "zen")
- .put(ZenDiscovery.SETTING_PING_TIMEOUT, "400ms")
+ .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "400ms")
.put("discovery.initial_state_timeout", "500ms")
.build();
@@ -279,7 +279,7 @@ public class MinimumMasterNodesIT extends ESIntegTestCase {
setMinimumMasterNodes(2);
// make sure it has been processed on all nodes (master node spawns a secondary cluster state update task)
- for (Client client : internalCluster()) {
+ for (Client client : internalCluster().getClients()) {
assertThat(client.admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setLocal(true).get().isTimedOut(),
equalTo(false));
}
@@ -303,7 +303,7 @@ public class MinimumMasterNodesIT extends ESIntegTestCase {
assertTrue(awaitBusy(
() -> {
boolean success = true;
- for (Client client : internalCluster()) {
+ for (Client client : internalCluster().getClients()) {
boolean clientHasNoMasterBlock = hasNoMasterBlock.test(client);
if (logger.isDebugEnabled()) {
logger.debug("Checking for NO_MASTER_BLOCK on client: {} NO_MASTER_BLOCK: [{}]", client, clientHasNoMasterBlock);
@@ -322,7 +322,7 @@ public class MinimumMasterNodesIT extends ESIntegTestCase {
int nodeCount = scaledRandomIntBetween(1, 5);
Settings.Builder settings = settingsBuilder()
.put("discovery.type", "zen")
- .put(ZenDiscovery.SETTING_PING_TIMEOUT, "200ms")
+ .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "200ms")
.put("discovery.initial_state_timeout", "500ms");
// set an initial value which is at least quorum to avoid split brains during initial startup
@@ -361,8 +361,8 @@ public class MinimumMasterNodesIT extends ESIntegTestCase {
public void testCanNotPublishWithoutMinMasterNodes() throws Exception {
Settings settings = settingsBuilder()
.put("discovery.type", "zen")
- .put(FaultDetection.SETTING_PING_TIMEOUT, "1h") // disable it
- .put(ZenDiscovery.SETTING_PING_TIMEOUT, "200ms")
+ .put(FaultDetection.PING_TIMEOUT_SETTING.getKey(), "1h") // disable it
+ .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "200ms")
.put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2)
.put(DiscoverySettings.COMMIT_TIMEOUT_SETTING.getKey(), "100ms") // speed things up
.build();
diff --git a/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java b/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java
index 8e5479d6f8..370f1464fd 100644
--- a/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java
@@ -65,7 +65,7 @@ public class NoMasterNodeIT extends ESIntegTestCase {
.put("discovery.type", "zen")
.put("action.auto_create_index", autoCreateIndex)
.put("discovery.zen.minimum_master_nodes", 2)
- .put(ZenDiscovery.SETTING_PING_TIMEOUT, "200ms")
+ .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "200ms")
.put("discovery.initial_state_timeout", "500ms")
.put(DiscoverySettings.NO_MASTER_BLOCK_SETTING.getKey(), "all")
.build();
@@ -217,7 +217,7 @@ public class NoMasterNodeIT extends ESIntegTestCase {
.put("discovery.type", "zen")
.put("action.auto_create_index", false)
.put("discovery.zen.minimum_master_nodes", 2)
- .put(ZenDiscovery.SETTING_PING_TIMEOUT, "200ms")
+ .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "200ms")
.put("discovery.initial_state_timeout", "500ms")
.put(DiscoverySettings.NO_MASTER_BLOCK_SETTING.getKey(), "write")
.build();
@@ -248,10 +248,10 @@ public class NoMasterNodeIT extends ESIntegTestCase {
assertExists(getResponse);
SearchResponse countResponse = client().prepareSearch("test1").setSize(0).get();
- assertHitCount(countResponse, 1l);
+ assertHitCount(countResponse, 1L);
SearchResponse searchResponse = client().prepareSearch("test1").get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
countResponse = client().prepareSearch("test2").setSize(0).get();
assertThat(countResponse.getTotalShards(), equalTo(2));
diff --git a/core/src/test/java/org/elasticsearch/cluster/SimpleClusterStateIT.java b/core/src/test/java/org/elasticsearch/cluster/SimpleClusterStateIT.java
index d78356cbf6..e838989e2a 100644
--- a/core/src/test/java/org/elasticsearch/cluster/SimpleClusterStateIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/SimpleClusterStateIT.java
@@ -90,8 +90,8 @@ public class SimpleClusterStateIT extends ESIntegTestCase {
.setTemplate("te*")
.setOrder(0)
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
- .startObject("field1").field("type", "string").field("store", "yes").endObject()
- .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject()
+ .startObject("field1").field("type", "string").field("store", true).endObject()
+ .startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject()
.endObject().endObject().endObject())
.get();
diff --git a/core/src/test/java/org/elasticsearch/cluster/SimpleDataNodesIT.java b/core/src/test/java/org/elasticsearch/cluster/SimpleDataNodesIT.java
index bc3aea4ac7..44565a269b 100644
--- a/core/src/test/java/org/elasticsearch/cluster/SimpleDataNodesIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/SimpleDataNodesIT.java
@@ -23,6 +23,7 @@ import org.elasticsearch.action.UnavailableShardsException;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.Priority;
+import org.elasticsearch.node.Node;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
@@ -38,7 +39,7 @@ import static org.hamcrest.Matchers.equalTo;
@ClusterScope(scope= Scope.TEST, numDataNodes =0)
public class SimpleDataNodesIT extends ESIntegTestCase {
public void testDataNodes() throws Exception {
- internalCluster().startNode(settingsBuilder().put("node.data", false).build());
+ internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).build());
client().admin().indices().create(createIndexRequest("test")).actionGet();
try {
client().index(Requests.indexRequest("test").type("type1").id("1").source(source("1", "test")).timeout(timeValueSeconds(1))).actionGet();
@@ -47,7 +48,7 @@ public class SimpleDataNodesIT extends ESIntegTestCase {
// all is well
}
- internalCluster().startNode(settingsBuilder().put("node.data", false).build());
+ internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).build());
assertThat(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForNodes("2").setLocal(true).execute().actionGet().isTimedOut(), equalTo(false));
// still no shard should be allocated
@@ -59,7 +60,7 @@ public class SimpleDataNodesIT extends ESIntegTestCase {
}
// now, start a data node, and see that it gets shards
- internalCluster().startNode(settingsBuilder().put("node.data", true).build());
+ internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), true).build());
assertThat(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForNodes("3").setLocal(true).execute().actionGet().isTimedOut(), equalTo(false));
IndexResponse indexResponse = client().index(Requests.indexRequest("test").type("type1").id("1").source(source("1", "test"))).actionGet();
diff --git a/core/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java b/core/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java
index 90c39d7bbe..934a4d0cb8 100644
--- a/core/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java
@@ -23,6 +23,7 @@ import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.MasterNotDiscoveredException;
import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.node.Node;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
@@ -43,7 +44,7 @@ public class SpecificMasterNodesIT extends ESIntegTestCase {
public void testSimpleOnlyMasterNodeElection() throws IOException {
logger.info("--> start data node / non master node");
- internalCluster().startNode(settingsBuilder().put("node.data", true).put("node.master", false).put("discovery.initial_state_timeout", "1s"));
+ internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false).put("discovery.initial_state_timeout", "1s"));
try {
assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("100ms").execute().actionGet().getState().nodes().masterNodeId(), nullValue());
fail("should not be able to find master");
@@ -51,7 +52,7 @@ public class SpecificMasterNodesIT extends ESIntegTestCase {
// all is well, no master elected
}
logger.info("--> start master node");
- final String masterNodeName = internalCluster().startNode(settingsBuilder().put("node.data", false).put("node.master", true));
+ final String masterNodeName = internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true));
assertThat(internalCluster().nonMasterClient().admin().cluster().prepareState().execute().actionGet().getState().nodes().masterNode().name(), equalTo(masterNodeName));
assertThat(internalCluster().masterClient().admin().cluster().prepareState().execute().actionGet().getState().nodes().masterNode().name(), equalTo(masterNodeName));
@@ -66,14 +67,14 @@ public class SpecificMasterNodesIT extends ESIntegTestCase {
}
logger.info("--> start master node");
- final String nextMasterEligibleNodeName = internalCluster().startNode(settingsBuilder().put("node.data", false).put("node.master", true));
+ final String nextMasterEligibleNodeName = internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true));
assertThat(internalCluster().nonMasterClient().admin().cluster().prepareState().execute().actionGet().getState().nodes().masterNode().name(), equalTo(nextMasterEligibleNodeName));
assertThat(internalCluster().masterClient().admin().cluster().prepareState().execute().actionGet().getState().nodes().masterNode().name(), equalTo(nextMasterEligibleNodeName));
}
public void testElectOnlyBetweenMasterNodes() throws IOException {
logger.info("--> start data node / non master node");
- internalCluster().startNode(settingsBuilder().put("node.data", true).put("node.master", false).put("discovery.initial_state_timeout", "1s"));
+ internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false).put("discovery.initial_state_timeout", "1s"));
try {
assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("100ms").execute().actionGet().getState().nodes().masterNodeId(), nullValue());
fail("should not be able to find master");
@@ -81,12 +82,12 @@ public class SpecificMasterNodesIT extends ESIntegTestCase {
// all is well, no master elected
}
logger.info("--> start master node (1)");
- final String masterNodeName = internalCluster().startNode(settingsBuilder().put("node.data", false).put("node.master", true));
+ final String masterNodeName = internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true));
assertThat(internalCluster().nonMasterClient().admin().cluster().prepareState().execute().actionGet().getState().nodes().masterNode().name(), equalTo(masterNodeName));
assertThat(internalCluster().masterClient().admin().cluster().prepareState().execute().actionGet().getState().nodes().masterNode().name(), equalTo(masterNodeName));
logger.info("--> start master node (2)");
- final String nextMasterEligableNodeName = internalCluster().startNode(settingsBuilder().put("node.data", false).put("node.master", true));
+ final String nextMasterEligableNodeName = internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true));
assertThat(internalCluster().nonMasterClient().admin().cluster().prepareState().execute().actionGet().getState().nodes().masterNode().name(), equalTo(masterNodeName));
assertThat(internalCluster().nonMasterClient().admin().cluster().prepareState().execute().actionGet().getState().nodes().masterNode().name(), equalTo(masterNodeName));
assertThat(internalCluster().masterClient().admin().cluster().prepareState().execute().actionGet().getState().nodes().masterNode().name(), equalTo(masterNodeName));
@@ -103,10 +104,10 @@ public class SpecificMasterNodesIT extends ESIntegTestCase {
*/
public void testCustomDefaultMapping() throws Exception {
logger.info("--> start master node / non data");
- internalCluster().startNode(settingsBuilder().put("node.data", false).put("node.master", true));
+ internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true));
logger.info("--> start data node / non master node");
- internalCluster().startNode(settingsBuilder().put("node.data", true).put("node.master", false));
+ internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false));
createIndex("test");
assertAcked(client().admin().indices().preparePutMapping("test").setType("_default_").setSource("_timestamp", "enabled=true"));
@@ -123,10 +124,10 @@ public class SpecificMasterNodesIT extends ESIntegTestCase {
public void testAliasFilterValidation() throws Exception {
logger.info("--> start master node / non data");
- internalCluster().startNode(settingsBuilder().put("node.data", false).put("node.master", true));
+ internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true));
logger.info("--> start data node / non master node");
- internalCluster().startNode(settingsBuilder().put("node.data", true).put("node.master", false));
+ internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false));
assertAcked(prepareCreate("test").addMapping("type1", "{\"type1\" : {\"properties\" : {\"table_a\" : { \"type\" : \"nested\", \"properties\" : {\"field_a\" : { \"type\" : \"string\" },\"field_b\" :{ \"type\" : \"string\" }}}}}}"));
client().admin().indices().prepareAliases().addAlias("test", "a_test", QueryBuilders.nestedQuery("table_a", QueryBuilders.termQuery("table_a.field_b", "y"))).get();
diff --git a/core/src/test/java/org/elasticsearch/cluster/UpdateSettingsValidationIT.java b/core/src/test/java/org/elasticsearch/cluster/UpdateSettingsValidationIT.java
index 526f64a8b4..43a455cb84 100644
--- a/core/src/test/java/org/elasticsearch/cluster/UpdateSettingsValidationIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/UpdateSettingsValidationIT.java
@@ -21,6 +21,7 @@ package org.elasticsearch.cluster;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.common.Priority;
+import org.elasticsearch.node.Node;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
@@ -36,9 +37,9 @@ import static org.hamcrest.Matchers.equalTo;
public class UpdateSettingsValidationIT extends ESIntegTestCase {
public void testUpdateSettingsValidation() throws Exception {
List<String> nodes = internalCluster().startNodesAsync(
- settingsBuilder().put("node.data", false).build(),
- settingsBuilder().put("node.master", false).build(),
- settingsBuilder().put("node.master", false).build()
+ settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).build(),
+ settingsBuilder().put(Node.NODE_MASTER_SETTING.getKey(), false).build(),
+ settingsBuilder().put(Node.NODE_MASTER_SETTING.getKey(), false).build()
).get();
String master = nodes.get(0);
String node_1 = nodes.get(1);
diff --git a/core/src/test/java/org/elasticsearch/cluster/ack/AckIT.java b/core/src/test/java/org/elasticsearch/cluster/ack/AckIT.java
index 9f646d0df5..2ec3b11a16 100644
--- a/core/src/test/java/org/elasticsearch/cluster/ack/AckIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/ack/AckIT.java
@@ -38,6 +38,8 @@ import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.DiscoverySettings;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
@@ -60,7 +62,7 @@ public class AckIT extends ESIntegTestCase {
//otherwise the operation is most likely acknowledged even if it doesn't support ack
return Settings.builder().put(super.nodeSettings(nodeOrdinal))
.put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), 0).build();
-}
+ }
public void testUpdateSettingsAcknowledgement() {
createIndex("test");
@@ -83,13 +85,15 @@ public class AckIT extends ESIntegTestCase {
public void testClusterRerouteAcknowledgement() throws InterruptedException {
assertAcked(prepareCreate("test").setSettings(Settings.builder()
- .put(indexSettings())
- .put(SETTING_NUMBER_OF_SHARDS, between(cluster().numDataNodes(), DEFAULT_MAX_NUM_SHARDS))
- .put(SETTING_NUMBER_OF_REPLICAS, 0)
+ .put(indexSettings())
+ .put(SETTING_NUMBER_OF_SHARDS, between(cluster().numDataNodes(), DEFAULT_MAX_NUM_SHARDS))
+ .put(SETTING_NUMBER_OF_REPLICAS, 0)
));
ensureGreen();
MoveAllocationCommand moveAllocationCommand = getAllocationCommand();
+ final Index index = client().admin().cluster().prepareState().get().getState().metaData().index("test").getIndex();
+ final ShardId commandShard = new ShardId(index, moveAllocationCommand.shardId());
assertAcked(client().admin().cluster().prepareReroute().add(moveAllocationCommand));
@@ -97,7 +101,7 @@ public class AckIT extends ESIntegTestCase {
ClusterState clusterState = getLocalClusterState(client);
for (ShardRouting shardRouting : clusterState.getRoutingNodes().routingNodeIter(moveAllocationCommand.fromNode())) {
//if the shard that we wanted to move is still on the same node, it must be relocating
- if (shardRouting.shardId().equals(moveAllocationCommand.shardId())) {
+ if (shardRouting.shardId().equals(commandShard)) {
assertThat(shardRouting.relocating(), equalTo(true));
}
@@ -105,7 +109,7 @@ public class AckIT extends ESIntegTestCase {
boolean found = false;
for (ShardRouting shardRouting : clusterState.getRoutingNodes().routingNodeIter(moveAllocationCommand.toNode())) {
- if (shardRouting.shardId().equals(moveAllocationCommand.shardId())) {
+ if (shardRouting.shardId().equals(commandShard)) {
assertThat(shardRouting.state(), anyOf(equalTo(ShardRoutingState.INITIALIZING), equalTo(ShardRoutingState.STARTED)));
found = true;
break;
@@ -137,6 +141,9 @@ public class AckIT extends ESIntegTestCase {
MoveAllocationCommand moveAllocationCommand = getAllocationCommand();
+ final Index index = client().admin().cluster().prepareState().get().getState().metaData().index("test").getIndex();
+ final ShardId commandShard = new ShardId(index, moveAllocationCommand.shardId());
+
assertAcked(client().admin().cluster().prepareReroute().setDryRun(true).add(moveAllocationCommand));
//testing only on master with the latest cluster state as we didn't make any change thus we cannot guarantee that
@@ -145,7 +152,7 @@ public class AckIT extends ESIntegTestCase {
boolean found = false;
for (ShardRouting shardRouting : clusterStateResponse.getState().getRoutingNodes().routingNodeIter(moveAllocationCommand.fromNode())) {
//the shard that we wanted to move is still on the same node, as we had dryRun flag
- if (shardRouting.shardId().equals(moveAllocationCommand.shardId())) {
+ if (shardRouting.shardId().equals(commandShard)) {
assertThat(shardRouting.started(), equalTo(true));
found = true;
break;
@@ -154,7 +161,7 @@ public class AckIT extends ESIntegTestCase {
assertThat(found, equalTo(true));
for (ShardRouting shardRouting : clusterStateResponse.getState().getRoutingNodes().routingNodeIter(moveAllocationCommand.toNode())) {
- if (shardRouting.shardId().equals(moveAllocationCommand.shardId())) {
+ if (shardRouting.shardId().equals(commandShard)) {
fail("shard [" + shardRouting + "] shouldn't be on node [" + moveAllocationCommand.toString() + "]");
}
}
@@ -199,7 +206,7 @@ public class AckIT extends ESIntegTestCase {
assertNotNull(shardToBeMoved);
logger.info("==> going to move shard [{}] from [{}] to [{}]", shardToBeMoved, fromNodeId, toNodeId);
- return new MoveAllocationCommand(shardToBeMoved.shardId(), fromNodeId, toNodeId);
+ return new MoveAllocationCommand(shardToBeMoved.getIndexName(), shardToBeMoved.id(), fromNodeId, toNodeId);
}
public void testIndicesAliasesAcknowledgement() {
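(Sketch: MoveAllocationCommand drops its ShardId constructor for (index name, shard number, from node, to node), since building a ShardId now requires the index UUID; when the test needs to compare against routings it resolves the real ShardId from cluster state, as shown above. Node names below are illustrative.)

    import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand;

    class MoveCommandSketch {
        static MoveAllocationCommand move() {
            // name + shard number; no UUID is needed at the call site
            return new MoveAllocationCommand("test", 0, "node1", "node2");
        }
    }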
diff --git a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java
new file mode 100644
index 0000000000..4e8d1d9266
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java
@@ -0,0 +1,241 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.cluster.action.shard;
+
+import org.apache.lucene.index.CorruptIndexException;
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.ClusterStateTaskExecutor;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.cluster.metadata.MetaData;
+import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.cluster.routing.GroupShardsIterator;
+import org.elasticsearch.cluster.routing.RoutingNodes;
+import org.elasticsearch.cluster.routing.RoutingTable;
+import org.elasticsearch.cluster.routing.ShardIterator;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.ShardRoutingState;
+import org.elasticsearch.cluster.routing.TestShardRouting;
+import org.elasticsearch.cluster.routing.allocation.AllocationService;
+import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation;
+import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
+import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
+import org.elasticsearch.test.ESAllocationTestCase;
+import org.junit.Before;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+
+import static org.elasticsearch.common.settings.Settings.settingsBuilder;
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.not;
+
+public class ShardFailedClusterStateTaskExecutorTests extends ESAllocationTestCase {
+
+ private static final String INDEX = "INDEX";
+ private AllocationService allocationService;
+ private int numberOfReplicas;
+ private MetaData metaData;
+ private RoutingTable routingTable;
+ private ClusterState clusterState;
+ private ShardStateAction.ShardFailedClusterStateTaskExecutor executor;
+
+ @Before
+ public void setUp() throws Exception {
+ super.setUp();
+ allocationService = createAllocationService(settingsBuilder()
+ .put("cluster.routing.allocation.node_concurrent_recoveries", 8)
+ .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always")
+ .build());
+ numberOfReplicas = randomIntBetween(2, 16);
+ metaData = MetaData.builder()
+ .put(IndexMetaData.builder(INDEX).settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(numberOfReplicas))
+ .build();
+ routingTable = RoutingTable.builder()
+ .addAsNew(metaData.index(INDEX))
+ .build();
+ clusterState = ClusterState.builder(ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).build();
+ executor = new ShardStateAction.ShardFailedClusterStateTaskExecutor(allocationService, null, logger);
+ }
+
+ public void testEmptyTaskListProducesSameClusterState() throws Exception {
+ List<ShardStateAction.ShardRoutingEntry> tasks = Collections.emptyList();
+ ClusterStateTaskExecutor.BatchResult<ShardStateAction.ShardRoutingEntry> result =
+ executor.execute(clusterState, tasks);
+ assertTasksSuccessful(tasks, result, clusterState, false);
+ }
+
+ public void testDuplicateFailuresAreOkay() throws Exception {
+ String reason = "test duplicate failures are okay";
+ ClusterState currentState = createClusterStateWithStartedShards(reason);
+ List<ShardStateAction.ShardRoutingEntry> tasks = createExistingShards(currentState, reason);
+ ClusterStateTaskExecutor.BatchResult<ShardStateAction.ShardRoutingEntry> result = executor.execute(currentState, tasks);
+ assertTasksSuccessful(tasks, result, clusterState, true);
+ }
+
+ public void testNonExistentShardsAreMarkedAsSuccessful() throws Exception {
+ String reason = "test non existent shards are marked as successful";
+ ClusterState currentState = createClusterStateWithStartedShards(reason);
+ List<ShardStateAction.ShardRoutingEntry> tasks = createNonExistentShards(currentState, reason);
+ ClusterStateTaskExecutor.BatchResult<ShardStateAction.ShardRoutingEntry> result = executor.execute(clusterState, tasks);
+ assertTasksSuccessful(tasks, result, clusterState, false);
+ }
+
+ public void testTriviallySuccessfulTasksBatchedWithFailingTasks() throws Exception {
+ String reason = "test trivially successful tasks batched with failing tasks";
+ ClusterState currentState = createClusterStateWithStartedShards(reason);
+ List<ShardStateAction.ShardRoutingEntry> failingTasks = createExistingShards(currentState, reason);
+ List<ShardStateAction.ShardRoutingEntry> nonExistentTasks = createNonExistentShards(currentState, reason);
+ ShardStateAction.ShardFailedClusterStateTaskExecutor failingExecutor = new ShardStateAction.ShardFailedClusterStateTaskExecutor(allocationService, null, logger) {
+ @Override
+ RoutingAllocation.Result applyFailedShards(ClusterState currentState, List<FailedRerouteAllocation.FailedShard> failedShards) {
+ throw new RuntimeException("simulated applyFailedShards failure");
+ }
+ };
+ List<ShardStateAction.ShardRoutingEntry> tasks = new ArrayList<>();
+ tasks.addAll(failingTasks);
+ tasks.addAll(nonExistentTasks);
+ ClusterStateTaskExecutor.BatchResult<ShardStateAction.ShardRoutingEntry> result = failingExecutor.execute(currentState, tasks);
+ Map<ShardStateAction.ShardRoutingEntry, Boolean> taskResultMap =
+ failingTasks.stream().collect(Collectors.toMap(Function.identity(), task -> false));
+ taskResultMap.putAll(nonExistentTasks.stream().collect(Collectors.toMap(Function.identity(), task -> true)));
+ assertTaskResults(taskResultMap, result, currentState, false);
+ }
+
+ private ClusterState createClusterStateWithStartedShards(String reason) {
+ int numberOfNodes = 1 + numberOfReplicas;
+ DiscoveryNodes.Builder nodes = DiscoveryNodes.builder();
+ IntStream.rangeClosed(1, numberOfNodes).mapToObj(node -> newNode("node" + node)).forEach(nodes::put);
+ ClusterState stateAfterAddingNode =
+ ClusterState.builder(clusterState).nodes(nodes).build();
+ RoutingTable afterReroute =
+ allocationService.reroute(stateAfterAddingNode, reason).routingTable();
+ ClusterState stateAfterReroute = ClusterState.builder(stateAfterAddingNode).routingTable(afterReroute).build();
+ RoutingNodes routingNodes = stateAfterReroute.getRoutingNodes();
+ RoutingTable afterStart =
+ allocationService.applyStartedShards(stateAfterReroute, routingNodes.shardsWithState(ShardRoutingState.INITIALIZING)).routingTable();
+ return ClusterState.builder(stateAfterReroute).routingTable(afterStart).build();
+ }
+
+ private List<ShardStateAction.ShardRoutingEntry> createExistingShards(ClusterState currentState, String reason) {
+ List<ShardRouting> shards = new ArrayList<>();
+ GroupShardsIterator shardGroups =
+ currentState.routingTable().allAssignedShardsGrouped(new String[] { INDEX }, true);
+ for (ShardIterator shardIt : shardGroups) {
+ for (ShardRouting shard : shardIt.asUnordered()) {
+ shards.add(shard);
+ }
+ }
+ List<ShardRouting> failures = randomSubsetOf(randomIntBetween(1, 1 + shards.size() / 4), shards.toArray(new ShardRouting[0]));
+ String indexUUID = metaData.index(INDEX).getIndexUUID();
+ int numberOfTasks = randomIntBetween(failures.size(), 2 * failures.size());
+ List<ShardRouting> shardsToFail = new ArrayList<>(numberOfTasks);
+ for (int i = 0; i < numberOfTasks; i++) {
+ shardsToFail.add(randomFrom(failures));
+ }
+ return toTasks(shardsToFail, indexUUID, reason);
+ }
+
+ private List<ShardStateAction.ShardRoutingEntry> createNonExistentShards(ClusterState currentState, String reason) {
+ // add shards from a non-existent index
+ MetaData nonExistentMetaData =
+ MetaData.builder()
+ .put(IndexMetaData.builder("non-existent").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(numberOfReplicas))
+ .build();
+ RoutingTable routingTable = RoutingTable.builder().addAsNew(nonExistentMetaData.index("non-existent")).build();
+ String nonExistentIndexUUID = nonExistentMetaData.index("non-existent").getIndexUUID();
+
+ List<ShardStateAction.ShardRoutingEntry> existingShards = createExistingShards(currentState, reason);
+ List<ShardStateAction.ShardRoutingEntry> shardsWithMismatchedAllocationIds = new ArrayList<>();
+ for (ShardStateAction.ShardRoutingEntry existingShard : existingShards) {
+ ShardRouting sr = existingShard.getShardRouting();
+ ShardRouting nonExistentShardRouting =
+ TestShardRouting.newShardRouting(sr.index(), sr.id(), sr.currentNodeId(), sr.relocatingNodeId(), sr.restoreSource(), sr.primary(), sr.state(), sr.version());
+ shardsWithMismatchedAllocationIds.add(new ShardStateAction.ShardRoutingEntry(nonExistentShardRouting, existingShard.indexUUID, existingShard.message, existingShard.failure));
+ }
+
+ List<ShardStateAction.ShardRoutingEntry> tasks = new ArrayList<>();
+ tasks.addAll(toTasks(routingTable.allShards(), nonExistentIndexUUID, reason));
+ tasks.addAll(shardsWithMismatchedAllocationIds);
+ return tasks;
+ }
+
+ private static void assertTasksSuccessful(
+ List<ShardStateAction.ShardRoutingEntry> tasks,
+ ClusterStateTaskExecutor.BatchResult<ShardStateAction.ShardRoutingEntry> result,
+ ClusterState clusterState,
+ boolean clusterStateChanged
+ ) {
+ Map<ShardStateAction.ShardRoutingEntry, Boolean> taskResultMap =
+ tasks.stream().collect(Collectors.toMap(Function.identity(), task -> true));
+ assertTaskResults(taskResultMap, result, clusterState, clusterStateChanged);
+ }
+
+ private static void assertTaskResults(
+ Map<ShardStateAction.ShardRoutingEntry, Boolean> taskResultMap,
+ ClusterStateTaskExecutor.BatchResult<ShardStateAction.ShardRoutingEntry> result,
+ ClusterState clusterState,
+ boolean clusterStateChanged
+ ) {
+ // there should be as many task results as tasks
+ assertEquals(taskResultMap.size(), result.executionResults.size());
+
+ for (Map.Entry<ShardStateAction.ShardRoutingEntry, Boolean> entry : taskResultMap.entrySet()) {
+ // every task should have a corresponding task result
+ assertTrue(result.executionResults.containsKey(entry.getKey()));
+
+ // the task results are as expected
+ assertEquals(entry.getValue(), result.executionResults.get(entry.getKey()).isSuccess());
+ }
+
+ // every shard that we requested to be successfully failed is
+ // gone
+ List<ShardRouting> shards = clusterState.getRoutingTable().allShards();
+ for (Map.Entry<ShardStateAction.ShardRoutingEntry, Boolean> entry : taskResultMap.entrySet()) {
+ if (entry.getValue()) {
+ for (ShardRouting shard : shards) {
+ if (entry.getKey().getShardRouting().allocationId() != null) {
+ assertThat(shard.allocationId(), not(equalTo(entry.getKey().getShardRouting().allocationId())));
+ }
+ }
+ }
+ }
+
+ if (clusterStateChanged) {
+ assertNotSame(clusterState, result.resultingState);
+ } else {
+ assertSame(clusterState, result.resultingState);
+ }
+ }
+
+ private static List<ShardStateAction.ShardRoutingEntry> toTasks(List<ShardRouting> shards, String indexUUID, String message) {
+ return shards
+ .stream()
+ .map(shard -> new ShardStateAction.ShardRoutingEntry(shard, indexUUID, message, new CorruptIndexException("simulated", indexUUID)))
+ .collect(Collectors.toList());
+ }
+
+}
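(Sketch: the new test pins down the executor's batching contract, namely one execution result per submitted task and an identical state instance when nothing changed. A condensed restatement under those assumptions:)

    import java.util.List;

    import org.elasticsearch.cluster.ClusterState;
    import org.elasticsearch.cluster.ClusterStateTaskExecutor;
    import org.elasticsearch.cluster.action.shard.ShardStateAction;

    class BatchContractSketch {
        static void checkContract(
                ShardStateAction.ShardFailedClusterStateTaskExecutor executor,
                ClusterState clusterState,
                List<ShardStateAction.ShardRoutingEntry> tasks) throws Exception {
            ClusterStateTaskExecutor.BatchResult<ShardStateAction.ShardRoutingEntry> result =
                    executor.execute(clusterState, tasks);
            // one result per submitted task
            assert result.executionResults.size() == tasks.size();
            // an empty batch must hand back the very same state instance
            if (tasks.isEmpty()) {
                assert result.resultingState == clusterState;
            }
        }
    }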
diff --git a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java
index c59405f234..8a13e6e6dd 100644
--- a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java
@@ -27,19 +27,19 @@ import org.elasticsearch.cluster.NotMasterException;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.routing.RoutingService;
+import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardsIterator;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.Discovery;
+import org.elasticsearch.index.shard.ShardNotFoundException;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.cluster.TestClusterService;
import org.elasticsearch.test.transport.CapturingTransport;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.NodeDisconnectedException;
import org.elasticsearch.transport.NodeNotConnectedException;
-import org.elasticsearch.transport.RemoteTransportException;
-import org.elasticsearch.transport.SendRequestTransportException;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportService;
@@ -48,8 +48,6 @@ import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
-import java.util.ArrayList;
-import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
@@ -72,7 +70,7 @@ public class ShardStateActionTests extends ESTestCase {
private static class TestShardStateAction extends ShardStateAction {
public TestShardStateAction(Settings settings, ClusterService clusterService, TransportService transportService, AllocationService allocationService, RoutingService routingService) {
- super(settings, clusterService, transportService, allocationService, routingService);
+ super(settings, clusterService, transportService, allocationService, routingService, THREAD_POOL);
}
private Runnable onBeforeWaitForNewMasterAndRetry;
@@ -293,6 +291,41 @@ public class ShardStateActionTests extends ESTestCase {
assertTrue(failure.get());
}
+ public void testShardNotFound() throws InterruptedException {
+ final String index = "test";
+
+ clusterService.setState(stateWithStartedPrimary(index, true, randomInt(5)));
+
+ String indexUUID = clusterService.state().metaData().index(index).getIndexUUID();
+
+ AtomicBoolean success = new AtomicBoolean();
+ CountDownLatch latch = new CountDownLatch(1);
+
+ ShardRouting failedShard = getRandomShardRouting(index);
+ RoutingTable routingTable = RoutingTable.builder(clusterService.state().getRoutingTable()).remove(index).build();
+ clusterService.setState(ClusterState.builder(clusterService.state()).routingTable(routingTable));
+ shardStateAction.shardFailed(failedShard, indexUUID, "test", getSimulatedFailure(), new ShardStateAction.Listener() {
+ @Override
+ public void onSuccess() {
+ success.set(true);
+ latch.countDown();
+ }
+
+ @Override
+ public void onFailure(Throwable t) {
+ success.set(false);
+ latch.countDown();
+ assert false;
+ }
+ });
+
+ CapturingTransport.CapturedRequest[] capturedRequests = transport.getCapturedRequestsAndClear();
+ transport.handleResponse(capturedRequests[0].requestId, TransportResponse.Empty.INSTANCE);
+
+ latch.await();
+ assertTrue(success.get());
+ }
+
private ShardRouting getRandomShardRouting(String index) {
IndexRoutingTable indexRoutingTable = clusterService.state().routingTable().index(index);
ShardsIterator shardsIterator = indexRoutingTable.randomAllActiveShardsIt();
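(Sketch: the new testShardNotFound exercises the shardFailed listener on ShardStateAction. The general call shape, with an illustrative message and class name:)

    import java.util.concurrent.CountDownLatch;

    import org.elasticsearch.cluster.action.shard.ShardStateAction;
    import org.elasticsearch.cluster.routing.ShardRouting;

    class ShardFailedListenerSketch {
        static void failShard(ShardStateAction shardStateAction, ShardRouting shard,
                              String indexUUID, Throwable failure) throws InterruptedException {
            final CountDownLatch latch = new CountDownLatch(1);
            shardStateAction.shardFailed(shard, indexUUID, "sketch", failure,
                    new ShardStateAction.Listener() {
                        @Override
                        public void onSuccess() {
                            latch.countDown(); // master acknowledged the failure
                        }

                        @Override
                        public void onFailure(Throwable t) {
                            latch.countDown(); // gave up (e.g. no master in time)
                        }
                    });
            latch.await();
        }
    }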
diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java
index 1e9c25ed78..cf948366f6 100644
--- a/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java
@@ -108,7 +108,7 @@ public class AwarenessAllocationIT extends ESIntegTestCase {
.put(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING.getKey() + "zone.values", "a,b")
.put(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING.getKey(), "zone")
.put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 3)
- .put(ZenDiscovery.SETTING_JOIN_TIMEOUT, "10s")
+ .put(ZenDiscovery.JOIN_TIMEOUT_SETTING.getKey(), "10s")
.build();
logger.info("--> starting 4 nodes on different zones");
diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java
index 6835f34356..cc5ce05aca 100644
--- a/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java
@@ -100,7 +100,7 @@ public class ClusterRerouteIT extends ESIntegTestCase {
logger.info("--> explicitly allocate shard 1, *under dry_run*");
state = client().admin().cluster().prepareReroute()
.setExplain(randomBoolean())
- .add(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), node_1, true))
+ .add(new AllocateEmptyPrimaryAllocationCommand("test", 0, node_1, true))
.setDryRun(true)
.execute().actionGet().getState();
assertThat(state.getRoutingNodes().unassigned().size(), equalTo(1));
@@ -113,7 +113,7 @@ public class ClusterRerouteIT extends ESIntegTestCase {
logger.info("--> explicitly allocate shard 1, actually allocating, no dry run");
state = client().admin().cluster().prepareReroute()
.setExplain(randomBoolean())
- .add(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), node_1, true))
+ .add(new AllocateEmptyPrimaryAllocationCommand("test", 0, node_1, true))
.execute().actionGet().getState();
assertThat(state.getRoutingNodes().unassigned().size(), equalTo(1));
assertThat(state.getRoutingNodes().node(state.nodes().resolveNode(node_1).id()).get(0).state(), equalTo(ShardRoutingState.INITIALIZING));
@@ -129,7 +129,7 @@ public class ClusterRerouteIT extends ESIntegTestCase {
logger.info("--> move shard 1 primary from node1 to node2");
state = client().admin().cluster().prepareReroute()
.setExplain(randomBoolean())
- .add(new MoveAllocationCommand(new ShardId("test", 0), node_1, node_2))
+ .add(new MoveAllocationCommand("test", 0, node_1, node_2))
.execute().actionGet().getState();
assertThat(state.getRoutingNodes().node(state.nodes().resolveNode(node_1).id()).get(0).state(), equalTo(ShardRoutingState.RELOCATING));
@@ -162,8 +162,8 @@ public class ClusterRerouteIT extends ESIntegTestCase {
public void testDelayWithALargeAmountOfShards() throws Exception {
Settings commonSettings = settingsBuilder()
- .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING, 1)
- .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING, 1)
+ .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), 1)
+ .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 1)
.build();
logger.info("--> starting 4 nodes");
String node_1 = internalCluster().startNode(commonSettings);
@@ -212,7 +212,7 @@ public class ClusterRerouteIT extends ESIntegTestCase {
logger.info("--> explicitly allocate shard 1, actually allocating, no dry run");
state = client().admin().cluster().prepareReroute()
.setExplain(randomBoolean())
- .add(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), node_1, true))
+ .add(new AllocateEmptyPrimaryAllocationCommand("test", 0, node_1, true))
.execute().actionGet().getState();
assertThat(state.getRoutingNodes().unassigned().size(), equalTo(1));
assertThat(state.getRoutingNodes().node(state.nodes().resolveNode(node_1).id()).get(0).state(), equalTo(ShardRoutingState.INITIALIZING));
@@ -228,7 +228,7 @@ public class ClusterRerouteIT extends ESIntegTestCase {
client().prepareIndex("test", "type", "1").setSource("field", "value").setRefresh(true).execute().actionGet();
logger.info("--> closing all nodes");
- Path[] shardLocation = internalCluster().getInstance(NodeEnvironment.class, node_1).availableShardPaths(new ShardId("test", 0));
+ Path[] shardLocation = internalCluster().getInstance(NodeEnvironment.class, node_1).availableShardPaths(new ShardId("test", "_na_", 0));
assertThat(FileSystemUtils.exists(shardLocation), equalTo(true)); // make sure the data is there!
internalCluster().closeNonSharedNodes(false); // don't wipe data directories the index needs to be there!
@@ -246,7 +246,7 @@ public class ClusterRerouteIT extends ESIntegTestCase {
logger.info("--> explicitly allocate primary");
state = client().admin().cluster().prepareReroute()
.setExplain(randomBoolean())
- .add(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), node_1, true))
+ .add(new AllocateEmptyPrimaryAllocationCommand("test", 0, node_1, true))
.execute().actionGet().getState();
assertThat(state.getRoutingNodes().unassigned().size(), equalTo(1));
assertThat(state.getRoutingNodes().node(state.nodes().resolveNode(node_1).id()).get(0).state(), equalTo(ShardRoutingState.INITIALIZING));
@@ -291,7 +291,7 @@ public class ClusterRerouteIT extends ESIntegTestCase {
assertThat(healthResponse.isTimedOut(), equalTo(false));
logger.info("--> try to move the shard from node1 to node2");
- MoveAllocationCommand cmd = new MoveAllocationCommand(new ShardId("test", 0), node_1, node_2);
+ MoveAllocationCommand cmd = new MoveAllocationCommand("test", 0, node_1, node_2);
ClusterRerouteResponse resp = client().admin().cluster().prepareReroute().add(cmd).setExplain(true).execute().actionGet();
RoutingExplanations e = resp.getExplanations();
assertThat(e.explanations().size(), equalTo(1));
@@ -331,7 +331,7 @@ public class ClusterRerouteIT extends ESIntegTestCase {
try {
enableIndexBlock("test-blocks", blockSetting);
assertAcked(client().admin().cluster().prepareReroute()
- .add(new MoveAllocationCommand(new ShardId("test-blocks", 0), nodesIds.get(toggle % 2), nodesIds.get(++toggle % 2))));
+ .add(new MoveAllocationCommand("test-blocks", 0, nodesIds.get(toggle % 2), nodesIds.get(++toggle % 2))));
ClusterHealthResponse healthResponse = client().admin().cluster().prepareHealth().setWaitForYellowStatus().setWaitForRelocatingShards(0).execute().actionGet();
assertThat(healthResponse.isTimedOut(), equalTo(false));
@@ -344,7 +344,7 @@ public class ClusterRerouteIT extends ESIntegTestCase {
try {
setClusterReadOnly(true);
assertBlocked(client().admin().cluster().prepareReroute()
- .add(new MoveAllocationCommand(new ShardId("test-blocks", 1), nodesIds.get(toggle % 2), nodesIds.get(++toggle % 2))));
+ .add(new MoveAllocationCommand("test-blocks", 1, nodesIds.get(toggle % 2), nodesIds.get(++toggle % 2))));
} finally {
setClusterReadOnly(false);
}
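(Sketch: AllocateEmptyPrimaryAllocationCommand gets the same treatment as the move command, taking an index name plus shard number instead of a ShardId. The node name is illustrative.)

    import org.elasticsearch.cluster.routing.allocation.command.AllocateEmptyPrimaryAllocationCommand;

    class AllocateEmptyPrimarySketch {
        static AllocateEmptyPrimaryAllocationCommand allocate() {
            // (index, shard, node, acceptDataLoss); true acknowledges that an
            // empty primary discards whatever data the shard previously held
            return new AllocateEmptyPrimaryAllocationCommand("test", 0, "node1", true);
        }
    }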
diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java
index 0c3eed1d53..f775918b8f 100644
--- a/core/src/test/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java
@@ -58,7 +58,7 @@ public class FilteringAllocationIT extends ESIntegTestCase {
client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value" + i).execute().actionGet();
}
client().admin().indices().prepareRefresh().execute().actionGet();
- assertThat(client().prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(100l));
+ assertThat(client().prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(100L));
logger.info("--> decommission the second node");
client().admin().cluster().prepareUpdateSettings()
@@ -77,7 +77,7 @@ public class FilteringAllocationIT extends ESIntegTestCase {
}
client().admin().indices().prepareRefresh().execute().actionGet();
- assertThat(client().prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(100l));
+ assertThat(client().prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(100L));
}
public void testDisablingAllocationFiltering() throws Exception {
@@ -99,7 +99,7 @@ public class FilteringAllocationIT extends ESIntegTestCase {
client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value" + i).execute().actionGet();
}
client().admin().indices().prepareRefresh().execute().actionGet();
- assertThat(client().prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(100l));
+ assertThat(client().prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(100L));
ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState();
IndexRoutingTable indexRoutingTable = clusterState.routingTable().index("test");
int numShardsOnNode1 = 0;
diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/ShardsAllocatorModuleIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/ShardsAllocatorModuleIT.java
index 89a7f8ad65..60fa45ebfa 100644
--- a/core/src/test/java/org/elasticsearch/cluster/allocation/ShardsAllocatorModuleIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/allocation/ShardsAllocatorModuleIT.java
@@ -40,10 +40,10 @@ public class ShardsAllocatorModuleIT extends ESIntegTestCase {
}
public void testLoadByShortKeyShardsAllocator() throws IOException {
- Settings build = settingsBuilder().put(ClusterModule.SHARDS_ALLOCATOR_TYPE_KEY, "even_shard") // legacy just to make sure we don't barf
+ Settings build = settingsBuilder().put(ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING.getKey(), "even_shard") // legacy just to make sure we don't barf
.build();
assertAllocatorInstance(build, BalancedShardsAllocator.class);
- build = settingsBuilder().put(ClusterModule.SHARDS_ALLOCATOR_TYPE_KEY, ClusterModule.BALANCED_ALLOCATOR).build();
+ build = settingsBuilder().put(ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING.getKey(), ClusterModule.BALANCED_ALLOCATOR).build();
assertAllocatorInstance(build, BalancedShardsAllocator.class);
}
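Note: cluster-module settings are now registered as Setting objects, so tests read the key via getKey() instead of a raw string constant. Sketch, assuming only the setting named in the hunk above:

    Settings build = settingsBuilder()
        .put(ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING.getKey(), ClusterModule.BALANCED_ALLOCATOR)
        .build();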
diff --git a/core/src/test/java/org/elasticsearch/cluster/health/RoutingTableGenerator.java b/core/src/test/java/org/elasticsearch/cluster/health/RoutingTableGenerator.java
index 5f48c5abde..0303f7c894 100644
--- a/core/src/test/java/org/elasticsearch/cluster/health/RoutingTableGenerator.java
+++ b/core/src/test/java/org/elasticsearch/cluster/health/RoutingTableGenerator.java
@@ -58,7 +58,7 @@ class RoutingTableGenerator {
}
public IndexShardRoutingTable genShardRoutingTable(String index, int shardId, int replicas, ShardCounter counter) {
- IndexShardRoutingTable.Builder builder = new IndexShardRoutingTable.Builder(new ShardId(index, shardId));
+ IndexShardRoutingTable.Builder builder = new IndexShardRoutingTable.Builder(new ShardId(index, "_na_", shardId));
ShardRouting shardRouting = genShardRouting(index, shardId, true);
counter.update(shardRouting);
builder.addShard(shardRouting);
@@ -74,7 +74,7 @@ class RoutingTableGenerator {
public IndexRoutingTable genIndexRoutingTable(IndexMetaData indexMetaData, ShardCounter counter) {
IndexRoutingTable.Builder builder = IndexRoutingTable.builder(indexMetaData.getIndex());
for (int shard = 0; shard < indexMetaData.getNumberOfShards(); shard++) {
- builder.addIndexShard(genShardRoutingTable(indexMetaData.getIndex(), shard, indexMetaData.getNumberOfReplicas(), counter));
+ builder.addIndexShard(genShardRoutingTable(indexMetaData.getIndex().getName(), shard, indexMetaData.getNumberOfReplicas(), counter));
}
return builder.build();
}
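Note: ShardId now carries the index UUID alongside the name; callers that only know the name pass the "_na_" (not available) placeholder, and IndexMetaData.getIndex() returns an Index object rather than a String. Condensed from the hunks above:

    ShardId shardId = new ShardId("test", "_na_", 0);   // name, uuid placeholder, shard id
    String name = indexMetaData.getIndex().getName();   // unwrap the Index object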
diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java
index d3b31221b6..1f98275aee 100644
--- a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java
@@ -69,7 +69,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
indexNameExpressionResolver.concreteIndices(context, "bar");
fail();
} catch (IndexNotFoundException e) {
- assertThat(e.getIndex(), equalTo("bar"));
+ assertThat(e.getIndex().getName(), equalTo("bar"));
}
results = indexNameExpressionResolver.concreteIndices(context, "foofoo", "foobar");
@@ -84,14 +84,14 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
indexNameExpressionResolver.concreteIndices(context, "bar");
fail();
} catch (IndexNotFoundException e) {
- assertThat(e.getIndex(), equalTo("bar"));
+ assertThat(e.getIndex().getName(), equalTo("bar"));
}
try {
indexNameExpressionResolver.concreteIndices(context, "foo", "bar");
fail();
} catch (IndexNotFoundException e) {
- assertThat(e.getIndex(), equalTo("bar"));
+ assertThat(e.getIndex().getName(), equalTo("bar"));
}
results = indexNameExpressionResolver.concreteIndices(context, "barbaz", "foobar");
@@ -102,7 +102,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
indexNameExpressionResolver.concreteIndices(context, "barbaz", "bar");
fail();
} catch (IndexNotFoundException e) {
- assertThat(e.getIndex(), equalTo("bar"));
+ assertThat(e.getIndex().getName(), equalTo("bar"));
}
results = indexNameExpressionResolver.concreteIndices(context, "baz*");
@@ -227,21 +227,21 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
indexNameExpressionResolver.concreteIndices(context, "bar");
fail();
} catch(IndexNotFoundException e) {
- assertThat(e.getIndex(), equalTo("bar"));
+ assertThat(e.getIndex().getName(), equalTo("bar"));
}
try {
indexNameExpressionResolver.concreteIndices(context, "baz*");
fail();
} catch (IndexNotFoundException e) {
- assertThat(e.getIndex(), equalTo("baz*"));
+ assertThat(e.getIndex().getName(), equalTo("baz*"));
}
try {
indexNameExpressionResolver.concreteIndices(context, "foo", "baz*");
fail();
} catch (IndexNotFoundException e) {
- assertThat(e.getIndex(), equalTo("baz*"));
+ assertThat(e.getIndex().getName(), equalTo("baz*"));
}
}
@@ -362,7 +362,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
indexNameExpressionResolver.concreteIndices(context, "baz*");
fail();
} catch (IndexNotFoundException e) {
- assertThat(e.getIndex(), equalTo("baz*"));
+ assertThat(e.getIndex().getName(), equalTo("baz*"));
}
String[] results = indexNameExpressionResolver.concreteIndices(context, "foo", "baz*");
@@ -385,7 +385,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
indexNameExpressionResolver.concreteIndices(context, "foo", "baz*");
fail();
} catch (IndexNotFoundException e) {
- assertThat(e.getIndex(), equalTo("baz*"));
+ assertThat(e.getIndex().getName(), equalTo("baz*"));
}
results = indexNameExpressionResolver.concreteIndices(context, "foofoobar");
@@ -401,14 +401,14 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
indexNameExpressionResolver.concreteIndices(context, "baz*");
fail();
} catch (IndexNotFoundException e) {
- assertThat(e.getIndex(), equalTo("baz*"));
+ assertThat(e.getIndex().getName(), equalTo("baz*"));
}
try {
indexNameExpressionResolver.concreteIndices(context, "foo", "baz*");
fail();
} catch (IndexNotFoundException e) {
- assertThat(e.getIndex(), equalTo("baz*"));
+ assertThat(e.getIndex().getName(), equalTo("baz*"));
}
String[] results = indexNameExpressionResolver.concreteIndices(context, "foofoobar");
@@ -432,7 +432,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
indexNameExpressionResolver.concreteIndices(context, "baz*");
fail();
} catch (IndexNotFoundException e) {
- assertThat(e.getIndex(), equalTo("baz*"));
+ assertThat(e.getIndex().getName(), equalTo("baz*"));
}
try {
@@ -440,7 +440,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
indexNameExpressionResolver.concreteIndices(context, "foo", "baz*");
fail();
} catch (IndexNotFoundException e) {
- assertThat(e.getIndex(), equalTo("baz*"));
+ assertThat(e.getIndex().getName(), equalTo("baz*"));
}
try {
@@ -465,7 +465,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
fail();
} catch(IndexClosedException e) {
assertThat(e.getMessage(), equalTo("closed"));
- assertEquals(e.getIndex(), "foofoo-closed");
+ assertEquals(e.getIndex().getName(), "foofoo-closed");
}
IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, IndicesOptions.strictSingleIndexNoExpandForbidClosed());
@@ -485,7 +485,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
indexNameExpressionResolver.concreteIndices(context, "foo");
fail();
} catch (IndexNotFoundException e) {
- assertThat(e.getIndex(), equalTo("foo"));
+ assertThat(e.getIndex().getName(), equalTo("foo"));
}
results = indexNameExpressionResolver.concreteIndices(context, "foo*");
assertThat(results, emptyArray());
@@ -493,7 +493,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
indexNameExpressionResolver.concreteIndices(context, "foo*", "bar");
fail();
} catch (IndexNotFoundException e) {
- assertThat(e.getIndex(), equalTo("bar"));
+ assertThat(e.getIndex().getName(), equalTo("bar"));
}
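Note: every e.getIndex() change in this file follows one pattern: the exception now exposes an Index object, so assertions unwrap the name first. Representative form, taken verbatim from the hunks above:

    try {
        indexNameExpressionResolver.concreteIndices(context, "bar");
        fail();
    } catch (IndexNotFoundException e) {
        assertThat(e.getIndex().getName(), equalTo("bar")); // Index, not String
    }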
diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java
new file mode 100644
index 0000000000..a43da9e53f
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java
@@ -0,0 +1,93 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.cluster.metadata;
+
+import org.elasticsearch.Version;
+import org.elasticsearch.common.settings.IndexScopedSettings;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.indices.mapper.MapperRegistry;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.VersionUtils;
+
+import java.util.Collections;
+
+public class MetaDataIndexUpgradeServiceTests extends ESTestCase {
+
+ public void testArchiveBrokenIndexSettings() {
+ MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS);
+ IndexMetaData src = newIndexMeta("foo", Settings.EMPTY);
+ IndexMetaData indexMetaData = service.archiveBrokenIndexSettings(src);
+ assertSame(indexMetaData, src);
+
+ src = newIndexMeta("foo", Settings.builder().put("index.refresh_interval", "-200").build());
+ indexMetaData = service.archiveBrokenIndexSettings(src);
+ assertNotSame(indexMetaData, src);
+ assertEquals("-200", indexMetaData.getSettings().get("archived.index.refresh_interval"));
+
+ src = newIndexMeta("foo", Settings.builder().put("index.codec", "best_compression1").build());
+ indexMetaData = service.archiveBrokenIndexSettings(src);
+ assertNotSame(indexMetaData, src);
+ assertEquals("best_compression1", indexMetaData.getSettings().get("archived.index.codec"));
+
+ src = newIndexMeta("foo", Settings.builder().put("index.refresh.interval", "-1").build());
+ indexMetaData = service.archiveBrokenIndexSettings(src);
+ assertNotSame(indexMetaData, src);
+ assertEquals("-1", indexMetaData.getSettings().get("archived.index.refresh.interval"));
+
+ src = newIndexMeta("foo", indexMetaData.getSettings()); // double archive?
+ indexMetaData = service.archiveBrokenIndexSettings(src);
+ assertSame(indexMetaData, src);
+ }
+
+ public void testUpgrade() {
+ MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS);
+ IndexMetaData src = newIndexMeta("foo", Settings.builder().put("index.refresh_interval", "-200").build());
+ assertFalse(service.isUpgraded(src));
+ src = service.upgradeIndexMetaData(src);
+ assertTrue(service.isUpgraded(src));
+ assertEquals("-200", src.getSettings().get("archived.index.refresh_interval"));
+ assertNull(src.getSettings().get("index.refresh_interval"));
+ assertSame(src, service.upgradeIndexMetaData(src)); // no double upgrade
+ }
+
+ public void testIsUpgraded() {
+ MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS);
+ IndexMetaData src = newIndexMeta("foo", Settings.builder().put("index.refresh_interval", "-200").build());
+ assertFalse(service.isUpgraded(src));
+ Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion());
+ src = newIndexMeta("foo", Settings.builder().put(IndexMetaData.SETTING_VERSION_UPGRADED, version).build());
+ assertFalse(service.isUpgraded(src));
+ src = newIndexMeta("foo", Settings.builder().put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.CURRENT).build());
+ assertTrue(service.isUpgraded(src));
+ }
+
+ public static IndexMetaData newIndexMeta(String name, Settings indexSettings) {
+ Settings build = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
+ .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
+ .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
+ .put(IndexMetaData.SETTING_CREATION_DATE, 1)
+ .put(IndexMetaData.SETTING_INDEX_UUID, "BOOM")
+ .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_0_18_1_ID)
+ .put(indexSettings)
+ .build();
+ IndexMetaData metaData = IndexMetaData.builder(name).settings(build).build();
+ return metaData;
+ }
+
+}
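Note: a condensed usage sketch of the behaviour the new test file verifies; all names come from the test itself and the sketch is not compile-checked:

    MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY,
        new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()),
        IndexScopedSettings.DEFAULT_SCOPED_SETTINGS);
    IndexMetaData broken = newIndexMeta("foo",
        Settings.builder().put("index.refresh_interval", "-200").build());
    // instead of rejecting the index, the upgrade moves the bad value under the "archived." prefix
    IndexMetaData fixed = service.archiveBrokenIndexSettings(broken);
    assertEquals("-200", fixed.getSettings().get("archived.index.refresh_interval"));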
diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java
index 4076286ce5..a2fa89b503 100644
--- a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java
@@ -23,7 +23,6 @@ import org.elasticsearch.Version;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.ESTestCase;
@@ -42,7 +41,7 @@ public class MetaDataTests extends ESTestCase {
.putAlias(AliasMetaData.builder("index").build());
try {
MetaData.builder().put(builder).build();
- fail("expection should have been thrown");
+ fail("exception should have been thrown");
} catch (IllegalStateException e) {
assertThat(e.getMessage(), equalTo("index and alias names need to be unique, but alias [index] and index [index] have the same name"));
}
diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetaDataTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetaDataTests.java
index cbb5b7dfbd..5886158506 100644
--- a/core/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetaDataTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetaDataTests.java
@@ -55,7 +55,7 @@ public class ToAndFromJsonMetaDataTests extends ESTestCase {
.settings(settings(Version.CURRENT))
.numberOfShards(1)
.numberOfReplicas(2)
- .creationDate(2l))
+ .creationDate(2L))
.put(IndexMetaData.builder("test5")
.settings(settings(Version.CURRENT).put("setting1", "value1").put("setting2", "value2"))
.numberOfShards(1)
@@ -66,12 +66,12 @@ public class ToAndFromJsonMetaDataTests extends ESTestCase {
.settings(settings(Version.CURRENT).put("setting1", "value1").put("setting2", "value2"))
.numberOfShards(1)
.numberOfReplicas(2)
- .creationDate(2l))
+ .creationDate(2L))
.put(IndexMetaData.builder("test7")
.settings(settings(Version.CURRENT))
.numberOfShards(1)
.numberOfReplicas(2)
- .creationDate(2l)
+ .creationDate(2L)
.putMapping("mapping1", MAPPING_SOURCE1)
.putMapping("mapping2", MAPPING_SOURCE2))
.put(IndexMetaData.builder("test8")
@@ -84,7 +84,7 @@ public class ToAndFromJsonMetaDataTests extends ESTestCase {
.putAlias(newAliasMetaDataBuilder("alias2")))
.put(IndexMetaData.builder("test9")
.settings(settings(Version.CURRENT).put("setting1", "value1").put("setting2", "value2"))
- .creationDate(2l)
+ .creationDate(2L)
.numberOfShards(1)
.numberOfReplicas(2)
.putMapping("mapping1", MAPPING_SOURCE1)
@@ -125,7 +125,7 @@ public class ToAndFromJsonMetaDataTests extends ESTestCase {
.settings(settings(Version.CURRENT)
.put("setting1", "value1")
.put("setting2", "value2"))
- .creationDate(2l)
+ .creationDate(2L)
.numberOfShards(1)
.numberOfReplicas(2)
.putMapping("mapping1", MAPPING_SOURCE1)
@@ -152,14 +152,14 @@ public class ToAndFromJsonMetaDataTests extends ESTestCase {
IndexMetaData indexMetaData = parsedMetaData.index("test1");
assertThat(indexMetaData.getNumberOfShards(), equalTo(1));
assertThat(indexMetaData.getNumberOfReplicas(), equalTo(2));
- assertThat(indexMetaData.getCreationDate(), equalTo(-1l));
+ assertThat(indexMetaData.getCreationDate(), equalTo(-1L));
assertThat(indexMetaData.getSettings().getAsMap().size(), equalTo(3));
assertThat(indexMetaData.getMappings().size(), equalTo(0));
indexMetaData = parsedMetaData.index("test2");
assertThat(indexMetaData.getNumberOfShards(), equalTo(2));
assertThat(indexMetaData.getNumberOfReplicas(), equalTo(3));
- assertThat(indexMetaData.getCreationDate(), equalTo(-1l));
+ assertThat(indexMetaData.getCreationDate(), equalTo(-1L));
assertThat(indexMetaData.getSettings().getAsMap().size(), equalTo(5));
assertThat(indexMetaData.getSettings().get("setting1"), equalTo("value1"));
assertThat(indexMetaData.getSettings().get("setting2"), equalTo("value2"));
@@ -168,13 +168,13 @@ public class ToAndFromJsonMetaDataTests extends ESTestCase {
indexMetaData = parsedMetaData.index("test3");
assertThat(indexMetaData.getNumberOfShards(), equalTo(1));
assertThat(indexMetaData.getNumberOfReplicas(), equalTo(2));
- assertThat(indexMetaData.getCreationDate(), equalTo(-1l));
+ assertThat(indexMetaData.getCreationDate(), equalTo(-1L));
assertThat(indexMetaData.getSettings().getAsMap().size(), equalTo(3));
assertThat(indexMetaData.getMappings().size(), equalTo(1));
assertThat(indexMetaData.getMappings().get("mapping1").source().string(), equalTo(MAPPING_SOURCE1));
indexMetaData = parsedMetaData.index("test4");
- assertThat(indexMetaData.getCreationDate(), equalTo(2l));
+ assertThat(indexMetaData.getCreationDate(), equalTo(2L));
assertThat(indexMetaData.getNumberOfShards(), equalTo(1));
assertThat(indexMetaData.getNumberOfReplicas(), equalTo(2));
assertThat(indexMetaData.getSettings().getAsMap().size(), equalTo(4));
@@ -183,7 +183,7 @@ public class ToAndFromJsonMetaDataTests extends ESTestCase {
indexMetaData = parsedMetaData.index("test5");
assertThat(indexMetaData.getNumberOfShards(), equalTo(1));
assertThat(indexMetaData.getNumberOfReplicas(), equalTo(2));
- assertThat(indexMetaData.getCreationDate(), equalTo(-1l));
+ assertThat(indexMetaData.getCreationDate(), equalTo(-1L));
assertThat(indexMetaData.getSettings().getAsMap().size(), equalTo(5));
assertThat(indexMetaData.getSettings().get("setting1"), equalTo("value1"));
assertThat(indexMetaData.getSettings().get("setting2"), equalTo("value2"));
@@ -194,7 +194,7 @@ public class ToAndFromJsonMetaDataTests extends ESTestCase {
indexMetaData = parsedMetaData.index("test6");
assertThat(indexMetaData.getNumberOfShards(), equalTo(1));
assertThat(indexMetaData.getNumberOfReplicas(), equalTo(2));
- assertThat(indexMetaData.getCreationDate(), equalTo(2l));
+ assertThat(indexMetaData.getCreationDate(), equalTo(2L));
assertThat(indexMetaData.getSettings().getAsMap().size(), equalTo(6));
assertThat(indexMetaData.getSettings().get("setting1"), equalTo("value1"));
assertThat(indexMetaData.getSettings().get("setting2"), equalTo("value2"));
@@ -203,7 +203,7 @@ public class ToAndFromJsonMetaDataTests extends ESTestCase {
indexMetaData = parsedMetaData.index("test7");
assertThat(indexMetaData.getNumberOfShards(), equalTo(1));
assertThat(indexMetaData.getNumberOfReplicas(), equalTo(2));
- assertThat(indexMetaData.getCreationDate(), equalTo(2l));
+ assertThat(indexMetaData.getCreationDate(), equalTo(2L));
assertThat(indexMetaData.getSettings().getAsMap().size(), equalTo(4));
assertThat(indexMetaData.getMappings().size(), equalTo(2));
assertThat(indexMetaData.getMappings().get("mapping1").source().string(), equalTo(MAPPING_SOURCE1));
@@ -212,7 +212,7 @@ public class ToAndFromJsonMetaDataTests extends ESTestCase {
indexMetaData = parsedMetaData.index("test8");
assertThat(indexMetaData.getNumberOfShards(), equalTo(1));
assertThat(indexMetaData.getNumberOfReplicas(), equalTo(2));
- assertThat(indexMetaData.getCreationDate(), equalTo(-1l));
+ assertThat(indexMetaData.getCreationDate(), equalTo(-1L));
assertThat(indexMetaData.getSettings().getAsMap().size(), equalTo(5));
assertThat(indexMetaData.getSettings().get("setting1"), equalTo("value1"));
assertThat(indexMetaData.getSettings().get("setting2"), equalTo("value2"));
@@ -226,7 +226,7 @@ public class ToAndFromJsonMetaDataTests extends ESTestCase {
indexMetaData = parsedMetaData.index("test9");
assertThat(indexMetaData.getNumberOfShards(), equalTo(1));
assertThat(indexMetaData.getNumberOfReplicas(), equalTo(2));
- assertThat(indexMetaData.getCreationDate(), equalTo(2l));
+ assertThat(indexMetaData.getCreationDate(), equalTo(2L));
assertThat(indexMetaData.getSettings().getAsMap().size(), equalTo(6));
assertThat(indexMetaData.getSettings().get("setting1"), equalTo("value1"));
assertThat(indexMetaData.getSettings().get("setting2"), equalTo("value2"));
@@ -240,7 +240,7 @@ public class ToAndFromJsonMetaDataTests extends ESTestCase {
indexMetaData = parsedMetaData.index("test10");
assertThat(indexMetaData.getNumberOfShards(), equalTo(1));
assertThat(indexMetaData.getNumberOfReplicas(), equalTo(2));
- assertThat(indexMetaData.getCreationDate(), equalTo(-1l));
+ assertThat(indexMetaData.getCreationDate(), equalTo(-1L));
assertThat(indexMetaData.getSettings().getAsMap().size(), equalTo(5));
assertThat(indexMetaData.getSettings().get("setting1"), equalTo("value1"));
assertThat(indexMetaData.getSettings().get("setting2"), equalTo("value2"));
@@ -254,7 +254,7 @@ public class ToAndFromJsonMetaDataTests extends ESTestCase {
indexMetaData = parsedMetaData.index("test11");
assertThat(indexMetaData.getNumberOfShards(), equalTo(1));
assertThat(indexMetaData.getNumberOfReplicas(), equalTo(2));
- assertThat(indexMetaData.getCreationDate(), equalTo(-1l));
+ assertThat(indexMetaData.getCreationDate(), equalTo(-1L));
assertThat(indexMetaData.getSettings().getAsMap().size(), equalTo(5));
assertThat(indexMetaData.getSettings().get("setting1"), equalTo("value1"));
assertThat(indexMetaData.getSettings().get("setting2"), equalTo("value2"));
@@ -272,7 +272,7 @@ public class ToAndFromJsonMetaDataTests extends ESTestCase {
indexMetaData = parsedMetaData.index("test12");
assertThat(indexMetaData.getNumberOfShards(), equalTo(1));
assertThat(indexMetaData.getNumberOfReplicas(), equalTo(2));
- assertThat(indexMetaData.getCreationDate(), equalTo(2l));
+ assertThat(indexMetaData.getCreationDate(), equalTo(2L));
assertThat(indexMetaData.getSettings().getAsMap().size(), equalTo(6));
assertThat(indexMetaData.getSettings().get("setting1"), equalTo("value1"));
assertThat(indexMetaData.getSettings().get("setting2"), equalTo("value2"));
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java
index 8f7ae0c822..00acf1ebab 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java
@@ -23,6 +23,7 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.Index;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
@@ -37,7 +38,7 @@ import static org.hamcrest.Matchers.nullValue;
public class AllocationIdTests extends ESTestCase {
public void testShardToStarted() {
logger.info("-- create unassigned shard");
- ShardRouting shard = ShardRouting.newUnassigned("test", 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null));
+ ShardRouting shard = ShardRouting.newUnassigned(new Index("test","_na_"), 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null));
assertThat(shard.allocationId(), nullValue());
logger.info("-- initialize the shard");
@@ -57,7 +58,7 @@ public class AllocationIdTests extends ESTestCase {
public void testSuccessfulRelocation() {
logger.info("-- build started shard");
- ShardRouting shard = ShardRouting.newUnassigned("test", 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null));
+ ShardRouting shard = ShardRouting.newUnassigned(new Index("test","_na_"), 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null));
shard.initialize("node1", -1);
shard.moveToStarted();
@@ -80,7 +81,7 @@ public class AllocationIdTests extends ESTestCase {
public void testCancelRelocation() {
logger.info("-- build started shard");
- ShardRouting shard = ShardRouting.newUnassigned("test", 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null));
+ ShardRouting shard = ShardRouting.newUnassigned(new Index("test","_na_"), 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null));
shard.initialize("node1", -1);
shard.moveToStarted();
@@ -100,7 +101,7 @@ public class AllocationIdTests extends ESTestCase {
public void testMoveToUnassigned() {
logger.info("-- build started shard");
- ShardRouting shard = ShardRouting.newUnassigned("test", 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null));
+ ShardRouting shard = ShardRouting.newUnassigned(new Index("test","_na_"), 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null));
shard.initialize("node1", -1);
shard.moveToStarted();
@@ -111,7 +112,7 @@ public class AllocationIdTests extends ESTestCase {
public void testReinitializing() {
logger.info("-- build started shard");
- ShardRouting shard = ShardRouting.newUnassigned("test", 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null));
+ ShardRouting shard = ShardRouting.newUnassigned(new Index("test","_na_"), 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null));
shard.initialize("node1", -1);
shard.moveToStarted();
AllocationId allocationId = shard.allocationId();
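Note: ShardRouting.newUnassigned now takes an Index instead of a plain name, hence the added import above. The pattern repeated through this file, verbatim:

    ShardRouting shard = ShardRouting.newUnassigned(new Index("test", "_na_"), 0, null, true,
        new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null));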
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java
index 9203b270b2..d911a1175c 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java
@@ -122,7 +122,7 @@ public class PrimaryAllocationIT extends ESIntegTestCase {
logger.info("--> check that the up-to-date primary shard gets promoted and that documents are available");
ensureYellow("test");
- assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 2l);
+ assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 2L);
}
public void testFailedAllocationOfStalePrimaryToDataNodeWithNoData() throws Exception {
@@ -141,7 +141,7 @@ public class PrimaryAllocationIT extends ESIntegTestCase {
assertThat(client().admin().cluster().prepareState().get().getState().getRoutingTable().index("test").getShards().get(0).primaryShard().unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.NODE_LEFT));
logger.info("--> force allocation of stale copy to node that does not have shard copy");
- client().admin().cluster().prepareReroute().add(new AllocateStalePrimaryAllocationCommand(new ShardId("test", 0), dataNodeWithNoShardCopy, true)).get();
+ client().admin().cluster().prepareReroute().add(new AllocateStalePrimaryAllocationCommand("test", 0, dataNodeWithNoShardCopy, true)).get();
logger.info("--> wait until shard is failed and becomes unassigned again");
assertBusy(() -> assertTrue(client().admin().cluster().prepareState().get().getState().getRoutingTable().index("test").allPrimaryShardsUnassigned()));
@@ -161,9 +161,9 @@ public class PrimaryAllocationIT extends ESIntegTestCase {
logger.info("--> adding allocation command for shard " + shardId);
// force allocation based on node id
if (useStaleReplica) {
- rerouteBuilder.add(new AllocateStalePrimaryAllocationCommand(new ShardId("test", shardId), storeStatus.getNode().getId(), true));
+ rerouteBuilder.add(new AllocateStalePrimaryAllocationCommand("test", shardId, storeStatus.getNode().getId(), true));
} else {
- rerouteBuilder.add(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", shardId), storeStatus.getNode().getId(), true));
+ rerouteBuilder.add(new AllocateEmptyPrimaryAllocationCommand("test", shardId, storeStatus.getNode().getId(), true));
}
}
rerouteBuilder.get();
@@ -171,7 +171,7 @@ public class PrimaryAllocationIT extends ESIntegTestCase {
logger.info("--> check that the stale primary shard gets allocated and that documents are available");
ensureYellow("test");
- assertHitCount(client().prepareSearch("test").setSize(0).setQuery(matchAllQuery()).get(), useStaleReplica ? 1l : 0l);
+ assertHitCount(client().prepareSearch("test").setSize(0).setQuery(matchAllQuery()).get(), useStaleReplica ? 1L : 0L);
}
public void testForcePrimaryShardIfAllocationDecidersSayNoAfterIndexCreation() throws ExecutionException, InterruptedException {
@@ -182,7 +182,7 @@ public class PrimaryAllocationIT extends ESIntegTestCase {
assertThat(client().admin().cluster().prepareState().get().getState().getRoutingTable().shardRoutingTable("test", 0).assignedShards(), empty());
- client().admin().cluster().prepareReroute().add(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), node, true)).get();
+ client().admin().cluster().prepareReroute().add(new AllocateEmptyPrimaryAllocationCommand("test", 0, node, true)).get();
ensureGreen("test");
}
@@ -200,6 +200,6 @@ public class PrimaryAllocationIT extends ESIntegTestCase {
internalCluster().fullRestart();
logger.info("--> checking that index still gets allocated with only 1 shard copy being available");
ensureYellow("test");
- assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 1l);
+ assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 1L);
}
}
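Note: the primary-allocation commands get the same index-name/shard-id constructors. Sketch of the two force-allocation variants used above, where shardId and nodeId stand in for the values the test computes, and the trailing true acknowledges potential data loss:

    rerouteBuilder.add(new AllocateStalePrimaryAllocationCommand("test", shardId, nodeId, true));
    rerouteBuilder.add(new AllocateEmptyPrimaryAllocationCommand("test", shardId, nodeId, true));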
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java
index e50272d2b0..5c922f07e4 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java
@@ -198,7 +198,7 @@ public class RoutingServiceTests extends ESAllocationTestCase {
ShardRouting shortDelayUnassignedReplica = null;
ShardRouting longDelayUnassignedReplica = null;
for (ShardRouting shr : unassigned) {
- if (shr.getIndex().equals("short_delay")) {
+ if (shr.getIndexName().equals("short_delay")) {
shortDelayUnassignedReplica = shr;
} else {
longDelayUnassignedReplica = shr;
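Note: ShardRouting.getIndex() now returns an Index, so name comparisons switch to getIndexName(), as in the hunk above:

    if (shr.getIndexName().equals("short_delay")) { /* ... */ }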
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java
index 713bf0aa31..741d62d74e 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java
@@ -71,8 +71,8 @@ public class RoutingTableTests extends ESAllocationTestCase {
.build();
this.testRoutingTable = new RoutingTable.Builder()
- .add(new IndexRoutingTable.Builder(TEST_INDEX_1).initializeAsNew(metaData.index(TEST_INDEX_1)).build())
- .add(new IndexRoutingTable.Builder(TEST_INDEX_2).initializeAsNew(metaData.index(TEST_INDEX_2)).build())
+ .add(new IndexRoutingTable.Builder(metaData.index(TEST_INDEX_1).getIndex()).initializeAsNew(metaData.index(TEST_INDEX_1)).build())
+ .add(new IndexRoutingTable.Builder(metaData.index(TEST_INDEX_2).getIndex()).initializeAsNew(metaData.index(TEST_INDEX_2)).build())
.build();
this.clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT).metaData(metaData).routingTable(testRoutingTable).build();
}
@@ -127,7 +127,7 @@ public class RoutingTableTests extends ESAllocationTestCase {
}
public void testIndex() {
- assertThat(this.testRoutingTable.index(TEST_INDEX_1).getIndex(), is(TEST_INDEX_1));
+ assertThat(this.testRoutingTable.index(TEST_INDEX_1).getIndex().getName(), is(TEST_INDEX_1));
assertThat(this.testRoutingTable.index("foobar"), is(nullValue()));
}
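Note: routing-table builders are now keyed by the Index object as well. Sketch based on the setUp change above (not compile-checked):

    RoutingTable table = new RoutingTable.Builder()
        .add(new IndexRoutingTable.Builder(metaData.index(TEST_INDEX_1).getIndex())
            .initializeAsNew(metaData.index(TEST_INDEX_1)).build())
        .build();
    assertThat(table.index(TEST_INDEX_1).getIndex().getName(), is(TEST_INDEX_1));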
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java
index 54e39cc227..db94742b1e 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java
@@ -161,35 +161,35 @@ public class ShardRoutingTests extends ESTestCase {
switch (changeId) {
case 0:
// change index
- otherRouting = TestShardRouting.newShardRouting(otherRouting.index() + "a", otherRouting.id(), otherRouting.currentNodeId(), otherRouting.relocatingNodeId(),
+ otherRouting = TestShardRouting.newShardRouting(otherRouting.getIndexName() + "a", otherRouting.id(), otherRouting.currentNodeId(), otherRouting.relocatingNodeId(),
otherRouting.restoreSource(), otherRouting.primary(), otherRouting.state(), otherRouting.version(), otherRouting.unassignedInfo());
break;
case 1:
// change shard id
- otherRouting = TestShardRouting.newShardRouting(otherRouting.index(), otherRouting.id() + 1, otherRouting.currentNodeId(), otherRouting.relocatingNodeId(),
+ otherRouting = TestShardRouting.newShardRouting(otherRouting.getIndexName(), otherRouting.id() + 1, otherRouting.currentNodeId(), otherRouting.relocatingNodeId(),
otherRouting.restoreSource(), otherRouting.primary(), otherRouting.state(), otherRouting.version(), otherRouting.unassignedInfo());
break;
case 2:
// change current node
- otherRouting = TestShardRouting.newShardRouting(otherRouting.index(), otherRouting.id(), otherRouting.currentNodeId() == null ? "1" : otherRouting.currentNodeId() + "_1", otherRouting.relocatingNodeId(),
+ otherRouting = TestShardRouting.newShardRouting(otherRouting.getIndexName(), otherRouting.id(), otherRouting.currentNodeId() == null ? "1" : otherRouting.currentNodeId() + "_1", otherRouting.relocatingNodeId(),
otherRouting.restoreSource(), otherRouting.primary(), otherRouting.state(), otherRouting.version(), otherRouting.unassignedInfo());
break;
case 3:
// change relocating node
- otherRouting = TestShardRouting.newShardRouting(otherRouting.index(), otherRouting.id(), otherRouting.currentNodeId(),
+ otherRouting = TestShardRouting.newShardRouting(otherRouting.getIndexName(), otherRouting.id(), otherRouting.currentNodeId(),
otherRouting.relocatingNodeId() == null ? "1" : otherRouting.relocatingNodeId() + "_1",
otherRouting.restoreSource(), otherRouting.primary(), otherRouting.state(), otherRouting.version(), otherRouting.unassignedInfo());
break;
case 4:
// change restore source
- otherRouting = TestShardRouting.newShardRouting(otherRouting.index(), otherRouting.id(), otherRouting.currentNodeId(), otherRouting.relocatingNodeId(),
+ otherRouting = TestShardRouting.newShardRouting(otherRouting.getIndexName(), otherRouting.id(), otherRouting.currentNodeId(), otherRouting.relocatingNodeId(),
otherRouting.restoreSource() == null ? new RestoreSource(new SnapshotId("test", "s1"), Version.CURRENT, "test") :
new RestoreSource(otherRouting.restoreSource().snapshotId(), Version.CURRENT, otherRouting.index() + "_1"),
otherRouting.primary(), otherRouting.state(), otherRouting.version(), otherRouting.unassignedInfo());
break;
case 5:
// change primary flag
- otherRouting = TestShardRouting.newShardRouting(otherRouting.index(), otherRouting.id(), otherRouting.currentNodeId(), otherRouting.relocatingNodeId(),
+ otherRouting = TestShardRouting.newShardRouting(otherRouting.getIndexName(), otherRouting.id(), otherRouting.currentNodeId(), otherRouting.relocatingNodeId(),
otherRouting.restoreSource(), otherRouting.primary() == false, otherRouting.state(), otherRouting.version(), otherRouting.unassignedInfo());
break;
case 6:
@@ -204,7 +204,7 @@ public class ShardRoutingTests extends ESTestCase {
unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "test");
}
- otherRouting = TestShardRouting.newShardRouting(otherRouting.index(), otherRouting.id(), otherRouting.currentNodeId(), otherRouting.relocatingNodeId(),
+ otherRouting = TestShardRouting.newShardRouting(otherRouting.getIndexName(), otherRouting.id(), otherRouting.currentNodeId(), otherRouting.relocatingNodeId(),
otherRouting.restoreSource(), otherRouting.primary(), newState, otherRouting.version(), unassignedInfo);
break;
}
@@ -216,7 +216,7 @@ public class ShardRoutingTests extends ESTestCase {
if (randomBoolean()) {
// change unassigned info
- otherRouting = TestShardRouting.newShardRouting(otherRouting.index(), otherRouting.id(), otherRouting.currentNodeId(), otherRouting.relocatingNodeId(),
+ otherRouting = TestShardRouting.newShardRouting(otherRouting.getIndexName(), otherRouting.id(), otherRouting.currentNodeId(), otherRouting.relocatingNodeId(),
otherRouting.restoreSource(), otherRouting.primary(), otherRouting.state(), otherRouting.version(),
otherRouting.unassignedInfo() == null ? new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "test") :
new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, otherRouting.unassignedInfo().getMessage() + "_1"));
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java
index e277080ebf..e547405b33 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java
@@ -34,6 +34,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.index.Index;
import org.elasticsearch.test.ESAllocationTestCase;
import java.util.Collections;
@@ -160,15 +161,16 @@ public class UnassignedInfoTests extends ESAllocationTestCase {
MetaData metaData = MetaData.builder()
.put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(0))
.build();
+ final Index index = metaData.index("test").getIndex();
ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT)
.metaData(metaData)
- .routingTable(RoutingTable.builder().addAsNew(metaData.index("test")).build()).build();
+ .routingTable(RoutingTable.builder().addAsNew(metaData.index(index)).build()).build();
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1"))).build();
clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build();
// starting primaries
clusterState = ClusterState.builder(clusterState).routingResult(allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING))).build();
- IndexRoutingTable.Builder builder = IndexRoutingTable.builder("test");
- for (IndexShardRoutingTable indexShardRoutingTable : clusterState.routingTable().index("test")) {
+ IndexRoutingTable.Builder builder = IndexRoutingTable.builder(index);
+ for (IndexShardRoutingTable indexShardRoutingTable : clusterState.routingTable().index(index)) {
builder.addIndexShard(indexShardRoutingTable);
}
builder.addReplica();
@@ -219,7 +221,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase {
assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).size(), equalTo(1));
assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo(), notNullValue());
assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.NODE_LEFT));
- assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo().getUnassignedTimeInMillis(), greaterThan(0l));
+ assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo().getUnassignedTimeInMillis(), greaterThan(0L));
}
/**
@@ -250,7 +252,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase {
assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.ALLOCATION_FAILED));
assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo().getMessage(), equalTo("test fail"));
assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo().getDetails(), equalTo("test fail"));
- assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo().getUnassignedTimeInMillis(), greaterThan(0l));
+ assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo().getUnassignedTimeInMillis(), greaterThan(0L));
}
/**
@@ -274,9 +276,9 @@ public class UnassignedInfoTests extends ESAllocationTestCase {
UnassignedInfo unassignedInfo = new UnassignedInfo(RandomPicks.randomFrom(getRandom(), reasons), null);
long delay = unassignedInfo.updateDelay(unassignedInfo.getUnassignedTimeInNanos() + 1, // add 1 tick delay
Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "10h").build(), Settings.EMPTY);
- assertThat(delay, equalTo(0l));
+ assertThat(delay, equalTo(0L));
delay = unassignedInfo.getLastComputedLeftDelayNanos();
- assertThat(delay, equalTo(0l));
+ assertThat(delay, equalTo(0L));
}
/**
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java
index 65c2e57701..16d9acb8de 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java
@@ -88,7 +88,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
} else {
toNodeId = "node1";
}
- rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand(new ShardId("test", 0), existingNodeId, toNodeId)));
+ rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", 0, existingNodeId, toNodeId)));
assertThat(rerouteResult.changed(), equalTo(true));
clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build();
assertThat(clusterState.getRoutingNodes().node(existingNodeId).get(0).state(), equalTo(ShardRoutingState.RELOCATING));
@@ -102,11 +102,11 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
assertThat(clusterState.getRoutingNodes().node(toNodeId).get(0).state(), equalTo(ShardRoutingState.STARTED));
}
- private AbstractAllocateAllocationCommand randomAllocateCommand(ShardId shardId, String node) {
+ private AbstractAllocateAllocationCommand randomAllocateCommand(String index, int shardId, String node) {
return randomFrom(
- new AllocateReplicaAllocationCommand(shardId, node),
- new AllocateEmptyPrimaryAllocationCommand(shardId, node, true),
- new AllocateStalePrimaryAllocationCommand(shardId, node, true)
+ new AllocateReplicaAllocationCommand(index, shardId, node),
+ new AllocateEmptyPrimaryAllocationCommand(index, shardId, node, true),
+ new AllocateStalePrimaryAllocationCommand(index, shardId, node, true)
);
}
@@ -115,18 +115,19 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
.put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none")
.put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), "none")
.build());
+ final String index = "test";
logger.info("--> building initial routing table");
MetaData metaData = MetaData.builder()
- .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))
+ .put(IndexMetaData.builder(index).settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))
.build();
// shard routing is added as "from recovery" instead of "new index creation" so that we can test below that allocating an empty
// primary with accept_data_loss flag set to false fails
RoutingTable routingTable = RoutingTable.builder()
- .addAsRecovery(metaData.index("test"))
+ .addAsRecovery(metaData.index(index))
.build();
ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).build();
- ShardId shardId = new ShardId("test", 0);
+ final ShardId shardId = new ShardId(metaData.index(index).getIndex(), 0);
logger.info("--> adding 3 nodes on same rack and do rerouting");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
@@ -141,7 +142,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
logger.info("--> allocating to non-existent node, should fail");
try {
- allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(shardId, "node42")));
+ allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(index, shardId.id(), "node42")));
fail("expected IllegalArgumentException when allocating to non-existing node");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("failed to resolve [node42], no matching nodes"));
@@ -149,7 +150,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
logger.info("--> allocating to non-data node, should fail");
try {
- allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(shardId, "node4")));
+ allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(index, shardId.id(), "node4")));
fail("expected IllegalArgumentException when allocating to non-data node");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("allocation can only be done on data nodes"));
@@ -157,7 +158,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
logger.info("--> allocating non-existing shard, should fail");
try {
- allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(new ShardId("test", 1), "node2")));
+ allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand("test", 1, "node2")));
fail("expected ShardNotFoundException when allocating non-existing shard");
} catch (ShardNotFoundException e) {
assertThat(e.getMessage(), containsString("no such shard"));
@@ -165,7 +166,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
logger.info("--> allocating non-existing index, should fail");
try {
- allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(new ShardId("test2", 0), "node2")));
+ allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand("test2", 0, "node2")));
fail("expected ShardNotFoundException when allocating non-existing index");
} catch (IndexNotFoundException e) {
assertThat(e.getMessage(), containsString("no such index"));
@@ -173,7 +174,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
logger.info("--> allocating empty primary with acceptDataLoss flag set to false");
try {
- allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), "node1", false)));
+ allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand("test", 0, "node1", false)));
fail("expected IllegalArgumentException when allocating empty primary with acceptDataLoss flag set to false");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("allocating an empty primary for " + shardId + " can result in data loss. Please confirm by setting the accept_data_loss parameter to true"));
@@ -181,14 +182,14 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
logger.info("--> allocating stale primary with acceptDataLoss flag set to false");
try {
- allocation.reroute(clusterState, new AllocationCommands(new AllocateStalePrimaryAllocationCommand(shardId, "node1", false)));
+ allocation.reroute(clusterState, new AllocationCommands(new AllocateStalePrimaryAllocationCommand(index, shardId.id(), "node1", false)));
fail("expected IllegalArgumentException when allocating stale primary with acceptDataLoss flag set to false");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("allocating an empty primary for " + shardId + " can result in data loss. Please confirm by setting the accept_data_loss parameter to true"));
}
logger.info("--> allocating empty primary with acceptDataLoss flag set to true");
- rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), "node1", true)));
+ rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand("test", 0, "node1", true)));
assertThat(rerouteResult.changed(), equalTo(true));
clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build();
assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1));
@@ -204,13 +205,13 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
logger.info("--> allocate the replica shard on the primary shard node, should fail");
try {
- allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand(new ShardId("test", 0), "node1")));
+ allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node1")));
fail("expected IllegalArgumentException when allocating replica shard on the primary shard node");
} catch (IllegalArgumentException e) {
}
logger.info("--> allocate the replica shard on on the second node");
- rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand(new ShardId("test", 0), "node2")));
+ rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node2")));
assertThat(rerouteResult.changed(), equalTo(true));
clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build();
assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1));
@@ -229,7 +230,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
logger.info("--> verify that we fail when there are no unassigned shards");
try {
- allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(new ShardId("test", 0), "node3")));
+ allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand("test", 0, "node3")));
fail("expected IllegalArgumentException when allocating shard while no unassigned shard available");
} catch (IllegalArgumentException e) {
}
@@ -261,7 +262,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(0));
logger.info("--> allocating empty primary shard with accept_data_loss flag set to true");
- rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), "node1", true)));
+ rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand("test", 0, "node1", true)));
assertThat(rerouteResult.changed(), equalTo(true));
clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build();
assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1));
@@ -270,7 +271,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
logger.info("--> cancel primary allocation, make sure it fails...");
try {
- allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand(new ShardId("test", 0), "node1", false)));
+ allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node1", false)));
fail();
} catch (IllegalArgumentException e) {
}
@@ -284,13 +285,13 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
logger.info("--> cancel primary allocation, make sure it fails...");
try {
- allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand(new ShardId("test", 0), "node1", false)));
+ allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node1", false)));
fail();
} catch (IllegalArgumentException e) {
}
logger.info("--> allocate the replica shard on on the second node");
- rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand(new ShardId("test", 0), "node2")));
+ rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node2")));
assertThat(rerouteResult.changed(), equalTo(true));
clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build();
assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1));
@@ -299,7 +300,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(INITIALIZING).size(), equalTo(1));
logger.info("--> cancel the relocation allocation");
- rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand(new ShardId("test", 0), "node2", false)));
+ rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node2", false)));
assertThat(rerouteResult.changed(), equalTo(true));
clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build();
assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1));
@@ -308,7 +309,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
assertThat(clusterState.getRoutingNodes().node("node3").size(), equalTo(0));
logger.info("--> allocate the replica shard on on the second node");
- rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand(new ShardId("test", 0), "node2")));
+ rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node2")));
assertThat(rerouteResult.changed(), equalTo(true));
clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build();
assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1));
@@ -318,7 +319,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
logger.info("--> cancel the primary being replicated, make sure it fails");
try {
- allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand(new ShardId("test", 0), "node1", false)));
+ allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node1", false)));
fail();
} catch (IllegalArgumentException e) {
}
@@ -332,7 +333,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(STARTED).size(), equalTo(1));
logger.info("--> cancel allocation of the replica shard");
- rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand(new ShardId("test", 0), "node2", false)));
+ rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node2", false)));
assertThat(rerouteResult.changed(), equalTo(true));
clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build();
assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1));
@@ -341,7 +342,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
assertThat(clusterState.getRoutingNodes().node("node3").size(), equalTo(0));
logger.info("--> allocate the replica shard on on the second node");
- rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand(new ShardId("test", 0), "node2")));
+ rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node2")));
clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build();
assertThat(rerouteResult.changed(), equalTo(true));
assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1));
@@ -357,7 +358,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(STARTED).size(), equalTo(1));
logger.info("--> move the replica shard");
- rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand(new ShardId("test", 0), "node2", "node3")));
+ rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", 0, "node2", "node3")));
clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build();
assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1));
assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(STARTED).size(), equalTo(1));
@@ -367,7 +368,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
assertThat(clusterState.getRoutingNodes().node("node3").shardsWithState(INITIALIZING).size(), equalTo(1));
logger.info("--> cancel the move of the replica shard");
- rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand(new ShardId("test", 0), "node3", false)));
+ rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node3", false)));
clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build();
assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1));
assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(STARTED).size(), equalTo(1));
@@ -376,7 +377,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
logger.info("--> cancel the primary allocation (with allow_primary set to true)");
- rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand(new ShardId("test", 0), "node1", true)));
+ rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node1", true)));
clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build();
assertThat(rerouteResult.changed(), equalTo(true));
assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(STARTED).get(0).primary(), equalTo(true));
@@ -386,33 +387,38 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
public void testSerialization() throws Exception {
AllocationCommands commands = new AllocationCommands(
- new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 1), "node1", true),
- new AllocateStalePrimaryAllocationCommand(new ShardId("test", 2), "node1", true),
- new AllocateReplicaAllocationCommand(new ShardId("test", 2), "node1"),
- new MoveAllocationCommand(new ShardId("test", 3), "node2", "node3"),
- new CancelAllocationCommand(new ShardId("test", 4), "node5", true)
+ new AllocateEmptyPrimaryAllocationCommand("test", 1, "node1", true),
+ new AllocateStalePrimaryAllocationCommand("test", 2, "node1", true),
+ new AllocateReplicaAllocationCommand("test", 2, "node1"),
+ new MoveAllocationCommand("test", 3, "node2", "node3"),
+ new CancelAllocationCommand("test", 4, "node5", true)
);
BytesStreamOutput bytes = new BytesStreamOutput();
AllocationCommands.writeTo(commands, bytes);
AllocationCommands sCommands = AllocationCommands.readFrom(StreamInput.wrap(bytes.bytes()));
assertThat(sCommands.commands().size(), equalTo(5));
- assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).shardId(), equalTo(new ShardId("test", 1)));
+ assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).shardId(), equalTo(1));
+ assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).index(), equalTo("test"));
assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).node(), equalTo("node1"));
assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).acceptDataLoss(), equalTo(true));
- assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).shardId(), equalTo(new ShardId("test", 2)));
+ assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).shardId(), equalTo(2));
+ assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).index(), equalTo("test"));
assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).node(), equalTo("node1"));
assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).acceptDataLoss(), equalTo(true));
- assertThat(((AllocateReplicaAllocationCommand) (sCommands.commands().get(2))).shardId(), equalTo(new ShardId("test", 2)));
+ assertThat(((AllocateReplicaAllocationCommand) (sCommands.commands().get(2))).shardId(), equalTo(2));
+ assertThat(((AllocateReplicaAllocationCommand) (sCommands.commands().get(2))).index(), equalTo("test"));
assertThat(((AllocateReplicaAllocationCommand) (sCommands.commands().get(2))).node(), equalTo("node1"));
- assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).shardId(), equalTo(new ShardId("test", 3)));
+ assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).shardId(), equalTo(3));
+ assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).index(), equalTo("test"));
assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).fromNode(), equalTo("node2"));
assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).toNode(), equalTo("node3"));
- assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).shardId(), equalTo(new ShardId("test", 4)));
+ assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).shardId(), equalTo(4));
+ assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).index(), equalTo("test"));
assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).node(), equalTo("node5"));
assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).allowPrimary(), equalTo(true));
}
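The rewritten testSerialization above pins down the new constructor shapes: every allocation command now takes the index name and the shard number as separate arguments instead of a prebuilt ShardId. A minimal round-trip sketch using only calls that appear in this patch, meant for the body of a test method (imports shown for completeness):

    import org.elasticsearch.cluster.routing.allocation.command.AllocateEmptyPrimaryAllocationCommand;
    import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands;
    import org.elasticsearch.cluster.routing.allocation.command.CancelAllocationCommand;
    import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand;
    import org.elasticsearch.common.io.stream.BytesStreamOutput;
    import org.elasticsearch.common.io.stream.StreamInput;

    // Build commands with (index, shardId, ...) arguments -- no ShardId needed.
    AllocationCommands commands = new AllocationCommands(
            new AllocateEmptyPrimaryAllocationCommand("test", 1, "node1", true),
            new MoveAllocationCommand("test", 3, "node2", "node3"),
            new CancelAllocationCommand("test", 4, "node5", true));
    // Serialize and read back, as the test does.
    BytesStreamOutput out = new BytesStreamOutput();
    AllocationCommands.writeTo(commands, out);
    AllocationCommands roundTripped = AllocationCommands.readFrom(StreamInput.wrap(out.bytes()));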
@@ -434,22 +440,27 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
AllocationCommands sCommands = AllocationCommands.fromXContent(parser);
assertThat(sCommands.commands().size(), equalTo(5));
- assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).shardId(), equalTo(new ShardId("test", 1)));
+ assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).shardId(), equalTo(1));
+ assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).index(), equalTo("test"));
assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).node(), equalTo("node1"));
assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).acceptDataLoss(), equalTo(true));
- assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).shardId(), equalTo(new ShardId("test", 2)));
+ assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).shardId(), equalTo(2));
+ assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).index(), equalTo("test"));
assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).node(), equalTo("node1"));
assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).acceptDataLoss(), equalTo(true));
- assertThat(((AllocateReplicaAllocationCommand) (sCommands.commands().get(2))).shardId(), equalTo(new ShardId("test", 2)));
+ assertThat(((AllocateReplicaAllocationCommand) (sCommands.commands().get(2))).shardId(), equalTo(2));
+ assertThat(((AllocateReplicaAllocationCommand) (sCommands.commands().get(2))).index(), equalTo("test"));
assertThat(((AllocateReplicaAllocationCommand) (sCommands.commands().get(2))).node(), equalTo("node1"));
- assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).shardId(), equalTo(new ShardId("test", 3)));
+ assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).shardId(), equalTo(3));
+ assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).index(), equalTo("test"));
assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).fromNode(), equalTo("node2"));
assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).toNode(), equalTo("node3"));
- assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).shardId(), equalTo(new ShardId("test", 4)));
+ assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).shardId(), equalTo(4));
+ assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).index(), equalTo("test"));
assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).node(), equalTo("node5"));
assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).allowPrimary(), equalTo(true));
}
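The assertions above also document the matching accessor split: shardId() now returns the bare shard number, and the index name moves to a separate index() accessor. A small fragment under the same imports:

    MoveAllocationCommand move = new MoveAllocationCommand("test", 3, "node2", "node3");
    int shardNumber = move.shardId(); // 3 -- previously this returned a ShardId
    String indexName = move.index();  // "test"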
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationPriorityTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationPriorityTests.java
index 52aad66776..925258636c 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationPriorityTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationPriorityTests.java
@@ -74,26 +74,26 @@ public class AllocationPriorityTests extends ESAllocationTestCase {
routingTable = allocation.reroute(clusterState, "reroute").routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertEquals(2, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size());
- assertEquals(highPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(0).index());
- assertEquals(highPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(1).index());
+ assertEquals(highPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(0).getIndexName());
+ assertEquals(highPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(1).getIndexName());
routingTable = allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertEquals(2, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size());
- assertEquals(lowPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(0).index());
- assertEquals(lowPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(1).index());
+ assertEquals(lowPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(0).getIndexName());
+ assertEquals(lowPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(1).getIndexName());
routingTable = allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertEquals(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).toString(), 2, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size());
- assertEquals(highPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(0).index());
- assertEquals(highPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(1).index());
+ assertEquals(highPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(0).getIndexName());
+ assertEquals(highPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(1).getIndexName());
routingTable = allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertEquals(2, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size());
- assertEquals(lowPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(0).index());
- assertEquals(lowPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(1).index());
+ assertEquals(lowPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(0).getIndexName());
+ assertEquals(lowPriorityName, clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(1).getIndexName());
}
}
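The getIndexName() calls above replace the old String-returning index() accessor on ShardRouting. A one-line sketch, assuming a populated clusterState as in the test:

    // The String accessor is now called getIndexName():
    String name = clusterState.getRoutingNodes().shardsWithState(INITIALIZING).get(0).getIndexName();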
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java
index eb94b6de10..8810fc4739 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java
@@ -872,10 +872,10 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {
if (routing.primary()) {
primaryNode = routing.currentNodeId();
} else if (routing.initializing()) {
- commands.add(new CancelAllocationCommand(routing.shardId(), routing.currentNodeId(), false));
+ commands.add(new CancelAllocationCommand(routing.shardId().getIndexName(), routing.id(), routing.currentNodeId(), false));
}
}
- commands.add(new MoveAllocationCommand(new ShardId("test", 0), primaryNode, "A-4"));
+ commands.add(new MoveAllocationCommand("test", 0, primaryNode, "A-4"));
routingTable = strategy.reroute(clusterState, commands).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
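Where a ShardId used to be passed straight through, the command arguments are now derived from the routing entry itself, as the rewritten loop above shows. A sketch, assuming commands and clusterState as in the test; the node id "A-0" is purely illustrative:

    for (ShardRouting routing : clusterState.getRoutingNodes().node("A-0")) {
        if (routing.initializing()) {
            // Index name and shard number come from the routing entry's ShardId.
            commands.add(new CancelAllocationCommand(
                    routing.shardId().getIndexName(), routing.id(), routing.currentNodeId(), false));
        }
    }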
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/CatAllocationTestCase.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/CatAllocationTestCase.java
index 8ac6c4fced..422851a229 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/CatAllocationTestCase.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/CatAllocationTestCase.java
@@ -98,10 +98,10 @@ public abstract class CatAllocationTestCase extends ESAllocationTestCase {
for(Idx idx : indices.values()) {
IndexMetaData idxMeta = IndexMetaData.builder(idx.name).settings(settings(Version.CURRENT)).numberOfShards(idx.numShards()).numberOfReplicas(idx.numReplicas()).build();
builder.put(idxMeta, false);
- IndexRoutingTable.Builder tableBuilder = new IndexRoutingTable.Builder(idx.name).initializeAsRecovery(idxMeta);
+ IndexRoutingTable.Builder tableBuilder = new IndexRoutingTable.Builder(idxMeta.getIndex()).initializeAsRecovery(idxMeta);
Map<Integer, IndexShardRoutingTable> shardIdToRouting = new HashMap<>();
for (ShardRouting r : idx.routing) {
- IndexShardRoutingTable refData = new IndexShardRoutingTable.Builder(new ShardId(idx.name, r.id())).addShard(r).build();
+ IndexShardRoutingTable refData = new IndexShardRoutingTable.Builder(r.shardId()).addShard(r).build();
if (shardIdToRouting.containsKey(r.getId())) {
refData = new IndexShardRoutingTable.Builder(shardIdToRouting.get(r.getId())).addShard(r).build();
}
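The builder now keys off the Index object carried by the index metadata rather than a raw name, and the per-shard builder reuses the routing entry's own ShardId instead of constructing a fresh one. A sketch; settings(...) is the helper inherited from the allocation test base class:

    IndexMetaData idxMeta = IndexMetaData.builder("test")
            .settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(0).build();
    // Pass the Index object, not the name string:
    IndexRoutingTable.Builder table = new IndexRoutingTable.Builder(idxMeta.getIndex());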
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java
index 827da901dc..beb7c134c1 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java
@@ -28,7 +28,6 @@ import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
-import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
@@ -130,7 +129,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase {
routingNodes = clusterState.getRoutingNodes();
assertThat(routingNodes.node("node3").size(), equalTo(1));
- assertThat(routingNodes.node("node3").get(0).shardId().index().name(), equalTo("test1"));
+ assertThat(routingNodes.node("node3").get(0).shardId().getIndex().getName(), equalTo("test1"));
}
@@ -235,7 +234,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase {
routingNodes = clusterState.getRoutingNodes();
assertThat(routingNodes.node("node3").size(), equalTo(1));
- assertThat(routingNodes.node("node3").get(0).shardId().index().name(), equalTo("test1"));
+ assertThat(routingNodes.node("node3").get(0).shardId().getIndex().getName(), equalTo("test1"));
}
public void testClusterPrimariesActive2() {
@@ -442,7 +441,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase {
routingNodes = clusterState.getRoutingNodes();
assertThat(routingNodes.node("node3").size(), equalTo(1));
- assertThat(routingNodes.node("node3").get(0).shardId().index().name(), anyOf(equalTo("test1"), equalTo("test2")));
+ assertThat(routingNodes.node("node3").get(0).shardId().getIndex().getName(), anyOf(equalTo("test1"), equalTo("test2")));
}
public void testClusterAllActive2() {
@@ -643,7 +642,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase {
RoutingNodes.UnassignedShards.UnassignedIterator iterator = unassigned.iterator();
while (iterator.hasNext()) {
ShardRouting next = iterator.next();
- if ("test1".equals(next.index())) {
+ if ("test1".equals(next.index().getName())) {
iterator.removeAndIgnore();
}
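ShardRouting.index() itself now returns an Index rather than a String, so name comparisons go through getName(), with getIndexName() available as a shortcut. A fragment, assuming a ShardRouting next as in the loop above:

    String viaIndex = next.index().getName(); // Index object, then its name
    String viaShortcut = next.getIndexName(); // equivalent convenience accessor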
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java
index cb09fb93b6..82496f31bd 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java
@@ -149,7 +149,7 @@ public class DeadNodesAllocationTests extends ESAllocationTestCase {
logger.info("--> moving primary shard to node3");
rerouteResult = allocation.reroute(clusterState, new AllocationCommands(
- new MoveAllocationCommand(clusterState.routingTable().index("test").shard(0).primaryShard().shardId(), clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3"))
+ new MoveAllocationCommand("test", 0, clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3"))
);
assertThat(rerouteResult.changed(), equalTo(true));
clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build();
@@ -223,7 +223,7 @@ public class DeadNodesAllocationTests extends ESAllocationTestCase {
logger.info("--> moving primary shard to node3");
rerouteResult = allocation.reroute(clusterState, new AllocationCommands(
- new MoveAllocationCommand(clusterState.routingTable().index("test").shard(0).primaryShard().shardId(), clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3"))
+ new MoveAllocationCommand("test",0 , clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3"))
);
assertThat(rerouteResult.changed(), equalTo(true));
clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build();
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java
index 8807816d2e..4b720036b9 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java
@@ -35,7 +35,6 @@ import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationComman
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESAllocationTestCase;
import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
@@ -54,7 +53,7 @@ public class ExpectedShardSizeAllocationTests extends ESAllocationTestCase {
return new ClusterInfo() {
@Override
public Long getShardSize(ShardRouting shardRouting) {
- if (shardRouting.index().equals("test") && shardRouting.shardId().getId() == 0) {
+ if (shardRouting.getIndexName().equals("test") && shardRouting.shardId().getId() == 0) {
return byteSize;
}
return null;
@@ -112,7 +111,7 @@ public class ExpectedShardSizeAllocationTests extends ESAllocationTestCase {
return new ClusterInfo() {
@Override
public Long getShardSize(ShardRouting shardRouting) {
- if (shardRouting.index().equals("test") && shardRouting.shardId().getId() == 0) {
+ if (shardRouting.getIndexName().equals("test") && shardRouting.shardId().getId() == 0) {
return byteSize;
}
return null;
@@ -150,7 +149,7 @@ public class ExpectedShardSizeAllocationTests extends ESAllocationTestCase {
} else {
toNodeId = "node1";
}
- rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand(new ShardId("test", 0), existingNodeId, toNodeId)));
+ rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", 0, existingNodeId, toNodeId)));
assertThat(rerouteResult.changed(), equalTo(true));
clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build();
assertEquals(clusterState.getRoutingNodes().node(existingNodeId).get(0).state(), ShardRoutingState.RELOCATING);
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java
index b8ab9c1359..0c687bc665 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java
@@ -109,7 +109,7 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase {
logger.info("--> moving primary shard to node3");
rerouteResult = allocation.reroute(clusterState, new AllocationCommands(
- new MoveAllocationCommand(clusterState.routingTable().index("test").shard(0).primaryShard().shardId(), clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3"))
+ new MoveAllocationCommand("test", 0, clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3"))
);
assertThat(rerouteResult.changed(), equalTo(true));
clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build();
@@ -125,7 +125,7 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase {
logger.info("--> moving primary shard to node3");
rerouteResult = allocation.reroute(clusterState, new AllocationCommands(
- new MoveAllocationCommand(clusterState.routingTable().index("test").shard(0).primaryShard().shardId(), clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3"))
+ new MoveAllocationCommand("test", 0, clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3"))
);
assertThat(rerouteResult.changed(), equalTo(true));
clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build();
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java
index 809b01cb61..062a95c867 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java
@@ -299,26 +299,26 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase {
}
public void testRebalanceDoesNotAllocatePrimaryAndReplicasOnDifferentVersionNodes() {
- ShardId shard1 = new ShardId("test1", 0);
- ShardId shard2 = new ShardId("test2", 0);
+ ShardId shard1 = new ShardId("test1", "_na_", 0);
+ ShardId shard2 = new ShardId("test2", "_na_", 0);
final DiscoveryNode newNode = new DiscoveryNode("newNode", DummyTransportAddress.INSTANCE, Version.CURRENT);
final DiscoveryNode oldNode1 = new DiscoveryNode("oldNode1", DummyTransportAddress.INSTANCE, VersionUtils.getPreviousVersion());
final DiscoveryNode oldNode2 = new DiscoveryNode("oldNode2", DummyTransportAddress.INSTANCE, VersionUtils.getPreviousVersion());
MetaData metaData = MetaData.builder()
- .put(IndexMetaData.builder(shard1.getIndex()).settings(settings(Version.CURRENT).put(Settings.EMPTY)).numberOfShards(1).numberOfReplicas(1))
- .put(IndexMetaData.builder(shard2.getIndex()).settings(settings(Version.CURRENT).put(Settings.EMPTY)).numberOfShards(1).numberOfReplicas(1))
+ .put(IndexMetaData.builder(shard1.getIndexName()).settings(settings(Version.CURRENT).put(Settings.EMPTY)).numberOfShards(1).numberOfReplicas(1))
+ .put(IndexMetaData.builder(shard2.getIndexName()).settings(settings(Version.CURRENT).put(Settings.EMPTY)).numberOfShards(1).numberOfReplicas(1))
.build();
RoutingTable routingTable = RoutingTable.builder()
.add(IndexRoutingTable.builder(shard1.getIndex())
.addIndexShard(new IndexShardRoutingTable.Builder(shard1)
- .addShard(TestShardRouting.newShardRouting(shard1.getIndex(), shard1.getId(), newNode.id(), true, ShardRoutingState.STARTED, 10))
- .addShard(TestShardRouting.newShardRouting(shard1.getIndex(), shard1.getId(), oldNode1.id(), false, ShardRoutingState.STARTED, 10))
+ .addShard(TestShardRouting.newShardRouting(shard1.getIndexName(), shard1.getId(), newNode.id(), true, ShardRoutingState.STARTED, 10))
+ .addShard(TestShardRouting.newShardRouting(shard1.getIndexName(), shard1.getId(), oldNode1.id(), false, ShardRoutingState.STARTED, 10))
.build())
)
.add(IndexRoutingTable.builder(shard2.getIndex())
.addIndexShard(new IndexShardRoutingTable.Builder(shard2)
- .addShard(TestShardRouting.newShardRouting(shard2.getIndex(), shard2.getId(), newNode.id(), true, ShardRoutingState.STARTED, 10))
- .addShard(TestShardRouting.newShardRouting(shard2.getIndex(), shard2.getId(), oldNode1.id(), false, ShardRoutingState.STARTED, 10))
+ .addShard(TestShardRouting.newShardRouting(shard2.getIndexName(), shard2.getId(), newNode.id(), true, ShardRoutingState.STARTED, 10))
+ .addShard(TestShardRouting.newShardRouting(shard2.getIndexName(), shard2.getId(), oldNode1.id(), false, ShardRoutingState.STARTED, 10))
.build())
)
.build();
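ShardId now carries the index UUID alongside the name, with "_na_" serving as the placeholder where no real UUID is available. A sketch:

    import org.elasticsearch.index.Index;
    import org.elasticsearch.index.shard.ShardId;

    ShardId shard = new ShardId("test1", "_na_", 0); // name, UUID placeholder, shard number
    String name = shard.getIndexName();              // "test1"
    Index index = shard.getIndex();                  // carries both the name and the UUID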
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java
index 4672f339c7..df0c4a8fa3 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java
@@ -66,7 +66,7 @@ public class RebalanceAfterActiveTests extends ESAllocationTestCase {
return new ClusterInfo() {
@Override
public Long getShardSize(ShardRouting shardRouting) {
- if (shardRouting.index().equals("test")) {
+ if (shardRouting.getIndexName().equals("test")) {
return sizes[shardRouting.getId()];
}
return null; }
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java
index e1586c433a..36f51675b2 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java
@@ -64,14 +64,14 @@ public class ShardVersioningTests extends ESAllocationTestCase {
for (int i = 0; i < routingTable.index("test1").shards().size(); i++) {
assertThat(routingTable.index("test1").shard(i).shards().size(), equalTo(2));
assertThat(routingTable.index("test1").shard(i).primaryShard().state(), equalTo(INITIALIZING));
- assertThat(routingTable.index("test1").shard(i).primaryShard().version(), equalTo(1l));
+ assertThat(routingTable.index("test1").shard(i).primaryShard().version(), equalTo(1L));
assertThat(routingTable.index("test1").shard(i).replicaShards().get(0).state(), equalTo(UNASSIGNED));
}
for (int i = 0; i < routingTable.index("test2").shards().size(); i++) {
assertThat(routingTable.index("test2").shard(i).shards().size(), equalTo(2));
assertThat(routingTable.index("test2").shard(i).primaryShard().state(), equalTo(INITIALIZING));
- assertThat(routingTable.index("test2").shard(i).primaryShard().version(), equalTo(1l));
+ assertThat(routingTable.index("test2").shard(i).primaryShard().version(), equalTo(1L));
assertThat(routingTable.index("test2").shard(i).replicaShards().get(0).state(), equalTo(UNASSIGNED));
}
@@ -84,17 +84,17 @@ public class ShardVersioningTests extends ESAllocationTestCase {
for (int i = 0; i < routingTable.index("test1").shards().size(); i++) {
assertThat(routingTable.index("test1").shard(i).shards().size(), equalTo(2));
assertThat(routingTable.index("test1").shard(i).primaryShard().state(), equalTo(STARTED));
- assertThat(routingTable.index("test1").shard(i).primaryShard().version(), equalTo(2l));
+ assertThat(routingTable.index("test1").shard(i).primaryShard().version(), equalTo(2L));
assertThat(routingTable.index("test1").shard(i).replicaShards().get(0).state(), equalTo(INITIALIZING));
- assertThat(routingTable.index("test1").shard(i).replicaShards().get(0).version(), equalTo(2l));
+ assertThat(routingTable.index("test1").shard(i).replicaShards().get(0).version(), equalTo(2L));
}
for (int i = 0; i < routingTable.index("test2").shards().size(); i++) {
assertThat(routingTable.index("test2").shard(i).shards().size(), equalTo(2));
assertThat(routingTable.index("test2").shard(i).primaryShard().state(), equalTo(INITIALIZING));
- assertThat(routingTable.index("test2").shard(i).primaryShard().version(), equalTo(1l));
+ assertThat(routingTable.index("test2").shard(i).primaryShard().version(), equalTo(1L));
assertThat(routingTable.index("test2").shard(i).replicaShards().get(0).state(), equalTo(UNASSIGNED));
- assertThat(routingTable.index("test2").shard(i).replicaShards().get(0).version(), equalTo(1l));
+ assertThat(routingTable.index("test2").shard(i).replicaShards().get(0).version(), equalTo(1L));
}
}
}
\ No newline at end of file
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java
index ac539c421b..0830747a9d 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java
@@ -206,10 +206,10 @@ public class ShardsLimitAllocationTests extends ESAllocationTestCase {
assertThat(numberOfShardsOfType(clusterState.getRoutingNodes(), STARTED), equalTo(10));
for (ShardRouting shardRouting : clusterState.getRoutingNodes().node("node1")) {
- assertThat(shardRouting.index(), equalTo("test"));
+ assertThat(shardRouting.getIndexName(), equalTo("test"));
}
for (ShardRouting shardRouting : clusterState.getRoutingNodes().node("node2")) {
- assertThat(shardRouting.index(), equalTo("test1"));
+ assertThat(shardRouting.getIndexName(), equalTo("test1"));
}
logger.info("update " + ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey() + " for test, see that things move");
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java
index bf41ad8a05..90263acb13 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java
@@ -274,8 +274,8 @@ public class SingleShardNoReplicasRoutingTests extends ESAllocationTestCase {
assertThat(nodeIndex, lessThan(25));
// check that we don't have a shard associated with a node with the same index name (we have a single shard)
for (ShardRouting shardRoutingEntry : routingNode) {
- assertThat(encounteredIndices, not(hasItem(shardRoutingEntry.index())));
- encounteredIndices.add(shardRoutingEntry.index());
+ assertThat(encounteredIndices, not(hasItem(shardRoutingEntry.getIndexName())));
+ encounteredIndices.add(shardRoutingEntry.getIndexName());
}
}
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/StartedShardsRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/StartedShardsRoutingTests.java
index 0712e9cd02..bbf5396f39 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/StartedShardsRoutingTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/StartedShardsRoutingTests.java
@@ -31,6 +31,7 @@ import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.TestShardRouting;
+import org.elasticsearch.index.Index;
import org.elasticsearch.test.ESAllocationTestCase;
import java.util.Arrays;
@@ -47,14 +48,15 @@ public class StartedShardsRoutingTests extends ESAllocationTestCase {
.settings(settings(Version.CURRENT))
.numberOfShards(3).numberOfReplicas(0)
.build();
+ final Index index = indexMetaData.getIndex();
ClusterState.Builder stateBuilder = ClusterState.builder(ClusterName.DEFAULT)
.nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2")))
.metaData(MetaData.builder().put(indexMetaData, false));
- final ShardRouting initShard = TestShardRouting.newShardRouting("test", 0, "node1", true, ShardRoutingState.INITIALIZING, 1);
- final ShardRouting startedShard = TestShardRouting.newShardRouting("test", 1, "node2", true, ShardRoutingState.STARTED, 1);
- final ShardRouting relocatingShard = TestShardRouting.newShardRouting("test", 2, "node1", "node2", true, ShardRoutingState.RELOCATING, 1);
- stateBuilder.routingTable(RoutingTable.builder().add(IndexRoutingTable.builder("test")
+ final ShardRouting initShard = TestShardRouting.newShardRouting(index, 0, "node1", true, ShardRoutingState.INITIALIZING, 1);
+ final ShardRouting startedShard = TestShardRouting.newShardRouting(index, 1, "node2", true, ShardRoutingState.STARTED, 1);
+ final ShardRouting relocatingShard = TestShardRouting.newShardRouting(index, 2, "node1", "node2", true, ShardRoutingState.RELOCATING, 1);
+ stateBuilder.routingTable(RoutingTable.builder().add(IndexRoutingTable.builder(index)
.addIndexShard(new IndexShardRoutingTable.Builder(initShard.shardId()).addShard(initShard).build())
.addIndexShard(new IndexShardRoutingTable.Builder(startedShard.shardId()).addShard(startedShard).build())
.addIndexShard(new IndexShardRoutingTable.Builder(relocatingShard.shardId()).addShard(relocatingShard).build())).build());
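Rather than repeating the index name as a string, the test now resolves the Index once from the metadata and threads it through the routing helpers. A condensed sketch under the same setup as above:

    final Index index = indexMetaData.getIndex(); // single source of truth for name + UUID
    ShardRouting initShard = TestShardRouting.newShardRouting(
            index, 0, "node1", true, ShardRoutingState.INITIALIZING, 1);
    IndexRoutingTable.Builder builder = IndexRoutingTable.builder(index);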
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java
index 1d60436d3c..5573780e2e 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java
@@ -284,7 +284,7 @@ public class ThrottlingAllocationTests extends ESAllocationTestCase {
assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node2"), 0);
assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node3"), 0);
- RoutingAllocation.Result reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand(clusterState.getRoutingNodes().node("node1").get(0).shardId(), "node1", "node2")));
+ RoutingAllocation.Result reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", clusterState.getRoutingNodes().node("node1").get(0).shardId().id(), "node1", "node2")));
assertEquals(reroute.explanations().explanations().size(), 1);
assertEquals(reroute.explanations().explanations().get(0).decisions().type(), Decision.Type.YES);
routingTable = reroute.routingTable();
@@ -297,7 +297,7 @@ public class ThrottlingAllocationTests extends ESAllocationTestCase {
assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node3"), 0);
// outgoing throttles
- reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand(clusterState.getRoutingNodes().node("node3").get(0).shardId(), "node3", "node1")), true);
+ reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", clusterState.getRoutingNodes().node("node3").get(0).shardId().id(), "node3", "node1")), true);
assertEquals(reroute.explanations().explanations().size(), 1);
assertEquals(reroute.explanations().explanations().get(0).decisions().type(), Decision.Type.THROTTLE);
assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 0);
@@ -312,7 +312,7 @@ public class ThrottlingAllocationTests extends ESAllocationTestCase {
assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0));
// incoming throttles
- reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand(clusterState.getRoutingNodes().node("node3").get(0).shardId(), "node3", "node2")), true);
+ reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", clusterState.getRoutingNodes().node("node3").get(0).shardId().id(), "node3", "node2")), true);
assertEquals(reroute.explanations().explanations().size(), 1);
assertEquals(reroute.explanations().explanations().get(0).decisions().type(), Decision.Type.THROTTLE);
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java
index fa52503eac..6c5862682f 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java
@@ -46,7 +46,6 @@ import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationComman
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.LocalTransportAddress;
-import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESAllocationTestCase;
import org.elasticsearch.test.gateway.NoopGatewayAllocator;
@@ -320,8 +319,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase {
logger.info("--> adding node1 and node2 node");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
- .put(newNode("node1"))
- .put(newNode("node2"))
+ .put(newNode("node1"))
+ .put(newNode("node2"))
).build();
routingTable = strategy.reroute(clusterState, "reroute").routingTable();
@@ -504,7 +503,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase {
logger.info("--> adding node5");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
- .put(newNode("node5"))
+ .put(newNode("node5"))
).build();
routingTable = strategy.reroute(clusterState, "reroute").routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -757,8 +756,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase {
logger.info("--> adding two nodes");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
- .put(newNode("node1"))
- .put(newNode("node2"))
+ .put(newNode("node1"))
+ .put(newNode("node2"))
).build();
routingTable = strategy.reroute(clusterState, "reroute").routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -777,17 +776,17 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase {
logger.info("--> adding node3");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
- .put(newNode("node3"))
+ .put(newNode("node3"))
).build();
- AllocationCommand relocate1 = new MoveAllocationCommand(new ShardId("test", 0), "node2", "node3");
+ AllocationCommand relocate1 = new MoveAllocationCommand("test", 0, "node2", "node3");
AllocationCommands cmds = new AllocationCommands(relocate1);
routingTable = strategy.reroute(clusterState, cmds).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
logShardStates(clusterState);
- AllocationCommand relocate2 = new MoveAllocationCommand(new ShardId("test2", 0), "node2", "node3");
+ AllocationCommand relocate2 = new MoveAllocationCommand("test2", 0, "node2", "node3");
cmds = new AllocationCommands(relocate2);
try {
@@ -848,14 +847,14 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase {
ShardRouting secondRouting = TestShardRouting.newShardRouting("test", 1, "node1", null, null, true, ShardRoutingState.STARTED, 1);
RoutingNode firstRoutingNode = new RoutingNode("node1", discoveryNode1, Arrays.asList(firstRouting, secondRouting));
RoutingTable.Builder builder = RoutingTable.builder().add(
- IndexRoutingTable.builder("test")
- .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId("test", 0))
- .addShard(firstRouting)
- .build()
+ IndexRoutingTable.builder(firstRouting.index())
+ .addIndexShard(new IndexShardRoutingTable.Builder(firstRouting.shardId())
+ .addShard(firstRouting)
+ .build()
)
- .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId("test", 1))
- .addShard(secondRouting)
- .build()
+ .addIndexShard(new IndexShardRoutingTable.Builder(secondRouting.shardId())
+ .addShard(secondRouting)
+ .build()
)
);
ClusterState clusterState = ClusterState.builder(baseClusterState).routingTable(builder.build()).build();
@@ -868,14 +867,14 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase {
secondRouting = TestShardRouting.newShardRouting("test", 1, "node1", "node2", null, true, ShardRoutingState.RELOCATING, 1);
firstRoutingNode = new RoutingNode("node1", discoveryNode1, Arrays.asList(firstRouting, secondRouting));
builder = RoutingTable.builder().add(
- IndexRoutingTable.builder("test")
- .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId("test", 0))
- .addShard(firstRouting)
- .build()
+ IndexRoutingTable.builder(firstRouting.index())
+ .addIndexShard(new IndexShardRoutingTable.Builder(firstRouting.shardId())
+ .addShard(firstRouting)
+ .build()
)
- .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId("test", 1))
- .addShard(secondRouting)
- .build()
+ .addIndexShard(new IndexShardRoutingTable.Builder(secondRouting.shardId())
+ .addShard(secondRouting)
+ .build()
)
);
clusterState = ClusterState.builder(baseClusterState).routingTable(builder.build()).build();
@@ -897,7 +896,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase {
}
};
AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY, new HashSet<>(Arrays.asList(
- new SameShardAllocationDecider(Settings.EMPTY), diskThresholdDecider
+ new SameShardAllocationDecider(Settings.EMPTY), diskThresholdDecider
)));
AllocationService strategy = new AllocationService(settingsBuilder()
.put("cluster.routing.allocation.node_concurrent_recoveries", 10)
@@ -967,14 +966,14 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase {
RoutingNode firstRoutingNode = new RoutingNode("node2", discoveryNode2, Arrays.asList(firstRouting, secondRouting));
RoutingTable.Builder builder = RoutingTable.builder().add(
- IndexRoutingTable.builder("test")
- .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId("test", 0))
- .addShard(firstRouting)
- .build()
+ IndexRoutingTable.builder(firstRouting.index())
+ .addIndexShard(new IndexShardRoutingTable.Builder(firstRouting.shardId())
+ .addShard(firstRouting)
+ .build()
)
- .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId("test", 1))
- .addShard(secondRouting)
- .build()
+ .addIndexShard(new IndexShardRoutingTable.Builder(secondRouting.shardId())
+ .addShard(secondRouting)
+ .build()
)
);
ClusterState clusterState = ClusterState.builder(baseClusterState).routingTable(builder.build()).build();
@@ -1024,14 +1023,14 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase {
secondRouting = TestShardRouting.newShardRouting("test", 1, "node2", "node3", null, true, ShardRoutingState.RELOCATING, 1);
firstRoutingNode = new RoutingNode("node2", discoveryNode2, Arrays.asList(firstRouting, secondRouting));
builder = RoutingTable.builder().add(
- IndexRoutingTable.builder("test")
- .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId("test", 0))
- .addShard(firstRouting)
- .build()
+ IndexRoutingTable.builder(firstRouting.index())
+ .addIndexShard(new IndexShardRoutingTable.Builder(firstRouting.shardId())
+ .addShard(firstRouting)
+ .build()
)
- .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId("test", 1))
- .addShard(secondRouting)
- .build()
+ .addIndexShard(new IndexShardRoutingTable.Builder(secondRouting.shardId())
+ .addShard(secondRouting)
+ .build()
)
);
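Both routing-table rewrites above follow the same pattern: take the Index and the ShardId from the routing entries instead of re-deriving them from string literals. A condensed sketch, assuming firstRouting as built in the test:

    RoutingTable.Builder builder = RoutingTable.builder().add(
            IndexRoutingTable.builder(firstRouting.index()) // Index from the entry itself
                    .addIndexShard(new IndexShardRoutingTable.Builder(firstRouting.shardId())
                            .addShard(firstRouting)
                            .build()));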
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java
index 8551af718e..511449ebc6 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java
@@ -42,6 +42,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.index.Index;
import org.elasticsearch.test.ESTestCase;
import java.util.Arrays;
@@ -95,14 +96,16 @@ public class DiskThresholdDeciderUnitTests extends ESTestCase {
ClusterInfoService cis = EmptyClusterInfoService.INSTANCE;
DiskThresholdDecider decider = new DiskThresholdDecider(Settings.EMPTY, nss, cis, null);
- ShardRouting test_0 = ShardRouting.newUnassigned("test", 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
- DiscoveryNode node_0 = new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE, Version.CURRENT);
- DiscoveryNode node_1 = new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, Version.CURRENT);
-
MetaData metaData = MetaData.builder()
.put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))
.build();
+ final Index index = metaData.index("test").getIndex();
+
+ ShardRouting test_0 = ShardRouting.newUnassigned(index, 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
+ DiscoveryNode node_0 = new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE, Version.CURRENT);
+ DiscoveryNode node_1 = new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, Version.CURRENT);
+
RoutingTable routingTable = RoutingTable.builder()
.addAsNew(metaData.index("test"))
.build();
@@ -140,32 +143,33 @@ public class DiskThresholdDeciderUnitTests extends ESTestCase {
DiscoveryNode node_0 = new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE, Version.CURRENT);
DiscoveryNode node_1 = new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, Version.CURRENT);
- ShardRouting test_0 = ShardRouting.newUnassigned("test", 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
+ MetaData metaData = MetaData.builder()
+ .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))
+ .build();
+ final IndexMetaData indexMetaData = metaData.index("test");
+
+ ShardRouting test_0 = ShardRouting.newUnassigned(indexMetaData.getIndex(), 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
ShardRoutingHelper.initialize(test_0, node_0.getId());
ShardRoutingHelper.moveToStarted(test_0);
shardRoutingMap.put(test_0, "/node0/least");
- ShardRouting test_1 = ShardRouting.newUnassigned("test", 1, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
+ ShardRouting test_1 = ShardRouting.newUnassigned(indexMetaData.getIndex(), 1, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
ShardRoutingHelper.initialize(test_1, node_1.getId());
ShardRoutingHelper.moveToStarted(test_1);
shardRoutingMap.put(test_1, "/node1/least");
- ShardRouting test_2 = ShardRouting.newUnassigned("test", 2, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
+ ShardRouting test_2 = ShardRouting.newUnassigned(indexMetaData.getIndex(), 2, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
ShardRoutingHelper.initialize(test_2, node_1.getId());
ShardRoutingHelper.moveToStarted(test_2);
shardRoutingMap.put(test_2, "/node1/most");
- ShardRouting test_3 = ShardRouting.newUnassigned("test", 3, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
+ ShardRouting test_3 = ShardRouting.newUnassigned(indexMetaData.getIndex(), 3, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
ShardRoutingHelper.initialize(test_3, node_1.getId());
ShardRoutingHelper.moveToStarted(test_3);
// Intentionally not in the shardRoutingMap. We want to test what happens when we don't know where it is.
- MetaData metaData = MetaData.builder()
- .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))
- .build();
-
RoutingTable routingTable = RoutingTable.builder()
- .addAsNew(metaData.index("test"))
+ .addAsNew(indexMetaData)
.build();
ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).build();
@@ -220,37 +224,38 @@ public class DiskThresholdDeciderUnitTests extends ESTestCase {
shardSizes.put("[test][2][r]", 1000L);
shardSizes.put("[other][0][p]", 10000L);
ClusterInfo info = new DevNullClusterInfo(ImmutableOpenMap.of(), ImmutableOpenMap.of(), shardSizes.build());
- ShardRouting test_0 = ShardRouting.newUnassigned("test", 0, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
+ final Index index = new Index("test", "_na_");
+ ShardRouting test_0 = ShardRouting.newUnassigned(index, 0, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
ShardRoutingHelper.initialize(test_0, "node1");
ShardRoutingHelper.moveToStarted(test_0);
ShardRoutingHelper.relocate(test_0, "node2");
- ShardRouting test_1 = ShardRouting.newUnassigned("test", 1, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
+ ShardRouting test_1 = ShardRouting.newUnassigned(index, 1, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
ShardRoutingHelper.initialize(test_1, "node2");
ShardRoutingHelper.moveToStarted(test_1);
ShardRoutingHelper.relocate(test_1, "node1");
- ShardRouting test_2 = ShardRouting.newUnassigned("test", 2, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
+ ShardRouting test_2 = ShardRouting.newUnassigned(index, 2, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
ShardRoutingHelper.initialize(test_2, "node1");
ShardRoutingHelper.moveToStarted(test_2);
- assertEquals(1000l, DiskThresholdDecider.getShardSize(test_2, info));
- assertEquals(100l, DiskThresholdDecider.getShardSize(test_1, info));
- assertEquals(10l, DiskThresholdDecider.getShardSize(test_0, info));
+ assertEquals(1000L, DiskThresholdDecider.getShardSize(test_2, info));
+ assertEquals(100L, DiskThresholdDecider.getShardSize(test_1, info));
+ assertEquals(10L, DiskThresholdDecider.getShardSize(test_0, info));
RoutingNode node = new RoutingNode("node1", new DiscoveryNode("node1", LocalTransportAddress.PROTO, Version.CURRENT), Arrays.asList(test_0, test_1.buildTargetRelocatingShard(), test_2));
- assertEquals(100l, DiskThresholdDecider.sizeOfRelocatingShards(node, info, false, "/dev/null"));
- assertEquals(90l, DiskThresholdDecider.sizeOfRelocatingShards(node, info, true, "/dev/null"));
- assertEquals(0l, DiskThresholdDecider.sizeOfRelocatingShards(node, info, true, "/dev/some/other/dev"));
- assertEquals(0l, DiskThresholdDecider.sizeOfRelocatingShards(node, info, true, "/dev/some/other/dev"));
+ assertEquals(100L, DiskThresholdDecider.sizeOfRelocatingShards(node, info, false, "/dev/null"));
+ assertEquals(90L, DiskThresholdDecider.sizeOfRelocatingShards(node, info, true, "/dev/null"));
+ assertEquals(0L, DiskThresholdDecider.sizeOfRelocatingShards(node, info, true, "/dev/some/other/dev"));
+ assertEquals(0L, DiskThresholdDecider.sizeOfRelocatingShards(node, info, true, "/dev/some/other/dev"));
- ShardRouting test_3 = ShardRouting.newUnassigned("test", 3, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
+ ShardRouting test_3 = ShardRouting.newUnassigned(index, 3, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
ShardRoutingHelper.initialize(test_3, "node1");
ShardRoutingHelper.moveToStarted(test_3);
- assertEquals(0l, DiskThresholdDecider.getShardSize(test_3, info));
+ assertEquals(0L, DiskThresholdDecider.getShardSize(test_3, info));
- ShardRouting other_0 = ShardRouting.newUnassigned("other", 0, null, randomBoolean(), new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
+ ShardRouting other_0 = ShardRouting.newUnassigned(new Index("other", "_NA_"), 0, null, randomBoolean(), new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
ShardRoutingHelper.initialize(other_0, "node2");
ShardRoutingHelper.moveToStarted(other_0);
ShardRoutingHelper.relocate(other_0, "node1");
@@ -258,11 +263,11 @@ public class DiskThresholdDeciderUnitTests extends ESTestCase {
node = new RoutingNode("node1", new DiscoveryNode("node1", LocalTransportAddress.PROTO, Version.CURRENT), Arrays.asList(test_0, test_1.buildTargetRelocatingShard(), test_2, other_0.buildTargetRelocatingShard()));
if (other_0.primary()) {
- assertEquals(10100l, DiskThresholdDecider.sizeOfRelocatingShards(node, info, false, "/dev/null"));
- assertEquals(10090l, DiskThresholdDecider.sizeOfRelocatingShards(node, info, true, "/dev/null"));
+ assertEquals(10100L, DiskThresholdDecider.sizeOfRelocatingShards(node, info, false, "/dev/null"));
+ assertEquals(10090L, DiskThresholdDecider.sizeOfRelocatingShards(node, info, true, "/dev/null"));
} else {
- assertEquals(100l, DiskThresholdDecider.sizeOfRelocatingShards(node, info, false, "/dev/null"));
- assertEquals(90l, DiskThresholdDecider.sizeOfRelocatingShards(node, info, true, "/dev/null"));
+ assertEquals(100L, DiskThresholdDecider.sizeOfRelocatingShards(node, info, false, "/dev/null"));
+ assertEquals(90L, DiskThresholdDecider.sizeOfRelocatingShards(node, info, true, "/dev/null"));
}
}
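ShardRouting.newUnassigned follows suit and takes an Index as its first argument; where no metadata is at hand, the tests construct one directly with the "_na_" UUID placeholder. A sketch:

    Index index = new Index("test", "_na_"); // placeholder UUID, as used throughout this patch
    ShardRouting unassigned = ShardRouting.newUnassigned(
            index, 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));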
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java
index 62005a3fa1..bd1738b59b 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java
@@ -233,18 +233,18 @@ public class EnableAllocationTests extends ESAllocationTestCase {
case PRIMARIES:
for (ShardRouting routing : mutableShardRoutings) {
assertTrue("only primaries are allowed to relocate", routing.primary());
- assertThat("only test index can rebalance", routing.getIndex(), equalTo("test"));
+ assertThat("only test index can rebalance", routing.getIndexName(), equalTo("test"));
}
break;
case REPLICAS:
for (ShardRouting routing : mutableShardRoutings) {
assertFalse("only replicas are allowed to relocate", routing.primary());
- assertThat("only test index can rebalance", routing.getIndex(), equalTo("test"));
+ assertThat("only test index can rebalance", routing.getIndexName(), equalTo("test"));
}
break;
case ALL:
for (ShardRouting routing : mutableShardRoutings) {
- assertThat("only test index can rebalance", routing.getIndex(), equalTo("test"));
+ assertThat("only test index can rebalance", routing.getIndexName(), equalTo("test"));
}
break;
default:
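
The EnableAllocationTests change tracks an API rename in this merge: ShardRouting.getIndex() now returns an Index object rather than the index name, so string comparisons move to getIndexName(). A minimal sketch of the shape of that change, using stand-in types rather than the Elasticsearch classes:

    // Stand-in types; in Elasticsearch the Index object carries name and UUID.
    class IndexNameSketch {
        static final class Index {
            final String name;
            final String uuid;
            Index(String name, String uuid) { this.name = name; this.uuid = uuid; }
        }

        static final class ShardRouting {
            private final Index index;
            ShardRouting(Index index) { this.index = index; }
            Index getIndex() { return index; }           // was: String getIndex()
            String getIndexName() { return index.name; } // what the assertions use now
        }

        public static void main(String[] args) {
            ShardRouting routing = new ShardRouting(new Index("test", "_na_"));
            assert "test".equals(routing.getIndexName());
        }
    }
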
diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java
index 80df54518b..5e01aef90c 100644
--- a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java
@@ -21,13 +21,11 @@ package org.elasticsearch.cluster.settings;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequestBuilder;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
-import org.elasticsearch.cluster.ClusterName;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
+import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
-import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.discovery.DiscoverySettings;
import org.elasticsearch.index.store.IndexStoreConfig;
import org.elasticsearch.test.ESIntegTestCase;
@@ -97,7 +95,7 @@ public class ClusterSettingsIT extends ESIntegTestCase {
assertAcked(response);
assertThat(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), equalTo("1s"));
- assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l));
+ assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1L));
assertThat(discoverySettings.getPublishDiff(), equalTo(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY)));
@@ -120,7 +118,7 @@ public class ClusterSettingsIT extends ESIntegTestCase {
assertAcked(response);
assertThat(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), equalTo("1s"));
- assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l));
+ assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1L));
assertFalse(discoverySettings.getPublishDiff());
response = client().admin().cluster()
.prepareUpdateSettings()
@@ -140,7 +138,7 @@ public class ClusterSettingsIT extends ESIntegTestCase {
assertAcked(response);
assertThat(response.getPersistentSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), equalTo("1s"));
- assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l));
+ assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1L));
assertThat(discoverySettings.getPublishDiff(), equalTo(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.get(Settings.EMPTY)));
@@ -164,7 +162,7 @@ public class ClusterSettingsIT extends ESIntegTestCase {
assertAcked(response);
assertThat(response.getPersistentSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), equalTo("1s"));
- assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l));
+ assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1L));
assertFalse(discoverySettings.getPublishDiff());
response = client().admin().cluster()
.prepareUpdateSettings()
@@ -256,7 +254,7 @@ public class ClusterSettingsIT extends ESIntegTestCase {
assertAcked(response);
assertThat(response.getTransientSettings().getAsMap().get(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey()), equalTo("1s"));
- assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l));
+ assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1L));
try {
client().admin().cluster()
@@ -268,7 +266,7 @@ public class ClusterSettingsIT extends ESIntegTestCase {
assertEquals(ex.getMessage(), "Failed to parse setting [discovery.zen.publish_timeout] with value [whatever] as a time value: unit is missing or unrecognized");
}
- assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l));
+ assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1L));
try {
client().admin().cluster()
@@ -280,7 +278,7 @@ public class ClusterSettingsIT extends ESIntegTestCase {
assertEquals(ex.getMessage(), "Failed to parse value [-1] for setting [discovery.zen.publish_timeout] must be >= 0s");
}
- assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l));
+ assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1L));
}
public void testClusterUpdateSettingsWithBlocks() {
@@ -329,16 +327,30 @@ public class ClusterSettingsIT extends ESIntegTestCase {
}
}
- private void createNode(Settings settings) {
- internalCluster().startNode(Settings.builder()
- .put(ClusterName.SETTING, "ClusterSettingsIT")
- .put("node.name", "ClusterSettingsIT")
- .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
- .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
- .put(EsExecutors.PROCESSORS, 1) // limit the number of threads created
- .put("http.enabled", false)
- .put("config.ignore_system_properties", true) // make sure we get what we set :)
- .put(settings)
- );
+ public void testLoggerLevelUpdate() {
+ assertAcked(prepareCreate("test"));
+ final String rootLevel = ESLoggerFactory.getRootLogger().getLevel();
+ final String testLevel = ESLoggerFactory.getLogger("test").getLevel();
+ try {
+ client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().put("logger._root", "BOOM")).execute().actionGet();
+ fail("Expected IllegalArgumentException");
+ } catch (IllegalArgumentException e) {
+ assertEquals("No enum constant org.elasticsearch.common.logging.ESLoggerFactory.LogLevel.BOOM", e.getMessage());
+ }
+
+ try {
+ client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().put("logger.test", "TRACE").put("logger._root", "trace")).execute().actionGet();
+ assertEquals("TRACE", ESLoggerFactory.getLogger("test").getLevel());
+ assertEquals("TRACE", ESLoggerFactory.getRootLogger().getLevel());
+ } finally {
+ if (randomBoolean()) {
+ client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().putNull("logger.test").putNull("logger._root")).execute().actionGet();
+ } else {
+ client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().putNull("logger.*")).execute().actionGet();
+ }
+ assertEquals(testLevel, ESLoggerFactory.getLogger("test").getLevel());
+ assertEquals(rootLevel, ESLoggerFactory.getRootLogger().getLevel());
+ }
}
+
}
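
The new testLoggerLevelUpdate resets its transient logger overrides in one of two equivalent ways, chosen at random. Pulled out of the test for emphasis (same Settings builder calls as above):

    // Per-key reset, naming exactly the keys the test set...
    Settings perKey = Settings.builder().putNull("logger.test").putNull("logger._root").build();
    // ...or a wildcard reset of every transient logger override at once.
    Settings byWildcard = Settings.builder().putNull("logger.*").build();
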
diff --git a/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java b/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java
index ced1e0097a..2f1bbd6fa5 100644
--- a/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java
@@ -59,28 +59,28 @@ import static org.hamcrest.Matchers.sameInstance;
public class RoutingIteratorTests extends ESAllocationTestCase {
public void testEmptyIterator() {
ShardShuffler shuffler = new RotationShardShuffler(0);
- ShardIterator shardIterator = new PlainShardIterator(new ShardId("test1", 0), shuffler.shuffle(Collections.<ShardRouting>emptyList()));
+ ShardIterator shardIterator = new PlainShardIterator(new ShardId("test1", "_na_", 0), shuffler.shuffle(Collections.<ShardRouting>emptyList()));
assertThat(shardIterator.remaining(), equalTo(0));
assertThat(shardIterator.nextOrNull(), nullValue());
assertThat(shardIterator.remaining(), equalTo(0));
assertThat(shardIterator.nextOrNull(), nullValue());
assertThat(shardIterator.remaining(), equalTo(0));
- shardIterator = new PlainShardIterator(new ShardId("test1", 0), shuffler.shuffle(Collections.<ShardRouting>emptyList()));
+ shardIterator = new PlainShardIterator(new ShardId("test1", "_na_", 0), shuffler.shuffle(Collections.<ShardRouting>emptyList()));
assertThat(shardIterator.remaining(), equalTo(0));
assertThat(shardIterator.nextOrNull(), nullValue());
assertThat(shardIterator.remaining(), equalTo(0));
assertThat(shardIterator.nextOrNull(), nullValue());
assertThat(shardIterator.remaining(), equalTo(0));
- shardIterator = new PlainShardIterator(new ShardId("test1", 0), shuffler.shuffle(Collections.<ShardRouting>emptyList()));
+ shardIterator = new PlainShardIterator(new ShardId("test1", "_na_", 0), shuffler.shuffle(Collections.<ShardRouting>emptyList()));
assertThat(shardIterator.remaining(), equalTo(0));
assertThat(shardIterator.nextOrNull(), nullValue());
assertThat(shardIterator.remaining(), equalTo(0));
assertThat(shardIterator.nextOrNull(), nullValue());
assertThat(shardIterator.remaining(), equalTo(0));
- shardIterator = new PlainShardIterator(new ShardId("test1", 0), shuffler.shuffle(Collections.<ShardRouting>emptyList()));
+ shardIterator = new PlainShardIterator(new ShardId("test1", "_na_", 0), shuffler.shuffle(Collections.<ShardRouting>emptyList()));
assertThat(shardIterator.remaining(), equalTo(0));
assertThat(shardIterator.nextOrNull(), nullValue());
assertThat(shardIterator.remaining(), equalTo(0));
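
The "_na_" literal threaded through this and later hunks is the index UUID slot that ShardId gains in this merge; tests that have no real index metadata pass the not-available marker:

    // index name, index UUID, shard number: the new three-argument form.
    ShardId shardId = new ShardId("test1", "_na_", 0);
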
diff --git a/core/src/test/java/org/elasticsearch/common/blobstore/BlobStoreTests.java b/core/src/test/java/org/elasticsearch/common/blobstore/BlobStoreTests.java
deleted file mode 100644
index 80afa5d51f..0000000000
--- a/core/src/test/java/org/elasticsearch/common/blobstore/BlobStoreTests.java
+++ /dev/null
@@ -1,139 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.common.blobstore;
-
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.BytesRefBuilder;
-import org.apache.lucene.util.LuceneTestCase;
-import org.elasticsearch.common.blobstore.fs.FsBlobStore;
-import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.ByteSizeUnit;
-import org.elasticsearch.common.unit.ByteSizeValue;
-import org.elasticsearch.test.ESTestCase;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.file.Path;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.CoreMatchers.notNullValue;
-
-@LuceneTestCase.SuppressFileSystems("ExtrasFS")
-public class BlobStoreTests extends ESTestCase {
- public void testWriteRead() throws IOException {
- final BlobStore store = newBlobStore();
- final BlobContainer container = store.blobContainer(new BlobPath());
- byte[] data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16)));
- container.writeBlob("foobar", new BytesArray(data));
- try (InputStream stream = container.readBlob("foobar")) {
- BytesRefBuilder target = new BytesRefBuilder();
- while (target.length() < data.length) {
- byte[] buffer = new byte[scaledRandomIntBetween(1, data.length - target.length())];
- int offset = scaledRandomIntBetween(0, buffer.length - 1);
- int read = stream.read(buffer, offset, buffer.length - offset);
- target.append(new BytesRef(buffer, offset, read));
- }
- assertEquals(data.length, target.length());
- assertArrayEquals(data, Arrays.copyOfRange(target.bytes(), 0, target.length()));
- }
- store.close();
- }
-
- public void testMoveAndList() throws IOException {
- final BlobStore store = newBlobStore();
- final BlobContainer container = store.blobContainer(new BlobPath());
- assertThat(container.listBlobs().size(), equalTo(0));
- int numberOfFooBlobs = randomIntBetween(0, 10);
- int numberOfBarBlobs = randomIntBetween(3, 20);
- Map<String, Long> generatedBlobs = new HashMap<>();
- for (int i = 0; i < numberOfFooBlobs; i++) {
- int length = randomIntBetween(10, 100);
- String name = "foo-" + i + "-";
- generatedBlobs.put(name, (long) length);
- createRandomBlob(container, name, length);
- }
- for (int i = 1; i < numberOfBarBlobs; i++) {
- int length = randomIntBetween(10, 100);
- String name = "bar-" + i + "-";
- generatedBlobs.put(name, (long) length);
- createRandomBlob(container, name, length);
- }
- int length = randomIntBetween(10, 100);
- String name = "bar-0-";
- generatedBlobs.put(name, (long) length);
- byte[] data = createRandomBlob(container, name, length);
-
- Map<String, BlobMetaData> blobs = container.listBlobs();
- assertThat(blobs.size(), equalTo(numberOfFooBlobs + numberOfBarBlobs));
- for (Map.Entry<String, Long> generated : generatedBlobs.entrySet()) {
- BlobMetaData blobMetaData = blobs.get(generated.getKey());
- assertThat(generated.getKey(), blobMetaData, notNullValue());
- assertThat(blobMetaData.name(), equalTo(generated.getKey()));
- assertThat(blobMetaData.length(), equalTo(generated.getValue()));
- }
-
- assertThat(container.listBlobsByPrefix("foo-").size(), equalTo(numberOfFooBlobs));
- assertThat(container.listBlobsByPrefix("bar-").size(), equalTo(numberOfBarBlobs));
- assertThat(container.listBlobsByPrefix("baz-").size(), equalTo(0));
-
- String newName = "bar-new";
- // Move to a new location
- container.move(name, newName);
- assertThat(container.listBlobsByPrefix(name).size(), equalTo(0));
- blobs = container.listBlobsByPrefix(newName);
- assertThat(blobs.size(), equalTo(1));
- assertThat(blobs.get(newName).length(), equalTo(generatedBlobs.get(name)));
- assertThat(data, equalTo(readBlobFully(container, newName, length)));
- store.close();
- }
-
- protected byte[] createRandomBlob(BlobContainer container, String name, int length) throws IOException {
- byte[] data = randomBytes(length);
- container.writeBlob(name, new BytesArray(data));
- return data;
- }
-
- protected byte[] readBlobFully(BlobContainer container, String name, int length) throws IOException {
- byte[] data = new byte[length];
- try (InputStream inputStream = container.readBlob(name)) {
- assertThat(inputStream.read(data), equalTo(length));
- assertThat(inputStream.read(), equalTo(-1));
- }
- return data;
- }
-
- protected byte[] randomBytes(int length) {
- byte[] data = new byte[length];
- for (int i = 0; i < data.length; i++) {
- data[i] = (byte) randomInt();
- }
- return data;
- }
-
- protected BlobStore newBlobStore() throws IOException {
- Path tempDir = createTempDir();
- Settings settings = randomBoolean() ? Settings.EMPTY : Settings.builder().put("buffer_size", new ByteSizeValue(randomIntBetween(1, 100), ByteSizeUnit.KB)).build();
- FsBlobStore store = new FsBlobStore(settings, tempDir);
- return store;
- }
-}
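
BlobStoreTests is not simply dropped: its write/read and move/list cases move into shared base classes (ESBlobStoreTestCase and ESBlobStoreContainerTestCase) so every BlobStore implementation can reuse them, as the two new files below show. An implementation's test now only supplies the factory method; a hypothetical subclass for some other store would look like:

    public class MyBlobStoreTests extends ESBlobStoreTestCase {  // hypothetical name
        @Override
        protected BlobStore newBlobStore() throws IOException {
            return createMyBlobStore(); // construct the implementation under test
        }
    }
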
diff --git a/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreContainerTests.java b/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreContainerTests.java
new file mode 100644
index 0000000000..63c04b1c5e
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreContainerTests.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.common.blobstore;
+
+import org.apache.lucene.util.LuceneTestCase;
+import org.elasticsearch.common.blobstore.fs.FsBlobStore;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.test.ESBlobStoreContainerTestCase;
+
+import java.io.IOException;
+import java.nio.file.Path;
+
+@LuceneTestCase.SuppressFileSystems("ExtrasFS")
+public class FsBlobStoreContainerTests extends ESBlobStoreContainerTestCase {
+ protected BlobStore newBlobStore() throws IOException {
+ Path tempDir = createTempDir();
+ Settings settings = randomBoolean() ? Settings.EMPTY : Settings.builder().put("buffer_size", new ByteSizeValue(randomIntBetween(1, 100), ByteSizeUnit.KB)).build();
+ return new FsBlobStore(settings, tempDir);
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreTests.java b/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreTests.java
new file mode 100644
index 0000000000..f6f53549ce
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/common/blobstore/FsBlobStoreTests.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.common.blobstore;
+
+import org.apache.lucene.util.LuceneTestCase;
+import org.elasticsearch.common.blobstore.fs.FsBlobStore;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.test.ESBlobStoreTestCase;
+
+import java.io.IOException;
+import java.nio.file.Path;
+
+@LuceneTestCase.SuppressFileSystems("ExtrasFS")
+public class FsBlobStoreTests extends ESBlobStoreTestCase {
+ protected BlobStore newBlobStore() throws IOException {
+ Path tempDir = createTempDir();
+ Settings settings = randomBoolean() ? Settings.EMPTY : Settings.builder().put("buffer_size", new ByteSizeValue(randomIntBetween(1, 100), ByteSizeUnit.KB)).build();
+ return new FsBlobStore(settings, tempDir);
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java
index 279e31aadd..9311db44da 100644
--- a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java
+++ b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java
@@ -89,7 +89,6 @@ public abstract class AbstractShapeBuilderTestCase<SB extends ShapeBuilder> exte
}
XContentBuilder builder = testShape.toXContent(contentBuilder, ToXContent.EMPTY_PARAMS);
XContentParser shapeParser = XContentHelper.createParser(builder.bytes());
- XContentHelper.createParser(builder.bytes());
shapeParser.nextToken();
ShapeBuilder parsedShape = ShapeBuilder.parse(shapeParser);
assertNotSame(testShape, parsedShape);
diff --git a/core/src/test/java/org/elasticsearch/common/io/FileSystemUtilsTests.java b/core/src/test/java/org/elasticsearch/common/io/FileSystemUtilsTests.java
index 4f2b8f6811..b6266773bf 100644
--- a/core/src/test/java/org/elasticsearch/common/io/FileSystemUtilsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/io/FileSystemUtilsTests.java
@@ -48,91 +48,6 @@ public class FileSystemUtilsTests extends ESTestCase {
dst = createTempDir();
Files.createDirectories(src);
Files.createDirectories(dst);
-
- // We first copy sources test files from src/test/resources
- // Because after when the test runs, src files are moved to their destination
- final Path path = getDataPath("/org/elasticsearch/common/io/copyappend");
- FileSystemUtils.copyDirectoryRecursively(path, src);
- }
-
- public void testMoveOverExistingFileAndAppend() throws IOException {
-
- FileSystemUtils.moveFilesWithoutOverwriting(src.resolve("v1"), dst, ".new");
- assertFileContent(dst, "file1.txt", "version1");
- assertFileContent(dst, "dir/file2.txt", "version1");
-
- FileSystemUtils.moveFilesWithoutOverwriting(src.resolve("v2"), dst, ".new");
- assertFileContent(dst, "file1.txt", "version1");
- assertFileContent(dst, "dir/file2.txt", "version1");
- assertFileContent(dst, "file1.txt.new", "version2");
- assertFileContent(dst, "dir/file2.txt.new", "version2");
- assertFileContent(dst, "file3.txt", "version1");
- assertFileContent(dst, "dir/subdir/file4.txt", "version1");
-
- FileSystemUtils.moveFilesWithoutOverwriting(src.resolve("v3"), dst, ".new");
- assertFileContent(dst, "file1.txt", "version1");
- assertFileContent(dst, "dir/file2.txt", "version1");
- assertFileContent(dst, "file1.txt.new", "version3");
- assertFileContent(dst, "dir/file2.txt.new", "version3");
- assertFileContent(dst, "file3.txt", "version1");
- assertFileContent(dst, "dir/subdir/file4.txt", "version1");
- assertFileContent(dst, "file3.txt.new", "version2");
- assertFileContent(dst, "dir/subdir/file4.txt.new", "version2");
- assertFileContent(dst, "dir/subdir/file5.txt", "version1");
- }
-
- public void testMoveOverExistingFileAndIgnore() throws IOException {
- Path dest = createTempDir();
-
- FileSystemUtils.moveFilesWithoutOverwriting(src.resolve("v1"), dest, null);
- assertFileContent(dest, "file1.txt", "version1");
- assertFileContent(dest, "dir/file2.txt", "version1");
-
- FileSystemUtils.moveFilesWithoutOverwriting(src.resolve("v2"), dest, null);
- assertFileContent(dest, "file1.txt", "version1");
- assertFileContent(dest, "dir/file2.txt", "version1");
- assertFileContent(dest, "file1.txt.new", null);
- assertFileContent(dest, "dir/file2.txt.new", null);
- assertFileContent(dest, "file3.txt", "version1");
- assertFileContent(dest, "dir/subdir/file4.txt", "version1");
-
- FileSystemUtils.moveFilesWithoutOverwriting(src.resolve("v3"), dest, null);
- assertFileContent(dest, "file1.txt", "version1");
- assertFileContent(dest, "dir/file2.txt", "version1");
- assertFileContent(dest, "file1.txt.new", null);
- assertFileContent(dest, "dir/file2.txt.new", null);
- assertFileContent(dest, "file3.txt", "version1");
- assertFileContent(dest, "dir/subdir/file4.txt", "version1");
- assertFileContent(dest, "file3.txt.new", null);
- assertFileContent(dest, "dir/subdir/file4.txt.new", null);
- assertFileContent(dest, "dir/subdir/file5.txt", "version1");
- }
-
- public void testMoveFilesDoesNotCreateSameFileWithSuffix() throws Exception {
- Path[] dirs = new Path[] { createTempDir(), createTempDir(), createTempDir()};
- for (Path dir : dirs) {
- Files.write(dir.resolve("file1.txt"), "file1".getBytes(StandardCharsets.UTF_8));
- Files.createDirectory(dir.resolve("dir"));
- Files.write(dir.resolve("dir").resolve("file2.txt"), "file2".getBytes(StandardCharsets.UTF_8));
- }
-
- FileSystemUtils.moveFilesWithoutOverwriting(dirs[0], dst, ".new");
- assertFileContent(dst, "file1.txt", "file1");
- assertFileContent(dst, "dir/file2.txt", "file2");
-
- // do the same operation again, make sure, no .new files have been added
- FileSystemUtils.moveFilesWithoutOverwriting(dirs[1], dst, ".new");
- assertFileContent(dst, "file1.txt", "file1");
- assertFileContent(dst, "dir/file2.txt", "file2");
- assertFileNotExists(dst.resolve("file1.txt.new"));
- assertFileNotExists(dst.resolve("dir").resolve("file2.txt.new"));
-
- // change file content, make sure it gets updated
- Files.write(dirs[2].resolve("dir").resolve("file2.txt"), "UPDATED".getBytes(StandardCharsets.UTF_8));
- FileSystemUtils.moveFilesWithoutOverwriting(dirs[2], dst, ".new");
- assertFileContent(dst, "file1.txt", "file1");
- assertFileContent(dst, "dir/file2.txt", "file2");
- assertFileContent(dst, "dir/file2.txt.new", "UPDATED");
}
public void testAppend() {
diff --git a/core/src/test/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerTests.java b/core/src/test/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerTests.java
index 8f9c900907..ed8a5cffbf 100644
--- a/core/src/test/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerTests.java
+++ b/core/src/test/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerTests.java
@@ -27,6 +27,7 @@ import org.apache.log4j.spi.LoggingEvent;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;
import org.junit.After;
@@ -53,8 +54,8 @@ public class Log4jESLoggerTests extends ESTestCase {
Path configDir = getDataPath("config");
// Need to set custom path.conf so we can use a custom logging.yml file for the test
Settings settings = Settings.builder()
- .put("path.conf", configDir.toAbsolutePath())
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_CONF_SETTING.getKey(), configDir.toAbsolutePath())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
LogConfigurator.configure(settings, true);
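
The logging-test hunks replace raw settings keys with the typed Setting constants added on Environment (hence the new import above); getKey() resolves to the same "path.conf" and "path.home" strings, so the built Settings are unchanged:

    Settings settings = Settings.builder()
        .put(Environment.PATH_CONF_SETTING.getKey(), configDir.toAbsolutePath()) // "path.conf"
        .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) // "path.home"
        .build();
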
diff --git a/core/src/test/java/org/elasticsearch/common/logging/log4j/LoggingConfigurationTests.java b/core/src/test/java/org/elasticsearch/common/logging/log4j/LoggingConfigurationTests.java
index 2a08dd1e55..5d90edaf7a 100644
--- a/core/src/test/java/org/elasticsearch/common/logging/log4j/LoggingConfigurationTests.java
+++ b/core/src/test/java/org/elasticsearch/common/logging/log4j/LoggingConfigurationTests.java
@@ -54,8 +54,8 @@ public class LoggingConfigurationTests extends ESTestCase {
try {
Path configDir = getDataPath("config");
Settings settings = Settings.builder()
- .put("path.conf", configDir.toAbsolutePath())
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_CONF_SETTING.getKey(), configDir.toAbsolutePath())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
LogConfigurator.configure(settings, true);
@@ -84,8 +84,8 @@ public class LoggingConfigurationTests extends ESTestCase {
Files.write(loggingConf, "{\"json\": \"foo\"}".getBytes(StandardCharsets.UTF_8));
Environment environment = new Environment(
Settings.builder()
- .put("path.conf", tmpDir.toAbsolutePath())
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_CONF_SETTING.getKey(), tmpDir.toAbsolutePath())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build());
Settings.Builder builder = Settings.builder();
@@ -101,8 +101,8 @@ public class LoggingConfigurationTests extends ESTestCase {
Files.write(loggingConf, "key: value".getBytes(StandardCharsets.UTF_8));
Environment environment = new Environment(
Settings.builder()
- .put("path.conf", tmpDir.toAbsolutePath())
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_CONF_SETTING.getKey(), tmpDir.toAbsolutePath())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build());
Settings.Builder builder = Settings.builder();
@@ -120,8 +120,8 @@ public class LoggingConfigurationTests extends ESTestCase {
Files.write(loggingConf2, "yaml: bar".getBytes(StandardCharsets.UTF_8));
Environment environment = new Environment(
Settings.builder()
- .put("path.conf", tmpDir.toAbsolutePath())
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_CONF_SETTING.getKey(), tmpDir.toAbsolutePath())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build());
Settings.Builder builder = Settings.builder();
@@ -138,8 +138,8 @@ public class LoggingConfigurationTests extends ESTestCase {
Files.write(invalidSuffix, "yml: bar".getBytes(StandardCharsets.UTF_8));
Environment environment = new Environment(
Settings.builder()
- .put("path.conf", invalidSuffix.toAbsolutePath())
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_CONF_SETTING.getKey(), invalidSuffix.toAbsolutePath())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build());
Settings.Builder builder = Settings.builder();
@@ -157,8 +157,8 @@ public class LoggingConfigurationTests extends ESTestCase {
Files.write(loggingConf, "appender.file.type: file\n".getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
Environment environment = InternalSettingsPreparer.prepareEnvironment(
Settings.builder()
- .put("path.conf", tmpDir.toAbsolutePath())
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_CONF_SETTING.getKey(), tmpDir.toAbsolutePath())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("logger.test_resolve_order", "TRACE, console")
.put("appender.console.type", "console")
.put("appender.console.layout.type", "consolePattern")
@@ -186,8 +186,8 @@ public class LoggingConfigurationTests extends ESTestCase {
StandardCharsets.UTF_8);
Environment environment = InternalSettingsPreparer.prepareEnvironment(
Settings.builder()
- .put("path.conf", tmpDir.toAbsolutePath())
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_CONF_SETTING.getKey(), tmpDir.toAbsolutePath())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build(), new CliToolTestCase.MockTerminal());
LogConfigurator.configure(environment.settings(), false);
ESLogger esLogger = Log4jESLoggerFactory.getLogger("test_config_not_read");
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/ShardCoreKeyMapTests.java b/core/src/test/java/org/elasticsearch/common/lucene/ShardCoreKeyMapTests.java
index 0c14e1a0bc..5cba06741a 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/ShardCoreKeyMapTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/ShardCoreKeyMapTests.java
@@ -62,7 +62,7 @@ public class ShardCoreKeyMapTests extends ESTestCase {
try (Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir)) {
writer.addDocument(new Document());
- try (DirectoryReader dirReader = ElasticsearchDirectoryReader.wrap(writer.getReader(), new ShardId("index1", 1))) {
+ try (DirectoryReader dirReader = ElasticsearchDirectoryReader.wrap(writer.getReader(), new ShardId("index1", "_na_", 1))) {
reader = dirReader.leaves().get(0).reader();
}
}
@@ -89,9 +89,9 @@ public class ShardCoreKeyMapTests extends ESTestCase {
RandomIndexWriter w3 = new RandomIndexWriter(random(), dir3);
w3.addDocument(new Document());
- ShardId shardId1 = new ShardId("index1", 1);
- ShardId shardId2 = new ShardId("index1", 3);
- ShardId shardId3 = new ShardId("index2", 2);
+ ShardId shardId1 = new ShardId("index1", "_na_", 1);
+ ShardId shardId2 = new ShardId("index1", "_na_", 3);
+ ShardId shardId3 = new ShardId("index2", "_na_", 2);
ElasticsearchDirectoryReader reader1 = ElasticsearchDirectoryReader.wrap(w1.getReader(), shardId1);
ElasticsearchDirectoryReader reader2 = ElasticsearchDirectoryReader.wrap(w2.getReader(), shardId2);
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java b/core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java
index 3c4a34d952..7fb3ec0c2e 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java
@@ -54,7 +54,7 @@ public class ESDirectoryReaderTests extends ESTestCase {
iw.addDocument(doc);
// open reader
- ShardId shardId = new ShardId(new Index("fake"), 1);
+ ShardId shardId = new ShardId("fake", "_na_", 1);
DirectoryReader ir = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(iw, true), shardId);
assertEquals(2, ir.numDocs());
assertEquals(1, ir.leaves().size());
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/MoreLikeThisQueryTests.java b/core/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/MoreLikeThisQueryTests.java
index 573138c50f..b0e2ea873c 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/MoreLikeThisQueryTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/MoreLikeThisQueryTests.java
@@ -62,7 +62,7 @@ public class MoreLikeThisQueryTests extends ESTestCase {
mltQuery.setMinTermFrequency(1);
mltQuery.setMinDocFreq(1);
long count = searcher.count(mltQuery);
- assertThat(count, equalTo(2l));
+ assertThat(count, equalTo(2L));
reader.close();
indexWriter.close();
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/store/InputStreamIndexInputTests.java b/core/src/test/java/org/elasticsearch/common/lucene/store/InputStreamIndexInputTests.java
index 99acdde7af..e8d8b914a4 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/store/InputStreamIndexInputTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/store/InputStreamIndexInputTests.java
@@ -51,7 +51,7 @@ public class InputStreamIndexInputTests extends ESTestCase {
for (int i = 0; i < 3; i++) {
InputStreamIndexInput is = new InputStreamIndexInput(input, 1);
assertThat(input.getFilePointer(), lessThan(input.length()));
- assertThat(is.actualSizeToRead(), equalTo(1l));
+ assertThat(is.actualSizeToRead(), equalTo(1L));
assertThat(is.read(), equalTo(1));
assertThat(is.read(), equalTo(-1));
}
@@ -59,14 +59,14 @@ public class InputStreamIndexInputTests extends ESTestCase {
for (int i = 0; i < 3; i++) {
InputStreamIndexInput is = new InputStreamIndexInput(input, 1);
assertThat(input.getFilePointer(), lessThan(input.length()));
- assertThat(is.actualSizeToRead(), equalTo(1l));
+ assertThat(is.actualSizeToRead(), equalTo(1L));
assertThat(is.read(), equalTo(2));
assertThat(is.read(), equalTo(-1));
}
assertThat(input.getFilePointer(), equalTo(input.length()));
InputStreamIndexInput is = new InputStreamIndexInput(input, 1);
- assertThat(is.actualSizeToRead(), equalTo(0l));
+ assertThat(is.actualSizeToRead(), equalTo(0L));
assertThat(is.read(), equalTo(-1));
}
@@ -89,7 +89,7 @@ public class InputStreamIndexInputTests extends ESTestCase {
for (int i = 0; i < 3; i++) {
assertThat(input.getFilePointer(), lessThan(input.length()));
InputStreamIndexInput is = new InputStreamIndexInput(input, 1);
- assertThat(is.actualSizeToRead(), equalTo(1l));
+ assertThat(is.actualSizeToRead(), equalTo(1L));
assertThat(is.read(read), equalTo(1));
assertThat(read[0], equalTo((byte) 1));
}
@@ -97,14 +97,14 @@ public class InputStreamIndexInputTests extends ESTestCase {
for (int i = 0; i < 3; i++) {
assertThat(input.getFilePointer(), lessThan(input.length()));
InputStreamIndexInput is = new InputStreamIndexInput(input, 1);
- assertThat(is.actualSizeToRead(), equalTo(1l));
+ assertThat(is.actualSizeToRead(), equalTo(1L));
assertThat(is.read(read), equalTo(1));
assertThat(read[0], equalTo((byte) 2));
}
assertThat(input.getFilePointer(), equalTo(input.length()));
InputStreamIndexInput is = new InputStreamIndexInput(input, 1);
- assertThat(is.actualSizeToRead(), equalTo(0l));
+ assertThat(is.actualSizeToRead(), equalTo(0L));
assertThat(is.read(read), equalTo(-1));
}
@@ -124,28 +124,28 @@ public class InputStreamIndexInputTests extends ESTestCase {
assertThat(input.getFilePointer(), lessThan(input.length()));
InputStreamIndexInput is = new InputStreamIndexInput(input, 2);
- assertThat(is.actualSizeToRead(), equalTo(2l));
+ assertThat(is.actualSizeToRead(), equalTo(2L));
assertThat(is.read(), equalTo(1));
assertThat(is.read(), equalTo(1));
assertThat(is.read(), equalTo(-1));
assertThat(input.getFilePointer(), lessThan(input.length()));
is = new InputStreamIndexInput(input, 2);
- assertThat(is.actualSizeToRead(), equalTo(2l));
+ assertThat(is.actualSizeToRead(), equalTo(2L));
assertThat(is.read(), equalTo(1));
assertThat(is.read(), equalTo(2));
assertThat(is.read(), equalTo(-1));
assertThat(input.getFilePointer(), lessThan(input.length()));
is = new InputStreamIndexInput(input, 2);
- assertThat(is.actualSizeToRead(), equalTo(2l));
+ assertThat(is.actualSizeToRead(), equalTo(2L));
assertThat(is.read(), equalTo(2));
assertThat(is.read(), equalTo(2));
assertThat(is.read(), equalTo(-1));
assertThat(input.getFilePointer(), equalTo(input.length()));
is = new InputStreamIndexInput(input, 2);
- assertThat(is.actualSizeToRead(), equalTo(0l));
+ assertThat(is.actualSizeToRead(), equalTo(0L));
assertThat(is.read(), equalTo(-1));
}
@@ -167,28 +167,28 @@ public class InputStreamIndexInputTests extends ESTestCase {
assertThat(input.getFilePointer(), lessThan(input.length()));
InputStreamIndexInput is = new InputStreamIndexInput(input, 2);
- assertThat(is.actualSizeToRead(), equalTo(2l));
+ assertThat(is.actualSizeToRead(), equalTo(2L));
assertThat(is.read(read), equalTo(2));
assertThat(read[0], equalTo((byte) 1));
assertThat(read[1], equalTo((byte) 1));
assertThat(input.getFilePointer(), lessThan(input.length()));
is = new InputStreamIndexInput(input, 2);
- assertThat(is.actualSizeToRead(), equalTo(2l));
+ assertThat(is.actualSizeToRead(), equalTo(2L));
assertThat(is.read(read), equalTo(2));
assertThat(read[0], equalTo((byte) 1));
assertThat(read[1], equalTo((byte) 2));
assertThat(input.getFilePointer(), lessThan(input.length()));
is = new InputStreamIndexInput(input, 2);
- assertThat(is.actualSizeToRead(), equalTo(2l));
+ assertThat(is.actualSizeToRead(), equalTo(2L));
assertThat(is.read(read), equalTo(2));
assertThat(read[0], equalTo((byte) 2));
assertThat(read[1], equalTo((byte) 2));
assertThat(input.getFilePointer(), equalTo(input.length()));
is = new InputStreamIndexInput(input, 2);
- assertThat(is.actualSizeToRead(), equalTo(0l));
+ assertThat(is.actualSizeToRead(), equalTo(0L));
assertThat(is.read(read), equalTo(-1));
}
@@ -210,7 +210,7 @@ public class InputStreamIndexInputTests extends ESTestCase {
assertThat(input.getFilePointer(), lessThan(input.length()));
InputStreamIndexInput is = new InputStreamIndexInput(input, 4);
- assertThat(is.actualSizeToRead(), equalTo(4l));
+ assertThat(is.actualSizeToRead(), equalTo(4L));
assertThat(is.read(read), equalTo(4));
assertThat(read[0], equalTo((byte) 1));
assertThat(read[1], equalTo((byte) 1));
@@ -219,14 +219,14 @@ public class InputStreamIndexInputTests extends ESTestCase {
assertThat(input.getFilePointer(), lessThan(input.length()));
is = new InputStreamIndexInput(input, 4);
- assertThat(is.actualSizeToRead(), equalTo(2l));
+ assertThat(is.actualSizeToRead(), equalTo(2L));
assertThat(is.read(read), equalTo(2));
assertThat(read[0], equalTo((byte) 2));
assertThat(read[1], equalTo((byte) 2));
assertThat(input.getFilePointer(), equalTo(input.length()));
is = new InputStreamIndexInput(input, 4);
- assertThat(is.actualSizeToRead(), equalTo(0l));
+ assertThat(is.actualSizeToRead(), equalTo(0L));
assertThat(is.read(read), equalTo(-1));
}
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java b/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java
index d6abcfe773..7539d2aa63 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java
@@ -78,7 +78,7 @@ public class VersionsTests extends ESTestCase {
public void testVersions() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
- DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", 1));
+ DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1));
MatcherAssert.assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(Versions.NOT_FOUND));
Document doc = new Document();
@@ -93,8 +93,8 @@ public class VersionsTests extends ESTestCase {
doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 1));
writer.updateDocument(new Term(UidFieldMapper.NAME, "1"), doc);
directoryReader = reopen(directoryReader);
- assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(1l));
- assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version, equalTo(1l));
+ assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(1L));
+ assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version, equalTo(1L));
doc = new Document();
Field uid = new Field(UidFieldMapper.NAME, "1", UidFieldMapper.Defaults.FIELD_TYPE);
@@ -103,8 +103,8 @@ public class VersionsTests extends ESTestCase {
doc.add(version);
writer.updateDocument(new Term(UidFieldMapper.NAME, "1"), doc);
directoryReader = reopen(directoryReader);
- assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(2l));
- assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version, equalTo(2l));
+ assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(2L));
+ assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version, equalTo(2L));
// test reuse of uid field
doc = new Document();
@@ -114,8 +114,8 @@ public class VersionsTests extends ESTestCase {
writer.updateDocument(new Term(UidFieldMapper.NAME, "1"), doc);
directoryReader = reopen(directoryReader);
- assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(3l));
- assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version, equalTo(3l));
+ assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(3L));
+ assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version, equalTo(3L));
writer.deleteDocuments(new Term(UidFieldMapper.NAME, "1"));
directoryReader = reopen(directoryReader);
@@ -145,17 +145,17 @@ public class VersionsTests extends ESTestCase {
docs.add(doc);
writer.updateDocuments(new Term(UidFieldMapper.NAME, "1"), docs);
- DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", 1));
- assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(5l));
- assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version, equalTo(5l));
+ DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1));
+ assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(5L));
+ assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version, equalTo(5L));
version.setLongValue(6L);
writer.updateDocuments(new Term(UidFieldMapper.NAME, "1"), docs);
version.setLongValue(7L);
writer.updateDocuments(new Term(UidFieldMapper.NAME, "1"), docs);
directoryReader = reopen(directoryReader);
- assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(7l));
- assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version, equalTo(7l));
+ assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(7L));
+ assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version, equalTo(7L));
writer.deleteDocuments(new Term(UidFieldMapper.NAME, "1"));
directoryReader = reopen(directoryReader);
@@ -170,7 +170,7 @@ public class VersionsTests extends ESTestCase {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
- DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", 1));
+ DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1));
MatcherAssert.assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(Versions.NOT_FOUND));
Document doc = new Document();
@@ -184,8 +184,8 @@ public class VersionsTests extends ESTestCase {
writer.commit();
directoryReader = reopen(directoryReader);
- assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(1l));
- assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "2")), equalTo(2l));
+ assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(1L));
+ assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "2")), equalTo(2L));
assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "3")), equalTo(Versions.NOT_FOUND));
directoryReader.close();
writer.close();
@@ -281,7 +281,7 @@ public class VersionsTests extends ESTestCase {
// Force merge and check versions
iw.forceMerge(1, true);
- final LeafReader ir = SlowCompositeReaderWrapper.wrap(ElasticsearchDirectoryReader.wrap(DirectoryReader.open(iw.getDirectory()), new ShardId("foo", 1)));
+ final LeafReader ir = SlowCompositeReaderWrapper.wrap(ElasticsearchDirectoryReader.wrap(DirectoryReader.open(iw.getDirectory()), new ShardId("foo", "_na_", 1)));
final NumericDocValues versions = ir.getNumericDocValues(VersionFieldMapper.NAME);
assertThat(versions, notNullValue());
for (int i = 0; i < ir.maxDoc(); ++i) {
@@ -334,7 +334,7 @@ public class VersionsTests extends ESTestCase {
assertEquals(87, Versions.loadVersion(reader, new Term(UidFieldMapper.NAME, "6")));
assertEquals(size+1, Versions.lookupStates.size());
// now wrap the reader
- DirectoryReader wrapped = ElasticsearchDirectoryReader.wrap(reader, new ShardId("bogus", 5));
+ DirectoryReader wrapped = ElasticsearchDirectoryReader.wrap(reader, new ShardId("bogus", "_na_", 5));
assertEquals(87, Versions.loadVersion(wrapped, new Term(UidFieldMapper.NAME, "6")));
// same size map: core cache key is shared
assertEquals(size+1, Versions.lookupStates.size());
diff --git a/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java
index 82cabf7ec5..0fb9b1a92f 100644
--- a/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java
+++ b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java
@@ -82,7 +82,7 @@ public class NetworkModuleTests extends ModuleTestCase {
static class FakeRestHandler extends BaseRestHandler {
public FakeRestHandler() {
- super(null, null, null);
+ super(null, null);
}
@Override
protected void handleRequest(RestRequest request, RestChannel channel, Client client) throws Exception {}
@@ -143,7 +143,7 @@ public class NetworkModuleTests extends ModuleTestCase {
}
// not added if http is disabled
- settings = Settings.builder().put(NetworkModule.HTTP_ENABLED, false).build();
+ settings = Settings.builder().put(NetworkModule.HTTP_ENABLED.getKey(), false).build();
module = new NetworkModule(new NetworkService(settings), settings, false, null);
assertNotBound(module, HttpServerTransport.class);
}
diff --git a/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java b/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java
index e90691ee40..2c4d78adbd 100644
--- a/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java
+++ b/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java
@@ -101,8 +101,8 @@ public class TimeZoneRoundingTests extends ESTestCase {
int timezoneOffset = -2;
Rounding tzRounding = TimeZoneRounding.builder(DateTimeUnit.DAY_OF_MONTH).timeZone(DateTimeZone.forOffsetHours(timezoneOffset))
.build();
- assertThat(tzRounding.round(0), equalTo(0l - TimeValue.timeValueHours(24 + timezoneOffset).millis()));
- assertThat(tzRounding.nextRoundingValue(0l - TimeValue.timeValueHours(24 + timezoneOffset).millis()), equalTo(0l - TimeValue
+ assertThat(tzRounding.round(0), equalTo(0L - TimeValue.timeValueHours(24 + timezoneOffset).millis()));
+ assertThat(tzRounding.nextRoundingValue(0L - TimeValue.timeValueHours(24 + timezoneOffset).millis()), equalTo(0L - TimeValue
.timeValueHours(timezoneOffset).millis()));
tzRounding = TimeZoneRounding.builder(DateTimeUnit.DAY_OF_MONTH).timeZone(DateTimeZone.forID("-08:00")).build();
@@ -135,8 +135,8 @@ public class TimeZoneRoundingTests extends ESTestCase {
public void testTimeTimeZoneRounding() {
// hour unit
Rounding tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forOffsetHours(-2)).build();
- assertThat(tzRounding.round(0), equalTo(0l));
- assertThat(tzRounding.nextRoundingValue(0l), equalTo(TimeValue.timeValueHours(1l).getMillis()));
+ assertThat(tzRounding.round(0), equalTo(0L));
+ assertThat(tzRounding.nextRoundingValue(0L), equalTo(TimeValue.timeValueHours(1L).getMillis()));
tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forOffsetHours(-2)).build();
assertThat(tzRounding.round(utc("2009-02-03T01:01:01")), equalTo(utc("2009-02-03T01:00:00")));
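
For the hour-unit assertions above, the rounding reduces to plain integer arithmetic: a whole-hour timezone offset does not shift hour boundaries, which is why round(0) stays 0 even at UTC-2. A minimal sketch, ignoring DST and the Joda plumbing:

    class HourRoundingSketch {
        static final long HOUR = 60 * 60 * 1000L;

        // Floor a (non-negative) UTC timestamp to its hour boundary.
        static long round(long utcMillis) { return (utcMillis / HOUR) * HOUR; }

        static long nextRoundingValue(long rounded) { return rounded + HOUR; }

        public static void main(String[] args) {
            assert round(0) == 0L;                // tzRounding.round(0) above
            assert nextRoundingValue(0L) == HOUR; // TimeValue.timeValueHours(1L).getMillis()
        }
    }
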
diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
index 088d4fa5ac..58f5cde65c 100644
--- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
@@ -22,6 +22,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider;
+import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.transport.TransportService;
@@ -225,4 +226,77 @@ public class ScopedSettingsTests extends ESTestCase {
return metaData;
}
+ public void testKeyPattern() {
+ assertTrue(AbstractScopedSettings.isValidKey("a.b.c-b.d"));
+ assertTrue(AbstractScopedSettings.isValidKey("a.b.c.d"));
+ assertTrue(AbstractScopedSettings.isValidKey("a.b_012.c_b.d"));
+ assertTrue(AbstractScopedSettings.isValidKey("a"));
+ assertFalse(AbstractScopedSettings.isValidKey("a b"));
+ assertFalse(AbstractScopedSettings.isValidKey(""));
+ assertFalse(AbstractScopedSettings.isValidKey("\""));
+
+ try {
+ new IndexScopedSettings(
+ Settings.EMPTY, Collections.singleton(Setting.groupSetting("boo .", false, Setting.Scope.INDEX)));
+ fail();
+ } catch (IllegalArgumentException e) {
+ assertEquals("illegal settings key: [boo .]", e.getMessage());
+ }
+ new IndexScopedSettings(
+ Settings.EMPTY, Collections.singleton(Setting.groupSetting("boo.", false, Setting.Scope.INDEX)));
+ try {
+ new IndexScopedSettings(
+ Settings.EMPTY, Collections.singleton(Setting.boolSetting("boo.", true, false, Setting.Scope.INDEX)));
+ fail();
+ } catch (IllegalArgumentException e) {
+ assertEquals("illegal settings key: [boo.]", e.getMessage());
+ }
+ new IndexScopedSettings(
+ Settings.EMPTY, Collections.singleton(Setting.boolSetting("boo", true, false, Setting.Scope.INDEX)));
+ }
+
+ public void testLoggingUpdates() {
+ final String level = ESLoggerFactory.getRootLogger().getLevel();
+ final String testLevel = ESLoggerFactory.getLogger("test").getLevel();
+ String property = System.getProperty("es.logger.level");
+ Settings.Builder builder = Settings.builder();
+ if (property != null) {
+ builder.put("logger.level", property);
+ }
+ try {
+ ClusterSettings settings = new ClusterSettings(builder.build(), ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+ try {
+ settings.validate(Settings.builder().put("logger._root", "boom").build());
+ fail();
+ } catch (IllegalArgumentException ex) {
+ assertEquals("No enum constant org.elasticsearch.common.logging.ESLoggerFactory.LogLevel.BOOM", ex.getMessage());
+ }
+ assertEquals(level, ESLoggerFactory.getRootLogger().getLevel());
+ settings.applySettings(Settings.builder().put("logger._root", "TRACE").build());
+ assertEquals("TRACE", ESLoggerFactory.getRootLogger().getLevel());
+ settings.applySettings(Settings.builder().build());
+ assertEquals(level, ESLoggerFactory.getRootLogger().getLevel());
+ settings.applySettings(Settings.builder().put("logger.test", "TRACE").build());
+ assertEquals("TRACE", ESLoggerFactory.getLogger("test").getLevel());
+ settings.applySettings(Settings.builder().build());
+ assertEquals(testLevel, ESLoggerFactory.getLogger("test").getLevel());
+ } finally {
+ ESLoggerFactory.getRootLogger().setLevel(level);
+ ESLoggerFactory.getLogger("test").setLevel(testLevel);
+ }
+ }
+
+ public void testFallbackToLoggerLevel() {
+ final String level = ESLoggerFactory.getRootLogger().getLevel();
+ try {
+ ClusterSettings settings = new ClusterSettings(Settings.builder().put("logger.level", "ERROR").build(), ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+ assertEquals(level, ESLoggerFactory.getRootLogger().getLevel());
+ settings.applySettings(Settings.builder().put("logger._root", "TRACE").build());
+ assertEquals("TRACE", ESLoggerFactory.getRootLogger().getLevel());
+ settings.applySettings(Settings.builder().build()); // here we fall back to 'logger.level' which is our default.
+ assertEquals("ERROR", ESLoggerFactory.getRootLogger().getLevel());
+ } finally {
+ ESLoggerFactory.getRootLogger().setLevel(level);
+ }
+ }
}
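
The three new ScopedSettingsTests methods pin down the validate-then-apply contract for dynamic logger settings; reduced to its calls (same classes as above):

    ClusterSettings settings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
    try {
        settings.validate(Settings.builder().put("logger._root", "boom").build()); // rejected up front
    } catch (IllegalArgumentException expected) { /* bad level is never applied */ }
    settings.applySettings(Settings.builder().put("logger._root", "TRACE").build()); // takes effect
    settings.applySettings(Settings.EMPTY); // reverts to the default (or the 'logger.level' fallback)
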
diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java
index cccfa37320..f5b84fb366 100644
--- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java
+++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java
@@ -27,6 +27,7 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
public class SettingTests extends ESTestCase {
@@ -44,6 +45,12 @@ public class SettingTests extends ESTestCase {
assertFalse(byteSizeValueSetting.isGroupSetting());
ByteSizeValue byteSizeValue = byteSizeValueSetting.get(Settings.EMPTY);
assertEquals(byteSizeValue.bytes(), 1024);
+
+ byteSizeValueSetting = Setting.byteSizeSetting("a.byte.size", s -> "2048b", true, Setting.Scope.CLUSTER);
+ byteSizeValue = byteSizeValueSetting.get(Settings.EMPTY);
+ assertEquals(byteSizeValue.bytes(), 2048);
+
+
AtomicReference<ByteSizeValue> value = new AtomicReference<>(null);
ClusterSettings.SettingUpdater<ByteSizeValue> settingUpdater = byteSizeValueSetting.newUpdater(value::set, logger);
try {
@@ -104,13 +111,17 @@ public class SettingTests extends ESTestCase {
TimeValue defaultValue = TimeValue.timeValueMillis(randomIntBetween(0, 1000000));
Setting<TimeValue> setting = Setting.positiveTimeSetting("my.time.value", defaultValue, randomBoolean(), Setting.Scope.CLUSTER);
assertFalse(setting.isGroupSetting());
- String aDefault = setting.getDefault(Settings.EMPTY);
+ String aDefault = setting.getDefaultRaw(Settings.EMPTY);
assertEquals(defaultValue.millis() + "ms", aDefault);
assertEquals(defaultValue.millis(), setting.get(Settings.EMPTY).millis());
+ assertEquals(defaultValue, setting.getDefault(Settings.EMPTY));
Setting<String> secondaryDefault = new Setting<>("foo.bar", (s) -> s.get("old.foo.bar", "some_default"), (s) -> s, randomBoolean(), Setting.Scope.CLUSTER);
assertEquals("some_default", secondaryDefault.get(Settings.EMPTY));
assertEquals("42", secondaryDefault.get(Settings.builder().put("old.foo.bar", 42).build()));
+ Setting<String> secondaryDefaultViaSettings = new Setting<>("foo.bar", secondaryDefault, (s) -> s, randomBoolean(), Setting.Scope.CLUSTER);
+ assertEquals("some_default", secondaryDefaultViaSettings.get(Settings.EMPTY));
+ assertEquals("42", secondaryDefaultViaSettings.get(Settings.builder().put("old.foo.bar", 42).build()));
}
public void testComplexType() {
@@ -298,6 +309,26 @@ public class SettingTests extends ESTestCase {
for (int i = 0; i < intValues.size(); i++) {
assertEquals(i, intValues.get(i).intValue());
}
+
+ Setting<List<String>> settingWithFallback = Setting.listSetting("foo.baz", listSetting, s -> s, true, Setting.Scope.CLUSTER);
+ value = settingWithFallback.get(Settings.EMPTY);
+ assertEquals(1, value.size());
+ assertEquals("foo,bar", value.get(0));
+
+ value = settingWithFallback.get(Settings.builder().putArray("foo.bar", "1", "2").build());
+ assertEquals(2, value.size());
+ assertEquals("1", value.get(0));
+ assertEquals("2", value.get(1));
+
+ value = settingWithFallback.get(Settings.builder().putArray("foo.baz", "3", "4").build());
+ assertEquals(2, value.size());
+ assertEquals("3", value.get(0));
+ assertEquals("4", value.get(1));
+
+ value = settingWithFallback.get(Settings.builder().putArray("foo.baz", "3", "4").putArray("foo.bar", "1", "2").build());
+ assertEquals(2, value.size());
+ assertEquals("3", value.get(0));
+ assertEquals("4", value.get(1));
}
public void testListSettingAcceptsNumberSyntax() {
@@ -316,6 +347,42 @@ public class SettingTests extends ESTestCase {
assertFalse(listSetting.match("foo_bar.1"));
assertTrue(listSetting.match("foo.bar"));
assertTrue(listSetting.match("foo.bar." + randomIntBetween(0,10000)));
+ }
+
+ public void testDynamicKeySetting() {
+ Setting<Boolean> setting = Setting.dynamicKeySetting("foo.", "false", Boolean::parseBoolean, false, Setting.Scope.CLUSTER);
+ assertTrue(setting.hasComplexMatcher());
+ assertTrue(setting.match("foo.bar"));
+ assertFalse(setting.match("foo"));
+ Setting<Boolean> concreteSetting = setting.getConcreteSetting("foo.bar");
+ assertTrue(concreteSetting.get(Settings.builder().put("foo.bar", "true").build()));
+ assertFalse(concreteSetting.get(Settings.builder().put("foo.baz", "true").build()));
+
+ try {
+ setting.getConcreteSetting("foo");
+ fail();
+ } catch (IllegalArgumentException ex) {
+ assertEquals("key must match setting but didn't [foo]", ex.getMessage());
+ }
+ }
+
+ public void testMinMaxInt() {
+ Setting<Integer> integerSetting = Setting.intSetting("foo.bar", 1, 0, 10, false, Setting.Scope.CLUSTER);
+ try {
+ integerSetting.get(Settings.builder().put("foo.bar", 11).build());
+ fail();
+ } catch (IllegalArgumentException ex) {
+ assertEquals("Failed to parse value [11] for setting [foo.bar] must be =< 10", ex.getMessage());
+ }
+
+ try {
+ integerSetting.get(Settings.builder().put("foo.bar", -1).build());
+ fail();
+ } catch (IllegalArgumentException ex) {
+ assertEquals("Failed to parse value [-1] for setting [foo.bar] must be >= 0", ex.getMessage());
+ }
+ assertEquals(5, integerSetting.get(Settings.builder().put("foo.bar", 5).build()).intValue());
+ assertEquals(1, integerSetting.get(Settings.EMPTY).intValue());
}
}
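
To make the new Setting features above easier to follow, here is a small sketch combining the bounded-int setting from testMinMaxInt with the dynamic-key setting from testDynamicKeySetting. The factory-method signatures are copied from the tests; the wrapper class and the key "foo.enabled" are illustrative.

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;

public class SettingSketch {
    public static void main(String[] args) {
        // Bounded integer setting: default 1, valid range [0, 10].
        Setting<Integer> bounded = Setting.intSetting("foo.bar", 1, 0, 10, false, Setting.Scope.CLUSTER);
        int five = bounded.get(Settings.builder().put("foo.bar", 5).build()); // 5
        int one = bounded.get(Settings.EMPTY);                                // default: 1
        // bounded.get(...) with 11 would throw IllegalArgumentException ("must be =< 10").

        // Dynamic-key setting: one definition covers a whole key namespace.
        Setting<Boolean> byPrefix =
                Setting.dynamicKeySetting("foo.", "false", Boolean::parseBoolean, false, Setting.Scope.CLUSTER);
        Setting<Boolean> concrete = byPrefix.getConcreteSetting("foo.enabled");
        boolean enabled = concrete.get(Settings.builder().put("foo.enabled", "true").build()); // true
        System.out.println(five + " " + one + " " + enabled);
    }
}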
diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java
new file mode 100644
index 0000000000..290eec0c0b
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java
@@ -0,0 +1,101 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.settings;
+
+import org.elasticsearch.common.inject.ModuleTestCase;
+
+public class SettingsModuleTests extends ModuleTestCase {
+
+ public void testValidate() {
+ {
+ Settings settings = Settings.builder().put("cluster.routing.allocation.balance.shard", "2.0").build();
+ SettingsModule module = new SettingsModule(settings, new SettingsFilter(Settings.EMPTY));
+ assertInstanceBinding(module, Settings.class, (s) -> s == settings);
+ }
+ {
+ Settings settings = Settings.builder().put("cluster.routing.allocation.balance.shard", "[2.0]").build();
+ SettingsModule module = new SettingsModule(settings, new SettingsFilter(Settings.EMPTY));
+ try {
+ assertInstanceBinding(module, Settings.class, (s) -> s == settings);
+ fail();
+ } catch (IllegalArgumentException ex) {
+ assertEquals("Failed to parse value [[2.0]] for setting [cluster.routing.allocation.balance.shard]", ex.getMessage());
+ }
+ }
+ }
+
+ public void testRegisterSettings() {
+ {
+ Settings settings = Settings.builder().put("some.custom.setting", "2.0").build();
+ SettingsModule module = new SettingsModule(settings, new SettingsFilter(Settings.EMPTY));
+ module.registerSetting(Setting.floatSetting("some.custom.setting", 1.0f, false, Setting.Scope.CLUSTER));
+ assertInstanceBinding(module, Settings.class, (s) -> s == settings);
+ }
+ {
+ Settings settings = Settings.builder().put("some.custom.setting", "false").build();
+ SettingsModule module = new SettingsModule(settings, new SettingsFilter(Settings.EMPTY));
+ module.registerSetting(Setting.floatSetting("some.custom.setting", 1.0f, false, Setting.Scope.CLUSTER));
+ try {
+ assertInstanceBinding(module, Settings.class, (s) -> s == settings);
+ fail();
+ } catch (IllegalArgumentException ex) {
+ assertEquals("Failed to parse value [false] for setting [some.custom.setting]", ex.getMessage());
+ }
+ }
+ }
+
+ public void testTribeSetting() {
+ {
+ Settings settings = Settings.builder().put("tribe.t1.cluster.routing.allocation.balance.shard", "2.0").build();
+ SettingsModule module = new SettingsModule(settings, new SettingsFilter(Settings.EMPTY));
+ assertInstanceBinding(module, Settings.class, (s) -> s == settings);
+ }
+ {
+ Settings settings = Settings.builder().put("tribe.t1.cluster.routing.allocation.balance.shard", "[2.0]").build();
+ SettingsModule module = new SettingsModule(settings, new SettingsFilter(Settings.EMPTY));
+ try {
+ assertInstanceBinding(module, Settings.class, (s) -> s == settings);
+ fail();
+ } catch (IllegalArgumentException ex) {
+ assertEquals("Failed to parse value [[2.0]] for setting [cluster.routing.allocation.balance.shard]", ex.getMessage());
+ }
+ }
+ }
+
+ public void testLoggerSettings() {
+ {
+ Settings settings = Settings.builder().put("logger._root", "TRACE").put("logger.transport", "INFO").build();
+ SettingsModule module = new SettingsModule(settings, new SettingsFilter(Settings.EMPTY));
+ assertInstanceBinding(module, Settings.class, (s) -> s == settings);
+ }
+
+ {
+ Settings settings = Settings.builder().put("logger._root", "BOOM").put("logger.transport", "WOW").build();
+ SettingsModule module = new SettingsModule(settings, new SettingsFilter(Settings.EMPTY));
+ try {
+ assertInstanceBinding(module, Settings.class, (s) -> s == settings);
+ fail();
+ } catch (IllegalArgumentException ex) {
+ assertEquals("No enum constant org.elasticsearch.common.logging.ESLoggerFactory.LogLevel.BOOM", ex.getMessage());
+ }
+ }
+
+ }
+}
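
For orientation, a sketch of the registration path this new test file exercises: the SettingsModule constructor and registerSetting call are taken from the tests above, while the standalone wrapper class is an assumption.

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.common.settings.SettingsModule;

public class RegisterSettingSketch {
    public static void main(String[] args) {
        Settings settings = Settings.builder().put("some.custom.setting", "2.0").build();
        SettingsModule module = new SettingsModule(settings, new SettingsFilter(Settings.EMPTY));
        // Registering the setting teaches the module how to parse and validate it.
        // An unparseable value (e.g. the string "false" for this float setting)
        // surfaces as IllegalArgumentException when the module binds Settings.
        module.registerSetting(Setting.floatSetting("some.custom.setting", 1.0f, false, Setting.Scope.CLUSTER));
    }
}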
diff --git a/core/src/test/java/org/elasticsearch/common/unit/ByteSizeUnitTests.java b/core/src/test/java/org/elasticsearch/common/unit/ByteSizeUnitTests.java
index 70ea1d19cb..ab6d281894 100644
--- a/core/src/test/java/org/elasticsearch/common/unit/ByteSizeUnitTests.java
+++ b/core/src/test/java/org/elasticsearch/common/unit/ByteSizeUnitTests.java
@@ -34,47 +34,47 @@ import static org.hamcrest.Matchers.equalTo;
*/
public class ByteSizeUnitTests extends ESTestCase {
public void testBytes() {
- assertThat(BYTES.toBytes(1), equalTo(1l));
- assertThat(BYTES.toKB(1024), equalTo(1l));
- assertThat(BYTES.toMB(1024 * 1024), equalTo(1l));
- assertThat(BYTES.toGB(1024 * 1024 * 1024), equalTo(1l));
+ assertThat(BYTES.toBytes(1), equalTo(1L));
+ assertThat(BYTES.toKB(1024), equalTo(1L));
+ assertThat(BYTES.toMB(1024 * 1024), equalTo(1L));
+ assertThat(BYTES.toGB(1024 * 1024 * 1024), equalTo(1L));
}
public void testKB() {
- assertThat(KB.toBytes(1), equalTo(1024l));
- assertThat(KB.toKB(1), equalTo(1l));
- assertThat(KB.toMB(1024), equalTo(1l));
- assertThat(KB.toGB(1024 * 1024), equalTo(1l));
+ assertThat(KB.toBytes(1), equalTo(1024L));
+ assertThat(KB.toKB(1), equalTo(1L));
+ assertThat(KB.toMB(1024), equalTo(1L));
+ assertThat(KB.toGB(1024 * 1024), equalTo(1L));
}
public void testMB() {
- assertThat(MB.toBytes(1), equalTo(1024l * 1024));
- assertThat(MB.toKB(1), equalTo(1024l));
- assertThat(MB.toMB(1), equalTo(1l));
- assertThat(MB.toGB(1024), equalTo(1l));
+ assertThat(MB.toBytes(1), equalTo(1024L * 1024));
+ assertThat(MB.toKB(1), equalTo(1024L));
+ assertThat(MB.toMB(1), equalTo(1L));
+ assertThat(MB.toGB(1024), equalTo(1L));
}
public void testGB() {
- assertThat(GB.toBytes(1), equalTo(1024l * 1024 * 1024));
- assertThat(GB.toKB(1), equalTo(1024l * 1024));
- assertThat(GB.toMB(1), equalTo(1024l));
- assertThat(GB.toGB(1), equalTo(1l));
+ assertThat(GB.toBytes(1), equalTo(1024L * 1024 * 1024));
+ assertThat(GB.toKB(1), equalTo(1024L * 1024));
+ assertThat(GB.toMB(1), equalTo(1024L));
+ assertThat(GB.toGB(1), equalTo(1L));
}
public void testTB() {
- assertThat(TB.toBytes(1), equalTo(1024l * 1024 * 1024 * 1024));
- assertThat(TB.toKB(1), equalTo(1024l * 1024 * 1024));
- assertThat(TB.toMB(1), equalTo(1024l * 1024));
- assertThat(TB.toGB(1), equalTo(1024l));
- assertThat(TB.toTB(1), equalTo(1l));
+ assertThat(TB.toBytes(1), equalTo(1024L * 1024 * 1024 * 1024));
+ assertThat(TB.toKB(1), equalTo(1024L * 1024 * 1024));
+ assertThat(TB.toMB(1), equalTo(1024L * 1024));
+ assertThat(TB.toGB(1), equalTo(1024L));
+ assertThat(TB.toTB(1), equalTo(1L));
}
public void testPB() {
- assertThat(PB.toBytes(1), equalTo(1024l * 1024 * 1024 * 1024 * 1024));
- assertThat(PB.toKB(1), equalTo(1024l * 1024 * 1024 * 1024));
- assertThat(PB.toMB(1), equalTo(1024l * 1024 * 1024));
- assertThat(PB.toGB(1), equalTo(1024l * 1024));
- assertThat(PB.toTB(1), equalTo(1024l));
- assertThat(PB.toPB(1), equalTo(1l));
+ assertThat(PB.toBytes(1), equalTo(1024L * 1024 * 1024 * 1024 * 1024));
+ assertThat(PB.toKB(1), equalTo(1024L * 1024 * 1024 * 1024));
+ assertThat(PB.toMB(1), equalTo(1024L * 1024 * 1024));
+ assertThat(PB.toGB(1), equalTo(1024L * 1024));
+ assertThat(PB.toTB(1), equalTo(1024L));
+ assertThat(PB.toPB(1), equalTo(1L));
}
}
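
The only change in this file is the long-literal suffix: a lowercase l is easily misread as the digit 1, so 1l becomes 1L throughout. A two-line illustration (values are arbitrary):

long a = 1l; // legal, but reads like the int literal 11
long b = 1L; // same value, unambiguous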
diff --git a/core/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java b/core/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java
index 56e6179832..b075e9d56d 100644
--- a/core/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java
+++ b/core/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java
@@ -32,15 +32,15 @@ import static org.hamcrest.Matchers.is;
*/
public class ByteSizeValueTests extends ESTestCase {
public void testActualPeta() {
- MatcherAssert.assertThat(new ByteSizeValue(4, ByteSizeUnit.PB).bytes(), equalTo(4503599627370496l));
+ MatcherAssert.assertThat(new ByteSizeValue(4, ByteSizeUnit.PB).bytes(), equalTo(4503599627370496L));
}
public void testActualTera() {
- MatcherAssert.assertThat(new ByteSizeValue(4, ByteSizeUnit.TB).bytes(), equalTo(4398046511104l));
+ MatcherAssert.assertThat(new ByteSizeValue(4, ByteSizeUnit.TB).bytes(), equalTo(4398046511104L));
}
public void testActual() {
- MatcherAssert.assertThat(new ByteSizeValue(4, ByteSizeUnit.GB).bytes(), equalTo(4294967296l));
+ MatcherAssert.assertThat(new ByteSizeValue(4, ByteSizeUnit.GB).bytes(), equalTo(4294967296L));
}
public void testSimple() {
diff --git a/core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java b/core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java
index 25c3a13627..5d7bbb3ca1 100644
--- a/core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java
+++ b/core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java
@@ -19,6 +19,10 @@
package org.elasticsearch.common.unit;
+import com.carrotsearch.randomizedtesting.generators.RandomStrings;
+
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.test.ESTestCase;
import static org.hamcrest.Matchers.closeTo;
@@ -73,4 +77,21 @@ public class DistanceUnitTests extends ESTestCase {
assertEquals(7, DistanceUnit.MILES.ordinal());
assertEquals(8, DistanceUnit.METERS.ordinal());
}
+
+ public void testReadWrite() throws Exception {
+ for (DistanceUnit unit : DistanceUnit.values()) {
+ try (BytesStreamOutput out = new BytesStreamOutput()) {
+ unit.writeTo(out);
+ try (StreamInput in = StreamInput.wrap(out.bytes())) {
+ assertThat("Roundtrip serialisation failed.", DistanceUnit.readDistanceUnit(in), equalTo(unit));
+ }
+ }
+ }
+ }
+
+ public void testFromString() {
+ for (DistanceUnit unit : DistanceUnit.values()) {
+ assertThat("Roundtrip string parsing failed.", DistanceUnit.fromString(unit.toString()), equalTo(unit));
+ }
+ }
}
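
The new testReadWrite follows the standard Elasticsearch stream round-trip idiom: serialise into an in-memory buffer, wrap the bytes, read back, and compare. A minimal sketch using only the calls from the test (the wrapper class is hypothetical):

import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.unit.DistanceUnit;

public class DistanceUnitRoundTrip {
    public static void main(String[] args) throws Exception {
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            DistanceUnit.MILES.writeTo(out);                           // serialise the enum
            try (StreamInput in = StreamInput.wrap(out.bytes())) {
                DistanceUnit unit = DistanceUnit.readDistanceUnit(in); // MILES again
                System.out.println(unit == DistanceUnit.MILES);        // true
            }
        }
    }
}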
diff --git a/core/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java b/core/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java
index 4c64e04ec3..a6de68b3a4 100644
--- a/core/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java
+++ b/core/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java
@@ -40,7 +40,7 @@ public class FuzzinessTests extends ESTestCase {
assertThat(Fuzziness.build(randomFrom(options)).asInt(), equalTo(1));
assertThat(Fuzziness.build(randomFrom(options)).asFloat(), equalTo(1f));
assertThat(Fuzziness.build(randomFrom(options)).asDouble(), equalTo(1d));
- assertThat(Fuzziness.build(randomFrom(options)).asLong(), equalTo(1l));
+ assertThat(Fuzziness.build(randomFrom(options)).asLong(), equalTo(1L));
assertThat(Fuzziness.build(randomFrom(options)).asShort(), equalTo((short) 1));
}
@@ -143,7 +143,7 @@ public class FuzzinessTests extends ESTestCase {
assertThat(Fuzziness.AUTO.asInt(), equalTo(1));
assertThat(Fuzziness.AUTO.asFloat(), equalTo(1f));
assertThat(Fuzziness.AUTO.asDouble(), equalTo(1d));
- assertThat(Fuzziness.AUTO.asLong(), equalTo(1l));
+ assertThat(Fuzziness.AUTO.asLong(), equalTo(1L));
assertThat(Fuzziness.AUTO.asShort(), equalTo((short) 1));
assertThat(Fuzziness.AUTO.asTimeValue(), equalTo(TimeValue.parseTimeValue("1ms", TimeValue.timeValueMillis(1), "fuzziness")));
diff --git a/core/src/test/java/org/elasticsearch/common/unit/SizeValueTests.java b/core/src/test/java/org/elasticsearch/common/unit/SizeValueTests.java
index f2f85e0c7f..b5fc54de7d 100644
--- a/core/src/test/java/org/elasticsearch/common/unit/SizeValueTests.java
+++ b/core/src/test/java/org/elasticsearch/common/unit/SizeValueTests.java
@@ -29,7 +29,7 @@ import static org.hamcrest.Matchers.is;
public class SizeValueTests extends ESTestCase {
public void testThatConversionWorks() {
SizeValue sizeValue = new SizeValue(1000);
- assertThat(sizeValue.kilo(), is(1l));
+ assertThat(sizeValue.kilo(), is(1L));
assertThat(sizeValue.toString(), is("1k"));
sizeValue = new SizeValue(1000, SizeUnit.KILO);
diff --git a/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java b/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java
index 2945d86fe5..20568826d4 100644
--- a/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java
+++ b/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java
@@ -60,7 +60,7 @@ public class TimeValueTests extends ESTestCase {
}
public void testMinusOne() {
- assertThat(new TimeValue(-1).nanos(), lessThan(0l));
+ assertThat(new TimeValue(-1).nanos(), lessThan(0L));
}
public void testParseTimeValue() {
diff --git a/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java b/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java
index bf55a33050..1735515bf3 100644
--- a/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java
+++ b/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java
@@ -37,8 +37,8 @@ import java.util.Arrays;
public class BigArraysTests extends ESSingleNodeTestCase {
- public static BigArrays randombigArrays() {
- final PageCacheRecycler recycler = randomBoolean() ? null : ESSingleNodeTestCase.getInstanceFromNode(PageCacheRecycler.class);
+ private BigArrays randombigArrays() {
+ final PageCacheRecycler recycler = randomBoolean() ? null : getInstanceFromNode(PageCacheRecycler.class);
return new MockBigArrays(recycler, new NoneCircuitBreakerService());
}
diff --git a/core/src/test/java/org/elasticsearch/common/util/BytesRefHashTests.java b/core/src/test/java/org/elasticsearch/common/util/BytesRefHashTests.java
index a26a06a09a..01c27a65ab 100644
--- a/core/src/test/java/org/elasticsearch/common/util/BytesRefHashTests.java
+++ b/core/src/test/java/org/elasticsearch/common/util/BytesRefHashTests.java
@@ -25,6 +25,8 @@ import com.carrotsearch.hppc.cursors.ObjectLongCursor;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.TestUtil;
+import org.elasticsearch.cache.recycler.PageCacheRecycler;
+import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.util.HashMap;
@@ -38,13 +40,18 @@ public class BytesRefHashTests extends ESSingleNodeTestCase {
BytesRefHash hash;
+ private BigArrays randombigArrays() {
+ final PageCacheRecycler recycler = randomBoolean() ? null : getInstanceFromNode(PageCacheRecycler.class);
+ return new MockBigArrays(recycler, new NoneCircuitBreakerService());
+ }
+
private void newHash() {
if (hash != null) {
hash.close();
}
// Test high load factors to make sure that collision resolution works fine
final float maxLoadFactor = 0.6f + randomFloat() * 0.39f;
- hash = new BytesRefHash(randomIntBetween(0, 100), maxLoadFactor, BigArraysTests.randombigArrays());
+ hash = new BytesRefHash(randomIntBetween(0, 100), maxLoadFactor, randombigArrays());
}
@Override
diff --git a/core/src/test/java/org/elasticsearch/common/util/LongHashTests.java b/core/src/test/java/org/elasticsearch/common/util/LongHashTests.java
index f5ae388db7..9439044a7b 100644
--- a/core/src/test/java/org/elasticsearch/common/util/LongHashTests.java
+++ b/core/src/test/java/org/elasticsearch/common/util/LongHashTests.java
@@ -22,6 +22,8 @@ package org.elasticsearch.common.util;
import com.carrotsearch.hppc.LongLongHashMap;
import com.carrotsearch.hppc.LongLongMap;
import com.carrotsearch.hppc.cursors.LongLongCursor;
+import org.elasticsearch.cache.recycler.PageCacheRecycler;
+import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.util.HashMap;
@@ -33,6 +35,11 @@ import java.util.Set;
public class LongHashTests extends ESSingleNodeTestCase {
LongHash hash;
+ private BigArrays randombigArrays() {
+ final PageCacheRecycler recycler = randomBoolean() ? null : getInstanceFromNode(PageCacheRecycler.class);
+ return new MockBigArrays(recycler, new NoneCircuitBreakerService());
+ }
+
private void newHash() {
if (hash != null) {
hash.close();
@@ -40,7 +47,7 @@ public class LongHashTests extends ESSingleNodeTestCase {
// Test high load factors to make sure that collision resolution works fine
final float maxLoadFactor = 0.6f + randomFloat() * 0.39f;
- hash = new LongHash(randomIntBetween(0, 100), maxLoadFactor, BigArraysTests.randombigArrays());
+ hash = new LongHash(randomIntBetween(0, 100), maxLoadFactor, randombigArrays());
}
@Override
diff --git a/core/src/test/java/org/elasticsearch/common/util/LongObjectHashMapTests.java b/core/src/test/java/org/elasticsearch/common/util/LongObjectHashMapTests.java
index bf091828ca..1775f86199 100644
--- a/core/src/test/java/org/elasticsearch/common/util/LongObjectHashMapTests.java
+++ b/core/src/test/java/org/elasticsearch/common/util/LongObjectHashMapTests.java
@@ -20,12 +20,20 @@
package org.elasticsearch.common.util;
import com.carrotsearch.hppc.LongObjectHashMap;
+import org.elasticsearch.cache.recycler.PageCacheRecycler;
+import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.test.ESSingleNodeTestCase;
public class LongObjectHashMapTests extends ESSingleNodeTestCase {
+
+ private BigArrays randombigArrays() {
+ final PageCacheRecycler recycler = randomBoolean() ? null : getInstanceFromNode(PageCacheRecycler.class);
+ return new MockBigArrays(recycler, new NoneCircuitBreakerService());
+ }
+
public void testDuel() {
final LongObjectHashMap<Object> map1 = new LongObjectHashMap<>();
- final LongObjectPagedHashMap<Object> map2 = new LongObjectPagedHashMap<>(randomInt(42), 0.6f + randomFloat() * 0.39f, BigArraysTests.randombigArrays());
+ final LongObjectPagedHashMap<Object> map2 = new LongObjectPagedHashMap<>(randomInt(42), 0.6f + randomFloat() * 0.39f, randombigArrays());
final int maxKey = randomIntBetween(1, 10000);
final int iters = scaledRandomIntBetween(10000, 100000);
for (int i = 0; i < iters; ++i) {
diff --git a/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java b/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java
deleted file mode 100644
index 25c765e648..0000000000
--- a/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java
+++ /dev/null
@@ -1,297 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.common.util;
-
-import org.apache.lucene.util.CollectionUtil;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.TestUtil;
-import org.elasticsearch.bwcompat.OldIndexBackwardsCompatibilityIT;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.cluster.routing.AllocationId;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.collect.Tuple;
-import org.elasticsearch.common.io.FileSystemUtils;
-import org.elasticsearch.common.util.set.Sets;
-import org.elasticsearch.env.NodeEnvironment;
-import org.elasticsearch.gateway.MetaDataStateFormat;
-import org.elasticsearch.index.shard.ShardId;
-import org.elasticsearch.index.shard.ShardPath;
-import org.elasticsearch.index.shard.ShardStateMetaData;
-import org.elasticsearch.test.ESTestCase;
-
-import java.io.BufferedWriter;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.URISyntaxException;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.DirectoryStream;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.ArrayList;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-
-/**
- */
-@LuceneTestCase.SuppressFileSystems("ExtrasFS")
-public class MultiDataPathUpgraderTests extends ESTestCase {
-
- public void testUpgradeRandomPaths() throws IOException {
- try (NodeEnvironment nodeEnvironment = newNodeEnvironment()) {
- final String uuid = Strings.base64UUID();
- final ShardId shardId = new ShardId("foo", 0);
- final Path[] shardDataPaths = nodeEnvironment.availableShardPaths(shardId);
- if (nodeEnvironment.nodeDataPaths().length == 1) {
- MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment);
- assertFalse(helper.needsUpgrading(shardId));
- return;
- }
- int numIdxFiles = 0;
- int numTranslogFiles = 0;
- int metaStateVersion = 0;
- for (Path shardPath : shardDataPaths) {
- final Path translog = shardPath.resolve(ShardPath.TRANSLOG_FOLDER_NAME);
- final Path idx = shardPath.resolve(ShardPath.INDEX_FOLDER_NAME);
- Files.createDirectories(translog);
- Files.createDirectories(idx);
- int numFiles = randomIntBetween(1, 10);
- for (int i = 0; i < numFiles; i++, numIdxFiles++) {
- String filename = Integer.toString(numIdxFiles);
- try (BufferedWriter w = Files.newBufferedWriter(idx.resolve(filename + ".tst"), StandardCharsets.UTF_8)) {
- w.write(filename);
- }
- }
- numFiles = randomIntBetween(1, 10);
- for (int i = 0; i < numFiles; i++, numTranslogFiles++) {
- String filename = Integer.toString(numTranslogFiles);
- try (BufferedWriter w = Files.newBufferedWriter(translog.resolve(filename + ".translog"), StandardCharsets.UTF_8)) {
- w.write(filename);
- }
- }
- ++metaStateVersion;
- ShardStateMetaData.FORMAT.write(new ShardStateMetaData(metaStateVersion, true, uuid, AllocationId.newInitializing()), metaStateVersion, shardDataPaths);
- }
- final Path path = randomFrom(shardDataPaths);
- ShardPath targetPath = new ShardPath(false, path, path, uuid, new ShardId("foo", 0));
- MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment);
- helper.upgrade(shardId, targetPath);
- assertFalse(helper.needsUpgrading(shardId));
- if (shardDataPaths.length > 1) {
- for (Path shardPath : shardDataPaths) {
- if (shardPath.equals(targetPath.getDataPath())) {
- continue;
- }
- final Path translog = shardPath.resolve(ShardPath.TRANSLOG_FOLDER_NAME);
- final Path idx = shardPath.resolve(ShardPath.INDEX_FOLDER_NAME);
- final Path state = shardPath.resolve(MetaDataStateFormat.STATE_DIR_NAME);
- assertFalse(Files.exists(translog));
- assertFalse(Files.exists(idx));
- assertFalse(Files.exists(state));
- assertFalse(Files.exists(shardPath));
- }
- }
-
- final ShardStateMetaData stateMetaData = ShardStateMetaData.FORMAT.loadLatestState(logger, targetPath.getShardStatePath());
- assertEquals(metaStateVersion, stateMetaData.version);
- assertTrue(stateMetaData.primary);
- assertEquals(uuid, stateMetaData.indexUUID);
- final Path translog = targetPath.getDataPath().resolve(ShardPath.TRANSLOG_FOLDER_NAME);
- final Path idx = targetPath.getDataPath().resolve(ShardPath.INDEX_FOLDER_NAME);
- Files.deleteIfExists(idx.resolve("write.lock"));
- assertEquals(numTranslogFiles, FileSystemUtils.files(translog).length);
- assertEquals(numIdxFiles, FileSystemUtils.files(idx).length);
- final HashSet<Path> translogFiles = Sets.newHashSet(FileSystemUtils.files(translog));
- for (int i = 0; i < numTranslogFiles; i++) {
- final String name = Integer.toString(i);
- translogFiles.contains(translog.resolve(name + ".translog"));
- byte[] content = Files.readAllBytes(translog.resolve(name + ".translog"));
- assertEquals(name , new String(content, StandardCharsets.UTF_8));
- }
- final HashSet<Path> idxFiles = Sets.newHashSet(FileSystemUtils.files(idx));
- for (int i = 0; i < numIdxFiles; i++) {
- final String name = Integer.toString(i);
- idxFiles.contains(idx.resolve(name + ".tst"));
- byte[] content = Files.readAllBytes(idx.resolve(name + ".tst"));
- assertEquals(name , new String(content, StandardCharsets.UTF_8));
- }
- }
- }
-
- /**
- * Run upgrade on a real bwc index
- */
- public void testUpgradeRealIndex() throws IOException, URISyntaxException {
- List<Path> indexes = new ArrayList<>();
- try (DirectoryStream<Path> stream = Files.newDirectoryStream(getBwcIndicesPath(), "index-*.zip")) {
- for (Path path : stream) {
- indexes.add(path);
- }
- }
- CollectionUtil.introSort(indexes, new Comparator<Path>() {
- @Override
- public int compare(Path o1, Path o2) {
- return o1.getFileName().compareTo(o2.getFileName());
- }
- });
- final ShardId shardId = new ShardId("test", 0);
- final Path path = randomFrom(indexes);
- final Path indexFile = path;
- final String indexName = indexFile.getFileName().toString().replace(".zip", "").toLowerCase(Locale.ROOT);
- try (NodeEnvironment nodeEnvironment = newNodeEnvironment()) {
- if (nodeEnvironment.nodeDataPaths().length == 1) {
- MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment);
- assertFalse(helper.needsUpgrading(shardId));
- return;
- }
- Path unzipDir = createTempDir();
- Path unzipDataDir = unzipDir.resolve("data");
- // decompress the index
- try (InputStream stream = Files.newInputStream(indexFile)) {
- TestUtil.unzip(stream, unzipDir);
- }
- // check it is unique
- assertTrue(Files.exists(unzipDataDir));
- Path[] list = FileSystemUtils.files(unzipDataDir);
- if (list.length != 1) {
- throw new IllegalStateException("Backwards index must contain exactly one cluster but was " + list.length);
- }
- // the bwc scripts packs the indices under this path
- Path src = list[0].resolve("nodes/0/indices/" + indexName);
- assertTrue("[" + indexFile + "] missing index dir: " + src.toString(), Files.exists(src));
- Path[] multiDataPath = new Path[nodeEnvironment.nodeDataPaths().length];
- int i = 0;
- for (NodeEnvironment.NodePath nodePath : nodeEnvironment.nodePaths()) {
- multiDataPath[i++] = nodePath.indicesPath;
- }
- logger.info("--> injecting index [{}] into multiple data paths", indexName);
- OldIndexBackwardsCompatibilityIT.copyIndex(logger, src, indexName, multiDataPath);
- final ShardPath shardPath = new ShardPath(false, nodeEnvironment.availableShardPaths(new ShardId(indexName, 0))[0], nodeEnvironment.availableShardPaths(new ShardId(indexName, 0))[0], IndexMetaData.INDEX_UUID_NA_VALUE, new ShardId(indexName, 0));
-
- logger.info("{}", (Object)FileSystemUtils.files(shardPath.resolveIndex()));
-
- MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment);
- helper.upgrade(new ShardId(indexName, 0), shardPath);
- helper.checkIndex(shardPath);
- assertFalse(helper.needsUpgrading(new ShardId(indexName, 0)));
- }
- }
-
- public void testNeedsUpgrade() throws IOException {
- try (NodeEnvironment nodeEnvironment = newNodeEnvironment()) {
- String uuid = Strings.randomBase64UUID();
- final ShardId shardId = new ShardId("foo", 0);
- ShardStateMetaData.FORMAT.write(new ShardStateMetaData(1, true, uuid, AllocationId.newInitializing()), 1, nodeEnvironment.availableShardPaths(shardId));
- MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment);
- boolean multiDataPaths = nodeEnvironment.nodeDataPaths().length > 1;
- boolean needsUpgrading = helper.needsUpgrading(shardId);
- if (multiDataPaths) {
- assertTrue(needsUpgrading);
- } else {
- assertFalse(needsUpgrading);
- }
- }
- }
-
- public void testPickTargetShardPath() throws IOException {
- try (NodeEnvironment nodeEnvironment = newNodeEnvironment()) {
- final ShardId shard = new ShardId("foo", 0);
- final Path[] paths = nodeEnvironment.availableShardPaths(shard);
- if (paths.length == 1) {
- MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment);
- try {
- helper.pickShardPath(new ShardId("foo", 0));
- fail("one path needs no upgrading");
- } catch (IllegalStateException ex) {
- // only one path
- }
- } else {
- final Map<Path, Tuple<Long, Long>> pathToSpace = new HashMap<>();
- final Path expectedPath;
- if (randomBoolean()) { // path with most of the file bytes
- expectedPath = randomFrom(paths);
- long[] used = new long[paths.length];
- long sumSpaceUsed = 0;
- for (int i = 0; i < used.length; i++) {
- long spaceUsed = paths[i] == expectedPath ? randomIntBetween(101, 200) : randomIntBetween(10, 100);
- sumSpaceUsed += spaceUsed;
- used[i] = spaceUsed;
- }
- for (int i = 0; i < used.length; i++) {
- long availalbe = randomIntBetween((int)(2*sumSpaceUsed-used[i]), 4 * (int)sumSpaceUsed);
- pathToSpace.put(paths[i], new Tuple<>(availalbe, used[i]));
- }
- } else { // path with largest available space
- expectedPath = randomFrom(paths);
- long[] used = new long[paths.length];
- long sumSpaceUsed = 0;
- for (int i = 0; i < used.length; i++) {
- long spaceUsed = randomIntBetween(10, 100);
- sumSpaceUsed += spaceUsed;
- used[i] = spaceUsed;
- }
-
- for (int i = 0; i < used.length; i++) {
- long availalbe = paths[i] == expectedPath ? randomIntBetween((int)(sumSpaceUsed), (int)(2*sumSpaceUsed)) : randomIntBetween(0, (int)(sumSpaceUsed) - 1) ;
- pathToSpace.put(paths[i], new Tuple<>(availalbe, used[i]));
- }
-
- }
- MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment) {
- @Override
- protected long getUsabelSpace(NodeEnvironment.NodePath path) throws IOException {
- return pathToSpace.get(path.resolve(shard)).v1();
- }
-
- @Override
- protected long getSpaceUsedByShard(Path path) throws IOException {
- return pathToSpace.get(path).v2();
- }
- };
- String uuid = Strings.randomBase64UUID();
- ShardStateMetaData.FORMAT.write(new ShardStateMetaData(1, true, uuid, AllocationId.newInitializing()), 1, paths);
- final ShardPath shardPath = helper.pickShardPath(new ShardId("foo", 0));
- assertEquals(expectedPath, shardPath.getDataPath());
- assertEquals(expectedPath, shardPath.getShardStatePath());
- }
-
- MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment) {
- @Override
- protected long getUsabelSpace(NodeEnvironment.NodePath path) throws IOException {
- return randomIntBetween(0, 10);
- }
-
- @Override
- protected long getSpaceUsedByShard(Path path) throws IOException {
- return randomIntBetween(11, 20);
- }
- };
-
- try {
- helper.pickShardPath(new ShardId("foo", 0));
- fail("not enough space");
- } catch (IllegalStateException ex) {
- // not enough space
- }
- }
- }
-}
diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java
index b59c8dd1cb..57da614e68 100644
--- a/core/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java
@@ -20,6 +20,7 @@
package org.elasticsearch.common.util.concurrent;
import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matcher;
@@ -38,12 +39,13 @@ import static org.hamcrest.Matchers.lessThan;
*/
public class EsExecutorsTests extends ESTestCase {
+ private final ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
private TimeUnit randomTimeUnit() {
return TimeUnit.values()[between(0, TimeUnit.values().length - 1)];
}
public void testFixedForcedExecution() throws Exception {
- EsThreadPoolExecutor executor = EsExecutors.newFixed(getTestName(), 1, 1, EsExecutors.daemonThreadFactory("test"));
+ EsThreadPoolExecutor executor = EsExecutors.newFixed(getTestName(), 1, 1, EsExecutors.daemonThreadFactory("test"), threadContext);
final CountDownLatch wait = new CountDownLatch(1);
final CountDownLatch exec1Wait = new CountDownLatch(1);
@@ -105,7 +107,7 @@ public class EsExecutorsTests extends ESTestCase {
}
public void testFixedRejected() throws Exception {
- EsThreadPoolExecutor executor = EsExecutors.newFixed(getTestName(), 1, 1, EsExecutors.daemonThreadFactory("test"));
+ EsThreadPoolExecutor executor = EsExecutors.newFixed(getTestName(), 1, 1, EsExecutors.daemonThreadFactory("test"), threadContext);
final CountDownLatch wait = new CountDownLatch(1);
final CountDownLatch exec1Wait = new CountDownLatch(1);
@@ -163,7 +165,7 @@ public class EsExecutorsTests extends ESTestCase {
final int max = between(min + 1, 6);
final ThreadBarrier barrier = new ThreadBarrier(max + 1);
- ThreadPoolExecutor pool = EsExecutors.newScaling(getTestName(), min, max, between(1, 100), randomTimeUnit(), EsExecutors.daemonThreadFactory("test"));
+ ThreadPoolExecutor pool = EsExecutors.newScaling(getTestName(), min, max, between(1, 100), randomTimeUnit(), EsExecutors.daemonThreadFactory("test"), threadContext);
assertThat("Min property", pool.getCorePoolSize(), equalTo(min));
assertThat("Max property", pool.getMaximumPoolSize(), equalTo(max));
@@ -199,7 +201,7 @@ public class EsExecutorsTests extends ESTestCase {
final int max = between(min + 1, 6);
final ThreadBarrier barrier = new ThreadBarrier(max + 1);
- final ThreadPoolExecutor pool = EsExecutors.newScaling(getTestName(), min, max, between(1, 100), TimeUnit.MILLISECONDS, EsExecutors.daemonThreadFactory("test"));
+ final ThreadPoolExecutor pool = EsExecutors.newScaling(getTestName(), min, max, between(1, 100), TimeUnit.MILLISECONDS, EsExecutors.daemonThreadFactory("test"), threadContext);
assertThat("Min property", pool.getCorePoolSize(), equalTo(min));
assertThat("Max property", pool.getMaximumPoolSize(), equalTo(max));
@@ -242,7 +244,7 @@ public class EsExecutorsTests extends ESTestCase {
int queue = between(0, 100);
int actions = queue + pool;
final CountDownLatch latch = new CountDownLatch(1);
- EsThreadPoolExecutor executor = EsExecutors.newFixed(getTestName(), pool, queue, EsExecutors.daemonThreadFactory("dummy"));
+ EsThreadPoolExecutor executor = EsExecutors.newFixed(getTestName(), pool, queue, EsExecutors.daemonThreadFactory("dummy"), threadContext);
try {
for (int i = 0; i < actions; i++) {
executor.execute(new Runnable() {
@@ -321,4 +323,65 @@ public class EsExecutorsTests extends ESTestCase {
assertThat(message, containsString("completed tasks = " + actions));
}
}
+
+ public void testInheritContext() throws InterruptedException {
+ int pool = between(1, 10);
+ int queue = between(0, 100);
+ final CountDownLatch latch = new CountDownLatch(1);
+ final CountDownLatch executed = new CountDownLatch(1);
+
+ threadContext.putHeader("foo", "bar");
+ final Integer one = new Integer(1);
+ threadContext.putTransient("foo", one);
+ EsThreadPoolExecutor executor = EsExecutors.newFixed(getTestName(), pool, queue, EsExecutors.daemonThreadFactory("dummy"), threadContext);
+ try {
+ executor.execute(() -> {
+ try {
+ latch.await();
+ } catch (InterruptedException e) {
+ fail();
+ }
+ assertEquals(threadContext.getHeader("foo"), "bar");
+ assertSame(threadContext.getTransient("foo"), one);
+ assertNull(threadContext.getHeader("bar"));
+ assertNull(threadContext.getTransient("bar"));
+ executed.countDown();
+ });
+ threadContext.putTransient("bar", "boom");
+ threadContext.putHeader("bar", "boom");
+ latch.countDown();
+ executed.await();
+
+ } finally {
+ latch.countDown();
+ terminate(executor);
+ }
+ }
+
+ public void testGetTasks() throws InterruptedException {
+ int pool = between(1, 10);
+ int queue = between(0, 100);
+ final CountDownLatch latch = new CountDownLatch(1);
+ final CountDownLatch executed = new CountDownLatch(1);
+ EsThreadPoolExecutor executor = EsExecutors.newFixed(getTestName(), pool, queue, EsExecutors.daemonThreadFactory("dummy"), threadContext);
+ try {
+ Runnable r = () -> {
+ latch.countDown();
+ try {
+ executed.await();
+ } catch (InterruptedException e) {
+ fail();
+ }
+ };
+ executor.execute(r);
+ latch.await();
+ executor.getTasks().forEach((runnable) -> assertSame(runnable, r));
+ executed.countDown();
+
+ } finally {
+ latch.countDown();
+ terminate(executor);
+ }
+
+ }
}
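
The extra ThreadContext parameter threaded through EsExecutors above is what makes pool threads inherit the submitting thread's context, as testInheritContext verifies. A sketch of that behaviour, assuming a hypothetical wrapper class and header name:

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor;
import org.elasticsearch.common.util.concurrent.ThreadContext;

public class ContextInheritanceSketch {
    public static void main(String[] args) {
        ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
        threadContext.putHeader("request-id", "42"); // header name is illustrative
        EsThreadPoolExecutor executor = EsExecutors.newFixed(
                "sketch", 1, 10, EsExecutors.daemonThreadFactory("sketch"), threadContext);
        // The task sees the headers present when execute(...) was called, even
        // though it runs on a pool thread; headers put afterwards are not visible.
        executor.execute(() -> System.out.println(threadContext.getHeader("request-id"))); // 42
        executor.shutdown();
    }
}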
diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java
index 685e06afb1..50b7d5f775 100644
--- a/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.common.util.concurrent;
import org.elasticsearch.common.Priority;
+import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
@@ -43,6 +44,9 @@ import static org.hamcrest.Matchers.is;
*
*/
public class PrioritizedExecutorsTests extends ESTestCase {
+
+ private final ThreadContext holder = new ThreadContext(Settings.EMPTY);
+
public void testPriorityQueue() throws Exception {
PriorityBlockingQueue<Priority> queue = new PriorityBlockingQueue<>();
List<Priority> priorities = Arrays.asList(Priority.values());
@@ -63,7 +67,7 @@ public class PrioritizedExecutorsTests extends ESTestCase {
}
public void testSubmitPrioritizedExecutorWithRunnables() throws Exception {
- ExecutorService executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()));
+ ExecutorService executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()), holder);
List<Integer> results = new ArrayList<>(8);
CountDownLatch awaitingLatch = new CountDownLatch(1);
CountDownLatch finishedLatch = new CountDownLatch(8);
@@ -92,7 +96,7 @@ public class PrioritizedExecutorsTests extends ESTestCase {
}
public void testExecutePrioritizedExecutorWithRunnables() throws Exception {
- ExecutorService executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()));
+ ExecutorService executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()), holder);
List<Integer> results = new ArrayList<>(8);
CountDownLatch awaitingLatch = new CountDownLatch(1);
CountDownLatch finishedLatch = new CountDownLatch(8);
@@ -121,7 +125,7 @@ public class PrioritizedExecutorsTests extends ESTestCase {
}
public void testSubmitPrioritizedExecutorWithCallables() throws Exception {
- ExecutorService executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()));
+ ExecutorService executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()), holder);
List<Integer> results = new ArrayList<>(8);
CountDownLatch awaitingLatch = new CountDownLatch(1);
CountDownLatch finishedLatch = new CountDownLatch(8);
@@ -150,7 +154,7 @@ public class PrioritizedExecutorsTests extends ESTestCase {
}
public void testSubmitPrioritizedExecutorWithMixed() throws Exception {
- ExecutorService executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()));
+ ExecutorService executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()), holder);
List<Integer> results = new ArrayList<>(8);
CountDownLatch awaitingLatch = new CountDownLatch(1);
CountDownLatch finishedLatch = new CountDownLatch(8);
@@ -180,7 +184,7 @@ public class PrioritizedExecutorsTests extends ESTestCase {
public void testTimeout() throws Exception {
ScheduledExecutorService timer = Executors.newSingleThreadScheduledExecutor(EsExecutors.daemonThreadFactory(getTestName()));
- PrioritizedEsThreadPoolExecutor executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()));
+ PrioritizedEsThreadPoolExecutor executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()), holder);
final CountDownLatch invoked = new CountDownLatch(1);
final CountDownLatch block = new CountDownLatch(1);
executor.execute(new Runnable() {
@@ -243,7 +247,7 @@ public class PrioritizedExecutorsTests extends ESTestCase {
ThreadPool threadPool = new ThreadPool("test");
final ScheduledThreadPoolExecutor timer = (ScheduledThreadPoolExecutor) threadPool.scheduler();
final AtomicBoolean timeoutCalled = new AtomicBoolean();
- PrioritizedEsThreadPoolExecutor executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()));
+ PrioritizedEsThreadPoolExecutor executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()), holder);
final CountDownLatch invoked = new CountDownLatch(1);
executor.execute(new Runnable() {
@Override
diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java
new file mode 100644
index 0000000000..cbf58bf9da
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java
@@ -0,0 +1,238 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.common.util.concurrent;
+
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+
+public class ThreadContextTests extends ESTestCase {
+
+ public void testStashContext() {
+ Settings build = Settings.builder().put("request.headers.default", "1").build();
+ ThreadContext threadContext = new ThreadContext(build);
+ threadContext.putHeader("foo", "bar");
+ threadContext.putTransient("ctx.foo", new Integer(1));
+ assertEquals("bar", threadContext.getHeader("foo"));
+ assertEquals(new Integer(1), threadContext.getTransient("ctx.foo"));
+ assertEquals("1", threadContext.getHeader("default"));
+ try (ThreadContext.StoredContext ctx = threadContext.stashContext()) {
+ assertNull(threadContext.getHeader("foo"));
+ assertNull(threadContext.getTransient("ctx.foo"));
+ assertEquals("1", threadContext.getHeader("default"));
+ }
+
+ assertEquals("bar", threadContext.getHeader("foo"));
+ assertEquals(new Integer(1), threadContext.getTransient("ctx.foo"));
+ assertEquals("1", threadContext.getHeader("default"));
+ }
+
+ public void testStashAndMerge() {
+ Settings build = Settings.builder().put("request.headers.default", "1").build();
+ ThreadContext threadContext = new ThreadContext(build);
+ threadContext.putHeader("foo", "bar");
+ threadContext.putTransient("ctx.foo", new Integer(1));
+ assertEquals("bar", threadContext.getHeader("foo"));
+ assertEquals(new Integer(1), threadContext.getTransient("ctx.foo"));
+ assertEquals("1", threadContext.getHeader("default"));
+ HashMap<String, String> toMerge = new HashMap<>();
+ toMerge.put("foo", "baz");
+ toMerge.put("simon", "says");
+ try (ThreadContext.StoredContext ctx = threadContext.stashAndMergeHeaders(toMerge)) {
+ assertEquals("bar", threadContext.getHeader("foo"));
+ assertEquals("says", threadContext.getHeader("simon"));
+ assertNull(threadContext.getTransient("ctx.foo"));
+ assertEquals("1", threadContext.getHeader("default"));
+ }
+
+ assertNull(threadContext.getHeader("simon"));
+ assertEquals("bar", threadContext.getHeader("foo"));
+ assertEquals(new Integer(1), threadContext.getTransient("ctx.foo"));
+ assertEquals("1", threadContext.getHeader("default"));
+ }
+
+ public void testStoreContext() {
+ Settings build = Settings.builder().put("request.headers.default", "1").build();
+ ThreadContext threadContext = new ThreadContext(build);
+ threadContext.putHeader("foo", "bar");
+ threadContext.putTransient("ctx.foo", new Integer(1));
+ assertEquals("bar", threadContext.getHeader("foo"));
+ assertEquals(new Integer(1), threadContext.getTransient("ctx.foo"));
+ assertEquals("1", threadContext.getHeader("default"));
+ ThreadContext.StoredContext storedContext = threadContext.newStoredContext();
+ threadContext.putHeader("foo.bar", "baz");
+ try (ThreadContext.StoredContext ctx = threadContext.stashContext()) {
+ assertNull(threadContext.getHeader("foo"));
+ assertNull(threadContext.getTransient("ctx.foo"));
+ assertEquals("1", threadContext.getHeader("default"));
+ }
+
+ assertEquals("bar", threadContext.getHeader("foo"));
+ assertEquals(new Integer(1), threadContext.getTransient("ctx.foo"));
+ assertEquals("1", threadContext.getHeader("default"));
+ assertEquals("baz", threadContext.getHeader("foo.bar"));
+ if (randomBoolean()) {
+ storedContext.restore();
+ } else {
+ storedContext.close();
+ }
+ assertEquals("bar", threadContext.getHeader("foo"));
+ assertEquals(new Integer(1), threadContext.getTransient("ctx.foo"));
+ assertEquals("1", threadContext.getHeader("default"));
+ assertNull(threadContext.getHeader("foo.bar"));
+ }
+
+ public void testCopyHeaders() {
+ Settings build = Settings.builder().put("request.headers.default", "1").build();
+ ThreadContext threadContext = new ThreadContext(build);
+ threadContext.copyHeaders(Collections.<String,String>emptyMap().entrySet());
+ threadContext.copyHeaders(Collections.<String,String>singletonMap("foo", "bar").entrySet());
+ assertEquals("bar", threadContext.getHeader("foo"));
+ }
+
+ public void testAccessClosed() throws IOException {
+ Settings build = Settings.builder().put("request.headers.default", "1").build();
+ ThreadContext threadContext = new ThreadContext(build);
+ threadContext.putHeader("foo", "bar");
+ threadContext.putTransient("ctx.foo", new Integer(1));
+
+ threadContext.close();
+ try {
+ threadContext.getHeader("foo");
+ fail();
+ } catch (IllegalStateException ise) {
+ assertEquals("threadcontext is already closed", ise.getMessage());
+ }
+
+ try {
+ threadContext.putTransient("foo", new Object());
+ fail();
+ } catch (IllegalStateException ise) {
+ assertEquals("threadcontext is already closed", ise.getMessage());
+ }
+
+ try {
+ threadContext.putHeader("boom", "boom");
+ fail();
+ } catch (IllegalStateException ise) {
+ assertEquals("threadcontext is already closed", ise.getMessage());
+ }
+ }
+
+ public void testSerialize() throws IOException {
+ Settings build = Settings.builder().put("request.headers.default", "1").build();
+ ThreadContext threadContext = new ThreadContext(build);
+ threadContext.putHeader("foo", "bar");
+ threadContext.putTransient("ctx.foo", new Integer(1));
+ BytesStreamOutput out = new BytesStreamOutput();
+ threadContext.writeTo(out);
+ try (ThreadContext.StoredContext ctx = threadContext.stashContext()) {
+ assertNull(threadContext.getHeader("foo"));
+ assertNull(threadContext.getTransient("ctx.foo"));
+ assertEquals("1", threadContext.getHeader("default"));
+
+ threadContext.readHeaders(StreamInput.wrap(out.bytes()));
+ assertEquals("bar", threadContext.getHeader("foo"));
+ assertNull(threadContext.getTransient("ctx.foo"));
+ }
+ assertEquals("bar", threadContext.getHeader("foo"));
+ assertEquals(new Integer(1), threadContext.getTransient("ctx.foo"));
+ assertEquals("1", threadContext.getHeader("default"));
+ }
+
+ public void testSerializeInDifferentContext() throws IOException {
+ BytesStreamOutput out = new BytesStreamOutput();
+ {
+ Settings build = Settings.builder().put("request.headers.default", "1").build();
+ ThreadContext threadContext = new ThreadContext(build);
+ threadContext.putHeader("foo", "bar");
+ threadContext.putTransient("ctx.foo", new Integer(1));
+
+ assertEquals("bar", threadContext.getHeader("foo"));
+ assertNotNull(threadContext.getTransient("ctx.foo"));
+ assertEquals("1", threadContext.getHeader("default"));
+ threadContext.writeTo(out);
+ }
+ {
+ Settings otherSettings = Settings.builder().put("request.headers.default", "5").build();
+ ThreadContext otherThreadContext = new ThreadContext(otherSettings);
+ otherThreadContext.readHeaders(StreamInput.wrap(out.bytes()));
+
+ assertEquals("bar", otherThreadContext.getHeader("foo"));
+ assertNull(otherThreadContext.getTransient("ctx.foo"));
+ assertEquals("1", otherThreadContext.getHeader("default"));
+ }
+ }
+
+ public void testSerializeInDifferentContextNoDefaults() throws IOException {
+ BytesStreamOutput out = new BytesStreamOutput();
+ {
+ ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
+ threadContext.putHeader("foo", "bar");
+ threadContext.putTransient("ctx.foo", new Integer(1));
+
+ assertEquals("bar", threadContext.getHeader("foo"));
+ assertNotNull(threadContext.getTransient("ctx.foo"));
+ assertNull(threadContext.getHeader("default"));
+ threadContext.writeTo(out);
+ }
+ {
+ Settings otherSettings = Settings.builder().put("request.headers.default", "5").build();
+ ThreadContext otherThreadContext = new ThreadContext(otherSettings);
+ otherThreadContext.readHeaders(StreamInput.wrap(out.bytes()));
+
+ assertEquals("bar", otherThreadContext.getHeader("foo"));
+ assertNull(otherThreadContext.getTransient("ctx.foo"));
+ assertEquals("5", otherThreadContext.getHeader("default"));
+ }
+ }
+
+
+ public void testCanResetDefault() {
+ Settings build = Settings.builder().put("request.headers.default", "1").build();
+ ThreadContext threadContext = new ThreadContext(build);
+ threadContext.putHeader("default", "2");
+ assertEquals("2", threadContext.getHeader("default"));
+ }
+
+ public void testStashAndMergeWithModifiedDefaults() {
+ Settings build = Settings.builder().put("request.headers.default", "1").build();
+ ThreadContext threadContext = new ThreadContext(build);
+ HashMap<String, String> toMerge = new HashMap<>();
+ toMerge.put("default", "2");
+ try (ThreadContext.StoredContext ctx = threadContext.stashAndMergeHeaders(toMerge)) {
+ assertEquals("2", threadContext.getHeader("default"));
+ }
+
+ build = Settings.builder().put("request.headers.default", "1").build();
+ threadContext = new ThreadContext(build);
+ threadContext.putHeader("default", "4");
+ toMerge = new HashMap<>();
+ toMerge.put("default", "2");
+ try (ThreadContext.StoredContext ctx = threadContext.stashAndMergeHeaders(toMerge)) {
+ assertEquals("4", threadContext.getHeader("default"));
+ }
+ }
+
+}
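
As a usage note for the new ThreadContext class under test: stashContext() gives a clean context for the duration of a try-with-resources block and restores the caller's context afterwards, exactly as testStashContext pins down. A minimal sketch (wrapper class and header values are illustrative):

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;

public class StashContextSketch {
    public static void main(String[] args) {
        ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
        threadContext.putHeader("user", "alice");
        try (ThreadContext.StoredContext ignored = threadContext.stashContext()) {
            // Inside the stash the context is empty, so code here can neither
            // observe nor leak the caller's headers.
            System.out.println(threadContext.getHeader("user")); // null
        }
        // Closing the StoredContext restores the original context.
        System.out.println(threadContext.getHeader("user")); // alice
    }
}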
diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java
index 757b79af38..9d17e3328b 100644
--- a/core/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java
+++ b/core/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java
@@ -315,8 +315,8 @@ public class ObjectParserTests extends ESTestCase {
assertArrayEquals(parse.double_array_field.toArray(), Arrays.asList(2.1d).toArray());
assertEquals(parse.double_field, 2.1d, 0.0d);
- assertArrayEquals(parse.long_array_field.toArray(), Arrays.asList(4l).toArray());
- assertEquals(parse.long_field, 4l);
+ assertArrayEquals(parse.long_array_field.toArray(), Arrays.asList(4L).toArray());
+ assertEquals(parse.long_field, 4L);
assertArrayEquals(parse.string_array_field.toArray(), Arrays.asList("5").toArray());
assertEquals(parse.string_field, "5");
diff --git a/core/src/test/java/org/elasticsearch/deps/joda/SimpleJodaTests.java b/core/src/test/java/org/elasticsearch/deps/joda/SimpleJodaTests.java
index bb531c41da..5cb30a15f1 100644
--- a/core/src/test/java/org/elasticsearch/deps/joda/SimpleJodaTests.java
+++ b/core/src/test/java/org/elasticsearch/deps/joda/SimpleJodaTests.java
@@ -63,7 +63,7 @@ public class SimpleJodaTests extends ESTestCase {
DateTimeFormatter formatter = ISODateTimeFormat.dateTimeNoMillis().withZone(DateTimeZone.UTC);
long millis = formatter.parseMillis("1970-01-01T00:00:00Z");
- assertThat(millis, equalTo(0l));
+ assertThat(millis, equalTo(0L));
}
public void testUpperBound() {
@@ -79,20 +79,20 @@ public class SimpleJodaTests extends ESTestCase {
public void testIsoDateFormatDateOptionalTimeUTC() {
DateTimeFormatter formatter = ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC);
long millis = formatter.parseMillis("1970-01-01T00:00:00Z");
- assertThat(millis, equalTo(0l));
+ assertThat(millis, equalTo(0L));
millis = formatter.parseMillis("1970-01-01T00:00:00.001Z");
- assertThat(millis, equalTo(1l));
+ assertThat(millis, equalTo(1L));
millis = formatter.parseMillis("1970-01-01T00:00:00.1Z");
- assertThat(millis, equalTo(100l));
+ assertThat(millis, equalTo(100L));
millis = formatter.parseMillis("1970-01-01T00:00:00.1");
- assertThat(millis, equalTo(100l));
+ assertThat(millis, equalTo(100L));
millis = formatter.parseMillis("1970-01-01T00:00:00");
- assertThat(millis, equalTo(0l));
+ assertThat(millis, equalTo(0L));
millis = formatter.parseMillis("1970-01-01");
- assertThat(millis, equalTo(0l));
+ assertThat(millis, equalTo(0L));
millis = formatter.parseMillis("1970");
- assertThat(millis, equalTo(0l));
+ assertThat(millis, equalTo(0L));
try {
formatter.parseMillis("1970 kuku");
@@ -109,15 +109,15 @@ public class SimpleJodaTests extends ESTestCase {
public void testIsoVsCustom() {
DateTimeFormatter formatter = ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC);
long millis = formatter.parseMillis("1970-01-01T00:00:00");
- assertThat(millis, equalTo(0l));
+ assertThat(millis, equalTo(0L));
formatter = DateTimeFormat.forPattern("yyyy/MM/dd HH:mm:ss").withZone(DateTimeZone.UTC);
millis = formatter.parseMillis("1970/01/01 00:00:00");
- assertThat(millis, equalTo(0l));
+ assertThat(millis, equalTo(0L));
FormatDateTimeFormatter formatter2 = Joda.forPattern("yyyy/MM/dd HH:mm:ss");
millis = formatter2.parser().parseMillis("1970/01/01 00:00:00");
- assertThat(millis, equalTo(0l));
+ assertThat(millis, equalTo(0L));
}
public void testWriteAndParse() {
@@ -345,19 +345,19 @@ public class SimpleJodaTests extends ESTestCase {
public void testThatEpochParserIsIdempotent() {
FormatDateTimeFormatter formatter = Joda.forPattern("epoch_millis");
DateTime dateTime = formatter.parser().parseDateTime("1234567890123");
- assertThat(dateTime.getMillis(), is(1234567890123l));
+ assertThat(dateTime.getMillis(), is(1234567890123L));
dateTime = formatter.printer().parseDateTime("1234567890456");
- assertThat(dateTime.getMillis(), is(1234567890456l));
+ assertThat(dateTime.getMillis(), is(1234567890456L));
dateTime = formatter.parser().parseDateTime("1234567890789");
- assertThat(dateTime.getMillis(), is(1234567890789l));
+ assertThat(dateTime.getMillis(), is(1234567890789L));
FormatDateTimeFormatter secondsFormatter = Joda.forPattern("epoch_second");
DateTime secondsDateTime = secondsFormatter.parser().parseDateTime("1234567890");
- assertThat(secondsDateTime.getMillis(), is(1234567890000l));
+ assertThat(secondsDateTime.getMillis(), is(1234567890000L));
secondsDateTime = secondsFormatter.printer().parseDateTime("1234567890");
- assertThat(secondsDateTime.getMillis(), is(1234567890000l));
+ assertThat(secondsDateTime.getMillis(), is(1234567890000L));
secondsDateTime = secondsFormatter.parser().parseDateTime("1234567890");
- assertThat(secondsDateTime.getMillis(), is(1234567890000l));
+ assertThat(secondsDateTime.getMillis(), is(1234567890000L));
}
public void testThatDefaultFormatterChecksForCorrectYearLength() throws Exception {
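All of these assertions hinge on Joda's dateOptionalTimeParser accepting anything from a bare year to a fractional-second timestamp and resolving it in UTC. A standalone sketch using plain Joda-Time, the library these tests wrap:

import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;

public class DateOptionalTimeDemo {
    public static void main(String[] args) {
        DateTimeFormatter parser = ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC);
        System.out.println(parser.parseMillis("1970"));                   // 0
        System.out.println(parser.parseMillis("1970-01-01"));             // 0
        System.out.println(parser.parseMillis("1970-01-01T00:00:00.1Z")); // 100
    }
}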
diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java
index 2a1b146da9..64b1f5756c 100644
--- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java
+++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java
@@ -30,6 +30,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.local.LocalDiscovery;
import org.elasticsearch.discovery.zen.ZenDiscovery;
import org.elasticsearch.discovery.zen.elect.ElectMasterService;
+import org.elasticsearch.node.Node;
import org.elasticsearch.node.service.NodeService;
/**
@@ -45,8 +46,8 @@ public class DiscoveryModuleTests extends ModuleTestCase {
public void testRegisterMasterElectionService() {
- Settings settings = Settings.builder().put("node.local", false).
- put(DiscoveryModule.ZEN_MASTER_SERVICE_TYPE_KEY, "custom").build();
+ Settings settings = Settings.builder().put(Node.NODE_LOCAL_SETTING.getKey(), false).
+ put(DiscoveryModule.ZEN_MASTER_SERVICE_TYPE_SETTING.getKey(), "custom").build();
DiscoveryModule module = new DiscoveryModule(settings);
module.addElectMasterService("custom", DummyMasterElectionService.class);
assertBinding(module, ElectMasterService.class, DummyMasterElectionService.class);
@@ -54,8 +55,8 @@ public class DiscoveryModuleTests extends ModuleTestCase {
}
public void testLoadUnregisteredMasterElectionService() {
- Settings settings = Settings.builder().put("node.local", false).
- put(DiscoveryModule.ZEN_MASTER_SERVICE_TYPE_KEY, "foobar").build();
+ Settings settings = Settings.builder().put(Node.NODE_LOCAL_SETTING.getKey(), false).
+ put(DiscoveryModule.ZEN_MASTER_SERVICE_TYPE_SETTING.getKey(), "foobar").build();
DiscoveryModule module = new DiscoveryModule(settings);
module.addElectMasterService("custom", DummyMasterElectionService.class);
assertBindingFailure(module, "Unknown master service type [foobar]");
@@ -63,15 +64,15 @@ public class DiscoveryModuleTests extends ModuleTestCase {
public void testRegisterDefaults() {
boolean local = randomBoolean();
- Settings settings = Settings.builder().put("node.local", local).build();
+ Settings settings = Settings.builder().put(Node.NODE_LOCAL_SETTING.getKey(), local).build();
DiscoveryModule module = new DiscoveryModule(settings);
assertBinding(module, Discovery.class, local ? LocalDiscovery.class : ZenDiscovery.class);
}
public void testRegisterDiscovery() {
boolean local = randomBoolean();
- Settings settings = Settings.builder().put("node.local", local).
- put(DiscoveryModule.DISCOVERY_TYPE_KEY, "custom").build();
+ Settings settings = Settings.builder().put(Node.NODE_LOCAL_SETTING.getKey(), local).
+ put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "custom").build();
DiscoveryModule module = new DiscoveryModule(settings);
module.addDiscoveryType("custom", DummyDisco.class);
assertBinding(module, Discovery.class, DummyDisco.class);
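The recurring pattern in this file, and in most hunks below, is replacing raw string keys with typed Setting constants: the key is declared once next to its default and parser, so lookups are validated rather than stringly typed. A sketch of both styles, using the Node.NODE_LOCAL_SETTING constant this diff introduces (the typed Setting.get(Settings) read is the same one NodeEnvironmentTests uses later):

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;

public class TypedSettingDemo {
    public static void main(String[] args) {
        // Before: a raw key; a typo here fails silently at runtime.
        Settings legacy = Settings.builder().put("node.local", false).build();

        // After: the Setting constant owns its key, default, and parsing.
        Settings typed = Settings.builder().put(Node.NODE_LOCAL_SETTING.getKey(), false).build();
        boolean local = Node.NODE_LOCAL_SETTING.get(typed); // validated, typed read
        System.out.println(local);
    }
}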
diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java
index e9fa8e495d..c282f3ef18 100644
--- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java
+++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java
@@ -45,6 +45,7 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.math.MathUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.discovery.zen.ZenDiscovery;
@@ -111,6 +112,7 @@ import static org.hamcrest.Matchers.nullValue;
@ClusterScope(scope = Scope.TEST, numDataNodes = 0, transportClientRatio = 0)
@ESIntegTestCase.SuppressLocalMode
+@TestLogging("_root:DEBUG,cluster.service:TRACE")
public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
private static final TimeValue DISRUPTION_HEALING_OVERHEAD = TimeValue.timeValueSeconds(40); // we use 30s as timeout in many places.
@@ -163,12 +165,11 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
}
final static Settings DEFAULT_SETTINGS = Settings.builder()
- .put(FaultDetection.SETTING_PING_TIMEOUT, "1s") // for hitting simulated network failures quickly
- .put(FaultDetection.SETTING_PING_RETRIES, "1") // for hitting simulated network failures quickly
+ .put(FaultDetection.PING_TIMEOUT_SETTING.getKey(), "1s") // for hitting simulated network failures quickly
+ .put(FaultDetection.PING_RETRIES_SETTING.getKey(), "1") // for hitting simulated network failures quickly
.put("discovery.zen.join_timeout", "10s") // still long to induce failures but to long so test won't time out
.put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") // <-- for hitting simulated network failures quickly
.put("http.enabled", false) // just to make test quicker
- .put("gateway.local.list_timeout", "10s") // still long to induce failures but to long so test won't time out
.build();
@Override
@@ -421,7 +422,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
*/
// NOTE: if you remove the AwaitsFix, make sure to port the test to the 1.x branch
@LuceneTestCase.AwaitsFix(bugUrl = "needs some more work to stabilize")
- @TestLogging("action.index:TRACE,action.get:TRACE,discovery:TRACE,cluster.service:TRACE,indices.recovery:TRACE,indices.cluster:TRACE")
+ @TestLogging("_root:DEBUG,action.index:TRACE,action.get:TRACE,discovery:TRACE,cluster.service:TRACE,indices.recovery:TRACE,indices.cluster:TRACE")
public void testAckedIndexing() throws Exception {
// TODO: add node count randomization
final List<String> nodes = startCluster(3);
@@ -465,10 +466,10 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
logger.info("[{}] Acquired semaphore and it has {} permits left", name, semaphore.availablePermits());
try {
id = Integer.toString(idGenerator.incrementAndGet());
- int shard = Murmur3HashFunction.hash(id) % numPrimaries;
+ int shard = MathUtils.mod(Murmur3HashFunction.hash(id), numPrimaries);
logger.trace("[{}] indexing id [{}] through node [{}] targeting shard [{}]", name, id, node, shard);
IndexResponse response = client.prepareIndex("test", "type", id).setSource("{}").setTimeout("1s").get();
- assertThat(response.getVersion(), equalTo(1l));
+ assertThat(response.getVersion(), equalTo(1L));
ackedDocs.put(id, node);
logger.trace("[{}] indexed id [{}] through node [{}]", name, id, node);
} catch (ElasticsearchException e) {
@@ -704,7 +705,6 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
* Test that a document which is indexed on the majority side of a partition, is available from the minority side,
* once the partition is healed
*/
- @TestLogging(value = "cluster.service:TRACE")
public void testRejoinDocumentExistsInAllShardCopies() throws Exception {
List<String> nodes = startCluster(3);
@@ -728,14 +728,14 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
IndexResponse indexResponse = internalCluster().client(notIsolatedNode).prepareIndex("test", "type").setSource("field", "value").get();
- assertThat(indexResponse.getVersion(), equalTo(1l));
+ assertThat(indexResponse.getVersion(), equalTo(1L));
logger.info("Verifying if document exists via node[" + notIsolatedNode + "]");
GetResponse getResponse = internalCluster().client(notIsolatedNode).prepareGet("test", "type", indexResponse.getId())
.setPreference("_local")
.get();
assertThat(getResponse.isExists(), is(true));
- assertThat(getResponse.getVersion(), equalTo(1l));
+ assertThat(getResponse.getVersion(), equalTo(1L));
assertThat(getResponse.getId(), equalTo(indexResponse.getId()));
scheme.stopDisrupting();
@@ -749,7 +749,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
.setPreference("_local")
.get();
assertThat(getResponse.isExists(), is(true));
- assertThat(getResponse.getVersion(), equalTo(1l));
+ assertThat(getResponse.getVersion(), equalTo(1L));
assertThat(getResponse.getId(), equalTo(indexResponse.getId()));
}
}
@@ -794,7 +794,6 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
assertMaster(masterNode, nodes);
}
- @TestLogging("discovery.zen:TRACE,cluster.service:TRACE")
public void testIsolatedUnicastNodes() throws Exception {
List<String> nodes = startCluster(4, -1, new int[]{0});
// Figure out what is the elected master node
@@ -961,7 +960,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
// don't wait for initial state; we want to add the disruption while the cluster is forming...
internalCluster().startNodesAsync(3,
Settings.builder()
- .put(DiscoveryService.SETTING_INITIAL_STATE_TIMEOUT, "1ms")
+ .put(DiscoveryService.INITIAL_STATE_TIMEOUT_SETTING.getKey(), "1ms")
.put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "3s")
.build()).get();
@@ -978,7 +977,6 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
* sure that the node is removed from the cluster, that the node starts pinging and that
* the cluster reforms when healed.
*/
- @TestLogging("discovery.zen:TRACE,action:TRACE")
public void testNodeNotReachableFromMaster() throws Exception {
startCluster(3);
@@ -1044,14 +1042,14 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
CountDownLatch beginRelocationLatch = new CountDownLatch(1);
CountDownLatch endRelocationLatch = new CountDownLatch(1);
transportServiceNode2.addTracer(new IndicesStoreIntegrationIT.ReclocationStartEndTracer(logger, beginRelocationLatch, endRelocationLatch));
- internalCluster().client().admin().cluster().prepareReroute().add(new MoveAllocationCommand(new ShardId("test", 0), node_1, node_2)).get();
+ internalCluster().client().admin().cluster().prepareReroute().add(new MoveAllocationCommand("test", 0, node_1, node_2)).get();
// wait for relocation to start
beginRelocationLatch.await();
disruption.startDisrupting();
// wait for relocation to finish
endRelocationLatch.await();
// now search for the documents and see if we get a reply
- assertThat(client().prepareSearch().setSize(0).get().getHits().totalHits(), equalTo(100l));
+ assertThat(client().prepareSearch().setSize(0).get().getHits().totalHits(), equalTo(100L));
}
public void testIndexImportedFromDataOnlyNodesIfMasterLostDataFolder() throws Exception {
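The switch from % to MathUtils.mod above fixes a real bug class: Murmur3 hashes are signed, and Java's % keeps the sign of the dividend, so a negative hash would yield a negative shard number. The JDK's Math.floorMod demonstrates the non-negative behavior the test needs here (assuming MathUtils.mod agrees with floorMod for positive divisors, which is what this call site relies on):

public class ModDemo {
    public static void main(String[] args) {
        int hash = -7;   // Murmur3 can return negative ints
        int shards = 5;
        System.out.println(hash % shards);               // -2: not a valid shard id
        System.out.println(Math.floorMod(hash, shards)); // 3: always in [0, shards)
    }
}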
diff --git a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java
index e7a10b0f62..e3279d2839 100644
--- a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java
+++ b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java
@@ -131,8 +131,8 @@ public class ZenFaultDetectionTests extends ESTestCase {
Settings.Builder settings = Settings.builder();
boolean shouldRetry = randomBoolean();
// make sure we don't ping again after the initial ping
- settings.put(FaultDetection.SETTING_CONNECT_ON_NETWORK_DISCONNECT, shouldRetry)
- .put(FaultDetection.SETTING_PING_INTERVAL, "5m");
+ settings.put(FaultDetection.CONNECT_ON_NETWORK_DISCONNECT_SETTING.getKey(), shouldRetry)
+ .put(FaultDetection.PING_INTERVAL_SETTING.getKey(), "5m");
ClusterState clusterState = ClusterState.builder(new ClusterName("test")).nodes(buildNodesForA(true)).build();
NodesFaultDetection nodesFDA = new NodesFaultDetection(settings.build(), threadPool, serviceA, clusterState.getClusterName());
nodesFDA.setLocalNode(nodeA);
@@ -179,8 +179,8 @@ public class ZenFaultDetectionTests extends ESTestCase {
Settings.Builder settings = Settings.builder();
boolean shouldRetry = randomBoolean();
// make sure we don't ping
- settings.put(FaultDetection.SETTING_CONNECT_ON_NETWORK_DISCONNECT, shouldRetry)
- .put(FaultDetection.SETTING_PING_INTERVAL, "5m");
+ settings.put(FaultDetection.CONNECT_ON_NETWORK_DISCONNECT_SETTING.getKey(), shouldRetry)
+ .put(FaultDetection.PING_INTERVAL_SETTING.getKey(), "5m");
ClusterName clusterName = new ClusterName(randomAsciiOfLengthBetween(3, 20));
final ClusterState state = ClusterState.builder(clusterName).nodes(buildNodesForA(false)).build();
MasterFaultDetection masterFD = new MasterFaultDetection(settings.build(), threadPool, serviceA, clusterName,
diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java
index 9f9c0420c2..6c564a9774 100644
--- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java
+++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java
@@ -45,6 +45,7 @@ import org.elasticsearch.discovery.zen.elect.ElectMasterService;
import org.elasticsearch.discovery.zen.fd.FaultDetection;
import org.elasticsearch.discovery.zen.membership.MembershipAction;
import org.elasticsearch.discovery.zen.publish.PublishClusterStateAction;
+import org.elasticsearch.node.Node;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.TestCustomMetaData;
import org.elasticsearch.test.junit.annotations.TestLogging;
@@ -77,6 +78,7 @@ import static org.hamcrest.Matchers.sameInstance;
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0)
@ESIntegTestCase.SuppressLocalMode
+@TestLogging("_root:DEBUG")
public class ZenDiscoveryIT extends ESIntegTestCase {
public void testChangeRejoinOnMasterOptionIsDynamic() throws Exception {
Settings nodeSettings = Settings.settingsBuilder()
@@ -95,18 +97,18 @@ public class ZenDiscoveryIT extends ESIntegTestCase {
public void testNoShardRelocationsOccurWhenElectedMasterNodeFails() throws Exception {
Settings defaultSettings = Settings.builder()
- .put(FaultDetection.SETTING_PING_TIMEOUT, "1s")
- .put(FaultDetection.SETTING_PING_RETRIES, "1")
+ .put(FaultDetection.PING_TIMEOUT_SETTING.getKey(), "1s")
+ .put(FaultDetection.PING_RETRIES_SETTING.getKey(), "1")
.put("discovery.type", "zen")
.build();
Settings masterNodeSettings = Settings.builder()
- .put("node.data", false)
+ .put(Node.NODE_DATA_SETTING.getKey(), false)
.put(defaultSettings)
.build();
internalCluster().startNodesAsync(2, masterNodeSettings).get();
Settings dataNodeSettings = Settings.builder()
- .put("node.master", false)
+ .put(Node.NODE_MASTER_SETTING.getKey(), false)
.put(defaultSettings)
.build();
internalCluster().startNodesAsync(2, dataNodeSettings).get();
@@ -139,21 +141,20 @@ public class ZenDiscoveryIT extends ESIntegTestCase {
assertThat(numRecoveriesAfterNewMaster, equalTo(numRecoveriesBeforeNewMaster));
}
- @TestLogging(value = "action.admin.cluster.health:TRACE")
public void testNodeFailuresAreProcessedOnce() throws ExecutionException, InterruptedException, IOException {
Settings defaultSettings = Settings.builder()
- .put(FaultDetection.SETTING_PING_TIMEOUT, "1s")
- .put(FaultDetection.SETTING_PING_RETRIES, "1")
+ .put(FaultDetection.PING_TIMEOUT_SETTING.getKey(), "1s")
+ .put(FaultDetection.PING_RETRIES_SETTING.getKey(), "1")
.put("discovery.type", "zen")
.build();
Settings masterNodeSettings = Settings.builder()
- .put("node.data", false)
+ .put(Node.NODE_DATA_SETTING.getKey(), false)
.put(defaultSettings)
.build();
String master = internalCluster().startNode(masterNodeSettings);
Settings dataNodeSettings = Settings.builder()
- .put("node.master", false)
+ .put(Node.NODE_MASTER_SETTING.getKey(), false)
.put(defaultSettings)
.build();
internalCluster().startNodesAsync(2, dataNodeSettings).get();
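Both tests above build dedicated tiers the same way: a node becomes master-only by disabling its data role, and data-only by disabling its master eligibility. A sketch of the two role profiles with the constants from this diff:

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;

public class NodeRolesDemo {
    public static void main(String[] args) {
        // Dedicated master: eligible for election, stores no shard data.
        Settings masterOnly = Settings.builder()
                .put(Node.NODE_DATA_SETTING.getKey(), false)
                .build();
        // Dedicated data node: holds shards, never runs for master.
        Settings dataOnly = Settings.builder()
                .put(Node.NODE_MASTER_SETTING.getKey(), false)
                .build();
        System.out.println(masterOnly + " / " + dataOnly);
    }
}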
diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java
index a3b2caca31..f44faea19a 100644
--- a/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java
+++ b/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java
@@ -37,6 +37,7 @@ import org.elasticsearch.node.service.NodeService;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.transport.TransportSettings;
import org.elasticsearch.transport.netty.NettyTransport;
import java.net.InetSocketAddress;
@@ -48,7 +49,7 @@ public class UnicastZenPingIT extends ESTestCase {
Settings settings = Settings.EMPTY;
int startPort = 11000 + randomIntBetween(0, 1000);
int endPort = startPort + 10;
- settings = Settings.builder().put(settings).put("transport.tcp.port", startPort + "-" + endPort).build();
+ settings = Settings.builder().put(settings).put(TransportSettings.PORT.getKey(), startPort + "-" + endPort).build();
ThreadPool threadPool = new ThreadPool(getClass().getName());
ClusterName clusterName = new ClusterName("test");
diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java
index 6faa02e16d..738c671327 100644
--- a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java
+++ b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java
@@ -43,6 +43,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.discovery.DiscoverySettings;
import org.elasticsearch.discovery.zen.DiscoveryNodesProvider;
+import org.elasticsearch.node.Node;
import org.elasticsearch.node.service.NodeService;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.junit.annotations.TestLogging;
@@ -486,7 +487,7 @@ public class PublishClusterStateActionTests extends ESTestCase {
discoveryNodesBuilder.put(createMockNode("node" + i).discoveryNode);
}
final int dataNodes = randomIntBetween(0, 5);
- final Settings dataSettings = Settings.builder().put("node.master", false).build();
+ final Settings dataSettings = Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false).build();
for (int i = 0; i < dataNodes; i++) {
discoveryNodesBuilder.put(createMockNode("data_" + i, dataSettings).discoveryNode);
}
@@ -544,7 +545,7 @@ public class PublishClusterStateActionTests extends ESTestCase {
}
final int dataNodes = randomIntBetween(0, 3); // data nodes don't matter
for (int i = 0; i < dataNodes; i++) {
- final MockNode mockNode = createMockNode("data_" + i, Settings.builder().put("node.master", false).build());
+ final MockNode mockNode = createMockNode("data_" + i, Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false).build());
discoveryNodesBuilder.put(mockNode.discoveryNode);
if (randomBoolean()) {
// we really don't care - just chaos monkey
diff --git a/core/src/test/java/org/elasticsearch/document/DocumentActionsIT.java b/core/src/test/java/org/elasticsearch/document/DocumentActionsIT.java
index 52f19d7dee..6f002e8404 100644
--- a/core/src/test/java/org/elasticsearch/document/DocumentActionsIT.java
+++ b/core/src/test/java/org/elasticsearch/document/DocumentActionsIT.java
@@ -157,14 +157,14 @@ public class DocumentActionsIT extends ESIntegTestCase {
// test successful
SearchResponse countResponse = client().prepareSearch("test").setSize(0).setQuery(termQuery("_type", "type1")).execute().actionGet();
assertNoFailures(countResponse);
- assertThat(countResponse.getHits().totalHits(), equalTo(2l));
+ assertThat(countResponse.getHits().totalHits(), equalTo(2L));
assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries));
assertThat(countResponse.getFailedShards(), equalTo(0));
// count with no query is a match all one
countResponse = client().prepareSearch("test").setSize(0).execute().actionGet();
assertThat("Failures " + countResponse.getShardFailures(), countResponse.getShardFailures() == null ? 0 : countResponse.getShardFailures().length, equalTo(0));
- assertThat(countResponse.getHits().totalHits(), equalTo(2l));
+ assertThat(countResponse.getHits().totalHits(), equalTo(2L));
assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries));
assertThat(countResponse.getFailedShards(), equalTo(0));
}
diff --git a/core/src/test/java/org/elasticsearch/env/EnvironmentTests.java b/core/src/test/java/org/elasticsearch/env/EnvironmentTests.java
index 79f9efbb81..0a62d2829d 100644
--- a/core/src/test/java/org/elasticsearch/env/EnvironmentTests.java
+++ b/core/src/test/java/org/elasticsearch/env/EnvironmentTests.java
@@ -40,8 +40,8 @@ public class EnvironmentTests extends ESTestCase {
public Environment newEnvironment(Settings settings) throws IOException {
Settings build = Settings.builder()
.put(settings)
- .put("path.home", createTempDir().toAbsolutePath())
- .putArray("path.data", tmpPaths()).build();
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath())
+ .putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build();
return new Environment(build);
}
@@ -49,7 +49,7 @@ public class EnvironmentTests extends ESTestCase {
Environment environment = newEnvironment();
assertThat(environment.resolveRepoFile("/test/repos/repo1"), nullValue());
assertThat(environment.resolveRepoFile("test/repos/repo1"), nullValue());
- environment = newEnvironment(settingsBuilder().putArray("path.repo", "/test/repos", "/another/repos", "/test/repos/../other").build());
+ environment = newEnvironment(settingsBuilder().putArray(Environment.PATH_REPO_SETTING.getKey(), "/test/repos", "/another/repos", "/test/repos/../other").build());
assertThat(environment.resolveRepoFile("/test/repos/repo1"), notNullValue());
assertThat(environment.resolveRepoFile("test/repos/repo1"), notNullValue());
assertThat(environment.resolveRepoFile("/another/repos/repo1"), notNullValue());
diff --git a/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java b/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java
index acee455bb6..d255a80fbb 100644
--- a/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java
+++ b/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java
@@ -18,10 +18,12 @@
*/
package org.elasticsearch.env;
+import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
@@ -41,44 +43,84 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.Matchers.arrayWithSize;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.empty;
@LuceneTestCase.SuppressFileSystems("ExtrasFS") // TODO: fix test to allow extras
public class NodeEnvironmentTests extends ESTestCase {
private final IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("foo", Settings.EMPTY);
+ public void testNodeLockSillySettings() {
+ try {
+ NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.get(Settings.builder()
+ .put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), between(Integer.MIN_VALUE, 0)).build());
+ fail("expected failure");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), containsString("must be >= 1"));
+ }
+
+ // Even though it's silly, MAXINT nodes is a-ok!
+ int value = between(1, Integer.MAX_VALUE);
+ int max = NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.get(
+ Settings.builder().put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), value).build());
+ assertEquals(value, max);
+ }
+
public void testNodeLockSingleEnvironment() throws IOException {
NodeEnvironment env = newNodeEnvironment(Settings.builder()
- .put("node.max_local_storage_nodes", 1).build());
+ .put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), 1).build());
Settings settings = env.getSettings();
- String[] dataPaths = env.getSettings().getAsArray("path.data");
+ List<String> dataPaths = Environment.PATH_DATA_SETTING.get(env.getSettings());
try {
+ // Reuse the same location and attempt to lock again
new NodeEnvironment(settings, new Environment(settings));
- fail("env is already locked");
+ fail("env has already locked all the data directories it is allowed");
} catch (IllegalStateException ex) {
-
+ assertThat(ex.getMessage(), containsString("Failed to obtain node lock"));
}
- env.close();
- // now can recreate and lock it
+ // Close the environment that holds the lock and make sure we can get the lock after release
+ env.close();
env = new NodeEnvironment(settings, new Environment(settings));
- assertEquals(env.nodeDataPaths().length, dataPaths.length);
+ assertThat(env.nodeDataPaths(), arrayWithSize(dataPaths.size()));
- for (int i = 0; i < dataPaths.length; i++) {
- assertTrue(env.nodeDataPaths()[i].startsWith(PathUtils.get(dataPaths[i])));
+ for (int i = 0; i < dataPaths.size(); i++) {
+ assertTrue(env.nodeDataPaths()[i].startsWith(PathUtils.get(dataPaths.get(i))));
}
env.close();
- assertTrue("LockedShards: " + env.lockedShards(), env.lockedShards().isEmpty());
+ assertThat(env.lockedShards(), empty());
+ }
+ @SuppressForbidden(reason = "System.out.*")
+ public void testSegmentInfosTracing() {
+ // Defaults to not hooking up std out
+ assertNull(SegmentInfos.getInfoStream());
+
+ try {
+ // False means don't hook up std out
+ NodeEnvironment.applySegmentInfosTrace(
+ Settings.builder().put(NodeEnvironment.ENABLE_LUCENE_SEGMENT_INFOS_TRACE_SETTING.getKey(), false).build());
+ assertNull(SegmentInfos.getInfoStream());
+
+ // But true means statically hooking up std out
+ NodeEnvironment.applySegmentInfosTrace(
+ Settings.builder().put(NodeEnvironment.ENABLE_LUCENE_SEGMENT_INFOS_TRACE_SETTING.getKey(), true).build());
+ assertEquals(System.out, SegmentInfos.getInfoStream());
+ } finally {
+ // Clean up after ourselves
+ SegmentInfos.setInfoStream(null);
+ }
}
public void testNodeLockMultipleEnvironment() throws IOException {
final NodeEnvironment first = newNodeEnvironment();
- String[] dataPaths = first.getSettings().getAsArray("path.data");
+ List<String> dataPaths = Environment.PATH_DATA_SETTING.get(first.getSettings());
NodeEnvironment second = new NodeEnvironment(first.getSettings(), new Environment(first.getSettings()));
- assertEquals(first.nodeDataPaths().length, dataPaths.length);
- assertEquals(second.nodeDataPaths().length, dataPaths.length);
- for (int i = 0; i < dataPaths.length; i++) {
+ assertEquals(first.nodeDataPaths().length, dataPaths.size());
+ assertEquals(second.nodeDataPaths().length, dataPaths.size());
+ for (int i = 0; i < dataPaths.size(); i++) {
assertEquals(first.nodeDataPaths()[i].getParent(), second.nodeDataPaths()[i].getParent());
}
IOUtils.close(first, second);
@@ -87,21 +129,21 @@ public class NodeEnvironmentTests extends ESTestCase {
public void testShardLock() throws IOException {
final NodeEnvironment env = newNodeEnvironment();
- ShardLock fooLock = env.shardLock(new ShardId("foo", 0));
- assertEquals(new ShardId("foo", 0), fooLock.getShardId());
+ ShardLock fooLock = env.shardLock(new ShardId("foo", "_na_", 0));
+ assertEquals(new ShardId("foo", "_na_", 0), fooLock.getShardId());
try {
- env.shardLock(new ShardId("foo", 0));
+ env.shardLock(new ShardId("foo", "_na_", 0));
fail("shard is locked");
} catch (LockObtainFailedException ex) {
// expected
}
- for (Path path : env.indexPaths(new Index("foo"))) {
+ for (Path path : env.indexPaths("foo")) {
Files.createDirectories(path.resolve("0"));
Files.createDirectories(path.resolve("1"));
}
try {
- env.lockAllForIndex(new Index("foo"), idxSettings, randomIntBetween(0, 10));
+ env.lockAllForIndex(new Index("foo", "_na_"), idxSettings, randomIntBetween(0, 10));
fail("shard 0 is locked");
} catch (LockObtainFailedException ex) {
// expected
@@ -109,11 +151,11 @@ public class NodeEnvironmentTests extends ESTestCase {
fooLock.close();
// can lock again?
- env.shardLock(new ShardId("foo", 0)).close();
+ env.shardLock(new ShardId("foo", "_na_", 0)).close();
- List<ShardLock> locks = env.lockAllForIndex(new Index("foo"), idxSettings, randomIntBetween(0, 10));
+ List<ShardLock> locks = env.lockAllForIndex(new Index("foo", "_na_"), idxSettings, randomIntBetween(0, 10));
try {
- env.shardLock(new ShardId("foo", 0));
+ env.shardLock(new ShardId("foo", "_na_", 0));
fail("shard is locked");
} catch (LockObtainFailedException ex) {
// expected
@@ -127,7 +169,7 @@ public class NodeEnvironmentTests extends ESTestCase {
final NodeEnvironment env = newNodeEnvironment();
final int numIndices = randomIntBetween(1, 10);
for (int i = 0; i < numIndices; i++) {
- for (Path path : env.indexPaths(new Index("foo" + i))) {
+ for (Path path : env.indexPaths("foo" + i)) {
Files.createDirectories(path);
}
}
@@ -142,44 +184,44 @@ public class NodeEnvironmentTests extends ESTestCase {
public void testDeleteSafe() throws IOException, InterruptedException {
final NodeEnvironment env = newNodeEnvironment();
- ShardLock fooLock = env.shardLock(new ShardId("foo", 0));
- assertEquals(new ShardId("foo", 0), fooLock.getShardId());
+ ShardLock fooLock = env.shardLock(new ShardId("foo", "_na_", 0));
+ assertEquals(new ShardId("foo", "_na_", 0), fooLock.getShardId());
- for (Path path : env.indexPaths(new Index("foo"))) {
+ for (Path path : env.indexPaths("foo")) {
Files.createDirectories(path.resolve("0"));
Files.createDirectories(path.resolve("1"));
}
try {
- env.deleteShardDirectorySafe(new ShardId("foo", 0), idxSettings);
+ env.deleteShardDirectorySafe(new ShardId("foo", "_na_", 0), idxSettings);
fail("shard is locked");
} catch (LockObtainFailedException ex) {
// expected
}
- for (Path path : env.indexPaths(new Index("foo"))) {
+ for (Path path : env.indexPaths("foo")) {
assertTrue(Files.exists(path.resolve("0")));
assertTrue(Files.exists(path.resolve("1")));
}
- env.deleteShardDirectorySafe(new ShardId("foo", 1), idxSettings);
+ env.deleteShardDirectorySafe(new ShardId("foo", "_na_", 1), idxSettings);
- for (Path path : env.indexPaths(new Index("foo"))) {
+ for (Path path : env.indexPaths("foo")) {
assertTrue(Files.exists(path.resolve("0")));
assertFalse(Files.exists(path.resolve("1")));
}
try {
- env.deleteIndexDirectorySafe(new Index("foo"), randomIntBetween(0, 10), idxSettings);
+ env.deleteIndexDirectorySafe(new Index("foo", "_na_"), randomIntBetween(0, 10), idxSettings);
fail("shard is locked");
} catch (LockObtainFailedException ex) {
// expected
}
fooLock.close();
- for (Path path : env.indexPaths(new Index("foo"))) {
+ for (Path path : env.indexPaths("foo")) {
assertTrue(Files.exists(path));
}
@@ -200,7 +242,7 @@ public class NodeEnvironmentTests extends ESTestCase {
@Override
protected void doRun() throws Exception {
start.await();
- try (ShardLock autoCloses = env.shardLock(new ShardId("foo", 0))) {
+ try (ShardLock autoCloses = env.shardLock(new ShardId("foo", "_na_", 0))) {
blockLatch.countDown();
Thread.sleep(randomIntBetween(1, 10));
}
@@ -215,11 +257,11 @@ public class NodeEnvironmentTests extends ESTestCase {
start.countDown();
blockLatch.await();
- env.deleteIndexDirectorySafe(new Index("foo"), 5000, idxSettings);
+ env.deleteIndexDirectorySafe(new Index("foo", "_na_"), 5000, idxSettings);
assertNull(threadException.get());
- for (Path path : env.indexPaths(new Index("foo"))) {
+ for (Path path : env.indexPaths("foo")) {
assertFalse(Files.exists(path));
}
latch.await();
@@ -258,7 +300,7 @@ public class NodeEnvironmentTests extends ESTestCase {
for (int i = 0; i < iters; i++) {
int shard = randomIntBetween(0, counts.length - 1);
try {
- try (ShardLock autoCloses = env.shardLock(new ShardId("foo", shard), scaledRandomIntBetween(0, 10))) {
+ try (ShardLock autoCloses = env.shardLock(new ShardId("foo", "_na_", shard), scaledRandomIntBetween(0, 10))) {
counts[shard].value++;
countsAtomic[shard].incrementAndGet();
assertEquals(flipFlop[shard].incrementAndGet(), 1);
@@ -294,8 +336,8 @@ public class NodeEnvironmentTests extends ESTestCase {
IndexSettings s1 = IndexSettingsModule.newIndexSettings("myindex", Settings.EMPTY);
IndexSettings s2 = IndexSettingsModule.newIndexSettings("myindex", Settings.builder().put(IndexMetaData.SETTING_DATA_PATH, "/tmp/foo").build());
- ShardId sid = new ShardId("myindex", 0);
- Index i = new Index("myindex");
+ Index index = new Index("myindex", "_na_");
+ ShardId sid = new ShardId(index, 0);
assertFalse("no settings should mean no custom data path", s1.hasCustomDataPath());
assertTrue("settings with path_data should have a custom data path", s2.hasCustomDataPath());
@@ -308,11 +350,11 @@ public class NodeEnvironmentTests extends ESTestCase {
equalTo(stringsToPaths(dataPaths, "elasticsearch/nodes/0/indices/myindex/0")));
assertThat("index paths uses the regular template",
- env.indexPaths(i), equalTo(stringsToPaths(dataPaths, "elasticsearch/nodes/0/indices/myindex")));
+ env.indexPaths(index.getName()), equalTo(stringsToPaths(dataPaths, "elasticsearch/nodes/0/indices/myindex")));
env.close();
NodeEnvironment env2 = newNodeEnvironment(dataPaths, "/tmp",
- Settings.builder().put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH, false).build());
+ Settings.builder().put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH.getKey(), false).build());
assertThat(env2.availableShardPaths(sid), equalTo(env2.availableShardPaths(sid)));
assertThat(env2.resolveCustomLocation(s2, sid), equalTo(PathUtils.get("/tmp/foo/myindex/0")));
@@ -322,7 +364,7 @@ public class NodeEnvironmentTests extends ESTestCase {
equalTo(stringsToPaths(dataPaths, "elasticsearch/nodes/0/indices/myindex/0")));
assertThat("index paths uses the regular template",
- env2.indexPaths(i), equalTo(stringsToPaths(dataPaths, "elasticsearch/nodes/0/indices/myindex")));
+ env2.indexPaths(index.getName()), equalTo(stringsToPaths(dataPaths, "elasticsearch/nodes/0/indices/myindex")));
env2.close();
}
@@ -355,25 +397,25 @@ public class NodeEnvironmentTests extends ESTestCase {
public NodeEnvironment newNodeEnvironment(Settings settings) throws IOException {
Settings build = Settings.builder()
.put(settings)
- .put("path.home", createTempDir().toAbsolutePath().toString())
- .putArray("path.data", tmpPaths()).build();
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
+ .putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build();
return new NodeEnvironment(build, new Environment(build));
}
public NodeEnvironment newNodeEnvironment(String[] dataPaths, Settings settings) throws IOException {
Settings build = Settings.builder()
.put(settings)
- .put("path.home", createTempDir().toAbsolutePath().toString())
- .putArray("path.data", dataPaths).build();
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
+ .putArray(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build();
return new NodeEnvironment(build, new Environment(build));
}
public NodeEnvironment newNodeEnvironment(String[] dataPaths, String sharedDataPath, Settings settings) throws IOException {
Settings build = Settings.builder()
.put(settings)
- .put("path.home", createTempDir().toAbsolutePath().toString())
- .put("path.shared_data", sharedDataPath)
- .putArray("path.data", dataPaths).build();
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
+ .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), sharedDataPath)
+ .putArray(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build();
return new NodeEnvironment(build, new Environment(build));
}
}
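A thread running through all the ShardId changes above: the constructor now takes an index UUID, with "_na_" as the not-available placeholder, and ShardLock is Closeable, so its lifetime is best tied to a try-with-resources block exactly as testDeleteSafe does. A sketch, assuming a NodeEnvironment env built as in newNodeEnvironment above:

import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.env.ShardLock;
import org.elasticsearch.index.shard.ShardId;

class ShardLockDemo {
    static void withShardLock(NodeEnvironment env) throws Exception {
        // "_na_" is the placeholder index UUID used throughout these tests.
        try (ShardLock lock = env.shardLock(new ShardId("foo", "_na_", 0))) {
            // exclusive access to the shard's directory while the lock is held
        } // released here; a concurrent shardLock() call can now succeed
    }
}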
diff --git a/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsIntegrationIT.java b/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsIntegrationIT.java
index a661575cbe..f5e99ab256 100644
--- a/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsIntegrationIT.java
+++ b/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsIntegrationIT.java
@@ -144,32 +144,32 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
// default:
FieldStatsResponse response = client().prepareFieldStats().setFields("value").get();
assertAllSuccessful(response);
- assertThat(response.getAllFieldStats().get("value").getMinValue(), equalTo(-10l));
- assertThat(response.getAllFieldStats().get("value").getMaxValue(), equalTo(300l));
+ assertThat(response.getAllFieldStats().get("value").getMinValue(), equalTo(-10L));
+ assertThat(response.getAllFieldStats().get("value").getMaxValue(), equalTo(300L));
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
- assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMinValue(), equalTo(-10l));
- assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMaxValue(), equalTo(300l));
+ assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMinValue(), equalTo(-10L));
+ assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMaxValue(), equalTo(300L));
// Level: cluster
response = client().prepareFieldStats().setFields("value").setLevel("cluster").get();
assertAllSuccessful(response);
- assertThat(response.getAllFieldStats().get("value").getMinValue(), equalTo(-10l));
- assertThat(response.getAllFieldStats().get("value").getMaxValue(), equalTo(300l));
+ assertThat(response.getAllFieldStats().get("value").getMinValue(), equalTo(-10L));
+ assertThat(response.getAllFieldStats().get("value").getMaxValue(), equalTo(300L));
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
- assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMinValue(), equalTo(-10l));
- assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMaxValue(), equalTo(300l));
+ assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMinValue(), equalTo(-10L));
+ assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMaxValue(), equalTo(300L));
// Level: indices
response = client().prepareFieldStats().setFields("value").setLevel("indices").get();
assertAllSuccessful(response);
assertThat(response.getAllFieldStats(), nullValue());
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(3));
- assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(-10l));
- assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxValue(), equalTo(100l));
- assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(101l));
- assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(200l));
- assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(201l));
- assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(300l));
+ assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(-10L));
+ assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxValue(), equalTo(100L));
+ assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(101L));
+ assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(200L));
+ assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(201L));
+ assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(300L));
// Illegal level option:
try {
@@ -189,8 +189,8 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
));
ensureGreen("test1", "test2");
- client().prepareIndex("test1", "test").setSource("value", 1l).get();
- client().prepareIndex("test1", "test").setSource("value", 2l).get();
+ client().prepareIndex("test1", "test").setSource("value", 1L).get();
+ client().prepareIndex("test1", "test").setSource("value", 2L).get();
client().prepareIndex("test2", "test").setSource("value", "a").get();
client().prepareIndex("test2", "test").setSource("value", "b").get();
refresh();
@@ -205,8 +205,8 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
FieldStatsResponse response = client().prepareFieldStats().setFields("value").setLevel("indices").get();
assertAllSuccessful(response);
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
- assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(1l));
- assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxValue(), equalTo(2l));
+ assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(1L));
+ assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxValue(), equalTo(2L));
assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(new BytesRef("a")));
assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(new BytesRef("b")));
}
@@ -235,8 +235,8 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
assertAllSuccessful(response);
assertThat(response.getAllFieldStats(), nullValue());
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
- assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(201l));
- assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(300l));
+ assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(201L));
+ assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(300L));
response = client().prepareFieldStats()
.setFields("value")
@@ -246,10 +246,10 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
assertAllSuccessful(response);
assertThat(response.getAllFieldStats(), nullValue());
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
- assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(-10l));
- assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxValue(), equalTo(100l));
- assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(101l));
- assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(200l));
+ assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(-10L));
+ assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxValue(), equalTo(100L));
+ assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(101L));
+ assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(200L));
response = client().prepareFieldStats()
.setFields("value")
@@ -259,10 +259,10 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
assertAllSuccessful(response);
assertThat(response.getAllFieldStats(), nullValue());
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
- assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(101l));
- assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(200l));
- assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(201l));
- assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(300l));
+ assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(101L));
+ assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(200L));
+ assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(201L));
+ assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(300L));
response = client().prepareFieldStats()
.setFields("value")
@@ -290,8 +290,8 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
assertAllSuccessful(response);
assertThat(response.getAllFieldStats(), nullValue());
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
- assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(101l));
- assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(200l));
+ assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(101L));
+ assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(200L));
response = client().prepareFieldStats()
.setFields("value")
@@ -301,8 +301,8 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
assertAllSuccessful(response);
assertThat(response.getAllFieldStats(), nullValue());
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
- assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(201l));
- assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(300l));
+ assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(201L));
+ assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(300L));
}
public void testIncompatibleFilter() throws Exception {
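The merge tests above encode FieldStats.append's sentinel rule: maxDoc is always known and always sums, while a -1 in docCount, sumDocFreq, or sumTotalTermFreq means "not available" and poisons the merged value. A condensed sketch of that behavior, with the constructor arguments in the order the tests use them (maxDoc, docCount, sumDocFreq, sumTotalTermFreq, min, max):

import org.elasticsearch.action.fieldstats.FieldStats;

class FieldStatsMergeDemo {
    static void demo() {
        FieldStats merged = new FieldStats.Long(1, 1L, 1L, 1L, 1L, 1L);
        merged.append(new FieldStats.Long(1, 1L, 1L, 1L, 1L, 1L));
        merged.append(new FieldStats.Long(1, 1L, 1L, 1L, 1L, 1L));
        merged.append(new FieldStats.Long(1, 1L, 1L, 1L, 1L, 1L));
        // merged.getMaxDoc() == 4 and merged.getDocCount() == 4

        merged.append(new FieldStats.Long(1, -1L, -1L, -1L, 1L, 1L));
        // merged.getMaxDoc() == 5, but merged.getDocCount() == -1
    }
}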
diff --git a/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java b/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java
index 60cf2ef5dc..63437d4ebe 100644
--- a/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java
+++ b/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java
@@ -66,9 +66,9 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
}
public void testLong() {
- testNumberRange("field1", "long", 312321312312412l, 312321312312422l);
+ testNumberRange("field1", "long", 312321312312412L, 312321312312422L);
testNumberRange("field1", "long", -5, 5);
- testNumberRange("field1", "long", -312321312312422l, -312321312312412l);
+ testNumberRange("field1", "long", -312321312312422L, -312321312312412L);
}
public void testString() {
@@ -79,8 +79,8 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
client().admin().indices().prepareRefresh().get();
FieldStatsResponse result = client().prepareFieldStats().setFields("field").get();
- assertThat(result.getAllFieldStats().get("field").getMaxDoc(), equalTo(11l));
- assertThat(result.getAllFieldStats().get("field").getDocCount(), equalTo(11l));
+ assertThat(result.getAllFieldStats().get("field").getMaxDoc(), equalTo(11L));
+ assertThat(result.getAllFieldStats().get("field").getDocCount(), equalTo(11L));
assertThat(result.getAllFieldStats().get("field").getDensity(), equalTo(100));
assertThat(result.getAllFieldStats().get("field").getMinValue(), equalTo(new BytesRef(String.format(Locale.ENGLISH, "%03d", 0))));
assertThat(result.getAllFieldStats().get("field").getMaxValue(), equalTo(new BytesRef(String.format(Locale.ENGLISH, "%03d", 10))));
@@ -97,8 +97,8 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
client().admin().indices().prepareRefresh().get();
FieldStatsResponse result = client().prepareFieldStats().setFields(fieldName).get();
- assertThat(result.getAllFieldStats().get(fieldName).getMaxDoc(), equalTo(11l));
- assertThat(result.getAllFieldStats().get(fieldName).getDocCount(), equalTo(11l));
+ assertThat(result.getAllFieldStats().get(fieldName).getMaxDoc(), equalTo(11L));
+ assertThat(result.getAllFieldStats().get(fieldName).getDocCount(), equalTo(11L));
assertThat(result.getAllFieldStats().get(fieldName).getDensity(), equalTo(100));
assertThat(result.getAllFieldStats().get(fieldName).getMinValue(), equalTo(-1d));
assertThat(result.getAllFieldStats().get(fieldName).getMaxValue(), equalTo(9d));
@@ -114,8 +114,8 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
client().admin().indices().prepareRefresh().get();
FieldStatsResponse result = client().prepareFieldStats().setFields(fieldName).get();
- assertThat(result.getAllFieldStats().get(fieldName).getMaxDoc(), equalTo(11l));
- assertThat(result.getAllFieldStats().get(fieldName).getDocCount(), equalTo(11l));
+ assertThat(result.getAllFieldStats().get(fieldName).getMaxDoc(), equalTo(11L));
+ assertThat(result.getAllFieldStats().get(fieldName).getDocCount(), equalTo(11L));
assertThat(result.getAllFieldStats().get(fieldName).getDensity(), equalTo(100));
assertThat(result.getAllFieldStats().get(fieldName).getMinValue(), equalTo(-1f));
assertThat(result.getAllFieldStats().get(fieldName).getMaxValue(), equalTo(9f));
@@ -144,44 +144,44 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
public void testMerge() {
List<FieldStats> stats = new ArrayList<>();
- stats.add(new FieldStats.Long(1, 1l, 1l, 1l, 1l, 1l));
- stats.add(new FieldStats.Long(1, 1l, 1l, 1l, 1l, 1l));
- stats.add(new FieldStats.Long(1, 1l, 1l, 1l, 1l, 1l));
+ stats.add(new FieldStats.Long(1, 1L, 1L, 1L, 1L, 1L));
+ stats.add(new FieldStats.Long(1, 1L, 1L, 1L, 1L, 1L));
+ stats.add(new FieldStats.Long(1, 1L, 1L, 1L, 1L, 1L));
- FieldStats stat = new FieldStats.Long(1, 1l, 1l, 1l, 1l, 1l);
+ FieldStats stat = new FieldStats.Long(1, 1L, 1L, 1L, 1L, 1L);
for (FieldStats otherStat : stats) {
stat.append(otherStat);
}
- assertThat(stat.getMaxDoc(), equalTo(4l));
- assertThat(stat.getDocCount(), equalTo(4l));
- assertThat(stat.getSumDocFreq(), equalTo(4l));
- assertThat(stat.getSumTotalTermFreq(), equalTo(4l));
+ assertThat(stat.getMaxDoc(), equalTo(4L));
+ assertThat(stat.getDocCount(), equalTo(4L));
+ assertThat(stat.getSumDocFreq(), equalTo(4L));
+ assertThat(stat.getSumTotalTermFreq(), equalTo(4L));
}
public void testMerge_notAvailable() {
List<FieldStats> stats = new ArrayList<>();
- stats.add(new FieldStats.Long(1, 1l, 1l, 1l, 1l, 1l));
- stats.add(new FieldStats.Long(1, 1l, 1l, 1l, 1l, 1l));
- stats.add(new FieldStats.Long(1, 1l, 1l, 1l, 1l, 1l));
+ stats.add(new FieldStats.Long(1, 1L, 1L, 1L, 1L, 1L));
+ stats.add(new FieldStats.Long(1, 1L, 1L, 1L, 1L, 1L));
+ stats.add(new FieldStats.Long(1, 1L, 1L, 1L, 1L, 1L));
- FieldStats stat = new FieldStats.Long(1, -1l, -1l, -1l, 1l, 1l);
+ FieldStats stat = new FieldStats.Long(1, -1L, -1L, -1L, 1L, 1L);
for (FieldStats otherStat : stats) {
stat.append(otherStat);
}
- assertThat(stat.getMaxDoc(), equalTo(4l));
- assertThat(stat.getDocCount(), equalTo(-1l));
- assertThat(stat.getSumDocFreq(), equalTo(-1l));
- assertThat(stat.getSumTotalTermFreq(), equalTo(-1l));
+ assertThat(stat.getMaxDoc(), equalTo(4L));
+ assertThat(stat.getDocCount(), equalTo(-1L));
+ assertThat(stat.getSumDocFreq(), equalTo(-1L));
+ assertThat(stat.getSumTotalTermFreq(), equalTo(-1L));
- stats.add(new FieldStats.Long(1, -1l, -1l, -1l, 1l, 1l));
+ stats.add(new FieldStats.Long(1, -1L, -1L, -1L, 1L, 1L));
stat = stats.remove(0);
for (FieldStats otherStat : stats) {
stat.append(otherStat);
}
- assertThat(stat.getMaxDoc(), equalTo(4l));
- assertThat(stat.getDocCount(), equalTo(-1l));
- assertThat(stat.getSumDocFreq(), equalTo(-1l));
- assertThat(stat.getSumTotalTermFreq(), equalTo(-1l));
+ assertThat(stat.getMaxDoc(), equalTo(4L));
+ assertThat(stat.getDocCount(), equalTo(-1L));
+ assertThat(stat.getSumDocFreq(), equalTo(-1L));
+ assertThat(stat.getSumTotalTermFreq(), equalTo(-1L));
}
public void testInvalidField() {
@@ -213,9 +213,9 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
public void testNumberFiltering() {
createIndex("test1", Settings.EMPTY, "type", "value", "type=long");
- client().prepareIndex("test1", "test").setSource("value", 1l).get();
+ client().prepareIndex("test1", "test").setSource("value", 1L).get();
createIndex("test2", Settings.EMPTY, "type", "value", "type=long");
- client().prepareIndex("test2", "test").setSource("value", 3l).get();
+ client().prepareIndex("test2", "test").setSource("value", 3L).get();
client().admin().indices().prepareRefresh().get();
FieldStatsResponse response = client().prepareFieldStats()
@@ -223,8 +223,8 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
.setLevel("indices")
.get();
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
- assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(1l));
- assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(3l));
+ assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(1L));
+ assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(3L));
response = client().prepareFieldStats()
.setFields("value")
@@ -246,7 +246,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
.setLevel("indices")
.get();
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
- assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(1l));
+ assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(1L));
response = client().prepareFieldStats()
.setFields("value")
@@ -254,7 +254,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
.setLevel("indices")
.get();
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
- assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(1l));
+ assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(1L));
response = client().prepareFieldStats()
.setFields("value")
@@ -269,7 +269,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
.setLevel("indices")
.get();
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
- assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(3l));
+ assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(3L));
response = client().prepareFieldStats()
.setFields("value")
@@ -277,7 +277,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
.setLevel("indices")
.get();
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
- assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(3l));
+ assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(3L));
response = client().prepareFieldStats()
.setFields("value")
@@ -292,8 +292,8 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
.setLevel("indices")
.get();
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
- assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(1l));
- assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(3l));
+ assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(1L));
+ assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(3L));
response = client().prepareFieldStats()
.setFields("value")
diff --git a/core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java b/core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java
index e81db454e0..c6431e6fcf 100644
--- a/core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java
+++ b/core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java
@@ -243,7 +243,7 @@ public class AsyncShardFetchTests extends ESTestCase {
private AtomicInteger reroute = new AtomicInteger();
public TestFetch(ThreadPool threadPool) {
- super(Loggers.getLogger(TestFetch.class), "test", new ShardId("test", 1), null);
+ super(Loggers.getLogger(TestFetch.class), "test", new ShardId("test", "_na_", 1), null);
this.threadPool = threadPool;
}
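
The AsyncShardFetchTests hunk reflects a constructor change: ShardId now takes an index UUID between the index name and the shard number. Tests that never create real index metadata pass the "_na_" placeholder, as the updated call site does. A minimal sketch of the new form, assuming only the three-argument constructor visible in this diff:

    import org.elasticsearch.index.shard.ShardId;

    public class ShardIdExample {
        public static void main(String[] args) {
            // "_na_" stands in for the index UUID when the test has no real index metadata.
            ShardId shardId = new ShardId("test", "_na_", 1);
            System.out.println(shardId);
        }
    }
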
diff --git a/core/src/test/java/org/elasticsearch/gateway/DanglingIndicesStateTests.java b/core/src/test/java/org/elasticsearch/gateway/DanglingIndicesStateTests.java
index 6b28b7f789..95c52f8993 100644
--- a/core/src/test/java/org/elasticsearch/gateway/DanglingIndicesStateTests.java
+++ b/core/src/test/java/org/elasticsearch/gateway/DanglingIndicesStateTests.java
@@ -107,7 +107,7 @@ public class DanglingIndicesStateTests extends ESTestCase {
IndexMetaData dangledIndex = IndexMetaData.builder("test1").settings(indexSettings).build();
metaStateService.writeIndex("test_write", dangledIndex, null);
- for (Path path : env.indexPaths(new Index("test1"))) {
+ for (Path path : env.indexPaths("test1")) {
Files.move(path, path.getParent().resolve("test1_renamed"));
}
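
DanglingIndicesStateTests (and MetaDataWriteDataNodesIT further down) switch NodeEnvironment.indexPaths from taking an Index object to taking the plain index name. A hedged sketch of the updated usage, mirroring the helper in MetaDataWriteDataNodesIT:

    import java.nio.file.Files;
    import java.nio.file.Path;
    import org.elasticsearch.env.NodeEnvironment;

    public class IndexPathsExample {
        // Sketch: check the on-disk paths for an index by name rather than via an Index object.
        static boolean indexDirectoryExists(NodeEnvironment env, String indexName) {
            for (Path path : env.indexPaths(indexName)) {
                if (Files.exists(path)) {
                    return true;
                }
            }
            return false;
        }
    }
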
diff --git a/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java b/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java
index c804239c69..52c8ed2d40 100644
--- a/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java
+++ b/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java
@@ -32,6 +32,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.indices.IndexClosedException;
+import org.elasticsearch.node.Node;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
@@ -180,7 +181,7 @@ public class GatewayIndexStateIT extends ESIntegTestCase {
logger.info("--> cleaning nodes");
logger.info("--> starting 1 master node non data");
- internalCluster().startNode(settingsBuilder().put("node.data", false).build());
+ internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).build());
logger.info("--> create an index");
client().admin().indices().prepareCreate("test").execute().actionGet();
@@ -189,7 +190,7 @@ public class GatewayIndexStateIT extends ESIntegTestCase {
internalCluster().closeNonSharedNodes(false);
logger.info("--> starting 1 master node non data again");
- internalCluster().startNode(settingsBuilder().put("node.data", false).build());
+ internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).build());
logger.info("--> waiting for test index to be created");
ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setIndices("test").execute().actionGet();
@@ -204,8 +205,8 @@ public class GatewayIndexStateIT extends ESIntegTestCase {
logger.info("--> cleaning nodes");
logger.info("--> starting 1 master node non data");
- internalCluster().startNode(settingsBuilder().put("node.data", false).build());
- internalCluster().startNode(settingsBuilder().put("node.master", false).build());
+ internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).build());
+ internalCluster().startNode(settingsBuilder().put(Node.NODE_MASTER_SETTING.getKey(), false).build());
logger.info("--> create an index");
client().admin().indices().prepareCreate("test").execute().actionGet();
@@ -231,7 +232,7 @@ public class GatewayIndexStateIT extends ESIntegTestCase {
logger.info("--> verify 1 doc in the index");
for (int i = 0; i < 10; i++) {
- assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1l);
+ assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L);
}
logger.info("--> closing test index...");
@@ -249,9 +250,9 @@ public class GatewayIndexStateIT extends ESIntegTestCase {
assertThat(health.isTimedOut(), equalTo(false));
logger.info("--> verify 1 doc in the index");
- assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1l);
+ assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L);
for (int i = 0; i < 10; i++) {
- assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1l);
+ assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L);
}
}
@@ -267,7 +268,7 @@ public class GatewayIndexStateIT extends ESIntegTestCase {
logger.info("--> verify 1 doc in the index");
for (int i = 0; i < 10; i++) {
- assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1l);
+ assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L);
}
assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(true));
@@ -327,7 +328,7 @@ public class GatewayIndexStateIT extends ESIntegTestCase {
logger.info("--> verify 1 doc in the index");
for (int i = 0; i < 10; i++) {
- assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1l);
+ assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L);
}
assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(true));
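
GatewayIndexStateIT replaces raw setting strings such as "node.data" with the keys of the registered Setting constants. The constant supplies the canonical key, so a typo fails at compile time instead of silently configuring nothing. A minimal sketch using only the constants imported in this hunk:

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.node.Node;

    public class NodeSettingsExample {
        public static void main(String[] args) {
            // Build node settings from typed Setting constants rather than string literals.
            Settings settings = Settings.settingsBuilder()
                    .put(Node.NODE_DATA_SETTING.getKey(), false)
                    .put(Node.NODE_MASTER_SETTING.getKey(), true)
                    .build();
            System.out.println(settings.get(Node.NODE_DATA_SETTING.getKey())); // prints: false
        }
    }
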
diff --git a/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java b/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java
index 0de220a8fa..422aea7013 100644
--- a/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java
+++ b/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java
@@ -183,7 +183,7 @@ public class GatewayMetaStateTests extends ESAllocationTestCase {
if (expectMetaData) {
assertThat(indices.hasNext(), equalTo(true));
- assertThat(indices.next().getNewMetaData().getIndex(), equalTo("test"));
+ assertThat(indices.next().getNewMetaData().getIndex().getName(), equalTo("test"));
assertThat(indices.hasNext(), equalTo(false));
} else {
assertThat(indices.hasNext(), equalTo(false));
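
The GatewayMetaStateTests assertion changes because getIndex() now returns an Index object rather than a String, so name comparisons go through getIndex().getName(). A sketch under the assumption that Index has a two-argument constructor (name plus UUID) matching the ShardId change above:

    import org.elasticsearch.index.Index;

    public class IndexNameExample {
        public static void main(String[] args) {
            Index index = new Index("test", "_na_"); // assumed ctor: name + UUID placeholder
            // Compare by name, not by the Index object itself.
            System.out.println(index.getName().equals("test")); // prints: true
        }
    }
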
diff --git a/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java b/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java
index 98b62dc18b..36540355e4 100644
--- a/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java
+++ b/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java
@@ -215,7 +215,7 @@ public class MetaDataStateFormatTests extends ESTestCase {
long checksumAfterCorruption;
long actualChecksumAfterCorruption;
try (ChecksumIndexInput input = dir.openChecksumInput(fileToCorrupt.getFileName().toString(), IOContext.DEFAULT)) {
- assertThat(input.getFilePointer(), is(0l));
+ assertThat(input.getFilePointer(), is(0L));
input.seek(input.length() - 8); // one long is the checksum... 8 bytes
checksumAfterCorruption = input.getChecksum();
actualChecksumAfterCorruption = input.readLong();
@@ -357,7 +357,7 @@ public class MetaDataStateFormatTests extends ESTestCase {
ImmutableOpenMap<String,IndexMetaData> indices = loadedMetaData.indices();
assertThat(indices.size(), equalTo(latestMetaData.indices().size()));
for (IndexMetaData original : latestMetaData) {
- IndexMetaData deserialized = indices.get(original.getIndex());
+ IndexMetaData deserialized = indices.get(original.getIndex().getName());
assertThat(deserialized, notNullValue());
assertThat(deserialized.getVersion(), equalTo(original.getVersion()));
assertThat(deserialized.getNumberOfReplicas(), equalTo(original.getNumberOfReplicas()));
diff --git a/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java b/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java
index d2f7bb888c..bada7faa8c 100644
--- a/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java
+++ b/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java
@@ -170,7 +170,7 @@ public class MetaDataWriteDataNodesIT extends ESIntegTestCase {
private boolean indexDirectoryExists(String nodeName, String indexName) {
NodeEnvironment nodeEnv = ((InternalTestCluster) cluster()).getInstance(NodeEnvironment.class, nodeName);
- for (Path path : nodeEnv.indexPaths(new Index(indexName))) {
+ for (Path path : nodeEnv.indexPaths(indexName)) {
if (Files.exists(path)) {
return true;
}
diff --git a/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java b/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java
index 44c1fae649..a61354458c 100644
--- a/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java
+++ b/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java
@@ -35,15 +35,15 @@ import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;
import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESAllocationTestCase;
import org.junit.Before;
-import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
-import java.util.HashSet;
import java.util.Map;
import static org.hamcrest.Matchers.anyOf;
@@ -53,7 +53,7 @@ import static org.hamcrest.Matchers.equalTo;
*/
public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
- private final ShardId shardId = new ShardId("test", 0);
+ private final ShardId shardId = new ShardId("test", "_na_", 0);
private final DiscoveryNode node1 = newNode("node1");
private final DiscoveryNode node2 = newNode("node2");
private final DiscoveryNode node3 = newNode("node3");
@@ -104,7 +104,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
} else {
allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.V_2_1_0);
}
- testAllocator.addData(node1, -1, null);
+ testAllocator.addData(node1, -1, null, randomBoolean());
boolean changed = testAllocator.allocateUnassigned(allocation);
assertThat(changed, equalTo(false));
assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1));
@@ -116,7 +116,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
*/
public void testNoMatchingAllocationIdFound() {
RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.CURRENT, "id2");
- testAllocator.addData(node1, 1, "id1");
+ testAllocator.addData(node1, 1, "id1", randomBoolean());
boolean changed = testAllocator.allocateUnassigned(allocation);
assertThat(changed, equalTo(false));
assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1));
@@ -129,7 +129,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
*/
public void testNoActiveAllocationIds() {
RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.V_2_1_1);
- testAllocator.addData(node1, 1, null);
+ testAllocator.addData(node1, 1, null, randomBoolean());
boolean changed = testAllocator.allocateUnassigned(allocation);
assertThat(changed, equalTo(true));
assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true));
@@ -144,10 +144,10 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
final RoutingAllocation allocation;
if (randomBoolean()) {
allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, randomFrom(Version.V_2_0_0, Version.CURRENT), "allocId1");
- testAllocator.addData(node1, 1, "allocId1", new CorruptIndexException("test", "test"));
+ testAllocator.addData(node1, 1, "allocId1", randomBoolean(), new CorruptIndexException("test", "test"));
} else {
allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.V_2_1_1);
- testAllocator.addData(node1, 3, null, new CorruptIndexException("test", "test"));
+ testAllocator.addData(node1, 3, null, randomBoolean(), new CorruptIndexException("test", "test"));
}
boolean changed = testAllocator.allocateUnassigned(allocation);
assertThat(changed, equalTo(false));
@@ -162,10 +162,10 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
final RoutingAllocation allocation;
if (randomBoolean()) {
allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, randomFrom(Version.V_2_0_0, Version.CURRENT), "allocId1");
- testAllocator.addData(node1, 1, "allocId1");
+ testAllocator.addData(node1, 1, "allocId1", randomBoolean());
} else {
allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.V_2_2_0);
- testAllocator.addData(node1, 3, null);
+ testAllocator.addData(node1, 3, null, randomBoolean());
}
boolean changed = testAllocator.allocateUnassigned(allocation);
assertThat(changed, equalTo(true));
@@ -175,6 +175,24 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
}
/**
+ * Tests that when a node previously held the primary shard, the shard is allocated back to that same node.
+ */
+ public void testPreferAllocatingPreviousPrimary() {
+ String primaryAllocId = Strings.randomBase64UUID();
+ String replicaAllocId = Strings.randomBase64UUID();
+ RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, randomFrom(Version.V_2_0_0, Version.CURRENT), primaryAllocId, replicaAllocId);
+ boolean node1HasPrimaryShard = randomBoolean();
+ testAllocator.addData(node1, 1, node1HasPrimaryShard ? primaryAllocId : replicaAllocId, node1HasPrimaryShard);
+ testAllocator.addData(node2, 1, node1HasPrimaryShard ? replicaAllocId : primaryAllocId, !node1HasPrimaryShard);
+ boolean changed = testAllocator.allocateUnassigned(allocation);
+ assertThat(changed, equalTo(true));
+ assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true));
+ assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1));
+ DiscoveryNode allocatedNode = node1HasPrimaryShard ? node1 : node2;
+ assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo(allocatedNode.id()));
+ }
+
+ /**
* Tests that when there is a node to allocate to, but it is throttling (and it is the only one),
* it will be moved to ignore unassigned until it can be allocated to.
*/
@@ -182,10 +200,10 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
final RoutingAllocation allocation;
if (randomBoolean()) {
allocation = routingAllocationWithOnePrimaryNoReplicas(throttleAllocationDeciders(), false, randomFrom(Version.V_2_0_0, Version.CURRENT), "allocId1");
- testAllocator.addData(node1, 1, "allocId1");
+ testAllocator.addData(node1, 1, "allocId1", randomBoolean());
} else {
allocation = routingAllocationWithOnePrimaryNoReplicas(throttleAllocationDeciders(), false, Version.V_2_2_0);
- testAllocator.addData(node1, 3, null);
+ testAllocator.addData(node1, 3, null, randomBoolean());
}
boolean changed = testAllocator.allocateUnassigned(allocation);
assertThat(changed, equalTo(false));
@@ -201,10 +219,10 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
final RoutingAllocation allocation;
if (randomBoolean()) {
allocation = routingAllocationWithOnePrimaryNoReplicas(noAllocationDeciders(), false, randomFrom(Version.V_2_0_0, Version.CURRENT), "allocId1");
- testAllocator.addData(node1, 1, "allocId1");
+ testAllocator.addData(node1, 1, "allocId1", randomBoolean());
} else {
allocation = routingAllocationWithOnePrimaryNoReplicas(noAllocationDeciders(), false, Version.V_2_0_0);
- testAllocator.addData(node1, 3, null);
+ testAllocator.addData(node1, 3, null, randomBoolean());
}
boolean changed = testAllocator.allocateUnassigned(allocation);
assertThat(changed, equalTo(true));
@@ -218,7 +236,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
*/
public void testAllocateToTheHighestVersionOnLegacyIndex() {
RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.V_2_0_0);
- testAllocator.addData(node1, 10, null).addData(node2, 12, null);
+ testAllocator.addData(node1, 10, null, randomBoolean()).addData(node2, 12, null, randomBoolean());
boolean changed = testAllocator.allocateUnassigned(allocation);
assertThat(changed, equalTo(true));
assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true));
@@ -232,7 +250,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
*/
public void testRestore() {
RoutingAllocation allocation = getRestoreRoutingAllocation(yesAllocationDeciders());
- testAllocator.addData(node1, 1, randomFrom(null, "allocId"));
+ testAllocator.addData(node1, 1, randomFrom(null, "allocId"), randomBoolean());
boolean changed = testAllocator.allocateUnassigned(allocation);
assertThat(changed, equalTo(true));
assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true));
@@ -245,7 +263,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
*/
public void testRestoreThrottle() {
RoutingAllocation allocation = getRestoreRoutingAllocation(throttleAllocationDeciders());
- testAllocator.addData(node1, 1, randomFrom(null, "allocId"));
+ testAllocator.addData(node1, 1, randomFrom(null, "allocId"), randomBoolean());
boolean changed = testAllocator.allocateUnassigned(allocation);
assertThat(changed, equalTo(false));
assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(false));
@@ -257,7 +275,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
*/
public void testRestoreForcesAllocateIfShardAvailable() {
RoutingAllocation allocation = getRestoreRoutingAllocation(noAllocationDeciders());
- testAllocator.addData(node1, 1, randomFrom(null, "some allocId"));
+ testAllocator.addData(node1, 1, randomFrom(null, "some allocId"), randomBoolean());
boolean changed = testAllocator.allocateUnassigned(allocation);
assertThat(changed, equalTo(true));
assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true));
@@ -270,7 +288,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
*/
public void testRestoreDoesNotAssignIfNoShardAvailable() {
RoutingAllocation allocation = getRestoreRoutingAllocation(yesAllocationDeciders());
- testAllocator.addData(node1, -1, null);
+ testAllocator.addData(node1, -1, null, false);
boolean changed = testAllocator.allocateUnassigned(allocation);
assertThat(changed, equalTo(false));
assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true));
@@ -280,12 +298,12 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
private RoutingAllocation getRestoreRoutingAllocation(AllocationDeciders allocationDeciders) {
Version version = randomFrom(Version.CURRENT, Version.V_2_0_0);
MetaData metaData = MetaData.builder()
- .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(version)).numberOfShards(1).numberOfReplicas(0)
- .putActiveAllocationIds(0, version == Version.CURRENT ? new HashSet<>(Arrays.asList("allocId")) : Collections.emptySet()))
+ .put(IndexMetaData.builder(shardId.getIndexName()).settings(settings(version)).numberOfShards(1).numberOfReplicas(0)
+ .putActiveAllocationIds(0, version == Version.CURRENT ? Sets.newHashSet("allocId") : Collections.emptySet()))
.build();
RoutingTable routingTable = RoutingTable.builder()
- .addAsRestore(metaData.index(shardId.getIndex()), new RestoreSource(new SnapshotId("test", "test"), version, shardId.getIndex()))
+ .addAsRestore(metaData.index(shardId.getIndex()), new RestoreSource(new SnapshotId("test", "test"), version, shardId.getIndexName()))
.build();
ClusterState state = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT)
.metaData(metaData)
@@ -300,7 +318,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
*/
public void testRecoverOnAnyNode() {
RoutingAllocation allocation = getRecoverOnAnyNodeRoutingAllocation(yesAllocationDeciders());
- testAllocator.addData(node1, 1, randomFrom(null, "allocId"));
+ testAllocator.addData(node1, 1, randomFrom(null, "allocId"), randomBoolean());
boolean changed = testAllocator.allocateUnassigned(allocation);
assertThat(changed, equalTo(true));
assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true));
@@ -313,7 +331,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
*/
public void testRecoverOnAnyNodeThrottle() {
RoutingAllocation allocation = getRestoreRoutingAllocation(throttleAllocationDeciders());
- testAllocator.addData(node1, 1, randomFrom(null, "allocId"));
+ testAllocator.addData(node1, 1, randomFrom(null, "allocId"), randomBoolean());
boolean changed = testAllocator.allocateUnassigned(allocation);
assertThat(changed, equalTo(false));
assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(false));
@@ -325,7 +343,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
*/
public void testRecoverOnAnyNodeForcesAllocateIfShardAvailable() {
RoutingAllocation allocation = getRecoverOnAnyNodeRoutingAllocation(noAllocationDeciders());
- testAllocator.addData(node1, 1, randomFrom(null, "allocId"));
+ testAllocator.addData(node1, 1, randomFrom(null, "allocId"), randomBoolean());
boolean changed = testAllocator.allocateUnassigned(allocation);
assertThat(changed, equalTo(true));
assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true));
@@ -338,7 +356,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
*/
public void testRecoverOnAnyNodeDoesNotAssignIfNoShardAvailable() {
RoutingAllocation allocation = getRecoverOnAnyNodeRoutingAllocation(yesAllocationDeciders());
- testAllocator.addData(node1, -1, null);
+ testAllocator.addData(node1, -1, null, randomBoolean());
boolean changed = testAllocator.allocateUnassigned(allocation);
assertThat(changed, equalTo(false));
assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true));
@@ -348,14 +366,14 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
private RoutingAllocation getRecoverOnAnyNodeRoutingAllocation(AllocationDeciders allocationDeciders) {
Version version = randomFrom(Version.CURRENT, Version.V_2_0_0);
MetaData metaData = MetaData.builder()
- .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(version)
+ .put(IndexMetaData.builder(shardId.getIndexName()).settings(settings(version)
.put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true)
.put(IndexMetaData.SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, true))
- .numberOfShards(1).numberOfReplicas(0).putActiveAllocationIds(0, version == Version.CURRENT ? new HashSet<>(Arrays.asList("allocId")) : Collections.emptySet()))
+ .numberOfShards(1).numberOfReplicas(0).putActiveAllocationIds(0, version == Version.CURRENT ? Sets.newHashSet("allocId") : Collections.emptySet()))
.build();
RoutingTable routingTable = RoutingTable.builder()
- .addAsRestore(metaData.index(shardId.getIndex()), new RestoreSource(new SnapshotId("test", "test"), Version.CURRENT, shardId.getIndex()))
+ .addAsRestore(metaData.index(shardId.getIndex()), new RestoreSource(new SnapshotId("test", "test"), Version.CURRENT, shardId.getIndexName()))
.build();
ClusterState state = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT)
.metaData(metaData)
@@ -370,7 +388,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
*/
public void testEnoughCopiesFoundForAllocationOnLegacyIndex() {
MetaData metaData = MetaData.builder()
- .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.V_2_0_0)).numberOfShards(1).numberOfReplicas(2))
+ .put(IndexMetaData.builder(shardId.getIndexName()).settings(settings(Version.V_2_0_0)).numberOfShards(1).numberOfReplicas(2))
.build();
RoutingTable routingTable = RoutingTable.builder()
.addAsRecovery(metaData.index(shardId.getIndex()))
@@ -387,7 +405,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId));
assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas
- testAllocator.addData(node1, 1, null);
+ testAllocator.addData(node1, 1, null, randomBoolean());
allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state.nodes(), null, System.nanoTime());
changed = testAllocator.allocateUnassigned(allocation);
assertThat(changed, equalTo(false));
@@ -395,7 +413,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId));
assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas
- testAllocator.addData(node2, 1, null);
+ testAllocator.addData(node2, 1, null, randomBoolean());
allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state.nodes(), null, System.nanoTime());
changed = testAllocator.allocateUnassigned(allocation);
assertThat(changed, equalTo(true));
@@ -411,7 +429,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
*/
public void testEnoughCopiesFoundForAllocationOnLegacyIndexWithDifferentVersion() {
MetaData metaData = MetaData.builder()
- .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.V_2_0_0)).numberOfShards(1).numberOfReplicas(2))
+ .put(IndexMetaData.builder(shardId.getIndexName()).settings(settings(Version.V_2_0_0)).numberOfShards(1).numberOfReplicas(2))
.build();
RoutingTable routingTable = RoutingTable.builder()
.addAsRecovery(metaData.index(shardId.getIndex()))
@@ -428,7 +446,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId));
assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas
- testAllocator.addData(node1, 1, null);
+ testAllocator.addData(node1, 1, null, randomBoolean());
allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state.nodes(), null, System.nanoTime());
changed = testAllocator.allocateUnassigned(allocation);
assertThat(changed, equalTo(false));
@@ -436,7 +454,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId));
assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas
- testAllocator.addData(node2, 2, null);
+ testAllocator.addData(node2, 2, null, randomBoolean());
allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state.nodes(), null, System.nanoTime());
changed = testAllocator.allocateUnassigned(allocation);
assertThat(changed, equalTo(true));
@@ -448,8 +466,8 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
private RoutingAllocation routingAllocationWithOnePrimaryNoReplicas(AllocationDeciders deciders, boolean asNew, Version version, String... activeAllocationIds) {
MetaData metaData = MetaData.builder()
- .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(version))
- .numberOfShards(1).numberOfReplicas(0).putActiveAllocationIds(0, new HashSet<>(Arrays.asList(activeAllocationIds))))
+ .put(IndexMetaData.builder(shardId.getIndexName()).settings(settings(version))
+ .numberOfShards(1).numberOfReplicas(0).putActiveAllocationIds(0, Sets.newHashSet(activeAllocationIds)))
.build();
RoutingTable.Builder routingTableBuilder = RoutingTable.builder();
if (asNew) {
@@ -477,15 +495,15 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase {
return this;
}
- public TestAllocator addData(DiscoveryNode node, long version, String allocationId) {
- return addData(node, version, allocationId, null);
+ public TestAllocator addData(DiscoveryNode node, long version, String allocationId, boolean primary) {
+ return addData(node, version, allocationId, primary, null);
}
- public TestAllocator addData(DiscoveryNode node, long version, String allocationId, @Nullable Throwable storeException) {
+ public TestAllocator addData(DiscoveryNode node, long version, String allocationId, boolean primary, @Nullable Throwable storeException) {
if (data == null) {
data = new HashMap<>();
}
- data.put(node, new TransportNodesListGatewayStartedShards.NodeGatewayStartedShards(node, version, allocationId, storeException));
+ data.put(node, new TransportNodesListGatewayStartedShards.NodeGatewayStartedShards(node, version, allocationId, primary, storeException));
return this;
}
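
The PrimaryShardAllocatorTests changes thread a new boolean through the TestAllocator helper: each node now reports whether it held the primary copy, and the new testPreferAllocatingPreviousPrimary asserts that the allocator sends the shard back to that node. A hedged sketch of the updated helper usage, with hypothetical allocation-id strings and the node1/node2 fixtures from the test:

    // Each node reports (version, allocationId, primary); the allocator should
    // prefer node1, which last held the primary copy of the shard.
    testAllocator
        .addData(node1, 1, "primary-alloc-id", true)
        .addData(node2, 1, "replica-alloc-id", false);
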
diff --git a/core/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java b/core/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java
index 3b7e62216c..4231f1215f 100644
--- a/core/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java
+++ b/core/src/test/java/org/elasticsearch/gateway/PriorityComparatorTests.java
@@ -60,9 +60,9 @@ public class PriorityComparatorTests extends ESTestCase {
});
RoutingNodes.UnassignedShards.UnassignedIterator iterator = shards.iterator();
ShardRouting next = iterator.next();
- assertEquals("newest", next.index());
+ assertEquals("newest", next.getIndexName());
next = iterator.next();
- assertEquals("oldest", next.index());
+ assertEquals("oldest", next.getIndexName());
assertFalse(iterator.hasNext());
}
@@ -90,9 +90,9 @@ public class PriorityComparatorTests extends ESTestCase {
});
RoutingNodes.UnassignedShards.UnassignedIterator iterator = shards.iterator();
ShardRouting next = iterator.next();
- assertEquals("oldest", next.index());
+ assertEquals("oldest", next.getIndexName());
next = iterator.next();
- assertEquals("newest", next.index());
+ assertEquals("newest", next.getIndexName());
assertFalse(iterator.hasNext());
}
@@ -126,8 +126,8 @@ public class PriorityComparatorTests extends ESTestCase {
ShardRouting previous = null;
for (ShardRouting routing : shards) {
if (previous != null) {
- IndexMeta prevMeta = map.get(previous.getIndex());
- IndexMeta currentMeta = map.get(routing.getIndex());
+ IndexMeta prevMeta = map.get(previous.getIndexName());
+ IndexMeta currentMeta = map.get(routing.getIndexName());
if (prevMeta.priority == currentMeta.priority) {
if (prevMeta.creationDate == currentMeta.creationDate) {
if (prevMeta.name.equals(currentMeta.name) == false) {
diff --git a/core/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java b/core/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java
index a817b23949..399ef9bada 100644
--- a/core/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java
+++ b/core/src/test/java/org/elasticsearch/gateway/QuorumGatewayIT.java
@@ -67,7 +67,7 @@ public class QuorumGatewayIT extends ESIntegTestCase {
refresh();
for (int i = 0; i < 10; i++) {
- assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 2l);
+ assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 2L);
}
logger.info("--> restart all nodes");
internalCluster().fullRestart(new RestartCallback() {
@@ -89,7 +89,7 @@ public class QuorumGatewayIT extends ESIntegTestCase {
activeClient.prepareIndex("test", "type1", "3").setSource(jsonBuilder().startObject().field("field", "value3").endObject()).get();
assertNoFailures(activeClient.admin().indices().prepareRefresh().get());
for (int i = 0; i < 10; i++) {
- assertHitCount(activeClient.prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 3l);
+ assertHitCount(activeClient.prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 3L);
}
}
}
@@ -100,7 +100,7 @@ public class QuorumGatewayIT extends ESIntegTestCase {
ensureGreen();
for (int i = 0; i < 10; i++) {
- assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 3l);
+ assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 3L);
}
}
}
diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoverAfterNodesIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoverAfterNodesIT.java
index 3dd6597a6e..59f7dd2464 100644
--- a/core/src/test/java/org/elasticsearch/gateway/RecoverAfterNodesIT.java
+++ b/core/src/test/java/org/elasticsearch/gateway/RecoverAfterNodesIT.java
@@ -24,6 +24,7 @@ import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.node.Node;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
@@ -81,13 +82,13 @@ public class RecoverAfterNodesIT extends ESIntegTestCase {
public void testRecoverAfterMasterNodes() throws Exception {
logger.info("--> start master_node (1)");
- Client master1 = startNode(settingsBuilder().put("gateway.recover_after_master_nodes", 2).put("node.data", false).put("node.master", true));
+ Client master1 = startNode(settingsBuilder().put("gateway.recover_after_master_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true));
assertThat(master1.admin().cluster().prepareState().setLocal(true).execute().actionGet()
.getState().blocks().global(ClusterBlockLevel.METADATA_WRITE),
hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));
logger.info("--> start data_node (1)");
- Client data1 = startNode(settingsBuilder().put("gateway.recover_after_master_nodes", 2).put("node.data", true).put("node.master", false));
+ Client data1 = startNode(settingsBuilder().put("gateway.recover_after_master_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false));
assertThat(master1.admin().cluster().prepareState().setLocal(true).execute().actionGet()
.getState().blocks().global(ClusterBlockLevel.METADATA_WRITE),
hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));
@@ -96,7 +97,7 @@ public class RecoverAfterNodesIT extends ESIntegTestCase {
hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));
logger.info("--> start data_node (2)");
- Client data2 = startNode(settingsBuilder().put("gateway.recover_after_master_nodes", 2).put("node.data", true).put("node.master", false));
+ Client data2 = startNode(settingsBuilder().put("gateway.recover_after_master_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false));
assertThat(master1.admin().cluster().prepareState().setLocal(true).execute().actionGet()
.getState().blocks().global(ClusterBlockLevel.METADATA_WRITE),
hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));
@@ -108,7 +109,7 @@ public class RecoverAfterNodesIT extends ESIntegTestCase {
hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));
logger.info("--> start master_node (2)");
- Client master2 = startNode(settingsBuilder().put("gateway.recover_after_master_nodes", 2).put("node.data", false).put("node.master", true));
+ Client master2 = startNode(settingsBuilder().put("gateway.recover_after_master_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true));
assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, master1).isEmpty(), equalTo(true));
assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, master2).isEmpty(), equalTo(true));
assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, data1).isEmpty(), equalTo(true));
@@ -117,13 +118,13 @@ public class RecoverAfterNodesIT extends ESIntegTestCase {
public void testRecoverAfterDataNodes() throws Exception {
logger.info("--> start master_node (1)");
- Client master1 = startNode(settingsBuilder().put("gateway.recover_after_data_nodes", 2).put("node.data", false).put("node.master", true));
+ Client master1 = startNode(settingsBuilder().put("gateway.recover_after_data_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true));
assertThat(master1.admin().cluster().prepareState().setLocal(true).execute().actionGet()
.getState().blocks().global(ClusterBlockLevel.METADATA_WRITE),
hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));
logger.info("--> start data_node (1)");
- Client data1 = startNode(settingsBuilder().put("gateway.recover_after_data_nodes", 2).put("node.data", true).put("node.master", false));
+ Client data1 = startNode(settingsBuilder().put("gateway.recover_after_data_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false));
assertThat(master1.admin().cluster().prepareState().setLocal(true).execute().actionGet()
.getState().blocks().global(ClusterBlockLevel.METADATA_WRITE),
hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));
@@ -132,7 +133,7 @@ public class RecoverAfterNodesIT extends ESIntegTestCase {
hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));
logger.info("--> start master_node (2)");
- Client master2 = startNode(settingsBuilder().put("gateway.recover_after_data_nodes", 2).put("node.data", false).put("node.master", true));
+ Client master2 = startNode(settingsBuilder().put("gateway.recover_after_data_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true));
assertThat(master2.admin().cluster().prepareState().setLocal(true).execute().actionGet()
.getState().blocks().global(ClusterBlockLevel.METADATA_WRITE),
hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));
@@ -144,7 +145,7 @@ public class RecoverAfterNodesIT extends ESIntegTestCase {
hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));
logger.info("--> start data_node (2)");
- Client data2 = startNode(settingsBuilder().put("gateway.recover_after_data_nodes", 2).put("node.data", true).put("node.master", false));
+ Client data2 = startNode(settingsBuilder().put("gateway.recover_after_data_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false));
assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, master1).isEmpty(), equalTo(true));
assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, master2).isEmpty(), equalTo(true));
assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, data1).isEmpty(), equalTo(true));
diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java
index 87a10625c5..2fce6e44c1 100644
--- a/core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java
+++ b/core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java
@@ -98,8 +98,8 @@ public class RecoveryBackwardsCompatibilityIT extends ESBackcompatTestCase {
final String recoverStateAsJSON = XContentHelper.toString(recoveryState, params);
if (!recoveryState.getPrimary()) {
RecoveryState.Index index = recoveryState.getIndex();
- assertThat(recoverStateAsJSON, index.recoveredBytes(), equalTo(0l));
- assertThat(recoverStateAsJSON, index.reusedBytes(), greaterThan(0l));
+ assertThat(recoverStateAsJSON, index.recoveredBytes(), equalTo(0L));
+ assertThat(recoverStateAsJSON, index.reusedBytes(), greaterThan(0L));
assertThat(recoverStateAsJSON, index.reusedBytes(), equalTo(index.totalBytes()));
assertThat(recoverStateAsJSON, index.recoveredFileCount(), equalTo(0));
assertThat(recoverStateAsJSON, index.reusedFileCount(), equalTo(index.totalFileCount()));
diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java
index d26f0fbf41..a08a0722a8 100644
--- a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java
+++ b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java
@@ -28,6 +28,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDeci
import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.indices.recovery.RecoveryState;
@@ -331,10 +332,10 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase {
public void testReusePeerRecovery() throws Exception {
final Settings settings = settingsBuilder()
.put("action.admin.cluster.node.shutdown.delay", "10ms")
- .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING, false)
+ .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false)
.put("gateway.recover_after_nodes", 4)
- .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING, 4)
- .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING, 4)
+ .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), 4)
+ .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 4)
.put(MockFSDirectoryService.CRASH_INDEX_SETTING.getKey(), false).build();
internalCluster().startNodesAsync(4, settings).get();
@@ -408,7 +409,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase {
recoveryState.getShardId().getId(), recoveryState.getSourceNode().name(), recoveryState.getTargetNode().name(),
recoveryState.getIndex().recoveredBytes(), recoveryState.getIndex().reusedBytes());
assertThat("no bytes should be recovered", recoveryState.getIndex().recoveredBytes(), equalTo(recovered));
- assertThat("data should have been reused", recoveryState.getIndex().reusedBytes(), greaterThan(0l));
+ assertThat("data should have been reused", recoveryState.getIndex().reusedBytes(), greaterThan(0L));
// we have to recover the segments file since we commit the translog ID on engine startup
assertThat("all bytes should be reused except of the segments file", recoveryState.getIndex().reusedBytes(), equalTo(recoveryState.getIndex().totalBytes() - recovered));
assertThat("no files should be recovered except of the segments file", recoveryState.getIndex().recoveredFileCount(), equalTo(1));
@@ -420,7 +421,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase {
recoveryState.getShardId().getId(), recoveryState.getSourceNode().name(), recoveryState.getTargetNode().name(),
recoveryState.getIndex().recoveredBytes(), recoveryState.getIndex().reusedBytes());
}
- assertThat(recoveryState.getIndex().recoveredBytes(), equalTo(0l));
+ assertThat(recoveryState.getIndex().recoveredBytes(), equalTo(0L));
assertThat(recoveryState.getIndex().reusedBytes(), equalTo(recoveryState.getIndex().totalBytes()));
assertThat(recoveryState.getIndex().recoveredFileCount(), equalTo(0));
assertThat(recoveryState.getIndex().reusedFileCount(), equalTo(recoveryState.getIndex().totalFileCount()));
@@ -438,11 +439,11 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase {
public void testRecoveryDifferentNodeOrderStartup() throws Exception {
// we need different data paths so we make sure we start the second node fresh
- final String node_1 = internalCluster().startNode(settingsBuilder().put("path.data", createTempDir()).build());
+ final String node_1 = internalCluster().startNode(settingsBuilder().put(Environment.PATH_DATA_SETTING.getKey(), createTempDir()).build());
client().prepareIndex("test", "type1", "1").setSource("field", "value").execute().actionGet();
- internalCluster().startNode(settingsBuilder().put("path.data", createTempDir()).build());
+ internalCluster().startNode(settingsBuilder().put(Environment.PATH_DATA_SETTING.getKey(), createTempDir()).build());
ensureGreen();
diff --git a/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java b/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java
index 87d83ed418..cbfc9d34d5 100644
--- a/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java
+++ b/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java
@@ -43,6 +43,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.store.Store;
@@ -51,11 +52,9 @@ import org.elasticsearch.indices.store.TransportNodesListShardStoreMetaData;
import org.elasticsearch.test.ESAllocationTestCase;
import org.junit.Before;
-import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
-import java.util.HashSet;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
@@ -65,7 +64,7 @@ import static org.hamcrest.Matchers.equalTo;
/**
*/
public class ReplicaShardAllocatorTests extends ESAllocationTestCase {
- private final ShardId shardId = new ShardId("test", 0);
+ private final ShardId shardId = new ShardId("test", "_na_", 0);
private final DiscoveryNode node1 = newNode("node1");
private final DiscoveryNode node2 = newNode("node2");
private final DiscoveryNode node3 = newNode("node3");
@@ -287,9 +286,9 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase {
private RoutingAllocation onePrimaryOnNode1And1Replica(AllocationDeciders deciders, Settings settings, UnassignedInfo.Reason reason) {
ShardRouting primaryShard = TestShardRouting.newShardRouting(shardId.getIndex(), shardId.getId(), node1.id(), true, ShardRoutingState.STARTED, 10);
MetaData metaData = MetaData.builder()
- .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.CURRENT).put(settings))
+ .put(IndexMetaData.builder(shardId.getIndexName()).settings(settings(Version.CURRENT).put(settings))
.numberOfShards(1).numberOfReplicas(1)
- .putActiveAllocationIds(0, new HashSet<>(Arrays.asList(primaryShard.allocationId().getId()))))
+ .putActiveAllocationIds(0, Sets.newHashSet(primaryShard.allocationId().getId())))
.build();
RoutingTable routingTable = RoutingTable.builder()
.add(IndexRoutingTable.builder(shardId.getIndex())
@@ -309,9 +308,9 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase {
private RoutingAllocation onePrimaryOnNode1And1ReplicaRecovering(AllocationDeciders deciders) {
ShardRouting primaryShard = TestShardRouting.newShardRouting(shardId.getIndex(), shardId.getId(), node1.id(), true, ShardRoutingState.STARTED, 10);
MetaData metaData = MetaData.builder()
- .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.CURRENT))
+ .put(IndexMetaData.builder(shardId.getIndexName()).settings(settings(Version.CURRENT))
.numberOfShards(1).numberOfReplicas(1)
- .putActiveAllocationIds(0, new HashSet<>(Arrays.asList(primaryShard.allocationId().getId()))))
+ .putActiveAllocationIds(0, Sets.newHashSet(primaryShard.allocationId().getId())))
.build();
RoutingTable routingTable = RoutingTable.builder()
.add(IndexRoutingTable.builder(shardId.getIndex())
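
ReplicaShardAllocatorTests (like PrimaryShardAllocatorTests above) drops new HashSet<>(Arrays.asList(...)) in favor of the Sets.newHashSet varargs helper imported in this hunk. Both produce an equal set; the helper is simply shorter. A standalone comparison:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;
    import org.elasticsearch.common.util.set.Sets;

    public class SetsExample {
        public static void main(String[] args) {
            Set<String> verbose = new HashSet<>(Arrays.asList("allocId"));
            Set<String> concise = Sets.newHashSet("allocId");
            System.out.println(verbose.equals(concise)); // prints: true
        }
    }
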
diff --git a/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java b/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java
index e9d6154f71..6f188ef428 100644
--- a/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java
+++ b/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java
@@ -127,7 +127,7 @@ public class ReusePeerRecoverySharedTest {
recoveryState.getSourceNode().name(), recoveryState.getTargetNode().name(),
recoveryState.getIndex().recoveredBytes(), recoveryState.getIndex().reusedBytes());
assertThat("no bytes should be recovered", recoveryState.getIndex().recoveredBytes(), equalTo(recovered));
- assertThat("data should have been reused", recoveryState.getIndex().reusedBytes(), greaterThan(0l));
+ assertThat("data should have been reused", recoveryState.getIndex().reusedBytes(), greaterThan(0L));
// we have to recover the segments file since we commit the translog ID on engine startup
assertThat("all bytes should be reused except of the segments file", recoveryState.getIndex().reusedBytes(),
equalTo(recoveryState.getIndex().totalBytes() - recovered));
@@ -142,7 +142,7 @@ public class ReusePeerRecoverySharedTest {
recoveryState.getShardId().getId(), recoveryState.getSourceNode().name(), recoveryState.getTargetNode().name(),
recoveryState.getIndex().recoveredBytes(), recoveryState.getIndex().reusedBytes());
}
- assertThat(recoveryState.getIndex().recoveredBytes(), equalTo(0l));
+ assertThat(recoveryState.getIndex().recoveredBytes(), equalTo(0L));
assertThat(recoveryState.getIndex().reusedBytes(), equalTo(recoveryState.getIndex().totalBytes()));
assertThat(recoveryState.getIndex().recoveredFileCount(), equalTo(0));
assertThat(recoveryState.getIndex().reusedFileCount(), equalTo(recoveryState.getIndex().totalFileCount()));
diff --git a/core/src/test/java/org/elasticsearch/get/GetActionIT.java b/core/src/test/java/org/elasticsearch/get/GetActionIT.java
index 6cc6def493..f0f9031190 100644
--- a/core/src/test/java/org/elasticsearch/get/GetActionIT.java
+++ b/core/src/test/java/org/elasticsearch/get/GetActionIT.java
@@ -43,7 +43,6 @@ import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
-import org.elasticsearch.test.junit.annotations.TestLogging;
import java.io.IOException;
import java.util.Collection;
@@ -254,12 +253,12 @@ public class GetActionIT extends ESIntegTestCase {
public void testGetDocWithMultivaluedFields() throws Exception {
String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties")
- .startObject("field").field("type", "string").field("store", "yes").endObject()
+ .startObject("field").field("type", "string").field("store", true).endObject()
.endObject()
.endObject().endObject().string();
String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type2")
.startObject("properties")
- .startObject("field").field("type", "string").field("store", "yes").endObject()
+ .startObject("field").field("type", "string").field("store", true).endObject()
.endObject()
.endObject().endObject().string();
assertAcked(prepareCreate("test")
@@ -463,12 +462,12 @@ public class GetActionIT extends ESIntegTestCase {
response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
- assertThat(response.getVersion(), equalTo(1l));
+ assertThat(response.getVersion(), equalTo(1L));
response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
- assertThat(response.getVersion(), equalTo(1l));
+ assertThat(response.getVersion(), equalTo(1L));
try {
client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).get();
@@ -484,13 +483,13 @@ public class GetActionIT extends ESIntegTestCase {
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
- assertThat(response.getVersion(), equalTo(1l));
+ assertThat(response.getVersion(), equalTo(1L));
response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).setRealtime(false).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
- assertThat(response.getVersion(), equalTo(1l));
+ assertThat(response.getVersion(), equalTo(1L));
try {
client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).setRealtime(false).get();
@@ -508,7 +507,7 @@ public class GetActionIT extends ESIntegTestCase {
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
- assertThat(response.getVersion(), equalTo(2l));
+ assertThat(response.getVersion(), equalTo(2L));
try {
client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).get();
@@ -521,7 +520,7 @@ public class GetActionIT extends ESIntegTestCase {
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
- assertThat(response.getVersion(), equalTo(2l));
+ assertThat(response.getVersion(), equalTo(2L));
// From Lucene index:
refresh();
@@ -530,7 +529,7 @@ public class GetActionIT extends ESIntegTestCase {
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
- assertThat(response.getVersion(), equalTo(2l));
+ assertThat(response.getVersion(), equalTo(2L));
try {
client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).setRealtime(false).get();
@@ -543,7 +542,7 @@ public class GetActionIT extends ESIntegTestCase {
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
- assertThat(response.getVersion(), equalTo(2l));
+ assertThat(response.getVersion(), equalTo(2L));
}
public void testMultiGetWithVersion() throws Exception {
@@ -745,7 +744,6 @@ public class GetActionIT extends ESIntegTestCase {
}
}
- @TestLogging("index.shard.service:TRACE,cluster.service:TRACE,action.admin.indices.flush:TRACE")
public void testGetFieldsComplexField() throws Exception {
assertAcked(prepareCreate("my-index")
.setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1))
@@ -753,7 +751,7 @@ public class GetActionIT extends ESIntegTestCase {
.startObject("field1").field("type", "object").startObject("properties")
.startObject("field2").field("type", "object").startObject("properties")
.startObject("field3").field("type", "object").startObject("properties")
- .startObject("field4").field("type", "string").field("store", "yes")
+ .startObject("field4").field("type", "string").field("store", true)
.endObject().endObject()
.endObject().endObject()
.endObject().endObject()
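The 1l to 1L changes in the GetActionIT hunks above are behavior-preserving: both suffixes denote a long literal, but the lowercase form is easily misread as the digit one. A minimal sketch in plain Java (no Elasticsearch types involved) showing the two spellings are equivalent:

    public class LongSuffixDemo {
        public static void main(String[] args) {
            long ambiguous = 11l; // the lowercase suffix reads like a third digit
            long clear = 11L;     // the uppercase suffix is unmistakable
            System.out.println(ambiguous == clear); // true: same value, clearer spelling
        }
    }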
diff --git a/core/src/test/java/org/elasticsearch/http/netty/HttpPublishPortIT.java b/core/src/test/java/org/elasticsearch/http/netty/HttpPublishPortIT.java
index 4d73b52576..b6cf9d9189 100644
--- a/core/src/test/java/org/elasticsearch/http/netty/HttpPublishPortIT.java
+++ b/core/src/test/java/org/elasticsearch/http/netty/HttpPublishPortIT.java
@@ -20,10 +20,11 @@ package org.elasticsearch.http.netty;
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
+import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
-import org.elasticsearch.node.Node;
+import org.elasticsearch.http.HttpTransportSettings;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
@@ -39,8 +40,8 @@ public class HttpPublishPortIT extends ESIntegTestCase {
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.settingsBuilder()
.put(super.nodeSettings(nodeOrdinal))
- .put(Node.HTTP_ENABLED, true)
- .put("http.publish_port", 9080)
+ .put(NetworkModule.HTTP_ENABLED.getKey(), true)
+ .put(HttpTransportSettings.SETTING_HTTP_PUBLISH_PORT.getKey(), 9080)
.build();
}
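The HttpPublishPortIT hunk swaps raw setting strings ("http.publish_port", Node.HTTP_ENABLED) for typed constants whose getKey() returns the canonical key, so the key, default, and parsing live in one place. A rough sketch of that pattern, with illustrative names rather than the real org.elasticsearch.common.settings.Setting API:

    // Illustrative names only; the real class is org.elasticsearch.common.settings.Setting.
    final class IntSetting {
        private final String key;
        private final int defaultValue;

        IntSetting(String key, int defaultValue) {
            this.key = key;
            this.defaultValue = defaultValue;
        }

        String getKey() {
            return key; // call sites write SETTING.getKey() instead of repeating the string
        }

        int get(java.util.Map<String, String> settings) {
            String raw = settings.get(key);
            return raw == null ? defaultValue : Integer.parseInt(raw);
        }
    }

    final class TypedSettingDemo {
        // One definition owns the key and the default, as with SETTING_HTTP_PUBLISH_PORT.
        static final IntSetting HTTP_PUBLISH_PORT = new IntSetting("http.publish_port", -1);

        public static void main(String[] args) {
            java.util.Map<String, String> settings =
                    java.util.Map.of(HTTP_PUBLISH_PORT.getKey(), "9080");
            System.out.println(HTTP_PUBLISH_PORT.get(settings)); // 9080
        }
    }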
diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpChannelTests.java b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpChannelTests.java
index cb111a7198..6311e56834 100644
--- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpChannelTests.java
+++ b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpChannelTests.java
@@ -25,6 +25,7 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.MockBigArrays;
+import org.elasticsearch.http.HttpTransportSettings;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.rest.RestResponse;
import org.elasticsearch.rest.RestStatus;
@@ -81,9 +82,9 @@ public class NettyHttpChannelTests extends ESTestCase {
public void testCorsEnabledWithoutAllowOrigins() {
// Set up a HTTP transport with only the CORS enabled setting
Settings settings = Settings.builder()
- .put(NettyHttpServerTransport.SETTING_CORS_ENABLED, true)
+ .put(HttpTransportSettings.SETTING_CORS_ENABLED.getKey(), true)
.build();
- httpServerTransport = new NettyHttpServerTransport(settings, networkService, bigArrays);
+ httpServerTransport = new NettyHttpServerTransport(settings, networkService, bigArrays, threadPool);
HttpRequest httpRequest = new TestHttpRequest();
httpRequest.headers().add(HttpHeaders.Names.ORIGIN, "remote");
httpRequest.headers().add(HttpHeaders.Names.USER_AGENT, "Mozilla fake");
@@ -104,10 +105,10 @@ public class NettyHttpChannelTests extends ESTestCase {
public void testCorsEnabledWithAllowOrigins() {
// create a http transport with CORS enabled and allow origin configured
Settings settings = Settings.builder()
- .put(NettyHttpServerTransport.SETTING_CORS_ENABLED, true)
- .put(NettyHttpServerTransport.SETTING_CORS_ALLOW_ORIGIN, "remote-host")
+ .put(HttpTransportSettings.SETTING_CORS_ENABLED.getKey(), true)
+ .put(HttpTransportSettings.SETTING_CORS_ALLOW_ORIGIN.getKey(), "remote-host")
.build();
- httpServerTransport = new NettyHttpServerTransport(settings, networkService, bigArrays);
+ httpServerTransport = new NettyHttpServerTransport(settings, networkService, bigArrays, threadPool);
HttpRequest httpRequest = new TestHttpRequest();
httpRequest.headers().add(HttpHeaders.Names.ORIGIN, "remote");
httpRequest.headers().add(HttpHeaders.Names.USER_AGENT, "Mozilla fake");
diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java
index 95cb5b46b5..6afe8a0aef 100644
--- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java
+++ b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java
@@ -23,6 +23,7 @@ import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.util.MockBigArrays;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.http.HttpServerTransport;
import org.elasticsearch.http.netty.NettyHttpServerTransport.HttpChannelPipelineFactory;
import org.elasticsearch.http.netty.pipelining.OrderedDownstreamChannelEvent;
@@ -132,13 +133,13 @@ public class NettyHttpServerPipeliningTests extends ESTestCase {
private final ExecutorService executorService;
public CustomNettyHttpServerTransport(Settings settings) {
- super(settings, NettyHttpServerPipeliningTests.this.networkService, NettyHttpServerPipeliningTests.this.bigArrays);
+ super(settings, NettyHttpServerPipeliningTests.this.networkService, NettyHttpServerPipeliningTests.this.bigArrays, NettyHttpServerPipeliningTests.this.threadPool);
this.executorService = Executors.newFixedThreadPool(5);
}
@Override
public ChannelPipelineFactory configureServerChannelPipelineFactory() {
- return new CustomHttpChannelPipelineFactory(this, executorService);
+ return new CustomHttpChannelPipelineFactory(this, executorService, NettyHttpServerPipeliningTests.this.threadPool.getThreadContext());
}
@Override
@@ -152,8 +153,8 @@ public class NettyHttpServerPipeliningTests extends ESTestCase {
private final ExecutorService executorService;
- public CustomHttpChannelPipelineFactory(NettyHttpServerTransport transport, ExecutorService executorService) {
- super(transport, randomBoolean());
+ public CustomHttpChannelPipelineFactory(NettyHttpServerTransport transport, ExecutorService executorService, ThreadContext threadContext) {
+ super(transport, randomBoolean(), threadContext);
this.executorService = executorService;
}
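The Netty HTTP test changes above thread a ThreadContext from the test's ThreadPool through the transport and pipeline-factory constructors instead of reaching for shared state. A simplified sketch of that constructor-threading pattern, using stand-in types rather than the real Netty or Elasticsearch classes:

    // Stand-in types, not the real Netty or Elasticsearch classes.
    final class FakeThreadContext {
        private final ThreadLocal<String> header = new ThreadLocal<>();
        void put(String value) { header.set(value); }
        String get() { return header.get(); }
    }

    final class PipelineFactory {
        private final FakeThreadContext threadContext;

        // The context arrives via the constructor, as in
        // CustomHttpChannelPipelineFactory(transport, randomBoolean(), threadContext).
        PipelineFactory(FakeThreadContext threadContext) {
            this.threadContext = threadContext;
        }

        Runnable newHandler() {
            return () -> System.out.println("handling with header=" + threadContext.get());
        }
    }

    final class ThreadContextDemo {
        public static void main(String[] args) {
            FakeThreadContext ctx = new FakeThreadContext();
            ctx.put("request-id-42");
            new PipelineFactory(ctx).newHandler().run(); // prints the header it was handed
        }
    }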
diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningDisabledIT.java b/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningDisabledIT.java
index f4ce3756e6..8f7765dcc8 100644
--- a/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningDisabledIT.java
+++ b/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningDisabledIT.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.http.netty;
+import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.http.HttpServerTransport;
@@ -45,7 +46,7 @@ import static org.hamcrest.Matchers.hasSize;
public class NettyPipeliningDisabledIT extends ESIntegTestCase {
@Override
protected Settings nodeSettings(int nodeOrdinal) {
- return settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put(Node.HTTP_ENABLED, true).put("http.pipelining", false).build();
+ return settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put(NetworkModule.HTTP_ENABLED.getKey(), true).put("http.pipelining", false).build();
}
public void testThatNettyHttpServerDoesNotSupportPipelining() throws Exception {
diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningEnabledIT.java b/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningEnabledIT.java
index 9e5971c1d4..93f54cb762 100644
--- a/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningEnabledIT.java
+++ b/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningEnabledIT.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.http.netty;
+import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.http.HttpServerTransport;
@@ -42,7 +43,7 @@ import static org.hamcrest.Matchers.is;
public class NettyPipeliningEnabledIT extends ESIntegTestCase {
@Override
protected Settings nodeSettings(int nodeOrdinal) {
- return settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put(Node.HTTP_ENABLED, true).put("http.pipelining", true).build();
+ return settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put(NetworkModule.HTTP_ENABLED.getKey(), true).put("http.pipelining", true).build();
}
public void testThatNettyHttpServerSupportsPipelining() throws Exception {
diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java
index eae3e65c40..e6701ab5bd 100644
--- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java
+++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java
@@ -18,15 +18,6 @@
*/
package org.elasticsearch.index;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Set;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.function.Consumer;
-
import org.apache.lucene.index.AssertingDirectoryReader;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FieldInvertState;
@@ -70,7 +61,9 @@ import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCacheListener;
import org.elasticsearch.indices.mapper.MapperRegistry;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.ScriptContextRegistry;
+import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptEngineService;
+import org.elasticsearch.script.ScriptSettings;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
@@ -78,6 +71,14 @@ import org.elasticsearch.test.engine.MockEngineFactory;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.function.Consumer;
+
import static java.util.Collections.emptyMap;
public class IndexModuleTests extends ESTestCase {
@@ -106,9 +107,12 @@ public class IndexModuleTests extends ESTestCase {
PageCacheRecycler recycler = new PageCacheRecycler(settings, threadPool);
BigArrays bigArrays = new BigArrays(recycler, circuitBreakerService);
IndicesFieldDataCache indicesFieldDataCache = new IndicesFieldDataCache(settings, new IndicesFieldDataCacheListener(circuitBreakerService), threadPool);
- Set<ScriptEngineService> scriptEngines = new HashSet<>();
+ Set<ScriptEngineService> scriptEngines = Collections.emptySet();
scriptEngines.addAll(Arrays.asList(scriptEngineServices));
- ScriptService scriptService = new ScriptService(settings, environment, scriptEngines, new ResourceWatcherService(settings, threadPool), new ScriptContextRegistry(Collections.emptyList()));
+ ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.emptyList());
+ ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
+ ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
+ ScriptService scriptService = new ScriptService(settings, environment, scriptEngines, new ResourceWatcherService(settings, threadPool), scriptEngineRegistry, scriptContextRegistry, scriptSettings);
IndicesQueriesRegistry indicesQueriesRegistry = new IndicesQueriesRegistry(settings, emptyMap());
return new NodeServicesProvider(threadPool, indicesQueryCache, null, warmer, bigArrays, client, scriptService, indicesQueriesRegistry, indicesFieldDataCache, circuitBreakerService);
}
@@ -116,9 +120,9 @@ public class IndexModuleTests extends ESTestCase {
@Override
public void setUp() throws Exception {
super.setUp();
- index = new Index("foo");
- settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put("path.home", createTempDir().toString()).build();
- indexSettings = IndexSettingsModule.newIndexSettings(index, settings);
+ settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
+ indexSettings = IndexSettingsModule.newIndexSettings("foo", settings);
+ index = indexSettings.getIndex();
environment = new Environment(settings);
nodeServicesProvider = newNodeServiceProvider(settings, environment, null);
nodeEnvironment = new NodeEnvironment(settings, environment);
@@ -147,9 +151,14 @@ public class IndexModuleTests extends ESTestCase {
public void testRegisterIndexStore() throws IOException {
- final Index index = new Index("foo");
- final Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put("path.home", createTempDir().toString()).put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), "foo_store").build();
+ final Settings settings = Settings
+ .builder()
+ .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
+ .put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), "foo_store")
+ .build();
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings);
+ final Index index = indexSettings.getIndex();
IndexModule module = new IndexModule(indexSettings, null, new AnalysisRegistry(null, environment));
module.addIndexStore("foo_store", FooStore::new);
IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, mapperRegistry);
@@ -210,9 +219,9 @@ public class IndexModuleTests extends ESTestCase {
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put("index.similarity.my_similarity.type", "test_similarity")
.put("index.similarity.my_similarity.key", "there is a key")
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment));
+ IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment));
module.addSimilarity("test_similarity", (string, settings) -> new SimilarityProvider() {
@Override
public String name() {
@@ -238,9 +247,9 @@ public class IndexModuleTests extends ESTestCase {
Settings indexSettings = Settings.settingsBuilder()
.put("index.similarity.my_similarity.type", "test_similarity")
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment));
+ IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment));
try {
module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, mapperRegistry);
} catch (IllegalArgumentException ex) {
@@ -251,10 +260,10 @@ public class IndexModuleTests extends ESTestCase {
public void testSetupWithoutType() throws IOException {
Settings indexSettings = Settings.settingsBuilder()
.put("index.similarity.my_similarity.foo", "bar")
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
- IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment));
+ IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment));
try {
module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, mapperRegistry);
} catch (IllegalArgumentException ex) {
@@ -264,9 +273,9 @@ public class IndexModuleTests extends ESTestCase {
public void testCannotRegisterProvidedImplementations() {
Settings indexSettings = Settings.settingsBuilder()
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
- IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment));
+ IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment));
try {
module.registerQueryCache("index", IndexQueryCache::new);
fail("only once");
@@ -292,9 +301,9 @@ public class IndexModuleTests extends ESTestCase {
public void testRegisterCustomQueryCache() throws IOException {
Settings indexSettings = Settings.settingsBuilder()
.put(IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING.getKey(), "custom")
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
- IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment));
+ IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment));
module.registerQueryCache("custom", (a, b) -> new CustomQueryCache());
try {
module.registerQueryCache("custom", (a, b) -> new CustomQueryCache());
@@ -310,9 +319,9 @@ public class IndexModuleTests extends ESTestCase {
public void testDefaultQueryCacheImplIsSelected() throws IOException {
Settings indexSettings = Settings.settingsBuilder()
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
- IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment));
+ IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment));
IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, mapperRegistry);
assertTrue(indexService.cache().query() instanceof IndexQueryCache);
indexService.close("simon says", false);
@@ -330,7 +339,7 @@ public class IndexModuleTests extends ESTestCase {
@Override
public Index index() {
- return new Index("test");
+ return new Index("test", "_na_");
}
@Override
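One detail in the IndexModuleTests hunk deserves a caution: Collections.emptySet() is immutable, and the retained scriptEngines.addAll(...) call avoids an UnsupportedOperationException only because AbstractCollection.addAll never calls add() when the source is empty, which it always is in these tests (the varargs array is empty). A small plain-JDK sketch of the pitfall, plus a construction that stays safe if engines were ever supplied:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.HashSet;
    import java.util.Set;

    public class EmptySetPitfall {
        public static void main(String[] args) {
            Set<String> engines = Collections.emptySet();
            // addAll with an empty source returns false without ever calling add(),
            // so the line in the hunk works only while no engines are passed in.
            engines.addAll(Collections.emptyList()); // no-op, no exception

            // A construction that stays correct if elements are ever supplied
            // ("engine-a" and "engine-b" are placeholder values):
            Set<String> safe = new HashSet<>(Arrays.asList("engine-a", "engine-b"));
            System.out.println(safe);
        }
    }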
diff --git a/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java b/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java
index 15f9a3e78b..252fa9d3f4 100644
--- a/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java
@@ -39,6 +39,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.IOException;
+import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
@@ -264,7 +265,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase {
}
public void testFsyncTaskIsRunning() throws IOException {
- IndexService indexService = createIndex("test", Settings.EMPTY);
+ IndexService indexService = createIndex("test", Settings.builder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC).build());
IndexService.AsyncTranslogFSync fsyncTask = indexService.getFsyncTask();
assertNotNull(fsyncTask);
assertEquals(5000, fsyncTask.getInterval().millis());
@@ -274,6 +275,9 @@ public class IndexServiceTests extends ESSingleNodeTestCase {
indexService.close("simon says", false);
assertFalse(fsyncTask.isScheduled());
assertTrue(fsyncTask.isClosed());
+
+ indexService = createIndex("test1", Settings.EMPTY);
+ assertNull(indexService.getFsyncTask());
}
public void testRefreshActuallyWorks() throws Exception {
@@ -307,7 +311,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase {
public void testAsyncFsyncActuallyWorks() throws Exception {
Settings settings = Settings.builder()
- .put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey(), "10ms") // very often :)
+ .put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey(), "100ms") // very often :)
.put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC)
.build();
IndexService indexService = createIndex("test", settings);
@@ -320,11 +324,43 @@ public class IndexServiceTests extends ESSingleNodeTestCase {
});
}
- public void testNoFsyncTaskIfDisabled() {
+ public void testRescheduleAsyncFsync() throws Exception {
Settings settings = Settings.builder()
- .put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey(), "0ms") // disable
+ .put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey(), "100ms") // very often :)
+ .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.REQUEST)
.build();
IndexService indexService = createIndex("test", settings);
+ ensureGreen("test");
+ assertNull(indexService.getFsyncTask());
+ IndexMetaData metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC)).build();
+ indexService.updateMetaData(metaData);
+ assertNotNull(indexService.getFsyncTask());
+ assertTrue(indexService.getRefreshTask().mustReschedule());
+ client().prepareIndex("test", "test", "1").setSource("{\"foo\": \"bar\"}").get();
+ IndexShard shard = indexService.getShard(0);
+ assertBusy(() -> {
+ assertFalse(shard.getTranslog().syncNeeded());
+ });
+
+ metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.REQUEST)).build();
+ indexService.updateMetaData(metaData);
assertNull(indexService.getFsyncTask());
+
+ metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC)).build();
+ indexService.updateMetaData(metaData);
+ assertNotNull(indexService.getFsyncTask());
+
+ }
+
+ public void testIllegalFsyncInterval() {
+ Settings settings = Settings.builder()
+ .put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey(), "0ms") // disable
+ .build();
+ try {
+ createIndex("test", settings);
+ fail();
+ } catch (IllegalArgumentException ex) {
+ assertEquals("Failed to parse value [0ms] for setting [index.translog.sync_interval] must be >= 100ms", ex.getMessage());
+ }
}
}
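The rewritten IndexServiceTests pins down when the periodic translog-fsync task exists: only while index.translog.durability is ASYNC, with the task created or cancelled as the setting flips, and sync_interval values below 100ms rejected outright. A compact model of that reschedule behaviour, using illustrative types rather than the real IndexService:

    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.ScheduledFuture;
    import java.util.concurrent.TimeUnit;

    public class FsyncSchedulerDemo {
        enum Durability { REQUEST, ASYNC }

        private final ScheduledExecutorService pool = Executors.newSingleThreadScheduledExecutor();
        private ScheduledFuture<?> fsyncTask; // null unless durability is ASYNC

        void updateDurability(Durability durability, long intervalMillis) {
            if (durability == Durability.ASYNC && fsyncTask == null) {
                // mirrors assertNotNull(indexService.getFsyncTask()) after the flip to ASYNC
                fsyncTask = pool.scheduleAtFixedRate(() -> System.out.println("fsync translog"),
                        intervalMillis, intervalMillis, TimeUnit.MILLISECONDS);
            } else if (durability == Durability.REQUEST && fsyncTask != null) {
                // mirrors assertNull(indexService.getFsyncTask()) after the flip back
                fsyncTask.cancel(false);
                fsyncTask = null;
            }
        }

        boolean hasFsyncTask() { return fsyncTask != null; }

        public static void main(String[] args) {
            FsyncSchedulerDemo demo = new FsyncSchedulerDemo();
            demo.updateDurability(Durability.ASYNC, 100);
            System.out.println(demo.hasFsyncTask());   // true
            demo.updateDurability(Durability.REQUEST, 100);
            System.out.println(demo.hasFsyncTask());   // false
            demo.pool.shutdown();
        }
    }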
diff --git a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java
index 56179d5390..09fec10a62 100644
--- a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java
+++ b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java
@@ -114,7 +114,7 @@ public class IndexSettingsTests extends ESTestCase {
if (settings.length > 0) {
settingSet.addAll(Arrays.asList(settings));
}
- return new IndexSettings(metaData, nodeSettings, (idx) -> Regex.simpleMatch(idx, metaData.getIndex()), new IndexScopedSettings(Settings.EMPTY, settingSet));
+ return new IndexSettings(metaData, nodeSettings, (idx) -> Regex.simpleMatch(idx, metaData.getIndex().getName()), new IndexScopedSettings(Settings.EMPTY, settingSet));
}
diff --git a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java
index 7012ebbc5a..5d54f7731c 100644
--- a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java
+++ b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java
@@ -36,6 +36,7 @@ import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShadowIndexShard;
import org.elasticsearch.index.translog.TranslogStats;
@@ -86,7 +87,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
private Settings nodeSettings(String dataPath) {
return Settings.builder()
.put("node.add_id_to_custom_path", false)
- .put("path.shared_data", dataPath)
+ .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), dataPath)
.put("index.store.fs.fs_lock", randomFrom("native", "simple"))
.build();
}
@@ -443,7 +444,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
Path dataPath = createTempDir();
Settings nodeSettings = Settings.builder()
.put("node.add_id_to_custom_path", false)
- .put("path.shared_data", dataPath)
+ .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), dataPath)
.build();
String node1 = internalCluster().startNode(nodeSettings);
diff --git a/core/src/test/java/org/elasticsearch/index/MergePolicySettingsTests.java b/core/src/test/java/org/elasticsearch/index/MergePolicySettingsTests.java
index 1e3e51c083..9caf9790c7 100644
--- a/core/src/test/java/org/elasticsearch/index/MergePolicySettingsTests.java
+++ b/core/src/test/java/org/elasticsearch/index/MergePolicySettingsTests.java
@@ -37,7 +37,7 @@ import static org.elasticsearch.index.IndexSettingsTests.newIndexMeta;
import static org.hamcrest.Matchers.equalTo;
public class MergePolicySettingsTests extends ESTestCase {
- protected final ShardId shardId = new ShardId(new Index("index"), 1);
+ protected final ShardId shardId = new ShardId("index", "_na_", 1);
public void testCompoundFileSettings() throws IOException {
assertThat(new MergePolicyConfig(logger, indexSettings(Settings.EMPTY)).getMergePolicy().getNoCFSRatio(), equalTo(0.1));
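The ShardId change above reflects that an index is now identified by name plus UUID, with "_na_" serving as the not-yet-assigned placeholder in tests (the same pair appears as new Index("test", "_na_") in IndexModuleTests). A simplified stand-in for the idea, not the real org.elasticsearch.index.Index or ShardId classes:

    // Simplified stand-ins, not the real org.elasticsearch.index.Index or ShardId.
    final class IndexId {
        final String name;
        final String uuid; // "_na_" when no real UUID has been assigned yet

        IndexId(String name, String uuid) {
            this.name = name;
            this.uuid = uuid;
        }
    }

    final class ShardIdDemo {
        public static void main(String[] args) {
            IndexId index = new IndexId("index", "_na_"); // mirrors new ShardId("index", "_na_", 1)
            int shard = 1;
            System.out.println(index.name + "/" + index.uuid + "[" + shard + "]");
        }
    }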
diff --git a/core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java b/core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java
index 1f08346689..9dfeb4438a 100644
--- a/core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java
+++ b/core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java
@@ -53,8 +53,8 @@ public class TransportIndexFailuresIT extends ESIntegTestCase {
private static final Settings nodeSettings = Settings.settingsBuilder()
.put("discovery.type", "zen") // <-- To override the local setting if set externally
- .put(FaultDetection.SETTING_PING_TIMEOUT, "1s") // <-- for hitting simulated network failures quickly
- .put(FaultDetection.SETTING_PING_RETRIES, "1") // <-- for hitting simulated network failures quickly
+ .put(FaultDetection.PING_TIMEOUT_SETTING.getKey(), "1s") // <-- for hitting simulated network failures quickly
+ .put(FaultDetection.PING_RETRIES_SETTING.getKey(), "1") // <-- for hitting simulated network failures quickly
.put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") // <-- for hitting simulated network failures quickly
.put("discovery.zen.minimum_master_nodes", 1)
.build();
diff --git a/core/src/test/java/org/elasticsearch/index/VersionTypeTests.java b/core/src/test/java/org/elasticsearch/index/VersionTypeTests.java
index d54d1a953b..837d998dfe 100644
--- a/core/src/test/java/org/elasticsearch/index/VersionTypeTests.java
+++ b/core/src/test/java/org/elasticsearch/index/VersionTypeTests.java
@@ -192,24 +192,24 @@ public class VersionTypeTests extends ESTestCase {
}
public void testUpdateVersion() {
- assertThat(VersionType.INTERNAL.updateVersion(Versions.NOT_SET, 10), equalTo(1l));
- assertThat(VersionType.INTERNAL.updateVersion(Versions.NOT_FOUND, 10), equalTo(1l));
- assertThat(VersionType.INTERNAL.updateVersion(1, 1), equalTo(2l));
- assertThat(VersionType.INTERNAL.updateVersion(2, Versions.MATCH_ANY), equalTo(3l));
+ assertThat(VersionType.INTERNAL.updateVersion(Versions.NOT_SET, 10), equalTo(1L));
+ assertThat(VersionType.INTERNAL.updateVersion(Versions.NOT_FOUND, 10), equalTo(1L));
+ assertThat(VersionType.INTERNAL.updateVersion(1, 1), equalTo(2L));
+ assertThat(VersionType.INTERNAL.updateVersion(2, Versions.MATCH_ANY), equalTo(3L));
- assertThat(VersionType.EXTERNAL.updateVersion(Versions.NOT_SET, 10), equalTo(10l));
- assertThat(VersionType.EXTERNAL.updateVersion(Versions.NOT_FOUND, 10), equalTo(10l));
- assertThat(VersionType.EXTERNAL.updateVersion(1, 10), equalTo(10l));
+ assertThat(VersionType.EXTERNAL.updateVersion(Versions.NOT_SET, 10), equalTo(10L));
+ assertThat(VersionType.EXTERNAL.updateVersion(Versions.NOT_FOUND, 10), equalTo(10L));
+ assertThat(VersionType.EXTERNAL.updateVersion(1, 10), equalTo(10L));
- assertThat(VersionType.EXTERNAL_GTE.updateVersion(Versions.NOT_SET, 10), equalTo(10l));
- assertThat(VersionType.EXTERNAL_GTE.updateVersion(Versions.NOT_FOUND, 10), equalTo(10l));
- assertThat(VersionType.EXTERNAL_GTE.updateVersion(1, 10), equalTo(10l));
- assertThat(VersionType.EXTERNAL_GTE.updateVersion(10, 10), equalTo(10l));
+ assertThat(VersionType.EXTERNAL_GTE.updateVersion(Versions.NOT_SET, 10), equalTo(10L));
+ assertThat(VersionType.EXTERNAL_GTE.updateVersion(Versions.NOT_FOUND, 10), equalTo(10L));
+ assertThat(VersionType.EXTERNAL_GTE.updateVersion(1, 10), equalTo(10L));
+ assertThat(VersionType.EXTERNAL_GTE.updateVersion(10, 10), equalTo(10L));
- assertThat(VersionType.FORCE.updateVersion(Versions.NOT_SET, 10), equalTo(10l));
- assertThat(VersionType.FORCE.updateVersion(Versions.NOT_FOUND, 10), equalTo(10l));
- assertThat(VersionType.FORCE.updateVersion(11, 10), equalTo(10l));
+ assertThat(VersionType.FORCE.updateVersion(Versions.NOT_SET, 10), equalTo(10L));
+ assertThat(VersionType.FORCE.updateVersion(Versions.NOT_FOUND, 10), equalTo(10L));
+ assertThat(VersionType.FORCE.updateVersion(11, 10), equalTo(10L));
// Old indexing code
// if (index.versionType() == VersionType.INTERNAL) { // internal version type
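The testUpdateVersion rewrite doubles as a readable summary of the semantics: INTERNAL ignores the supplied version and increments the current one (starting from 1 when nothing exists yet), while the EXTERNAL, EXTERNAL_GTE, and FORCE types adopt the supplied version. A toy restatement of those rules, where the constant and methods are illustrative stand-ins for VersionType#updateVersion:

    public class UpdateVersionDemo {
        // stand-in for the Versions.NOT_SET / Versions.NOT_FOUND sentinels
        static final long MISSING = -1L;

        static long internalUpdate(long current, long supplied) {
            return current == MISSING ? 1L : current + 1L; // the supplied version is ignored
        }

        static long externalStyleUpdate(long current, long supplied) {
            return supplied; // the caller-provided version wins
        }

        public static void main(String[] args) {
            System.out.println(internalUpdate(MISSING, 10));      // 1, as the assertions expect
            System.out.println(internalUpdate(2, 42));            // 3
            System.out.println(externalStyleUpdate(1, 10));       // 10
            System.out.println(externalStyleUpdate(MISSING, 10)); // 10
        }
    }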
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/ASCIIFoldingTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/ASCIIFoldingTokenFilterFactoryTests.java
index 17bd9d587b..ba3f8b2e10 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/ASCIIFoldingTokenFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/ASCIIFoldingTokenFilterFactoryTests.java
@@ -21,6 +21,7 @@ package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTokenStreamTestCase;
import java.io.IOException;
@@ -31,7 +32,7 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder;
public class ASCIIFoldingTokenFilterFactoryTests extends ESTokenStreamTestCase {
public void testDefault() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder()
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_ascii_folding.type", "asciifolding")
.build());
TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_ascii_folding");
@@ -44,7 +45,7 @@ public class ASCIIFoldingTokenFilterFactoryTests extends ESTokenStreamTestCase {
public void testPreserveOriginal() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder()
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_ascii_folding.type", "asciifolding")
.put("index.analysis.filter.my_ascii_folding.preserve_original", true)
.build());
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisFactoryTests.java
index b2df4a9d41..afb34cda8a 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisFactoryTests.java
@@ -179,6 +179,8 @@ public class AnalysisFactoryTests extends ESTestCase {
put("typeaspayload", Void.class);
// fingerprint
put("fingerprint", Void.class);
+ // for tee-sinks
+ put("daterecognizer", Void.class);
}};
public void testTokenFilters() {
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java
index f844d9ac7a..1eb1e93f09 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java
@@ -68,8 +68,7 @@ public class AnalysisModuleTests extends ModuleTestCase {
}
public AnalysisService getAnalysisService(AnalysisRegistry registry, Settings settings) throws IOException {
- Index index = new Index("test");
- IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings);
+ IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
return registry.build(idxSettings);
}
@@ -81,7 +80,7 @@ public class AnalysisModuleTests extends ModuleTestCase {
private Settings loadFromClasspath(String path) {
return settingsBuilder().loadFromStream(path, getClass().getResourceAsStream(path))
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
}
@@ -106,7 +105,7 @@ public class AnalysisModuleTests extends ModuleTestCase {
String yaml = "/org/elasticsearch/index/analysis/test1.yml";
Settings settings2 = settingsBuilder()
.loadFromStream(yaml, getClass().getResourceAsStream(yaml))
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_0)
.build();
AnalysisRegistry newRegistry = getNewRegistry(settings2);
@@ -130,7 +129,7 @@ public class AnalysisModuleTests extends ModuleTestCase {
private void assertTokenFilter(String name, Class clazz) throws IOException {
Settings settings = Settings.settingsBuilder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
- .put("path.home", createTempDir().toString()).build();
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter(name);
Tokenizer tokenizer = new WhitespaceTokenizer();
@@ -215,7 +214,7 @@ public class AnalysisModuleTests extends ModuleTestCase {
public void testWordListPath() throws Exception {
Settings settings = Settings.builder()
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
Environment env = new Environment(settings);
String[] words = new String[]{"donau", "dampf", "schiff", "spargel", "creme", "suppe"};
@@ -243,7 +242,7 @@ public class AnalysisModuleTests extends ModuleTestCase {
public void testUnderscoreInAnalyzerName() throws IOException {
Settings settings = Settings.builder()
.put("index.analysis.analyzer._invalid_name.tokenizer", "keyword")
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, "1")
.build();
try {
@@ -258,7 +257,7 @@ public class AnalysisModuleTests extends ModuleTestCase {
Settings settings = Settings.builder()
.put("index.analysis.analyzer.valid_name.tokenizer", "keyword")
.put("index.analysis.analyzer.valid_name.alias", "_invalid_name")
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, "1")
.build();
try {
@@ -275,7 +274,7 @@ public class AnalysisModuleTests extends ModuleTestCase {
.put("index.analysis.analyzer.custom1.position_offset_gap", "128")
.put("index.analysis.analyzer.custom2.tokenizer", "standard")
.put("index.analysis.analyzer.custom2.position_increment_gap", "256")
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0,
Version.V_1_7_1))
.build();
@@ -295,7 +294,7 @@ public class AnalysisModuleTests extends ModuleTestCase {
.put("index.analysis.analyzer.custom.tokenizer", "standard")
.put("index.analysis.analyzer.custom.position_offset_gap", "128")
.put("index.analysis.analyzer.custom.position_increment_gap", "256")
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0,
Version.V_1_7_1))
.build();
@@ -312,7 +311,7 @@ public class AnalysisModuleTests extends ModuleTestCase {
Settings settings = settingsBuilder()
.put("index.analysis.analyzer.custom.tokenizer", "standard")
.put("index.analysis.analyzer.custom.position_offset_gap", "128")
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
try {
@@ -326,7 +325,7 @@ public class AnalysisModuleTests extends ModuleTestCase {
public void testRegisterHunspellDictionary() throws Exception {
Settings settings = settingsBuilder()
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
AnalysisModule module = new AnalysisModule(new Environment(settings));
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java
index f467aa289f..11ef2df40c 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java
@@ -53,8 +53,12 @@ public class AnalysisServiceTests extends ESTestCase {
public void testDefaultAnalyzers() throws IOException {
Version version = VersionUtils.randomVersion(getRandom());
- Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).put("path.home", createTempDir().toString()).build();
- IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("index"), settings);
+ Settings settings = Settings
+ .builder()
+ .put(IndexMetaData.SETTING_VERSION_CREATED, version)
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
+ .build();
+ IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
@@ -64,7 +68,7 @@ public class AnalysisServiceTests extends ESTestCase {
public void testOverrideDefaultAnalyzer() throws IOException {
Version version = VersionUtils.randomVersion(getRandom());
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
- AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings(new Index("index"), settings),
+ AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),
Collections.singletonMap("default", analyzerProvider("default")),
Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap());
assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
@@ -76,7 +80,7 @@ public class AnalysisServiceTests extends ESTestCase {
Version version = VersionUtils.randomVersionBetween(getRandom(), Version.V_3_0_0, Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
try {
- AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings(new Index("index"), settings),
+ AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),
Collections.singletonMap("default_index", new PreBuiltAnalyzerProvider("default_index", AnalyzerScope.INDEX, new EnglishAnalyzer())),
Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap());
fail("Expected ISE");
@@ -89,7 +93,7 @@ public class AnalysisServiceTests extends ESTestCase {
public void testBackCompatOverrideDefaultIndexAnalyzer() {
Version version = VersionUtils.randomVersionBetween(getRandom(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_3_0_0));
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
- AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings(new Index("index"), settings),
+ AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),
Collections.singletonMap("default_index", analyzerProvider("default_index")),
Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap());
assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
@@ -100,7 +104,7 @@ public class AnalysisServiceTests extends ESTestCase {
public void testOverrideDefaultSearchAnalyzer() {
Version version = VersionUtils.randomVersion(getRandom());
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
- AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings(new Index("index"), settings),
+ AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),
Collections.singletonMap("default_search", analyzerProvider("default_search")),
Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap());
assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
@@ -114,7 +118,7 @@ public class AnalysisServiceTests extends ESTestCase {
Map<String, AnalyzerProvider> analyzers = new HashMap<>();
analyzers.put("default_index", analyzerProvider("default_index"));
analyzers.put("default_search", analyzerProvider("default_search"));
- AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings(new Index("index"), settings),
+ AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),
analyzers, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap());
assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
@@ -123,7 +127,7 @@ public class AnalysisServiceTests extends ESTestCase {
public void testConfigureCamelCaseTokenFilter() throws IOException {
// tests a filter that
- Settings settings = Settings.builder().put("path.home", createTempDir().toString()).build();
+ Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
Settings indexSettings = settingsBuilder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put("index.analysis.filter.wordDelimiter.type", "word_delimiter")
@@ -133,7 +137,7 @@ public class AnalysisServiceTests extends ESTestCase {
.put("index.analysis.analyzer.custom_analyzer_1.tokenizer", "whitespace")
.putArray("index.analysis.analyzer.custom_analyzer_1.filter", "lowercase", "word_delimiter").build();
- IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("index"), indexSettings);
+ IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
try (NamedAnalyzer custom_analyser = analysisService.analyzer("custom_analyzer")) {
assertNotNull(custom_analyser);
@@ -169,7 +173,7 @@ public class AnalysisServiceTests extends ESTestCase {
}
public void testCameCaseOverride() throws IOException {
- Settings settings = Settings.builder().put("path.home", createTempDir().toString()).build();
+ Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
Settings indexSettings = settingsBuilder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put("index.analysis.filter.wordDelimiter.type", "word_delimiter")
@@ -178,7 +182,7 @@ public class AnalysisServiceTests extends ESTestCase {
.putArray("index.analysis.analyzer.custom_analyzer.filter", "lowercase", "wordDelimiter")
.put("index.analysis.analyzer.custom_analyzer_1.tokenizer", "whitespace")
.putArray("index.analysis.analyzer.custom_analyzer_1.filter", "lowercase", "word_delimiter").build();
- IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("index"), indexSettings);
+ IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
TokenFilterFactory word_delimiter = analysisService.tokenFilter("word_delimiter");
@@ -188,7 +192,7 @@ public class AnalysisServiceTests extends ESTestCase {
assertSame(analysisService.tokenFilter("porterStem"), analysisService.tokenFilter("porter_stem"));
//unconfigured
- IndexSettings idxSettings1 = IndexSettingsModule.newIndexSettings(new Index("index"), settingsBuilder()
+ IndexSettings idxSettings1 = IndexSettingsModule.newIndexSettings("index", settingsBuilder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build());
AnalysisService analysisService1 = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings1);
assertSame(analysisService1.tokenFilter("wordDelimiter"), analysisService1.tokenFilter("word_delimiter"));
@@ -196,10 +200,10 @@ public class AnalysisServiceTests extends ESTestCase {
}
public void testBuiltInAnalyzersAreCached() throws IOException {
- Settings settings = Settings.builder().put("path.home", createTempDir().toString()).build();
+ Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
Settings indexSettings = settingsBuilder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
- IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("index"), indexSettings);
+ IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
AnalysisService otherAnalysisSergice = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
final int numIters = randomIntBetween(5, 20);
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java
index 1404716b0c..e1f9e36bbd 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java
@@ -37,7 +37,7 @@ public class AnalysisTestsHelper {
public static AnalysisService createAnalysisServiceFromClassPath(Path baseDir, String resource) throws IOException {
Settings settings = Settings.settingsBuilder()
.loadFromStream(resource, AnalysisTestsHelper.class.getResourceAsStream(resource))
- .put("path.home", baseDir.toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), baseDir.toString())
.build();
return createAnalysisServiceFromSettings(settings);
@@ -45,11 +45,10 @@ public class AnalysisTestsHelper {
public static AnalysisService createAnalysisServiceFromSettings(
Settings settings) throws IOException {
- Index index = new Index("test");
if (settings.get(IndexMetaData.SETTING_VERSION_CREATED) == null) {
settings = Settings.builder().put(settings).put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
}
- IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings);
+ IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
Environment environment = new Environment(settings);
return new AnalysisRegistry(new HunspellService(settings, environment, Collections.emptyMap()), environment).build(idxSettings);
}
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalyzerBackwardsCompatTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalyzerBackwardsCompatTests.java
index 63acbc81c8..a163d9e42b 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/AnalyzerBackwardsCompatTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalyzerBackwardsCompatTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.index.analysis;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTokenStreamTestCase;
import java.io.IOException;
@@ -41,7 +42,7 @@ public class AnalyzerBackwardsCompatTests extends ESTokenStreamTestCase {
builder.put(SETTING_VERSION_CREATED, version);
}
builder.put("index.analysis.analyzer.foo.type", type);
- builder.put("path.home", createTempDir().toString());
+ builder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString());
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(builder.build());
NamedAnalyzer analyzer = analysisService.analyzer("foo");
assertNotNull(analyzer);
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java
index dd08d47013..dd2df7ed55 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java
@@ -33,16 +33,15 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder;
*/
public class CharFilterTests extends ESTokenStreamTestCase {
public void testMappingCharFilter() throws Exception {
- Index index = new Index("test");
Settings settings = settingsBuilder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put("index.analysis.char_filter.my_mapping.type", "mapping")
.putArray("index.analysis.char_filter.my_mapping.mappings", "ph=>f", "qu=>q")
.put("index.analysis.analyzer.custom_with_char_filter.tokenizer", "standard")
.putArray("index.analysis.analyzer.custom_with_char_filter.char_filter", "my_mapping")
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings);
+ IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
NamedAnalyzer analyzer1 = analysisService.analyzer("custom_with_char_filter");
@@ -53,14 +52,13 @@ public class CharFilterTests extends ESTokenStreamTestCase {
}
public void testHtmlStripCharFilter() throws Exception {
- Index index = new Index("test");
Settings settings = settingsBuilder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put("index.analysis.analyzer.custom_with_char_filter.tokenizer", "standard")
.putArray("index.analysis.analyzer.custom_with_char_filter.char_filter", "html_strip")
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings);
+ IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
NamedAnalyzer analyzer1 = analysisService.analyzer("custom_with_char_filter");
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java
index a097d55f4a..fbedf42d08 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java
@@ -50,9 +50,8 @@ import static org.hamcrest.Matchers.instanceOf;
*/
public class CompoundAnalysisTests extends ESTestCase {
public void testDefaultsCompoundAnalysis() throws Exception {
- Index index = new Index("test");
Settings settings = getJsonSettings();
- IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings);
+ IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings),
Collections.emptyMap(),Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new),Collections.emptyMap(),Collections.emptyMap()).build(idxSettings);
@@ -70,8 +69,7 @@ public class CompoundAnalysisTests extends ESTestCase {
}
private List<String> analyze(Settings settings, String analyzerName, String text) throws IOException {
- Index index = new Index("test");
- IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings);
+ IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings),
Collections.emptyMap(), Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new),Collections.emptyMap(),Collections.emptyMap()).build(idxSettings);
@@ -98,7 +96,7 @@ public class CompoundAnalysisTests extends ESTestCase {
return settingsBuilder()
.loadFromStream(json, getClass().getResourceAsStream(json))
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
}
@@ -107,7 +105,7 @@ public class CompoundAnalysisTests extends ESTestCase {
return settingsBuilder()
.loadFromStream(yaml, getClass().getResourceAsStream(yaml))
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
}
}
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/HunspellTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/HunspellTokenFilterFactoryTests.java
index 02c4e1a264..51d8b9214a 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/HunspellTokenFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/HunspellTokenFilterFactoryTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.index.analysis;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
@@ -30,8 +31,8 @@ import static org.hamcrest.Matchers.is;
public class HunspellTokenFilterFactoryTests extends ESTestCase {
public void testDedup() throws IOException {
Settings settings = settingsBuilder()
- .put("path.home", createTempDir().toString())
- .put("path.conf", getDataPath("/indices/analyze/conf_dir"))
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
+ .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/conf_dir"))
.put("index.analysis.filter.en_US.type", "hunspell")
.put("index.analysis.filter.en_US.locale", "en_US")
.build();
@@ -43,8 +44,8 @@ public class HunspellTokenFilterFactoryTests extends ESTestCase {
assertThat(hunspellTokenFilter.dedup(), is(true));
settings = settingsBuilder()
- .put("path.home", createTempDir().toString())
- .put("path.conf", getDataPath("/indices/analyze/conf_dir"))
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
+ .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/conf_dir"))
.put("index.analysis.filter.en_US.type", "hunspell")
.put("index.analysis.filter.en_US.dedup", false)
.put("index.analysis.filter.en_US.locale", "en_US")
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/KeepFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/KeepFilterFactoryTests.java
index 99c936cd34..a7179daff2 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/KeepFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/KeepFilterFactoryTests.java
@@ -22,6 +22,7 @@ package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTokenStreamTestCase;
import org.junit.Assert;
@@ -41,7 +42,7 @@ public class KeepFilterFactoryTests extends ESTokenStreamTestCase {
public void testLoadOverConfiguredSettings() {
Settings settings = Settings.settingsBuilder()
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.broken_keep_filter.type", "keep")
.put("index.analysis.filter.broken_keep_filter.keep_words_path", "does/not/exists.txt")
.put("index.analysis.filter.broken_keep_filter.keep_words", "[\"Hello\", \"worlD\"]")
@@ -57,7 +58,7 @@ public class KeepFilterFactoryTests extends ESTokenStreamTestCase {
public void testKeepWordsPathSettings() {
Settings settings = Settings.settingsBuilder()
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.non_broken_keep_filter.type", "keep")
.put("index.analysis.filter.non_broken_keep_filter.keep_words_path", "does/not/exists.txt")
.build();
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/KeepTypesFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/KeepTypesFilterFactoryTests.java
index 1e8a0ba16e..9111c929f9 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/KeepTypesFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/KeepTypesFilterFactoryTests.java
@@ -22,6 +22,7 @@ package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.standard.StandardTokenizer;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTokenStreamTestCase;
import java.io.IOException;
@@ -32,7 +33,7 @@ import static org.hamcrest.Matchers.instanceOf;
public class KeepTypesFilterFactoryTests extends ESTokenStreamTestCase {
public void testKeepTypes() throws IOException {
Settings settings = Settings.settingsBuilder()
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.keep_numbers.type", "keep_types")
.putArray("index.analysis.filter.keep_numbers.types", new String[] {"<NUM>", "<SOMETHINGELSE>"})
.build();
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/LimitTokenCountFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/LimitTokenCountFilterFactoryTests.java
index e133ffc79a..b266be9f2b 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/LimitTokenCountFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/LimitTokenCountFilterFactoryTests.java
@@ -22,6 +22,7 @@ package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTokenStreamTestCase;
import java.io.IOException;
@@ -31,7 +32,7 @@ public class LimitTokenCountFilterFactoryTests extends ESTokenStreamTestCase {
public void testDefault() throws IOException {
Settings settings = Settings.settingsBuilder()
.put("index.analysis.filter.limit_default.type", "limit")
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
{
@@ -58,7 +59,7 @@ public class LimitTokenCountFilterFactoryTests extends ESTokenStreamTestCase {
.put("index.analysis.filter.limit_1.type", "limit")
.put("index.analysis.filter.limit_1.max_token_count", 3)
.put("index.analysis.filter.limit_1.consume_all_tokens", true)
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("limit_1");
@@ -73,7 +74,7 @@ public class LimitTokenCountFilterFactoryTests extends ESTokenStreamTestCase {
.put("index.analysis.filter.limit_1.type", "limit")
.put("index.analysis.filter.limit_1.max_token_count", 3)
.put("index.analysis.filter.limit_1.consume_all_tokens", false)
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("limit_1");
@@ -89,7 +90,7 @@ public class LimitTokenCountFilterFactoryTests extends ESTokenStreamTestCase {
.put("index.analysis.filter.limit_1.type", "limit")
.put("index.analysis.filter.limit_1.max_token_count", 17)
.put("index.analysis.filter.limit_1.consume_all_tokens", true)
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("limit_1");
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java
index 0cb3abbd1f..9d8efb1de4 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/NGramTokenizerFactoryTests.java
@@ -48,7 +48,7 @@ import static org.hamcrest.Matchers.instanceOf;
public class NGramTokenizerFactoryTests extends ESTokenStreamTestCase {
public void testParseTokenChars() {
- final Index index = new Index("test");
+ final Index index = new Index("test", "_na_");
final String name = "ngr";
final Settings indexSettings = newAnalysisSettingsBuilder().build();
IndexSettings indexProperties = IndexSettingsModule.newIndexSettings(index, indexSettings);
@@ -71,7 +71,7 @@ public class NGramTokenizerFactoryTests extends ESTokenStreamTestCase {
}
public void testNoTokenChars() throws IOException {
- final Index index = new Index("test");
+ final Index index = new Index("test", "_na_");
final String name = "ngr";
final Settings indexSettings = newAnalysisSettingsBuilder().build();
final Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 4).putArray("token_chars", new String[0]).build();
@@ -82,7 +82,7 @@ public class NGramTokenizerFactoryTests extends ESTokenStreamTestCase {
public void testPreTokenization() throws IOException {
// Make sure that pretokenization works well and that it can be used even with token chars which are supplementary characters
- final Index index = new Index("test");
+ final Index index = new Index("test", "_na_");
final String name = "ngr";
final Settings indexSettings = newAnalysisSettingsBuilder().build();
Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3).put("token_chars", "letter,digit").build();
@@ -99,7 +99,7 @@ public class NGramTokenizerFactoryTests extends ESTokenStreamTestCase {
public void testPreTokenizationEdge() throws IOException {
// Make sure that pretokenization works well and that it can be used even with token chars which are supplementary characters
- final Index index = new Index("test");
+ final Index index = new Index("test", "_na_");
final String name = "ngr";
final Settings indexSettings = newAnalysisSettingsBuilder().build();
Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 3).put("token_chars", "letter,digit").build();
@@ -117,7 +117,7 @@ public class NGramTokenizerFactoryTests extends ESTokenStreamTestCase {
public void testBackwardsCompatibilityEdgeNgramTokenFilter() throws Exception {
int iters = scaledRandomIntBetween(20, 100);
for (int i = 0; i < iters; i++) {
- final Index index = new Index("test");
+ final Index index = new Index("test", "_na_");
final String name = "ngr";
Version v = randomVersion(random());
if (v.onOrAfter(Version.V_0_90_2)) {
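The other recurring rewrite in this file: Index now carries an index UUID alongside its name, so the old one-argument constructor is gone, and tests that have no real UUID pass the "_na_" placeholder. A minimal illustration, assuming this branch's org.elasticsearch.index.Index (the wrapper class is illustrative):

    import org.elasticsearch.index.Index;

    public class IndexCtorSketch {
        public static void main(String[] args) {
            // The UUID is now part of the index identity; "_na_" is the placeholder
            // used throughout this diff where no real UUID exists.
            Index index = new Index("test", "_na_"); // was: new Index("test")
            System.out.println(index.getName());
        }
    }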
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java
index 4b7119df01..ed307f610b 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java
@@ -35,12 +35,12 @@ public class PatternCaptureTokenFilterTests extends ESTokenStreamTestCase {
public void testPatternCaptureTokenFilter() throws Exception {
String json = "/org/elasticsearch/index/analysis/pattern_capture.json";
Settings settings = settingsBuilder()
- .put("path.home", createTempDir())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.loadFromStream(json, getClass().getResourceAsStream(json))
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
- IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("index"), settings);
+ IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
NamedAnalyzer analyzer1 = analysisService.analyzer("single");
@@ -58,7 +58,7 @@ public class PatternCaptureTokenFilterTests extends ESTokenStreamTestCase {
public void testNoPatterns() {
try {
- new PatternCaptureGroupTokenFilterFactory(IndexSettingsModule.newIndexSettings(new Index("test"), Settings.EMPTY), null, "pattern_capture", settingsBuilder().put("pattern", "foobar").build());
+ new PatternCaptureGroupTokenFilterFactory(IndexSettingsModule.newIndexSettings("test", Settings.EMPTY), null, "pattern_capture", settingsBuilder().put("pattern", "foobar").build());
fail ("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("required setting 'patterns' is missing"));
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java
index 737a991f0e..37844dce69 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java
@@ -25,6 +25,7 @@ import org.apache.lucene.analysis.en.PorterStemFilter;
import org.apache.lucene.analysis.snowball.SnowballFilter;
import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTokenStreamTestCase;
import org.elasticsearch.test.VersionUtils;
@@ -50,7 +51,7 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase {
.put("index.analysis.analyzer.my_english.tokenizer","whitespace")
.put("index.analysis.analyzer.my_english.filter","my_english")
.put(SETTING_VERSION_CREATED,v)
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
@@ -83,7 +84,7 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase {
.put("index.analysis.analyzer.my_porter2.tokenizer","whitespace")
.put("index.analysis.analyzer.my_porter2.filter","my_porter2")
.put(SETTING_VERSION_CREATED,v)
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java
index 90e55e98d7..4d316424a4 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java
@@ -35,10 +35,10 @@ public class StopAnalyzerTests extends ESTokenStreamTestCase {
String json = "/org/elasticsearch/index/analysis/stop.json";
Settings settings = settingsBuilder()
.loadFromStream(json, getClass().getResourceAsStream(json))
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
- IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("index"), settings);
+ IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
NamedAnalyzer analyzer1 = analysisService.analyzer("analyzer1");
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java
index 1dbd9ac2bd..2804f522af 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java
@@ -28,6 +28,7 @@ import org.apache.lucene.search.suggest.analyzing.SuggestStopFilter;
import org.apache.lucene.util.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.Settings.Builder;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTokenStreamTestCase;
import java.io.IOException;
@@ -44,7 +45,7 @@ public class StopTokenFilterTests extends ESTokenStreamTestCase {
if (random().nextBoolean()) {
builder.put("index.analysis.filter.my_stop.version", "5.0");
}
- builder.put("path.home", createTempDir().toString());
+ builder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString());
Settings settings = builder.build();
try {
AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
@@ -67,7 +68,7 @@ public class StopTokenFilterTests extends ESTokenStreamTestCase {
} else {
// don't specify
}
- builder.put("path.home", createTempDir().toString());
+ builder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString());
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(builder.build());
TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_stop");
assertThat(tokenFilter, instanceOf(StopTokenFilterFactory.class));
@@ -86,7 +87,7 @@ public class StopTokenFilterTests extends ESTokenStreamTestCase {
.put("index.analysis.filter.my_stop.type", "stop")
.put("index.analysis.filter.my_stop.enable_position_increments", false)
.put("index.analysis.filter.my_stop.version", "4.3")
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_stop");
@@ -101,7 +102,7 @@ public class StopTokenFilterTests extends ESTokenStreamTestCase {
Settings settings = Settings.settingsBuilder()
.put("index.analysis.filter.my_stop.type", "stop")
.put("index.analysis.filter.my_stop.remove_trailing", false)
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_stop");
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java
index 54810028ae..a041694dde 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java
@@ -21,6 +21,7 @@ package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTokenStreamTestCase;
import java.io.IOException;
@@ -31,7 +32,7 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder;
public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase {
public void testDefault() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder()
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.build());
TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_word_delimiter");
@@ -44,7 +45,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase
public void testCatenateWords() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder()
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.put("index.analysis.filter.my_word_delimiter.catenate_words", "true")
.put("index.analysis.filter.my_word_delimiter.generate_word_parts", "false")
@@ -59,7 +60,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase
public void testCatenateNumbers() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder()
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.put("index.analysis.filter.my_word_delimiter.generate_number_parts", "false")
.put("index.analysis.filter.my_word_delimiter.catenate_numbers", "true")
@@ -74,7 +75,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase
public void testCatenateAll() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder()
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.put("index.analysis.filter.my_word_delimiter.generate_word_parts", "false")
.put("index.analysis.filter.my_word_delimiter.generate_number_parts", "false")
@@ -90,7 +91,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase
public void testSplitOnCaseChange() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder()
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.put("index.analysis.filter.my_word_delimiter.split_on_case_change", "false")
.build());
@@ -104,7 +105,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase
public void testPreserveOriginal() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder()
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.put("index.analysis.filter.my_word_delimiter.preserve_original", "true")
.build());
@@ -118,7 +119,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase
public void testStemEnglishPossessive() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder()
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.put("index.analysis.filter.my_word_delimiter.stem_english_possessive", "false")
.build());
@@ -133,7 +134,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase
/** Correct offset order when doing both parts and concatenation: PowerShot is a synonym of Power */
public void testPartsAndCatenate() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder()
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.put("index.analysis.filter.my_word_delimiter.catenate_words", "true")
.put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true")
@@ -150,7 +151,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase
* old offset order when doing both parts and concatenation: PowerShot is a synonym of Shot */
public void testDeprecatedPartsAndCatenate() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder()
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.put("index.analysis.filter.my_word_delimiter.catenate_words", "true")
.put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true")
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java
index f7c346c657..a9d3c8820f 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java
@@ -23,6 +23,7 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.analysis.AnalysisTestsHelper;
import org.elasticsearch.index.analysis.TokenFilterFactory;
@@ -38,7 +39,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
public void testDefault() throws IOException {
Settings settings = Settings.settingsBuilder()
.put("index.analysis.filter.common_grams_default.type", "common_grams")
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
try {
@@ -54,7 +55,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
{
Settings settings = Settings.settingsBuilder().put("index.analysis.filter.common_grams_default.type", "common_grams")
.putArray("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein")
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
@@ -71,7 +72,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
{
Settings settings = Settings.settingsBuilder().put("index.analysis.filter.common_grams_default.type", "common_grams")
.put("index.analysis.filter.common_grams_default.query_mode", false)
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.putArray("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein")
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
@@ -90,7 +91,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
{
Settings settings = Settings.settingsBuilder().put("index.analysis.filter.common_grams_1.type", "common_grams")
.put("index.analysis.filter.common_grams_1.ignore_case", true)
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.putArray("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are")
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
@@ -104,7 +105,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
{
Settings settings = Settings.settingsBuilder().put("index.analysis.filter.common_grams_2.type", "common_grams")
.put("index.analysis.filter.common_grams_2.ignore_case", false)
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.putArray("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
@@ -118,7 +119,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
{
Settings settings = Settings.settingsBuilder().put("index.analysis.filter.common_grams_3.type", "common_grams")
.putArray("index.analysis.filter.common_grams_3.common_words", "the", "or", "not", "a", "is", "an", "they", "are")
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_3");
@@ -134,7 +135,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
String json = "/org/elasticsearch/index/analysis/commongrams/commongrams.json";
Settings settings = Settings.settingsBuilder()
.loadFromStream(json, getClass().getResourceAsStream(json))
- .put("path.home", createHome())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createHome())
.build();
{
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
@@ -158,7 +159,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
.put("index.analysis.filter.common_grams_1.query_mode", true)
.putArray("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are")
.put("index.analysis.filter.common_grams_1.ignore_case", true)
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_1");
@@ -173,7 +174,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
.put("index.analysis.filter.common_grams_2.query_mode", true)
.putArray("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
.put("index.analysis.filter.common_grams_2.ignore_case", false)
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_2");
@@ -187,7 +188,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
Settings settings = Settings.settingsBuilder().put("index.analysis.filter.common_grams_3.type", "common_grams")
.put("index.analysis.filter.common_grams_3.query_mode", true)
.putArray("index.analysis.filter.common_grams_3.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_3");
@@ -201,7 +202,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
Settings settings = Settings.settingsBuilder().put("index.analysis.filter.common_grams_4.type", "common_grams")
.put("index.analysis.filter.common_grams_4.query_mode", true)
.putArray("index.analysis.filter.common_grams_4.common_words", "the", "or", "not", "a", "is", "an", "they", "are")
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_4");
@@ -217,7 +218,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
String json = "/org/elasticsearch/index/analysis/commongrams/commongrams_query_mode.json";
Settings settings = Settings.settingsBuilder()
.loadFromStream(json, getClass().getResourceAsStream(json))
- .put("path.home", createHome())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createHome())
.build();
{
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java
index 3a6adca1c6..104d778860 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java
@@ -64,10 +64,10 @@ public class SynonymsAnalysisTests extends ESTestCase {
String json = "/org/elasticsearch/index/analysis/synonyms/synonyms.json";
Settings settings = settingsBuilder().
loadFromStream(json, getClass().getResourceAsStream(json))
- .put("path.home", home)
+ .put(Environment.PATH_HOME_SETTING.getKey(), home)
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
- IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("index"), settings);
+ IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
diff --git a/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java b/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java
index 69831d7471..4f2f12e6cb 100644
--- a/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java
+++ b/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java
@@ -34,7 +34,6 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.join.BitSetProducer;
-import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Accountable;
@@ -57,7 +56,7 @@ import static org.hamcrest.Matchers.equalTo;
public class BitSetFilterCacheTests extends ESTestCase {
- private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings(new Index("test"), Settings.EMPTY);
+ private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings("test", Settings.EMPTY);
private final IndicesWarmer warmer = new IndicesWarmer(Settings.EMPTY, null);
@@ -93,7 +92,7 @@ public class BitSetFilterCacheTests extends ESTestCase {
writer.commit();
DirectoryReader reader = DirectoryReader.open(writer, false);
- reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(new Index("test"), 0));
+ reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId("test", "_na_", 0));
IndexSearcher searcher = new IndexSearcher(reader);
BitsetFilterCache cache = new BitsetFilterCache(INDEX_SETTINGS, warmer, new BitsetFilterCache.Listener() {
@@ -118,7 +117,7 @@ public class BitSetFilterCacheTests extends ESTestCase {
writer.forceMerge(1);
reader.close();
reader = DirectoryReader.open(writer, false);
- reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(new Index("test"), 0));
+ reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId("test", "_na_", 0));
searcher = new IndexSearcher(reader);
assertThat(matchCount(filter, reader), equalTo(3));
@@ -144,7 +143,7 @@ public class BitSetFilterCacheTests extends ESTestCase {
writer.addDocument(document);
writer.commit();
final DirectoryReader writerReader = DirectoryReader.open(writer, false);
- final IndexReader reader = ElasticsearchDirectoryReader.wrap(writerReader, new ShardId("test", 0));
+ final IndexReader reader = ElasticsearchDirectoryReader.wrap(writerReader, new ShardId("test", "_na_", 0));
final AtomicLong stats = new AtomicLong();
final AtomicInteger onCacheCalls = new AtomicInteger();
@@ -157,7 +156,7 @@ public class BitSetFilterCacheTests extends ESTestCase {
stats.addAndGet(accountable.ramBytesUsed());
if (writerReader != reader) {
assertNotNull(shardId);
- assertEquals("test", shardId.index().name());
+ assertEquals("test", shardId.getIndexName());
assertEquals(0, shardId.id());
} else {
assertNull(shardId);
@@ -170,7 +169,7 @@ public class BitSetFilterCacheTests extends ESTestCase {
stats.addAndGet(-accountable.ramBytesUsed());
if (writerReader != reader) {
assertNotNull(shardId);
- assertEquals("test", shardId.index().name());
+ assertEquals("test", shardId.getIndexName());
assertEquals(0, shardId.id());
} else {
assertNull(shardId);
@@ -218,7 +217,7 @@ public class BitSetFilterCacheTests extends ESTestCase {
writer.addDocument(new Document());
DirectoryReader reader = DirectoryReader.open(writer, true);
writer.close();
- reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(new Index("test2"), 0));
+ reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId("test2", "_na_", 0));
BitSetProducer producer = cache.getBitSetProducer(new MatchAllDocsQuery());
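ShardId follows the same shape in the BitSetFilterCacheTests hunks above: it is now built from the index name, index UUID, and shard number, and the listener assertions use the new getIndexName() accessor in place of the old index().name() chain. A sketch under the same classpath assumption (wrapper class illustrative):

    import org.elasticsearch.index.shard.ShardId;

    public class ShardIdSketch {
        public static void main(String[] args) {
            // name, index UUID placeholder, shard number
            ShardId shardId = new ShardId("test", "_na_", 0);
            // Renamed accessor; previously shardId.index().name().
            System.out.println(shardId.getIndexName() + "/" + shardId.id());
        }
    }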
diff --git a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java
index 7cfe52d1c0..3d912d41c3 100644
--- a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java
+++ b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java
@@ -106,9 +106,9 @@ public class CodecTests extends ESTestCase {
private static CodecService createCodecService() throws IOException {
Settings nodeSettings = settingsBuilder()
- .put("path.home", createTempDir())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.build();
- IndexSettings settings = IndexSettingsModule.newIndexSettings(new Index("_na"), nodeSettings);
+ IndexSettings settings = IndexSettingsModule.newIndexSettings("_na", nodeSettings);
SimilarityService similarityService = new SimilarityService(settings, Collections.emptyMap());
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(nodeSettings)).build(settings);
MapperRegistry mapperRegistry = new MapperRegistry(Collections.emptyMap(), Collections.emptyMap());
diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
index 2b72018aa8..cd39806e22 100644
--- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
+++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
@@ -134,8 +134,8 @@ import static org.hamcrest.Matchers.nullValue;
public class InternalEngineTests extends ESTestCase {
- protected final ShardId shardId = new ShardId(new Index("index"), 1);
- private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings(new Index("index"), Settings.EMPTY);
+ protected final ShardId shardId = new ShardId(new Index("index", "_na_"), 1);
+ private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings("index", Settings.EMPTY);
protected ThreadPool threadPool;
@@ -275,7 +275,7 @@ public class InternalEngineTests extends ESTestCase {
public void onFailedEngine(String reason, @Nullable Throwable t) {
// we don't need to notify anybody in this test
}
- }, new TranslogHandler(shardId.index().getName(), logger), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5));
+ }, new TranslogHandler(shardId.getIndexName(), logger), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5));
try {
config.setCreate(Lucene.indexExists(store.directory()) == false);
} catch (IOException e) {
@@ -293,8 +293,8 @@ public class InternalEngineTests extends ESTestCase {
Engine engine = createEngine(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE)) {
List<Segment> segments = engine.segments(false);
assertThat(segments.isEmpty(), equalTo(true));
- assertThat(engine.segmentsStats().getCount(), equalTo(0l));
- assertThat(engine.segmentsStats().getMemoryInBytes(), equalTo(0l));
+ assertThat(engine.segmentsStats().getCount(), equalTo(0L));
+ assertThat(engine.segmentsStats().getMemoryInBytes(), equalTo(0L));
// create a doc and refresh
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
@@ -307,12 +307,12 @@ public class InternalEngineTests extends ESTestCase {
segments = engine.segments(false);
assertThat(segments.size(), equalTo(1));
SegmentsStats stats = engine.segmentsStats();
- assertThat(stats.getCount(), equalTo(1l));
- assertThat(stats.getTermsMemoryInBytes(), greaterThan(0l));
- assertThat(stats.getStoredFieldsMemoryInBytes(), greaterThan(0l));
- assertThat(stats.getTermVectorsMemoryInBytes(), equalTo(0l));
- assertThat(stats.getNormsMemoryInBytes(), greaterThan(0l));
- assertThat(stats.getDocValuesMemoryInBytes(), greaterThan(0l));
+ assertThat(stats.getCount(), equalTo(1L));
+ assertThat(stats.getTermsMemoryInBytes(), greaterThan(0L));
+ assertThat(stats.getStoredFieldsMemoryInBytes(), greaterThan(0L));
+ assertThat(stats.getTermVectorsMemoryInBytes(), equalTo(0L));
+ assertThat(stats.getNormsMemoryInBytes(), greaterThan(0L));
+ assertThat(stats.getDocValuesMemoryInBytes(), greaterThan(0L));
assertThat(segments.get(0).isCommitted(), equalTo(false));
assertThat(segments.get(0).isSearch(), equalTo(true));
assertThat(segments.get(0).getNumDocs(), equalTo(2));
@@ -324,7 +324,7 @@ public class InternalEngineTests extends ESTestCase {
segments = engine.segments(false);
assertThat(segments.size(), equalTo(1));
- assertThat(engine.segmentsStats().getCount(), equalTo(1l));
+ assertThat(engine.segmentsStats().getCount(), equalTo(1L));
assertThat(segments.get(0).isCommitted(), equalTo(true));
assertThat(segments.get(0).isSearch(), equalTo(true));
assertThat(segments.get(0).getNumDocs(), equalTo(2));
@@ -337,10 +337,10 @@ public class InternalEngineTests extends ESTestCase {
segments = engine.segments(false);
assertThat(segments.size(), equalTo(2));
- assertThat(engine.segmentsStats().getCount(), equalTo(2l));
+ assertThat(engine.segmentsStats().getCount(), equalTo(2L));
assertThat(engine.segmentsStats().getTermsMemoryInBytes(), greaterThan(stats.getTermsMemoryInBytes()));
assertThat(engine.segmentsStats().getStoredFieldsMemoryInBytes(), greaterThan(stats.getStoredFieldsMemoryInBytes()));
- assertThat(engine.segmentsStats().getTermVectorsMemoryInBytes(), equalTo(0l));
+ assertThat(engine.segmentsStats().getTermVectorsMemoryInBytes(), equalTo(0L));
assertThat(engine.segmentsStats().getNormsMemoryInBytes(), greaterThan(stats.getNormsMemoryInBytes()));
assertThat(engine.segmentsStats().getDocValuesMemoryInBytes(), greaterThan(stats.getDocValuesMemoryInBytes()));
assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true));
@@ -363,7 +363,7 @@ public class InternalEngineTests extends ESTestCase {
segments = engine.segments(false);
assertThat(segments.size(), equalTo(2));
- assertThat(engine.segmentsStats().getCount(), equalTo(2l));
+ assertThat(engine.segmentsStats().getCount(), equalTo(2L));
assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true));
assertThat(segments.get(0).isCommitted(), equalTo(true));
assertThat(segments.get(0).isSearch(), equalTo(true));
@@ -384,7 +384,7 @@ public class InternalEngineTests extends ESTestCase {
segments = engine.segments(false);
assertThat(segments.size(), equalTo(3));
- assertThat(engine.segmentsStats().getCount(), equalTo(3l));
+ assertThat(engine.segmentsStats().getCount(), equalTo(3L));
assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true));
assertThat(segments.get(0).isCommitted(), equalTo(true));
assertThat(segments.get(0).isSearch(), equalTo(true));
@@ -494,7 +494,7 @@ public class InternalEngineTests extends ESTestCase {
engine.index(new Engine.Index(newUid("1"), doc));
CommitStats stats1 = engine.commitStats();
- assertThat(stats1.getGeneration(), greaterThan(0l));
+ assertThat(stats1.getGeneration(), greaterThan(0L));
assertThat(stats1.getId(), notNullValue());
assertThat(stats1.getUserData(), hasKey(Translog.TRANSLOG_GENERATION_KEY));
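From here down, most of the InternalEngineTests churn is a literal-suffix sweep: Java accepts a lowercase l suffix on long literals, but it is easy to misread as the digit one, so every 0l/1l/2l/3l becomes 0L/1L/2L/3L with identical semantics. A two-line demonstration (class name illustrative):

    public class LongSuffixSketch {
        public static void main(String[] args) {
            long before = 1l; // legal, but "1l" reads like "11"
            long after = 1L;  // same value, unambiguous; the style this diff standardizes on
            System.out.println(before == after); // true
        }
    }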
@@ -895,46 +895,46 @@ public class InternalEngineTests extends ESTestCase {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index create = new Engine.Index(newUid("1"), doc, Versions.MATCH_DELETED);
engine.index(create);
- assertThat(create.version(), equalTo(1l));
+ assertThat(create.version(), equalTo(1L));
create = new Engine.Index(newUid("1"), doc, create.version(), create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
replicaEngine.index(create);
- assertThat(create.version(), equalTo(1l));
+ assertThat(create.version(), equalTo(1L));
}
public void testVersioningNewIndex() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(newUid("1"), doc);
engine.index(index);
- assertThat(index.version(), equalTo(1l));
+ assertThat(index.version(), equalTo(1L));
index = new Engine.Index(newUid("1"), doc, index.version(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
replicaEngine.index(index);
- assertThat(index.version(), equalTo(1l));
+ assertThat(index.version(), equalTo(1L));
}
public void testExternalVersioningNewIndex() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0);
engine.index(index);
- assertThat(index.version(), equalTo(12l));
+ assertThat(index.version(), equalTo(12L));
index = new Engine.Index(newUid("1"), doc, index.version(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
replicaEngine.index(index);
- assertThat(index.version(), equalTo(12l));
+ assertThat(index.version(), equalTo(12L));
}
public void testVersioningIndexConflict() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(newUid("1"), doc);
engine.index(index);
- assertThat(index.version(), equalTo(1l));
+ assertThat(index.version(), equalTo(1L));
index = new Engine.Index(newUid("1"), doc);
engine.index(index);
- assertThat(index.version(), equalTo(2l));
+ assertThat(index.version(), equalTo(2L));
- index = new Engine.Index(newUid("1"), doc, 1l, VersionType.INTERNAL, Engine.Operation.Origin.PRIMARY, 0);
+ index = new Engine.Index(newUid("1"), doc, 1L, VersionType.INTERNAL, Engine.Operation.Origin.PRIMARY, 0);
try {
engine.index(index);
fail();
@@ -943,7 +943,7 @@ public class InternalEngineTests extends ESTestCase {
}
// future versions should not work as well
- index = new Engine.Index(newUid("1"), doc, 3l, VersionType.INTERNAL, PRIMARY, 0);
+ index = new Engine.Index(newUid("1"), doc, 3L, VersionType.INTERNAL, PRIMARY, 0);
try {
engine.index(index);
fail();
@@ -956,11 +956,11 @@ public class InternalEngineTests extends ESTestCase {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0);
engine.index(index);
- assertThat(index.version(), equalTo(12l));
+ assertThat(index.version(), equalTo(12L));
index = new Engine.Index(newUid("1"), doc, 14, VersionType.EXTERNAL, PRIMARY, 0);
engine.index(index);
- assertThat(index.version(), equalTo(14l));
+ assertThat(index.version(), equalTo(14L));
index = new Engine.Index(newUid("1"), doc, 13, VersionType.EXTERNAL, PRIMARY, 0);
try {
@@ -975,15 +975,15 @@ public class InternalEngineTests extends ESTestCase {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(newUid("1"), doc);
engine.index(index);
- assertThat(index.version(), equalTo(1l));
+ assertThat(index.version(), equalTo(1L));
index = new Engine.Index(newUid("1"), doc);
engine.index(index);
- assertThat(index.version(), equalTo(2l));
+ assertThat(index.version(), equalTo(2L));
engine.flush();
- index = new Engine.Index(newUid("1"), doc, 1l, VersionType.INTERNAL, PRIMARY, 0);
+ index = new Engine.Index(newUid("1"), doc, 1L, VersionType.INTERNAL, PRIMARY, 0);
try {
engine.index(index);
fail();
@@ -992,7 +992,7 @@ public class InternalEngineTests extends ESTestCase {
}
// future versions should not work as well
- index = new Engine.Index(newUid("1"), doc, 3l, VersionType.INTERNAL, PRIMARY, 0);
+ index = new Engine.Index(newUid("1"), doc, 3L, VersionType.INTERNAL, PRIMARY, 0);
try {
engine.index(index);
fail();
@@ -1005,11 +1005,11 @@ public class InternalEngineTests extends ESTestCase {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0);
engine.index(index);
- assertThat(index.version(), equalTo(12l));
+ assertThat(index.version(), equalTo(12L));
index = new Engine.Index(newUid("1"), doc, 14, VersionType.EXTERNAL, PRIMARY, 0);
engine.index(index);
- assertThat(index.version(), equalTo(14l));
+ assertThat(index.version(), equalTo(14L));
engine.flush();
@@ -1121,13 +1121,13 @@ public class InternalEngineTests extends ESTestCase {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(newUid("1"), doc);
engine.index(index);
- assertThat(index.version(), equalTo(1l));
+ assertThat(index.version(), equalTo(1L));
index = new Engine.Index(newUid("1"), doc);
engine.index(index);
- assertThat(index.version(), equalTo(2l));
+ assertThat(index.version(), equalTo(2L));
- Engine.Delete delete = new Engine.Delete("test", "1", newUid("1"), 1l, VersionType.INTERNAL, PRIMARY, 0, false);
+ Engine.Delete delete = new Engine.Delete("test", "1", newUid("1"), 1L, VersionType.INTERNAL, PRIMARY, 0, false);
try {
engine.delete(delete);
fail();
@@ -1136,7 +1136,7 @@ public class InternalEngineTests extends ESTestCase {
}
// future versions should not work as well
- delete = new Engine.Delete("test", "1", newUid("1"), 3l, VersionType.INTERNAL, PRIMARY, 0, false);
+ delete = new Engine.Delete("test", "1", newUid("1"), 3L, VersionType.INTERNAL, PRIMARY, 0, false);
try {
engine.delete(delete);
fail();
@@ -1145,12 +1145,12 @@ public class InternalEngineTests extends ESTestCase {
}
// now actually delete
- delete = new Engine.Delete("test", "1", newUid("1"), 2l, VersionType.INTERNAL, PRIMARY, 0, false);
+ delete = new Engine.Delete("test", "1", newUid("1"), 2L, VersionType.INTERNAL, PRIMARY, 0, false);
engine.delete(delete);
- assertThat(delete.version(), equalTo(3l));
+ assertThat(delete.version(), equalTo(3L));
// now check if we can index to a delete doc with version
- index = new Engine.Index(newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
+ index = new Engine.Index(newUid("1"), doc, 2L, VersionType.INTERNAL, PRIMARY, 0);
try {
engine.index(index);
fail();
@@ -1171,15 +1171,15 @@ public class InternalEngineTests extends ESTestCase {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(newUid("1"), doc);
engine.index(index);
- assertThat(index.version(), equalTo(1l));
+ assertThat(index.version(), equalTo(1L));
index = new Engine.Index(newUid("1"), doc);
engine.index(index);
- assertThat(index.version(), equalTo(2l));
+ assertThat(index.version(), equalTo(2L));
engine.flush();
- Engine.Delete delete = new Engine.Delete("test", "1", newUid("1"), 1l, VersionType.INTERNAL, PRIMARY, 0, false);
+ Engine.Delete delete = new Engine.Delete("test", "1", newUid("1"), 1L, VersionType.INTERNAL, PRIMARY, 0, false);
try {
engine.delete(delete);
fail();
@@ -1188,7 +1188,7 @@ public class InternalEngineTests extends ESTestCase {
}
// future versions should not work as well
- delete = new Engine.Delete("test", "1", newUid("1"), 3l, VersionType.INTERNAL, PRIMARY, 0, false);
+ delete = new Engine.Delete("test", "1", newUid("1"), 3L, VersionType.INTERNAL, PRIMARY, 0, false);
try {
engine.delete(delete);
fail();
@@ -1199,14 +1199,14 @@ public class InternalEngineTests extends ESTestCase {
engine.flush();
// now actually delete
- delete = new Engine.Delete("test", "1", newUid("1"), 2l, VersionType.INTERNAL, PRIMARY, 0, false);
+ delete = new Engine.Delete("test", "1", newUid("1"), 2L, VersionType.INTERNAL, PRIMARY, 0, false);
engine.delete(delete);
- assertThat(delete.version(), equalTo(3l));
+ assertThat(delete.version(), equalTo(3L));
engine.flush();
// now check if we can index to a delete doc with version
- index = new Engine.Index(newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
+ index = new Engine.Index(newUid("1"), doc, 2L, VersionType.INTERNAL, PRIMARY, 0);
try {
engine.index(index);
fail();
@@ -1227,7 +1227,7 @@ public class InternalEngineTests extends ESTestCase {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index create = new Engine.Index(newUid("1"), doc, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, 0);
engine.index(create);
- assertThat(create.version(), equalTo(1l));
+ assertThat(create.version(), equalTo(1L));
create = new Engine.Index(newUid("1"), doc, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, 0);
try {
@@ -1242,7 +1242,7 @@ public class InternalEngineTests extends ESTestCase {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index create = new Engine.Index(newUid("1"), doc, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, 0);
engine.index(create);
- assertThat(create.version(), equalTo(1l));
+ assertThat(create.version(), equalTo(1L));
engine.flush();
@@ -1259,19 +1259,19 @@ public class InternalEngineTests extends ESTestCase {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(newUid("1"), doc);
engine.index(index);
- assertThat(index.version(), equalTo(1l));
+ assertThat(index.version(), equalTo(1L));
index = new Engine.Index(newUid("1"), doc);
engine.index(index);
- assertThat(index.version(), equalTo(2l));
+ assertThat(index.version(), equalTo(2L));
// apply the second index to the replica, should work fine
index = new Engine.Index(newUid("1"), doc, index.version(), VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
replicaEngine.index(index);
- assertThat(index.version(), equalTo(2l));
+ assertThat(index.version(), equalTo(2L));
// now, the old one should not work
- index = new Engine.Index(newUid("1"), doc, 1l, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
+ index = new Engine.Index(newUid("1"), doc, 1L, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
try {
replicaEngine.index(index);
fail();
@@ -1281,10 +1281,10 @@ public class InternalEngineTests extends ESTestCase {
// second version on replica should fail as well
try {
- index = new Engine.Index(newUid("1"), doc, 2l
+ index = new Engine.Index(newUid("1"), doc, 2L
, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
replicaEngine.index(index);
- assertThat(index.version(), equalTo(2l));
+ assertThat(index.version(), equalTo(2L));
} catch (VersionConflictEngineException e) {
// all is well
}
@@ -1294,33 +1294,33 @@ public class InternalEngineTests extends ESTestCase {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(newUid("1"), doc);
engine.index(index);
- assertThat(index.version(), equalTo(1l));
+ assertThat(index.version(), equalTo(1L));
// apply the first index to the replica, should work fine
- index = new Engine.Index(newUid("1"), doc, 1l
+ index = new Engine.Index(newUid("1"), doc, 1L
, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
replicaEngine.index(index);
- assertThat(index.version(), equalTo(1l));
+ assertThat(index.version(), equalTo(1L));
// index it again
index = new Engine.Index(newUid("1"), doc);
engine.index(index);
- assertThat(index.version(), equalTo(2l));
+ assertThat(index.version(), equalTo(2L));
// now delete it
Engine.Delete delete = new Engine.Delete("test", "1", newUid("1"));
engine.delete(delete);
- assertThat(delete.version(), equalTo(3l));
+ assertThat(delete.version(), equalTo(3L));
// apply the delete on the replica (skipping the second index)
- delete = new Engine.Delete("test", "1", newUid("1"), 3l
+ delete = new Engine.Delete("test", "1", newUid("1"), 3L
, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0, false);
replicaEngine.delete(delete);
- assertThat(delete.version(), equalTo(3l));
+ assertThat(delete.version(), equalTo(3L));
// second time delete with same version should fail
try {
- delete = new Engine.Delete("test", "1", newUid("1"), 3l
+ delete = new Engine.Delete("test", "1", newUid("1"), 3L
, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0, false);
replicaEngine.delete(delete);
fail("excepted VersionConflictEngineException to be thrown");
@@ -1330,7 +1330,7 @@ public class InternalEngineTests extends ESTestCase {
// now do the second index on the replica; it should fail
try {
- index = new Engine.Index(newUid("1"), doc, 2l, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
+ index = new Engine.Index(newUid("1"), doc, 2L, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
replicaEngine.index(index);
fail("excepted VersionConflictEngineException to be thrown");
} catch (VersionConflictEngineException e) {
@@ -1610,7 +1610,7 @@ public class InternalEngineTests extends ESTestCase {
ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null);
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime());
engine.index(firstIndexRequest);
- assertThat(firstIndexRequest.version(), equalTo(1l));
+ assertThat(firstIndexRequest.version(), equalTo(1L));
}
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
@@ -1662,7 +1662,7 @@ public class InternalEngineTests extends ESTestCase {
ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null);
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime());
engine.index(firstIndexRequest);
- assertThat(firstIndexRequest.version(), equalTo(1l));
+ assertThat(firstIndexRequest.version(), equalTo(1L));
}
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
@@ -1757,7 +1757,7 @@ public class InternalEngineTests extends ESTestCase {
ParsedDocument doc = testParsedDocument("extra" + Integer.toString(i), "extra" + Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null);
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime());
engine.index(firstIndexRequest);
- assertThat(firstIndexRequest.version(), equalTo(1l));
+ assertThat(firstIndexRequest.version(), equalTo(1L));
}
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
@@ -1786,7 +1786,7 @@ public class InternalEngineTests extends ESTestCase {
ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null);
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime());
engine.index(firstIndexRequest);
- assertThat(firstIndexRequest.version(), equalTo(1l));
+ assertThat(firstIndexRequest.version(), equalTo(1L));
}
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
@@ -1835,7 +1835,7 @@ public class InternalEngineTests extends ESTestCase {
ParsedDocument doc = testParsedDocument(uuidValue, Integer.toString(randomId), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null);
Engine.Index firstIndexRequest = new Engine.Index(newUid(uuidValue), doc, 1, VersionType.EXTERNAL, PRIMARY, System.nanoTime());
engine.index(firstIndexRequest);
- assertThat(firstIndexRequest.version(), equalTo(1l));
+ assertThat(firstIndexRequest.version(), equalTo(1L));
if (flush) {
engine.flush();
}
@@ -1844,7 +1844,7 @@ public class InternalEngineTests extends ESTestCase {
Engine.Index idxRequest = new Engine.Index(newUid(uuidValue), doc, 2, VersionType.EXTERNAL, PRIMARY, System.nanoTime());
engine.index(idxRequest);
engine.refresh("test");
- assertThat(idxRequest.version(), equalTo(2l));
+ assertThat(idxRequest.version(), equalTo(2L));
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), numDocs + 1);
assertThat(topDocs.totalHits, equalTo(numDocs + 1));
@@ -1879,10 +1879,10 @@ public class InternalEngineTests extends ESTestCase {
public final AtomicInteger recoveredOps = new AtomicInteger(0);
public TranslogHandler(String indexName, ESLogger logger) {
- super(new ShardId("test", 0), null, logger);
+ super(new ShardId("test", "_na_", 0), null, logger);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
RootObjectMapper.Builder rootBuilder = new RootObjectMapper.Builder("test");
- Index index = new Index(indexName);
+ Index index = new Index(indexName, "_na_");
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings);
AnalysisService analysisService = new AnalysisService(indexSettings, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap());
SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap());
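
Note: the ShardId and Index constructor changes here recur throughout this diff. Both types now carry an index UUID next to the index name, and tests with no real cluster state pass the "_na_" placeholder. A minimal sketch using only the constructors visible in these hunks:

    import org.elasticsearch.index.Index;
    import org.elasticsearch.index.shard.ShardId;

    class Placeholders {
        Index index = new Index("test", "_na_");          // (name, uuid)
        ShardId shardId = new ShardId("test", "_na_", 0); // (name, uuid, shard id)
    }
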
@@ -1909,7 +1909,7 @@ public class InternalEngineTests extends ESTestCase {
ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null);
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime());
engine.index(firstIndexRequest);
- assertThat(firstIndexRequest.version(), equalTo(1l));
+ assertThat(firstIndexRequest.version(), equalTo(1L));
}
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
diff --git a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java
index 71ad0e1690..6b81512b79 100644
--- a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java
+++ b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java
@@ -88,7 +88,7 @@ import static org.hamcrest.Matchers.nullValue;
*/
public class ShadowEngineTests extends ESTestCase {
- protected final ShardId shardId = new ShardId(new Index("index"), 1);
+ protected final ShardId shardId = new ShardId("index", "_na_", 1);
protected ThreadPool threadPool;
@@ -181,7 +181,7 @@ public class ShadowEngineTests extends ESTestCase {
protected Store createStore(final Directory directory) throws IOException {
- IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(shardId.index(), Settings.EMPTY);
+ IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(shardId.getIndex(), Settings.EMPTY);
final DirectoryService directoryService = new DirectoryService(shardId, indexSettings) {
@Override
public Directory newDirectory() throws IOException {
@@ -252,7 +252,7 @@ public class ShadowEngineTests extends ESTestCase {
primaryEngine.index(new Engine.Index(newUid("1"), doc));
CommitStats stats1 = replicaEngine.commitStats();
- assertThat(stats1.getGeneration(), greaterThan(0l));
+ assertThat(stats1.getGeneration(), greaterThan(0L));
assertThat(stats1.getId(), notNullValue());
assertThat(stats1.getUserData(), hasKey(Translog.TRANSLOG_GENERATION_KEY));
@@ -276,8 +276,8 @@ public class ShadowEngineTests extends ESTestCase {
primaryEngine = createInternalEngine(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE);
List<Segment> segments = primaryEngine.segments(false);
assertThat(segments.isEmpty(), equalTo(true));
- assertThat(primaryEngine.segmentsStats().getCount(), equalTo(0l));
- assertThat(primaryEngine.segmentsStats().getMemoryInBytes(), equalTo(0l));
+ assertThat(primaryEngine.segmentsStats().getCount(), equalTo(0L));
+ assertThat(primaryEngine.segmentsStats().getMemoryInBytes(), equalTo(0L));
// create a doc and refresh
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
@@ -290,12 +290,12 @@ public class ShadowEngineTests extends ESTestCase {
segments = primaryEngine.segments(false);
assertThat(segments.size(), equalTo(1));
SegmentsStats stats = primaryEngine.segmentsStats();
- assertThat(stats.getCount(), equalTo(1l));
- assertThat(stats.getTermsMemoryInBytes(), greaterThan(0l));
- assertThat(stats.getStoredFieldsMemoryInBytes(), greaterThan(0l));
- assertThat(stats.getTermVectorsMemoryInBytes(), equalTo(0l));
- assertThat(stats.getNormsMemoryInBytes(), greaterThan(0l));
- assertThat(stats.getDocValuesMemoryInBytes(), greaterThan(0l));
+ assertThat(stats.getCount(), equalTo(1L));
+ assertThat(stats.getTermsMemoryInBytes(), greaterThan(0L));
+ assertThat(stats.getStoredFieldsMemoryInBytes(), greaterThan(0L));
+ assertThat(stats.getTermVectorsMemoryInBytes(), equalTo(0L));
+ assertThat(stats.getNormsMemoryInBytes(), greaterThan(0L));
+ assertThat(stats.getDocValuesMemoryInBytes(), greaterThan(0L));
assertThat(segments.get(0).isCommitted(), equalTo(false));
assertThat(segments.get(0).isSearch(), equalTo(true));
assertThat(segments.get(0).getNumDocs(), equalTo(2));
@@ -307,12 +307,12 @@ public class ShadowEngineTests extends ESTestCase {
segments = replicaEngine.segments(false);
assertThat(segments.size(), equalTo(0));
stats = replicaEngine.segmentsStats();
- assertThat(stats.getCount(), equalTo(0l));
- assertThat(stats.getTermsMemoryInBytes(), equalTo(0l));
- assertThat(stats.getStoredFieldsMemoryInBytes(), equalTo(0l));
- assertThat(stats.getTermVectorsMemoryInBytes(), equalTo(0l));
- assertThat(stats.getNormsMemoryInBytes(), equalTo(0l));
- assertThat(stats.getDocValuesMemoryInBytes(), equalTo(0l));
+ assertThat(stats.getCount(), equalTo(0L));
+ assertThat(stats.getTermsMemoryInBytes(), equalTo(0L));
+ assertThat(stats.getStoredFieldsMemoryInBytes(), equalTo(0L));
+ assertThat(stats.getTermVectorsMemoryInBytes(), equalTo(0L));
+ assertThat(stats.getNormsMemoryInBytes(), equalTo(0L));
+ assertThat(stats.getDocValuesMemoryInBytes(), equalTo(0L));
assertThat(segments.size(), equalTo(0));
// flush the primary engine
@@ -323,7 +323,7 @@ public class ShadowEngineTests extends ESTestCase {
// Check that the primary AND replica see segments now
segments = primaryEngine.segments(false);
assertThat(segments.size(), equalTo(1));
- assertThat(primaryEngine.segmentsStats().getCount(), equalTo(1l));
+ assertThat(primaryEngine.segmentsStats().getCount(), equalTo(1L));
assertThat(segments.get(0).isCommitted(), equalTo(true));
assertThat(segments.get(0).isSearch(), equalTo(true));
assertThat(segments.get(0).getNumDocs(), equalTo(2));
@@ -332,7 +332,7 @@ public class ShadowEngineTests extends ESTestCase {
segments = replicaEngine.segments(false);
assertThat(segments.size(), equalTo(1));
- assertThat(replicaEngine.segmentsStats().getCount(), equalTo(1l));
+ assertThat(replicaEngine.segmentsStats().getCount(), equalTo(1L));
assertThat(segments.get(0).isCommitted(), equalTo(true));
assertThat(segments.get(0).isSearch(), equalTo(true));
assertThat(segments.get(0).getNumDocs(), equalTo(2));
@@ -346,10 +346,10 @@ public class ShadowEngineTests extends ESTestCase {
segments = primaryEngine.segments(false);
assertThat(segments.size(), equalTo(2));
- assertThat(primaryEngine.segmentsStats().getCount(), equalTo(2l));
+ assertThat(primaryEngine.segmentsStats().getCount(), equalTo(2L));
assertThat(primaryEngine.segmentsStats().getTermsMemoryInBytes(), greaterThan(stats.getTermsMemoryInBytes()));
assertThat(primaryEngine.segmentsStats().getStoredFieldsMemoryInBytes(), greaterThan(stats.getStoredFieldsMemoryInBytes()));
- assertThat(primaryEngine.segmentsStats().getTermVectorsMemoryInBytes(), equalTo(0l));
+ assertThat(primaryEngine.segmentsStats().getTermVectorsMemoryInBytes(), equalTo(0L));
assertThat(primaryEngine.segmentsStats().getNormsMemoryInBytes(), greaterThan(stats.getNormsMemoryInBytes()));
assertThat(primaryEngine.segmentsStats().getDocValuesMemoryInBytes(), greaterThan(stats.getDocValuesMemoryInBytes()));
assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true));
@@ -370,10 +370,10 @@ public class ShadowEngineTests extends ESTestCase {
segments = replicaEngine.segments(false);
assertThat(segments.size(), equalTo(2));
- assertThat(replicaEngine.segmentsStats().getCount(), equalTo(2l));
+ assertThat(replicaEngine.segmentsStats().getCount(), equalTo(2L));
assertThat(replicaEngine.segmentsStats().getTermsMemoryInBytes(), greaterThan(stats.getTermsMemoryInBytes()));
assertThat(replicaEngine.segmentsStats().getStoredFieldsMemoryInBytes(), greaterThan(stats.getStoredFieldsMemoryInBytes()));
- assertThat(replicaEngine.segmentsStats().getTermVectorsMemoryInBytes(), equalTo(0l));
+ assertThat(replicaEngine.segmentsStats().getTermVectorsMemoryInBytes(), equalTo(0L));
assertThat(replicaEngine.segmentsStats().getNormsMemoryInBytes(), greaterThan(stats.getNormsMemoryInBytes()));
assertThat(replicaEngine.segmentsStats().getDocValuesMemoryInBytes(), greaterThan(stats.getDocValuesMemoryInBytes()));
assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true));
@@ -393,7 +393,7 @@ public class ShadowEngineTests extends ESTestCase {
segments = primaryEngine.segments(false);
assertThat(segments.size(), equalTo(2));
- assertThat(primaryEngine.segmentsStats().getCount(), equalTo(2l));
+ assertThat(primaryEngine.segmentsStats().getCount(), equalTo(2L));
assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true));
assertThat(segments.get(0).isCommitted(), equalTo(true));
assertThat(segments.get(0).isSearch(), equalTo(true));
@@ -416,7 +416,7 @@ public class ShadowEngineTests extends ESTestCase {
segments = primaryEngine.segments(false);
assertThat(segments.size(), equalTo(3));
- assertThat(primaryEngine.segmentsStats().getCount(), equalTo(3l));
+ assertThat(primaryEngine.segmentsStats().getCount(), equalTo(3L));
assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true));
assertThat(segments.get(0).isCommitted(), equalTo(true));
assertThat(segments.get(0).isSearch(), equalTo(true));
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java
index 743be63785..37e530cc7f 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java
@@ -206,7 +206,7 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes
IndexFieldData indexFieldData = getForField("value");
AtomicFieldData fieldData = indexFieldData.load(refreshReader());
// Some impls (FST) return size 0 while others (PagedBytes) report a nonzero size even when no actual data is loaded
- assertThat(fieldData.ramBytesUsed(), greaterThanOrEqualTo(0l));
+ assertThat(fieldData.ramBytesUsed(), greaterThanOrEqualTo(0L));
SortedBinaryDocValues bytesValues = fieldData.getBytesValues();
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java
index 07ae1e70a4..0bdbfb5872 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java
@@ -142,7 +142,7 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase {
if (readerContext != null) {
readerContext.reader().close();
}
- topLevelReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", 1));
+ topLevelReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1));
LeafReader reader = SlowCompositeReaderWrapper.wrap(topLevelReader);
readerContext = reader.getContext();
return readerContext;
@@ -174,7 +174,7 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase {
AtomicFieldData previous = null;
for (int i = 0; i < max; i++) {
AtomicFieldData current = fieldData.load(readerContext);
- assertThat(current.ramBytesUsed(), equalTo(0l));
+ assertThat(current.ramBytesUsed(), equalTo(0L));
if (previous != null) {
assertThat(current, not(sameInstance(previous)));
}
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java
index 82b2cca79a..31a17a684e 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java
@@ -49,7 +49,6 @@ import org.apache.lucene.util.UnicodeUtil;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
@@ -389,7 +388,7 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI
}
}
DirectoryReader directoryReader = DirectoryReader.open(writer, true);
- directoryReader = ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(new Index("test"), 0));
+ directoryReader = ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(indexService.index(), 0));
IndexSearcher searcher = new IndexSearcher(directoryReader);
IndexFieldData<?> fieldData = getForField("text");
final Object missingValue;
@@ -486,17 +485,17 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI
values.setDocument(0);
assertThat(values.cardinality(), equalTo(2));
long ord = values.nextOrd();
- assertThat(ord, equalTo(3l));
+ assertThat(ord, equalTo(3L));
assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("02"));
ord = values.nextOrd();
- assertThat(ord, equalTo(5l));
+ assertThat(ord, equalTo(5L));
assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("04"));
values.setDocument(1);
assertThat(values.cardinality(), equalTo(0));
values.setDocument(2);
assertThat(values.cardinality(), equalTo(1));
ord = values.nextOrd();
- assertThat(ord, equalTo(4l));
+ assertThat(ord, equalTo(4L));
assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("03"));
// Second segment
@@ -507,37 +506,37 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI
values.setDocument(0);
assertThat(values.cardinality(), equalTo(3));
ord = values.nextOrd();
- assertThat(ord, equalTo(5l));
+ assertThat(ord, equalTo(5L));
assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("04"));
ord = values.nextOrd();
- assertThat(ord, equalTo(6l));
+ assertThat(ord, equalTo(6L));
assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("05"));
ord = values.nextOrd();
- assertThat(ord, equalTo(7l));
+ assertThat(ord, equalTo(7L));
assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("06"));
values.setDocument(1);
assertThat(values.cardinality(), equalTo(3));
ord = values.nextOrd();
- assertThat(ord, equalTo(7l));
+ assertThat(ord, equalTo(7L));
assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("06"));
ord = values.nextOrd();
- assertThat(ord, equalTo(8l));
+ assertThat(ord, equalTo(8L));
assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("07"));
ord = values.nextOrd();
- assertThat(ord, equalTo(9l));
+ assertThat(ord, equalTo(9L));
assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("08"));
values.setDocument(2);
assertThat(values.cardinality(), equalTo(0));
values.setDocument(3);
assertThat(values.cardinality(), equalTo(3));
ord = values.nextOrd();
- assertThat(ord, equalTo(9l));
+ assertThat(ord, equalTo(9L));
assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("08"));
ord = values.nextOrd();
- assertThat(ord, equalTo(10l));
+ assertThat(ord, equalTo(10L));
assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("09"));
ord = values.nextOrd();
- assertThat(ord, equalTo(11l));
+ assertThat(ord, equalTo(11L));
assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("10"));
// Third segment
@@ -549,13 +548,13 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI
values.setDocument(0);
assertThat(values.cardinality(), equalTo(3));
ord = values.nextOrd();
- assertThat(ord, equalTo(0l));
+ assertThat(ord, equalTo(0L));
assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("!08"));
ord = values.nextOrd();
- assertThat(ord, equalTo(1l));
+ assertThat(ord, equalTo(1L));
assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("!09"));
ord = values.nextOrd();
- assertThat(ord, equalTo(2l));
+ assertThat(ord, equalTo(2L));
assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("!10"));
}
@@ -621,6 +620,6 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI
assertThat(ifd.loadGlobal(topLevelReader), not(sameInstance(globalOrdinals)));
ifdService.clear();
- assertThat(indicesFieldDataCache.getCache().weight(), equalTo(0l));
+ assertThat(indicesFieldDataCache.getCache().weight(), equalTo(0L));
}
}
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java
index ca207fbdc2..30669e8340 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java
@@ -46,7 +46,7 @@ public class BinaryDVFieldDataTests extends AbstractFieldDataTestCase {
.startObject("properties")
.startObject("field")
.field("type", "binary")
- .startObject("fielddata").field("format", "doc_values").endObject()
+ .field("doc_values", true)
.endObject()
.endObject()
.endObject().endObject().string();
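
Note: the mapping change above drops the old per-field fielddata block ("fielddata": {"format": "doc_values"}) in favour of the first-class doc_values flag. A sketch of the resulting mapping, built with the same XContentFactory API the test already uses:

    static String binaryDocValuesMapping() throws Exception {
        return XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties")
                    .startObject("field")
                        .field("type", "binary")
                        .field("doc_values", true) // replaces the old fielddata format block
                    .endObject()
                .endObject()
            .endObject().endObject().string();
    }
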
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/DisabledFieldDataFormatTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/DisabledFieldDataFormatTests.java
deleted file mode 100644
index 0601a30242..0000000000
--- a/core/src/test/java/org/elasticsearch/index/fielddata/DisabledFieldDataFormatTests.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.fielddata;
-
-import org.elasticsearch.action.search.SearchPhaseExecutionException;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.search.aggregations.AggregationBuilders;
-import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
-import org.elasticsearch.test.ESSingleNodeTestCase;
-
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
-
-public class DisabledFieldDataFormatTests extends ESSingleNodeTestCase {
-
- public void test() throws Exception {
- createIndex("test", Settings.EMPTY, "type", "s", "type=string");
- logger.info("indexing data start");
- for (int i = 0; i < 10; ++i) {
- client().prepareIndex("test", "type", Integer.toString(i)).setSource("s", "value" + i).execute().actionGet();
- }
- logger.info("indexing data end");
-
- final int searchCycles = 1;
-
- client().admin().indices().prepareRefresh().execute().actionGet();
-
- // disable field data
- updateFormat("disabled");
-
- SubAggCollectionMode aggCollectionMode = randomFrom(SubAggCollectionMode.values());
- SearchResponse resp = null;
- // try to run something that relies on field data and make sure that it fails
- for (int i = 0; i < searchCycles; i++) {
- try {
- resp = client().prepareSearch("test").setPreference(Integer.toString(i)).addAggregation(AggregationBuilders.terms("t").field("s")
- .collectMode(aggCollectionMode)).execute().actionGet();
- assertFailures(resp);
- } catch (SearchPhaseExecutionException e) {
- // expected
- }
- }
-
- // enable it again
- updateFormat("paged_bytes");
-
- // try to run something that relies on field data and make sure that it works
- for (int i = 0; i < searchCycles; i++) {
- resp = client().prepareSearch("test").setPreference(Integer.toString(i)).addAggregation(AggregationBuilders.terms("t").field("s")
- .collectMode(aggCollectionMode)).execute().actionGet();
- assertNoFailures(resp);
- }
-
- // disable it again
- updateFormat("disabled");
-
- // this time, it should work because segments are already loaded
- for (int i = 0; i < searchCycles; i++) {
- resp = client().prepareSearch("test").setPreference(Integer.toString(i)).addAggregation(AggregationBuilders.terms("t").field("s")
- .collectMode(aggCollectionMode)).execute().actionGet();
- assertNoFailures(resp);
- }
-
- // but add more docs and the new segment won't be loaded
- client().prepareIndex("test", "type", "-1").setSource("s", "value").execute().actionGet();
- client().admin().indices().prepareRefresh().execute().actionGet();
- for (int i = 0; i < searchCycles; i++) {
- try {
- resp = client().prepareSearch("test").setPreference(Integer.toString(i)).addAggregation(AggregationBuilders.terms("t").field("s")
- .collectMode(aggCollectionMode)).execute().actionGet();
- assertFailures(resp);
- } catch (SearchPhaseExecutionException e) {
- // expected
- }
- }
- }
-
- private void updateFormat(final String format) throws Exception {
- logger.info(">> put mapping start {}", format);
- assertAcked(client().admin().indices().preparePutMapping("test").setType("type").setSource(
- XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties")
- .startObject("s")
- .field("type", "string")
- .startObject("fielddata")
- .field("format", format)
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- .endObject()).get());
- logger.info(">> put mapping end {}", format);
- }
-
-}
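
Note: the integration test deleted above is not dropped outright. The same behaviour, field data loading being forbidden when the format is "disabled", is re-covered by the testDisabled unit test added to IndexFieldDataServiceTests further down in this diff.
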
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java
index 35a74ea384..241cbe0e68 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java
@@ -67,7 +67,7 @@ public class FieldDataCacheTests extends ESTestCase {
}
}
iw.close();
- DirectoryReader ir = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(dir), new ShardId("_index", 0));
+ DirectoryReader ir = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(dir), new ShardId("_index", "_na_", 0));
DummyAccountingFieldDataCache fieldDataCache = new DummyAccountingFieldDataCache();
// Testing SortedSetDVOrdinalsIndexFieldData:
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataLoadingIT.java b/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataLoadingIT.java
index fc8a830f9c..d88ef884eb 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataLoadingIT.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataLoadingIT.java
@@ -43,7 +43,7 @@ public class FieldDataLoadingIT extends ESIntegTestCase {
client().admin().indices().prepareRefresh("test").get();
ClusterStatsResponse response = client().admin().cluster().prepareClusterStats().get();
- assertThat(response.getIndicesStats().getFieldData().getMemorySizeInBytes(), greaterThan(0l));
+ assertThat(response.getIndicesStats().getFieldData().getMemorySizeInBytes(), greaterThan(0L));
}
public void testEagerGlobalOrdinalsFieldDataLoading() throws Exception {
@@ -60,7 +60,7 @@ public class FieldDataLoadingIT extends ESIntegTestCase {
client().admin().indices().prepareRefresh("test").get();
ClusterStatsResponse response = client().admin().cluster().prepareClusterStats().get();
- assertThat(response.getIndicesStats().getFieldData().getMemorySizeInBytes(), greaterThan(0l));
+ assertThat(response.getIndicesStats().getFieldData().getMemorySizeInBytes(), greaterThan(0L));
}
}
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java
index 3d4f63daa3..da2899bf2c 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java
@@ -33,7 +33,6 @@ import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Accountable;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.fielddata.plain.PagedBytesAtomicFieldData;
import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData;
@@ -145,14 +144,14 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase {
writer.addDocument(doc);
DirectoryReader open = DirectoryReader.open(writer, true);
final boolean wrap = randomBoolean();
- final IndexReader reader = wrap ? ElasticsearchDirectoryReader.wrap(open, new ShardId("test", 1)) : open;
+ final IndexReader reader = wrap ? ElasticsearchDirectoryReader.wrap(open, new ShardId("test", "_na_", 1)) : open;
final AtomicInteger onCacheCalled = new AtomicInteger();
final AtomicInteger onRemovalCalled = new AtomicInteger();
ifdService.setListener(new IndexFieldDataCache.Listener() {
@Override
public void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable ramUsage) {
if (wrap) {
- assertEquals(new ShardId("test", 1), shardId);
+ assertEquals(new ShardId("test", "_na_", 1), shardId);
} else {
assertNull(shardId);
}
@@ -162,7 +161,7 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase {
@Override
public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
if (wrap) {
- assertEquals(new ShardId("test", 1), shardId);
+ assertEquals(new ShardId("test", "_na_", 1), shardId);
} else {
assertNull(shardId);
}
@@ -207,7 +206,7 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase {
ThreadPool threadPool = new ThreadPool("random_threadpool_name");
try {
IndicesFieldDataCache cache = new IndicesFieldDataCache(Settings.EMPTY, null, threadPool);
- IndexFieldDataService ifds = new IndexFieldDataService(IndexSettingsModule.newIndexSettings(new Index("test"), Settings.EMPTY), cache, null, null);
+ IndexFieldDataService ifds = new IndexFieldDataService(IndexSettingsModule.newIndexSettings("test", Settings.EMPTY), cache, null, null);
ft.setName("some_long");
ft.setHasDocValues(true);
ifds.getForField(ft); // no exception
@@ -234,4 +233,23 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase {
public void testRequireDocValuesOnBools() {
doTestRequireDocValues(new BooleanFieldMapper.BooleanFieldType());
}
+
+ public void testDisabled() {
+ ThreadPool threadPool = new ThreadPool("random_threadpool_name");
+ StringFieldMapper.StringFieldType ft = new StringFieldMapper.StringFieldType();
+ try {
+ IndicesFieldDataCache cache = new IndicesFieldDataCache(Settings.EMPTY, null, threadPool);
+ IndexFieldDataService ifds = new IndexFieldDataService(IndexSettingsModule.newIndexSettings("test", Settings.EMPTY), cache, null, null);
+ ft.setName("some_str");
+ ft.setFieldDataType(new FieldDataType("string", Settings.builder().put(FieldDataType.FORMAT_KEY, "disabled").build()));
+ try {
+ ifds.getForField(ft);
+ fail();
+ } catch (IllegalStateException e) {
+ assertThat(e.getMessage(), containsString("Field data loading is forbidden on [some_str]"));
+ }
+ } finally {
+ threadPool.shutdown();
+ }
+ }
}
diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java
index 0187ababfe..1e0d8ecdf0 100644
--- a/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java
+++ b/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java
@@ -164,11 +164,11 @@ public class ParentChildFieldDataTests extends AbstractFieldDataTestCase {
}
public void testSorting() throws Exception {
- IndexFieldData indexFieldData = getForField(childType);
+ IndexFieldData indexFieldData = getForField(parentType);
IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true));
IndexFieldData.XFieldComparatorSource comparator = indexFieldData.comparatorSource("_last", MultiValueMode.MIN, null);
- TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField(ParentFieldMapper.NAME, comparator, false)));
+ TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField(ParentFieldMapper.joinField(parentType), comparator, false)));
assertThat(topDocs.totalHits, equalTo(8));
assertThat(topDocs.scoreDocs.length, equalTo(8));
assertThat(topDocs.scoreDocs[0].doc, equalTo(0));
@@ -188,7 +188,7 @@ public class ParentChildFieldDataTests extends AbstractFieldDataTestCase {
assertThat(topDocs.scoreDocs[7].doc, equalTo(7));
assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[7]).fields[0]), equalTo(null));
- topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField(ParentFieldMapper.NAME, comparator, true)));
+ topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField(ParentFieldMapper.joinField(parentType), comparator, true)));
assertThat(topDocs.totalHits, equalTo(8));
assertThat(topDocs.scoreDocs.length, equalTo(8));
assertThat(topDocs.scoreDocs[0].doc, equalTo(3));
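
Note: ParentChildFieldDataTests now sorts on a per-parent-type join field instead of the raw _parent field name. Assuming the conventional naming (one doc-values join field per parent type, of the form "_parent#" + type; the exact scheme is internal to ParentFieldMapper), the change amounts to:

    // Hypothetical illustration of the assumed naming scheme:
    String sortField = ParentFieldMapper.joinField("parent"); // e.g. "_parent#parent"
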
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java
index f8859efa02..748dd0a0a1 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java
@@ -73,7 +73,7 @@ public class DynamicMappingDisabledTests extends ESSingleNodeTestCase {
transport = new LocalTransport(settings, THREAD_POOL, Version.CURRENT, new NamedWriteableRegistry());
transportService = new TransportService(transport, THREAD_POOL);
indicesService = getInstanceFromNode(IndicesService.class);
- shardStateAction = new ShardStateAction(settings, clusterService, transportService, null, null);
+ shardStateAction = new ShardStateAction(settings, clusterService, transportService, null, null, THREAD_POOL);
actionFilters = new ActionFilters(Collections.emptySet());
indexNameExpressionResolver = new IndexNameExpressionResolver(settings);
autoCreateIndex = new AutoCreateIndex(settings, indexNameExpressionResolver);
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
index 1a4fb0d9c4..6de49877ce 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
@@ -374,7 +374,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
public void testReuseExistingMappings() throws IOException, Exception {
IndexService indexService = createIndex("test", Settings.EMPTY, "type",
- "my_field1", "type=string,store=yes",
+ "my_field1", "type=string,store=true",
"my_field2", "type=integer,precision_step=10",
"my_field3", "type=long,doc_values=false",
"my_field4", "type=float,index_options=freqs",
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java
index c8d7e4ac14..966edf8262 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java
@@ -38,13 +38,10 @@ public abstract class FieldTypeTestCase extends ESTestCase {
public final String property;
/** true if this modifier only makes types incompatible in strict mode, false otherwise */
public final boolean strictOnly;
- /** true if reversing the order of checkCompatibility arguments should result in the same conflicts, false otherwise **/
- public final boolean symmetric;
- public Modifier(String property, boolean strictOnly, boolean symmetric) {
+ public Modifier(String property, boolean strictOnly) {
this.property = property;
this.strictOnly = strictOnly;
- this.symmetric = symmetric;
}
/** Modifies the property */
@@ -57,25 +54,25 @@ public abstract class FieldTypeTestCase extends ESTestCase {
}
private final List<Modifier> modifiers = new ArrayList<>(Arrays.asList(
- new Modifier("boost", true, true) {
+ new Modifier("boost", true) {
@Override
public void modify(MappedFieldType ft) {
ft.setBoost(1.1f);
}
},
- new Modifier("doc_values", false, false) {
+ new Modifier("doc_values", false) {
@Override
public void modify(MappedFieldType ft) {
ft.setHasDocValues(ft.hasDocValues() == false);
}
},
- new Modifier("analyzer", false, true) {
+ new Modifier("analyzer", false) {
@Override
public void modify(MappedFieldType ft) {
ft.setIndexAnalyzer(new NamedAnalyzer("bar", new StandardAnalyzer()));
}
},
- new Modifier("analyzer", false, true) {
+ new Modifier("analyzer", false) {
@Override
public void modify(MappedFieldType ft) {
ft.setIndexAnalyzer(new NamedAnalyzer("bar", new StandardAnalyzer()));
@@ -85,13 +82,13 @@ public abstract class FieldTypeTestCase extends ESTestCase {
other.setIndexAnalyzer(new NamedAnalyzer("foo", new StandardAnalyzer()));
}
},
- new Modifier("search_analyzer", true, true) {
+ new Modifier("search_analyzer", true) {
@Override
public void modify(MappedFieldType ft) {
ft.setSearchAnalyzer(new NamedAnalyzer("bar", new StandardAnalyzer()));
}
},
- new Modifier("search_analyzer", true, true) {
+ new Modifier("search_analyzer", true) {
@Override
public void modify(MappedFieldType ft) {
ft.setSearchAnalyzer(new NamedAnalyzer("bar", new StandardAnalyzer()));
@@ -101,13 +98,13 @@ public abstract class FieldTypeTestCase extends ESTestCase {
other.setSearchAnalyzer(new NamedAnalyzer("foo", new StandardAnalyzer()));
}
},
- new Modifier("search_quote_analyzer", true, true) {
+ new Modifier("search_quote_analyzer", true) {
@Override
public void modify(MappedFieldType ft) {
ft.setSearchQuoteAnalyzer(new NamedAnalyzer("bar", new StandardAnalyzer()));
}
},
- new Modifier("search_quote_analyzer", true, true) {
+ new Modifier("search_quote_analyzer", true) {
@Override
public void modify(MappedFieldType ft) {
ft.setSearchQuoteAnalyzer(new NamedAnalyzer("bar", new StandardAnalyzer()));
@@ -117,13 +114,13 @@ public abstract class FieldTypeTestCase extends ESTestCase {
other.setSearchQuoteAnalyzer(new NamedAnalyzer("foo", new StandardAnalyzer()));
}
},
- new Modifier("similarity", false, true) {
+ new Modifier("similarity", false) {
@Override
public void modify(MappedFieldType ft) {
ft.setSimilarity(new BM25SimilarityProvider("foo", Settings.EMPTY));
}
},
- new Modifier("similarity", false, true) {
+ new Modifier("similarity", false) {
@Override
public void modify(MappedFieldType ft) {
ft.setSimilarity(new BM25SimilarityProvider("foo", Settings.EMPTY));
@@ -133,19 +130,19 @@ public abstract class FieldTypeTestCase extends ESTestCase {
other.setSimilarity(new BM25SimilarityProvider("bar", Settings.EMPTY));
}
},
- new Modifier("norms.loading", true, true) {
+ new Modifier("norms.loading", true) {
@Override
public void modify(MappedFieldType ft) {
ft.setNormsLoading(MappedFieldType.Loading.LAZY);
}
},
- new Modifier("fielddata", true, true) {
+ new Modifier("fielddata", true) {
@Override
public void modify(MappedFieldType ft) {
ft.setFieldDataType(new FieldDataType("foo", Settings.builder().put("loading", "eager").build()));
}
},
- new Modifier("null_value", true, true) {
+ new Modifier("null_value", true) {
@Override
public void modify(MappedFieldType ft) {
ft.setNullValue(dummyNullValue);
@@ -334,23 +331,14 @@ public abstract class FieldTypeTestCase extends ESTestCase {
assertCompatible(modifier.property, ft1, ft2, false);
assertNotCompatible(modifier.property, ft1, ft2, true, conflicts);
assertCompatible(modifier.property, ft2, ft1, false); // always symmetric when not strict
- if (modifier.symmetric) {
- assertNotCompatible(modifier.property, ft2, ft1, true, conflicts);
- } else {
- assertCompatible(modifier.property, ft2, ft1, true);
- }
+ assertNotCompatible(modifier.property, ft2, ft1, true, conflicts);
} else {
// not compatible whether strict or not
String conflict = "different [" + modifier.property + "]";
assertNotCompatible(modifier.property, ft1, ft2, true, conflict);
assertNotCompatible(modifier.property, ft1, ft2, false, conflict);
- if (modifier.symmetric) {
- assertNotCompatible(modifier.property, ft2, ft1, true, conflict);
- assertNotCompatible(modifier.property, ft2, ft1, false, conflict);
- } else {
- assertCompatible(modifier.property, ft2, ft1, true);
- assertCompatible(modifier.property, ft2, ft1, false);
- }
+ assertNotCompatible(modifier.property, ft2, ft1, true, conflict);
+ assertNotCompatible(modifier.property, ft2, ft1, false, conflict);
}
}
}
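
Note: with the symmetric flag gone, FieldTypeTestCase asserts every incompatibility in both directions: if ft1 conflicts with ft2 under a given strictness, ft2 must conflict with ft1 the same way. A modifier is now declared with just a property name and a strict-only flag, as in this hunk:

    addModifier(new Modifier("boost", true) {
        @Override
        public void modify(MappedFieldType ft) {
            // A boost change must now produce the same conflict whichever
            // of the two field types was modified.
            ft.setBoost(1.1f);
        }
    });
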
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java
index 83eeaf36ff..7cc8e10bef 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java
@@ -20,11 +20,13 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.Term;
+import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.lucene.search.Queries;
@@ -40,6 +42,7 @@ import java.util.HashSet;
import java.util.concurrent.ExecutionException;
import static org.hamcrest.CoreMatchers.containsString;
+import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasToString;
@@ -103,7 +106,7 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
fail();
} catch (Throwable t) {
if (t instanceof ExecutionException) {
- t = ((ExecutionException) t).getCause();
+ t = t.getCause();
}
final Throwable throwable = ExceptionsHelper.unwrapCause(t);
if (throwable instanceof IllegalArgumentException) {
@@ -120,7 +123,7 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
fail();
} catch (Throwable t) {
if (t instanceof ExecutionException) {
- t = ((ExecutionException) t).getCause();
+ t = t.getCause();
}
final Throwable throwable = ExceptionsHelper.unwrapCause(t);
if (throwable instanceof IllegalArgumentException) {
@@ -132,21 +135,4 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
assertFalse(indexService.mapperService().hasMapping(MapperService.DEFAULT_MAPPING));
}
- public void testSearchFilter() {
- IndexService indexService = createIndex("index1", client().admin().indices().prepareCreate("index1")
- .addMapping("type1", "field1", "type=nested")
- .addMapping("type2", new Object[0])
- );
-
- Query searchFilter = indexService.mapperService().searchFilter("type1", "type3");
- Query expectedQuery = new BooleanQuery.Builder()
- .add(new BooleanQuery.Builder()
- .add(new ConstantScoreQuery(new TermQuery(new Term(TypeFieldMapper.NAME, "type1"))), BooleanClause.Occur.SHOULD)
- .add(new TermQuery(new Term(TypeFieldMapper.NAME, "type3")), BooleanClause.Occur.SHOULD)
- .build(), BooleanClause.Occur.MUST
- )
- .add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST)
- .build();
- assertThat(searchFilter, equalTo(new ConstantScoreQuery(expectedQuery)));
- }
}
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java
index 308478ad49..7be0cc8031 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java
@@ -63,7 +63,7 @@ public class BinaryMappingTests extends ESSingleNodeTestCase {
.startObject("properties")
.startObject("field")
.field("type", "binary")
- .field("store", "yes")
+ .field("store", true)
.endObject()
.endObject()
.endObject().endObject().string();
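
Note: the store: "yes" / "no" string booleans are another cleanup repeated across this diff; mappings now take real JSON booleans. The equivalent builder fragment, mirroring the hunk above:

    static String storedBinaryFieldMapping() throws Exception {
        return XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties")
                    .startObject("field")
                        .field("type", "binary")
                        .field("store", true) // previously the string "yes"
                    .endObject()
                .endObject()
            .endObject().endObject().string();
    }
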
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java
index 0a0f746ccd..51eef673c5 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java
@@ -59,7 +59,7 @@ public class CustomBoostMappingTests extends ESSingleNodeTestCase {
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject()
.startObject("s_field").field("value", "s_value").field("boost", 2.0f).endObject()
- .startObject("l_field").field("value", 1l).field("boost", 3.0f).endObject()
+ .startObject("l_field").field("value", 1L).field("boost", 3.0f).endObject()
.startObject("i_field").field("value", 1).field("boost", 4.0f).endObject()
.startObject("sh_field").field("value", 1).field("boost", 5.0f).endObject()
.startObject("b_field").field("value", 1).field("boost", 6.0f).endObject()
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java
index 2175f2ce3e..d66b1508dc 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java
@@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.core;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReader;
@@ -28,8 +29,11 @@ import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
+import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
@@ -39,10 +43,15 @@ import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
+import org.elasticsearch.index.mapper.ParseContext.Document;
+import org.elasticsearch.index.mapper.string.SimpleStringMappingTests;
+import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
+import org.elasticsearch.test.InternalSettingsPlugin;
import org.junit.Before;
import java.io.IOException;
+import java.util.Collection;
public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
@@ -55,6 +64,11 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
parser = indexService.mapperService().documentMapperParser();
}
+ @Override
+ protected Collection<Class<? extends Plugin>> getPlugins() {
+ return pluginList(InternalSettingsPlugin.class);
+ }
+
public void testDefaults() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "boolean").endObject().endObject()
@@ -135,4 +149,71 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
ParsedDocument doc = mapper.parse("test", "type", "1", source);
assertNotNull(doc.rootDoc().getField("field.as_string"));
}
+
+ public void testDocValues() throws Exception {
+ String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+ .startObject("properties")
+ .startObject("bool1")
+ .field("type", "boolean")
+ .endObject()
+ .startObject("bool2")
+ .field("type", "boolean")
+ .field("index", false)
+ .endObject()
+ .startObject("bool3")
+ .field("type", "boolean")
+ .field("index", true)
+ .endObject()
+ .endObject()
+ .endObject().endObject().string();
+
+ DocumentMapper defaultMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
+
+ ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+ .startObject()
+ .field("bool1", true)
+ .field("bool2", true)
+ .field("bool3", true)
+ .endObject()
+ .bytes());
+ Document doc = parsedDoc.rootDoc();
+ assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "bool1"));
+ assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "bool2"));
+ assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "bool3"));
+ }
+
+ public void testBwCompatDocValues() throws Exception {
+ Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_2_0).build();
+ indexService = createIndex("test_old", oldIndexSettings);
+ parser = indexService.mapperService().documentMapperParser();
+ String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+ .startObject("properties")
+ .startObject("bool1")
+ .field("type", "boolean")
+ .endObject()
+ .startObject("bool2")
+ .field("type", "boolean")
+ .field("index", "no")
+ .endObject()
+ .startObject("bool3")
+ .field("type", "boolean")
+ .field("index", "not_analyzed")
+ .endObject()
+ .endObject()
+ .endObject().endObject().string();
+
+ DocumentMapper defaultMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
+
+ ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+ .startObject()
+ .field("bool1", true)
+ .field("bool2", true)
+ .field("bool3", true)
+ .endObject()
+ .bytes());
+ Document doc = parsedDoc.rootDoc();
+ assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "bool1"));
+ assertEquals(DocValuesType.NONE, SimpleStringMappingTests.docValuesType(doc, "bool2"));
+ assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "bool3"));
+ }
}
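
Note: both new tests read doc-values behaviour through SimpleStringMappingTests.docValuesType. A helper in that role would plausibly look like the sketch below (an assumption about its shape, not the actual implementation): scan the parsed document's Lucene fields and report the first non-NONE DocValuesType for the given name.

    import org.apache.lucene.index.DocValuesType;
    import org.apache.lucene.index.IndexableField;
    import org.elasticsearch.index.mapper.ParseContext.Document;

    static DocValuesType docValuesType(Document doc, String field) {
        for (IndexableField f : doc.getFields()) {
            // Several Lucene fields may share the mapped name; return the
            // doc-values type of the first one that carries doc values.
            if (field.equals(f.name()) && f.fieldType().docValuesType() != DocValuesType.NONE) {
                return f.fieldType().docValuesType();
            }
        }
        return DocValuesType.NONE;
    }
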
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/CompletionFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/CompletionFieldTypeTests.java
index 7ec1814a59..6f5225dd2c 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/core/CompletionFieldTypeTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/core/CompletionFieldTypeTests.java
@@ -34,21 +34,21 @@ public class CompletionFieldTypeTests extends FieldTypeTestCase {
@Before
public void setupProperties() {
- addModifier(new Modifier("preserve_separators", false, true) {
+ addModifier(new Modifier("preserve_separators", false) {
@Override
public void modify(MappedFieldType ft) {
CompletionFieldMapper.CompletionFieldType cft = (CompletionFieldMapper.CompletionFieldType)ft;
cft.setPreserveSep(false);
}
});
- addModifier(new Modifier("preserve_position_increments", false, true) {
+ addModifier(new Modifier("preserve_position_increments", false) {
@Override
public void modify(MappedFieldType ft) {
CompletionFieldMapper.CompletionFieldType cft = (CompletionFieldMapper.CompletionFieldType)ft;
cft.setPreservePositionIncrements(false);
}
});
- addModifier(new Modifier("context_mappings", false, true) {
+ addModifier(new Modifier("context_mappings", false) {
@Override
public void modify(MappedFieldType ft) {
CompletionFieldMapper.CompletionFieldType cft = (CompletionFieldMapper.CompletionFieldType)ft;
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/DateFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/DateFieldTypeTests.java
index 3c37af6f49..0e009891cf 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/core/DateFieldTypeTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/core/DateFieldTypeTests.java
@@ -35,19 +35,19 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
@Before
public void setupProperties() {
setDummyNullValue(10);
- addModifier(new Modifier("format", true, true) {
+ addModifier(new Modifier("format", true) {
@Override
public void modify(MappedFieldType ft) {
((DateFieldMapper.DateFieldType) ft).setDateTimeFormatter(Joda.forPattern("basic_week_date", Locale.ROOT));
}
});
- addModifier(new Modifier("locale", true, true) {
+ addModifier(new Modifier("locale", true) {
@Override
public void modify(MappedFieldType ft) {
((DateFieldMapper.DateFieldType) ft).setDateTimeFormatter(Joda.forPattern("date_optional_time", Locale.CANADA));
}
});
- addModifier(new Modifier("numeric_resolution", true, true) {
+ addModifier(new Modifier("numeric_resolution", true) {
@Override
public void modify(MappedFieldType ft) {
((DateFieldMapper.DateFieldType)ft).setTimeUnit(TimeUnit.HOURS);
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java
index 7e519c3b72..f581f1f6a4 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java
@@ -89,12 +89,12 @@ public class ExternalValuesMapperIntegrationIT extends ESIntegTestCase {
.startObject("fields")
.startObject("g")
.field("type", "string")
- .field("store", "yes")
+ .field("store", true)
.startObject("fields")
.startObject("raw")
.field("type", "string")
.field("index", "not_analyzed")
- .field("store", "yes")
+ .field("store", true)
.endObject()
.endObject()
.endObject()
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java
index c42924132f..96c099df6d 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java
@@ -118,12 +118,12 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
.startObject("fields")
.startObject("field")
.field("type", "string")
- .field("store", "yes")
+ .field("store", true)
.startObject("fields")
.startObject("raw")
.field("type", "string")
.field("index", "not_analyzed")
- .field("store", "yes")
+ .field("store", true)
.endObject()
.endObject()
.endObject()
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java
index 2ea19b0245..db5781a77e 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java
@@ -330,7 +330,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
public void testLatLonValuesStored() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
- .field("store", "yes").endObject().endObject().endObject().endObject().string();
+ .field("store", true).endObject().endObject().endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
@@ -357,7 +357,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
public void testArrayLatLonValues() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
- .field("store", "yes").endObject().endObject().endObject().endObject().string();
+ .field("store", true).endObject().endObject().endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
@@ -416,7 +416,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
public void testLatLonInOneValueStored() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", "yes").endObject().endObject()
+ .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", true).endObject().endObject()
.endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
@@ -443,7 +443,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
public void testLatLonInOneValueArray() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
- .field("store", "yes").endObject().endObject().endObject().endObject().string();
+ .field("store", true).endObject().endObject().endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
@@ -528,7 +528,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
public void testLonLatArrayStored() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
- .field("store", "yes").endObject().endObject().endObject().endObject().string();
+ .field("store", true).endObject().endObject().endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
@@ -554,7 +554,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
public void testLonLatArrayArrayStored() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
- .field("store", "yes").endObject().endObject().endObject().endObject().string();
+ .field("store", true).endObject().endObject().endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldTypeTests.java
index 19eb536e32..6934d06a50 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldTypeTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldTypeTests.java
@@ -32,13 +32,13 @@ public class GeoPointFieldTypeTests extends FieldTypeTestCase {
@Before
public void setupProperties() {
- addModifier(new Modifier("geohash", false, true) {
+ addModifier(new Modifier("geohash", false) {
@Override
public void modify(MappedFieldType ft) {
((BaseGeoPointFieldMapper.GeoPointFieldType)ft).setGeoHashEnabled(new StringFieldMapper.StringFieldType(), 1, true);
}
});
- addModifier(new Modifier("lat_lon", false, true) {
+ addModifier(new Modifier("lat_lon", false) {
@Override
public void modify(MappedFieldType ft) {
((BaseGeoPointFieldMapper.GeoPointFieldType)ft).setLatLonEnabled(new DoubleFieldMapper.DoubleFieldType(), new DoubleFieldMapper.DoubleFieldType());
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldTypeTests.java
index 7ce99aa737..3407661b52 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldTypeTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldTypeTests.java
@@ -31,37 +31,37 @@ public class GeoShapeFieldTypeTests extends FieldTypeTestCase {
@Before
public void setupProperties() {
- addModifier(new Modifier("tree", false, true) {
+ addModifier(new Modifier("tree", false) {
@Override
public void modify(MappedFieldType ft) {
((GeoShapeFieldMapper.GeoShapeFieldType)ft).setTree("quadtree");
}
});
- addModifier(new Modifier("strategy", false, true) {
+ addModifier(new Modifier("strategy", false) {
@Override
public void modify(MappedFieldType ft) {
((GeoShapeFieldMapper.GeoShapeFieldType)ft).setStrategyName("term");
}
});
- addModifier(new Modifier("tree_levels", false, true) {
+ addModifier(new Modifier("tree_levels", false) {
@Override
public void modify(MappedFieldType ft) {
((GeoShapeFieldMapper.GeoShapeFieldType)ft).setTreeLevels(10);
}
});
- addModifier(new Modifier("precision", false, true) {
+ addModifier(new Modifier("precision", false) {
@Override
public void modify(MappedFieldType ft) {
((GeoShapeFieldMapper.GeoShapeFieldType)ft).setPrecisionInMeters(20);
}
});
- addModifier(new Modifier("distance_error_pct", true, true) {
+ addModifier(new Modifier("distance_error_pct", true) {
@Override
public void modify(MappedFieldType ft) {
((GeoShapeFieldMapper.GeoShapeFieldType)ft).setDefaultDistanceErrorPct(0.5);
}
});
- addModifier(new Modifier("orientation", true, true) {
+ addModifier(new Modifier("orientation", true) {
@Override
public void modify(MappedFieldType ft) {
((GeoShapeFieldMapper.GeoShapeFieldType)ft).setOrientation(ShapeBuilder.Orientation.LEFT);
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldTypeTests.java
index 83aa779a61..fd0c344c26 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldTypeTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldTypeTests.java
@@ -30,7 +30,7 @@ public class FieldNamesFieldTypeTests extends FieldTypeTestCase {
@Before
public void setupProperties() {
- addModifier(new Modifier("enabled", true, true) {
+ addModifier(new Modifier("enabled", true) {
@Override
public void modify(MappedFieldType ft) {
FieldNamesFieldMapper.FieldNamesFieldType fnft = (FieldNamesFieldMapper.FieldNamesFieldType)ft;
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/internal/ParentFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/internal/ParentFieldMapperTests.java
index 0d52b66dfb..7083d9fa6b 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/internal/ParentFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/internal/ParentFieldMapperTests.java
@@ -47,10 +47,10 @@ public class ParentFieldMapperTests extends ESTestCase {
assertThat(parentFieldMapper.getParentJoinFieldType().hasDocValues(), is(true));
assertThat(parentFieldMapper.getParentJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));
- assertThat(parentFieldMapper.getChildJoinFieldType().name(), equalTo("_parent#parent"));
- assertThat(parentFieldMapper.getChildJoinFieldType().fieldDataType().getLoading(), equalTo(Loading.LAZY));
- assertThat(parentFieldMapper.getChildJoinFieldType().hasDocValues(), is(true));
- assertThat(parentFieldMapper.getChildJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));
+ assertThat(parentFieldMapper.fieldType().name(), equalTo("_parent#parent"));
+ assertThat(parentFieldMapper.fieldType().fieldDataType().getLoading(), equalTo(Loading.LAZY));
+ assertThat(parentFieldMapper.fieldType().hasDocValues(), is(true));
+ assertThat(parentFieldMapper.fieldType().docValuesType(), equalTo(DocValuesType.SORTED));
}
public void testPost2Dot0EagerLoading() {
@@ -65,10 +65,10 @@ public class ParentFieldMapperTests extends ESTestCase {
assertThat(parentFieldMapper.getParentJoinFieldType().hasDocValues(), is(true));
assertThat(parentFieldMapper.getParentJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));
- assertThat(parentFieldMapper.getChildJoinFieldType().name(), equalTo("_parent#parent"));
- assertThat(parentFieldMapper.getChildJoinFieldType().fieldDataType().getLoading(), equalTo(Loading.EAGER));
- assertThat(parentFieldMapper.getChildJoinFieldType().hasDocValues(), is(true));
- assertThat(parentFieldMapper.getChildJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));
+ assertThat(parentFieldMapper.fieldType().name(), equalTo("_parent#parent"));
+ assertThat(parentFieldMapper.fieldType().fieldDataType().getLoading(), equalTo(Loading.EAGER));
+ assertThat(parentFieldMapper.fieldType().hasDocValues(), is(true));
+ assertThat(parentFieldMapper.fieldType().docValuesType(), equalTo(DocValuesType.SORTED));
}
public void testPost2Dot0EagerGlobalOrdinalsLoading() {
@@ -83,10 +83,10 @@ public class ParentFieldMapperTests extends ESTestCase {
assertThat(parentFieldMapper.getParentJoinFieldType().hasDocValues(), is(true));
assertThat(parentFieldMapper.getParentJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));
- assertThat(parentFieldMapper.getChildJoinFieldType().name(), equalTo("_parent#parent"));
- assertThat(parentFieldMapper.getChildJoinFieldType().fieldDataType().getLoading(), equalTo(Loading.EAGER_GLOBAL_ORDINALS));
- assertThat(parentFieldMapper.getChildJoinFieldType().hasDocValues(), is(true));
- assertThat(parentFieldMapper.getChildJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));
+ assertThat(parentFieldMapper.fieldType().name(), equalTo("_parent#parent"));
+ assertThat(parentFieldMapper.fieldType().fieldDataType().getLoading(), equalTo(Loading.EAGER_GLOBAL_ORDINALS));
+ assertThat(parentFieldMapper.fieldType().hasDocValues(), is(true));
+ assertThat(parentFieldMapper.fieldType().docValuesType(), equalTo(DocValuesType.SORTED));
}
private static Settings post2Dot0IndexSettings() {
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java b/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java
index 89e6630ffa..0cd6fa0e1c 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java
@@ -54,9 +54,9 @@ public class StoredNumericValuesTests extends ESSingleNodeTestCase {
.startObject()
.startObject("type")
.startObject("properties")
- .startObject("field1").field("type", "integer").field("store", "yes").endObject()
- .startObject("field2").field("type", "float").field("store", "yes").endObject()
- .startObject("field3").field("type", "long").field("store", "yes").endObject()
+ .startObject("field1").field("type", "integer").field("store", true).endObject()
+ .startObject("field2").field("type", "float").field("store", true).endObject()
+ .startObject("field3").field("type", "long").field("store", true).endObject()
.endObject()
.endObject()
.endObject()
@@ -80,9 +80,9 @@ public class StoredNumericValuesTests extends ESSingleNodeTestCase {
Document doc2 = new Document();
doc2.add(new StoredField("field1", new BytesRef(Numbers.intToBytes(1))));
doc2.add(new StoredField("field2", new BytesRef(Numbers.floatToBytes(1.1f))));
- doc2.add(new StoredField("field3", new BytesRef(Numbers.longToBytes(1l))));
- doc2.add(new StoredField("field3", new BytesRef(Numbers.longToBytes(2l))));
- doc2.add(new StoredField("field3", new BytesRef(Numbers.longToBytes(3l))));
+ doc2.add(new StoredField("field3", new BytesRef(Numbers.longToBytes(1L))));
+ doc2.add(new StoredField("field3", new BytesRef(Numbers.longToBytes(2L))));
+ doc2.add(new StoredField("field3", new BytesRef(Numbers.longToBytes(3L))));
writer.addDocument(doc2);
DirectoryReader reader = DirectoryReader.open(writer, true);
@@ -98,9 +98,9 @@ public class StoredNumericValuesTests extends ESSingleNodeTestCase {
assertThat(fieldsVisitor.fields().get("field2").size(), equalTo(1));
assertThat((Float) fieldsVisitor.fields().get("field2").get(0), equalTo(1.1f));
assertThat(fieldsVisitor.fields().get("field3").size(), equalTo(3));
- assertThat((Long) fieldsVisitor.fields().get("field3").get(0), equalTo(1l));
- assertThat((Long) fieldsVisitor.fields().get("field3").get(1), equalTo(2l));
- assertThat((Long) fieldsVisitor.fields().get("field3").get(2), equalTo(3l));
+ assertThat((Long) fieldsVisitor.fields().get("field3").get(0), equalTo(1L));
+ assertThat((Long) fieldsVisitor.fields().get("field3").get(1), equalTo(2L));
+ assertThat((Long) fieldsVisitor.fields().get("field3").get(2), equalTo(3L));
// Make sure the doc gets loaded as if it was stored in the new way
fieldsVisitor.reset();
@@ -112,9 +112,9 @@ public class StoredNumericValuesTests extends ESSingleNodeTestCase {
assertThat(fieldsVisitor.fields().get("field2").size(), equalTo(1));
assertThat((Float) fieldsVisitor.fields().get("field2").get(0), equalTo(1.1f));
assertThat(fieldsVisitor.fields().get("field3").size(), equalTo(3));
- assertThat((Long) fieldsVisitor.fields().get("field3").get(0), equalTo(1l));
- assertThat((Long) fieldsVisitor.fields().get("field3").get(1), equalTo(2l));
- assertThat((Long) fieldsVisitor.fields().get("field3").get(2), equalTo(3l));
+ assertThat((Long) fieldsVisitor.fields().get("field3").get(0), equalTo(1L));
+ assertThat((Long) fieldsVisitor.fields().get("field3").get(1), equalTo(2L));
+ assertThat((Long) fieldsVisitor.fields().get("field3").get(2), equalTo(3L));
reader.close();
writer.close();
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationIT.java
index e4892583cf..347e4dd920 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationIT.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationIT.java
@@ -67,11 +67,11 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("my-index")
.setQuery(matchQuery("title", "multi"))
.get();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch("my-index")
.setQuery(matchQuery("title.not_analyzed", "Multi fields"))
.get();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
assertAcked(
client().admin().indices().preparePutMapping("my-index").setType("my-type")
@@ -98,7 +98,7 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
searchResponse = client().prepareSearch("my-index")
.setQuery(matchQuery("title.uncased", "Multi"))
.get();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
}
public void testGeoPointMultiField() throws Exception {
@@ -127,9 +127,9 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
SearchResponse countResponse = client().prepareSearch("my-index").setSize(0)
.setQuery(constantScoreQuery(geoDistanceQuery("a").point(51, 19).distance(50, DistanceUnit.KILOMETERS)))
.get();
- assertThat(countResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(countResponse.getHits().totalHits(), equalTo(1L));
countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", point.toString())).get();
- assertThat(countResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(countResponse.getHits().totalHits(), equalTo(1L));
}
public void testTokenCountMultiField() throws Exception {
@@ -167,7 +167,7 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
client().prepareIndex("my-index", "my-type", "1").setSource("a", "my tokens").setRefresh(true).get();
SearchResponse countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", "my tokens")).get();
- assertThat(countResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(countResponse.getHits().totalHits(), equalTo(1L));
}
public void testCompletionMultiField() throws Exception {
@@ -192,7 +192,7 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
client().prepareIndex("my-index", "my-type", "1").setSource("a", "complete me").setRefresh(true).get();
SearchResponse countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", "complete me")).get();
- assertThat(countResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(countResponse.getHits().totalHits(), equalTo(1L));
}
public void testIpMultiField() throws Exception {
@@ -217,7 +217,7 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
client().prepareIndex("my-index", "my-type", "1").setSource("a", "127.0.0.1").setRefresh(true).get();
SearchResponse countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", "127.0.0.1")).get();
- assertThat(countResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(countResponse.getHits().totalHits(), equalTo(1L));
}
private XContentBuilder createMappingSource(String fieldType) throws IOException {
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java
index e68817e9ea..e0c55a85e0 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java
@@ -23,6 +23,7 @@ import org.apache.lucene.analysis.NumericTokenStream;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.DocValuesType;
+import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -281,23 +282,98 @@ public class SimpleNumericTests extends ESSingleNodeTestCase {
public void testDocValues() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
+ .startObject("int1")
+ .field("type", "integer")
+ .endObject()
+ .startObject("int2")
+ .field("type", "integer")
+ .field("index", false)
+ .endObject()
+ .startObject("double1")
+ .field("type", "double")
+ .endObject()
+ .startObject("double2")
+ .field("type", "double")
+ .field("index", false)
+ .endObject()
+ .endObject()
+ .endObject().endObject().string();
+
+ DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
+
+ ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+ .startObject()
+ .field("int1", "1234")
+ .field("double1", "1234")
+ .field("int2", "1234")
+ .field("double2", "1234")
+ .endObject()
+ .bytes());
+ Document doc = parsedDoc.rootDoc();
+ assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "int1"));
+ assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "double1"));
+ assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "int2"));
+ assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "double2"));
+
+ }
+
+ public void testBwCompatDocValues() throws Exception {
+ String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+ .startObject("properties")
+ .startObject("int1")
+ .field("type", "integer")
+ .endObject()
+ .startObject("int2")
+ .field("type", "integer")
+ .field("index", "no")
+ .endObject()
+ .startObject("double1")
+ .field("type", "double")
+ .endObject()
+ .startObject("double2")
+ .field("type", "double")
+ .field("index", "no")
+ .endObject()
+ .endObject()
+ .endObject().endObject().string();
+
+ Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_2_0).build();
+ DocumentMapper defaultMapper = createIndex("test", oldIndexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
+
+ ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
+ .startObject()
+ .field("int1", "1234")
+ .field("double1", "1234")
+ .field("int2", "1234")
+ .field("double2", "1234")
+ .endObject()
+ .bytes());
+ Document doc = parsedDoc.rootDoc();
+ assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "int1"));
+ assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "double1"));
+ assertEquals(DocValuesType.NONE, SimpleStringMappingTests.docValuesType(doc, "int2"));
+ assertEquals(DocValuesType.NONE, SimpleStringMappingTests.docValuesType(doc, "double2"));
+ }
+
+ public void testUnIndex() throws IOException {
+ String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+ .startObject("properties")
.startObject("int")
.field("type", "integer")
- .startObject("fielddata")
- .field("format", "doc_values")
- .endObject()
+ .field("index", false)
.endObject()
.startObject("double")
.field("type", "double")
- .startObject("fielddata")
- .field("format", "doc_values")
- .endObject()
+ .field("index", false)
.endObject()
.endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
+ assertEquals("{\"type\":{\"properties\":{\"double\":{\"type\":\"double\",\"index\":false},\"int\":{\"type\":\"integer\",\"index\":false}}}}",
+ defaultMapper.mapping().toString());
+
ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("int", "1234")
@@ -305,8 +381,32 @@ public class SimpleNumericTests extends ESSingleNodeTestCase {
.endObject()
.bytes());
final Document doc = parsedDoc.rootDoc();
- assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "int"));
- assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "double"));
+ for (IndexableField field : doc.getFields("int")) {
+ assertEquals(IndexOptions.NONE, field.fieldType().indexOptions());
+ }
+ for (IndexableField field : doc.getFields("double")) {
+ assertEquals(IndexOptions.NONE, field.fieldType().indexOptions());
+ }
+ }
+
+ public void testBwCompatIndex() throws IOException {
+ String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+ .startObject("properties")
+ .startObject("int")
+ .field("type", "integer")
+ .field("index", "no")
+ .endObject()
+ .startObject("double")
+ .field("type", "double")
+ .field("index", "not_analyzed")
+ .endObject()
+ .endObject()
+ .endObject().endObject().string();
+
+ Settings oldSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_2_0).build();
+ DocumentMapper defaultMapper = createIndex("test", oldSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
+ assertEquals("{\"type\":{\"properties\":{\"double\":{\"type\":\"double\"},\"int\":{\"type\":\"integer\",\"index\":false}}}}",
+ defaultMapper.mapping().toString());
}
public void testDocValuesOnNested() throws Exception {
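The new tests above pin down the same convention for the numeric `index` option: a boolean on current indices, with the old "no"/"not_analyzed" strings still accepted on indices created as 2.x (V_2_2_0 in testBwCompatIndex). A minimal sketch of the two forms, with a hypothetical "count" field:

    // Current indices: boolean form.
    XContentBuilder current = XContentFactory.jsonBuilder().startObject()
        .startObject("type").startObject("properties")
            .startObject("count").field("type", "integer").field("index", false).endObject()
        .endObject().endObject()
    .endObject();

    // 2.x-created indices: the legacy string form still parses.
    XContentBuilder legacy = XContentFactory.jsonBuilder().startObject()
        .startObject("type").startObject("properties")
            .startObject("count").field("type", "integer").field("index", "no").endObject()
        .endObject().endObject()
    .endObject();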
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java
index f6bbde47e9..41dd8d957b 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java
@@ -55,6 +55,6 @@ public class ParentMappingTests extends ESSingleNodeTestCase {
.endObject()
.bytes()).type("type").id("1").parent("1122"));
- assertEquals(Uid.createUid("p_type", "1122"), doc.rootDoc().get("_parent"));
+ assertEquals("1122", doc.rootDoc().getBinaryValue("_parent#p_type").utf8ToString());
}
}
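The updated assertion reflects the parent/child storage change: the parent id is no longer a `p_type#1122` uid in a shared `_parent` field but lives in a per-type join field named `_parent#<parent type>`. A minimal sketch of reading it back, assuming a ParsedDocument `doc` like the one in the test:

    import org.apache.lucene.util.BytesRef;

    // The join field is keyed by the parent type ("p_type"), not the child type.
    BytesRef joinValue = doc.rootDoc().getBinaryValue("_parent#p_type");
    assertEquals("1122", joinValue.utf8ToString());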
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java
index d32dcad543..6114185ccf 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java
@@ -33,7 +33,6 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.IndexService;
-import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
@@ -45,11 +44,14 @@ import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper.Builder;
+import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
+import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.VersionUtils;
import org.junit.Before;
import java.util.Arrays;
+import java.util.Collection;
import java.util.Map;
import static java.util.Collections.emptyMap;
@@ -61,7 +63,11 @@ import static org.hamcrest.Matchers.nullValue;
/**
*/
public class SimpleStringMappingTests extends ESSingleNodeTestCase {
- private static Settings DOC_VALUES_SETTINGS = Settings.builder().put(FieldDataType.FORMAT_KEY, FieldDataType.DOC_VALUES_FORMAT_VALUE).build();
+
+ @Override
+ protected Collection<Class<? extends Plugin>> getPlugins() {
+ return pluginList(InternalSettingsPlugin.class);
+ }
IndexService indexService;
DocumentMapperParser parser;
@@ -363,47 +369,71 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPayloads(), equalTo(true));
}
- public void testDocValuesFielddata() throws Exception {
- IndexService indexService = createIndex("index");
- DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
+ public void testDocValues() throws Exception {
+ // doc values only work on non-analyzed content
final BuilderContext ctx = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1));
+ try {
+ new StringFieldMapper.Builder("anything").docValues(true).build(ctx);
+ fail();
+ } catch (Exception e) { /* OK */ }
assertFalse(new Builder("anything").index(false).build(ctx).fieldType().hasDocValues());
- assertTrue(new Builder("anything").index(false).fieldDataSettings(DOC_VALUES_SETTINGS).build(ctx).fieldType().hasDocValues());
+ assertTrue(new Builder("anything").index(true).tokenized(false).build(ctx).fieldType().hasDocValues());
+ assertFalse(new Builder("anything").index(true).tokenized(true).build(ctx).fieldType().hasDocValues());
+ assertFalse(new Builder("anything").index(false).tokenized(false).docValues(false).build(ctx).fieldType().hasDocValues());
assertTrue(new Builder("anything").index(false).docValues(true).build(ctx).fieldType().hasDocValues());
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties")
- .startObject("str1")
- .field("type", "string")
- .startObject("fielddata")
- .field("format", "paged_bytes")
+ .startObject("properties")
+ .startObject("str1")
+ .field("type", "string")
+ .field("index", "no")
.endObject()
- .endObject()
- .startObject("str2")
- .field("type", "string")
- .field("index", "not_analyzed")
- .startObject("fielddata")
- .field("format", "doc_values")
+ .startObject("str2")
+ .field("type", "string")
+ .field("index", "not_analyzed")
+ .endObject()
+ .startObject("str3")
+ .field("type", "string")
+ .field("index", "analyzed")
.endObject()
- .endObject()
- .endObject()
- .endObject().endObject().string();
+ .startObject("str4")
+ .field("type", "string")
+ .field("index", "not_analyzed")
+ .field("doc_values", false)
+ .endObject()
+ .startObject("str5")
+ .field("type", "string")
+ .field("index", "no")
+ .field("doc_values", false)
+ .endObject()
+ .endObject()
+ .endObject().endObject().string();
DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
- .startObject()
- .field("str1", "1234")
- .field("str2", "1234")
- .endObject()
- .bytes());
+ .startObject()
+ .field("str1", "1234")
+ .field("str2", "1234")
+ .field("str3", "1234")
+ .field("str4", "1234")
+ .field("str5", "1234")
+ .endObject()
+ .bytes());
final Document doc = parsedDoc.rootDoc();
assertEquals(DocValuesType.NONE, docValuesType(doc, "str1"));
assertEquals(DocValuesType.SORTED_SET, docValuesType(doc, "str2"));
+ assertEquals(DocValuesType.NONE, docValuesType(doc, "str3"));
+ assertEquals(DocValuesType.NONE, docValuesType(doc, "str4"));
+ assertEquals(DocValuesType.NONE, docValuesType(doc, "str5"));
+
}
- public void testDocValues() throws Exception {
+ public void testBwCompatDocValues() throws Exception {
+ Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_2_0).build();
+ indexService = createIndex("test_old", oldIndexSettings);
+ parser = indexService.mapperService().documentMapperParser();
// doc values only work on non-analyzed content
final BuilderContext ctx = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1));
try {
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingOnClusterIT.java b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingOnClusterIT.java
index 35034dfd91..8b6aa79406 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingOnClusterIT.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingOnClusterIT.java
@@ -167,7 +167,7 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase {
private void compareMappingOnNodes(GetMappingsResponse previousMapping) {
// make sure all nodes have same cluster state
- for (Client client : cluster()) {
+ for (Client client : cluster().getClients()) {
GetMappingsResponse currentMapping = client.admin().indices().prepareGetMappings(INDEX).addTypes(TYPE).setLocal(true).get();
assertThat(previousMapping.getMappings().get(INDEX).get(TYPE).source(), equalTo(currentMapping.getMappings().get(INDEX).get(TYPE).source()));
}
diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java
index 52aa7ea48f..9f2b33b96b 100644
--- a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java
+++ b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java
@@ -22,7 +22,6 @@ package org.elasticsearch.index.query;
import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.io.JsonStringEncoder;
-
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
@@ -88,7 +87,9 @@ import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.Script.ScriptParseException;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptContextRegistry;
+import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.script.ScriptSettings;
import org.elasticsearch.search.SearchModule;
@@ -186,16 +187,16 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
Version version = randomBoolean() ? Version.CURRENT : VersionUtils.randomVersionBetween(random(), Version.V_2_0_0_beta1, Version.CURRENT);
Settings settings = Settings.settingsBuilder()
.put("name", AbstractQueryTestCase.class.toString())
- .put("path.home", createTempDir())
- .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING, false)
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
+ .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false)
.build();
Settings indexSettings = Settings.settingsBuilder()
.put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
- index = new Index(randomAsciiOfLengthBetween(1, 10));
+ index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_");
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings);
final TestClusterService clusterService = new TestClusterService();
clusterService.setState(new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder().put(
- new IndexMetaData.Builder(index.name()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0))));
+ new IndexMetaData.Builder(index.getName()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0))));
SettingsModule settingsModule = new SettingsModule(settings, new SettingsFilter(settings));
settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED);
final Client proxy = (Client) Proxy.newProxyInstance(
@@ -214,13 +215,13 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
bindMapperExtension();
}
},
- new ScriptModule(settings) {
+ new ScriptModule(settingsModule) {
@Override
protected void configure() {
Settings settings = Settings.builder()
- .put("path.home", createTempDir())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
// no file watching, so we don't need a ResourceWatcherService
- .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING, false)
+ .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false)
.build();
MockScriptEngine mockScriptEngine = new MockScriptEngine();
Multibinder<ScriptEngineService> multibinder = Multibinder.newSetBinder(binder(), ScriptEngineService.class);
@@ -228,9 +229,14 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
Set<ScriptEngineService> engines = new HashSet<>();
engines.add(mockScriptEngine);
List<ScriptContext.Plugin> customContexts = new ArrayList<>();
- bind(ScriptContextRegistry.class).toInstance(new ScriptContextRegistry(customContexts));
+ ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class, MockScriptEngine.TYPES)));
+ bind(ScriptEngineRegistry.class).toInstance(scriptEngineRegistry);
+ ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(customContexts);
+ bind(ScriptContextRegistry.class).toInstance(scriptContextRegistry);
+ ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
+ bind(ScriptSettings.class).toInstance(scriptSettings);
try {
- ScriptService scriptService = new ScriptService(settings, new Environment(settings), engines, null, new ScriptContextRegistry(customContexts));
+ ScriptService scriptService = new ScriptService(settings, new Environment(settings), engines, null, scriptEngineRegistry, scriptContextRegistry, scriptSettings);
bind(ScriptService.class).toInstance(scriptService);
} catch(IOException e) {
throw new IllegalStateException("error while binding ScriptService", e);
diff --git a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java
index b15513222a..30dbcdf4b6 100644
--- a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java
@@ -280,7 +280,7 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase<BoolQueryBuilde
.minimumNumberShouldMatch("3")
.disableCoord(true)
.buildAsBytes()).toQuery(createShardContext());
- assertEquals(0, bq.getMinimumNumberShouldMatch());
+ assertEquals(3, bq.getMinimumNumberShouldMatch());
}
public void testFromJson() throws IOException {
diff --git a/core/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java
index c9f5a268bd..7ccad1ffd2 100644
--- a/core/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java
@@ -141,8 +141,8 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase<FuzzyQueryBuil
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
assertThat(parsedQuery, instanceOf(NumericRangeQuery.class));
NumericRangeQuery fuzzyQuery = (NumericRangeQuery) parsedQuery;
- assertThat(fuzzyQuery.getMin().longValue(), equalTo(7l));
- assertThat(fuzzyQuery.getMax().longValue(), equalTo(17l));
+ assertThat(fuzzyQuery.getMin().longValue(), equalTo(7L));
+ assertThat(fuzzyQuery.getMax().longValue(), equalTo(17L));
}
public void testFromJson() throws IOException {
diff --git a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java
index 15d37150b3..8a2792eaf4 100644
--- a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java
@@ -85,18 +85,6 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
final MapperService mapperService = queryShardContext().getMapperService();
final IndexFieldDataService fieldData = indexFieldDataService();
TestSearchContext testSearchContext = new TestSearchContext() {
- private InnerHitsContext context;
-
-
- @Override
- public void innerHits(InnerHitsContext innerHitsContext) {
- context = innerHitsContext;
- }
-
- @Override
- public InnerHitsContext innerHits() {
- return context;
- }
@Override
public MapperService mapperService() {
@@ -149,7 +137,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
assertEquals(innerHits.sort().getSort().length, 1);
assertEquals(innerHits.sort().getSort()[0].getField(), STRING_FIELD_NAME);
} else {
- assertNull(SearchContext.current().innerHits());
+ assertThat(SearchContext.current().innerHits().getInnerHits().size(), equalTo(0));
}
}
}
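The assertion change here (and in the parallel has_parent and nested tests below) follows from SearchContext.innerHits() now returning an always-present InnerHitsContext rather than null when no inner hits were requested. A minimal sketch of the new contract:

    import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
    import org.elasticsearch.search.internal.SearchContext;

    // Absence of inner hits is an empty map, not a null context.
    InnerHitsContext innerHits = SearchContext.current().innerHits();
    assertEquals(0, innerHits.getInnerHits().size());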
diff --git a/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java
index 9c38df3e64..887564123b 100644
--- a/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/HasParentQueryBuilderTests.java
@@ -80,18 +80,6 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
final MapperService mapperService = queryShardContext().getMapperService();
final IndexFieldDataService fieldData = indexFieldDataService();
TestSearchContext testSearchContext = new TestSearchContext() {
- private InnerHitsContext context;
-
-
- @Override
- public void innerHits(InnerHitsContext innerHitsContext) {
- context = innerHitsContext;
- }
-
- @Override
- public InnerHitsContext innerHits() {
- return context;
- }
@Override
public MapperService mapperService() {
@@ -139,7 +127,7 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
assertEquals(innerHits.sort().getSort().length, 1);
assertEquals(innerHits.sort().getSort()[0].getField(), STRING_FIELD_NAME);
} else {
- assertNull(SearchContext.current().innerHits());
+ assertThat(SearchContext.current().innerHits().getInnerHits().size(), equalTo(0));
}
}
}
diff --git a/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java
index 9de7a87d81..9f202a3f16 100644
--- a/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java
@@ -27,16 +27,17 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.support.QueryInnerHits;
import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.TestSearchContext;
import java.io.IOException;
+import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBuilder> {
@@ -60,18 +61,6 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBu
final MapperService mapperService = queryShardContext().getMapperService();
final IndexFieldDataService fieldData = indexFieldDataService();
TestSearchContext testSearchContext = new TestSearchContext() {
- private InnerHitsContext context;
-
-
- @Override
- public void innerHits(InnerHitsContext innerHitsContext) {
- context = innerHitsContext;
- }
-
- @Override
- public InnerHitsContext innerHits() {
- return context;
- }
@Override
public MapperService mapperService() {
@@ -119,7 +108,7 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBu
assertEquals(innerHits.sort().getSort().length, 1);
assertEquals(innerHits.sort().getSort()[0].getField(), STRING_FIELD_NAME);
} else {
- assertNull(SearchContext.current().innerHits());
+ assertThat(SearchContext.current().innerHits().getInnerHits().size(), equalTo(0));
}
}
}
diff --git a/core/src/test/java/org/elasticsearch/index/query/ParentIdQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/ParentIdQueryBuilderTests.java
new file mode 100644
index 0000000000..fdb66c7916
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/index/query/ParentIdQueryBuilderTests.java
@@ -0,0 +1,115 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.query;
+
+import org.apache.lucene.search.DocValuesTermsQuery;
+import org.apache.lucene.search.Query;
+import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
+import org.elasticsearch.common.compress.CompressedXContent;
+import org.elasticsearch.index.fielddata.IndexFieldDataService;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
+import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.test.TestSearchContext;
+import org.hamcrest.Matchers;
+
+import java.io.IOException;
+
+public class ParentIdQueryBuilderTests extends AbstractQueryTestCase<ParentIdQueryBuilder> {
+
+ protected static final String PARENT_TYPE = "parent";
+ protected static final String CHILD_TYPE = "child";
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ MapperService mapperService = queryShardContext().getMapperService();
+ mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
+ STRING_FIELD_NAME, "type=string",
+ INT_FIELD_NAME, "type=integer",
+ DOUBLE_FIELD_NAME, "type=double",
+ BOOLEAN_FIELD_NAME, "type=boolean",
+ DATE_FIELD_NAME, "type=date",
+ OBJECT_FIELD_NAME, "type=object"
+ ).string()), MapperService.MergeReason.MAPPING_UPDATE, false);
+ mapperService.merge(CHILD_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
+ "_parent", "type=" + PARENT_TYPE,
+ STRING_FIELD_NAME, "type=string",
+ INT_FIELD_NAME, "type=integer",
+ DOUBLE_FIELD_NAME, "type=double",
+ BOOLEAN_FIELD_NAME, "type=boolean",
+ DATE_FIELD_NAME, "type=date",
+ OBJECT_FIELD_NAME, "type=object"
+ ).string()), MapperService.MergeReason.MAPPING_UPDATE, false);
+ }
+
+ @Override
+ protected void setSearchContext(String[] types) {
+ final MapperService mapperService = queryShardContext().getMapperService();
+ final IndexFieldDataService fieldData = indexFieldDataService();
+ TestSearchContext testSearchContext = new TestSearchContext() {
+
+ @Override
+ public MapperService mapperService() {
+ return mapperService; // need to build / parse inner hits sort fields
+ }
+
+ @Override
+ public IndexFieldDataService fieldData() {
+ return fieldData; // need to build / parse inner hits sort fields
+ }
+ };
+ testSearchContext.setTypes(types);
+ SearchContext.setCurrent(testSearchContext);
+ }
+
+ @Override
+ protected ParentIdQueryBuilder doCreateTestQueryBuilder() {
+ return new ParentIdQueryBuilder(CHILD_TYPE, randomAsciiOfLength(4));
+ }
+
+ @Override
+ protected void doAssertLuceneQuery(ParentIdQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
+ assertThat(query, Matchers.instanceOf(DocValuesTermsQuery.class));
+ DocValuesTermsQuery termsQuery = (DocValuesTermsQuery) query;
+ // there are no getters to get the field and terms on DocValuesTermsQuery, so let's validate by creating a
+ // new query based on the builder:
+ assertThat(termsQuery, Matchers.equalTo(new DocValuesTermsQuery("_parent#" + PARENT_TYPE, queryBuilder.getId())));
+ }
+
+ public void testFromJson() throws IOException {
+ String query =
+ "{\n" +
+ " \"parent_id\" : {\n" +
+ " \"type\" : \"child\",\n" +
+ " \"id\" : \"123\",\n" +
+ " \"boost\" : 3.0,\n" +
+ " \"_name\" : \"name\"" +
+ " }\n" +
+ "}";
+ ParentIdQueryBuilder queryBuilder = (ParentIdQueryBuilder) parseQuery(query);
+ checkGeneratedJson(query, queryBuilder);
+ assertThat(queryBuilder.getType(), Matchers.equalTo("child"));
+ assertThat(queryBuilder.getId(), Matchers.equalTo("123"));
+ assertThat(queryBuilder.boost(), Matchers.equalTo(3f));
+ assertThat(queryBuilder.queryName(), Matchers.equalTo("name"));
+ }
+
+}
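Besides the JSON form exercised in testFromJson, the new parent_id query can be constructed directly through the builder; a minimal usage sketch, assuming an integration-test client() and hypothetical index name and ids:

    import org.elasticsearch.action.search.SearchResponse;

    // Matches children of parent "123"; per doAssertLuceneQuery this lowers to a
    // DocValuesTermsQuery on the "_parent#parent" join field.
    SearchResponse response = client().prepareSearch("my-index")
        .setQuery(new ParentIdQueryBuilder("child", "123"))
        .get();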
diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryDSLDocumentationTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryDSLDocumentationTests.java
index 3e89949821..cb9177369c 100644
--- a/core/src/test/java/org/elasticsearch/index/query/QueryDSLDocumentationTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/QueryDSLDocumentationTests.java
@@ -138,6 +138,7 @@ public class QueryDSLDocumentationTests extends ESTestCase {
functionScoreQuery(functions);
}
+ @SuppressWarnings("deprecation") // fuzzy queries will be removed in 4.0
public void testFuzzy() {
fuzzyQuery("name", "kimchy");
}
diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java
new file mode 100644
index 0000000000..f78700d4a1
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.index.query;
+
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.mapper.core.StringFieldMapper;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.Collections;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
+import static org.hamcrest.Matchers.sameInstance;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class QueryShardContextTests extends ESTestCase {
+
+ public void testFailIfFieldMappingNotFound() {
+ IndexMetaData.Builder indexMetadata = new IndexMetaData.Builder("index");
+ indexMetadata.settings(Settings.builder().put("index.version.created", Version.CURRENT)
+ .put("index.number_of_shards", 1)
+ .put("index.number_of_replicas", 1)
+ );
+ IndexSettings indexSettings = new IndexSettings(indexMetadata.build(), Settings.EMPTY);
+ MapperService mapperService = mock(MapperService.class);
+ when(mapperService.getIndexSettings()).thenReturn(indexSettings);
+ QueryShardContext context = new QueryShardContext(
+ indexSettings, null, null, null, mapperService, null, null, null
+ );
+
+ context.setAllowUnmappedFields(false);
+ MappedFieldType fieldType = new StringFieldMapper.StringFieldType();
+ MappedFieldType result = context.failIfFieldMappingNotFound("name", fieldType);
+ assertThat(result, sameInstance(fieldType));
+ try {
+ context.failIfFieldMappingNotFound("name", null);
+ fail("exception expected");
+ } catch (QueryShardException e) {
+ assertThat(e.getMessage(), equalTo("No field mapping can be found for the field with name [name]"));
+ }
+
+ context.setAllowUnmappedFields(true);
+ result = context.failIfFieldMappingNotFound("name", fieldType);
+ assertThat(result, sameInstance(fieldType));
+ result = context.failIfFieldMappingNotFound("name", null);
+ assertThat(result, nullValue());
+
+ context.setAllowUnmappedFields(false);
+ context.setMapUnmappedFieldAsString(true);
+ result = context.failIfFieldMappingNotFound("name", fieldType);
+ assertThat(result, sameInstance(fieldType));
+ result = context.failIfFieldMappingNotFound("name", null);
+ assertThat(result, notNullValue());
+ assertThat(result, instanceOf(StringFieldMapper.StringFieldType.class));
+ assertThat(result.name(), equalTo("name"));
+ }
+
+}
diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
index 40062742b3..6a20bc78ab 100644
--- a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
@@ -302,8 +302,8 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
Query query = queryStringQuery("12~0.2").defaultField(INT_FIELD_NAME).toQuery(createShardContext());
NumericRangeQuery fuzzyQuery = (NumericRangeQuery) query;
- assertThat(fuzzyQuery.getMin().longValue(), equalTo(12l));
- assertThat(fuzzyQuery.getMax().longValue(), equalTo(12l));
+ assertThat(fuzzyQuery.getMin().longValue(), equalTo(12L));
+ assertThat(fuzzyQuery.getMax().longValue(), equalTo(12L));
}
public void testTimezone() throws Exception {
diff --git a/core/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java
index ffaf211b24..f7dc3b2d8e 100644
--- a/core/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java
@@ -242,7 +242,7 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
TermsQueryBuilder builder = new TermsQueryBuilder("foo", new long[]{1, 3, 4});
TermsQueryBuilder copy = assertSerialization(builder);
List<Object> values = copy.values();
- assertEquals(Arrays.asList(1l, 3l, 4l), values);
+ assertEquals(Arrays.asList(1L, 3L, 4L), values);
}
}
diff --git a/core/src/test/java/org/elasticsearch/index/query/plugin/CustomQueryParserIT.java b/core/src/test/java/org/elasticsearch/index/query/plugin/CustomQueryParserIT.java
index 960a43416d..886be82c36 100644
--- a/core/src/test/java/org/elasticsearch/index/query/plugin/CustomQueryParserIT.java
+++ b/core/src/test/java/org/elasticsearch/index/query/plugin/CustomQueryParserIT.java
@@ -59,11 +59,11 @@ public class CustomQueryParserIT extends ESIntegTestCase {
}
public void testCustomDummyQuery() {
- assertHitCount(client().prepareSearch("index").setQuery(new DummyQueryParserPlugin.DummyQueryBuilder()).get(), 1l);
+ assertHitCount(client().prepareSearch("index").setQuery(new DummyQueryParserPlugin.DummyQueryBuilder()).get(), 1L);
}
public void testCustomDummyQueryWithinBooleanQuery() {
- assertHitCount(client().prepareSearch("index").setQuery(new BoolQueryBuilder().must(new DummyQueryParserPlugin.DummyQueryBuilder())).get(), 1l);
+ assertHitCount(client().prepareSearch("index").setQuery(new BoolQueryBuilder().must(new DummyQueryParserPlugin.DummyQueryBuilder())).get(), 1L);
}
private static QueryShardContext queryShardContext() {
diff --git a/core/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java b/core/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java
index 65dfd8a5af..4ef84d118f 100644
--- a/core/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java
+++ b/core/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java
@@ -38,7 +38,6 @@ import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.search.join.ToParentBlockJoinQuery;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.lucene.search.Queries;
-import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.AbstractFieldDataTestCase;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource;
@@ -220,7 +219,7 @@ public abstract class AbstractNumberNestedSortingTestCase extends AbstractFieldD
MultiValueMode sortMode = MultiValueMode.SUM;
DirectoryReader directoryReader = DirectoryReader.open(writer, false);
- directoryReader = ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(new Index("test"), 0));
+ directoryReader = ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(indexService.index(), 0));
IndexSearcher searcher = new IndexSearcher(directoryReader);
Query parentFilter = new TermQuery(new Term("__type", "parent"));
Query childFilter = Queries.not(parentFilter);
diff --git a/core/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java b/core/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java
index a58fea831d..ff82b7c43a 100644
--- a/core/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java
@@ -43,7 +43,6 @@ import org.apache.lucene.util.TestUtil;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.AbstractFieldDataTestCase;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
@@ -95,7 +94,7 @@ public class NestedSortingTests extends AbstractFieldDataTestCase {
MultiValueMode sortMode = randomFrom(Arrays.asList(MultiValueMode.MIN, MultiValueMode.MAX));
DirectoryReader reader = DirectoryReader.open(writer, false);
- reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(new Index("test"), 0));
+ reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(indexService.index(), 0));
IndexSearcher searcher = new IndexSearcher(reader);
PagedBytesIndexFieldData indexFieldData1 = getForField("f");
IndexFieldData<?> indexFieldData2 = NoOrdinalsStringFieldDataTests.hideOrdinals(indexFieldData1);
@@ -280,7 +279,7 @@ public class NestedSortingTests extends AbstractFieldDataTestCase {
MultiValueMode sortMode = MultiValueMode.MIN;
DirectoryReader reader = DirectoryReader.open(writer, false);
- reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(new Index("test"), 0));
+ reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(indexService.index(), 0));
IndexSearcher searcher = new IndexSearcher(reader);
PagedBytesIndexFieldData indexFieldData = getForField("field2");
Query parentFilter = new TermQuery(new Term("__type", "parent"));
diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java
index b7f2dd09f3..cf95f22ae3 100644
--- a/core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java
@@ -56,7 +56,7 @@ public class IndexSearcherWrapperTests extends ESTestCase {
doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
doc.add(new TextField("field", "doc", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
writer.addDocument(doc);
- DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", 1));
+ DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1));
IndexSearcher searcher = new IndexSearcher(open);
assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits);
final AtomicInteger closeCalls = new AtomicInteger(0);
@@ -106,7 +106,7 @@ public class IndexSearcherWrapperTests extends ESTestCase {
doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
doc.add(new TextField("field", "doc", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
writer.addDocument(doc);
- DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", 1));
+ DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1));
IndexSearcher searcher = new IndexSearcher(open);
assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits);
searcher.setSimilarity(iwc.getSimilarity());
@@ -148,7 +148,7 @@ public class IndexSearcherWrapperTests extends ESTestCase {
doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
doc.add(new TextField("field", "doc", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
writer.addDocument(doc);
- DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", 1));
+ DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1));
IndexSearcher searcher = new IndexSearcher(open);
assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits);
searcher.setSimilarity(iwc.getSimilarity());
@@ -168,7 +168,7 @@ public class IndexSearcherWrapperTests extends ESTestCase {
doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
doc.add(new TextField("field", "doc", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
writer.addDocument(doc);
- DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", 1));
+ DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "_na_", 1));
IndexSearcher searcher = new IndexSearcher(open);
assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits);
searcher.setSimilarity(iwc.getSimilarity());
@@ -258,7 +258,7 @@ public class IndexSearcherWrapperTests extends ESTestCase {
public DirectoryReader getDelegate() {
if (hideDelegate) {
try {
- return ElasticsearchDirectoryReader.wrap(super.getDelegate(), new ShardId("foo", 1));
+ return ElasticsearchDirectoryReader.wrap(super.getDelegate(), new ShardId("foo", "_na_", 1));
} catch (IOException e) {
throw new RuntimeException(e);
}
diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
index fd32091d89..9a4e6a814a 100644
--- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
+++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
@@ -68,6 +68,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.env.ShardLock;
+import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.NodeServicesProvider;
@@ -137,7 +138,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
public void testWriteShardState() throws Exception {
try (NodeEnvironment env = newNodeEnvironment()) {
- ShardId id = new ShardId("foo", 1);
+ ShardId id = new ShardId("foo", "_na_", 1);
long version = between(1, Integer.MAX_VALUE / 2);
boolean primary = randomBoolean();
AllocationId allocationId = randomBoolean() ? null : randomAllocationId();
@@ -163,7 +164,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
createIndex("test");
ensureGreen();
NodeEnvironment env = getInstanceFromNode(NodeEnvironment.class);
- Path[] shardPaths = env.availableShardPaths(new ShardId("test", 0));
+ Path[] shardPaths = env.availableShardPaths(new ShardId("test", "_na_", 0));
logger.info("--> paths: [{}]", (Object)shardPaths);
// Should not be able to acquire the lock because it's already open
try {
@@ -175,7 +176,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
// Test without the regular shard lock, assuming we can acquire it
// (worst case, meaning that the shard lock could be acquired and
// we're green to delete the shard's directory)
- ShardLock sLock = new DummyShardLock(new ShardId("test", 0));
+ ShardLock sLock = new DummyShardLock(new ShardId("test", "_na_", 0));
try {
env.deleteShardDirectoryUnderLock(sLock, IndexSettingsModule.newIndexSettings("test", Settings.EMPTY));
fail("should not have been able to delete the directory");
@@ -250,7 +251,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
ShardStateMetaData shardStateMetaData = load(logger, env.availableShardPaths(shard.shardId));
assertEquals(shardStateMetaData, getShardStateMetadata(shard));
- routing = TestShardRouting.newShardRouting(shard.shardId.index().getName(), shard.shardId.id(), routing.currentNodeId(), null, routing.primary(), ShardRoutingState.INITIALIZING, shard.shardRouting.allocationId(), shard.shardRouting.version() + 1);
+ routing = TestShardRouting.newShardRouting(shard.shardId.getIndex(), shard.shardId.id(), routing.currentNodeId(), null, routing.primary(), ShardRoutingState.INITIALIZING, shard.shardRouting.allocationId(), shard.shardRouting.version() + 1);
shard.updateRoutingEntry(routing, true);
shard.deleteShardState();
@@ -274,7 +275,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
ShardPath shardPath = ShardPath.loadShardPath(logger, env, shard.shardId(), test.getIndexSettings());
assertNotNull(shardPath);
// but index can't be opened for a failed shard
- assertThat("store index should be corrupted", Store.canOpenIndex(logger, shardPath.resolveIndex()), equalTo(false));
+ assertThat("store index should be corrupted", Store.canOpenIndex(logger, shardPath.resolveIndex(), shard.shardId()), equalTo(false));
}
ShardStateMetaData getShardStateMetadata(IndexShard shard) {
@@ -407,7 +408,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
}
private void setDurability(IndexShard shard, Translog.Durability durability) {
- client().admin().indices().prepareUpdateSettings(shard.shardId.getIndex()).setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), durability.name()).build()).get();
+ client().admin().indices().prepareUpdateSettings(shard.shardId.getIndexName()).setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), durability.name()).build()).get();
assertEquals(durability, shard.getTranslogDurability());
}
@@ -441,7 +442,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
client().prepareIndex("test", "bar", "1").setSource("{}").setRefresh(true).get();
client().admin().indices().prepareFlush("test").get();
SearchResponse response = client().prepareSearch("test").get();
- assertHitCount(response, 1l);
+ assertHitCount(response, 1L);
IndicesService indicesService = getInstanceFromNode(IndicesService.class);
IndexService test = indicesService.indexService("test");
IndexShard shard = test.getShardOrNull(0);
@@ -456,7 +457,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
createIndex("test");
ensureGreen("test");
response = client().prepareSearch("test").get();
- assertHitCount(response, 0l);
+ assertHitCount(response, 0L);
}
public void testIndexDirIsDeletedWhenShardRemoved() throws Exception {
@@ -470,7 +471,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
ensureGreen("test");
client().prepareIndex("test", "bar", "1").setSource("{}").setRefresh(true).get();
SearchResponse response = client().prepareSearch("test").get();
- assertHitCount(response, 1l);
+ assertHitCount(response, 1L);
client().admin().indices().prepareDelete("test").get();
assertPathHasBeenCleared(idxPath);
}
@@ -995,7 +996,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
MappedFieldType foo = newShard.mapperService().fullName("foo");
IndexFieldData.Global ifd = shard.indexFieldDataService().getForField(foo);
FieldDataStats before = shard.fieldData().stats("foo");
- assertThat(before.getMemorySizeInBytes(), equalTo(0l));
+ assertThat(before.getMemorySizeInBytes(), equalTo(0L));
FieldDataStats after = null;
try (Engine.Searcher searcher = newShard.acquireSearcher("test")) {
assumeTrue("we have to have more than one segment", searcher.getDirectoryReader().leaves().size() > 1);
@@ -1003,7 +1004,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
after = shard.fieldData().stats("foo");
assertEquals(after.getEvictions(), before.getEvictions());
// If a field doesn't exist an empty IndexFieldData is returned and that isn't cached:
- assertThat(after.getMemorySizeInBytes(), equalTo(0l));
+ assertThat(after.getMemorySizeInBytes(), equalTo(0L));
}
assertEquals(shard.fieldData().stats("foo").getEvictions(), before.getEvictions());
assertEquals(shard.fieldData().stats("foo").getMemorySizeInBytes(), after.getMemorySizeInBytes());
@@ -1087,4 +1088,65 @@ public class IndexShardTests extends ESSingleNodeTestCase {
newShard.performBatchRecovery(operations);
assertFalse(newShard.getTranslog().syncNeeded());
}
+
+ public void testIndexingBufferDuringInternalRecovery() throws IOException {
+ createIndex("index");
+ client().admin().indices().preparePutMapping("index").setType("testtype").setSource(jsonBuilder().startObject()
+ .startObject("testtype")
+ .startObject("properties")
+ .startObject("foo")
+ .field("type", "string")
+ .endObject()
+ .endObject().endObject().endObject()).get();
+ ensureGreen();
+ IndicesService indicesService = getInstanceFromNode(IndicesService.class);
+ IndexService test = indicesService.indexService("index");
+ IndexShard shard = test.getShardOrNull(0);
+ ShardRouting routing = new ShardRouting(shard.routingEntry());
+ test.removeShard(0, "b/c britta says so");
+ IndexShard newShard = test.createShard(routing);
+ newShard.shardRouting = routing;
+ DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, Version.CURRENT);
+ newShard.markAsRecovering("for testing", new RecoveryState(newShard.shardId(), routing.primary(), RecoveryState.Type.REPLICA, localNode, localNode));
+ // Shard is still inactive since we haven't started recovering yet
+ assertFalse(newShard.isActive());
+ newShard.prepareForIndexRecovery();
+ // Shard is still inactive since we haven't started recovering yet
+ assertFalse(newShard.isActive());
+ newShard.performTranslogRecovery(true);
+ // Shard should now be active since we did recover:
+ assertTrue(newShard.isActive());
+ }
+
+ public void testIndexingBufferDuringPeerRecovery() throws IOException {
+ createIndex("index");
+ client().admin().indices().preparePutMapping("index").setType("testtype").setSource(jsonBuilder().startObject()
+ .startObject("testtype")
+ .startObject("properties")
+ .startObject("foo")
+ .field("type", "string")
+ .endObject()
+ .endObject().endObject().endObject()).get();
+ ensureGreen();
+ IndicesService indicesService = getInstanceFromNode(IndicesService.class);
+ IndexService test = indicesService.indexService("index");
+ IndexShard shard = test.getShardOrNull(0);
+ ShardRouting routing = new ShardRouting(shard.routingEntry());
+ test.removeShard(0, "b/c britta says so");
+ IndexShard newShard = test.createShard(routing);
+ newShard.shardRouting = routing;
+ DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, Version.CURRENT);
+ newShard.markAsRecovering("for testing", new RecoveryState(newShard.shardId(), routing.primary(), RecoveryState.Type.REPLICA, localNode, localNode));
+ // Shard is still inactive since we haven't started recovering yet
+ assertFalse(newShard.isActive());
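+ // replay a single indexing op through batch (peer) recovery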
+ List<Translog.Operation> operations = new ArrayList<>();
+ operations.add(new Translog.Index("testtype", "1", jsonBuilder().startObject().field("foo", "bar").endObject().bytes().toBytes()));
+ newShard.prepareForIndexRecovery();
+ newShard.skipTranslogRecovery();
+ // Shard is still inactive since we haven't started recovering yet
+ assertFalse(newShard.isActive());
+ newShard.performBatchRecovery(operations);
+ // Shard should now be active since we did recover:
+ assertTrue(newShard.isActive());
+ }
}
diff --git a/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java
index c3a2d65748..f1515cd559 100644
--- a/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java
+++ b/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java
@@ -49,7 +49,7 @@ import java.util.Map;
/** Separate test class from ShardPathTests because we need static (BeforeClass) setup to install mock filesystems... */
public class NewPathForShardTests extends ESTestCase {
- private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings(new Index("index"), Settings.EMPTY);
+ private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings("index", Settings.EMPTY);
// Sneakiness to install mock file stores so we can fake how much free space we have on each path.data:
private static MockFileStore aFileStore = new MockFileStore("mocka");
@@ -167,8 +167,8 @@ public class NewPathForShardTests extends ESTestCase {
path.resolve("b").toString()};
Settings settings = Settings.builder()
- .put("path.home", path)
- .putArray("path.data", paths).build();
+ .put(Environment.PATH_HOME_SETTING.getKey(), path)
+ .putArray(Environment.PATH_DATA_SETTING.getKey(), paths).build();
NodeEnvironment nodeEnv = new NodeEnvironment(settings, new Environment(settings));
// Make sure all our mocking above actually worked:
@@ -182,7 +182,7 @@ public class NewPathForShardTests extends ESTestCase {
aFileStore.usableSpace = 100000;
bFileStore.usableSpace = 1000;
- ShardId shardId = new ShardId("index", 0);
+ ShardId shardId = new ShardId("index", "_na_", 0);
ShardPath result = ShardPath.selectNewPathForShard(nodeEnv, shardId, INDEX_SETTINGS, 100, Collections.<Path,Integer>emptyMap());
assertTrue(result.getDataPath().toString().contains(aPathPart));
@@ -190,7 +190,7 @@ public class NewPathForShardTests extends ESTestCase {
aFileStore.usableSpace = 1000;
bFileStore.usableSpace = 100000;
- shardId = new ShardId("index", 0);
+ shardId = new ShardId("index", "_na_", 0);
result = ShardPath.selectNewPathForShard(nodeEnv, shardId, INDEX_SETTINGS, 100, Collections.<Path,Integer>emptyMap());
assertTrue(result.getDataPath().toString().contains(bPathPart));
diff --git a/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java b/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java
index 5a82a8942a..640ae9e606 100644
--- a/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java
+++ b/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java
@@ -22,6 +22,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.routing.AllocationId;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
@@ -41,14 +42,14 @@ public class ShardPathTests extends ESTestCase {
Settings.Builder builder = settingsBuilder().put(IndexMetaData.SETTING_INDEX_UUID, "0xDEADBEEF")
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT);
Settings settings = builder.build();
- ShardId shardId = new ShardId("foo", 0);
+ ShardId shardId = new ShardId("foo", "_na_", 0);
Path[] paths = env.availableShardPaths(shardId);
Path path = randomFrom(paths);
ShardStateMetaData.FORMAT.write(new ShardStateMetaData(2, true, "0xDEADBEEF", AllocationId.newInitializing()), 2, path);
- ShardPath shardPath = ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.index(), settings));
+ ShardPath shardPath = ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings));
assertEquals(path, shardPath.getDataPath());
assertEquals("0xDEADBEEF", shardPath.getIndexUUID());
- assertEquals("foo", shardPath.getShardId().getIndex());
+ assertEquals("foo", shardPath.getShardId().getIndexName());
assertEquals(path.resolve("translog"), shardPath.resolveTranslog());
assertEquals(path.resolve("index"), shardPath.resolveIndex());
}
@@ -59,12 +60,12 @@ public class ShardPathTests extends ESTestCase {
Settings.Builder builder = settingsBuilder().put(IndexMetaData.SETTING_INDEX_UUID, "0xDEADBEEF")
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT);
Settings settings = builder.build();
- ShardId shardId = new ShardId("foo", 0);
+ ShardId shardId = new ShardId("foo", "_na_", 0);
Path[] paths = env.availableShardPaths(shardId);
assumeTrue("This test tests multi data.path but we only got one", paths.length > 1);
int id = randomIntBetween(1, 10);
ShardStateMetaData.FORMAT.write(new ShardStateMetaData(id, true, "0xDEADBEEF", AllocationId.newInitializing()), id, paths);
- ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.index(), settings));
+ ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings));
fail("Expected IllegalStateException");
} catch (IllegalStateException e) {
assertThat(e.getMessage(), containsString("more than one shard state found"));
@@ -76,12 +77,12 @@ public class ShardPathTests extends ESTestCase {
Settings.Builder builder = settingsBuilder().put(IndexMetaData.SETTING_INDEX_UUID, "foobar")
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT);
Settings settings = builder.build();
- ShardId shardId = new ShardId("foo", 0);
+ ShardId shardId = new ShardId("foo", "_na_", 0);
Path[] paths = env.availableShardPaths(shardId);
Path path = randomFrom(paths);
int id = randomIntBetween(1, 10);
ShardStateMetaData.FORMAT.write(new ShardStateMetaData(id, true, "0xDEADBEEF", AllocationId.newInitializing()), id, path);
- ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.index(), settings));
+ ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings));
fail("Expected IllegalStateException");
} catch (IllegalStateException e) {
assertThat(e.getMessage(), containsString("expected: foobar on shard path"));
@@ -91,7 +92,7 @@ public class ShardPathTests extends ESTestCase {
public void testIllegalCustomDataPath() {
final Path path = createTempDir().resolve("foo").resolve("0");
try {
- new ShardPath(true, path, path, "foo", new ShardId("foo", 0));
+ new ShardPath(true, path, path, "foo", new ShardId("foo", "_na_", 0));
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), is("shard state path must be different to the data path when using custom data paths"));
@@ -100,7 +101,7 @@ public class ShardPathTests extends ESTestCase {
public void testValidCtor() {
final Path path = createTempDir().resolve("foo").resolve("0");
- ShardPath shardPath = new ShardPath(false, path, path, "foo", new ShardId("foo", 0));
+ ShardPath shardPath = new ShardPath(false, path, path, "foo", new ShardId("foo", "_na_", 0));
assertFalse(shardPath.isCustomDataPath());
assertEquals(shardPath.getDataPath(), path);
assertEquals(shardPath.getShardStatePath(), path);
@@ -118,8 +119,8 @@ public class ShardPathTests extends ESTestCase {
final Path path = createTempDir();
final boolean includeNodeId = randomBoolean();
indexSetttings = indexSettingsBuilder.put(IndexMetaData.SETTING_DATA_PATH, "custom").build();
- nodeSettings = settingsBuilder().put("path.shared_data", path.toAbsolutePath().toAbsolutePath())
- .put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH, includeNodeId).build();
+ nodeSettings = settingsBuilder().put(Environment.PATH_SHARED_DATA_SETTING.getKey(), path.toAbsolutePath().toAbsolutePath())
+ .put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH.getKey(), includeNodeId).build();
if (includeNodeId) {
customPath = path.resolve("custom").resolve("0");
} else {
@@ -131,11 +132,11 @@ public class ShardPathTests extends ESTestCase {
nodeSettings = Settings.EMPTY;
}
try (final NodeEnvironment env = newNodeEnvironment(nodeSettings)) {
- ShardId shardId = new ShardId("foo", 0);
+ ShardId shardId = new ShardId("foo", "_na_", 0);
Path[] paths = env.availableShardPaths(shardId);
Path path = randomFrom(paths);
ShardStateMetaData.FORMAT.write(new ShardStateMetaData(2, true, "0xDEADBEEF", AllocationId.newInitializing()), 2, path);
- ShardPath shardPath = ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.index(), indexSetttings));
+ ShardPath shardPath = ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), indexSetttings));
boolean found = false;
for (Path p : env.nodeDataPaths()) {
if (p.equals(shardPath.getRootStatePath())) {
diff --git a/core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java b/core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java
index a59dcb49ac..105179a1f5 100644
--- a/core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java
+++ b/core/src/test/java/org/elasticsearch/index/shard/ShardUtilsTests.java
@@ -38,7 +38,7 @@ public class ShardUtilsTests extends ESTestCase {
BaseDirectoryWrapper dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
writer.commit();
- ShardId id = new ShardId("foo", random().nextInt());
+ ShardId id = new ShardId("foo", "_na_", random().nextInt());
try (DirectoryReader reader = DirectoryReader.open(writer, random().nextBoolean())) {
ElasticsearchDirectoryReader wrap = ElasticsearchDirectoryReader.wrap(reader, id);
assertEquals(id, ShardUtils.extractShardId(wrap));
diff --git a/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java b/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java
index 30f68ccb70..926ea49ba4 100644
--- a/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java
+++ b/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java
@@ -20,6 +20,7 @@
package org.elasticsearch.index.similarity;
import org.apache.lucene.search.similarities.ClassicSimilarity;
+import org.apache.lucene.search.similarities.DFISimilarity;
import org.apache.lucene.search.similarities.AfterEffectL;
import org.apache.lucene.search.similarities.BM25Similarity;
import org.apache.lucene.search.similarities.BasicModelG;
@@ -38,6 +39,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
+import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -93,7 +95,7 @@ public class SimilarityTests extends ESSingleNodeTestCase {
Settings indexSettings = Settings.settingsBuilder()
.put("index.similarity.my_similarity.type", "BM25")
.put("index.similarity.my_similarity.k1", 2.0f)
- .put("index.similarity.my_similarity.b", 1.5f)
+ .put("index.similarity.my_similarity.b", 0.5f)
.put("index.similarity.my_similarity.discount_overlaps", false)
.build();
IndexService indexService = createIndex("foo", indexSettings);
@@ -102,7 +104,7 @@ public class SimilarityTests extends ESSingleNodeTestCase {
BM25Similarity similarity = (BM25Similarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get();
assertThat(similarity.getK1(), equalTo(2.0f));
- assertThat(similarity.getB(), equalTo(1.5f));
+ assertThat(similarity.getB(), equalTo(0.5f));
assertThat(similarity.getDiscountOverlaps(), equalTo(false));
}
@@ -156,6 +158,23 @@ public class SimilarityTests extends ESSingleNodeTestCase {
assertThat(((NormalizationH2) similarity.getNormalization()).getC(), equalTo(3f));
}
+ public void testResolveSimilaritiesFromMapping_DFI() throws IOException {
+ String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+ .startObject("properties")
+ .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject()
+ .endObject()
+ .endObject().endObject().string();
+
+ Settings indexSettings = Settings.settingsBuilder()
+ .put("index.similarity.my_similarity.type", "DFI")
+ .build();
+ IndexService indexService = createIndex("foo", indexSettings);
+ DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
+ MappedFieldType fieldType = documentMapper.mappers().getMapper("field1").fieldType();
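+ // "DFI" should resolve to Lucene's divergence-from-independence similarity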
+ assertThat(fieldType.similarity(), instanceOf(DFISimilarityProvider.class));
+ assertThat(fieldType.similarity().get(), instanceOf(DFISimilarity.class));
+ }
+
public void testResolveSimilaritiesFromMapping_LMDirichlet() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java
index 2c9de235b7..0f68030745 100644
--- a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java
+++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java
@@ -58,6 +58,7 @@ import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.recovery.RecoveryFileChunkRequest;
import org.elasticsearch.indices.recovery.RecoveryTarget;
import org.elasticsearch.monitor.fs.FsInfo;
+import org.elasticsearch.node.Node;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.snapshots.SnapshotState;
import org.elasticsearch.test.CorruptionUtils;
@@ -372,7 +373,7 @@ public class CorruptedFileIT extends ESIntegTestCase {
int numDocs = scaledRandomIntBetween(100, 1000);
internalCluster().ensureAtLeastNumDataNodes(2);
if (cluster().numDataNodes() < 3) {
- internalCluster().startNode(Settings.builder().put("node.data", true).put("node.client", false).put("node.master", false));
+ internalCluster().startNode(Settings.builder().put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false));
}
NodesStatsResponse nodeStats = client().admin().cluster().prepareNodesStats().get();
List<NodeStats> dataNodeStats = new ArrayList<>();
diff --git a/core/src/test/java/org/elasticsearch/index/store/ExceptionRetryIT.java b/core/src/test/java/org/elasticsearch/index/store/ExceptionRetryIT.java
index 234de11b51..715fac55a7 100644
--- a/core/src/test/java/org/elasticsearch/index/store/ExceptionRetryIT.java
+++ b/core/src/test/java/org/elasticsearch/index/store/ExceptionRetryIT.java
@@ -122,7 +122,7 @@ public class ExceptionRetryIT extends ESIntegTestCase {
if (!uniqueIds.add(searchResponse.getHits().getHits()[i].getId())) {
if (!found_duplicate_already) {
SearchResponse dupIdResponse = client().prepareSearch("index").setQuery(termQuery("_id", searchResponse.getHits().getHits()[i].getId())).setExplain(true).get();
- assertThat(dupIdResponse.getHits().totalHits(), greaterThan(1l));
+ assertThat(dupIdResponse.getHits().totalHits(), greaterThan(1L));
logger.info("found a duplicate id:");
for (SearchHit hit : dupIdResponse.getHits()) {
logger.info("Doc {} was found on shard {}", hit.getId(), hit.getShard().getShardId());
@@ -134,7 +134,7 @@ public class ExceptionRetryIT extends ESIntegTestCase {
}
}
assertSearchResponse(searchResponse);
- assertThat(dupCounter, equalTo(0l));
+ assertThat(dupCounter, equalTo(0L));
assertHitCount(searchResponse, numDocs);
}
}
diff --git a/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java b/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java
index 300e4bb9ab..d9000e23a6 100644
--- a/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java
+++ b/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java
@@ -25,6 +25,7 @@ import org.apache.lucene.store.MMapDirectory;
import org.apache.lucene.store.NIOFSDirectory;
import org.apache.lucene.store.NoLockFactory;
import org.apache.lucene.store.SimpleFSDirectory;
+import org.apache.lucene.store.StoreRateLimiting;
import org.apache.lucene.util.Constants;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -51,8 +52,8 @@ public class IndexStoreTests extends ESTestCase {
final IndexModule.Type type = RandomPicks.randomFrom(random(), values);
Settings settings = Settings.settingsBuilder().put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), type.name().toLowerCase(Locale.ROOT))
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
- IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(new Index("foo"), settings);
- FsDirectoryService service = new FsDirectoryService(indexSettings, null, new ShardPath(false, tempDir, tempDir, "foo", new ShardId("foo", 0)));
+ IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("foo", settings);
+ FsDirectoryService service = new FsDirectoryService(indexSettings, null, new ShardPath(false, tempDir, tempDir, "foo", new ShardId("foo", "_na_", 0)));
try (final Directory directory = service.newFSDirectory(tempDir, NoLockFactory.INSTANCE)) {
switch (type) {
case NIOFS:
@@ -84,7 +85,7 @@ public class IndexStoreTests extends ESTestCase {
public void testStoreDirectoryDefault() throws IOException {
final Path tempDir = createTempDir().resolve("foo").resolve("0");
- FsDirectoryService service = new FsDirectoryService(IndexSettingsModule.newIndexSettings(new Index("foo"), Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build()), null, new ShardPath(false, tempDir, tempDir, "foo", new ShardId("foo", 0)));
+ FsDirectoryService service = new FsDirectoryService(IndexSettingsModule.newIndexSettings("foo", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build()), null, new ShardPath(false, tempDir, tempDir, "foo", new ShardId("foo", "_na_", 0)));
try (final Directory directory = service.newFSDirectory(tempDir, NoLockFactory.INSTANCE)) {
if (Constants.WINDOWS) {
assertTrue(directory.toString(), directory instanceof MMapDirectory || directory instanceof SimpleFSDirectory);
@@ -95,4 +96,24 @@ public class IndexStoreTests extends ESTestCase {
}
}
}
+
+ public void testUpdateThrottleType() throws IOException {
+ Settings settings = Settings.settingsBuilder().put(IndexStoreConfig.INDICES_STORE_THROTTLE_TYPE_SETTING.getKey(), "all")
+ .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
+ IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("foo", settings);
+ IndexStoreConfig indexStoreConfig = new IndexStoreConfig(settings);
+ IndexStore store = new IndexStore(indexSettings, indexStoreConfig);
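+ // freshly created, the index store has its own limiter (type NONE) while the node-level limiter is "all"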
+ assertEquals(StoreRateLimiting.Type.NONE, store.rateLimiting().getType());
+ assertEquals(StoreRateLimiting.Type.ALL, indexStoreConfig.getNodeRateLimiter().getType());
+ assertNotSame(indexStoreConfig.getNodeRateLimiter(), store.rateLimiting());
+
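+ // switching to "node" delegates to the shared node-level limiter instance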
+ store.setType(IndexStore.IndexRateLimitingType.fromString("NODE"));
+ assertEquals(StoreRateLimiting.Type.ALL, store.rateLimiting().getType());
+ assertSame(indexStoreConfig.getNodeRateLimiter(), store.rateLimiting());
+
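+ // switching to "merge" goes back to an index-local limiter and leaves the node-level limiter untouched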
+ store.setType(IndexStore.IndexRateLimitingType.fromString("merge"));
+ assertEquals(StoreRateLimiting.Type.MERGE, store.rateLimiting().getType());
+ assertNotSame(indexStoreConfig.getNodeRateLimiter(), store.rateLimiting());
+ assertEquals(StoreRateLimiting.Type.ALL, indexStoreConfig.getNodeRateLimiter().getType());
+ }
}
diff --git a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java
index ed98fc1bac..1d77dd93d8 100644
--- a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java
+++ b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java
@@ -109,10 +109,10 @@ import static org.hamcrest.Matchers.nullValue;
public class StoreTests extends ESTestCase {
- private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings(new Index("index"), Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT).build());
+ private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings("index", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT).build());
public void testRefCount() throws IOException {
- final ShardId shardId = new ShardId(new Index("index"), 1);
+ final ShardId shardId = new ShardId("index", "_na_", 1);
DirectoryService directoryService = new LuceneManagedDirectoryService(random());
IndexSettings indexSettings = INDEX_SETTINGS;
@@ -361,7 +361,7 @@ public class StoreTests extends ESTestCase {
// agree on the oldest version of a segment. We should fix this test by
// switching to a static bw index
public void testWriteLegacyChecksums() throws IOException {
- final ShardId shardId = new ShardId(new Index("index"), 1);
+ final ShardId shardId = new ShardId("index", "_na_", 1);
DirectoryService directoryService = new LuceneManagedDirectoryService(random());
Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId));
// set default codec - all segments need checksums
@@ -445,7 +445,7 @@ public class StoreTests extends ESTestCase {
}
public void testNewChecksums() throws IOException {
- final ShardId shardId = new ShardId(new Index("index"), 1);
+ final ShardId shardId = new ShardId("index", "_na_", 1);
DirectoryService directoryService = new LuceneManagedDirectoryService(random());
Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId));
// set default codec - all segments need checksums
@@ -504,7 +504,7 @@ public class StoreTests extends ESTestCase {
}
public void testMixedChecksums() throws IOException {
- final ShardId shardId = new ShardId(new Index("index"), 1);
+ final ShardId shardId = new ShardId("index", "_na_", 1);
DirectoryService directoryService = new LuceneManagedDirectoryService(random());
Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId));
// this time random codec....
@@ -595,7 +595,7 @@ public class StoreTests extends ESTestCase {
}
public void testRenameFile() throws IOException {
- final ShardId shardId = new ShardId(new Index("index"), 1);
+ final ShardId shardId = new ShardId("index", "_na_", 1);
DirectoryService directoryService = new LuceneManagedDirectoryService(random(), false);
Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId));
{
@@ -791,7 +791,7 @@ public class StoreTests extends ESTestCase {
public void assertDeleteContent(Store store, DirectoryService service) throws IOException {
deleteContent(store.directory());
assertThat(Arrays.toString(store.directory().listAll()), store.directory().listAll().length, equalTo(0));
- assertThat(store.stats().sizeInBytes(), equalTo(0l));
+ assertThat(store.stats().sizeInBytes(), equalTo(0L));
assertThat(service.newDirectory().listAll().length, equalTo(0));
}
@@ -867,7 +867,7 @@ public class StoreTests extends ESTestCase {
IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random)).setCodec(TestUtil.getDefaultCodec());
iwc.setMergePolicy(NoMergePolicy.INSTANCE);
iwc.setUseCompoundFile(random.nextBoolean());
- final ShardId shardId = new ShardId(new Index("index"), 1);
+ final ShardId shardId = new ShardId("index", "_na_", 1);
DirectoryService directoryService = new LuceneManagedDirectoryService(random);
Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId));
IndexWriter writer = new IndexWriter(store.directory(), iwc);
@@ -897,7 +897,7 @@ public class StoreTests extends ESTestCase {
IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random)).setCodec(TestUtil.getDefaultCodec());
iwc.setMergePolicy(NoMergePolicy.INSTANCE);
iwc.setUseCompoundFile(random.nextBoolean());
- final ShardId shardId = new ShardId(new Index("index"), 1);
+ final ShardId shardId = new ShardId("index", "_na_", 1);
DirectoryService directoryService = new LuceneManagedDirectoryService(random);
store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId));
IndexWriter writer = new IndexWriter(store.directory(), iwc);
@@ -995,7 +995,7 @@ public class StoreTests extends ESTestCase {
}
public void testCleanupFromSnapshot() throws IOException {
- final ShardId shardId = new ShardId(new Index("index"), 1);
+ final ShardId shardId = new ShardId("index", "_na_", 1);
DirectoryService directoryService = new LuceneManagedDirectoryService(random());
Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId));
// this time random codec....
@@ -1109,7 +1109,7 @@ public class StoreTests extends ESTestCase {
metaDataMap.put("_0_1.del", new StoreFileMetaData("_0_1.del", 42, "foobarbaz", null, new BytesRef()));
Store.MetadataSnapshot snapshot = new Store.MetadataSnapshot(unmodifiableMap(metaDataMap), emptyMap(), 0);
- final ShardId shardId = new ShardId(new Index("index"), 1);
+ final ShardId shardId = new ShardId("index", "_na_", 1);
DirectoryService directoryService = new LuceneManagedDirectoryService(random());
Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId));
for (String file : metaDataMap.keySet()) {
@@ -1126,7 +1126,7 @@ public class StoreTests extends ESTestCase {
}
public void testOnCloseCallback() throws IOException {
- final ShardId shardId = new ShardId(new Index(randomRealisticUnicodeOfCodepointLengthBetween(1, 10)), randomIntBetween(0, 100));
+ final ShardId shardId = new ShardId(new Index(randomRealisticUnicodeOfCodepointLengthBetween(1, 10), "_na_"), randomIntBetween(0, 100));
DirectoryService directoryService = new LuceneManagedDirectoryService(random());
final AtomicInteger count = new AtomicInteger(0);
final ShardLock lock = new DummyShardLock(shardId);
@@ -1150,12 +1150,12 @@ public class StoreTests extends ESTestCase {
}
public void testStoreStats() throws IOException {
- final ShardId shardId = new ShardId(new Index("index"), 1);
+ final ShardId shardId = new ShardId("index", "_na_", 1);
DirectoryService directoryService = new LuceneManagedDirectoryService(random());
Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT)
.put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING.getKey(), TimeValue.timeValueMinutes(0)).build();
- Store store = new Store(shardId, IndexSettingsModule.newIndexSettings(new Index("index"), settings), directoryService, new DummyShardLock(shardId));
+ Store store = new Store(shardId, IndexSettingsModule.newIndexSettings("index", settings), directoryService, new DummyShardLock(shardId));
long initialStoreSize = 0;
for (String extraFiles : store.directory().listAll()) {
assertTrue("expected extraFS file but got: " + extraFiles, extraFiles.startsWith("extra"));
@@ -1244,7 +1244,7 @@ public class StoreTests extends ESTestCase {
}
public void testUserDataRead() throws IOException {
- final ShardId shardId = new ShardId(new Index("index"), 1);
+ final ShardId shardId = new ShardId("index", "_na_", 1);
DirectoryService directoryService = new LuceneManagedDirectoryService(random());
Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId));
IndexWriterConfig config = newIndexWriterConfig(random(), new MockAnalyzer(random())).setCodec(TestUtil.getDefaultCodec());
@@ -1279,7 +1279,7 @@ public class StoreTests extends ESTestCase {
public void testStreamStoreFilesMetaData() throws Exception {
Store.MetadataSnapshot metadataSnapshot = createMetaDataSnapshot();
- TransportNodesListShardStoreMetaData.StoreFilesMetaData outStoreFileMetaData = new TransportNodesListShardStoreMetaData.StoreFilesMetaData(randomBoolean(), new ShardId("test", 0),metadataSnapshot);
+ TransportNodesListShardStoreMetaData.StoreFilesMetaData outStoreFileMetaData = new TransportNodesListShardStoreMetaData.StoreFilesMetaData(randomBoolean(), new ShardId("test", "_na_", 0), metadataSnapshot);
ByteArrayOutputStream outBuffer = new ByteArrayOutputStream();
OutputStreamStreamOutput out = new OutputStreamStreamOutput(outBuffer);
org.elasticsearch.Version targetNodeVersion = randomVersion(random());
@@ -1298,7 +1298,7 @@ public class StoreTests extends ESTestCase {
public void testMarkCorruptedOnTruncatedSegmentsFile() throws IOException {
IndexWriterConfig iwc = newIndexWriterConfig();
- final ShardId shardId = new ShardId(new Index("index"), 1);
+ final ShardId shardId = new ShardId("index", "_na_", 1);
DirectoryService directoryService = new LuceneManagedDirectoryService(random());
Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId));
IndexWriter writer = new IndexWriter(store.directory(), iwc);
@@ -1342,19 +1342,19 @@ public class StoreTests extends ESTestCase {
}
public void testCanOpenIndex() throws IOException {
+ final ShardId shardId = new ShardId("index", "_na_", 1);
IndexWriterConfig iwc = newIndexWriterConfig();
Path tempDir = createTempDir();
final BaseDirectoryWrapper dir = newFSDirectory(tempDir);
- assertFalse(Store.canOpenIndex(logger, tempDir));
+ assertFalse(Store.canOpenIndex(logger, tempDir, shardId));
IndexWriter writer = new IndexWriter(dir, iwc);
Document doc = new Document();
doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
writer.addDocument(doc);
writer.commit();
writer.close();
- assertTrue(Store.canOpenIndex(logger, tempDir));
+ assertTrue(Store.canOpenIndex(logger, tempDir, shardId));
- final ShardId shardId = new ShardId(new Index("index"), 1);
DirectoryService directoryService = new DirectoryService(shardId, INDEX_SETTINGS) {
@Override
public long throttleTimeInNanos() {
@@ -1368,12 +1368,12 @@ public class StoreTests extends ESTestCase {
};
Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId));
store.markStoreCorrupted(new CorruptIndexException("foo", "bar"));
- assertFalse(Store.canOpenIndex(logger, tempDir));
+ assertFalse(Store.canOpenIndex(logger, tempDir, shardId));
store.close();
}
public void testDeserializeCorruptionException() throws IOException {
- final ShardId shardId = new ShardId(new Index("index"), 1);
+ final ShardId shardId = new ShardId("index", "_na_", 1);
final Directory dir = new RAMDirectory(); // use a ram dir to keep the virus scanner from being a PITA
DirectoryService directoryService = new DirectoryService(shardId, INDEX_SETTINGS) {
@Override
@@ -1413,7 +1413,7 @@ public class StoreTests extends ESTestCase {
}
public void testCanReadOldCorruptionMarker() throws IOException {
- final ShardId shardId = new ShardId(new Index("index"), 1);
+ final ShardId shardId = new ShardId("index", "_na_", 1);
final Directory dir = new RAMDirectory(); // use a ram dir to keep the virus scanner from being a PITA
DirectoryService directoryService = new DirectoryService(shardId, INDEX_SETTINGS) {
@Override
diff --git a/core/src/test/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java b/core/src/test/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java
index 68c19d56e5..72be682e86 100644
--- a/core/src/test/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java
+++ b/core/src/test/java/org/elasticsearch/index/suggest/stats/SuggestStatsIT.java
@@ -102,7 +102,7 @@ public class SuggestStatsIT extends ESIntegTestCase {
IndicesStatsResponse indicesStats = client().admin().indices().prepareStats().execute().actionGet();
// check current
- assertThat(indicesStats.getTotal().getSuggest().getCurrent(), equalTo(0l));
+ assertThat(indicesStats.getTotal().getSuggest().getCurrent(), equalTo(0L));
// check suggest count
assertThat(indicesStats.getTotal().getSuggest().getCount(), equalTo((long) (suggestAllIdx * totalShards + suggestIdx1 * shardsIdx1 + suggestIdx2 * shardsIdx2)));
@@ -111,7 +111,7 @@ public class SuggestStatsIT extends ESIntegTestCase {
logger.info("iter {}, iter1 {}, iter2 {}, {}", suggestAllIdx, suggestIdx1, suggestIdx2, endTime - startTime);
// check suggest time
- assertThat(indicesStats.getTotal().getSuggest().getTimeInMillis(), greaterThan(0l));
+ assertThat(indicesStats.getTotal().getSuggest().getTimeInMillis(), greaterThan(0L));
// the upper bound is num shards * total time since we run searches in parallel
assertThat(indicesStats.getTotal().getSuggest().getTimeInMillis(), lessThanOrEqualTo(totalShards * (endTime - startTime)));
@@ -123,12 +123,12 @@ public class SuggestStatsIT extends ESIntegTestCase {
SuggestStats suggestStats = stat.getIndices().getSuggest();
logger.info("evaluating {}", stat.getNode());
if (nodeIdsWithIndex.contains(stat.getNode().getId())) {
- assertThat(suggestStats.getCount(), greaterThan(0l));
- assertThat(suggestStats.getTimeInMillis(), greaterThan(0l));
+ assertThat(suggestStats.getCount(), greaterThan(0L));
+ assertThat(suggestStats.getTimeInMillis(), greaterThan(0L));
num++;
} else {
- assertThat(suggestStats.getCount(), equalTo(0l));
- assertThat(suggestStats.getTimeInMillis(), equalTo(0l));
+ assertThat(suggestStats.getCount(), equalTo(0L));
+ assertThat(suggestStats.getTimeInMillis(), equalTo(0L));
}
}
diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java
index e193532823..c888c88454 100644
--- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java
+++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java
@@ -91,7 +91,7 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo;
@LuceneTestCase.SuppressFileSystems("ExtrasFS")
public class TranslogTests extends ESTestCase {
- protected final ShardId shardId = new ShardId(new Index("index"), 1);
+ protected final ShardId shardId = new ShardId("index", "_na_", 1);
protected Translog translog;
protected Path translogDir;
@@ -141,7 +141,7 @@ public class TranslogTests extends ESTestCase {
.put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT)
.build();
ByteSizeValue bufferSize = randomBoolean() ? TranslogConfig.DEFAULT_BUFFER_SIZE : new ByteSizeValue(10 + randomInt(128 * 1024), ByteSizeUnit.BYTES);
- return new TranslogConfig(shardId, path, IndexSettingsModule.newIndexSettings(shardId.index(), build), BigArrays.NON_RECYCLING_INSTANCE, bufferSize);
+ return new TranslogConfig(shardId, path, IndexSettingsModule.newIndexSettings(shardId.getIndex(), build), BigArrays.NON_RECYCLING_INSTANCE, bufferSize);
}
protected void addToTranslogAndList(Translog translog, ArrayList<Translog.Operation> list, Translog.Operation op) throws IOException {
@@ -288,7 +288,7 @@ public class TranslogTests extends ESTestCase {
public void testStats() throws IOException {
final long firstOperationPosition = translog.getFirstOperationPosition();
TranslogStats stats = stats();
- assertThat(stats.estimatedNumberOfOperations(), equalTo(0l));
+ assertThat(stats.estimatedNumberOfOperations(), equalTo(0L));
long lastSize = stats.getTranslogSizeInBytes();
assertThat((int) firstOperationPosition, greaterThan(CodecUtil.headerLength(TranslogWriter.TRANSLOG_CODEC)));
assertThat(lastSize, equalTo(firstOperationPosition));
@@ -296,14 +296,14 @@ public class TranslogTests extends ESTestCase {
translog.add(new Translog.Index("test", "1", new byte[]{1}));
stats = stats();
total.add(stats);
- assertThat(stats.estimatedNumberOfOperations(), equalTo(1l));
+ assertThat(stats.estimatedNumberOfOperations(), equalTo(1L));
assertThat(stats.getTranslogSizeInBytes(), greaterThan(lastSize));
lastSize = stats.getTranslogSizeInBytes();
translog.add(new Translog.Delete(newUid("2")));
stats = stats();
total.add(stats);
- assertThat(stats.estimatedNumberOfOperations(), equalTo(2l));
+ assertThat(stats.estimatedNumberOfOperations(), equalTo(2L));
assertThat(stats.getTranslogSizeInBytes(), greaterThan(lastSize));
lastSize = stats.getTranslogSizeInBytes();
@@ -311,13 +311,13 @@ public class TranslogTests extends ESTestCase {
translog.prepareCommit();
stats = stats();
total.add(stats);
- assertThat(stats.estimatedNumberOfOperations(), equalTo(3l));
+ assertThat(stats.estimatedNumberOfOperations(), equalTo(3L));
assertThat(stats.getTranslogSizeInBytes(), greaterThan(lastSize));
translog.commit();
stats = stats();
total.add(stats);
- assertThat(stats.estimatedNumberOfOperations(), equalTo(0l));
+ assertThat(stats.estimatedNumberOfOperations(), equalTo(0L));
assertThat(stats.getTranslogSizeInBytes(), equalTo(firstOperationPosition));
assertEquals(6, total.estimatedNumberOfOperations());
assertEquals(431, total.getTranslogSizeInBytes());
@@ -896,6 +896,42 @@ public class TranslogTests extends ESTestCase {
IOUtils.close(writer);
}
+ public void testFailWriterWhileClosing() throws IOException {
+ Path tempDir = createTempDir();
+ final FailSwitch fail = new FailSwitch();
+ fail.failNever();
+ TranslogConfig config = getTranslogConfig(tempDir);
+ try (Translog translog = getFailableTranslog(fail, config)) {
+ final TranslogWriter writer = translog.createWriter(0);
+ final int numOps = randomIntBetween(10, 100);
+ byte[] bytes = new byte[4];
+ ByteArrayDataOutput out = new ByteArrayDataOutput(bytes);
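+ // write numOps sequential integers so they can be verified after the failed close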
+ for (int i = 0; i < numOps; i++) {
+ out.reset(bytes);
+ out.writeInt(i);
+ writer.add(new BytesArray(bytes));
+ }
+ writer.sync();
+ try {
+ fail.failAlways();
+ writer.closeIntoReader();
+ fail();
+ } catch (MockDirectoryWrapper.FakeIOException ex) {
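+ // expected: the fail switch injects an IOException while the writer closes into a reader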
+ }
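+ // everything synced before the failure must still be readable through a fresh reader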
+ try (TranslogReader reader = translog.openReader(writer.path(), Checkpoint.read(translog.location().resolve(Translog.CHECKPOINT_FILE_NAME)))) {
+ for (int i = 0; i < numOps; i++) {
+ ByteBuffer buffer = ByteBuffer.allocate(4);
+ reader.readBytes(buffer, reader.getFirstOperationOffset() + 4 * i);
+ buffer.flip();
+ final int value = buffer.getInt();
+ assertEquals(i, value);
+ }
+ }
+ }
+ }
+
public void testBasicRecovery() throws IOException {
List<Translog.Location> locations = new ArrayList<>();
int translogOperations = randomIntBetween(10, 100);
@@ -947,7 +983,7 @@ public class TranslogTests extends ESTestCase {
if (op == prepareOp) {
translogGeneration = translog.getGeneration();
translog.prepareCommit();
- assertEquals("expected this to be the first commit", 1l, translogGeneration.translogFileGeneration);
+ assertEquals("expected this to be the first commit", 1L, translogGeneration.translogFileGeneration);
assertNotNull(translogGeneration.translogUUID);
}
}
@@ -998,7 +1034,7 @@ public class TranslogTests extends ESTestCase {
if (op == prepareOp) {
translogGeneration = translog.getGeneration();
translog.prepareCommit();
- assertEquals("expected this to be the first commit", 1l, translogGeneration.translogFileGeneration);
+ assertEquals("expected this to be the first commit", 1L, translogGeneration.translogFileGeneration);
assertNotNull(translogGeneration.translogUUID);
}
}
@@ -1054,7 +1090,7 @@ public class TranslogTests extends ESTestCase {
if (op == prepareOp) {
translogGeneration = translog.getGeneration();
translog.prepareCommit();
- assertEquals("expected this to be the first commit", 1l, translogGeneration.translogFileGeneration);
+ assertEquals("expected this to be the first commit", 1L, translogGeneration.translogFileGeneration);
assertNotNull(translogGeneration.translogUUID);
}
}
@@ -1357,7 +1393,6 @@ public class TranslogTests extends ESTestCase {
Path tempDir = createTempDir();
final FailSwitch fail = new FailSwitch();
TranslogConfig config = getTranslogConfig(tempDir);
- assumeFalse("this won't work if we sync on any op", config.isSyncOnEachOperation());
Translog translog = getFailableTranslog(fail, config, false, true);
LineFileDocs lineFileDocs = new LineFileDocs(random()); // writes pretty big docs so we cross buffer borders regularly
translog.add(new Translog.Index("test", "1", lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8"))));
diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java b/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java
index ed8c27892b..f1f8a8222c 100644
--- a/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java
@@ -130,7 +130,7 @@ public class IndicesLifecycleListenerIT extends ESIntegTestCase {
throw new RuntimeException("FAIL");
}
});
- client().admin().cluster().prepareReroute().add(new MoveAllocationCommand(new ShardId("index1", 0), node1, node2)).get();
+ client().admin().cluster().prepareReroute().add(new MoveAllocationCommand("index1", 0, node1, node2)).get();
ensureGreen("index1");
ClusterState state = client().admin().cluster().prepareState().get().getState();
List<ShardRouting> shard = state.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED);
diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java
index 6d0a895511..aad4e34c3d 100644
--- a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java
@@ -472,19 +472,19 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase {
.setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setQuery(matchAllQuery())
.execute().actionGet();
- assertHitCount(response, 0l);
+ assertHitCount(response, 0L);
response = client().prepareSearch("test2","test3").setQuery(matchAllQuery())
.setIndicesOptions(IndicesOptions.lenientExpandOpen())
.execute().actionGet();
- assertHitCount(response, 0l);
+ assertHitCount(response, 0L);
// you should still be able to run empty searches without things blowing up
response = client().prepareSearch()
.setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setQuery(matchAllQuery())
.execute().actionGet();
- assertHitCount(response, 1l);
+ assertHitCount(response, 1L);
}
public void testAllMissingStrict() throws Exception {
diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java
index 522ebfb0f3..e9f1f6be51 100644
--- a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java
@@ -64,8 +64,8 @@ public class IndicesServiceTests extends ESSingleNodeTestCase {
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 4))
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, randomIntBetween(0, 3))
.build());
- assertFalse("shard on shared filesystem", indicesService.canDeleteIndexContents(new Index("test"), idxSettings, false));
- assertTrue("shard on shared filesystem and closed", indicesService.canDeleteIndexContents(new Index("test"), idxSettings, true));
+ assertFalse("shard on shared filesystem", indicesService.canDeleteIndexContents(idxSettings.getIndex(), idxSettings, false));
+ assertTrue("shard on shared filesystem and closed", indicesService.canDeleteIndexContents(idxSettings.getIndex(), idxSettings, true));
}
public void testCanDeleteShardContent() {
@@ -73,12 +73,12 @@ public class IndicesServiceTests extends ESSingleNodeTestCase {
IndexMetaData meta = IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(
1).build();
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", meta.getSettings());
- assertFalse("no shard location", indicesService.canDeleteShardContent(new ShardId("test", 0), indexSettings));
+ assertFalse("no shard location", indicesService.canDeleteShardContent(new ShardId("test", "_na_", 0), indexSettings));
IndexService test = createIndex("test");
assertTrue(test.hasShard(0));
- assertFalse("shard is allocated", indicesService.canDeleteShardContent(new ShardId("test", 0), indexSettings));
+ assertFalse("shard is allocated", indicesService.canDeleteShardContent(new ShardId("test", "_na_", 0), indexSettings));
test.removeShard(0, "boom");
- assertTrue("shard is removed", indicesService.canDeleteShardContent(new ShardId("test", 0), indexSettings));
+ assertTrue("shard is removed", indicesService.canDeleteShardContent(new ShardId("test", "_na_", 0), indexSettings));
}
public void testDeleteIndexStore() throws Exception {
@@ -175,7 +175,7 @@ public class IndicesServiceTests extends ESSingleNodeTestCase {
if (randomBoolean()) {
indicesService.addPendingDelete(new ShardId(test.index(), 0), test.getIndexSettings());
indicesService.addPendingDelete(new ShardId(test.index(), 1), test.getIndexSettings());
- indicesService.addPendingDelete(new ShardId("bogus", 1), test.getIndexSettings());
+ indicesService.addPendingDelete(new ShardId("bogus", "_na_", 1), test.getIndexSettings());
assertEquals(indicesService.numPendingDeletes(test.index()), 2);
// shard lock released... we can now delete
indicesService.processPendingDeletes(test.index(), test.getIndexSettings(), new TimeValue(0, TimeUnit.MILLISECONDS));
diff --git a/core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceIT.java b/core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceIT.java
index 722a4ebde8..c539e2a68a 100644
--- a/core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceIT.java
@@ -22,6 +22,7 @@ import org.apache.lucene.analysis.hunspell.Dictionary;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.indices.analysis.HunspellService;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
@@ -39,9 +40,9 @@ import static org.hamcrest.Matchers.notNullValue;
public class HunspellServiceIT extends ESIntegTestCase {
public void testLocaleDirectoryWithNodeLevelConfig() throws Exception {
Settings settings = Settings.settingsBuilder()
- .put("path.conf", getDataPath("/indices/analyze/conf_dir"))
- .put(HUNSPELL_LAZY_LOAD, randomBoolean())
- .put(HUNSPELL_IGNORE_CASE, true)
+ .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/conf_dir"))
+ .put(HUNSPELL_LAZY_LOAD.getKey(), randomBoolean())
+ .put(HUNSPELL_IGNORE_CASE.getKey(), true)
.build();
internalCluster().startNode(settings);
@@ -52,9 +53,9 @@ public class HunspellServiceIT extends ESIntegTestCase {
public void testLocaleDirectoryWithLocaleSpecificConfig() throws Exception {
Settings settings = Settings.settingsBuilder()
- .put("path.conf", getDataPath("/indices/analyze/conf_dir"))
- .put(HUNSPELL_LAZY_LOAD, randomBoolean())
- .put(HUNSPELL_IGNORE_CASE, true)
+ .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/conf_dir"))
+ .put(HUNSPELL_LAZY_LOAD.getKey(), randomBoolean())
+ .put(HUNSPELL_IGNORE_CASE.getKey(), true)
.put("indices.analysis.hunspell.dictionary.en_US.strict_affix_parsing", false)
.put("indices.analysis.hunspell.dictionary.en_US.ignore_case", false)
.build();
@@ -74,8 +75,8 @@ public class HunspellServiceIT extends ESIntegTestCase {
public void testDicWithNoAff() throws Exception {
Settings settings = Settings.settingsBuilder()
- .put("path.conf", getDataPath("/indices/analyze/no_aff_conf_dir"))
- .put(HUNSPELL_LAZY_LOAD, randomBoolean())
+ .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/no_aff_conf_dir"))
+ .put(HUNSPELL_LAZY_LOAD.getKey(), randomBoolean())
.build();
Dictionary dictionary = null;
@@ -92,8 +93,8 @@ public class HunspellServiceIT extends ESIntegTestCase {
public void testDicWithTwoAffs() throws Exception {
Settings settings = Settings.settingsBuilder()
- .put("path.conf", getDataPath("/indices/analyze/two_aff_conf_dir"))
- .put(HUNSPELL_LAZY_LOAD, randomBoolean())
+ .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/two_aff_conf_dir"))
+ .put(HUNSPELL_LAZY_LOAD.getKey(), randomBoolean())
.build();
Dictionary dictionary = null;
diff --git a/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesQueryCacheTests.java b/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesQueryCacheTests.java
index 83295e9548..ff5dc9ad70 100644
--- a/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesQueryCacheTests.java
+++ b/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesQueryCacheTests.java
@@ -86,13 +86,13 @@ public class IndicesQueryCacheTests extends ESTestCase {
w.addDocument(new Document());
DirectoryReader r = DirectoryReader.open(w, false);
w.close();
- ShardId shard = new ShardId(new Index("index"), 0);
+ ShardId shard = new ShardId("index", "_na_", 0);
r = ElasticsearchDirectoryReader.wrap(r, shard);
IndexSearcher s = new IndexSearcher(r);
s.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE);
Settings settings = Settings.builder()
- .put(IndicesQueryCache.INDICES_CACHE_QUERY_COUNT, 10)
+ .put(IndicesQueryCache.INDICES_CACHE_QUERY_COUNT_SETTING.getKey(), 10)
.build();
IndicesQueryCache cache = new IndicesQueryCache(settings);
s.setQueryCache(cache);
@@ -156,7 +156,7 @@ public class IndicesQueryCacheTests extends ESTestCase {
w1.addDocument(new Document());
DirectoryReader r1 = DirectoryReader.open(w1, false);
w1.close();
- ShardId shard1 = new ShardId(new Index("index"), 0);
+ ShardId shard1 = new ShardId("index", "_na_", 0);
r1 = ElasticsearchDirectoryReader.wrap(r1, shard1);
IndexSearcher s1 = new IndexSearcher(r1);
s1.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE);
@@ -166,13 +166,13 @@ public class IndicesQueryCacheTests extends ESTestCase {
w2.addDocument(new Document());
DirectoryReader r2 = DirectoryReader.open(w2, false);
w2.close();
- ShardId shard2 = new ShardId(new Index("index"), 1);
+ ShardId shard2 = new ShardId("index", "_na_", 1);
r2 = ElasticsearchDirectoryReader.wrap(r2, shard2);
IndexSearcher s2 = new IndexSearcher(r2);
s2.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE);
Settings settings = Settings.builder()
- .put(IndicesQueryCache.INDICES_CACHE_QUERY_COUNT, 10)
+ .put(IndicesQueryCache.INDICES_CACHE_QUERY_COUNT_SETTING.getKey(), 10)
.build();
IndicesQueryCache cache = new IndicesQueryCache(settings);
s1.setQueryCache(cache);
@@ -281,7 +281,7 @@ public class IndicesQueryCacheTests extends ESTestCase {
w1.addDocument(new Document());
DirectoryReader r1 = DirectoryReader.open(w1, false);
w1.close();
- ShardId shard1 = new ShardId(new Index("index"), 0);
+ ShardId shard1 = new ShardId("index", "_na_", 0);
r1 = ElasticsearchDirectoryReader.wrap(r1, shard1);
IndexSearcher s1 = new IndexSearcher(r1);
s1.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE);
@@ -291,13 +291,13 @@ public class IndicesQueryCacheTests extends ESTestCase {
w2.addDocument(new Document());
DirectoryReader r2 = DirectoryReader.open(w2, false);
w2.close();
- ShardId shard2 = new ShardId(new Index("index"), 1);
+ ShardId shard2 = new ShardId("index", "_na_", 1);
r2 = ElasticsearchDirectoryReader.wrap(r2, shard2);
IndexSearcher s2 = new IndexSearcher(r2);
s2.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE);
Settings settings = Settings.builder()
- .put(IndicesQueryCache.INDICES_CACHE_QUERY_COUNT, 10)
+ .put(IndicesQueryCache.INDICES_CACHE_QUERY_COUNT_SETTING.getKey(), 10)
.build();
IndicesQueryCache cache = new IndicesQueryCache(settings);
s1.setQueryCache(cache);
diff --git a/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesRequestCacheIT.java b/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesRequestCacheIT.java
index 46402c6054..f09e033648 100644
--- a/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesRequestCacheIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesRequestCacheIT.java
@@ -54,7 +54,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
assertSearchResponse(r1);
// The cache is actually used
- assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), greaterThan(0l));
+ assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), greaterThan(0L));
for (int i = 0; i < 10; ++i) {
final SearchResponse r2 = client().prepareSearch("index").setSize(0).setSearchType(SearchType.QUERY_THEN_FETCH)
diff --git a/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java b/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java
index c8a80f6621..d74e490fdb 100644
--- a/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java
@@ -30,11 +30,11 @@ import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationComman
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.junit.annotations.TestLogging;
import java.io.IOException;
import java.util.Arrays;
@@ -86,12 +86,13 @@ public class FlushIT extends ESIntegTestCase {
}
}
- @TestLogging("indices:TRACE")
public void testSyncedFlush() throws ExecutionException, InterruptedException, IOException {
internalCluster().ensureAtLeastNumDataNodes(2);
prepareCreate("test").setSettings(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).get();
ensureGreen();
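+ // resolve the concrete Index (name and uuid) since the ShardId-based calls below require it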
+ final Index index = client().admin().cluster().prepareState().get().getState().metaData().index("test").getIndex();
+
IndexStats indexStats = client().admin().indices().prepareStats("test").get().getIndex("test");
for (ShardStats shardStats : indexStats.getShards()) {
assertNull(shardStats.getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID));
@@ -100,7 +101,7 @@ public class FlushIT extends ESIntegTestCase {
ShardsSyncedFlushResult result;
if (randomBoolean()) {
logger.info("--> sync flushing shard 0");
- result = SyncedFlushUtil.attemptSyncedFlush(internalCluster(), new ShardId("test", 0));
+ result = SyncedFlushUtil.attemptSyncedFlush(internalCluster(), new ShardId(index, 0));
} else {
logger.info("--> sync flushing index [test]");
SyncedFlushResponse indicesResult = client().admin().indices().prepareSyncedFlush("test").get();
@@ -123,7 +124,7 @@ public class FlushIT extends ESIntegTestCase {
ShardRouting shardRouting = clusterState.getRoutingTable().index("test").shard(0).iterator().next();
String currentNodeName = clusterState.nodes().resolveNode(shardRouting.currentNodeId()).name();
assertFalse(currentNodeName.equals(newNodeName));
- internalCluster().client().admin().cluster().prepareReroute().add(new MoveAllocationCommand(new ShardId("test", 0), currentNodeName, newNodeName)).get();
+ internalCluster().client().admin().cluster().prepareReroute().add(new MoveAllocationCommand("test", 0, currentNodeName, newNodeName)).get();
client().admin().cluster().prepareHealth()
.setWaitForRelocatingShards(0)
diff --git a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java
index e4c9cb8a7e..c30a5adaac 100644
--- a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java
+++ b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java
@@ -133,7 +133,7 @@ public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase {
SyncedFlushService flushService = getInstanceFromNode(SyncedFlushService.class);
SyncedFlushUtil.LatchedListener listener = new SyncedFlushUtil.LatchedListener();
- flushService.attemptSyncedFlush(new ShardId("test", 1), listener);
+ flushService.attemptSyncedFlush(new ShardId("test", "_na_", 1), listener);
listener.latch.await();
assertNotNull(listener.error);
assertNull(listener.result);
@@ -151,7 +151,7 @@ public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase {
assertEquals("closed", listener.error.getMessage());
listener = new SyncedFlushUtil.LatchedListener();
- flushService.attemptSyncedFlush(new ShardId("index not found", 0), listener);
+ flushService.attemptSyncedFlush(new ShardId("index not found", "_na_", 0), listener);
listener.latch.await();
assertNotNull(listener.error);
assertNull(listener.result);
diff --git a/core/src/test/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java b/core/src/test/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java
index 0946d51a45..feb9863ec1 100644
--- a/core/src/test/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java
@@ -44,7 +44,7 @@ public class ConcurrentDynamicTemplateIT extends ESIntegTestCase {
final String fieldName = "field";
final String mapping = "{ \"" + mappingType + "\": {" +
"\"dynamic_templates\": ["
- + "{ \"" + fieldName + "\": {" + "\"path_match\": \"*\"," + "\"mapping\": {" + "\"type\": \"string\"," + "\"store\": \"yes\","
+ + "{ \"" + fieldName + "\": {" + "\"path_match\": \"*\"," + "\"mapping\": {" + "\"type\": \"string\"," + "\"store\": true,"
+ "\"index\": \"analyzed\", \"analyzer\": \"whitespace\" } } } ] } }";
// The 'fieldNames' array is used to help with retrieval of index terms
// after testing
diff --git a/core/src/test/java/org/elasticsearch/indices/mapping/DedicatedMasterGetFieldMappingIT.java b/core/src/test/java/org/elasticsearch/indices/mapping/DedicatedMasterGetFieldMappingIT.java
index 62745a6424..1e51133eeb 100644
--- a/core/src/test/java/org/elasticsearch/indices/mapping/DedicatedMasterGetFieldMappingIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/mapping/DedicatedMasterGetFieldMappingIT.java
@@ -20,6 +20,7 @@
package org.elasticsearch.indices.mapping;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.node.Node;
import org.junit.Before;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
@@ -32,7 +33,7 @@ public class DedicatedMasterGetFieldMappingIT extends SimpleGetFieldMappingsIT {
@Before
public void before1() throws Exception {
Settings settings = settingsBuilder()
- .put("node.data", false)
+ .put(Node.NODE_DATA_SETTING.getKey(), false)
.build();
internalCluster().startNodesAsync(settings, Settings.EMPTY).get();
}
diff --git a/core/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java b/core/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java
index b96e9bff80..a993130a91 100644
--- a/core/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java
@@ -143,7 +143,7 @@ public class SimpleGetFieldMappingsIT extends ESIntegTestCase {
GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings().setFields("num", "field1", "obj.subfield").includeDefaults(true).get();
- assertThat((Map<String, Object>) response.fieldMappings("test", "type", "num").sourceAsMap().get("num"), hasEntry("index", (Object) "not_analyzed"));
+ assertThat((Map<String, Object>) response.fieldMappings("test", "type", "num").sourceAsMap().get("num"), hasEntry("index", Boolean.TRUE));
assertThat((Map<String, Object>) response.fieldMappings("test", "type", "num").sourceAsMap().get("num"), hasEntry("type", (Object) "long"));
assertThat((Map<String, Object>) response.fieldMappings("test", "type", "field1").sourceAsMap().get("field1"), hasEntry("index", (Object) "analyzed"));
assertThat((Map<String, Object>) response.fieldMappings("test", "type", "field1").sourceAsMap().get("field1"), hasEntry("type", (Object) "string"));
diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java
index 1af04e295d..6cdd4cf348 100644
--- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java
@@ -64,11 +64,11 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase {
logger.info("--> resetting breaker settings");
Settings resetSettings = settingsBuilder()
.put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(),
- HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getDefault(null))
+ HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getDefaultRaw(null))
.put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(),
- HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.getDefault(null))
+ HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.getDefaultRaw(null))
.put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(),
- HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getDefault(null))
+ HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getDefaultRaw(null))
.put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(), 1.0)
.build();
assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(resetSettings));
diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java
index dacf23758e..12acea4f9a 100644
--- a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java
@@ -33,7 +33,6 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand;
-import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
@@ -50,7 +49,6 @@ import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import org.elasticsearch.test.InternalTestCluster;
-import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.test.store.MockFSDirectoryService;
import org.elasticsearch.test.store.MockFSIndexStore;
import org.elasticsearch.test.transport.MockTransportService;
@@ -243,7 +241,6 @@ public class IndexRecoveryIT extends ESIntegTestCase {
validateIndexRecoveryState(nodeBRecoveryState.getIndex());
}
- @TestLogging("indices.recovery:TRACE")
public void testRerouteRecovery() throws Exception {
logger.info("--> start node A");
final String nodeA = internalCluster().startNode();
@@ -261,7 +258,7 @@ public class IndexRecoveryIT extends ESIntegTestCase {
logger.info("--> move shard from: {} to: {}", nodeA, nodeB);
client().admin().cluster().prepareReroute()
- .add(new MoveAllocationCommand(new ShardId(INDEX_NAME, 0), nodeA, nodeB))
+ .add(new MoveAllocationCommand(INDEX_NAME, 0, nodeA, nodeB))
.execute().actionGet().getState();
logger.info("--> waiting for recovery to start both on source and target");
@@ -352,10 +349,10 @@ public class IndexRecoveryIT extends ESIntegTestCase {
assertThat(recoveryStats.currentAsSource(), equalTo(0));
assertThat(recoveryStats.currentAsTarget(), equalTo(0));
if (nodeStats.getNode().name().equals(nodeA)) {
- assertThat("node A throttling should be >0", recoveryStats.throttleTime().millis(), greaterThan(0l));
+ assertThat("node A throttling should be >0", recoveryStats.throttleTime().millis(), greaterThan(0L));
}
if (nodeStats.getNode().name().equals(nodeB)) {
- assertThat("node B throttling should be >0 ", recoveryStats.throttleTime().millis(), greaterThan(0l));
+ assertThat("node B throttling should be >0 ", recoveryStats.throttleTime().millis(), greaterThan(0L));
}
}
@@ -371,10 +368,10 @@ public class IndexRecoveryIT extends ESIntegTestCase {
assertThat(recoveryStats.currentAsSource(), equalTo(0));
assertThat(recoveryStats.currentAsTarget(), equalTo(0));
if (nodeStats.getNode().name().equals(nodeA)) {
- assertThat("node A throttling should be >0", recoveryStats.throttleTime().millis(), greaterThan(0l));
+ assertThat("node A throttling should be >0", recoveryStats.throttleTime().millis(), greaterThan(0L));
}
if (nodeStats.getNode().name().equals(nodeB)) {
- assertThat("node B throttling should be >0 ", recoveryStats.throttleTime().millis(), greaterThan(0l));
+ assertThat("node B throttling should be >0 ", recoveryStats.throttleTime().millis(), greaterThan(0L));
}
}
@@ -387,7 +384,7 @@ public class IndexRecoveryIT extends ESIntegTestCase {
logger.info("--> move replica shard from: {} to: {}", nodeA, nodeC);
client().admin().cluster().prepareReroute()
- .add(new MoveAllocationCommand(new ShardId(INDEX_NAME, 0), nodeA, nodeC))
+ .add(new MoveAllocationCommand(INDEX_NAME, 0, nodeA, nodeC))
.execute().actionGet().getState();
response = client().admin().indices().prepareRecoveries(INDEX_NAME).execute().actionGet();
diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java
index a64b8606ae..c8cad5be29 100644
--- a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java
+++ b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java
@@ -58,7 +58,7 @@ import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
public class RecoverySourceHandlerTests extends ESTestCase {
- private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings(new Index("index"), Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT).build());
+ private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings("index", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT).build());
private final ShardId shardId = new ShardId(INDEX_SETTINGS.getIndex(), 1);
private final ClusterSettings service = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStateTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStateTests.java
index 4c1a6420bf..f81d979218 100644
--- a/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStateTests.java
+++ b/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStateTests.java
@@ -148,8 +148,8 @@ public class RecoveryStateTests extends ESTestCase {
}
timer.start();
- assertThat(timer.startTime(), greaterThan(0l));
- assertThat(timer.stopTime(), equalTo(0l));
+ assertThat(timer.startTime(), greaterThan(0L));
+ assertThat(timer.stopTime(), equalTo(0L));
Timer lastRead = streamer.serializeDeserialize();
final long time = lastRead.time();
assertThat(time, lessThanOrEqualTo(timer.time()));
@@ -164,7 +164,7 @@ public class RecoveryStateTests extends ESTestCase {
if (randomBoolean()) {
timer.stop();
assertThat(timer.stopTime(), greaterThanOrEqualTo(timer.startTime()));
- assertThat(timer.time(), greaterThan(0l));
+ assertThat(timer.time(), greaterThan(0L));
lastRead = streamer.serializeDeserialize();
assertThat(lastRead.startTime(), equalTo(timer.startTime()));
assertThat(lastRead.time(), equalTo(timer.time()));
@@ -172,13 +172,13 @@ public class RecoveryStateTests extends ESTestCase {
}
timer.reset();
- assertThat(timer.startTime(), equalTo(0l));
- assertThat(timer.time(), equalTo(0l));
- assertThat(timer.stopTime(), equalTo(0l));
+ assertThat(timer.startTime(), equalTo(0L));
+ assertThat(timer.time(), equalTo(0L));
+ assertThat(timer.stopTime(), equalTo(0L));
lastRead = streamer.serializeDeserialize();
- assertThat(lastRead.startTime(), equalTo(0l));
- assertThat(lastRead.time(), equalTo(0l));
- assertThat(lastRead.stopTime(), equalTo(0l));
+ assertThat(lastRead.startTime(), equalTo(0L));
+ assertThat(lastRead.time(), equalTo(0L));
+ assertThat(lastRead.stopTime(), equalTo(0L));
}
@@ -242,7 +242,7 @@ public class RecoveryStateTests extends ESTestCase {
assertThat(index.reusedFileCount(), equalTo(totalReused));
assertThat(index.totalRecoverFiles(), equalTo(filesToRecover.size()));
assertThat(index.recoveredFileCount(), equalTo(0));
- assertThat(index.recoveredBytes(), equalTo(0l));
+ assertThat(index.recoveredBytes(), equalTo(0L));
assertThat(index.recoveredFilesPercent(), equalTo(filesToRecover.size() == 0 ? 100.0f : 0.0f));
assertThat(index.recoveredBytesPercent(), equalTo(filesToRecover.size() == 0 ? 100.0f : 0.0f));
@@ -296,7 +296,7 @@ public class RecoveryStateTests extends ESTestCase {
if (completeRecovery) {
assertThat(filesToRecover.size(), equalTo(0));
index.stop();
- assertThat(index.time(), greaterThanOrEqualTo(0l));
+ assertThat(index.time(), greaterThanOrEqualTo(0L));
}
logger.info("testing serialized information");
@@ -346,7 +346,7 @@ public class RecoveryStateTests extends ESTestCase {
stages[i] = stages[j];
stages[j] = t;
try {
- RecoveryState state = new RecoveryState(new ShardId("bla", 0), randomBoolean(), randomFrom(Type.values()), discoveryNode, discoveryNode);
+ RecoveryState state = new RecoveryState(new ShardId("bla", "_na_", 0), randomBoolean(), randomFrom(Type.values()), discoveryNode, discoveryNode);
for (Stage stage : stages) {
state.setStage(stage);
}
@@ -360,7 +360,7 @@ public class RecoveryStateTests extends ESTestCase {
i = randomIntBetween(1, stages.length - 1);
ArrayList<Stage> list = new ArrayList<>(Arrays.asList(Arrays.copyOfRange(stages, 0, i)));
list.addAll(Arrays.asList(stages));
- RecoveryState state = new RecoveryState(new ShardId("bla", 0), randomBoolean(), randomFrom(Type.values()), discoveryNode, discoveryNode);
+ RecoveryState state = new RecoveryState(new ShardId("bla", "_na_", 0), randomBoolean(), randomFrom(Type.values()), discoveryNode, discoveryNode);
for (Stage stage : list) {
state.setStage(stage);
}
@@ -457,15 +457,15 @@ public class RecoveryStateTests extends ESTestCase {
// we don't need to test the time aspect, it's done in the timer test
verifyIndex.start();
- assertThat(verifyIndex.checkIndexTime(), equalTo(0l));
+ assertThat(verifyIndex.checkIndexTime(), equalTo(0L));
// force one
VerifyIndex lastRead = streamer.serializeDeserialize();
- assertThat(lastRead.checkIndexTime(), equalTo(0l));
+ assertThat(lastRead.checkIndexTime(), equalTo(0L));
long took = randomLong();
if (took < 0) {
took = -took;
- took = Math.max(0l, took);
+ took = Math.max(0L, took);
}
verifyIndex.checkIndexTime(took);
diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java
index 4ad298e39a..c7a7852e42 100644
--- a/core/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java
@@ -40,13 +40,13 @@ public class StartRecoveryRequestTests extends ESTestCase {
public void testSerialization() throws Exception {
Version targetNodeVersion = randomVersion(random());
StartRecoveryRequest outRequest = new StartRecoveryRequest(
- new ShardId("test", 0),
+ new ShardId("test", "_na_", 0),
new DiscoveryNode("a", new LocalTransportAddress("1"), targetNodeVersion),
new DiscoveryNode("b", new LocalTransportAddress("1"), targetNodeVersion),
true,
Store.MetadataSnapshot.EMPTY,
RecoveryState.Type.RELOCATION,
- 1l
+ 1L
);
ByteArrayOutputStream outBuffer = new ByteArrayOutputStream();
diff --git a/core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java b/core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java
index dc48b96c23..b257e3bcd5 100644
--- a/core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java
@@ -68,7 +68,7 @@ public class UpdateNumberOfReplicasIT extends ESIntegTestCase {
for (int i = 0; i < 10; i++) {
SearchResponse countResponse = client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get();
- assertHitCount(countResponse, 10l);
+ assertHitCount(countResponse, 10L);
}
logger.info("Increasing the number of replicas from 1 to 2");
@@ -98,7 +98,7 @@ public class UpdateNumberOfReplicasIT extends ESIntegTestCase {
for (int i = 0; i < 10; i++) {
SearchResponse countResponse = client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get();
- assertHitCount(countResponse, 10l);
+ assertHitCount(countResponse, 10L);
}
logger.info("Decreasing number of replicas from 2 to 0");
diff --git a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java
index 67fc5acd09..78d5e2203f 100644
--- a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java
@@ -205,7 +205,7 @@ public class UpdateSettingsIT extends ESIntegTestCase {
// No merge IO throttling should have happened:
NodesStatsResponse nodesStats = client().admin().cluster().prepareNodesStats().setIndices(true).get();
for(NodeStats stats : nodesStats.getNodes()) {
- assertThat(stats.getIndices().getStore().getThrottleTime().getMillis(), equalTo(0l));
+ assertThat(stats.getIndices().getStore().getThrottleTime().getMillis(), equalTo(0L));
}
logger.info("test: set low merge throttling");
diff --git a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java
index 4bf752886c..8a9fa19185 100644
--- a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java
@@ -74,6 +74,7 @@ import static org.hamcrest.Matchers.instanceOf;
*/
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0, transportClientRatio = 0)
@ESIntegTestCase.SuppressLocalMode
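+// suite-wide debug logging; replaces the per-test @TestLogging annotations removed below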
+@TestLogging("_root:DEBUG")
public class RareClusterStateIT extends ESIntegTestCase {
@Override
protected int numberOfShards() {
@@ -103,7 +104,6 @@ public class RareClusterStateIT extends ESIntegTestCase {
allocator.allocateUnassigned(routingAllocation);
}
- @TestLogging("gateway:TRACE")
public void testAssignmentWithJustAddedNodes() throws Exception {
internalCluster().startNode();
final String index = "index";
@@ -167,11 +167,10 @@ public class RareClusterStateIT extends ESIntegTestCase {
});
}
- @TestLogging("cluster.service:TRACE")
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/14932")
public void testDeleteCreateInOneBulk() throws Exception {
internalCluster().startNodesAsync(2, Settings.builder()
- .put(DiscoveryModule.DISCOVERY_TYPE_KEY, "zen")
+ .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "zen")
.build()).get();
assertFalse(client().admin().cluster().prepareHealth().setWaitForNodes("2").get().isTimedOut());
prepareCreate("test").setSettings(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, true).addMapping("type").get();
diff --git a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java
index 18c03e3739..38e81f7eba 100644
--- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java
@@ -78,7 +78,7 @@ public class IndexStatsIT extends ESIntegTestCase {
protected Settings nodeSettings(int nodeOrdinal) {
// The filter/query cache is cleaned periodically (default 60s); make it run often, since sleeping 60s in a test is not an option
return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal))
- .put(IndicesRequestCache.INDICES_CACHE_REQUEST_CLEAN_INTERVAL, "1ms")
+ .put(IndicesRequestCache.INDICES_CACHE_REQUEST_CLEAN_INTERVAL.getKey(), "1ms")
.put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true)
.put(IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING.getKey(), IndexModule.INDEX_QUERY_CACHE)
.build();
@@ -92,18 +92,18 @@ public class IndexStatsIT extends ESIntegTestCase {
client().admin().indices().prepareRefresh().execute().actionGet();
NodesStatsResponse nodesStats = client().admin().cluster().prepareNodesStats().setIndices(true).execute().actionGet();
- assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getFieldData().getMemorySizeInBytes(), equalTo(0l));
+ assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getFieldData().getMemorySizeInBytes(), equalTo(0L));
IndicesStatsResponse indicesStats = client().admin().indices().prepareStats("test").clear().setFieldData(true).execute().actionGet();
- assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), equalTo(0l));
+ assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), equalTo(0L));
// sort to load it into field data...
client().prepareSearch().addSort("field", SortOrder.ASC).execute().actionGet();
client().prepareSearch().addSort("field", SortOrder.ASC).execute().actionGet();
nodesStats = client().admin().cluster().prepareNodesStats().setIndices(true).execute().actionGet();
- assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getFieldData().getMemorySizeInBytes(), greaterThan(0l));
+ assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getFieldData().getMemorySizeInBytes(), greaterThan(0L));
indicesStats = client().admin().indices().prepareStats("test").clear().setFieldData(true).execute().actionGet();
- assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), greaterThan(0l));
+ assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), greaterThan(0L));
// sort to load it into field data...
client().prepareSearch().addSort("field2", SortOrder.ASC).execute().actionGet();
@@ -111,20 +111,20 @@ public class IndexStatsIT extends ESIntegTestCase {
// now check the per field stats
nodesStats = client().admin().cluster().prepareNodesStats().setIndices(new CommonStatsFlags().set(CommonStatsFlags.Flag.FieldData, true).fieldDataFields("*")).execute().actionGet();
- assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getFieldData().getMemorySizeInBytes(), greaterThan(0l));
- assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getFields().get("field") + nodesStats.getNodes()[1].getIndices().getFieldData().getFields().get("field"), greaterThan(0l));
+ assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getFieldData().getMemorySizeInBytes(), greaterThan(0L));
+ assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getFields().get("field") + nodesStats.getNodes()[1].getIndices().getFieldData().getFields().get("field"), greaterThan(0L));
assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getFields().get("field") + nodesStats.getNodes()[1].getIndices().getFieldData().getFields().get("field"), lessThan(nodesStats.getNodes()[0].getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getFieldData().getMemorySizeInBytes()));
indicesStats = client().admin().indices().prepareStats("test").clear().setFieldData(true).setFieldDataFields("*").execute().actionGet();
- assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), greaterThan(0l));
- assertThat(indicesStats.getTotal().getFieldData().getFields().get("field"), greaterThan(0l));
+ assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), greaterThan(0L));
+ assertThat(indicesStats.getTotal().getFieldData().getFields().get("field"), greaterThan(0L));
assertThat(indicesStats.getTotal().getFieldData().getFields().get("field"), lessThan(indicesStats.getTotal().getFieldData().getMemorySizeInBytes()));
client().admin().indices().prepareClearCache().setFieldDataCache(true).execute().actionGet();
nodesStats = client().admin().cluster().prepareNodesStats().setIndices(true).execute().actionGet();
- assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getFieldData().getMemorySizeInBytes(), equalTo(0l));
+ assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getFieldData().getMemorySizeInBytes(), equalTo(0L));
indicesStats = client().admin().indices().prepareStats("test").clear().setFieldData(true).execute().actionGet();
- assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), equalTo(0l));
+ assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), equalTo(0L));
}
@@ -140,14 +140,14 @@ public class IndexStatsIT extends ESIntegTestCase {
NodesStatsResponse nodesStats = client().admin().cluster().prepareNodesStats().setIndices(true)
.execute().actionGet();
- assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getFieldData().getMemorySizeInBytes(), equalTo(0l));
- assertThat(nodesStats.getNodes()[0].getIndices().getQueryCache().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getQueryCache().getMemorySizeInBytes(), equalTo(0l));
+ assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getFieldData().getMemorySizeInBytes(), equalTo(0L));
+ assertThat(nodesStats.getNodes()[0].getIndices().getQueryCache().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getQueryCache().getMemorySizeInBytes(), equalTo(0L));
IndicesStatsResponse indicesStats = client().admin().indices().prepareStats("test")
.clear().setFieldData(true).setQueryCache(true)
.execute().actionGet();
- assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), equalTo(0l));
- assertThat(indicesStats.getTotal().getQueryCache().getMemorySizeInBytes(), equalTo(0l));
+ assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), equalTo(0L));
+ assertThat(indicesStats.getTotal().getQueryCache().getMemorySizeInBytes(), equalTo(0L));
// sort to load it into field data, and filter to load the filter cache
client().prepareSearch()
@@ -161,27 +161,27 @@ public class IndexStatsIT extends ESIntegTestCase {
nodesStats = client().admin().cluster().prepareNodesStats().setIndices(true)
.execute().actionGet();
- assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getFieldData().getMemorySizeInBytes(), greaterThan(0l));
- assertThat(nodesStats.getNodes()[0].getIndices().getQueryCache().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getQueryCache().getMemorySizeInBytes(), greaterThan(0l));
+ assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getFieldData().getMemorySizeInBytes(), greaterThan(0L));
+ assertThat(nodesStats.getNodes()[0].getIndices().getQueryCache().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getQueryCache().getMemorySizeInBytes(), greaterThan(0L));
indicesStats = client().admin().indices().prepareStats("test")
.clear().setFieldData(true).setQueryCache(true)
.execute().actionGet();
- assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), greaterThan(0l));
- assertThat(indicesStats.getTotal().getQueryCache().getMemorySizeInBytes(), greaterThan(0l));
+ assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), greaterThan(0L));
+ assertThat(indicesStats.getTotal().getQueryCache().getMemorySizeInBytes(), greaterThan(0L));
client().admin().indices().prepareClearCache().execute().actionGet();
Thread.sleep(100); // Make sure the filter cache entries have been removed...
nodesStats = client().admin().cluster().prepareNodesStats().setIndices(true)
.execute().actionGet();
- assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getFieldData().getMemorySizeInBytes(), equalTo(0l));
- assertThat(nodesStats.getNodes()[0].getIndices().getQueryCache().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getQueryCache().getMemorySizeInBytes(), equalTo(0l));
+ assertThat(nodesStats.getNodes()[0].getIndices().getFieldData().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getFieldData().getMemorySizeInBytes(), equalTo(0L));
+ assertThat(nodesStats.getNodes()[0].getIndices().getQueryCache().getMemorySizeInBytes() + nodesStats.getNodes()[1].getIndices().getQueryCache().getMemorySizeInBytes(), equalTo(0L));
indicesStats = client().admin().indices().prepareStats("test")
.clear().setFieldData(true).setQueryCache(true)
.execute().actionGet();
- assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), equalTo(0l));
- assertThat(indicesStats.getTotal().getQueryCache().getMemorySizeInBytes(), equalTo(0l));
+ assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), equalTo(0L));
+ assertThat(indicesStats.getTotal().getQueryCache().getMemorySizeInBytes(), equalTo(0L));
}
public void testQueryCache() throws Exception {
@@ -218,15 +218,15 @@ public class IndexStatsIT extends ESIntegTestCase {
}
}
- assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), equalTo(0l));
- assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), equalTo(0l));
- assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), equalTo(0l));
+ assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), equalTo(0L));
+ assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), equalTo(0L));
+ assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), equalTo(0L));
for (int i = 0; i < 10; i++) {
assertThat(client().prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits(), equalTo((long) numDocs));
- assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), greaterThan(0l));
+ assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), greaterThan(0L));
}
- assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), greaterThan(0l));
- assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), greaterThan(0l));
+ assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), greaterThan(0L));
+ assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), greaterThan(0L));
// index the data again...
IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs];
@@ -242,25 +242,25 @@ public class IndexStatsIT extends ESIntegTestCase {
assertBusy(new Runnable() {
@Override
public void run() {
- assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), equalTo(0l));
+ assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), equalTo(0L));
}
});
for (int i = 0; i < 10; i++) {
assertThat(client().prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits(), equalTo((long) numDocs));
- assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), greaterThan(0l));
+ assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), greaterThan(0L));
}
client().admin().indices().prepareClearCache().setRequestCache(true).get(); // clean the cache
- assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), equalTo(0l));
+ assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), equalTo(0L));
// test explicit request parameter
assertThat(client().prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).setRequestCache(false).get().getHits().getTotalHits(), equalTo((long) numDocs));
- assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), equalTo(0l));
+ assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), equalTo(0L));
assertThat(client().prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).setRequestCache(true).get().getHits().getTotalHits(), equalTo((long) numDocs));
- assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), greaterThan(0l));
+ assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), greaterThan(0L));
// set the index level setting to false, and see that the reverse works
@@ -268,10 +268,10 @@ public class IndexStatsIT extends ESIntegTestCase {
assertAcked(client().admin().indices().prepareUpdateSettings("idx").setSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), false)));
assertThat(client().prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits(), equalTo((long) numDocs));
- assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), equalTo(0l));
+ assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), equalTo(0L));
assertThat(client().prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).setRequestCache(true).get().getHits().getTotalHits(), equalTo((long) numDocs));
- assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), greaterThan(0l));
+ assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), greaterThan(0L));
}
public void testNonThrottleStats() throws Exception {
@@ -303,7 +303,7 @@ public class IndexStatsIT extends ESIntegTestCase {
//nodesStats = client().admin().cluster().prepareNodesStats().setIndices(true).get();
stats = client().admin().indices().prepareStats().execute().actionGet();
- assertThat(stats.getPrimaries().getIndexing().getTotal().getThrottleTime().millis(), equalTo(0l));
+ assertThat(stats.getPrimaries().getIndexing().getTotal().getThrottleTime().millis(), equalTo(0L));
}
public void testThrottleStats() throws Exception {
@@ -370,33 +370,33 @@ public class IndexStatsIT extends ESIntegTestCase {
long totalExpectedWrites = test1ExpectedWrites + test2ExpectedWrites;
IndicesStatsResponse stats = client().admin().indices().prepareStats().execute().actionGet();
- assertThat(stats.getPrimaries().getDocs().getCount(), equalTo(3l));
+ assertThat(stats.getPrimaries().getDocs().getCount(), equalTo(3L));
assertThat(stats.getTotal().getDocs().getCount(), equalTo(totalExpectedWrites));
- assertThat(stats.getPrimaries().getIndexing().getTotal().getIndexCount(), equalTo(3l));
- assertThat(stats.getPrimaries().getIndexing().getTotal().getIndexFailedCount(), equalTo(0l));
+ assertThat(stats.getPrimaries().getIndexing().getTotal().getIndexCount(), equalTo(3L));
+ assertThat(stats.getPrimaries().getIndexing().getTotal().getIndexFailedCount(), equalTo(0L));
assertThat(stats.getPrimaries().getIndexing().getTotal().isThrottled(), equalTo(false));
- assertThat(stats.getPrimaries().getIndexing().getTotal().getThrottleTime().millis(), equalTo(0l));
+ assertThat(stats.getPrimaries().getIndexing().getTotal().getThrottleTime().millis(), equalTo(0L));
assertThat(stats.getTotal().getIndexing().getTotal().getIndexCount(), equalTo(totalExpectedWrites));
assertThat(stats.getTotal().getStore(), notNullValue());
assertThat(stats.getTotal().getMerge(), notNullValue());
assertThat(stats.getTotal().getFlush(), notNullValue());
assertThat(stats.getTotal().getRefresh(), notNullValue());
- assertThat(stats.getIndex("test1").getPrimaries().getDocs().getCount(), equalTo(2l));
+ assertThat(stats.getIndex("test1").getPrimaries().getDocs().getCount(), equalTo(2L));
assertThat(stats.getIndex("test1").getTotal().getDocs().getCount(), equalTo(test1ExpectedWrites));
assertThat(stats.getIndex("test1").getPrimaries().getStore(), notNullValue());
assertThat(stats.getIndex("test1").getPrimaries().getMerge(), notNullValue());
assertThat(stats.getIndex("test1").getPrimaries().getFlush(), notNullValue());
assertThat(stats.getIndex("test1").getPrimaries().getRefresh(), notNullValue());
- assertThat(stats.getIndex("test2").getPrimaries().getDocs().getCount(), equalTo(1l));
+ assertThat(stats.getIndex("test2").getPrimaries().getDocs().getCount(), equalTo(1L));
assertThat(stats.getIndex("test2").getTotal().getDocs().getCount(), equalTo(test2ExpectedWrites));
// make sure that number of requests in progress is 0
- assertThat(stats.getIndex("test1").getTotal().getIndexing().getTotal().getIndexCurrent(), equalTo(0l));
- assertThat(stats.getIndex("test1").getTotal().getIndexing().getTotal().getDeleteCurrent(), equalTo(0l));
- assertThat(stats.getIndex("test1").getTotal().getSearch().getTotal().getFetchCurrent(), equalTo(0l));
- assertThat(stats.getIndex("test1").getTotal().getSearch().getTotal().getQueryCurrent(), equalTo(0l));
+ assertThat(stats.getIndex("test1").getTotal().getIndexing().getTotal().getIndexCurrent(), equalTo(0L));
+ assertThat(stats.getIndex("test1").getTotal().getIndexing().getTotal().getDeleteCurrent(), equalTo(0L));
+ assertThat(stats.getIndex("test1").getTotal().getSearch().getTotal().getFetchCurrent(), equalTo(0L));
+ assertThat(stats.getIndex("test1").getTotal().getSearch().getTotal().getQueryCurrent(), equalTo(0L));
// check flags
stats = client().admin().indices().prepareStats().clear()
@@ -414,32 +414,32 @@ public class IndexStatsIT extends ESIntegTestCase {
// check types
stats = client().admin().indices().prepareStats().setTypes("type1", "type").execute().actionGet();
- assertThat(stats.getPrimaries().getIndexing().getTypeStats().get("type1").getIndexCount(), equalTo(1l));
- assertThat(stats.getPrimaries().getIndexing().getTypeStats().get("type").getIndexCount(), equalTo(1l));
- assertThat(stats.getPrimaries().getIndexing().getTypeStats().get("type1").getIndexFailedCount(), equalTo(0l));
+ assertThat(stats.getPrimaries().getIndexing().getTypeStats().get("type1").getIndexCount(), equalTo(1L));
+ assertThat(stats.getPrimaries().getIndexing().getTypeStats().get("type").getIndexCount(), equalTo(1L));
+ assertThat(stats.getPrimaries().getIndexing().getTypeStats().get("type1").getIndexFailedCount(), equalTo(0L));
assertThat(stats.getPrimaries().getIndexing().getTypeStats().get("type2"), nullValue());
- assertThat(stats.getPrimaries().getIndexing().getTypeStats().get("type1").getIndexCurrent(), equalTo(0l));
- assertThat(stats.getPrimaries().getIndexing().getTypeStats().get("type1").getDeleteCurrent(), equalTo(0l));
+ assertThat(stats.getPrimaries().getIndexing().getTypeStats().get("type1").getIndexCurrent(), equalTo(0L));
+ assertThat(stats.getPrimaries().getIndexing().getTypeStats().get("type1").getDeleteCurrent(), equalTo(0L));
- assertThat(stats.getTotal().getGet().getCount(), equalTo(0l));
+ assertThat(stats.getTotal().getGet().getCount(), equalTo(0L));
// check get
GetResponse getResponse = client().prepareGet("test1", "type1", "1").execute().actionGet();
assertThat(getResponse.isExists(), equalTo(true));
stats = client().admin().indices().prepareStats().execute().actionGet();
- assertThat(stats.getTotal().getGet().getCount(), equalTo(1l));
- assertThat(stats.getTotal().getGet().getExistsCount(), equalTo(1l));
- assertThat(stats.getTotal().getGet().getMissingCount(), equalTo(0l));
+ assertThat(stats.getTotal().getGet().getCount(), equalTo(1L));
+ assertThat(stats.getTotal().getGet().getExistsCount(), equalTo(1L));
+ assertThat(stats.getTotal().getGet().getMissingCount(), equalTo(0L));
// missing get
getResponse = client().prepareGet("test1", "type1", "2").execute().actionGet();
assertThat(getResponse.isExists(), equalTo(false));
stats = client().admin().indices().prepareStats().execute().actionGet();
- assertThat(stats.getTotal().getGet().getCount(), equalTo(2l));
- assertThat(stats.getTotal().getGet().getExistsCount(), equalTo(1l));
- assertThat(stats.getTotal().getGet().getMissingCount(), equalTo(1l));
+ assertThat(stats.getTotal().getGet().getCount(), equalTo(2L));
+ assertThat(stats.getTotal().getGet().getExistsCount(), equalTo(1L));
+ assertThat(stats.getTotal().getGet().getMissingCount(), equalTo(1L));
// clear all
stats = client().admin().indices().prepareStats()
@@ -476,8 +476,8 @@ public class IndexStatsIT extends ESIntegTestCase {
} catch (VersionConflictEngineException e) {}
stats = client().admin().indices().prepareStats().setTypes("type1", "type2").execute().actionGet();
- assertThat(stats.getIndex("test1").getTotal().getIndexing().getTotal().getIndexFailedCount(), equalTo(2l));
- assertThat(stats.getIndex("test2").getTotal().getIndexing().getTotal().getIndexFailedCount(), equalTo(1l));
+ assertThat(stats.getIndex("test1").getTotal().getIndexing().getTotal().getIndexFailedCount(), equalTo(2L));
+ assertThat(stats.getIndex("test2").getTotal().getIndexing().getTotal().getIndexFailedCount(), equalTo(1L));
assertThat(stats.getPrimaries().getIndexing().getTypeStats().get("type1").getIndexFailedCount(), equalTo(1L));
assertThat(stats.getPrimaries().getIndexing().getTypeStats().get("type2").getIndexFailedCount(), equalTo(1L));
assertThat(stats.getTotal().getIndexing().getTotal().getIndexFailedCount(), equalTo(3L));
@@ -516,7 +516,7 @@ public class IndexStatsIT extends ESIntegTestCase {
.execute().actionGet();
assertThat(stats.getTotal().getMerge(), notNullValue());
- assertThat(stats.getTotal().getMerge().getTotal(), greaterThan(0l));
+ assertThat(stats.getTotal().getMerge().getTotal(), greaterThan(0L));
}
public void testSegmentsStats() {
@@ -531,9 +531,9 @@ public class IndexStatsIT extends ESIntegTestCase {
}
IndicesStatsResponse stats = client().admin().indices().prepareStats().setSegments(true).get();
- assertThat(stats.getTotal().getSegments().getIndexWriterMemoryInBytes(), greaterThan(0l));
- assertThat(stats.getTotal().getSegments().getIndexWriterMaxMemoryInBytes(), greaterThan(0l));
- assertThat(stats.getTotal().getSegments().getVersionMapMemoryInBytes(), greaterThan(0l));
+ assertThat(stats.getTotal().getSegments().getIndexWriterMemoryInBytes(), greaterThan(0L));
+ assertThat(stats.getTotal().getSegments().getIndexWriterMaxMemoryInBytes(), greaterThan(0L));
+ assertThat(stats.getTotal().getSegments().getVersionMapMemoryInBytes(), greaterThan(0L));
client().admin().indices().prepareFlush().get();
client().admin().indices().prepareForceMerge().setMaxNumSegments(1).execute().actionGet();
@@ -542,7 +542,7 @@ public class IndexStatsIT extends ESIntegTestCase {
assertThat(stats.getTotal().getSegments(), notNullValue());
assertThat(stats.getTotal().getSegments().getCount(), equalTo((long) test1.totalNumShards));
assumeTrue("test doesn't work with 4.6.0", org.elasticsearch.Version.CURRENT.luceneVersion != Version.LUCENE_4_6_0);
- assertThat(stats.getTotal().getSegments().getMemoryInBytes(), greaterThan(0l));
+ assertThat(stats.getTotal().getSegments().getMemoryInBytes(), greaterThan(0L));
}
public void testAllFlags() throws Exception {
@@ -713,33 +713,33 @@ public class IndexStatsIT extends ESIntegTestCase {
IndicesStatsRequestBuilder builder = client().admin().indices().prepareStats();
IndicesStatsResponse stats = builder.execute().actionGet();
- assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0l));
+ assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0L));
assertThat(stats.getTotal().fieldData.getFields(), is(nullValue()));
stats = builder.setFieldDataFields("bar").execute().actionGet();
- assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0l));
+ assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0L));
assertThat(stats.getTotal().fieldData.getFields().containsKey("bar"), is(true));
- assertThat(stats.getTotal().fieldData.getFields().get("bar"), greaterThan(0l));
+ assertThat(stats.getTotal().fieldData.getFields().get("bar"), greaterThan(0L));
assertThat(stats.getTotal().fieldData.getFields().containsKey("baz"), is(false));
stats = builder.setFieldDataFields("bar", "baz").execute().actionGet();
- assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0l));
+ assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0L));
assertThat(stats.getTotal().fieldData.getFields().containsKey("bar"), is(true));
- assertThat(stats.getTotal().fieldData.getFields().get("bar"), greaterThan(0l));
+ assertThat(stats.getTotal().fieldData.getFields().get("bar"), greaterThan(0L));
assertThat(stats.getTotal().fieldData.getFields().containsKey("baz"), is(true));
- assertThat(stats.getTotal().fieldData.getFields().get("baz"), greaterThan(0l));
+ assertThat(stats.getTotal().fieldData.getFields().get("baz"), greaterThan(0L));
stats = builder.setFieldDataFields("*").execute().actionGet();
- assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0l));
+ assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0L));
assertThat(stats.getTotal().fieldData.getFields().containsKey("bar"), is(true));
- assertThat(stats.getTotal().fieldData.getFields().get("bar"), greaterThan(0l));
+ assertThat(stats.getTotal().fieldData.getFields().get("bar"), greaterThan(0L));
assertThat(stats.getTotal().fieldData.getFields().containsKey("baz"), is(true));
- assertThat(stats.getTotal().fieldData.getFields().get("baz"), greaterThan(0l));
+ assertThat(stats.getTotal().fieldData.getFields().get("baz"), greaterThan(0L));
stats = builder.setFieldDataFields("*r").execute().actionGet();
- assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0l));
+ assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0L));
assertThat(stats.getTotal().fieldData.getFields().containsKey("bar"), is(true));
- assertThat(stats.getTotal().fieldData.getFields().get("bar"), greaterThan(0l));
+ assertThat(stats.getTotal().fieldData.getFields().get("bar"), greaterThan(0L));
assertThat(stats.getTotal().fieldData.getFields().containsKey("baz"), is(false));
}
@@ -758,33 +758,33 @@ public class IndexStatsIT extends ESIntegTestCase {
IndicesStatsRequestBuilder builder = client().admin().indices().prepareStats();
IndicesStatsResponse stats = builder.execute().actionGet();
- assertThat(stats.getTotal().completion.getSizeInBytes(), greaterThan(0l));
+ assertThat(stats.getTotal().completion.getSizeInBytes(), greaterThan(0L));
assertThat(stats.getTotal().completion.getFields(), is(nullValue()));
stats = builder.setCompletionFields("bar.completion").execute().actionGet();
- assertThat(stats.getTotal().completion.getSizeInBytes(), greaterThan(0l));
+ assertThat(stats.getTotal().completion.getSizeInBytes(), greaterThan(0L));
assertThat(stats.getTotal().completion.getFields().containsKey("bar.completion"), is(true));
- assertThat(stats.getTotal().completion.getFields().get("bar.completion"), greaterThan(0l));
+ assertThat(stats.getTotal().completion.getFields().get("bar.completion"), greaterThan(0L));
assertThat(stats.getTotal().completion.getFields().containsKey("baz.completion"), is(false));
stats = builder.setCompletionFields("bar.completion", "baz.completion").execute().actionGet();
- assertThat(stats.getTotal().completion.getSizeInBytes(), greaterThan(0l));
+ assertThat(stats.getTotal().completion.getSizeInBytes(), greaterThan(0L));
assertThat(stats.getTotal().completion.getFields().containsKey("bar.completion"), is(true));
- assertThat(stats.getTotal().completion.getFields().get("bar.completion"), greaterThan(0l));
+ assertThat(stats.getTotal().completion.getFields().get("bar.completion"), greaterThan(0L));
assertThat(stats.getTotal().completion.getFields().containsKey("baz.completion"), is(true));
- assertThat(stats.getTotal().completion.getFields().get("baz.completion"), greaterThan(0l));
+ assertThat(stats.getTotal().completion.getFields().get("baz.completion"), greaterThan(0L));
stats = builder.setCompletionFields("*").execute().actionGet();
- assertThat(stats.getTotal().completion.getSizeInBytes(), greaterThan(0l));
+ assertThat(stats.getTotal().completion.getSizeInBytes(), greaterThan(0L));
assertThat(stats.getTotal().completion.getFields().containsKey("bar.completion"), is(true));
- assertThat(stats.getTotal().completion.getFields().get("bar.completion"), greaterThan(0l));
+ assertThat(stats.getTotal().completion.getFields().get("bar.completion"), greaterThan(0L));
assertThat(stats.getTotal().completion.getFields().containsKey("baz.completion"), is(true));
- assertThat(stats.getTotal().completion.getFields().get("baz.completion"), greaterThan(0l));
+ assertThat(stats.getTotal().completion.getFields().get("baz.completion"), greaterThan(0L));
stats = builder.setCompletionFields("*r*").execute().actionGet();
- assertThat(stats.getTotal().completion.getSizeInBytes(), greaterThan(0l));
+ assertThat(stats.getTotal().completion.getSizeInBytes(), greaterThan(0L));
assertThat(stats.getTotal().completion.getFields().containsKey("bar.completion"), is(true));
- assertThat(stats.getTotal().completion.getFields().get("bar.completion"), greaterThan(0l));
+ assertThat(stats.getTotal().completion.getFields().get("bar.completion"), greaterThan(0L));
assertThat(stats.getTotal().completion.getFields().containsKey("baz.completion"), is(false));
}
@@ -802,23 +802,23 @@ public class IndexStatsIT extends ESIntegTestCase {
IndicesStatsRequestBuilder builder = client().admin().indices().prepareStats();
IndicesStatsResponse stats = builder.execute().actionGet();
- assertThat(stats.getTotal().search.getTotal().getQueryCount(), greaterThan(0l));
+ assertThat(stats.getTotal().search.getTotal().getQueryCount(), greaterThan(0L));
assertThat(stats.getTotal().search.getGroupStats(), is(nullValue()));
stats = builder.setGroups("bar").execute().actionGet();
- assertThat(stats.getTotal().search.getGroupStats().get("bar").getQueryCount(), greaterThan(0l));
+ assertThat(stats.getTotal().search.getGroupStats().get("bar").getQueryCount(), greaterThan(0L));
assertThat(stats.getTotal().search.getGroupStats().containsKey("baz"), is(false));
stats = builder.setGroups("bar", "baz").execute().actionGet();
- assertThat(stats.getTotal().search.getGroupStats().get("bar").getQueryCount(), greaterThan(0l));
- assertThat(stats.getTotal().search.getGroupStats().get("baz").getQueryCount(), greaterThan(0l));
+ assertThat(stats.getTotal().search.getGroupStats().get("bar").getQueryCount(), greaterThan(0L));
+ assertThat(stats.getTotal().search.getGroupStats().get("baz").getQueryCount(), greaterThan(0L));
stats = builder.setGroups("*").execute().actionGet();
- assertThat(stats.getTotal().search.getGroupStats().get("bar").getQueryCount(), greaterThan(0l));
- assertThat(stats.getTotal().search.getGroupStats().get("baz").getQueryCount(), greaterThan(0l));
+ assertThat(stats.getTotal().search.getGroupStats().get("bar").getQueryCount(), greaterThan(0L));
+ assertThat(stats.getTotal().search.getGroupStats().get("baz").getQueryCount(), greaterThan(0L));
stats = builder.setGroups("*r").execute().actionGet();
- assertThat(stats.getTotal().search.getGroupStats().get("bar").getQueryCount(), greaterThan(0l));
+ assertThat(stats.getTotal().search.getGroupStats().get("bar").getQueryCount(), greaterThan(0L));
assertThat(stats.getTotal().search.getGroupStats().containsKey("baz"), is(false));
}
@@ -836,23 +836,23 @@ public class IndexStatsIT extends ESIntegTestCase {
IndicesStatsRequestBuilder builder = client().admin().indices().prepareStats();
IndicesStatsResponse stats = builder.execute().actionGet();
- assertThat(stats.getTotal().indexing.getTotal().getIndexCount(), greaterThan(0l));
+ assertThat(stats.getTotal().indexing.getTotal().getIndexCount(), greaterThan(0L));
assertThat(stats.getTotal().indexing.getTypeStats(), is(nullValue()));
stats = builder.setTypes("bar").execute().actionGet();
- assertThat(stats.getTotal().indexing.getTypeStats().get("bar").getIndexCount(), greaterThan(0l));
+ assertThat(stats.getTotal().indexing.getTypeStats().get("bar").getIndexCount(), greaterThan(0L));
assertThat(stats.getTotal().indexing.getTypeStats().containsKey("baz"), is(false));
stats = builder.setTypes("bar", "baz").execute().actionGet();
- assertThat(stats.getTotal().indexing.getTypeStats().get("bar").getIndexCount(), greaterThan(0l));
- assertThat(stats.getTotal().indexing.getTypeStats().get("baz").getIndexCount(), greaterThan(0l));
+ assertThat(stats.getTotal().indexing.getTypeStats().get("bar").getIndexCount(), greaterThan(0L));
+ assertThat(stats.getTotal().indexing.getTypeStats().get("baz").getIndexCount(), greaterThan(0L));
stats = builder.setTypes("*").execute().actionGet();
- assertThat(stats.getTotal().indexing.getTypeStats().get("bar").getIndexCount(), greaterThan(0l));
- assertThat(stats.getTotal().indexing.getTypeStats().get("baz").getIndexCount(), greaterThan(0l));
+ assertThat(stats.getTotal().indexing.getTypeStats().get("bar").getIndexCount(), greaterThan(0L));
+ assertThat(stats.getTotal().indexing.getTypeStats().get("baz").getIndexCount(), greaterThan(0L));
stats = builder.setTypes("*r").execute().actionGet();
- assertThat(stats.getTotal().indexing.getTypeStats().get("bar").getIndexCount(), greaterThan(0l));
+ assertThat(stats.getTotal().indexing.getTypeStats().get("bar").getIndexCount(), greaterThan(0L));
assertThat(stats.getTotal().indexing.getTypeStats().containsKey("baz"), is(false));
}
diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java
index 18d56eee88..dcf3dbaf4b 100644
--- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java
@@ -36,16 +36,17 @@ import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.TestShardRouting;
import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand;
import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
-import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.recovery.RecoverySource;
+import org.elasticsearch.node.Node;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
@@ -53,7 +54,6 @@ import org.elasticsearch.test.ESIntegTestCase.Scope;
import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.test.disruption.BlockClusterStateProcessing;
import org.elasticsearch.test.disruption.SingleNodeDisruption;
-import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.transport.ConnectTransportException;
import org.elasticsearch.transport.TransportException;
@@ -82,11 +82,11 @@ import static org.hamcrest.Matchers.equalTo;
public class IndicesStoreIntegrationIT extends ESIntegTestCase {
@Override
protected Settings nodeSettings(int nodeOrdinal) { // simplify this and only use a single data path
- return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put("path.data", "")
+ return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put(Environment.PATH_DATA_SETTING.getKey(), "")
// by default this value is 1 sec in tests (30 sec in practice), but the disruption we add here
// lasts between 1 and 2 sec and can cause each of the shard deletion requests to time out.
// to prevent this we set the timeout here to something high, i.e. the default used in practice
- .put(IndicesStore.INDICES_STORE_DELETE_SHARD_TIMEOUT, new TimeValue(30, TimeUnit.SECONDS))
+ .put(IndicesStore.INDICES_STORE_DELETE_SHARD_TIMEOUT.getKey(), new TimeValue(30, TimeUnit.SECONDS))
.build();
}
@@ -102,9 +102,9 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase {
}
public void testIndexCleanup() throws Exception {
- final String masterNode = internalCluster().startNode(Settings.builder().put("node.data", false));
- final String node_1 = internalCluster().startNode(Settings.builder().put("node.master", false));
- final String node_2 = internalCluster().startNode(Settings.builder().put("node.master", false));
+ final String masterNode = internalCluster().startNode(Settings.builder().put(Node.NODE_DATA_SETTING.getKey(), false));
+ final String node_1 = internalCluster().startNode(Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false));
+ final String node_2 = internalCluster().startNode(Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false));
logger.info("--> creating index [test] with one shard and on replica");
assertAcked(prepareCreate("test").setSettings(
Settings.builder().put(indexSettings())
@@ -120,7 +120,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase {
assertThat(Files.exists(indexDirectory(node_2, "test")), equalTo(true));
logger.info("--> starting node server3");
- final String node_3 = internalCluster().startNode(Settings.builder().put("node.master", false));
+ final String node_3 = internalCluster().startNode(Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false));
logger.info("--> running cluster_health");
ClusterHealthResponse clusterHealth = client().admin().cluster().prepareHealth()
.setWaitForNodes("4")
@@ -144,7 +144,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase {
CountDownLatch beginRelocationLatch = new CountDownLatch(1);
CountDownLatch endRelocationLatch = new CountDownLatch(1);
transportServiceNode3.addTracer(new ReclocationStartEndTracer(logger, beginRelocationLatch, endRelocationLatch));
- internalCluster().client().admin().cluster().prepareReroute().add(new MoveAllocationCommand(new ShardId("test", 0), node_1, node_3)).get();
+ internalCluster().client().admin().cluster().prepareReroute().add(new MoveAllocationCommand("test", 0, node_1, node_3)).get();
// wait for relocation to start
beginRelocationLatch.await();
disruption.startDisrupting();
@@ -154,7 +154,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase {
sleep(50);
disruption.stopDisrupting();
} else {
- internalCluster().client().admin().cluster().prepareReroute().add(new MoveAllocationCommand(new ShardId("test", 0), node_1, node_3)).get();
+ internalCluster().client().admin().cluster().prepareReroute().add(new MoveAllocationCommand("test", 0, node_1, node_3)).get();
}
clusterHealth = client().admin().cluster().prepareHealth()
.setWaitForRelocatingShards(0)
@@ -209,7 +209,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase {
});
logger.info("--> move shard from {} to {}, and wait for relocation to finish", node_1, node_2);
- internalCluster().client().admin().cluster().prepareReroute().add(new MoveAllocationCommand(new ShardId("test", 0), node_1, node_2)).get();
+ internalCluster().client().admin().cluster().prepareReroute().add(new MoveAllocationCommand("test", 0, node_1, node_2)).get();
shardActiveRequestSent.await();
ClusterHealthResponse clusterHealth = client().admin().cluster().prepareHealth()
.setWaitForRelocatingShards(0)
@@ -289,12 +289,11 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase {
assertThat(waitForShardDeletion(node_4, "test", 0), equalTo(false));
}
- @TestLogging("cluster.service:TRACE")
public void testShardActiveElsewhereDoesNotDeleteAnother() throws Exception {
InternalTestCluster.Async<String> masterFuture = internalCluster().startNodeAsync(
- Settings.builder().put("node.master", true, "node.data", false).build());
+ Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), true, Node.NODE_DATA_SETTING.getKey(), false).build());
InternalTestCluster.Async<List<String>> nodesFutures = internalCluster().startNodesAsync(4,
- Settings.builder().put("node.master", false, "node.data", true).build());
+ Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false, Node.NODE_DATA_SETTING.getKey(), true).build());
final String masterNode = masterFuture.get();
final String node1 = nodesFutures.get().get(0);
@@ -354,7 +353,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase {
logger.debug("--> starting the two old nodes back");
internalCluster().startNodesAsync(2,
- Settings.builder().put("node.master", false, "node.data", true).build());
+ Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false, Node.NODE_DATA_SETTING.getKey(), true).build());
assertFalse(client().admin().cluster().prepareHealth().setWaitForNodes("5").get().isTimedOut());
@@ -385,7 +384,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase {
waitNoPendingTasksOnAll();
ClusterStateResponse stateResponse = client().admin().cluster().prepareState().get();
-
+ final Index index = stateResponse.getState().metaData().index("test").getIndex();
RoutingNode routingNode = stateResponse.getState().getRoutingNodes().node(nonMasterId);
final int[] node2Shards = new int[routingNode.numberOfOwningShards()];
int i = 0;
@@ -409,10 +408,10 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase {
internalCluster().getInstance(ClusterService.class, nonMasterNode).submitStateUpdateTask("test", new ClusterStateUpdateTask(Priority.IMMEDIATE) {
@Override
public ClusterState execute(ClusterState currentState) throws Exception {
- IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder("test");
+ IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(index);
for (int i = 0; i < numShards; i++) {
indexRoutingTableBuilder.addIndexShard(
- new IndexShardRoutingTable.Builder(new ShardId("test", i))
+ new IndexShardRoutingTable.Builder(new ShardId(index, i))
.addShard(TestShardRouting.newShardRouting("test", i, masterId, true, ShardRoutingState.STARTED, shardVersions[shardIds[i]]))
.build()
);
@@ -440,14 +439,14 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase {
private Path indexDirectory(String server, String index) {
NodeEnvironment env = internalCluster().getInstance(NodeEnvironment.class, server);
- final Path[] paths = env.indexPaths(new Index(index));
+ final Path[] paths = env.indexPaths(index);
assert paths.length == 1;
return paths[0];
}
private Path shardDirectory(String server, String index, int shard) {
NodeEnvironment env = internalCluster().getInstance(NodeEnvironment.class, server);
- final Path[] paths = env.availableShardPaths(new ShardId(index, shard));
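+ // "_na_" stands in for the index UUID, which this test helper does not know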
+ final Path[] paths = env.availableShardPaths(new ShardId(index, "_na_", shard));
assert paths.length == 1;
return paths[0];
}
diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java
index ec6a3b3849..223e486dd2 100644
--- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java
+++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java
@@ -30,9 +30,12 @@ import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.TestShardRouting;
import org.elasticsearch.cluster.routing.UnassignedInfo;
+import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.cluster.TestClusterService;
+import org.elasticsearch.transport.TransportService;
import org.junit.Before;
import java.util.Arrays;
@@ -60,7 +63,7 @@ public class IndicesStoreTests extends ESTestCase {
@Before
public void before() {
localNode = new DiscoveryNode("abc", new LocalTransportAddress("abc"), Version.CURRENT);
- indicesStore = new IndicesStore();
+ indicesStore = new IndicesStore(Settings.EMPTY, null, new TestClusterService(), new TransportService(null, null), null);
}
public void testShardCanBeDeletedNoShardRouting() throws Exception {
@@ -69,7 +72,7 @@ public class IndicesStoreTests extends ESTestCase {
ClusterState.Builder clusterState = ClusterState.builder(new ClusterName("test"));
clusterState.metaData(MetaData.builder().put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(numShards).numberOfReplicas(numReplicas)));
- IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", 1));
+ IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", "_na_", 1));
assertFalse(indicesStore.shardCanBeDeleted(clusterState.build(), routingTable.build()));
}
@@ -80,7 +83,7 @@ public class IndicesStoreTests extends ESTestCase {
ClusterState.Builder clusterState = ClusterState.builder(new ClusterName("test"));
clusterState.metaData(MetaData.builder().put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(numShards).numberOfReplicas(numReplicas)));
- IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", 1));
+ IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", "_na_", 1));
for (int i = 0; i < numShards; i++) {
int unStartedShard = randomInt(numReplicas);
@@ -108,7 +111,7 @@ public class IndicesStoreTests extends ESTestCase {
ClusterState.Builder clusterState = ClusterState.builder(new ClusterName("test"));
clusterState.metaData(MetaData.builder().put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(numShards).numberOfReplicas(numReplicas)));
clusterState.nodes(DiscoveryNodes.builder().localNodeId(localNode.id()).put(localNode).put(new DiscoveryNode("xyz", new LocalTransportAddress("xyz"), Version.CURRENT)));
- IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", 1));
+ IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", "_na_", 1));
int localShardId = randomInt(numShards - 1);
for (int i = 0; i < numShards; i++) {
String nodeId = i == localShardId ? localNode.getId() : randomBoolean() ? "abc" : "xyz";
@@ -130,7 +133,7 @@ public class IndicesStoreTests extends ESTestCase {
ClusterState.Builder clusterState = ClusterState.builder(new ClusterName("test"));
clusterState.metaData(MetaData.builder().put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(numShards).numberOfReplicas(numReplicas)));
clusterState.nodes(DiscoveryNodes.builder().localNodeId(localNode.id()).put(localNode));
- IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", 1));
+ IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", "_na_", 1));
for (int i = 0; i < numShards; i++) {
String relocatingNodeId = randomBoolean() ? null : "def";
routingTable.addShard(TestShardRouting.newShardRouting("test", i, "xyz", relocatingNodeId, true, ShardRoutingState.STARTED, 0));
@@ -152,7 +155,7 @@ public class IndicesStoreTests extends ESTestCase {
ClusterState.Builder clusterState = ClusterState.builder(new ClusterName("test"));
clusterState.metaData(MetaData.builder().put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(numShards).numberOfReplicas(numReplicas)));
clusterState.nodes(DiscoveryNodes.builder().localNodeId(localNode.id()).put(localNode).put(new DiscoveryNode("xyz", new LocalTransportAddress("xyz"), nodeVersion)));
- IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", 1));
+ IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", "_na_", 1));
for (int i = 0; i < numShards; i++) {
routingTable.addShard(TestShardRouting.newShardRouting("test", i, "xyz", null, true, ShardRoutingState.STARTED, 0));
for (int j = 0; j < numReplicas; j++) {
@@ -177,7 +180,7 @@ public class IndicesStoreTests extends ESTestCase {
.put(new DiscoveryNode("xyz", new LocalTransportAddress("xyz"), Version.CURRENT))
.put(new DiscoveryNode("def", new LocalTransportAddress("def"), nodeVersion) // <-- only set relocating, since we're testing that in this test
));
- IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", 1));
+ IndexShardRoutingTable.Builder routingTable = new IndexShardRoutingTable.Builder(new ShardId("test", "_na_", 1));
for (int i = 0; i < numShards; i++) {
routingTable.addShard(TestShardRouting.newShardRouting("test", i, "xyz", "def", true, ShardRoutingState.STARTED, 0));
for (int j = 0; j < numReplicas; j++) {
diff --git a/core/src/test/java/org/elasticsearch/indices/template/IndexTemplateBlocksIT.java b/core/src/test/java/org/elasticsearch/indices/template/IndexTemplateBlocksIT.java
index c46c038529..11e2d7d2ac 100644
--- a/core/src/test/java/org/elasticsearch/indices/template/IndexTemplateBlocksIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/template/IndexTemplateBlocksIT.java
@@ -38,8 +38,8 @@ public class IndexTemplateBlocksIT extends ESIntegTestCase {
.setTemplate("te*")
.setOrder(0)
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
- .startObject("field1").field("type", "string").field("store", "yes").endObject()
- .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject()
+ .startObject("field1").field("type", "string").field("store", true).endObject()
+ .startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject()
.endObject().endObject().endObject())
.execute().actionGet();
diff --git a/core/src/test/java/org/elasticsearch/indices/template/IndexTemplateFilteringIT.java b/core/src/test/java/org/elasticsearch/indices/template/IndexTemplateFilteringIT.java
index ee0f874808..8e0d5a882c 100644
--- a/core/src/test/java/org/elasticsearch/indices/template/IndexTemplateFilteringIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/template/IndexTemplateFilteringIT.java
@@ -57,7 +57,7 @@ public class IndexTemplateFilteringIT extends ESIntegTestCase {
.setTemplate("no_match")
.addMapping("type3", "field3", "type=string").get();
- assertAcked(prepareCreate("test").putHeader("header_test", "header_value"));
+ assertAcked(prepareCreate("test"));
GetMappingsResponse response = client().admin().indices().prepareGetMappings("test").get();
assertThat(response, notNullValue());
@@ -70,7 +70,7 @@ public class IndexTemplateFilteringIT extends ESIntegTestCase {
@Override
public boolean apply(CreateIndexClusterStateUpdateRequest request, IndexTemplateMetaData template) {
// make sure that the no_match template is filtered out before the custom filters run, as it doesn't match the index name
- return (template.name().equals("template2") || template.name().equals("no_match")) && request.originalMessage().getHeader("header_test").equals("header_value");
+ return (template.name().equals("template2") || template.name().equals("no_match"));
}
}
diff --git a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java
index 5aaed6b5d9..fbfaa93df8 100644
--- a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java
@@ -63,6 +63,7 @@ import static org.hamcrest.Matchers.nullValue;
*
*/
public class SimpleIndexTemplateIT extends ESIntegTestCase {
+
public void testSimpleIndexTemplateTests() throws Exception {
// clean all templates setup by the framework.
client().admin().indices().prepareDeleteTemplate("*").get();
@@ -77,8 +78,8 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
.setSettings(indexSettings())
.setOrder(0)
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
- .startObject("field1").field("type", "string").field("store", "yes").endObject()
- .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject()
+ .startObject("field1").field("type", "string").field("store", true).endObject()
+ .startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject()
.endObject().endObject().endObject())
.get();
@@ -87,7 +88,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
.setSettings(indexSettings())
.setOrder(1)
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
- .startObject("field2").field("type", "string").field("store", "no").endObject()
+ .startObject("field2").field("type", "string").field("store", false).endObject()
.endObject().endObject().endObject())
.get();
@@ -98,7 +99,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
.setCreate(true)
.setOrder(1)
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
- .startObject("field2").field("type", "string").field("store", "no").endObject()
+ .startObject("field2").field("type", "string").field("store", false).endObject()
.endObject().endObject().endObject())
, IndexTemplateAlreadyExistsException.class
);
@@ -144,8 +145,8 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
.setTemplate("te*")
.setOrder(0)
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
- .startObject("field1").field("type", "string").field("store", "yes").endObject()
- .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject()
+ .startObject("field1").field("type", "string").field("store", true).endObject()
+ .startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject()
.endObject().endObject().endObject())
.execute().actionGet();
@@ -169,8 +170,8 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
.setTemplate("te*")
.setOrder(0)
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
- .startObject("field1").field("type", "string").field("store", "yes").endObject()
- .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject()
+ .startObject("field1").field("type", "string").field("store", true).endObject()
+ .startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject()
.endObject().endObject().endObject())
.execute().actionGet();
@@ -189,8 +190,8 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
.setTemplate("te*")
.setOrder(0)
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
- .startObject("field1").field("type", "string").field("store", "yes").endObject()
- .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject()
+ .startObject("field1").field("type", "string").field("store", true).endObject()
+ .startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject()
.endObject().endObject().endObject())
.execute().actionGet();
@@ -212,8 +213,8 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
.setTemplate("te*")
.setOrder(0)
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
- .startObject("field1").field("type", "string").field("store", "yes").endObject()
- .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject()
+ .startObject("field1").field("type", "string").field("store", true).endObject()
+ .startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject()
.endObject().endObject().endObject())
.execute().actionGet();
@@ -222,8 +223,8 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
.setTemplate("te*")
.setOrder(0)
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
- .startObject("field1").field("type", "string").field("store", "yes").endObject()
- .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject()
+ .startObject("field1").field("type", "string").field("store", true).endObject()
+ .startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject()
.endObject().endObject().endObject())
.execute().actionGet();
@@ -232,8 +233,8 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
.setTemplate("te*")
.setOrder(0)
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
- .startObject("field1").field("type", "string").field("store", "yes").endObject()
- .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject()
+ .startObject("field1").field("type", "string").field("store", true).endObject()
+ .startObject("field2").field("type", "string").field("store", true).field("index", "not_analyzed").endObject()
.endObject().endObject().endObject())
.execute().actionGet();
@@ -368,21 +369,21 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
refresh();
SearchResponse searchResponse = client().prepareSearch("test_index").get();
- assertHitCount(searchResponse, 5l);
+ assertHitCount(searchResponse, 5L);
searchResponse = client().prepareSearch("simple_alias").get();
- assertHitCount(searchResponse, 5l);
+ assertHitCount(searchResponse, 5L);
searchResponse = client().prepareSearch("templated_alias-test_index").get();
- assertHitCount(searchResponse, 5l);
+ assertHitCount(searchResponse, 5L);
searchResponse = client().prepareSearch("filtered_alias").get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).type(), equalTo("type2"));
// Search the complex filter alias
searchResponse = client().prepareSearch("complex_filtered_alias").get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
Set<String> types = new HashSet<>();
for (SearchHit searchHit : searchResponse.getHits().getHits()) {
@@ -420,10 +421,10 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
refresh();
SearchResponse searchResponse = client().prepareSearch("test_index").get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
searchResponse = client().prepareSearch("my_alias").get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).type(), equalTo("type2"));
}
@@ -455,13 +456,13 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
refresh();
SearchResponse searchResponse = client().prepareSearch("test_index").get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
searchResponse = client().prepareSearch("alias1").get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
searchResponse = client().prepareSearch("alias2").get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).type(), equalTo("type2"));
}
@@ -626,7 +627,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
assertThat(response.getItems()[0].getIndex(), equalTo("a2"));
assertThat(response.getItems()[0].getType(), equalTo("test"));
assertThat(response.getItems()[0].getId(), equalTo("test"));
- assertThat(response.getItems()[0].getVersion(), equalTo(1l));
+ assertThat(response.getItems()[0].getVersion(), equalTo(1L));
client().prepareIndex("b1", "test", "test").setSource("{}").get();
response = client().prepareBulk().add(new IndexRequest("b2", "test", "test").source("{}")).get();
@@ -635,7 +636,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
assertThat(response.getItems()[0].getIndex(), equalTo("b2"));
assertThat(response.getItems()[0].getType(), equalTo("test"));
assertThat(response.getItems()[0].getId(), equalTo("test"));
- assertThat(response.getItems()[0].getVersion(), equalTo(1l));
+ assertThat(response.getItems()[0].getVersion(), equalTo(1L));
client().prepareIndex("c1", "test", "test").setSource("{}").get();
response = client().prepareBulk().add(new IndexRequest("c2", "test", "test").source("{}")).get();
@@ -644,7 +645,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
assertThat(response.getItems()[0].getIndex(), equalTo("c2"));
assertThat(response.getItems()[0].getType(), equalTo("test"));
assertThat(response.getItems()[0].getId(), equalTo("test"));
- assertThat(response.getItems()[0].getVersion(), equalTo(1l));
+ assertThat(response.getItems()[0].getVersion(), equalTo(1L));
// Before 2.0 alias filters were parsed at alias creation time, in order
// for filters to work correctly ES required that fields mentioned in those
@@ -660,7 +661,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
assertThat(response.hasFailures(), is(false));
assertThat(response.getItems()[0].isFailed(), equalTo(false));
assertThat(response.getItems()[0].getId(), equalTo("test"));
- assertThat(response.getItems()[0].getVersion(), equalTo(1l));
+ assertThat(response.getItems()[0].getVersion(), equalTo(1L));
}
}
diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java
new file mode 100644
index 0000000000..bcbe41dd66
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java
@@ -0,0 +1,265 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest;
+
+import org.elasticsearch.action.bulk.BulkItemResponse;
+import org.elasticsearch.action.bulk.BulkRequest;
+import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.index.IndexResponse;
+import org.elasticsearch.action.ingest.DeletePipelineRequest;
+import org.elasticsearch.action.ingest.GetPipelineRequest;
+import org.elasticsearch.action.ingest.GetPipelineResponse;
+import org.elasticsearch.action.ingest.PutPipelineRequest;
+import org.elasticsearch.action.ingest.SimulateDocumentBaseResult;
+import org.elasticsearch.action.ingest.SimulatePipelineRequest;
+import org.elasticsearch.action.ingest.SimulatePipelineResponse;
+import org.elasticsearch.action.ingest.WritePipelineResponse;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.node.NodeModule;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.test.ESIntegTestCase;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
+import static org.hamcrest.core.Is.is;
+
+@ESIntegTestCase.ClusterScope(minNumDataNodes = 2)
+public class IngestClientIT extends ESIntegTestCase {
+
+ @Override
+ protected Settings nodeSettings(int nodeOrdinal) {
+ // TODO: Remove this method once this issue gets in: https://github.com/elastic/elasticsearch/issues/16019
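+ // half of the nodes disable node.ingest, so pipeline requests also get exercised on nodes that must forward them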
+ if (nodeOrdinal % 2 == 0) {
+ return Settings.builder().put("node.ingest", false).put(super.nodeSettings(nodeOrdinal)).build();
+ }
+ return super.nodeSettings(nodeOrdinal);
+ }
+
+ @Override
+ protected Collection<Class<? extends Plugin>> nodePlugins() {
+ return pluginList(IngestPlugin.class);
+ }
+
+ public void testSimulate() throws Exception {
+ BytesReference pipelineSource = jsonBuilder().startObject()
+ .field("description", "my_pipeline")
+ .startArray("processors")
+ .startObject()
+ .startObject("test")
+ .endObject()
+ .endObject()
+ .endArray()
+ .endObject().bytes();
+ client().admin().cluster().preparePutPipeline("_id", pipelineSource)
+ .get();
+ GetPipelineResponse getResponse = client().admin().cluster().prepareGetPipeline("_id")
+ .get();
+ assertThat(getResponse.isFound(), is(true));
+ assertThat(getResponse.pipelines().size(), equalTo(1));
+ assertThat(getResponse.pipelines().get(0).getId(), equalTo("_id"));
+
+ BytesReference bytes = jsonBuilder().startObject()
+ .startArray("docs")
+ .startObject()
+ .field("_index", "index")
+ .field("_type", "type")
+ .field("_id", "id")
+ .startObject("_source")
+ .field("foo", "bar")
+ .field("fail", false)
+ .endObject()
+ .endObject()
+ .endArray()
+ .endObject().bytes();
+ SimulatePipelineResponse response;
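+ // randomly exercise both client paths: the fluent request builder and an explicitly constructed request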
+ if (randomBoolean()) {
+ response = client().admin().cluster().prepareSimulatePipeline(bytes)
+ .setId("_id").get();
+ } else {
+ SimulatePipelineRequest request = new SimulatePipelineRequest(bytes);
+ request.setId("_id");
+ response = client().admin().cluster().simulatePipeline(request).get();
+ }
+ assertThat(response.isVerbose(), equalTo(false));
+ assertThat(response.getPipelineId(), equalTo("_id"));
+ assertThat(response.getResults().size(), equalTo(1));
+ assertThat(response.getResults().get(0), instanceOf(SimulateDocumentBaseResult.class));
+ SimulateDocumentBaseResult simulateDocumentBaseResult = (SimulateDocumentBaseResult) response.getResults().get(0);
+ Map<String, Object> source = new HashMap<>();
+ source.put("foo", "bar");
+ source.put("fail", false);
+ source.put("processed", true);
+ IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, source);
+ assertThat(simulateDocumentBaseResult.getIngestDocument().getSourceAndMetadata(), equalTo(ingestDocument.getSourceAndMetadata()));
+ assertThat(simulateDocumentBaseResult.getFailure(), nullValue());
+ }
+
+ public void testBulkWithIngestFailures() throws Exception {
+ createIndex("index");
+
+ BytesReference source = jsonBuilder().startObject()
+ .field("description", "my_pipeline")
+ .startArray("processors")
+ .startObject()
+ .startObject("test")
+ .endObject()
+ .endObject()
+ .endArray()
+ .endObject().bytes();
+ PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id", source);
+ client().admin().cluster().putPipeline(putPipelineRequest).get();
+
+ int numRequests = scaledRandomIntBetween(32, 128);
+ BulkRequest bulkRequest = new BulkRequest();
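+ // every even-numbered request sets fail=true, which the test processor below turns into an ingest failure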
+ for (int i = 0; i < numRequests; i++) {
+ IndexRequest indexRequest = new IndexRequest("index", "type", Integer.toString(i)).setPipeline("_id");
+ indexRequest.source("field", "value", "fail", i % 2 == 0);
+ bulkRequest.add(indexRequest);
+ }
+
+ BulkResponse response = client().bulk(bulkRequest).actionGet();
+ assertThat(response.getItems().length, equalTo(bulkRequest.requests().size()));
+ for (int i = 0; i < bulkRequest.requests().size(); i++) {
+ BulkItemResponse itemResponse = response.getItems()[i];
+ if (i % 2 == 0) {
+ BulkItemResponse.Failure failure = itemResponse.getFailure();
+ assertThat(failure.getMessage(), equalTo("java.lang.IllegalArgumentException: test processor failed"));
+ } else {
+ IndexResponse indexResponse = itemResponse.getResponse();
+ assertThat(indexResponse.getId(), equalTo(Integer.toString(i)));
+ assertThat(indexResponse.isCreated(), is(true));
+ }
+ }
+ }
+
+ public void test() throws Exception {
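+ // end-to-end pipeline lifecycle: put, get, use for index and bulk requests, then delete and verify it is gone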
+ BytesReference source = jsonBuilder().startObject()
+ .field("description", "my_pipeline")
+ .startArray("processors")
+ .startObject()
+ .startObject("test")
+ .endObject()
+ .endObject()
+ .endArray()
+ .endObject().bytes();
+ PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id", source);
+ client().admin().cluster().putPipeline(putPipelineRequest).get();
+
+ GetPipelineRequest getPipelineRequest = new GetPipelineRequest("_id");
+ GetPipelineResponse getResponse = client().admin().cluster().getPipeline(getPipelineRequest).get();
+ assertThat(getResponse.isFound(), is(true));
+ assertThat(getResponse.pipelines().size(), equalTo(1));
+ assertThat(getResponse.pipelines().get(0).getId(), equalTo("_id"));
+
+ client().prepareIndex("test", "type", "1").setPipeline("_id").setSource("field", "value", "fail", false).get();
+
+ Map<String, Object> doc = client().prepareGet("test", "type", "1")
+ .get().getSourceAsMap();
+ assertThat(doc.get("field"), equalTo("value"));
+ assertThat(doc.get("processed"), equalTo(true));
+
+ client().prepareBulk().add(
+ client().prepareIndex("test", "type", "2").setSource("field", "value2", "fail", false).setPipeline("_id")).get();
+ doc = client().prepareGet("test", "type", "2").get().getSourceAsMap();
+ assertThat(doc.get("field"), equalTo("value2"));
+ assertThat(doc.get("processed"), equalTo(true));
+
+ DeletePipelineRequest deletePipelineRequest = new DeletePipelineRequest("_id");
+ WritePipelineResponse response = client().admin().cluster().deletePipeline(deletePipelineRequest).get();
+ assertThat(response.isAcknowledged(), is(true));
+
+ getResponse = client().admin().cluster().prepareGetPipeline("_id").get();
+ assertThat(getResponse.isFound(), is(false));
+ assertThat(getResponse.pipelines().size(), equalTo(0));
+ }
+
+ public void testPutWithPipelineError() throws Exception {
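+ // a pipeline that references an unregistered processor type should be rejected with a clear reason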
+ BytesReference source = jsonBuilder().startObject()
+ .field("description", "my_pipeline")
+ .startArray("processors")
+ .startObject()
+ .startObject("not_found")
+ .endObject()
+ .endObject()
+ .endArray()
+ .endObject().bytes();
+ PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id", source);
+ WritePipelineResponse response = client().admin().cluster().putPipeline(putPipelineRequest).get();
+ assertThat(response.isAcknowledged(), equalTo(false));
+ assertThat(response.getError().getReason(), equalTo("No processor type exists with name [not_found]"));
+ }
+
+ public void testPutWithProcessorFactoryError() throws Exception {
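+ // the test processor factory rejects configuration keys it does not understand, so "unused" must be reported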
+ BytesReference source = jsonBuilder().startObject()
+ .field("description", "my_pipeline")
+ .startArray("processors")
+ .startObject()
+ .startObject("test")
+ .field("unused", ":sad_face:")
+ .endObject()
+ .endObject()
+ .endArray()
+ .endObject().bytes();
+ PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id", source);
+ WritePipelineResponse response = client().admin().cluster().putPipeline(putPipelineRequest).get();
+ assertThat(response.isAcknowledged(), equalTo(false));
+ assertThat(response.getError().getReason(), equalTo("processor [test] doesn't support one or more provided configuration parameters [unused]"));
+ }
+
+ @Override
+ protected Collection<Class<? extends Plugin>> getMockPlugins() {
+ return Collections.singletonList(TestSeedPlugin.class);
+ }
+
+ public static class IngestPlugin extends Plugin {
+
+ @Override
+ public String name() {
+ return "ingest";
+ }
+
+ @Override
+ public String description() {
+ return "ingest mock";
+ }
+
+ public void onModule(NodeModule nodeModule) {
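+ // registers a "test" processor that marks every document as processed and throws if the document's fail flag is true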
+ nodeModule.registerProcessor("test", templateService -> config ->
+ new TestProcessor("id", "test", ingestDocument -> {
+ ingestDocument.setFieldValue("processed", true);
+ if (ingestDocument.getFieldValue("fail", Boolean.class)) {
+ throw new IllegalArgumentException("test processor failed");
+ }
+ })
+ );
+ }
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java b/core/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java
new file mode 100644
index 0000000000..a6cf12389a
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest;
+
+import org.elasticsearch.cluster.metadata.MetaData;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+public class IngestMetadataTests extends ESTestCase {
+
+ public void testFromXContent() throws IOException {
+ PipelineConfiguration pipeline = new PipelineConfiguration(
+ "1",new BytesArray("{\"processors\": [{\"set\" : {\"field\": \"_field\", \"value\": \"_value\"}}]}")
+ );
+ PipelineConfiguration pipeline2 = new PipelineConfiguration(
+ "2",new BytesArray("{\"processors\": [{\"set\" : {\"field\": \"_field1\", \"value\": \"_value1\"}}]}")
+ );
+ Map<String, PipelineConfiguration> map = new HashMap<>();
+ map.put(pipeline.getId(), pipeline);
+ map.put(pipeline2.getId(), pipeline2);
+ IngestMetadata ingestMetadata = new IngestMetadata(map);
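+ // serialize the metadata to JSON and parse it back to verify the XContent round trip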
+ XContentBuilder builder = XContentFactory.jsonBuilder();
+ builder.prettyPrint();
+ builder.startObject();
+ ingestMetadata.toXContent(builder, ToXContent.EMPTY_PARAMS);
+ builder.endObject();
+ String string = builder.string();
+ final XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(string);
+ MetaData.Custom custom = ingestMetadata.fromXContent(parser);
+ assertTrue(custom instanceof IngestMetadata);
+ IngestMetadata m = (IngestMetadata) custom;
+ assertEquals(2, m.getPipelines().size());
+ assertEquals("1", m.getPipelines().get("1").getId());
+ assertEquals("2", m.getPipelines().get("2").getId());
+ assertEquals(pipeline.getConfigAsMap(), m.getPipelines().get("1").getConfigAsMap());
+ assertEquals(pipeline2.getConfigAsMap(), m.getPipelines().get("2").getConfigAsMap());
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java
new file mode 100644
index 0000000000..9126a513e6
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java
@@ -0,0 +1,366 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest;
+
+import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.bulk.BulkRequest;
+import org.elasticsearch.action.delete.DeleteRequest;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.update.UpdateRequest;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.ingest.core.CompoundProcessor;
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.core.Pipeline;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.hamcrest.CustomTypeSafeMatcher;
+import org.junit.Before;
+import org.mockito.ArgumentMatcher;
+import org.mockito.invocation.InvocationOnMock;
+
+import java.util.Collections;
+import java.util.Map;
+import java.util.Objects;
+import java.util.function.BiConsumer;
+import java.util.function.Consumer;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyBoolean;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Matchers.argThat;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+public class PipelineExecutionServiceTests extends ESTestCase {
+
+ private PipelineStore store;
+ private PipelineExecutionService executionService;
+
+ @Before
+ public void setup() {
+ store = mock(PipelineStore.class);
+ ThreadPool threadPool = mock(ThreadPool.class);
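+        // run ingest callbacks on the calling thread so the verifications below observe completed work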
+ when(threadPool.executor(anyString())).thenReturn(Runnable::run);
+ executionService = new PipelineExecutionService(store, threadPool);
+ }
+
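+    // a request that names an unknown pipeline should fail fast, before either handler is invoked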
+ public void testExecuteIndexPipelineDoesNotExist() {
+ IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
+ @SuppressWarnings("unchecked")
+ Consumer<Throwable> failureHandler = mock(Consumer.class);
+ @SuppressWarnings("unchecked")
+ Consumer<Boolean> completionHandler = mock(Consumer.class);
+ try {
+ executionService.execute(indexRequest, failureHandler, completionHandler);
+ fail("IllegalArgumentException expected");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("pipeline with id [_id] does not exist"));
+ }
+ verify(failureHandler, never()).accept(any(Throwable.class));
+ verify(completionHandler, never()).accept(anyBoolean());
+ }
+
+ public void testExecuteBulkPipelineDoesNotExist() {
+ CompoundProcessor processor = mock(CompoundProcessor.class);
+ when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", processor));
+ BulkRequest bulkRequest = new BulkRequest();
+
+ IndexRequest indexRequest1 = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
+ bulkRequest.add(indexRequest1);
+ IndexRequest indexRequest2 = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("does_not_exist");
+ bulkRequest.add(indexRequest2);
+ @SuppressWarnings("unchecked")
+ BiConsumer<IndexRequest, Throwable> failureHandler = mock(BiConsumer.class);
+ @SuppressWarnings("unchecked")
+ Consumer<Throwable> completionHandler = mock(Consumer.class);
+ executionService.execute(bulkRequest.requests(), failureHandler, completionHandler);
+ verify(failureHandler, times(1)).accept(
+ argThat(new CustomTypeSafeMatcher<IndexRequest>("failure handler was not called with the expected arguments") {
+ @Override
+ protected boolean matchesSafely(IndexRequest item) {
+ return item == indexRequest2;
+ }
+
+ }),
+ argThat(new CustomTypeSafeMatcher<IllegalArgumentException>("failure handler was not called with the expected arguments") {
+ @Override
+ protected boolean matchesSafely(IllegalArgumentException iae) {
+ return "pipeline with id [does_not_exist] does not exist".equals(iae.getMessage());
+ }
+ })
+ );
+ verify(completionHandler, times(1)).accept(null);
+ }
+
+ public void testExecuteSuccess() throws Exception {
+ CompoundProcessor processor = mock(CompoundProcessor.class);
+ when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", processor));
+
+ IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
+ @SuppressWarnings("unchecked")
+ Consumer<Throwable> failureHandler = mock(Consumer.class);
+ @SuppressWarnings("unchecked")
+ Consumer<Boolean> completionHandler = mock(Consumer.class);
+ executionService.execute(indexRequest, failureHandler, completionHandler);
+ verify(failureHandler, never()).accept(any());
+ verify(completionHandler, times(1)).accept(true);
+ }
+
+ public void testExecutePropagateAllMetaDataUpdates() throws Exception {
+ CompoundProcessor processor = mock(CompoundProcessor.class);
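+        // the stubbed processor rewrites every metadata field; TTL gets a parseable duration ("5w" = five weeks)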
+ doAnswer((InvocationOnMock invocationOnMock) -> {
+ IngestDocument ingestDocument = (IngestDocument) invocationOnMock.getArguments()[0];
+ for (IngestDocument.MetaData metaData : IngestDocument.MetaData.values()) {
+ if (metaData == IngestDocument.MetaData.TTL) {
+ ingestDocument.setFieldValue(IngestDocument.MetaData.TTL.getFieldName(), "5w");
+ } else {
+                ingestDocument.setFieldValue(metaData.getFieldName(), "update" + metaData.getFieldName());
+            }
+            }
+ return null;
+ }).when(processor).execute(any());
+ when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", processor));
+
+ IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
+ @SuppressWarnings("unchecked")
+ Consumer<Throwable> failureHandler = mock(Consumer.class);
+ @SuppressWarnings("unchecked")
+ Consumer<Boolean> completionHandler = mock(Consumer.class);
+ executionService.execute(indexRequest, failureHandler, completionHandler);
+ verify(processor).execute(any());
+ verify(failureHandler, never()).accept(any());
+ verify(completionHandler, times(1)).accept(true);
+
+ assertThat(indexRequest.index(), equalTo("update_index"));
+ assertThat(indexRequest.type(), equalTo("update_type"));
+ assertThat(indexRequest.id(), equalTo("update_id"));
+ assertThat(indexRequest.routing(), equalTo("update_routing"));
+ assertThat(indexRequest.parent(), equalTo("update_parent"));
+ assertThat(indexRequest.timestamp(), equalTo("update_timestamp"));
+ assertThat(indexRequest.ttl(), equalTo(new TimeValue(3024000000L)));
+ }
+
+ public void testExecuteFailure() throws Exception {
+ CompoundProcessor processor = mock(CompoundProcessor.class);
+ when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", processor));
+ IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
+ doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap()));
+ @SuppressWarnings("unchecked")
+ Consumer<Throwable> failureHandler = mock(Consumer.class);
+ @SuppressWarnings("unchecked")
+ Consumer<Boolean> completionHandler = mock(Consumer.class);
+ executionService.execute(indexRequest, failureHandler, completionHandler);
+ verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap()));
+ verify(failureHandler, times(1)).accept(any(RuntimeException.class));
+ verify(completionHandler, never()).accept(anyBoolean());
+ }
+
+ public void testExecuteSuccessWithOnFailure() throws Exception {
+ Processor processor = mock(Processor.class);
+ Processor onFailureProcessor = mock(Processor.class);
+ CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.singletonList(processor), Collections.singletonList(new CompoundProcessor(onFailureProcessor)));
+ when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", compoundProcessor));
+ IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
+ doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap()));
+ @SuppressWarnings("unchecked")
+ Consumer<Throwable> failureHandler = mock(Consumer.class);
+ @SuppressWarnings("unchecked")
+ Consumer<Boolean> completionHandler = mock(Consumer.class);
+ executionService.execute(indexRequest, failureHandler, completionHandler);
+ verify(failureHandler, never()).accept(any(RuntimeException.class));
+ verify(completionHandler, times(1)).accept(true);
+ }
+
+ public void testExecuteFailureWithOnFailure() throws Exception {
+ Processor processor = mock(Processor.class);
+ Processor onFailureProcessor = mock(Processor.class);
+ CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.singletonList(processor), Collections.singletonList(new CompoundProcessor(onFailureProcessor)));
+ when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", compoundProcessor));
+ IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
+ doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap()));
+ doThrow(new RuntimeException()).when(onFailureProcessor).execute(eqID("_index", "_type", "_id", Collections.emptyMap()));
+ @SuppressWarnings("unchecked")
+ Consumer<Throwable> failureHandler = mock(Consumer.class);
+ @SuppressWarnings("unchecked")
+ Consumer<Boolean> completionHandler = mock(Consumer.class);
+ executionService.execute(indexRequest, failureHandler, completionHandler);
+ verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap()));
+ verify(failureHandler, times(1)).accept(any(RuntimeException.class));
+ verify(completionHandler, never()).accept(anyBoolean());
+ }
+
+ public void testExecuteFailureWithNestedOnFailure() throws Exception {
+ Processor processor = mock(Processor.class);
+ Processor onFailureProcessor = mock(Processor.class);
+ Processor onFailureOnFailureProcessor = mock(Processor.class);
+ CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.singletonList(processor),
+ Collections.singletonList(new CompoundProcessor(Collections.singletonList(onFailureProcessor), Collections.singletonList(onFailureOnFailureProcessor))));
+ when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", compoundProcessor));
+ IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
+ doThrow(new RuntimeException()).when(onFailureOnFailureProcessor).execute(eqID("_index", "_type", "_id", Collections.emptyMap()));
+ doThrow(new RuntimeException()).when(onFailureProcessor).execute(eqID("_index", "_type", "_id", Collections.emptyMap()));
+ doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap()));
+ @SuppressWarnings("unchecked")
+ Consumer<Throwable> failureHandler = mock(Consumer.class);
+ @SuppressWarnings("unchecked")
+ Consumer<Boolean> completionHandler = mock(Consumer.class);
+ executionService.execute(indexRequest, failureHandler, completionHandler);
+ verify(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap()));
+ verify(failureHandler, times(1)).accept(any(RuntimeException.class));
+ verify(completionHandler, never()).accept(anyBoolean());
+ }
+
+ public void testExecuteSetTTL() throws Exception {
+ Processor processor = new TestProcessor(ingestDocument -> ingestDocument.setFieldValue("_ttl", "5d"));
+ when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", new CompoundProcessor(processor)));
+
+ IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
+ @SuppressWarnings("unchecked")
+ Consumer<Throwable> failureHandler = mock(Consumer.class);
+ @SuppressWarnings("unchecked")
+ Consumer<Boolean> completionHandler = mock(Consumer.class);
+ executionService.execute(indexRequest, failureHandler, completionHandler);
+
+ assertThat(indexRequest.ttl(), equalTo(TimeValue.parseTimeValue("5d", null, "ttl")));
+ verify(failureHandler, never()).accept(any());
+ verify(completionHandler, times(1)).accept(true);
+ }
+
+ public void testExecuteSetInvalidTTL() throws Exception {
+ Processor processor = new TestProcessor(ingestDocument -> ingestDocument.setFieldValue("_ttl", "abc"));
+ when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", new CompoundProcessor(processor)));
+
+ IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
+ @SuppressWarnings("unchecked")
+ Consumer<Throwable> failureHandler = mock(Consumer.class);
+ @SuppressWarnings("unchecked")
+ Consumer<Boolean> completionHandler = mock(Consumer.class);
+ executionService.execute(indexRequest, failureHandler, completionHandler);
+ verify(failureHandler, times(1)).accept(any(ElasticsearchParseException.class));
+ verify(completionHandler, never()).accept(anyBoolean());
+ }
+
+ public void testExecuteProvidedTTL() throws Exception {
+ when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", mock(CompoundProcessor.class)));
+
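+        // a TTL already set on the request should survive a pipeline that never touches it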
+ IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").setPipeline("_id")
+ .source(Collections.emptyMap())
+ .ttl(1000L);
+        @SuppressWarnings("unchecked")
+        Consumer<Throwable> failureHandler = mock(Consumer.class);
+        @SuppressWarnings("unchecked")
+        Consumer<Boolean> completionHandler = mock(Consumer.class);
+ executionService.execute(indexRequest, failureHandler, completionHandler);
+
+ assertThat(indexRequest.ttl(), equalTo(new TimeValue(1000L)));
+ verify(failureHandler, never()).accept(any());
+ verify(completionHandler, times(1)).accept(true);
+ }
+
+ public void testBulkRequestExecutionWithFailures() throws Exception {
+ BulkRequest bulkRequest = new BulkRequest();
+ String pipelineId = "_id";
+
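+        // mix index requests, which run through the pipeline, with delete and update requests, which skip it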
+ int numRequest = scaledRandomIntBetween(8, 64);
+ int numIndexRequests = 0;
+ for (int i = 0; i < numRequest; i++) {
+ ActionRequest request;
+ if (randomBoolean()) {
+ if (randomBoolean()) {
+ request = new DeleteRequest("_index", "_type", "_id");
+ } else {
+ request = new UpdateRequest("_index", "_type", "_id");
+ }
+ } else {
+ IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").setPipeline(pipelineId);
+ indexRequest.source("field1", "value1");
+ request = indexRequest;
+ numIndexRequests++;
+ }
+ bulkRequest.add(request);
+ }
+
+ CompoundProcessor processor = mock(CompoundProcessor.class);
+ Exception error = new RuntimeException();
+ doThrow(error).when(processor).execute(any());
+ when(store.get(pipelineId)).thenReturn(new Pipeline(pipelineId, null, processor));
+
+        @SuppressWarnings("unchecked")
+        BiConsumer<IndexRequest, Throwable> requestItemErrorHandler = mock(BiConsumer.class);
+        @SuppressWarnings("unchecked")
+        Consumer<Throwable> completionHandler = mock(Consumer.class);
+ executionService.execute(bulkRequest.requests(), requestItemErrorHandler, completionHandler);
+
+ verify(requestItemErrorHandler, times(numIndexRequests)).accept(any(IndexRequest.class), eq(error));
+ verify(completionHandler, times(1)).accept(null);
+ }
+
+ public void testBulkRequestExecution() throws Exception {
+ BulkRequest bulkRequest = new BulkRequest();
+ String pipelineId = "_id";
+
+ int numRequest = scaledRandomIntBetween(8, 64);
+ for (int i = 0; i < numRequest; i++) {
+ IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").setPipeline(pipelineId);
+ indexRequest.source("field1", "value1");
+ bulkRequest.add(indexRequest);
+ }
+
+ when(store.get(pipelineId)).thenReturn(new Pipeline(pipelineId, null, new CompoundProcessor()));
+
+ @SuppressWarnings("unchecked")
+ BiConsumer<IndexRequest, Throwable> requestItemErrorHandler = mock(BiConsumer.class);
+ @SuppressWarnings("unchecked")
+ Consumer<Throwable> completionHandler = mock(Consumer.class);
+ executionService.execute(bulkRequest.requests(), requestItemErrorHandler, completionHandler);
+
+ verify(requestItemErrorHandler, never()).accept(any(), any());
+ verify(completionHandler, times(1)).accept(null);
+ }
+
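+    // builds an argThat matcher for the IngestDocument a processor is expected to receive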
+ private IngestDocument eqID(String index, String type, String id, Map<String, Object> source) {
+ return argThat(new IngestDocumentMatcher(index, type, id, source));
+ }
+
+    private static class IngestDocumentMatcher extends ArgumentMatcher<IngestDocument> {
+
+ private final IngestDocument ingestDocument;
+
+ public IngestDocumentMatcher(String index, String type, String id, Map<String, Object> source) {
+ this.ingestDocument = new IngestDocument(index, type, id, null, null, null, null, source);
+ }
+
+ @Override
+ public boolean matches(Object o) {
+            if (o != null && o.getClass() == IngestDocument.class) {
+ IngestDocument otherIngestDocument = (IngestDocument) o;
+ //ingest metadata will not be the same (timestamp differs every time)
+ return Objects.equals(ingestDocument.getSourceAndMetadata(), otherIngestDocument.getSourceAndMetadata());
+ }
+ return false;
+ }
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java
new file mode 100644
index 0000000000..a75a84f037
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java
@@ -0,0 +1,221 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest;
+
+import org.elasticsearch.ResourceNotFoundException;
+import org.elasticsearch.action.ingest.DeletePipelineRequest;
+import org.elasticsearch.action.ingest.PutPipelineRequest;
+import org.elasticsearch.action.ingest.WritePipelineResponse;
+import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.metadata.MetaData;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.ingest.core.Pipeline;
+import org.elasticsearch.ingest.processor.SetProcessor;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.Before;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
+
+public class PipelineStoreTests extends ESTestCase {
+
+ private PipelineStore store;
+
+ @Before
+ public void init() throws Exception {
+ store = new PipelineStore(Settings.EMPTY);
+ ProcessorsRegistry registry = new ProcessorsRegistry();
+ registry.registerProcessor("set", (templateService) -> new SetProcessor.Factory(TestTemplateService.instance()));
+ store.buildProcessorFactoryRegistry(registry, null);
+ }
+
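+    // innerUpdatePipelines reads pipelines straight from the IngestMetadata custom in the cluster state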
+ public void testUpdatePipelines() {
+ ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build();
+ store.innerUpdatePipelines(clusterState);
+ assertThat(store.pipelines.size(), is(0));
+
+        PipelineConfiguration pipeline = new PipelineConfiguration(
+            "_id", new BytesArray("{\"processors\": [{\"set\" : {\"field\": \"_field\", \"value\": \"_value\"}}]}")
+        );
+ IngestMetadata ingestMetadata = new IngestMetadata(Collections.singletonMap("_id", pipeline));
+ clusterState = ClusterState.builder(clusterState)
+ .metaData(MetaData.builder().putCustom(IngestMetadata.TYPE, ingestMetadata))
+ .build();
+ store.innerUpdatePipelines(clusterState);
+ assertThat(store.pipelines.size(), is(1));
+ assertThat(store.pipelines.get("_id").getId(), equalTo("_id"));
+ assertThat(store.pipelines.get("_id").getDescription(), nullValue());
+ assertThat(store.pipelines.get("_id").getProcessors().size(), equalTo(1));
+ assertThat(store.pipelines.get("_id").getProcessors().get(0).getType(), equalTo("set"));
+ }
+
+ public void testPut() {
+ String id = "_id";
+ Pipeline pipeline = store.get(id);
+ assertThat(pipeline, nullValue());
+ ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build();
+
+ // add a new pipeline:
+ PutPipelineRequest putRequest = new PutPipelineRequest(id, new BytesArray("{\"processors\": []}"));
+ clusterState = store.innerPut(putRequest, clusterState);
+ store.innerUpdatePipelines(clusterState);
+ pipeline = store.get(id);
+ assertThat(pipeline, notNullValue());
+ assertThat(pipeline.getId(), equalTo(id));
+ assertThat(pipeline.getDescription(), nullValue());
+ assertThat(pipeline.getProcessors().size(), equalTo(0));
+
+ // overwrite existing pipeline:
+ putRequest = new PutPipelineRequest(id, new BytesArray("{\"processors\": [], \"description\": \"_description\"}"));
+ clusterState = store.innerPut(putRequest, clusterState);
+ store.innerUpdatePipelines(clusterState);
+ pipeline = store.get(id);
+ assertThat(pipeline, notNullValue());
+ assertThat(pipeline.getId(), equalTo(id));
+ assertThat(pipeline.getDescription(), equalTo("_description"));
+ assertThat(pipeline.getProcessors().size(), equalTo(0));
+ }
+
+    public void testPutWithErrorResponse() {
+        // TODO: presumably meant to exercise innerPut with an invalid pipeline definition and assert the error response; body not yet written
+    }
+
+    public void testValidatePipelineResponseSuccess() {
+ Map<String, Object> processorConfig = new HashMap<>();
+ processorConfig.put("field", "foo");
+ processorConfig.put("value", "bar");
+ Map<String, Object> pipelineConfig = new HashMap<>();
+ pipelineConfig.put("description", "_description");
+ pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("set", processorConfig)));
+ WritePipelineResponse response = store.validatePipelineResponse("test_id", pipelineConfig);
+ assertThat(response, nullValue());
+ }
+
+    public void testValidatePipelineResponseMissingProcessorsFieldException() {
+ Map<String, Object> pipelineConfig = new HashMap<>();
+ pipelineConfig.put("description", "_description");
+ WritePipelineResponse response = store.validatePipelineResponse("test_id", pipelineConfig);
+ assertThat(response.getError().getProcessorType(), is(nullValue()));
+ assertThat(response.getError().getProcessorTag(), is(nullValue()));
+ assertThat(response.getError().getProcessorPropertyName(), equalTo("processors"));
+ assertThat(response.getError().getReason(), equalTo("[processors] required property is missing"));
+ }
+
+    public void testValidatePipelineResponseConfigurationException() {
+ Map<String, Object> processorConfig = new HashMap<>();
+ processorConfig.put("field", "foo");
+ Map<String, Object> pipelineConfig = new HashMap<>();
+ pipelineConfig.put("description", "_description");
+ pipelineConfig.put("processors", Collections.singletonList(Collections.singletonMap("set", processorConfig)));
+ WritePipelineResponse response = store.validatePipelineResponse("test_id", pipelineConfig);
+
+ assertThat(response.getError().getProcessorTag(), nullValue());
+ assertThat(response.getError().getProcessorType(), equalTo("set"));
+ assertThat(response.getError().getProcessorPropertyName(), equalTo("value"));
+ assertThat(response.getError().getReason(), equalTo("[value] required property is missing"));
+ }
+
+ public void testDelete() {
+        PipelineConfiguration config = new PipelineConfiguration(
+            "_id", new BytesArray("{\"processors\": [{\"set\" : {\"field\": \"_field\", \"value\": \"_value\"}}]}")
+        );
+ IngestMetadata ingestMetadata = new IngestMetadata(Collections.singletonMap("_id", config));
+ ClusterState clusterState = ClusterState.builder(new ClusterName("_name"))
+ .metaData(MetaData.builder().putCustom(IngestMetadata.TYPE, ingestMetadata))
+ .build();
+ store.innerUpdatePipelines(clusterState);
+ assertThat(store.get("_id"), notNullValue());
+
+ // Delete pipeline:
+ DeletePipelineRequest deleteRequest = new DeletePipelineRequest("_id");
+ clusterState = store.innerDelete(deleteRequest, clusterState);
+ store.innerUpdatePipelines(clusterState);
+ assertThat(store.get("_id"), nullValue());
+
+        // Delete the same pipeline again, which no longer exists:
+ try {
+ store.innerDelete(deleteRequest, clusterState);
+ fail("exception expected");
+ } catch (ResourceNotFoundException e) {
+ assertThat(e.getMessage(), equalTo("pipeline [_id] is missing"));
+ }
+ }
+
+ public void testGetPipelines() {
+ Map<String, PipelineConfiguration> configs = new HashMap<>();
+ configs.put("_id1", new PipelineConfiguration(
+ "_id1", new BytesArray("{\"processors\": []}")
+ ));
+ configs.put("_id2", new PipelineConfiguration(
+ "_id2", new BytesArray("{\"processors\": []}")
+ ));
+
+ assertThat(store.innerGetPipelines(null, "_id1").isEmpty(), is(true));
+
+ IngestMetadata ingestMetadata = new IngestMetadata(configs);
+ List<PipelineConfiguration> pipelines = store.innerGetPipelines(ingestMetadata, "_id1");
+ assertThat(pipelines.size(), equalTo(1));
+ assertThat(pipelines.get(0).getId(), equalTo("_id1"));
+
+ pipelines = store.innerGetPipelines(ingestMetadata, "_id1", "_id2");
+ assertThat(pipelines.size(), equalTo(2));
+ assertThat(pipelines.get(0).getId(), equalTo("_id1"));
+ assertThat(pipelines.get(1).getId(), equalTo("_id2"));
+
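+        // wildcard ids match every pipeline id in the metadata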
+ pipelines = store.innerGetPipelines(ingestMetadata, "_id*");
+ pipelines.sort((o1, o2) -> o1.getId().compareTo(o2.getId()));
+ assertThat(pipelines.size(), equalTo(2));
+ assertThat(pipelines.get(0).getId(), equalTo("_id1"));
+ assertThat(pipelines.get(1).getId(), equalTo("_id2"));
+ }
+
+ public void testCrud() throws Exception {
+ String id = "_id";
+ Pipeline pipeline = store.get(id);
+ assertThat(pipeline, nullValue());
+ ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty
+
+ PutPipelineRequest putRequest = new PutPipelineRequest(id, new BytesArray("{\"processors\": [{\"set\" : {\"field\": \"_field\", \"value\": \"_value\"}}]}"));
+ clusterState = store.innerPut(putRequest, clusterState);
+ store.innerUpdatePipelines(clusterState);
+ pipeline = store.get(id);
+ assertThat(pipeline, notNullValue());
+ assertThat(pipeline.getId(), equalTo(id));
+ assertThat(pipeline.getDescription(), nullValue());
+ assertThat(pipeline.getProcessors().size(), equalTo(1));
+ assertThat(pipeline.getProcessors().get(0).getType(), equalTo("set"));
+
+ DeletePipelineRequest deleteRequest = new DeletePipelineRequest(id);
+ clusterState = store.innerDelete(deleteRequest, clusterState);
+ store.innerUpdatePipelines(clusterState);
+ pipeline = store.get(id);
+ assertThat(pipeline, nullValue());
+ }
+
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java b/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java
new file mode 100644
index 0000000000..ad18488d99
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest;
+
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.ingest.core.TemplateService;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.Map;
+import java.util.Set;
+import java.util.function.Function;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+
+public class ProcessorsRegistryTests extends ESTestCase {
+
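+    // duplicate registrations under the same name are rejected; distinct names remain retrievable via entrySet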
+ public void testAddProcessor() {
+ ProcessorsRegistry processorsRegistry = new ProcessorsRegistry();
+ TestProcessor.Factory factory1 = new TestProcessor.Factory();
+ processorsRegistry.registerProcessor("1", (templateService) -> factory1);
+ TestProcessor.Factory factory2 = new TestProcessor.Factory();
+ processorsRegistry.registerProcessor("2", (templateService) -> factory2);
+ TestProcessor.Factory factory3 = new TestProcessor.Factory();
+ try {
+ processorsRegistry.registerProcessor("1", (templateService) -> factory3);
+ fail("addProcessor should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("Processor factory already registered for name [1]"));
+ }
+
+ Set<Map.Entry<String, Function<TemplateService, Processor.Factory<?>>>> entrySet = processorsRegistry.entrySet();
+ assertThat(entrySet.size(), equalTo(2));
+ for (Map.Entry<String, Function<TemplateService, Processor.Factory<?>>> entry : entrySet) {
+ if (entry.getKey().equals("1")) {
+ assertThat(entry.getValue().apply(null), equalTo(factory1));
+ } else if (entry.getKey().equals("2")) {
+ assertThat(entry.getValue().apply(null), equalTo(factory2));
+ } else {
+ fail("unexpected processor id [" + entry.getKey() + "]");
+ }
+ }
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/core/CompoundProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/core/CompoundProcessorTests.java
new file mode 100644
index 0000000000..7bc8922af4
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/core/CompoundProcessorTests.java
@@ -0,0 +1,120 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.core;
+
+import org.elasticsearch.ingest.TestProcessor;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.Before;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.Matchers.is;
+
+public class CompoundProcessorTests extends ESTestCase {
+ private IngestDocument ingestDocument;
+
+ @Before
+ public void init() {
+ ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>());
+ }
+
+ public void testEmpty() throws Exception {
+ CompoundProcessor processor = new CompoundProcessor();
+ assertThat(processor.getProcessors().isEmpty(), is(true));
+ assertThat(processor.getOnFailureProcessors().isEmpty(), is(true));
+ processor.execute(ingestDocument);
+ }
+
+ public void testSingleProcessor() throws Exception {
+ TestProcessor processor = new TestProcessor(ingestDocument -> {});
+ CompoundProcessor compoundProcessor = new CompoundProcessor(processor);
+ assertThat(compoundProcessor.getProcessors().size(), equalTo(1));
+ assertThat(compoundProcessor.getProcessors().get(0), equalTo(processor));
+ assertThat(compoundProcessor.getOnFailureProcessors().isEmpty(), is(true));
+ compoundProcessor.execute(ingestDocument);
+ assertThat(processor.getInvokedCounter(), equalTo(1));
+ }
+
+ public void testSingleProcessorWithException() throws Exception {
+ TestProcessor processor = new TestProcessor(ingestDocument -> {throw new RuntimeException("error");});
+ CompoundProcessor compoundProcessor = new CompoundProcessor(processor);
+ assertThat(compoundProcessor.getProcessors().size(), equalTo(1));
+ assertThat(compoundProcessor.getProcessors().get(0), equalTo(processor));
+ assertThat(compoundProcessor.getOnFailureProcessors().isEmpty(), is(true));
+ try {
+ compoundProcessor.execute(ingestDocument);
+ fail("should throw exception");
+ } catch (Exception e) {
+ assertThat(e.getMessage(), equalTo("error"));
+ }
+ assertThat(processor.getInvokedCounter(), equalTo(1));
+ }
+
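+    // on failure, the handler processor observes the failed processor's message, type, and tag in the ingest metadata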
+ public void testSingleProcessorWithOnFailureProcessor() throws Exception {
+ TestProcessor processor1 = new TestProcessor("id", "first", ingestDocument -> {throw new RuntimeException("error");});
+ TestProcessor processor2 = new TestProcessor(ingestDocument -> {
+ Map<String, String> ingestMetadata = ingestDocument.getIngestMetadata();
+ assertThat(ingestMetadata.size(), equalTo(3));
+ assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error"));
+ assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("first"));
+ assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("id"));
+ });
+
+ CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.singletonList(processor1), Collections.singletonList(processor2));
+ compoundProcessor.execute(ingestDocument);
+
+ assertThat(processor1.getInvokedCounter(), equalTo(1));
+ assertThat(processor2.getInvokedCounter(), equalTo(1));
+ }
+
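+    // a failing on-failure processor falls through to its own on-failure chain, with the error metadata rewritten at each level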
+ public void testSingleProcessorWithNestedFailures() throws Exception {
+ TestProcessor processor = new TestProcessor("id", "first", ingestDocument -> {throw new RuntimeException("error");});
+ TestProcessor processorToFail = new TestProcessor("id2", "second", ingestDocument -> {
+ Map<String, String> ingestMetadata = ingestDocument.getIngestMetadata();
+ assertThat(ingestMetadata.size(), equalTo(3));
+ assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error"));
+ assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("first"));
+ assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("id"));
+ throw new RuntimeException("error");
+ });
+ TestProcessor lastProcessor = new TestProcessor(ingestDocument -> {
+ Map<String, String> ingestMetadata = ingestDocument.getIngestMetadata();
+ assertThat(ingestMetadata.size(), equalTo(3));
+ assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error"));
+ assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("second"));
+ assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("id2"));
+ });
+ CompoundProcessor compoundOnFailProcessor = new CompoundProcessor(Collections.singletonList(processorToFail), Collections.singletonList(lastProcessor));
+ CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.singletonList(processor), Collections.singletonList(compoundOnFailProcessor));
+ compoundProcessor.execute(ingestDocument);
+
+ assertThat(processorToFail.getInvokedCounter(), equalTo(1));
+ assertThat(lastProcessor.getInvokedCounter(), equalTo(1));
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/core/ConfigurationUtilsTests.java b/core/src/test/java/org/elasticsearch/ingest/core/ConfigurationUtilsTests.java
new file mode 100644
index 0000000000..954a03c217
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/core/ConfigurationUtilsTests.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.core;
+
+import org.elasticsearch.ingest.processor.ConfigurationPropertyException;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.Before;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class ConfigurationUtilsTests extends ESTestCase {
+ private Map<String, Object> config;
+
+ @Before
+ public void setConfig() {
+ config = new HashMap<>();
+ config.put("foo", "bar");
+ config.put("arr", Arrays.asList("1", "2", "3"));
+ List<Integer> list = new ArrayList<>();
+ list.add(2);
+ config.put("int", list);
+ config.put("ip", "127.0.0.1");
+ Map<String, Object> fizz = new HashMap<>();
+ fizz.put("buzz", "hello world");
+ config.put("fizz", fizz);
+ }
+
+ public void testReadStringProperty() {
+ String val = ConfigurationUtils.readStringProperty(null, null, config, "foo");
+ assertThat(val, equalTo("bar"));
+ }
+
+ public void testReadStringPropertyInvalidType() {
+ try {
+            ConfigurationUtils.readStringProperty(null, null, config, "arr");
+            fail("readStringProperty should have failed");
+        } catch (ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), equalTo("[arr] property isn't a string, but of type [java.util.Arrays$ArrayList]"));
+ }
+ }
+
+    // TODO(talevy): issue with generics; this call should fail because "int" holds a List<Integer>, not a List<String>
+    public void testOptionalInvalidType() {
+ List<String> val = ConfigurationUtils.readList(null, null, config, "int");
+ assertThat(val, equalTo(Collections.singletonList(2)));
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/core/IngestDocumentTests.java b/core/src/test/java/org/elasticsearch/ingest/core/IngestDocumentTests.java
new file mode 100644
index 0000000000..1282c4adf1
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/core/IngestDocumentTests.java
@@ -0,0 +1,999 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.core;
+
+import org.elasticsearch.ingest.RandomDocumentPicks;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.Before;
+
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.both;
+import static org.hamcrest.Matchers.endsWith;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.lessThanOrEqualTo;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
+import static org.hamcrest.Matchers.sameInstance;
+
+public class IngestDocumentTests extends ESTestCase {
+
+ private IngestDocument ingestDocument;
+
+ @Before
+ public void setIngestDocument() {
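+        // fixture: scalar, nested-map, and list fields, plus a bogus _ingest timestamp the constructor is expected to replace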
+ Map<String, Object> document = new HashMap<>();
+ Map<String, Object> ingestMap = new HashMap<>();
+ ingestMap.put("timestamp", "bogus_timestamp");
+ document.put("_ingest", ingestMap);
+ document.put("foo", "bar");
+ document.put("int", 123);
+ Map<String, Object> innerObject = new HashMap<>();
+ innerObject.put("buzz", "hello world");
+ innerObject.put("foo_null", null);
+ innerObject.put("1", "bar");
+ List<String> innerInnerList = new ArrayList<>();
+ innerInnerList.add("item1");
+ List<Object> innerList = new ArrayList<>();
+ innerList.add(innerInnerList);
+ innerObject.put("list", innerList);
+ document.put("fizz", innerObject);
+ List<Map<String, Object>> list = new ArrayList<>();
+ Map<String, Object> value = new HashMap<>();
+ value.put("field", "value");
+ list.add(value);
+ list.add(null);
+
+ document.put("list", list);
+ ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, document);
+ }
+
+ public void testSimpleGetFieldValue() {
+ assertThat(ingestDocument.getFieldValue("foo", String.class), equalTo("bar"));
+ assertThat(ingestDocument.getFieldValue("int", Integer.class), equalTo(123));
+ assertThat(ingestDocument.getFieldValue("_source.foo", String.class), equalTo("bar"));
+ assertThat(ingestDocument.getFieldValue("_source.int", Integer.class), equalTo(123));
+ assertThat(ingestDocument.getFieldValue("_index", String.class), equalTo("index"));
+ assertThat(ingestDocument.getFieldValue("_type", String.class), equalTo("type"));
+ assertThat(ingestDocument.getFieldValue("_id", String.class), equalTo("id"));
+ assertThat(ingestDocument.getFieldValue("_ingest.timestamp", String.class), both(notNullValue()).and(not(equalTo("bogus_timestamp"))));
+ assertThat(ingestDocument.getFieldValue("_source._ingest.timestamp", String.class), equalTo("bogus_timestamp"));
+ }
+
+ public void testGetSourceObject() {
+ try {
+ ingestDocument.getFieldValue("_source", Object.class);
+ fail("get field value should have failed");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("field [_source] not present as part of path [_source]"));
+ }
+ }
+
+ public void testGetIngestObject() {
+ assertThat(ingestDocument.getFieldValue("_ingest", Map.class), notNullValue());
+ }
+
+ public void testGetEmptyPathAfterStrippingOutPrefix() {
+ try {
+ ingestDocument.getFieldValue("_source.", Object.class);
+ fail("get field value should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("path [_source.] is not valid"));
+ }
+
+ try {
+ ingestDocument.getFieldValue("_ingest.", Object.class);
+ fail("get field value should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("path [_ingest.] is not valid"));
+ }
+ }
+
+ public void testGetFieldValueNullValue() {
+ assertThat(ingestDocument.getFieldValue("fizz.foo_null", Object.class), nullValue());
+ }
+
+ public void testSimpleGetFieldValueTypeMismatch() {
+ try {
+ ingestDocument.getFieldValue("int", String.class);
+ fail("getFieldValue should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("field [int] of type [java.lang.Integer] cannot be cast to [java.lang.String]"));
+ }
+
+ try {
+ ingestDocument.getFieldValue("foo", Integer.class);
+ fail("getFieldValue should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("field [foo] of type [java.lang.String] cannot be cast to [java.lang.Integer]"));
+ }
+ }
+
+ public void testNestedGetFieldValue() {
+ assertThat(ingestDocument.getFieldValue("fizz.buzz", String.class), equalTo("hello world"));
+ assertThat(ingestDocument.getFieldValue("fizz.1", String.class), equalTo("bar"));
+ }
+
+ public void testNestedGetFieldValueTypeMismatch() {
+ try {
+ ingestDocument.getFieldValue("foo.foo.bar", String.class);
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("cannot resolve [foo] from object of type [java.lang.String] as part of path [foo.foo.bar]"));
+ }
+ }
+
+ public void testListGetFieldValue() {
+ assertThat(ingestDocument.getFieldValue("list.0.field", String.class), equalTo("value"));
+ }
+
+ public void testListGetFieldValueNull() {
+ assertThat(ingestDocument.getFieldValue("list.1", String.class), nullValue());
+ }
+
+ public void testListGetFieldValueIndexNotNumeric() {
+ try {
+ ingestDocument.getFieldValue("list.test.field", String.class);
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("[test] is not an integer, cannot be used as an index as part of path [list.test.field]"));
+ }
+ }
+
+ public void testListGetFieldValueIndexOutOfBounds() {
+ try {
+ ingestDocument.getFieldValue("list.10.field", String.class);
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("[10] is out of bounds for array with length [2] as part of path [list.10.field]"));
+ }
+ }
+
+ public void testGetFieldValueNotFound() {
+ try {
+ ingestDocument.getFieldValue("not.here", String.class);
+ fail("get field value should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("field [not] not present as part of path [not.here]"));
+ }
+ }
+
+ public void testGetFieldValueNotFoundNullParent() {
+ try {
+ ingestDocument.getFieldValue("fizz.foo_null.not_there", String.class);
+ fail("get field value should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("cannot resolve [not_there] from null as part of path [fizz.foo_null.not_there]"));
+ }
+ }
+
+ public void testGetFieldValueNull() {
+ try {
+ ingestDocument.getFieldValue(null, String.class);
+ fail("get field value should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("path cannot be null nor empty"));
+ }
+ }
+
+ public void testGetFieldValueEmpty() {
+ try {
+ ingestDocument.getFieldValue("", String.class);
+ fail("get field value should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("path cannot be null nor empty"));
+ }
+ }
+
+ public void testHasField() {
+ assertTrue(ingestDocument.hasField("fizz"));
+ assertTrue(ingestDocument.hasField("_index"));
+ assertTrue(ingestDocument.hasField("_type"));
+ assertTrue(ingestDocument.hasField("_id"));
+ assertTrue(ingestDocument.hasField("_source.fizz"));
+ assertTrue(ingestDocument.hasField("_ingest.timestamp"));
+ }
+
+ public void testHasFieldNested() {
+ assertTrue(ingestDocument.hasField("fizz.buzz"));
+ assertTrue(ingestDocument.hasField("_source._ingest.timestamp"));
+ }
+
+ public void testListHasField() {
+ assertTrue(ingestDocument.hasField("list.0.field"));
+ }
+
+ public void testListHasFieldNull() {
+ assertTrue(ingestDocument.hasField("list.1"));
+ }
+
+ public void testListHasFieldIndexOutOfBounds() {
+ assertFalse(ingestDocument.hasField("list.10"));
+ }
+
+ public void testListHasFieldIndexNotNumeric() {
+ assertFalse(ingestDocument.hasField("list.test"));
+ }
+
+ public void testNestedHasFieldTypeMismatch() {
+ assertFalse(ingestDocument.hasField("foo.foo.bar"));
+ }
+
+ public void testHasFieldNotFound() {
+ assertFalse(ingestDocument.hasField("not.here"));
+ }
+
+ public void testHasFieldNotFoundNullParent() {
+ assertFalse(ingestDocument.hasField("fizz.foo_null.not_there"));
+ }
+
+ public void testHasFieldNestedNotFound() {
+ assertFalse(ingestDocument.hasField("fizz.doesnotexist"));
+ }
+
+ public void testHasFieldNull() {
+ try {
+ ingestDocument.hasField(null);
+ fail("has field should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("path cannot be null nor empty"));
+ }
+ }
+
+ public void testHasFieldNullValue() {
+ assertTrue(ingestDocument.hasField("fizz.foo_null"));
+ }
+
+ public void testHasFieldEmpty() {
+ try {
+ ingestDocument.hasField("");
+ fail("has field should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("path cannot be null nor empty"));
+ }
+ }
+
+ public void testHasFieldSourceObject() {
+ assertThat(ingestDocument.hasField("_source"), equalTo(false));
+ }
+
+ public void testHasFieldIngestObject() {
+ assertThat(ingestDocument.hasField("_ingest"), equalTo(true));
+ }
+
+ public void testHasFieldEmptyPathAfterStrippingOutPrefix() {
+ try {
+ ingestDocument.hasField("_source.");
+ fail("has field value should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("path [_source.] is not valid"));
+ }
+
+ try {
+ ingestDocument.hasField("_ingest.");
+ fail("has field value should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("path [_ingest.] is not valid"));
+ }
+ }
+
+ public void testSimpleSetFieldValue() {
+ ingestDocument.setFieldValue("new_field", "foo");
+ assertThat(ingestDocument.getSourceAndMetadata().get("new_field"), equalTo("foo"));
+ ingestDocument.setFieldValue("_ttl", "ttl");
+ assertThat(ingestDocument.getSourceAndMetadata().get("_ttl"), equalTo("ttl"));
+ ingestDocument.setFieldValue("_source.another_field", "bar");
+ assertThat(ingestDocument.getSourceAndMetadata().get("another_field"), equalTo("bar"));
+ ingestDocument.setFieldValue("_ingest.new_field", "new_value");
+ assertThat(ingestDocument.getIngestMetadata().size(), equalTo(2));
+ assertThat(ingestDocument.getIngestMetadata().get("new_field"), equalTo("new_value"));
+ ingestDocument.setFieldValue("_ingest.timestamp", "timestamp");
+ assertThat(ingestDocument.getIngestMetadata().get("timestamp"), equalTo("timestamp"));
+ }
+
+ public void testSetFieldValueNullValue() {
+ ingestDocument.setFieldValue("new_field", null);
+ assertThat(ingestDocument.getSourceAndMetadata().containsKey("new_field"), equalTo(true));
+ assertThat(ingestDocument.getSourceAndMetadata().get("new_field"), nullValue());
+ }
+
+ @SuppressWarnings("unchecked")
+ public void testNestedSetFieldValue() {
+ ingestDocument.setFieldValue("a.b.c.d", "foo");
+ assertThat(ingestDocument.getSourceAndMetadata().get("a"), instanceOf(Map.class));
+ Map<String, Object> a = (Map<String, Object>) ingestDocument.getSourceAndMetadata().get("a");
+ assertThat(a.get("b"), instanceOf(Map.class));
+ Map<String, Object> b = (Map<String, Object>) a.get("b");
+ assertThat(b.get("c"), instanceOf(Map.class));
+ Map<String, Object> c = (Map<String, Object>) b.get("c");
+ assertThat(c.get("d"), instanceOf(String.class));
+ String d = (String) c.get("d");
+ assertThat(d, equalTo("foo"));
+ }
+
+ public void testSetFieldValueOnExistingField() {
+ ingestDocument.setFieldValue("foo", "newbar");
+ assertThat(ingestDocument.getSourceAndMetadata().get("foo"), equalTo("newbar"));
+ }
+
+ @SuppressWarnings("unchecked")
+ public void testSetFieldValueOnExistingParent() {
+ ingestDocument.setFieldValue("fizz.new", "bar");
+ assertThat(ingestDocument.getSourceAndMetadata().get("fizz"), instanceOf(Map.class));
+ Map<String, Object> innerMap = (Map<String, Object>) ingestDocument.getSourceAndMetadata().get("fizz");
+ assertThat(innerMap.get("new"), instanceOf(String.class));
+ String value = (String) innerMap.get("new");
+ assertThat(value, equalTo("bar"));
+ }
+
+ public void testSetFieldValueOnExistingParentTypeMismatch() {
+ try {
+ ingestDocument.setFieldValue("fizz.buzz.new", "bar");
+ fail("add field should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("cannot set [new] with parent object of type [java.lang.String] as part of path [fizz.buzz.new]"));
+ }
+ }
+
+ public void testSetFieldValueOnExistingNullParent() {
+ try {
+ ingestDocument.setFieldValue("fizz.foo_null.test", "bar");
+ fail("add field should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("cannot set [test] with null parent as part of path [fizz.foo_null.test]"));
+ }
+ }
+
+ public void testSetFieldValueNullName() {
+ try {
+ ingestDocument.setFieldValue(null, "bar");
+ fail("add field should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("path cannot be null nor empty"));
+ }
+ }
+
+ public void testSetSourceObject() {
+ ingestDocument.setFieldValue("_source", "value");
+ assertThat(ingestDocument.getSourceAndMetadata().get("_source"), equalTo("value"));
+ }
+
+ public void testSetIngestObject() {
+ ingestDocument.setFieldValue("_ingest", "value");
+ assertThat(ingestDocument.getSourceAndMetadata().get("_ingest"), equalTo("value"));
+ }
+
+ public void testSetIngestSourceObject() {
+ //test that we don't strip out the _source prefix when _ingest is used
+ ingestDocument.setFieldValue("_ingest._source", "value");
+ assertThat(ingestDocument.getIngestMetadata().get("_source"), equalTo("value"));
+ }
+
+ public void testSetEmptyPathAfterStrippingOutPrefix() {
+ try {
+ ingestDocument.setFieldValue("_source.", "value");
+ fail("set field value should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("path [_source.] is not valid"));
+ }
+
+ try {
+ ingestDocument.setFieldValue("_ingest.", "_value");
+ fail("set field value should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("path [_ingest.] is not valid"));
+ }
+ }
+
+ public void testListSetFieldValueNoIndexProvided() {
+ ingestDocument.setFieldValue("list", "value");
+ Object object = ingestDocument.getSourceAndMetadata().get("list");
+ assertThat(object, instanceOf(String.class));
+ assertThat(object, equalTo("value"));
+ }
+
+ public void testListAppendFieldValue() {
+ ingestDocument.appendFieldValue("list", "new_value");
+ Object object = ingestDocument.getSourceAndMetadata().get("list");
+ assertThat(object, instanceOf(List.class));
+ @SuppressWarnings("unchecked")
+ List<Object> list = (List<Object>) object;
+ assertThat(list.size(), equalTo(3));
+ assertThat(list.get(0), equalTo(Collections.singletonMap("field", "value")));
+ assertThat(list.get(1), nullValue());
+ assertThat(list.get(2), equalTo("new_value"));
+ }
+
+ public void testListAppendFieldValues() {
+ ingestDocument.appendFieldValue("list", Arrays.asList("item1", "item2", "item3"));
+ Object object = ingestDocument.getSourceAndMetadata().get("list");
+ assertThat(object, instanceOf(List.class));
+ @SuppressWarnings("unchecked")
+ List<Object> list = (List<Object>) object;
+ assertThat(list.size(), equalTo(5));
+ assertThat(list.get(0), equalTo(Collections.singletonMap("field", "value")));
+ assertThat(list.get(1), nullValue());
+ assertThat(list.get(2), equalTo("item1"));
+ assertThat(list.get(3), equalTo("item2"));
+ assertThat(list.get(4), equalTo("item3"));
+ }
+
+ public void testAppendFieldValueToNonExistingList() {
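+ //appending to a non-existing field creates a new single-element list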
+ ingestDocument.appendFieldValue("non_existing_list", "new_value");
+ Object object = ingestDocument.getSourceAndMetadata().get("non_existing_list");
+ assertThat(object, instanceOf(List.class));
+ @SuppressWarnings("unchecked")
+ List<Object> list = (List<Object>) object;
+ assertThat(list.size(), equalTo(1));
+ assertThat(list.get(0), equalTo("new_value"));
+ }
+
+ public void testAppendFieldValuesToNonExistingList() {
+ ingestDocument.appendFieldValue("non_existing_list", Arrays.asList("item1", "item2", "item3"));
+ Object object = ingestDocument.getSourceAndMetadata().get("non_existing_list");
+ assertThat(object, instanceOf(List.class));
+ @SuppressWarnings("unchecked")
+ List<Object> list = (List<Object>) object;
+ assertThat(list.size(), equalTo(3));
+ assertThat(list.get(0), equalTo("item1"));
+ assertThat(list.get(1), equalTo("item2"));
+ assertThat(list.get(2), equalTo("item3"));
+ }
+
+ public void testAppendFieldValueConvertStringToList() {
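+ //appending to an existing scalar converts it into a list of the old value followed by the new one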
+ ingestDocument.appendFieldValue("fizz.buzz", "new_value");
+ Object object = ingestDocument.getSourceAndMetadata().get("fizz");
+ assertThat(object, instanceOf(Map.class));
+ @SuppressWarnings("unchecked")
+ Map<String, Object> map = (Map<String, Object>) object;
+ object = map.get("buzz");
+ assertThat(object, instanceOf(List.class));
+ @SuppressWarnings("unchecked")
+ List<Object> list = (List<Object>) object;
+ assertThat(list.size(), equalTo(2));
+ assertThat(list.get(0), equalTo("hello world"));
+ assertThat(list.get(1), equalTo("new_value"));
+ }
+
+ public void testAppendFieldValuesConvertStringToList() {
+ ingestDocument.appendFieldValue("fizz.buzz", Arrays.asList("item1", "item2", "item3"));
+ Object object = ingestDocument.getSourceAndMetadata().get("fizz");
+ assertThat(object, instanceOf(Map.class));
+ @SuppressWarnings("unchecked")
+ Map<String, Object> map = (Map<String, Object>) object;
+ object = map.get("buzz");
+ assertThat(object, instanceOf(List.class));
+ @SuppressWarnings("unchecked")
+ List<Object> list = (List<Object>) object;
+ assertThat(list.size(), equalTo(4));
+ assertThat(list.get(0), equalTo("hello world"));
+ assertThat(list.get(1), equalTo("item1"));
+ assertThat(list.get(2), equalTo("item2"));
+ assertThat(list.get(3), equalTo("item3"));
+ }
+
+ public void testAppendFieldValueConvertIntegerToList() {
+ ingestDocument.appendFieldValue("int", 456);
+ Object object = ingestDocument.getSourceAndMetadata().get("int");
+ assertThat(object, instanceOf(List.class));
+ @SuppressWarnings("unchecked")
+ List<Object> list = (List<Object>) object;
+ assertThat(list.size(), equalTo(2));
+ assertThat(list.get(0), equalTo(123));
+ assertThat(list.get(1), equalTo(456));
+ }
+
+ public void testAppendFieldValuesConvertIntegerToList() {
+ ingestDocument.appendFieldValue("int", Arrays.asList(456, 789));
+ Object object = ingestDocument.getSourceAndMetadata().get("int");
+ assertThat(object, instanceOf(List.class));
+ @SuppressWarnings("unchecked")
+ List<Object> list = (List<Object>) object;
+ assertThat(list.size(), equalTo(3));
+ assertThat(list.get(0), equalTo(123));
+ assertThat(list.get(1), equalTo(456));
+ assertThat(list.get(2), equalTo(789));
+ }
+
+ public void testAppendFieldValueConvertMapToList() {
+ ingestDocument.appendFieldValue("fizz", Collections.singletonMap("field", "value"));
+ Object object = ingestDocument.getSourceAndMetadata().get("fizz");
+ assertThat(object, instanceOf(List.class));
+ List<?> list = (List<?>) object;
+ assertThat(list.size(), equalTo(2));
+ assertThat(list.get(0), instanceOf(Map.class));
+ @SuppressWarnings("unchecked")
+ Map<String, Object> map = (Map<String, Object>) list.get(0);
+ assertThat(map.size(), equalTo(4));
+ assertThat(list.get(1), equalTo(Collections.singletonMap("field", "value")));
+ }
+
+ public void testAppendFieldValueToNull() {
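+ //appending to a null value produces a list whose first element is the original null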
+ ingestDocument.appendFieldValue("fizz.foo_null", "new_value");
+ Object object = ingestDocument.getSourceAndMetadata().get("fizz");
+ assertThat(object, instanceOf(Map.class));
+ @SuppressWarnings("unchecked")
+ Map<String, Object> map = (Map<String, Object>) object;
+ object = map.get("foo_null");
+ assertThat(object, instanceOf(List.class));
+ List<?> list = (List<?>) object;
+ assertThat(list.size(), equalTo(2));
+ assertThat(list.get(0), nullValue());
+ assertThat(list.get(1), equalTo("new_value"));
+ }
+
+ public void testAppendFieldValueToListElement() {
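+ //numeric path segments are treated as list indices, so fizz.list.0 targets the first element of the inner list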
+ ingestDocument.appendFieldValue("fizz.list.0", "item2");
+ Object object = ingestDocument.getSourceAndMetadata().get("fizz");
+ assertThat(object, instanceOf(Map.class));
+ @SuppressWarnings("unchecked")
+ Map<String, Object> map = (Map<String, Object>) object;
+ object = map.get("list");
+ assertThat(object, instanceOf(List.class));
+ @SuppressWarnings("unchecked")
+ List<Object> list = (List<Object>) object;
+ assertThat(list.size(), equalTo(1));
+ object = list.get(0);
+ assertThat(object, instanceOf(List.class));
+ @SuppressWarnings("unchecked")
+ List<String> innerList = (List<String>) object;
+ assertThat(innerList.size(), equalTo(2));
+ assertThat(innerList.get(0), equalTo("item1"));
+ assertThat(innerList.get(1), equalTo("item2"));
+ }
+
+ public void testAppendFieldValuesToListElement() {
+ ingestDocument.appendFieldValue("fizz.list.0", Arrays.asList("item2", "item3", "item4"));
+ Object object = ingestDocument.getSourceAndMetadata().get("fizz");
+ assertThat(object, instanceOf(Map.class));
+ @SuppressWarnings("unchecked")
+ Map<String, Object> map = (Map<String, Object>) object;
+ object = map.get("list");
+ assertThat(object, instanceOf(List.class));
+ @SuppressWarnings("unchecked")
+ List<Object> list = (List<Object>) object;
+ assertThat(list.size(), equalTo(1));
+ object = list.get(0);
+ assertThat(object, instanceOf(List.class));
+ @SuppressWarnings("unchecked")
+ List<String> innerList = (List<String>) object;
+ assertThat(innerList.size(), equalTo(4));
+ assertThat(innerList.get(0), equalTo("item1"));
+ assertThat(innerList.get(1), equalTo("item2"));
+ assertThat(innerList.get(2), equalTo("item3"));
+ assertThat(innerList.get(3), equalTo("item4"));
+ }
+
+ public void testAppendFieldValueConvertStringListElementToList() {
+ ingestDocument.appendFieldValue("fizz.list.0.0", "new_value");
+ Object object = ingestDocument.getSourceAndMetadata().get("fizz");
+ assertThat(object, instanceOf(Map.class));
+ @SuppressWarnings("unchecked")
+ Map<String, Object> map = (Map<String, Object>) object;
+ object = map.get("list");
+ assertThat(object, instanceOf(List.class));
+ @SuppressWarnings("unchecked")
+ List<Object> list = (List<Object>) object;
+ assertThat(list.size(), equalTo(1));
+ object = list.get(0);
+ assertThat(object, instanceOf(List.class));
+ @SuppressWarnings("unchecked")
+ List<Object> innerList = (List<Object>) object;
+ object = innerList.get(0);
+ assertThat(object, instanceOf(List.class));
+ @SuppressWarnings("unchecked")
+ List<String> innerInnerList = (List<String>) object;
+ assertThat(innerInnerList.size(), equalTo(2));
+ assertThat(innerInnerList.get(0), equalTo("item1"));
+ assertThat(innerInnerList.get(1), equalTo("new_value"));
+ }
+
+ public void testAppendFieldValuesConvertStringListElementToList() {
+ ingestDocument.appendFieldValue("fizz.list.0.0", Arrays.asList("item2", "item3", "item4"));
+ Object object = ingestDocument.getSourceAndMetadata().get("fizz");
+ assertThat(object, instanceOf(Map.class));
+ @SuppressWarnings("unchecked")
+ Map<String, Object> map = (Map<String, Object>) object;
+ object = map.get("list");
+ assertThat(object, instanceOf(List.class));
+ @SuppressWarnings("unchecked")
+ List<Object> list = (List<Object>) object;
+ assertThat(list.size(), equalTo(1));
+ object = list.get(0);
+ assertThat(object, instanceOf(List.class));
+ @SuppressWarnings("unchecked")
+ List<Object> innerList = (List<Object>) object;
+ object = innerList.get(0);
+ assertThat(object, instanceOf(List.class));
+ @SuppressWarnings("unchecked")
+ List<String> innerInnerList = (List<String>) object;
+ assertThat(innerInnerList.size(), equalTo(4));
+ assertThat(innerInnerList.get(0), equalTo("item1"));
+ assertThat(innerInnerList.get(1), equalTo("item2"));
+ assertThat(innerInnerList.get(2), equalTo("item3"));
+ assertThat(innerInnerList.get(3), equalTo("item4"));
+ }
+
+ public void testAppendFieldValueListElementConvertMapToList() {
+ ingestDocument.appendFieldValue("list.0", Collections.singletonMap("item2", "value2"));
+ Object object = ingestDocument.getSourceAndMetadata().get("list");
+ assertThat(object, instanceOf(List.class));
+ List<?> list = (List<?>) object;
+ assertThat(list.size(), equalTo(2));
+ assertThat(list.get(0), instanceOf(List.class));
+ assertThat(list.get(1), nullValue());
+ list = (List<?>) list.get(0);
+ assertThat(list.size(), equalTo(2));
+ assertThat(list.get(0), equalTo(Collections.singletonMap("field", "value")));
+ assertThat(list.get(1), equalTo(Collections.singletonMap("item2", "value2")));
+ }
+
+ public void testAppendFieldValueToNullListElement() {
+ ingestDocument.appendFieldValue("list.1", "new_value");
+ Object object = ingestDocument.getSourceAndMetadata().get("list");
+ assertThat(object, instanceOf(List.class));
+ List<?> list = (List<?>) object;
+ assertThat(list.get(1), instanceOf(List.class));
+ list = (List<?>) list.get(1);
+ assertThat(list.size(), equalTo(2));
+ assertThat(list.get(0), nullValue());
+ assertThat(list.get(1), equalTo("new_value"));
+ }
+
+ public void testAppendFieldValueToListOfMaps() {
+ ingestDocument.appendFieldValue("list", Collections.singletonMap("item2", "value2"));
+ Object object = ingestDocument.getSourceAndMetadata().get("list");
+ assertThat(object, instanceOf(List.class));
+ @SuppressWarnings("unchecked")
+ List<Object> list = (List<Object>) object;
+ assertThat(list.size(), equalTo(3));
+ assertThat(list.get(0), equalTo(Collections.singletonMap("field", "value")));
+ assertThat(list.get(1), nullValue());
+ assertThat(list.get(2), equalTo(Collections.singletonMap("item2", "value2")));
+ }
+
+ public void testListSetFieldValueIndexProvided() {
+ ingestDocument.setFieldValue("list.1", "value");
+ Object object = ingestDocument.getSourceAndMetadata().get("list");
+ assertThat(object, instanceOf(List.class));
+ @SuppressWarnings("unchecked")
+ List<Object> list = (List<Object>) object;
+ assertThat(list.size(), equalTo(2));
+ assertThat(list.get(0), equalTo(Collections.singletonMap("field", "value")));
+ assertThat(list.get(1), equalTo("value"));
+ }
+
+ public void testSetFieldValueListAsPartOfPath() {
+ ingestDocument.setFieldValue("list.0.field", "new_value");
+ Object object = ingestDocument.getSourceAndMetadata().get("list");
+ assertThat(object, instanceOf(List.class));
+ @SuppressWarnings("unchecked")
+ List<Object> list = (List<Object>) object;
+ assertThat(list.size(), equalTo(2));
+ assertThat(list.get(0), equalTo(Collections.singletonMap("field", "new_value")));
+ assertThat(list.get(1), nullValue());
+ }
+
+ public void testListSetFieldValueIndexNotNumeric() {
+ try {
+ ingestDocument.setFieldValue("list.test", "value");
+ fail("set field value should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("[test] is not an integer, cannot be used as an index as part of path [list.test]"));
+ }
+
+ try {
+ ingestDocument.setFieldValue("list.test.field", "new_value");
+ fail("set field value should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("[test] is not an integer, cannot be used as an index as part of path [list.test.field]"));
+ }
+ }
+
+ public void testListSetFieldValueIndexOutOfBounds() {
+ try {
+ ingestDocument.setFieldValue("list.10", "value");
+ fail("set field value should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("[10] is out of bounds for array with length [2] as part of path [list.10]"));
+ }
+
+ try {
+ ingestDocument.setFieldValue("list.10.field", "value");
+ fail("set field value should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("[10] is out of bounds for array with length [2] as part of path [list.10.field]"));
+ }
+ }
+
+ public void testSetFieldValueEmptyName() {
+ try {
+ ingestDocument.setFieldValue("", "bar");
+ fail("add field should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("path cannot be null nor empty"));
+ }
+ }
+
+ public void testRemoveField() {
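+ //removal works on source fields, metadata fields, and paths qualified with the _source and _ingest prefixes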
+ ingestDocument.removeField("foo");
+ assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(7));
+ assertThat(ingestDocument.getSourceAndMetadata().containsKey("foo"), equalTo(false));
+ ingestDocument.removeField("_index");
+ assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(6));
+ assertThat(ingestDocument.getSourceAndMetadata().containsKey("_index"), equalTo(false));
+ ingestDocument.removeField("_source.fizz");
+ assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(5));
+ assertThat(ingestDocument.getSourceAndMetadata().containsKey("fizz"), equalTo(false));
+ assertThat(ingestDocument.getIngestMetadata().size(), equalTo(1));
+ ingestDocument.removeField("_ingest.timestamp");
+ assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(5));
+ assertThat(ingestDocument.getIngestMetadata().size(), equalTo(0));
+ }
+
+ public void testRemoveInnerField() {
+ ingestDocument.removeField("fizz.buzz");
+ assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(8));
+ assertThat(ingestDocument.getSourceAndMetadata().get("fizz"), instanceOf(Map.class));
+ @SuppressWarnings("unchecked")
+ Map<String, Object> map = (Map<String, Object>) ingestDocument.getSourceAndMetadata().get("fizz");
+ assertThat(map.size(), equalTo(3));
+ assertThat(map.containsKey("buzz"), equalTo(false));
+
+ ingestDocument.removeField("fizz.foo_null");
+ assertThat(map.size(), equalTo(2));
+ assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(8));
+ assertThat(ingestDocument.getSourceAndMetadata().containsKey("fizz"), equalTo(true));
+
+ ingestDocument.removeField("fizz.1");
+ assertThat(map.size(), equalTo(1));
+ assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(8));
+ assertThat(ingestDocument.getSourceAndMetadata().containsKey("fizz"), equalTo(true));
+
+ ingestDocument.removeField("fizz.list");
+ assertThat(map.size(), equalTo(0));
+ assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(8));
+ assertThat(ingestDocument.getSourceAndMetadata().containsKey("fizz"), equalTo(true));
+ }
+
+ public void testRemoveNonExistingField() {
+ try {
+ ingestDocument.removeField("does_not_exist");
+ fail("remove field should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("field [does_not_exist] not present as part of path [does_not_exist]"));
+ }
+ }
+
+ public void testRemoveExistingParentTypeMismatch() {
+ try {
+ ingestDocument.removeField("foo.foo.bar");
+ fail("remove field should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("cannot resolve [foo] from object of type [java.lang.String] as part of path [foo.foo.bar]"));
+ }
+ }
+
+ public void testRemoveSourceObject() {
+ try {
+ ingestDocument.removeField("_source");
+ fail("remove field should have failed");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("field [_source] not present as part of path [_source]"));
+ }
+ }
+
+ public void testRemoveIngestObject() {
+ ingestDocument.removeField("_ingest");
+ assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(7));
+ assertThat(ingestDocument.getSourceAndMetadata().containsKey("_ingest"), equalTo(false));
+ }
+
+ public void testRemoveEmptyPathAfterStrippingOutPrefix() {
+ try {
+ ingestDocument.removeField("_source.");
+ fail("set field value should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("path [_source.] is not valid"));
+ }
+
+ try {
+ ingestDocument.removeField("_ingest.");
+ fail("set field value should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("path [_ingest.] is not valid"));
+ }
+ }
+
+ public void testListRemoveField() {
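+ //removing list.0.field empties the map at index 0, while removing list.0 drops that element entirely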
+ ingestDocument.removeField("list.0.field");
+ assertThat(ingestDocument.getSourceAndMetadata().size(), equalTo(8));
+ assertThat(ingestDocument.getSourceAndMetadata().containsKey("list"), equalTo(true));
+ Object object = ingestDocument.getSourceAndMetadata().get("list");
+ assertThat(object, instanceOf(List.class));
+ @SuppressWarnings("unchecked")
+ List<Object> list = (List<Object>) object;
+ assertThat(list.size(), equalTo(2));
+ object = list.get(0);
+ assertThat(object, instanceOf(Map.class));
+ @SuppressWarnings("unchecked")
+ Map<String, Object> map = (Map<String, Object>) object;
+ assertThat(map.size(), equalTo(0));
+ ingestDocument.removeField("list.0");
+ assertThat(list.size(), equalTo(1));
+ assertThat(list.get(0), nullValue());
+ }
+
+ public void testRemoveFieldValueNotFoundNullParent() {
+ try {
+ ingestDocument.removeField("fizz.foo_null.not_there");
+ fail("get field value should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("cannot remove [not_there] from null as part of path [fizz.foo_null.not_there]"));
+ }
+ }
+
+ public void testNestedRemoveFieldTypeMismatch() {
+ try {
+ ingestDocument.removeField("fizz.1.bar");
+ fail("remove field should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("cannot remove [bar] from object of type [java.lang.String] as part of path [fizz.1.bar]"));
+ }
+ }
+
+ public void testListRemoveFieldIndexNotNumeric() {
+ try {
+ ingestDocument.removeField("list.test");
+ fail("remove field should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("[test] is not an integer, cannot be used as an index as part of path [list.test]"));
+ }
+ }
+
+ public void testListRemoveFieldIndexOutOfBounds() {
+ try {
+ ingestDocument.removeField("list.10");
+ fail("remove field should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("[10] is out of bounds for array with length [2] as part of path [list.10]"));
+ }
+ }
+
+ public void testRemoveNullField() {
+ try {
+ ingestDocument.removeField((String) null);
+ fail("remove field should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("path cannot be null nor empty"));
+ }
+ }
+
+ public void testRemoveEmptyField() {
+ try {
+ ingestDocument.removeField("");
+ fail("remove field should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("path cannot be null nor empty"));
+ }
+ }
+
+ public void testEqualsAndHashcode() throws Exception {
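+ //randomized equals/hashCode contract check: equal inputs yield equal documents with matching hash codes, modified inputs break equality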
+ Map<String, Object> sourceAndMetadata = RandomDocumentPicks.randomSource(random());
+ int numFields = randomIntBetween(1, IngestDocument.MetaData.values().length);
+ for (int i = 0; i < numFields; i++) {
+ sourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10));
+ }
+ Map<String, String> ingestMetadata = new HashMap<>();
+ numFields = randomIntBetween(1, 5);
+ for (int i = 0; i < numFields; i++) {
+ ingestMetadata.put(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10));
+ }
+ IngestDocument ingestDocument = new IngestDocument(sourceAndMetadata, ingestMetadata);
+
+ boolean changed = false;
+ Map<String, Object> otherSourceAndMetadata;
+ if (randomBoolean()) {
+ otherSourceAndMetadata = RandomDocumentPicks.randomSource(random());
+ changed = true;
+ } else {
+ otherSourceAndMetadata = new HashMap<>(sourceAndMetadata);
+ }
+ if (randomBoolean()) {
+ numFields = randomIntBetween(1, IngestDocument.MetaData.values().length);
+ for (int i = 0; i < numFields; i++) {
+ otherSourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10));
+ }
+ changed = true;
+ }
+
+ Map<String, String> otherIngestMetadata;
+ if (randomBoolean()) {
+ otherIngestMetadata = new HashMap<>();
+ numFields = randomIntBetween(1, 5);
+ for (int i = 0; i < numFields; i++) {
+ otherIngestMetadata.put(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10));
+ }
+ changed = true;
+ } else {
+ otherIngestMetadata = Collections.unmodifiableMap(ingestMetadata);
+ }
+
+ IngestDocument otherIngestDocument = new IngestDocument(otherSourceAndMetadata, otherIngestMetadata);
+ if (changed) {
+ assertThat(ingestDocument, not(equalTo(otherIngestDocument)));
+ assertThat(otherIngestDocument, not(equalTo(ingestDocument)));
+ } else {
+ assertThat(ingestDocument, equalTo(otherIngestDocument));
+ assertThat(otherIngestDocument, equalTo(ingestDocument));
+ assertThat(ingestDocument.hashCode(), equalTo(otherIngestDocument.hashCode()));
+ IngestDocument thirdIngestDocument = new IngestDocument(Collections.unmodifiableMap(sourceAndMetadata), Collections.unmodifiableMap(ingestMetadata));
+ assertThat(thirdIngestDocument, equalTo(ingestDocument));
+ assertThat(ingestDocument, equalTo(thirdIngestDocument));
+ assertThat(ingestDocument.hashCode(), equalTo(thirdIngestDocument.hashCode()));
+ }
+ }
+
+ public void testIngestMetadataTimestamp() throws Exception {
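+ //the ingest timestamp is captured when the document is created and rendered with an explicit +0000 (UTC) offset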
+ long before = System.currentTimeMillis();
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ long after = System.currentTimeMillis();
+ String timestampString = ingestDocument.getIngestMetadata().get("timestamp");
+ assertThat(timestampString, notNullValue());
+ assertThat(timestampString, endsWith("+0000"));
+ DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZZ", Locale.ROOT);
+ Date timestamp = df.parse(timestampString);
+ assertThat(timestamp.getTime(), greaterThanOrEqualTo(before));
+ assertThat(timestamp.getTime(), lessThanOrEqualTo(after));
+ }
+
+ public void testCopyConstructor() {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ IngestDocument copy = new IngestDocument(ingestDocument);
+ recursiveEqualsButNotSameCheck(ingestDocument.getSourceAndMetadata(), copy.getSourceAndMetadata());
+ }
+
+ private void recursiveEqualsButNotSameCheck(Object a, Object b) {
+ assertThat(a, not(sameInstance(b)));
+ assertThat(a, equalTo(b));
+ if (a instanceof Map) {
+ Map<?, ?> mapA = (Map<?, ?>) a;
+ Map<?, ?> mapB = (Map<?, ?>) b;
+ for (Map.Entry<?, ?> entry : mapA.entrySet()) {
+ if (entry.getValue() instanceof List || entry.getValue() instanceof Map) {
+ recursiveEqualsButNotSameCheck(entry.getValue(), mapB.get(entry.getKey()));
+ }
+ }
+ } else if (a instanceof List) {
+ List<?> listA = (List<?>) a;
+ List<?> listB = (List<?>) b;
+ for (int i = 0; i < listA.size(); i++) {
+ Object value = listA.get(i);
+ if (value instanceof List || value instanceof Map) {
+ recursiveEqualsButNotSameCheck(value, listB.get(i));
+ }
+ }
+ }
+ }
+
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java
new file mode 100644
index 0000000000..746ac2f561
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/core/PipelineFactoryTests.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.core;
+
+import org.elasticsearch.ingest.TestProcessor;
+import org.elasticsearch.ingest.processor.ConfigurationPropertyException;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.Matchers.nullValue;
+
+public class PipelineFactoryTests extends ESTestCase {
+
+ public void testCreate() throws Exception {
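+ //the optional tag is read from each processor config: the first processor carries one, the second defaults to null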
+ Map<String, Object> processorConfig0 = new HashMap<>();
+ Map<String, Object> processorConfig1 = new HashMap<>();
+ processorConfig0.put(AbstractProcessorFactory.TAG_KEY, "first-processor");
+ Map<String, Object> pipelineConfig = new HashMap<>();
+ pipelineConfig.put(Pipeline.DESCRIPTION_KEY, "_description");
+ pipelineConfig.put(Pipeline.PROCESSORS_KEY, Arrays.asList(Collections.singletonMap("test", processorConfig0), Collections.singletonMap("test", processorConfig1)));
+ Pipeline.Factory factory = new Pipeline.Factory();
+ Map<String, Processor.Factory> processorRegistry = Collections.singletonMap("test", new TestProcessor.Factory());
+ Pipeline pipeline = factory.create("_id", pipelineConfig, processorRegistry);
+ assertThat(pipeline.getId(), equalTo("_id"));
+ assertThat(pipeline.getDescription(), equalTo("_description"));
+ assertThat(pipeline.getProcessors().size(), equalTo(2));
+ assertThat(pipeline.getProcessors().get(0).getType(), equalTo("test-processor"));
+ assertThat(pipeline.getProcessors().get(0).getTag(), equalTo("first-processor"));
+ assertThat(pipeline.getProcessors().get(1).getType(), equalTo("test-processor"));
+ assertThat(pipeline.getProcessors().get(1).getTag(), nullValue());
+ }
+
+ public void testCreateWithNoProcessorsField() throws Exception {
+ Map<String, Object> pipelineConfig = new HashMap<>();
+ pipelineConfig.put(Pipeline.DESCRIPTION_KEY, "_description");
+ Pipeline.Factory factory = new Pipeline.Factory();
+ try {
+ factory.create("_id", pipelineConfig, Collections.emptyMap());
+ fail("should fail, missing required [processors] field");
+ } catch (ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), equalTo("[processors] required property is missing"));
+ }
+ }
+
+ public void testCreateWithPipelineOnFailure() throws Exception {
+ Map<String, Object> processorConfig = new HashMap<>();
+ Map<String, Object> pipelineConfig = new HashMap<>();
+ pipelineConfig.put(Pipeline.DESCRIPTION_KEY, "_description");
+ pipelineConfig.put(Pipeline.PROCESSORS_KEY, Collections.singletonList(Collections.singletonMap("test", processorConfig)));
+ pipelineConfig.put(Pipeline.ON_FAILURE_KEY, Collections.singletonList(Collections.singletonMap("test", processorConfig)));
+ Pipeline.Factory factory = new Pipeline.Factory();
+ Map<String, Processor.Factory> processorRegistry = Collections.singletonMap("test", new TestProcessor.Factory());
+ Pipeline pipeline = factory.create("_id", pipelineConfig, processorRegistry);
+ assertThat(pipeline.getId(), equalTo("_id"));
+ assertThat(pipeline.getDescription(), equalTo("_description"));
+ assertThat(pipeline.getProcessors().size(), equalTo(1));
+ assertThat(pipeline.getProcessors().get(0).getType(), equalTo("test-processor"));
+ assertThat(pipeline.getOnFailureProcessors().size(), equalTo(1));
+ assertThat(pipeline.getOnFailureProcessors().get(0).getType(), equalTo("test-processor"));
+ }
+
+ public void testCreateUnusedProcessorOptions() throws Exception {
+ Map<String, Object> processorConfig = new HashMap<>();
+ processorConfig.put("unused", "value");
+ Map<String, Object> pipelineConfig = new HashMap<>();
+ pipelineConfig.put(Pipeline.DESCRIPTION_KEY, "_description");
+ pipelineConfig.put(Pipeline.PROCESSORS_KEY, Collections.singletonList(Collections.singletonMap("test", processorConfig)));
+ Pipeline.Factory factory = new Pipeline.Factory();
+ Map<String, Processor.Factory> processorRegistry = Collections.singletonMap("test", new TestProcessor.Factory());
+ try {
+ factory.create("_id", pipelineConfig, processorRegistry);
+ fail("should fail, unused processor options were specified");
+ } catch (ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), equalTo("processor [test] doesn't support one or more provided configuration parameters [unused]"));
+ }
+ }
+
+ public void testCreateProcessorsWithOnFailureProperties() throws Exception {
+ Map<String, Object> processorConfig = new HashMap<>();
+ processorConfig.put(Pipeline.ON_FAILURE_KEY, Collections.singletonList(Collections.singletonMap("test", new HashMap<>())));
+
+ Map<String, Object> pipelineConfig = new HashMap<>();
+ pipelineConfig.put(Pipeline.DESCRIPTION_KEY, "_description");
+ pipelineConfig.put(Pipeline.PROCESSORS_KEY, Collections.singletonList(Collections.singletonMap("test", processorConfig)));
+ Pipeline.Factory factory = new Pipeline.Factory();
+ Map<String, Processor.Factory> processorRegistry = Collections.singletonMap("test", new TestProcessor.Factory());
+ Pipeline pipeline = factory.create("_id", pipelineConfig, processorRegistry);
+ assertThat(pipeline.getId(), equalTo("_id"));
+ assertThat(pipeline.getDescription(), equalTo("_description"));
+ assertThat(pipeline.getProcessors().size(), equalTo(1));
+ assertThat(pipeline.getProcessors().get(0).getType(), equalTo("compound"));
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/core/ValueSourceTests.java b/core/src/test/java/org/elasticsearch/ingest/core/ValueSourceTests.java
new file mode 100644
index 0000000000..f2aa9f32bc
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/core/ValueSourceTests.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.core;
+
+import org.elasticsearch.ingest.RandomDocumentPicks;
+import org.elasticsearch.ingest.TestTemplateService;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.sameInstance;
+
+public class ValueSourceTests extends ESTestCase {
+
+ public void testDeepCopy() {
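+ //copyAndResolve must return a deep copy: equal to the source map but never the same instance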
+ int iterations = scaledRandomIntBetween(8, 64);
+ for (int i = 0; i < iterations; i++) {
+ Map<String, Object> map = RandomDocumentPicks.randomSource(random());
+ ValueSource valueSource = ValueSource.wrap(map, TestTemplateService.instance());
+ Object copy = valueSource.copyAndResolve(Collections.emptyMap());
+ assertThat("iteration: " + i, copy, equalTo(map));
+ assertThat("iteration: " + i, copy, not(sameInstance(map)));
+ }
+ }
+
+ public void testCopyDoesNotChangeProvidedMap() {
+ Map<String, Object> myPreciousMap = new HashMap<>();
+ myPreciousMap.put("field2", "value2");
+
+ IngestDocument ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>());
+ ingestDocument.setFieldValue(TestTemplateService.instance().compile("field1"), ValueSource.wrap(myPreciousMap, TestTemplateService.instance()));
+ ingestDocument.removeField("field1.field2");
+
+ assertThat(myPreciousMap.size(), equalTo(1));
+ assertThat(myPreciousMap.get("field2"), equalTo("value2"));
+ }
+
+ public void testCopyDoesNotChangeProvidedList() {
+ List<String> myPreciousList = new ArrayList<>();
+ myPreciousList.add("value");
+
+ IngestDocument ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>());
+ ingestDocument.setFieldValue(TestTemplateService.instance().compile("field1"), ValueSource.wrap(myPreciousList, TestTemplateService.instance()));
+ ingestDocument.removeField("field1.0");
+
+ assertThat(myPreciousList.size(), equalTo(1));
+ assertThat(myPreciousList.get(0), equalTo("value"));
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java b/core/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java
new file mode 100644
index 0000000000..1113a4b402
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/AbstractStringProcessorTestCase.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.ingest.RandomDocumentPicks;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.Collections;
+import java.util.HashMap;
+
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+
+public abstract class AbstractStringProcessorTestCase extends ESTestCase {
+
+ protected abstract AbstractStringProcessor newProcessor(String field);
+
+ protected String modifyInput(String input) {
+ return input;
+ }
+
+ protected abstract String expectedResult(String input);
+
+ public void testProcessor() throws Exception {
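+ //subclasses provide the processor under test and the transformation it is expected to apply to a random input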
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ String fieldValue = RandomDocumentPicks.randomString(random());
+ String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, modifyInput(fieldValue));
+ Processor processor = newProcessor(fieldName);
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue(fieldName, String.class), equalTo(expectedResult(fieldValue)));
+ }
+
+ public void testFieldNotFound() throws Exception {
+ String fieldName = RandomDocumentPicks.randomFieldName(random());
+ Processor processor = newProcessor(fieldName);
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
+ try {
+ processor.execute(ingestDocument);
+ fail("processor should have failed");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), containsString("not present as part of path [" + fieldName + "]"));
+ }
+ }
+
+ public void testNullValue() throws Exception {
+ Processor processor = newProcessor("field");
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null));
+ try {
+ processor.execute(ingestDocument);
+ fail("processor should have failed");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("field [field] is null, cannot process it."));
+ }
+ }
+
+ public void testNonStringValue() throws Exception {
+ String fieldName = RandomDocumentPicks.randomFieldName(random());
+ Processor processor = newProcessor(fieldName);
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
+ ingestDocument.setFieldValue(fieldName, randomInt());
+ try {
+ processor.execute(ingestDocument);
+ fail("processor should have failed");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("field [" + fieldName + "] of type [java.lang.Integer] cannot be cast to [java.lang.String]"));
+ }
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorFactoryTests.java
new file mode 100644
index 0000000000..c4c13a6ab7
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorFactoryTests.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.TestTemplateService;
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.Before;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+
+public class AppendProcessorFactoryTests extends ESTestCase {
+
+ private AppendProcessor.Factory factory;
+
+ @Before
+ public void init() {
+ factory = new AppendProcessor.Factory(TestTemplateService.instance());
+ }
+
+ public void testCreate() throws Exception {
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", "field1");
+ Object value;
+ if (randomBoolean()) {
+ value = "value1";
+ } else {
+ value = Arrays.asList("value1", "value2", "value3");
+ }
+ config.put("value", value);
+ String processorTag = randomAsciiOfLength(10);
+ config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
+ AppendProcessor appendProcessor = factory.create(config);
+ assertThat(appendProcessor.getTag(), equalTo(processorTag));
+ assertThat(appendProcessor.getField().execute(Collections.emptyMap()), equalTo("field1"));
+ assertThat(appendProcessor.getValue().copyAndResolve(Collections.emptyMap()), equalTo(value));
+ }
+
+ public void testCreateNoFieldPresent() throws Exception {
+ Map<String, Object> config = new HashMap<>();
+ config.put("value", "value1");
+ try {
+ factory.create(config);
+ fail("factory create should have failed");
+ } catch(ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), equalTo("[field] required property is missing"));
+ }
+ }
+
+ public void testCreateNoValuePresent() throws Exception {
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", "field1");
+ try {
+ factory.create(config);
+ fail("factory create should have failed");
+ } catch(ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), equalTo("[value] required property is missing"));
+ }
+ }
+
+ public void testCreateNullValue() throws Exception {
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", "field1");
+ config.put("value", null);
+ try {
+ factory.create(config);
+ fail("factory create should have failed");
+ } catch(ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), equalTo("[value] required property is missing"));
+ }
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorTests.java
new file mode 100644
index 0000000000..4a78ba621c
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/AppendProcessorTests.java
@@ -0,0 +1,209 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.RandomDocumentPicks;
+import org.elasticsearch.ingest.core.TemplateService;
+import org.elasticsearch.ingest.TestTemplateService;
+import org.elasticsearch.ingest.core.ValueSource;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.not;
+import static org.hamcrest.CoreMatchers.sameInstance;
+
+public class AppendProcessorTests extends ESTestCase {
+
+ public void testAppendValuesToExistingList() throws Exception {
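+ //appends either a single scalar or a list of scalars and verifies the existing list is mutated in place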
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ Scalar scalar = randomFrom(Scalar.values());
+ List<Object> list = new ArrayList<>();
+ int size = randomIntBetween(0, 10);
+ for (int i = 0; i < size; i++) {
+ list.add(scalar.randomValue());
+ }
+ List<Object> checkList = new ArrayList<>(list);
+ String field = RandomDocumentPicks.addRandomField(random(), ingestDocument, list);
+ List<Object> values = new ArrayList<>();
+ Processor appendProcessor;
+ if (randomBoolean()) {
+ Object value = scalar.randomValue();
+ values.add(value);
+ appendProcessor = createAppendProcessor(field, value);
+ } else {
+ int valuesSize = randomIntBetween(0, 10);
+ for (int i = 0; i < valuesSize; i++) {
+ values.add(scalar.randomValue());
+ }
+ appendProcessor = createAppendProcessor(field, values);
+ }
+ appendProcessor.execute(ingestDocument);
+ Object fieldValue = ingestDocument.getFieldValue(field, Object.class);
+ assertThat(fieldValue, sameInstance(list));
+ assertThat(list.size(), equalTo(size + values.size()));
+ for (int i = 0; i < size; i++) {
+ assertThat(list.get(i), equalTo(checkList.get(i)));
+ }
+ for (int i = size; i < size + values.size(); i++) {
+ assertThat(list.get(i), equalTo(values.get(i - size)));
+ }
+ }
+
+ public void testAppendValuesToNonExistingList() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
+ String field = RandomDocumentPicks.randomFieldName(random());
+ Scalar scalar = randomFrom(Scalar.values());
+ List<Object> values = new ArrayList<>();
+ Processor appendProcessor;
+ if (randomBoolean()) {
+ Object value = scalar.randomValue();
+ values.add(value);
+ appendProcessor = createAppendProcessor(field, value);
+ } else {
+ int valuesSize = randomIntBetween(0, 10);
+ for (int i = 0; i < valuesSize; i++) {
+ values.add(scalar.randomValue());
+ }
+ appendProcessor = createAppendProcessor(field, values);
+ }
+ appendProcessor.execute(ingestDocument);
+ List list = ingestDocument.getFieldValue(field, List.class);
+ assertThat(list, not(sameInstance(values)));
+ assertThat(list, equalTo(values));
+ }
+
+ public void testConvertScalarToList() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ Scalar scalar = randomFrom(Scalar.values());
+ Object initialValue = scalar.randomValue();
+ String field = RandomDocumentPicks.addRandomField(random(), ingestDocument, initialValue);
+ List<Object> values = new ArrayList<>();
+ Processor appendProcessor;
+ if (randomBoolean()) {
+ Object value = scalar.randomValue();
+ values.add(value);
+ appendProcessor = createAppendProcessor(field, value);
+ } else {
+ int valuesSize = randomIntBetween(0, 10);
+ for (int i = 0; i < valuesSize; i++) {
+ values.add(scalar.randomValue());
+ }
+ appendProcessor = createAppendProcessor(field, values);
+ }
+ appendProcessor.execute(ingestDocument);
+ List fieldValue = ingestDocument.getFieldValue(field, List.class);
+ assertThat(fieldValue.size(), equalTo(values.size() + 1));
+ assertThat(fieldValue.get(0), equalTo(initialValue));
+ for (int i = 1; i < values.size() + 1; i++) {
+ assertThat(fieldValue.get(i), equalTo(values.get(i - 1)));
+ }
+ }
+
+ public void testAppendMetadata() throws Exception {
+ //here any metadata field value becomes a list, which rarely makes sense,
+ //but append is streamlined to behave like set, so we test it anyway
+ IngestDocument.MetaData randomMetaData = randomFrom(IngestDocument.MetaData.values());
+ List<String> values = new ArrayList<>();
+ Processor appendProcessor;
+ if (randomBoolean()) {
+ String value = randomAsciiOfLengthBetween(1, 10);
+ values.add(value);
+ appendProcessor = createAppendProcessor(randomMetaData.getFieldName(), value);
+ } else {
+ int valuesSize = randomIntBetween(0, 10);
+ for (int i = 0; i < valuesSize; i++) {
+ values.add(randomAsciiOfLengthBetween(1, 10));
+ }
+ appendProcessor = createAppendProcessor(randomMetaData.getFieldName(), values);
+ }
+
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ Object initialValue = ingestDocument.getSourceAndMetadata().get(randomMetaData.getFieldName());
+ appendProcessor.execute(ingestDocument);
+ List list = ingestDocument.getFieldValue(randomMetaData.getFieldName(), List.class);
+ if (initialValue == null) {
+ assertThat(list, equalTo(values));
+ } else {
+ assertThat(list.size(), equalTo(values.size() + 1));
+ assertThat(list.get(0), equalTo(initialValue));
+ for (int i = 1; i < list.size(); i++) {
+ assertThat(list.get(i), equalTo(values.get(i - 1)));
+ }
+ }
+ }
+
+ private static Processor createAppendProcessor(String fieldName, Object fieldValue) {
+ TemplateService templateService = TestTemplateService.instance();
+ return new AppendProcessor(randomAsciiOfLength(10), templateService.compile(fieldName), ValueSource.wrap(fieldValue, templateService));
+ }
+
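+ //random value generators for every supported value type, including maps and null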
+ private enum Scalar {
+ INTEGER {
+ @Override
+ Object randomValue() {
+ return randomInt();
+ }
+ }, DOUBLE {
+ @Override
+ Object randomValue() {
+ return randomDouble();
+ }
+ }, FLOAT {
+ @Override
+ Object randomValue() {
+ return randomFloat();
+ }
+ }, BOOLEAN {
+ @Override
+ Object randomValue() {
+ return randomBoolean();
+ }
+ }, STRING {
+ @Override
+ Object randomValue() {
+ return randomAsciiOfLengthBetween(1, 10);
+ }
+ }, MAP {
+ @Override
+ Object randomValue() {
+ int numItems = randomIntBetween(1, 10);
+ Map<String, Object> map = new HashMap<>(numItems);
+ for (int i = 0; i < numItems; i++) {
+ map.put(randomAsciiOfLengthBetween(1, 10), randomFrom(Scalar.values()).randomValue());
+ }
+ return map;
+ }
+ }, NULL {
+ @Override
+ Object randomValue() {
+ return null;
+ }
+ };
+
+ abstract Object randomValue();
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorFactoryTests.java
new file mode 100644
index 0000000000..a07cec5c4e
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorFactoryTests.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.test.ESTestCase;
+import org.hamcrest.Matchers;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+
+public class ConvertProcessorFactoryTests extends ESTestCase {
+
+ public void testCreate() throws Exception {
+ ConvertProcessor.Factory factory = new ConvertProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ ConvertProcessor.Type type = randomFrom(ConvertProcessor.Type.values());
+ config.put("field", "field1");
+ config.put("type", type.toString());
+ String processorTag = randomAsciiOfLength(10);
+ config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
+ ConvertProcessor convertProcessor = factory.create(config);
+ assertThat(convertProcessor.getTag(), equalTo(processorTag));
+ assertThat(convertProcessor.getField(), equalTo("field1"));
+ assertThat(convertProcessor.getConvertType(), equalTo(type));
+ }
+
+ public void testCreateUnsupportedType() throws Exception {
+ ConvertProcessor.Factory factory = new ConvertProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ String type = "type-" + randomAsciiOfLengthBetween(1, 10);
+ config.put("field", "field1");
+ config.put("type", type);
+ try {
+ factory.create(config);
+ fail("factory create should have failed");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), Matchers.equalTo("type [" + type + "] not supported, cannot convert field."));
+ }
+ }
+
+ public void testCreateNoFieldPresent() throws Exception {
+ ConvertProcessor.Factory factory = new ConvertProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ String type = "type-" + randomAsciiOfLengthBetween(1, 10);
+ config.put("type", type);
+ try {
+ factory.create(config);
+ fail("factory create should have failed");
+ } catch (ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), Matchers.equalTo("[field] required property is missing"));
+ }
+ }
+
+ public void testCreateNoTypePresent() throws Exception {
+ ConvertProcessor.Factory factory = new ConvertProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", "field1");
+ try {
+ factory.create(config);
+ fail("factory create should have failed");
+ } catch (ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), Matchers.equalTo("[type] required property is missing"));
+ }
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorTests.java
new file mode 100644
index 0000000000..1350ebab60
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/ConvertProcessorTests.java
@@ -0,0 +1,268 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.RandomDocumentPicks;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+
+import static org.elasticsearch.ingest.processor.ConvertProcessor.Type;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+
+public class ConvertProcessorTests extends ESTestCase {
+
+ public void testConvertInt() throws Exception {
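+ //converting a value that is already an integer leaves it unchanged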
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ int randomInt = randomInt();
+ String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, randomInt);
+ Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, Type.INTEGER);
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue(fieldName, Integer.class), equalTo(randomInt));
+ }
+
+ public void testConvertIntList() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ int numItems = randomIntBetween(1, 10);
+ List<String> fieldValue = new ArrayList<>();
+ List<Integer> expectedList = new ArrayList<>();
+ for (int j = 0; j < numItems; j++) {
+ int randomInt = randomInt();
+ fieldValue.add(Integer.toString(randomInt));
+ expectedList.add(randomInt);
+ }
+ String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue);
+ Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, Type.INTEGER);
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue(fieldName, List.class), equalTo(expectedList));
+ }
+
+ public void testConvertIntError() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
+ String fieldName = RandomDocumentPicks.randomFieldName(random());
+ String value = "string-" + randomAsciiOfLengthBetween(1, 10);
+ ingestDocument.setFieldValue(fieldName, value);
+
+ Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, Type.INTEGER);
+ try {
+ processor.execute(ingestDocument);
+ fail("processor execute should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("unable to convert [" + value + "] to integer"));
+ }
+ }
+
+ public void testConvertFloat() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ Map<String, Float> expectedResult = new HashMap<>();
+ float randomFloat = randomFloat();
+ String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, randomFloat);
+ expectedResult.put(fieldName, randomFloat);
+
+ Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, Type.FLOAT);
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue(fieldName, Float.class), equalTo(randomFloat));
+ }
+
+ public void testConvertFloatList() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ int numItems = randomIntBetween(1, 10);
+ List<String> fieldValue = new ArrayList<>();
+ List<Float> expectedList = new ArrayList<>();
+ for (int j = 0; j < numItems; j++) {
+ float randomFloat = randomFloat();
+ fieldValue.add(Float.toString(randomFloat));
+ expectedList.add(randomFloat);
+ }
+ String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue);
+ Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, Type.FLOAT);
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue(fieldName, List.class), equalTo(expectedList));
+ }
+
+ public void testConvertFloatError() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
+ String fieldName = RandomDocumentPicks.randomFieldName(random());
+ String value = "string-" + randomAsciiOfLengthBetween(1, 10);
+ ingestDocument.setFieldValue(fieldName, value);
+
+ Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, Type.FLOAT);
+ try {
+ processor.execute(ingestDocument);
+ fail("processor execute should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("unable to convert [" + value + "] to float"));
+ }
+ }
+
+ public void testConvertBoolean() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ boolean randomBoolean = randomBoolean();
+ String booleanString = Boolean.toString(randomBoolean);
+ if (randomBoolean) {
+ booleanString = booleanString.toUpperCase(Locale.ROOT);
+ }
+ String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, booleanString);
+
+ Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, Type.BOOLEAN);
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue(fieldName, Boolean.class), equalTo(randomBoolean));
+ }
+
+ public void testConvertBooleanList() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ int numItems = randomIntBetween(1, 10);
+ List<String> fieldValue = new ArrayList<>();
+ List<Boolean> expectedList = new ArrayList<>();
+ for (int j = 0; j < numItems; j++) {
+ boolean randomBoolean = randomBoolean();
+ String booleanString = Boolean.toString(randomBoolean);
+ if (randomBoolean) {
+ booleanString = booleanString.toUpperCase(Locale.ROOT);
+ }
+ fieldValue.add(booleanString);
+ expectedList.add(randomBoolean);
+ }
+ String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue);
+ Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, Type.BOOLEAN);
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue(fieldName, List.class), equalTo(expectedList));
+ }
+
+ public void testConvertBooleanError() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
+ String fieldName = RandomDocumentPicks.randomFieldName(random());
+ String fieldValue;
+ if (randomBoolean()) {
+ fieldValue = "string-" + randomAsciiOfLengthBetween(1, 10);
+ } else {
+ // verify that conversion is strict: truthy/falsy values like "on", "off", "yes", "no", "0", "1" are rejected, not coerced
+ fieldValue = randomFrom("on", "off", "yes", "no", "0", "1");
+ }
+ ingestDocument.setFieldValue(fieldName, fieldValue);
+
+ Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, Type.BOOLEAN);
+ try {
+ processor.execute(ingestDocument);
+ fail("processor execute should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("[" + fieldValue + "] is not a boolean value, cannot convert to boolean"));
+ }
+ }
+
+ public void testConvertString() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ Object fieldValue;
+ String expectedFieldValue;
+ switch(randomIntBetween(0, 2)) {
+ case 0:
+ float randomFloat = randomFloat();
+ fieldValue = randomFloat;
+ expectedFieldValue = Float.toString(randomFloat);
+ break;
+ case 1:
+ int randomInt = randomInt();
+ fieldValue = randomInt;
+ expectedFieldValue = Integer.toString(randomInt);
+ break;
+ case 2:
+ boolean randomBoolean = randomBoolean();
+ fieldValue = randomBoolean;
+ expectedFieldValue = Boolean.toString(randomBoolean);
+ break;
+ default:
+ throw new UnsupportedOperationException();
+ }
+ String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue);
+
+ Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, Type.STRING);
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue(fieldName, String.class), equalTo(expectedFieldValue));
+ }
+
+ public void testConvertStringList() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ int numItems = randomIntBetween(1, 10);
+ List<Object> fieldValue = new ArrayList<>();
+ List<String> expectedList = new ArrayList<>();
+ for (int j = 0; j < numItems; j++) {
+ Object randomValue;
+ String randomValueString;
+ switch(randomIntBetween(0, 2)) {
+ case 0:
+ float randomFloat = randomFloat();
+ randomValue = randomFloat;
+ randomValueString = Float.toString(randomFloat);
+ break;
+ case 1:
+ int randomInt = randomInt();
+ randomValue = randomInt;
+ randomValueString = Integer.toString(randomInt);
+ break;
+ case 2:
+ boolean randomBoolean = randomBoolean();
+ randomValue = randomBoolean;
+ randomValueString = Boolean.toString(randomBoolean);
+ break;
+ default:
+ throw new UnsupportedOperationException();
+ }
+ fieldValue.add(randomValue);
+ expectedList.add(randomValueString);
+ }
+ String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue);
+ Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, Type.STRING);
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue(fieldName, List.class), equalTo(expectedList));
+ }
+
+ public void testConvertNonExistingField() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
+ String fieldName = RandomDocumentPicks.randomFieldName(random());
+ Type type = randomFrom(Type.values());
+ Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, type);
+ try {
+ processor.execute(ingestDocument);
+ fail("processor execute should have failed");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), containsString("not present as part of path [" + fieldName + "]"));
+ }
+ }
+
+ public void testConvertNullField() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null));
+ Type type = randomFrom(Type.values());
+ Processor processor = new ConvertProcessor(randomAsciiOfLength(10), "field", type);
+ try {
+ processor.execute(ingestDocument);
+ fail("processor execute should have failed");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("Field [field] is null, cannot be converted to type [" + type + "]"));
+ }
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/DateFormatTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/DateFormatTests.java
new file mode 100644
index 0000000000..401dd44d44
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/DateFormatTests.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.test.ESTestCase;
+import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
+
+import java.time.Instant;
+import java.time.ZoneId;
+import java.time.format.DateTimeFormatter;
+import java.util.Locale;
+import java.util.function.Function;
+
+import static org.hamcrest.core.IsEqual.equalTo;
+
+public class DateFormatTests extends ESTestCase {
+
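+ // each DateFormat constant exposes getFunction(format, timezone, locale), returning a String -> DateTime
+ // parser; only Joda interprets the format string, which is why the fixed formats below receive null for it
+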
+ public void testParseJoda() {
+ Function<String, DateTime> jodaFunction = DateFormat.Joda.getFunction("MMM dd HH:mm:ss Z", DateTimeZone.forOffsetHours(-8), Locale.ENGLISH);
+ assertThat(Instant.ofEpochMilli(jodaFunction.apply("Nov 24 01:29:01 -0800").getMillis())
+ .atZone(ZoneId.of("GMT-8"))
+ .format(DateTimeFormatter.ofPattern("MM dd HH:mm:ss", Locale.ENGLISH)),
+ equalTo("11 24 01:29:01"));
+ }
+
+ public void testParseUnixMs() {
+ assertThat(DateFormat.UnixMs.getFunction(null, DateTimeZone.UTC, null).apply("1000500").getMillis(), equalTo(1000500L));
+ }
+
+ public void testParseUnix() {
+ assertThat(DateFormat.Unix.getFunction(null, DateTimeZone.UTC, null).apply("1000.5").getMillis(), equalTo(1000500L));
+ }
+
+ public void testParseISO8601() {
+ assertThat(DateFormat.Iso8601.getFunction(null, DateTimeZone.UTC, null).apply("2001-01-01T00:00:00-0800").getMillis(), equalTo(978336000000L));
+ }
+
+ public void testParseISO8601Failure() {
+ Function<String, DateTime> function = DateFormat.Iso8601.getFunction(null, DateTimeZone.UTC, null);
+ try {
+ function.apply("2001-01-0:00-0800");
+ fail("parse should have failed");
+ } catch(IllegalArgumentException e) {
+ //all good
+ }
+ }
+
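+ // TAI64N labels are 24 hex characters: 8 bytes of seconds offset by 2^62, followed by 4 bytes of
+ // nanoseconds; the leading '@' is optional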
+ public void testTAI64NParse() {
+ String input = "4000000050d506482dbdf024";
+ String expected = "2012-12-22T03:00:46.767+02:00";
+ assertThat(DateFormat.Tai64n.getFunction(null, DateTimeZone.forOffsetHours(2), null).apply((randomBoolean() ? "@" : "") + input).toString(), equalTo(expected));
+ }
+
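+ // fromString matches the predefined format names exactly and is case-sensitive; anything else,
+ // including lowercase variants, falls back to Joda, where the string is treated as a pattern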
+ public void testFromString() {
+ assertThat(DateFormat.fromString("UNIX_MS"), equalTo(DateFormat.UnixMs));
+ assertThat(DateFormat.fromString("unix_ms"), equalTo(DateFormat.Joda));
+ assertThat(DateFormat.fromString("UNIX"), equalTo(DateFormat.Unix));
+ assertThat(DateFormat.fromString("unix"), equalTo(DateFormat.Joda));
+ assertThat(DateFormat.fromString("ISO8601"), equalTo(DateFormat.Iso8601));
+ assertThat(DateFormat.fromString("iso8601"), equalTo(DateFormat.Joda));
+ assertThat(DateFormat.fromString("TAI64N"), equalTo(DateFormat.Tai64n));
+ assertThat(DateFormat.fromString("tai64n"), equalTo(DateFormat.Joda));
+ assertThat(DateFormat.fromString("prefix-" + randomAsciiOfLengthBetween(1, 10)), equalTo(DateFormat.Joda));
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java
new file mode 100644
index 0000000000..1139f1968f
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java
@@ -0,0 +1,190 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.test.ESTestCase;
+import org.joda.time.DateTimeZone;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+
+public class DateProcessorFactoryTests extends ESTestCase {
+
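+ // the factory reads its settings from the config map; a typical configuration, sketched:
+ // {"match_field": "timestamp", "match_formats": ["ISO8601"], "target_field": "@timestamp"}
+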
+ public void testBuildDefaults() throws Exception {
+ DateProcessor.Factory factory = new DateProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ String sourceField = randomAsciiOfLengthBetween(1, 10);
+ config.put("match_field", sourceField);
+ config.put("match_formats", Collections.singletonList("dd/MM/yyyyy"));
+ String processorTag = randomAsciiOfLength(10);
+ config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
+ DateProcessor processor = factory.create(config);
+ assertThat(processor.getTag(), equalTo(processorTag));
+ assertThat(processor.getMatchField(), equalTo(sourceField));
+ assertThat(processor.getTargetField(), equalTo(DateProcessor.DEFAULT_TARGET_FIELD));
+ assertThat(processor.getMatchFormats(), equalTo(Collections.singletonList("dd/MM/yyyyy")));
+ assertThat(processor.getLocale(), equalTo(Locale.ENGLISH));
+ assertThat(processor.getTimezone(), equalTo(DateTimeZone.UTC));
+ }
+
+ public void testMatchFieldIsMandatory() throws Exception {
+ DateProcessor.Factory factory = new DateProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ String targetField = randomAsciiOfLengthBetween(1, 10);
+ config.put("target_field", targetField);
+ config.put("match_formats", Collections.singletonList("dd/MM/yyyyy"));
+
+ try {
+ factory.create(config);
+ fail("processor creation should have failed");
+ } catch(ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), containsString("[match_field] required property is missing"));
+ }
+ }
+
+ public void testMatchFormatsIsMandatory() throws Exception {
+ DateProcessor.Factory factory = new DateProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ String sourceField = randomAsciiOfLengthBetween(1, 10);
+ String targetField = randomAsciiOfLengthBetween(1, 10);
+ config.put("match_field", sourceField);
+ config.put("target_field", targetField);
+
+ try {
+ factory.create(config);
+ fail("processor creation should have failed");
+ } catch(ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), containsString("[match_formats] required property is missing"));
+ }
+ }
+
+ public void testParseLocale() throws Exception {
+ DateProcessor.Factory factory = new DateProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ String sourceField = randomAsciiOfLengthBetween(1, 10);
+ config.put("match_field", sourceField);
+ config.put("match_formats", Collections.singletonList("dd/MM/yyyyy"));
+ Locale locale = randomLocale(random());
+ config.put("locale", locale.toLanguageTag());
+
+ DateProcessor processor = factory.create(config);
+ assertThat(processor.getLocale().toLanguageTag(), equalTo(locale.toLanguageTag()));
+ }
+
+ public void testParseInvalidLocale() throws Exception {
+ DateProcessor.Factory factory = new DateProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ String sourceField = randomAsciiOfLengthBetween(1, 10);
+ config.put("match_field", sourceField);
+ config.put("match_formats", Collections.singletonList("dd/MM/yyyyy"));
+ config.put("locale", "invalid_locale");
+ try {
+ factory.create(config);
+ fail("should fail with invalid locale");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("Invalid language tag specified: invalid_locale"));
+ }
+ }
+
+ public void testParseTimezone() throws Exception {
+ DateProcessor.Factory factory = new DateProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ String sourceField = randomAsciiOfLengthBetween(1, 10);
+ config.put("match_field", sourceField);
+ config.put("match_formats", Collections.singletonList("dd/MM/yyyyy"));
+
+ DateTimeZone timezone = randomTimezone();
+ config.put("timezone", timezone.getID());
+ DateProcessor processor = factory.create(config);
+ assertThat(processor.getTimezone(), equalTo(timezone));
+ }
+
+ public void testParseInvalidTimezone() throws Exception {
+ DateProcessor.Factory factory = new DateProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ String sourceField = randomAsciiOfLengthBetween(1, 10);
+ config.put("match_field", sourceField);
+ config.put("match_formats", Collections.singletonList("dd/MM/yyyyy"));
+ config.put("timezone", "invalid_timezone");
+ try {
+ factory.create(config);
+ fail("invalid timezone should fail");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("The datetime zone id 'invalid_timezone' is not recognised"));
+ }
+ }
+
+ // generate a timezone from the ones available in joda; some timezones available in the jdk are not available in joda by default
+ private static DateTimeZone randomTimezone() {
+ List<String> ids = new ArrayList<>(DateTimeZone.getAvailableIDs());
+ Collections.sort(ids);
+ return DateTimeZone.forID(randomFrom(ids));
+ }
+
+ public void testParseMatchFormats() throws Exception {
+ DateProcessor.Factory factory = new DateProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ String sourceField = randomAsciiOfLengthBetween(1, 10);
+ config.put("match_field", sourceField);
+ config.put("match_formats", Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy"));
+
+ DateProcessor processor = factory.create(config);
+ assertThat(processor.getMatchFormats(), equalTo(Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy")));
+ }
+
+ public void testParseMatchFormatsFailure() throws Exception {
+ DateProcessor.Factory factory = new DateProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ String sourceField = randomAsciiOfLengthBetween(1, 10);
+ config.put("match_field", sourceField);
+ config.put("match_formats", "dd/MM/yyyy");
+
+ try {
+ factory.create(config);
+ fail("processor creation should have failed");
+ } catch(ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), containsString("[match_formats] property isn't a list, but of type [java.lang.String]"));
+ }
+ }
+
+ public void testParseTargetField() throws Exception {
+ DateProcessor.Factory factory = new DateProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ String sourceField = randomAsciiOfLengthBetween(1, 10);
+ String targetField = randomAsciiOfLengthBetween(1, 10);
+ config.put("match_field", sourceField);
+ config.put("target_field", targetField);
+ config.put("match_formats", Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy"));
+
+ DateProcessor processor = factory.create(config);
+ assertThat(processor.getTargetField(), equalTo(targetField));
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorTests.java
new file mode 100644
index 0000000000..5daab95a5d
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorTests.java
@@ -0,0 +1,147 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.RandomDocumentPicks;
+import org.elasticsearch.test.ESTestCase;
+import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.hamcrest.CoreMatchers.equalTo;
+
+public class DateProcessorTests extends ESTestCase {
+
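+ // DateProcessor tries each configured format in order until one parses the match field, then writes
+ // the result to the target field as an ISO8601 string
+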
+ public void testJodaPattern() {
+ DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.forID("Europe/Amsterdam"), Locale.ENGLISH,
+ "date_as_string", Collections.singletonList("yyyy dd MM hh:mm:ss"), "date_as_date");
+ Map<String, Object> document = new HashMap<>();
+ document.put("date_as_string", "2010 12 06 11:05:15");
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
+ dateProcessor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("2010-06-12T11:05:15.000+02:00"));
+ }
+
+ public void testJodaPatternMultipleFormats() {
+ List<String> matchFormats = new ArrayList<>();
+ matchFormats.add("yyyy dd MM");
+ matchFormats.add("dd/MM/yyyy");
+ matchFormats.add("dd-MM-yyyy");
+ DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.forID("Europe/Amsterdam"), Locale.ENGLISH,
+ "date_as_string", matchFormats, "date_as_date");
+
+ Map<String, Object> document = new HashMap<>();
+ document.put("date_as_string", "2010 12 06");
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
+ dateProcessor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00"));
+
+ document = new HashMap<>();
+ document.put("date_as_string", "12/06/2010");
+ ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
+ dateProcessor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00"));
+
+ document = new HashMap<>();
+ document.put("date_as_string", "12-06-2010");
+ ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
+ dateProcessor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00"));
+
+ document = new HashMap<>();
+ document.put("date_as_string", "2010");
+ ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
+ try {
+ dateProcessor.execute(ingestDocument);
+ fail("processor should have failed due to not supported date format");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), containsString("unable to parse date [2010]"));
+ }
+ }
+
+ public void testInvalidJodaPattern() {
+ try {
+ new DateProcessor(randomAsciiOfLength(10), DateTimeZone.UTC, randomLocale(random()),
+ "date_as_string", Collections.singletonList("invalid pattern"), "date_as_date");
+ fail("date processor initialization should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("Illegal pattern component: i"));
+ }
+ }
+
+ public void testJodaPatternLocale() {
+ DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.forID("Europe/Amsterdam"), Locale.ITALIAN,
+ "date_as_string", Collections.singletonList("yyyy dd MMM"), "date_as_date");
+ Map<String, Object> document = new HashMap<>();
+ document.put("date_as_string", "2010 12 giugno");
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
+ dateProcessor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("2010-06-12T00:00:00.000+02:00"));
+ }
+
+ public void testJodaPatternDefaultYear() {
+ DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.forID("Europe/Amsterdam"), Locale.ENGLISH,
+ "date_as_string", Collections.singletonList("dd/MM"), "date_as_date");
+ Map<String, Object> document = new HashMap<>();
+ document.put("date_as_string", "12/06");
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
+ dateProcessor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo(DateTime.now().getYear() + "-06-12T00:00:00.000+02:00"));
+ }
+
+ public void testTAI64N() {
+ DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.forOffsetHours(2), randomLocale(random()),
+ "date_as_string", Collections.singletonList("TAI64N"), "date_as_date");
+ Map<String, Object> document = new HashMap<>();
+ String dateAsString = (randomBoolean() ? "@" : "") + "4000000050d506482dbdf024";
+ document.put("date_as_string", dateAsString);
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
+ dateProcessor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("2012-12-22T03:00:46.767+02:00"));
+ }
+
+ public void testUnixMs() {
+ DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.UTC, randomLocale(random()),
+ "date_as_string", Collections.singletonList("UNIX_MS"), "date_as_date");
+ Map<String, Object> document = new HashMap<>();
+ document.put("date_as_string", "1000500");
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
+ dateProcessor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("1970-01-01T00:16:40.500Z"));
+ }
+
+ public void testUnix() {
+ DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.UTC, randomLocale(random()),
+ "date_as_string", Collections.singletonList("UNIX"), "date_as_date");
+ Map<String, Object> document = new HashMap<>();
+ document.put("date_as_string", "1000.5");
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
+ dateProcessor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("1970-01-01T00:16:40.500Z"));
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorFactoryTests.java
new file mode 100644
index 0000000000..63eee56cc6
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorFactoryTests.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.Before;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+
+public class DeDotProcessorFactoryTests extends ESTestCase {
+
+ private DeDotProcessor.Factory factory;
+
+ @Before
+ public void init() {
+ factory = new DeDotProcessor.Factory();
+ }
+
+ public void testCreate() throws Exception {
+ Map<String, Object> config = new HashMap<>();
+ config.put("separator", "_");
+ String processorTag = randomAsciiOfLength(10);
+ config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
+ DeDotProcessor deDotProcessor = factory.create(config);
+ assertThat(deDotProcessor.getSeparator(), equalTo("_"));
+ assertThat(deDotProcessor.getTag(), equalTo(processorTag));
+ }
+
+ public void testCreateMissingSeparatorField() throws Exception {
+ Map<String, Object> config = new HashMap<>();
+ DeDotProcessor deDotProcessor = factory.create(config);
+ assertThat(deDotProcessor.getSeparator(), equalTo(DeDotProcessor.DEFAULT_SEPARATOR));
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorTests.java
new file mode 100644
index 0000000000..a0c87d7a16
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/DeDotProcessorTests.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class DeDotProcessorTests extends ESTestCase {
+
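+ // DeDotProcessor rewrites field names containing dots, replacing each dot with the configured
+ // separator and recursing into nested maps and lists
+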
+ public void testSimple() throws Exception {
+ Map<String, Object> source = new HashMap<>();
+ source.put("a.b", "hello world!");
+ IngestDocument ingestDocument = new IngestDocument(source, Collections.emptyMap());
+ String separator = randomUnicodeOfCodepointLengthBetween(1, 10);
+ Processor processor = new DeDotProcessor(randomAsciiOfLength(10), separator);
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.getSourceAndMetadata().get("a" + separator + "b" ), equalTo("hello world!"));
+ }
+
+ public void testSimpleMap() throws Exception {
+ Map<String, Object> source = new HashMap<>();
+ Map<String, Object> subField = new HashMap<>();
+ subField.put("b.c", "hello world!");
+ source.put("a", subField);
+ IngestDocument ingestDocument = new IngestDocument(source, Collections.emptyMap());
+ Processor processor = new DeDotProcessor(randomAsciiOfLength(10), "_");
+ processor.execute(ingestDocument);
+
+ IngestDocument expectedDocument = new IngestDocument(
+ Collections.singletonMap("a", Collections.singletonMap("b_c", "hello world!")),
+ Collections.emptyMap());
+ assertThat(ingestDocument, equalTo(expectedDocument));
+ }
+
+ public void testSimpleList() throws Exception {
+ Map<String, Object> source = new HashMap<>();
+ Map<String, Object> subField = new HashMap<>();
+ subField.put("b.c", "hello world!");
+ source.put("a", Arrays.asList(subField));
+ IngestDocument ingestDocument = new IngestDocument(source, Collections.emptyMap());
+ Processor processor = new DeDotProcessor(randomAsciiOfLength(10), "_");
+ processor.execute(ingestDocument);
+
+ IngestDocument expectedDocument = new IngestDocument(
+ Collections.singletonMap("a",
+ Collections.singletonList(Collections.singletonMap("b_c", "hello world!"))),
+ Collections.emptyMap());
+ assertThat(ingestDocument, equalTo(expectedDocument));
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/FailProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/FailProcessorFactoryTests.java
new file mode 100644
index 0000000000..661a6383df
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/FailProcessorFactoryTests.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.TestTemplateService;
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.Before;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+
+public class FailProcessorFactoryTests extends ESTestCase {
+
+ private FailProcessor.Factory factory;
+
+ @Before
+ public void init() {
+ factory = new FailProcessor.Factory(TestTemplateService.instance());
+ }
+
+ public void testCreate() throws Exception {
+ Map<String, Object> config = new HashMap<>();
+ config.put("message", "error");
+ String processorTag = randomAsciiOfLength(10);
+ config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
+ FailProcessor failProcessor = factory.create(config);
+ assertThat(failProcessor.getTag(), equalTo(processorTag));
+ assertThat(failProcessor.getMessage().execute(Collections.emptyMap()), equalTo("error"));
+ }
+
+ public void testCreateMissingMessageField() throws Exception {
+ Map<String, Object> config = new HashMap<>();
+ try {
+ factory.create(config);
+ fail("factory create should have failed");
+ } catch(ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), equalTo("[message] required property is missing"));
+ }
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/FailProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/FailProcessorTests.java
new file mode 100644
index 0000000000..3fdc2073c0
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/FailProcessorTests.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.ingest.RandomDocumentPicks;
+import org.elasticsearch.ingest.TestTemplateService;
+import org.elasticsearch.test.ESTestCase;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class FailProcessorTests extends ESTestCase {
+
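+ // FailProcessor always throws FailProcessorException, carrying the rendered message template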
+ public void test() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ String message = randomAsciiOfLength(10);
+ Processor processor = new FailProcessor(randomAsciiOfLength(10), new TestTemplateService.MockTemplate(message));
+ try {
+ processor.execute(ingestDocument);
+ fail("fail processor should throw an exception");
+ } catch (FailProcessorException e) {
+ assertThat(e.getMessage(), equalTo(message));
+ }
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorFactoryTests.java
new file mode 100644
index 0000000000..bce033091a
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorFactoryTests.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+
+public class GsubProcessorFactoryTests extends ESTestCase {
+
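+ // a typical configuration, sketched: {"field": "source_ip", "pattern": "\\.", "replacement": "-"};
+ // all three properties are mandatory, as the tests below verify
+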
+ public void testCreate() throws Exception {
+ GsubProcessor.Factory factory = new GsubProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", "field1");
+ config.put("pattern", "\\.");
+ config.put("replacement", "-");
+ String processorTag = randomAsciiOfLength(10);
+ config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
+ GsubProcessor gsubProcessor = factory.create(config);
+ assertThat(gsubProcessor.getTag(), equalTo(processorTag));
+ assertThat(gsubProcessor.getField(), equalTo("field1"));
+ assertThat(gsubProcessor.getPattern().toString(), equalTo("\\."));
+ assertThat(gsubProcessor.getReplacement(), equalTo("-"));
+ }
+
+ public void testCreateNoFieldPresent() throws Exception {
+ GsubProcessor.Factory factory = new GsubProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ config.put("pattern", "\\.");
+ config.put("replacement", "-");
+ try {
+ factory.create(config);
+ fail("factory create should have failed");
+ } catch(ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), equalTo("[field] required property is missing"));
+ }
+ }
+
+ public void testCreateNoPatternPresent() throws Exception {
+ GsubProcessor.Factory factory = new GsubProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", "field1");
+ config.put("replacement", "-");
+ try {
+ factory.create(config);
+ fail("factory create should have failed");
+ } catch(ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), equalTo("[pattern] required property is missing"));
+ }
+ }
+
+ public void testCreateNoReplacementPresent() throws Exception {
+ GsubProcessor.Factory factory = new GsubProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", "field1");
+ config.put("pattern", "\\.");
+ try {
+ factory.create(config);
+ fail("factory create should have failed");
+ } catch(ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), equalTo("[replacement] required property is missing"));
+ }
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorTests.java
new file mode 100644
index 0000000000..fe44f33b61
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/GsubProcessorTests.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.RandomDocumentPicks;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.regex.Pattern;
+
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+
+public class GsubProcessorTests extends ESTestCase {
+
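+ // GsubProcessor applies the precompiled pattern to the string field and replaces every match
+ // (replaceAll semantics), e.g. "127.0.0.1" with pattern \. and replacement - becomes "127-0-0-1"
+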
+ public void testGsub() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "127.0.0.1");
+ Processor processor = new GsubProcessor(randomAsciiOfLength(10), fieldName, Pattern.compile("\\."), "-");
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue(fieldName, String.class), equalTo("127-0-0-1"));
+ }
+
+ public void testGsubNotAStringValue() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
+ String fieldName = RandomDocumentPicks.randomFieldName(random());
+ ingestDocument.setFieldValue(fieldName, 123);
+ Processor processor = new GsubProcessor(randomAsciiOfLength(10), fieldName, Pattern.compile("\\."), "-");
+ try {
+ processor.execute(ingestDocument);
+ fail("processor execution should have failed");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("field [" + fieldName + "] of type [java.lang.Integer] cannot be cast to [java.lang.String]"));
+ }
+ }
+
+ public void testGsubFieldNotFound() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
+ String fieldName = RandomDocumentPicks.randomFieldName(random());
+ Processor processor = new GsubProcessor(randomAsciiOfLength(10), fieldName, Pattern.compile("\\."), "-");
+ try {
+ processor.execute(ingestDocument);
+ fail("processor execution should have failed");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), containsString("not present as part of path [" + fieldName + "]"));
+ }
+ }
+
+ public void testGsubNullValue() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null));
+ Processor processor = new GsubProcessor(randomAsciiOfLength(10), "field", Pattern.compile("\\."), "-");
+ try {
+ processor.execute(ingestDocument);
+ fail("processor execution should have failed");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("field [field] is null, cannot match pattern."));
+ }
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorFactoryTests.java
new file mode 100644
index 0000000000..51eb989bed
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorFactoryTests.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+
+public class JoinProcessorFactoryTests extends ESTestCase {
+
+ public void testCreate() throws Exception {
+ JoinProcessor.Factory factory = new JoinProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", "field1");
+ config.put("separator", "-");
+ String processorTag = randomAsciiOfLength(10);
+ config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
+ JoinProcessor joinProcessor = factory.create(config);
+ assertThat(joinProcessor.getTag(), equalTo(processorTag));
+ assertThat(joinProcessor.getField(), equalTo("field1"));
+ assertThat(joinProcessor.getSeparator(), equalTo("-"));
+ }
+
+ public void testCreateNoFieldPresent() throws Exception {
+ JoinProcessor.Factory factory = new JoinProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ config.put("separator", "-");
+ try {
+ factory.create(config);
+ fail("factory create should have failed");
+ } catch (ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), equalTo("[field] required property is missing"));
+ }
+ }
+
+ public void testCreateNoSeparatorPresent() throws Exception {
+ JoinProcessor.Factory factory = new JoinProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", "field1");
+ try {
+ factory.create(config);
+ fail("factory create should have failed");
+ } catch (ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), equalTo("[separator] required property is missing"));
+ }
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorTests.java
new file mode 100644
index 0000000000..2aa3ac2e03
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/JoinProcessorTests.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.RandomDocumentPicks;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+
+public class JoinProcessorTests extends ESTestCase {
+
+ private static final String[] SEPARATORS = new String[]{"-", "_", "."};
+
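+ // JoinProcessor concatenates the elements of a list field into a single string, placing the
+ // separator between consecutive items; elements need not be strings, as testJoinIntegers shows
+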
+ public void testJoinStrings() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ int numItems = randomIntBetween(1, 10);
+ String separator = randomFrom(SEPARATORS);
+ List<String> fieldValue = new ArrayList<>(numItems);
+ String expectedResult = "";
+ for (int j = 0; j < numItems; j++) {
+ String value = randomAsciiOfLengthBetween(1, 10);
+ fieldValue.add(value);
+ expectedResult += value;
+ if (j < numItems - 1) {
+ expectedResult += separator;
+ }
+ }
+ String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue);
+ Processor processor = new JoinProcessor(randomAsciiOfLength(10), fieldName, separator);
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue(fieldName, String.class), equalTo(expectedResult));
+ }
+
+ public void testJoinIntegers() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ int numItems = randomIntBetween(1, 10);
+ String separator = randomFrom(SEPARATORS);
+ List<Integer> fieldValue = new ArrayList<>(numItems);
+ String expectedResult = "";
+ for (int j = 0; j < numItems; j++) {
+ int value = randomInt();
+ fieldValue.add(value);
+ expectedResult += value;
+ if (j < numItems - 1) {
+ expectedResult += separator;
+ }
+ }
+ String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue);
+ Processor processor = new JoinProcessor(randomAsciiOfLength(10), fieldName, separator);
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue(fieldName, String.class), equalTo(expectedResult));
+ }
+
+ public void testJoinNonListField() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
+ String fieldName = RandomDocumentPicks.randomFieldName(random());
+ ingestDocument.setFieldValue(fieldName, randomAsciiOfLengthBetween(1, 10));
+ Processor processor = new JoinProcessor(randomAsciiOfLength(10), fieldName, "-");
+ try {
+ processor.execute(ingestDocument);
+ fail("processor execute should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("field [" + fieldName + "] of type [java.lang.String] cannot be cast to [java.util.List]"));
+ }
+ }
+
+ public void testJoinNonExistingField() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
+ String fieldName = RandomDocumentPicks.randomFieldName(random());
+ Processor processor = new JoinProcessor(randomAsciiOfLength(10), fieldName, "-");
+ try {
+ processor.execute(ingestDocument);
+ fail("processor execute should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), containsString("not present as part of path [" + fieldName + "]"));
+ }
+ }
+
+ public void testJoinNullValue() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null));
+ Processor processor = new JoinProcessor(randomAsciiOfLength(10), "field", "-");
+ try {
+ processor.execute(ingestDocument);
+ fail("processor execute should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("field [field] is null, cannot join."));
+ }
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorFactoryTests.java
new file mode 100644
index 0000000000..32eefa0789
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorFactoryTests.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+
+public class LowercaseProcessorFactoryTests extends ESTestCase {
+
+ public void testCreate() throws Exception {
+ LowercaseProcessor.Factory factory = new LowercaseProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", "field1");
+ String processorTag = randomAsciiOfLength(10);
+ config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
+ LowercaseProcessor lowercaseProcessor = factory.create(config);
+ assertThat(lowercaseProcessor.getTag(), equalTo(processorTag));
+ assertThat(lowercaseProcessor.getField(), equalTo("field1"));
+ }
+
+ public void testCreateMissingField() throws Exception {
+ LowercaseProcessor.Factory factory = new LowercaseProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ try {
+ factory.create(config);
+ fail("factory create should have failed");
+ } catch(ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), equalTo("[field] required property is missing"));
+ }
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/plugins/SitePlugin.java b/core/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorTests.java
index 4c12f2095b..77e22b0ca1 100644
--- a/core/src/main/java/org/elasticsearch/plugins/SitePlugin.java
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/LowercaseProcessorTests.java
@@ -17,25 +17,18 @@
* under the License.
*/
-package org.elasticsearch.plugins;
+package org.elasticsearch.ingest.processor;
-/** A site-only plugin, just serves resources */
-final class SitePlugin extends Plugin {
- final String name;
- final String description;
-
- SitePlugin(String name, String description) {
- this.name = name;
- this.description = description;
- }
+import java.util.Locale;
+public class LowercaseProcessorTests extends AbstractStringProcessorTestCase {
@Override
- public String name() {
- return name;
+ protected AbstractStringProcessor newProcessor(String field) {
+ return new LowercaseProcessor(randomAsciiOfLength(10), field);
}
@Override
- public String description() {
- return description;
+ protected String expectedResult(String input) {
+ return input.toLowerCase(Locale.ROOT);
}
}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorFactoryTests.java
new file mode 100644
index 0000000000..5b03d28806
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorFactoryTests.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.TestTemplateService;
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.Before;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+
+public class RemoveProcessorFactoryTests extends ESTestCase {
+
+ private RemoveProcessor.Factory factory;
+
+ @Before
+ public void init() {
+ factory = new RemoveProcessor.Factory(TestTemplateService.instance());
+ }
+
+ public void testCreate() throws Exception {
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", "field1");
+ String processorTag = randomAsciiOfLength(10);
+ config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
+ RemoveProcessor removeProcessor = factory.create(config);
+ assertThat(removeProcessor.getTag(), equalTo(processorTag));
+ assertThat(removeProcessor.getField().execute(Collections.emptyMap()), equalTo("field1"));
+ }
+
+ public void testCreateMissingField() throws Exception {
+ Map<String, Object> config = new HashMap<>();
+ try {
+ factory.create(config);
+ fail("factory create should have failed");
+ } catch(ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), equalTo("[field] required property is missing"));
+ }
+ }
+
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorTests.java
new file mode 100644
index 0000000000..d134b0213e
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/RemoveProcessorTests.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.RandomDocumentPicks;
+import org.elasticsearch.ingest.TestTemplateService;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.HashMap;
+
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+
+public class RemoveProcessorTests extends ESTestCase {
+
+ public void testRemoveFields() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ String field = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument);
+ Processor processor = new RemoveProcessor(randomAsciiOfLength(10), new TestTemplateService.MockTemplate(field));
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.hasField(field), equalTo(false));
+ }
+
+ public void testRemoveNonExistingField() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
+ String fieldName = RandomDocumentPicks.randomFieldName(random());
+ Processor processor = new RemoveProcessor(randomAsciiOfLength(10), new TestTemplateService.MockTemplate(fieldName));
+ try {
+ processor.execute(ingestDocument);
+ fail("remove field should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), containsString("not present as part of path [" + fieldName + "]"));
+ }
+ }
+}
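Editor's note: testRemoveNonExistingField expects an IllegalArgumentException whose message names the missing path. Below is a toy model of that behavior over a flat map, offered as a sketch only; the real RemoveProcessor resolves a template for the field name and walks dotted paths inside an IngestDocument.

    import java.util.HashMap;
    import java.util.Map;

    // Toy model of the remove semantics pinned down above.
    class RemoveSketch {
        static void removeField(Map<String, Object> source, String path) {
            if (!source.containsKey(path)) {
                throw new IllegalArgumentException(
                    "field [" + path + "] not present as part of path [" + path + "]");
            }
            source.remove(path);
        }

        public static void main(String[] args) {
            Map<String, Object> source = new HashMap<>();
            source.put("field1", "value");
            removeField(source, "field1"); // ok
            try {
                removeField(source, "missing"); // like testRemoveNonExistingField
            } catch (IllegalArgumentException e) {
                System.out.println(e.getMessage());
            }
        }
    }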
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorFactoryTests.java
new file mode 100644
index 0000000000..ea6284f305
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorFactoryTests.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+
+public class RenameProcessorFactoryTests extends ESTestCase {
+
+ public void testCreate() throws Exception {
+ RenameProcessor.Factory factory = new RenameProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", "old_field");
+ config.put("to", "new_field");
+ String processorTag = randomAsciiOfLength(10);
+ config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
+ RenameProcessor renameProcessor = factory.create(config);
+ assertThat(renameProcessor.getTag(), equalTo(processorTag));
+ assertThat(renameProcessor.getOldFieldName(), equalTo("old_field"));
+ assertThat(renameProcessor.getNewFieldName(), equalTo("new_field"));
+ }
+
+ public void testCreateNoFieldPresent() throws Exception {
+ RenameProcessor.Factory factory = new RenameProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ config.put("to", "new_field");
+ try {
+ factory.create(config);
+ fail("factory create should have failed");
+ } catch(ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), equalTo("[field] required property is missing"));
+ }
+ }
+
+ public void testCreateNoToPresent() throws Exception {
+ RenameProcessor.Factory factory = new RenameProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", "old_field");
+ try {
+ factory.create(config);
+ fail("factory create should have failed");
+ } catch(ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), equalTo("[to] required property is missing"));
+ }
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java
new file mode 100644
index 0000000000..1f9bddaac6
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/RenameProcessorTests.java
@@ -0,0 +1,173 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.RandomDocumentPicks;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.nullValue;
+
+public class RenameProcessorTests extends ESTestCase {
+
+ public void testRename() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ String fieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument);
+ Object fieldValue = ingestDocument.getFieldValue(fieldName, Object.class);
+ String newFieldName;
+ do {
+ newFieldName = RandomDocumentPicks.randomFieldName(random());
+ } while (RandomDocumentPicks.canAddField(newFieldName, ingestDocument) == false || newFieldName.equals(fieldName));
+ Processor processor = new RenameProcessor(randomAsciiOfLength(10), fieldName, newFieldName);
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue(newFieldName, Object.class), equalTo(fieldValue));
+ }
+
+ public void testRenameArrayElement() throws Exception {
+ Map<String, Object> document = new HashMap<>();
+ List<String> list = new ArrayList<>();
+ list.add("item1");
+ list.add("item2");
+ list.add("item3");
+ document.put("list", list);
+ List<Map<String, String>> one = new ArrayList<>();
+ one.add(Collections.singletonMap("one", "one"));
+ one.add(Collections.singletonMap("two", "two"));
+ document.put("one", one);
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
+
+ Processor processor = new RenameProcessor(randomAsciiOfLength(10), "list.0", "item");
+ processor.execute(ingestDocument);
+ Object actualObject = ingestDocument.getSourceAndMetadata().get("list");
+ assertThat(actualObject, instanceOf(List.class));
+ @SuppressWarnings("unchecked")
+ List<String> actualList = (List<String>) actualObject;
+ assertThat(actualList.size(), equalTo(2));
+ assertThat(actualList.get(0), equalTo("item2"));
+ assertThat(actualList.get(1), equalTo("item3"));
+ actualObject = ingestDocument.getSourceAndMetadata().get("item");
+ assertThat(actualObject, instanceOf(String.class));
+ assertThat(actualObject, equalTo("item1"));
+
+ processor = new RenameProcessor(randomAsciiOfLength(10), "list.0", "list.3");
+ try {
+ processor.execute(ingestDocument);
+ fail("processor execute should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("[3] is out of bounds for array with length [2] as part of path [list.3]"));
+ assertThat(actualList.size(), equalTo(2));
+ assertThat(actualList.get(0), equalTo("item2"));
+ assertThat(actualList.get(1), equalTo("item3"));
+ }
+ }
+
+ public void testRenameNonExistingField() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
+ String fieldName = RandomDocumentPicks.randomFieldName(random());
+ Processor processor = new RenameProcessor(randomAsciiOfLength(10), fieldName, RandomDocumentPicks.randomFieldName(random()));
+ try {
+ processor.execute(ingestDocument);
+ fail("processor execute should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("field [" + fieldName + "] doesn't exist"));
+ }
+ }
+
+ public void testRenameNewFieldAlreadyExists() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ String fieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument);
+ Processor processor = new RenameProcessor(randomAsciiOfLength(10), RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument), fieldName);
+ try {
+ processor.execute(ingestDocument);
+ fail("processor execute should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("field [" + fieldName + "] already exists"));
+ }
+ }
+
+ public void testRenameExistingFieldNullValue() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
+ String fieldName = RandomDocumentPicks.randomFieldName(random());
+ ingestDocument.setFieldValue(fieldName, null);
+ String newFieldName = RandomDocumentPicks.randomFieldName(random());
+ Processor processor = new RenameProcessor(randomAsciiOfLength(10), fieldName, newFieldName);
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.hasField(fieldName), equalTo(false));
+ assertThat(ingestDocument.hasField(newFieldName), equalTo(true));
+ assertThat(ingestDocument.getFieldValue(newFieldName, Object.class), nullValue());
+ }
+
+ public void testRenameAtomicOperationSetFails() throws Exception {
+ Map<String, Object> source = new HashMap<String, Object>() {
+ @Override
+ public Object put(String key, Object value) {
+ if (key.equals("new_field")) {
+ throw new UnsupportedOperationException();
+ }
+ return super.put(key, value);
+ }
+ };
+ source.put("list", Collections.singletonList("item"));
+
+ IngestDocument ingestDocument = new IngestDocument(source, Collections.emptyMap());
+ Processor processor = new RenameProcessor(randomAsciiOfLength(10), "list", "new_field");
+ try {
+ processor.execute(ingestDocument);
+ fail("processor execute should have failed");
+ } catch(UnsupportedOperationException e) {
+ //the set failed, the old field has not been removed
+ assertThat(ingestDocument.getSourceAndMetadata().containsKey("list"), equalTo(true));
+ assertThat(ingestDocument.getSourceAndMetadata().containsKey("new_field"), equalTo(false));
+ }
+ }
+
+ public void testRenameAtomicOperationRemoveFails() throws Exception {
+ Map<String, Object> source = new HashMap<String, Object>() {
+ @Override
+ public Object remove(Object key) {
+ if (key.equals("list")) {
+ throw new UnsupportedOperationException();
+ }
+ return super.remove(key);
+ }
+ };
+ source.put("list", Collections.singletonList("item"));
+
+ IngestDocument ingestDocument = new IngestDocument(source, Collections.emptyMap());
+ Processor processor = new RenameProcessor(randomAsciiOfLength(10), "list", "new_field");
+ try {
+ processor.execute(ingestDocument);
+ fail("processor execute should have failed");
+ } catch (UnsupportedOperationException e) {
+ //the remove failed, the old field is still present and the new field was not added
+ assertThat(ingestDocument.getSourceAndMetadata().containsKey("list"), equalTo(true));
+ assertThat(ingestDocument.getSourceAndMetadata().containsKey("new_field"), equalTo(false));
+ }
+ }
+}
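Editor's note: the two atomic-operation tests encode a useful invariant: whether the put of the new key or the removal of the old key blows up, the document must still contain the old field and must not contain the new one. The sketch below shows one way to honor that invariant over a flat map; it is an assumption about ordering, as the real RenameProcessor works on IngestDocument paths and may sequence operations differently.

    import java.util.HashMap;
    import java.util.Map;

    // Sketch of the rename invariant: on any failure the old key survives
    // and the new key is absent.
    class RenameSketch {
        static void rename(Map<String, Object> source, String from, String to) {
            if (!source.containsKey(from)) {
                throw new IllegalArgumentException("field [" + from + "] doesn't exist");
            }
            if (source.containsKey(to)) {
                throw new IllegalArgumentException("field [" + to + "] already exists");
            }
            Object value = source.remove(from); // if this throws, nothing has changed yet
            try {
                source.put(to, value);
            } catch (RuntimeException e) {
                source.put(from, value); // roll back so the old field is still present
                throw e;
            }
        }

        public static void main(String[] args) {
            Map<String, Object> source = new HashMap<>();
            source.put("old_field", "value");
            rename(source, "old_field", "new_field");
            System.out.println(source); // {new_field=value}
        }
    }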
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/SetProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/SetProcessorFactoryTests.java
new file mode 100644
index 0000000000..1c3cf15e48
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/SetProcessorFactoryTests.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.TestTemplateService;
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.Before;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+
+public class SetProcessorFactoryTests extends ESTestCase {
+
+ private SetProcessor.Factory factory;
+
+ @Before
+ public void init() {
+ factory = new SetProcessor.Factory(TestTemplateService.instance());
+ }
+
+ public void testCreate() throws Exception {
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", "field1");
+ config.put("value", "value1");
+ String processorTag = randomAsciiOfLength(10);
+ config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
+ SetProcessor setProcessor = factory.create(config);
+ assertThat(setProcessor.getTag(), equalTo(processorTag));
+ assertThat(setProcessor.getField().execute(Collections.emptyMap()), equalTo("field1"));
+ assertThat(setProcessor.getValue().copyAndResolve(Collections.emptyMap()), equalTo("value1"));
+ }
+
+ public void testCreateNoFieldPresent() throws Exception {
+ Map<String, Object> config = new HashMap<>();
+ config.put("value", "value1");
+ try {
+ factory.create(config);
+ fail("factory create should have failed");
+ } catch(ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), equalTo("[field] required property is missing"));
+ }
+ }
+
+ public void testCreateNoValuePresent() throws Exception {
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", "field1");
+ try {
+ factory.create(config);
+ fail("factory create should have failed");
+ } catch(ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), equalTo("[value] required property is missing"));
+ }
+ }
+
+ public void testCreateNullValue() throws Exception {
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", "field1");
+ config.put("value", null);
+ try {
+ factory.create(config);
+ fail("factory create should have failed");
+ } catch(ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), equalTo("[value] required property is missing"));
+ }
+ }
+
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/SetProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/SetProcessorTests.java
new file mode 100644
index 0000000000..283825cdad
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/SetProcessorTests.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.RandomDocumentPicks;
+import org.elasticsearch.ingest.core.TemplateService;
+import org.elasticsearch.ingest.TestTemplateService;
+import org.elasticsearch.ingest.core.ValueSource;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.test.ESTestCase;
+import org.hamcrest.Matchers;
+
+import java.util.HashMap;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class SetProcessorTests extends ESTestCase {
+
+ public void testSetExistingFields() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ String fieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument);
+ Object fieldValue = RandomDocumentPicks.randomFieldValue(random());
+ Processor processor = createSetProcessor(fieldName, fieldValue);
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.hasField(fieldName), equalTo(true));
+ assertThat(ingestDocument.getFieldValue(fieldName, Object.class), equalTo(fieldValue));
+ }
+
+ public void testSetNewFields() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
+ //used to verify that there are no conflicts between subsequent fields going to be added
+ IngestDocument testIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
+ Object fieldValue = RandomDocumentPicks.randomFieldValue(random());
+ String fieldName = RandomDocumentPicks.addRandomField(random(), testIngestDocument, fieldValue);
+ Processor processor = createSetProcessor(fieldName, fieldValue);
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.hasField(fieldName), equalTo(true));
+ assertThat(ingestDocument.getFieldValue(fieldName, Object.class), equalTo(fieldValue));
+ }
+
+ public void testSetFieldsTypeMismatch() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
+ ingestDocument.setFieldValue("field", "value");
+ Processor processor = createSetProcessor("field.inner", "value");
+ try {
+ processor.execute(ingestDocument);
+ fail("processor execute should have failed");
+ } catch(IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("cannot set [inner] with parent object of type [java.lang.String] as part of path [field.inner]"));
+ }
+ }
+
+ public void testSetMetadata() throws Exception {
+ IngestDocument.MetaData randomMetaData = randomFrom(IngestDocument.MetaData.values());
+ Processor processor = createSetProcessor(randomMetaData.getFieldName(), "_value");
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue(randomMetaData.getFieldName(), String.class), Matchers.equalTo("_value"));
+ }
+
+ private static Processor createSetProcessor(String fieldName, Object fieldValue) {
+ TemplateService templateService = TestTemplateService.instance();
+ return new SetProcessor(randomAsciiOfLength(10), templateService.compile(fieldName), ValueSource.wrap(fieldValue, templateService));
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorFactoryTests.java
new file mode 100644
index 0000000000..3bd2f95e2b
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorFactoryTests.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+
+public class SplitProcessorFactoryTests extends ESTestCase {
+
+ public void testCreate() throws Exception {
+ SplitProcessor.Factory factory = new SplitProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", "field1");
+ config.put("separator", "\\.");
+ String processorTag = randomAsciiOfLength(10);
+ config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
+ SplitProcessor splitProcessor = factory.create(config);
+ assertThat(splitProcessor.getTag(), equalTo(processorTag));
+ assertThat(splitProcessor.getField(), equalTo("field1"));
+ assertThat(splitProcessor.getSeparator(), equalTo("\\."));
+ }
+
+ public void testCreateNoFieldPresent() throws Exception {
+ SplitProcessor.Factory factory = new SplitProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ config.put("separator", "\\.");
+ try {
+ factory.create(config);
+ fail("factory create should have failed");
+ } catch(ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), equalTo("[field] required property is missing"));
+ }
+ }
+
+ public void testCreateNoSeparatorPresent() throws Exception {
+ SplitProcessor.Factory factory = new SplitProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", "field1");
+ try {
+ factory.create(config);
+ fail("factory create should have failed");
+ } catch(ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), equalTo("[separator] required property is missing"));
+ }
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java
new file mode 100644
index 0000000000..e1c8a626a4
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/SplitProcessorTests.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.RandomDocumentPicks;
+import org.elasticsearch.ingest.core.IngestDocument;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+
+public class SplitProcessorTests extends ESTestCase {
+
+ public void testSplit() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+ String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "127.0.0.1");
+ Processor processor = new SplitProcessor(randomAsciiOfLength(10), fieldName, "\\.");
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.getFieldValue(fieldName, List.class), equalTo(Arrays.asList("127", "0", "0", "1")));
+ }
+
+ public void testSplitFieldNotFound() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
+ String fieldName = RandomDocumentPicks.randomFieldName(random());
+ Processor processor = new SplitProcessor(randomAsciiOfLength(10), fieldName, "\\.");
+ try {
+ processor.execute(ingestDocument);
+ fail("split processor should have failed");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), containsString("not present as part of path [" + fieldName + "]"));
+ }
+ }
+
+ public void testSplitNullValue() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null));
+ Processor processor = new SplitProcessor(randomAsciiOfLength(10), "field", "\\.");
+ try {
+ processor.execute(ingestDocument);
+ fail("split processor should have failed");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("field [field] is null, cannot split."));
+ }
+ }
+
+ public void testSplitNonStringValue() throws Exception {
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
+ String fieldName = RandomDocumentPicks.randomFieldName(random());
+ ingestDocument.setFieldValue(fieldName, randomInt());
+ Processor processor = new SplitProcessor(randomAsciiOfLength(10), fieldName, "\\.");
+ try {
+ processor.execute(ingestDocument);
+ fail("split processor should have failed");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), equalTo("field [" + fieldName + "] of type [java.lang.Integer] cannot be cast to [java.lang.String]"));
+ }
+ }
+
+ public void testSplitAppendable() throws Exception {
+ Map<String, Object> splitConfig = new HashMap<>();
+ splitConfig.put("field", "flags");
+ splitConfig.put("separator", "\\|");
+ Processor splitProcessor = (new SplitProcessor.Factory()).create(splitConfig);
+ Map<String, Object> source = new HashMap<>();
+ source.put("flags", "new|hot|super|fun|interesting");
+ IngestDocument ingestDocument = new IngestDocument(source, new HashMap<>());
+ splitProcessor.execute(ingestDocument);
+ @SuppressWarnings("unchecked")
+ List<String> flags = (List<String>)ingestDocument.getFieldValue("flags", List.class);
+ assertThat(flags, equalTo(Arrays.asList("new", "hot", "super", "fun", "interesting")));
+ ingestDocument.appendFieldValue("flags", "additional_flag");
+ assertThat(ingestDocument.getFieldValue("flags", List.class), equalTo(Arrays.asList("new", "hot", "super", "fun", "interesting", "additional_flag")));
+ }
+}
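Editor's note: testSplitAppendable is worth a second look. appendFieldValue can only succeed if the processor stored a mutable list; a fixed-size view such as Arrays.asList would throw on add, so the split result is presumably copied into a growable list first. A quick standalone demonstration of that distinction:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    // Why testSplitAppendable matters: Arrays.asList returns a fixed-size view,
    // so a processor that stored it directly would break appendFieldValue.
    class SplitListSketch {
        public static void main(String[] args) {
            List<String> fixed = Arrays.asList("new|hot".split("\\|"));
            try {
                fixed.add("additional_flag");
            } catch (UnsupportedOperationException e) {
                System.out.println("fixed-size view rejects add()");
            }
            List<String> growable = new ArrayList<>(Arrays.asList("new|hot".split("\\|")));
            growable.add("additional_flag"); // works, matching the assertion in the test
            System.out.println(growable);    // [new, hot, additional_flag]
        }
    }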
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorFactoryTests.java
new file mode 100644
index 0000000000..8012893bfc
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorFactoryTests.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+
+public class TrimProcessorFactoryTests extends ESTestCase {
+
+ public void testCreate() throws Exception {
+ TrimProcessor.Factory factory = new TrimProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", "field1");
+ String processorTag = randomAsciiOfLength(10);
+ config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
+ TrimProcessor trimProcessor = factory.create(config);
+ assertThat(trimProcessor.getTag(), equalTo(processorTag));
+ assertThat(trimProcessor.getField(), equalTo("field1"));
+ }
+
+ public void testCreateMissingField() throws Exception {
+ TrimProcessor.Factory factory = new TrimProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ try {
+ factory.create(config);
+ fail("factory create should have failed");
+ } catch(ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), equalTo("[field] required property is missing"));
+ }
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorTests.java
new file mode 100644
index 0000000000..a0e5fdeab2
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/TrimProcessorTests.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+public class TrimProcessorTests extends AbstractStringProcessorTestCase {
+
+ @Override
+ protected AbstractStringProcessor newProcessor(String field) {
+ return new TrimProcessor(randomAsciiOfLength(10), field);
+ }
+
+ @Override
+ protected String modifyInput(String input) {
+ String updatedFieldValue = "";
+ updatedFieldValue = addWhitespaces(updatedFieldValue);
+ updatedFieldValue += input;
+ updatedFieldValue = addWhitespaces(updatedFieldValue);
+ return updatedFieldValue;
+ }
+
+ @Override
+ protected String expectedResult(String input) {
+ return input.trim();
+ }
+
+ private static String addWhitespaces(String input) {
+ int prefixLength = randomIntBetween(0, 10);
+ for (int i = 0; i < prefixLength; i++) {
+ input += ' ';
+ }
+ return input;
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorFactoryTests.java
new file mode 100644
index 0000000000..914909f937
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorFactoryTests.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.processor;
+
+import org.elasticsearch.ingest.core.AbstractProcessorFactory;
+import org.elasticsearch.ingest.core.Processor;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+
+public class UppercaseProcessorFactoryTests extends ESTestCase {
+
+ public void testCreate() throws Exception {
+ UppercaseProcessor.Factory factory = new UppercaseProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ config.put("field", "field1");
+ String processorTag = randomAsciiOfLength(10);
+ config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
+ UppercaseProcessor uppercaseProcessor = factory.create(config);
+ assertThat(uppercaseProcessor.getTag(), equalTo(processorTag));
+ assertThat(uppercaseProcessor.getField(), equalTo("field1"));
+ }
+
+ public void testCreateMissingField() throws Exception {
+ UppercaseProcessor.Factory factory = new UppercaseProcessor.Factory();
+ Map<String, Object> config = new HashMap<>();
+ try {
+ factory.create(config);
+ fail("factory create should have failed");
+ } catch(ConfigurationPropertyException e) {
+ assertThat(e.getMessage(), equalTo("[field] required property is missing"));
+ }
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentTypeListener.java b/core/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorTests.java
index ceb79df17f..4ab61f7b5e 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentTypeListener.java
+++ b/core/src/test/java/org/elasticsearch/ingest/processor/UppercaseProcessorTests.java
@@ -17,17 +17,19 @@
* under the License.
*/
-package org.elasticsearch.index.mapper;
+package org.elasticsearch.ingest.processor;
-/**
- */
-public interface DocumentTypeListener {
+import java.util.Locale;
+
+public class UppercaseProcessorTests extends AbstractStringProcessorTestCase {
- /**
- * Invoked just before a new document type has been created.
- *
- * @param mapper The new document mapper of the type being added
- */
- void beforeCreate(DocumentMapper mapper);
+ @Override
+ protected AbstractStringProcessor newProcessor(String field) {
+ return new UppercaseProcessor(randomAsciiOfLength(10), field);
+ }
+ @Override
+ protected String expectedResult(String input) {
+ return input.toUpperCase(Locale.ROOT);
+ }
}
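Editor's note: Lowercase, Uppercase and Trim all reuse AbstractStringProcessorTestCase, a template-method base class whose source is not part of this diff. Only the hook names below come from the subclasses shown here; the flow is an inferred sketch, with expectedResult standing in for actually running the processor.

    // Inferred shape of the shared test base; the body is an assumption.
    abstract class StringProcessorContractSketch {
        protected String modifyInput(String input) { return input; } // Trim pads input here
        protected abstract String expectedResult(String input);      // toLowerCase / toUpperCase / trim

        final void check(String input) {
            String processed = expectedResult(modifyInput(input)); // stand-in for the processor run
            if (!processed.equals(expectedResult(input))) {
                throw new AssertionError("round trip mismatch for: " + input);
            }
        }

        public static void main(String[] args) {
            StringProcessorContractSketch trimLike = new StringProcessorContractSketch() {
                @Override protected String modifyInput(String input) { return "  " + input + "  "; }
                @Override protected String expectedResult(String input) { return input.trim(); }
            };
            trimLike.check("hello"); // trimming the padded input restores the original
        }
    }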
diff --git a/core/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java b/core/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java
index b661e78532..21be585862 100644
--- a/core/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java
+++ b/core/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java
@@ -59,7 +59,7 @@ public class SimpleMgetIT extends ESIntegTestCase {
assertThat(mgetResponse.getResponses()[1].getIndex(), is("nonExistingIndex"));
assertThat(mgetResponse.getResponses()[1].isFailed(), is(true));
assertThat(mgetResponse.getResponses()[1].getFailure().getMessage(), is("no such index"));
- assertThat(((ElasticsearchException)mgetResponse.getResponses()[1].getFailure().getFailure()).getIndex(), is("nonExistingIndex"));
+ assertThat(((ElasticsearchException) mgetResponse.getResponses()[1].getFailure().getFailure()).getIndex().getName(), is("nonExistingIndex"));
mgetResponse = client().prepareMultiGet()
@@ -69,7 +69,7 @@ public class SimpleMgetIT extends ESIntegTestCase {
assertThat(mgetResponse.getResponses()[0].getIndex(), is("nonExistingIndex"));
assertThat(mgetResponse.getResponses()[0].isFailed(), is(true));
assertThat(mgetResponse.getResponses()[0].getFailure().getMessage(), is("no such index"));
- assertThat(((ElasticsearchException)mgetResponse.getResponses()[0].getFailure().getFailure()).getIndex(), is("nonExistingIndex"));
+ assertThat(((ElasticsearchException) mgetResponse.getResponses()[0].getFailure().getFailure()).getIndex().getName(), is("nonExistingIndex"));
}
diff --git a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceSettingsTests.java b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceSettingsTests.java
new file mode 100644
index 0000000000..29f497458c
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceSettingsTests.java
@@ -0,0 +1,113 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.monitor.jvm;
+
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.threadpool.ThreadPool;
+
+import java.util.AbstractMap;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.function.BiFunction;
+import java.util.function.Consumer;
+
+import static org.hamcrest.CoreMatchers.allOf;
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.hamcrest.CoreMatchers.instanceOf;
+
+public class JvmGcMonitorServiceSettingsTests extends ESTestCase {
+
+ public void testEmptySettingsAreOkay() throws InterruptedException {
+ AtomicBoolean scheduled = new AtomicBoolean();
+ execute(Settings.EMPTY, (command, interval) -> { scheduled.set(true); return null; }, () -> assertTrue(scheduled.get()));
+ }
+
+ public void testDisabledSetting() throws InterruptedException {
+ Settings settings = Settings.builder().put("monitor.jvm.gc.enabled", "false").build();
+ AtomicBoolean scheduled = new AtomicBoolean();
+ execute(settings, (command, interval) -> { scheduled.set(true); return null; }, () -> assertFalse(scheduled.get()));
+ }
+
+ public void testNegativeSetting() throws InterruptedException {
+ String collector = randomAsciiOfLength(5);
+ Settings settings = Settings.builder().put("monitor.jvm.gc.collector." + collector + ".warn", "-" + randomTimeValue()).build();
+ execute(settings, (command, interval) -> null, t -> {
+ assertThat(t, instanceOf(IllegalArgumentException.class));
+ assertThat(t.getMessage(), allOf(containsString("invalid gc_threshold"), containsString("for [monitor.jvm.gc.collector." + collector + ".")));
+ }, true, null);
+ }
+
+ public void testMissingSetting() throws InterruptedException {
+ String collector = randomAsciiOfLength(5);
+ Set<AbstractMap.SimpleEntry<String, String>> entries = new HashSet<>();
+ entries.add(new AbstractMap.SimpleEntry<>("monitor.jvm.gc.collector." + collector + ".warn", randomTimeValue()));
+ entries.add(new AbstractMap.SimpleEntry<>("monitor.jvm.gc.collector." + collector + ".info", randomTimeValue()));
+ entries.add(new AbstractMap.SimpleEntry<>("monitor.jvm.gc.collector." + collector + ".debug", randomTimeValue()));
+ Settings.Builder builder = Settings.builder();
+
+ // drop a random setting or two
+ for (@SuppressWarnings("unchecked") AbstractMap.SimpleEntry<String, String> entry : randomSubsetOf(randomIntBetween(1, 2), entries.toArray(new AbstractMap.SimpleEntry[0]))) {
+ builder.put(entry.getKey(), entry.getValue());
+ }
+
+ // we should get an exception that a setting is missing
+ execute(builder.build(), (command, interval) -> null, t -> {
+ assertThat(t, instanceOf(IllegalArgumentException.class));
+ assertThat(t.getMessage(), containsString("missing gc_threshold for [monitor.jvm.gc.collector." + collector + "."));
+ }, true, null);
+ }
+
+ private static void execute(Settings settings, BiFunction<Runnable, TimeValue, ScheduledFuture<?>> scheduler, Runnable asserts) throws InterruptedException {
+ execute(settings, scheduler, null, false, asserts);
+ }
+
+ private static void execute(Settings settings, BiFunction<Runnable, TimeValue, ScheduledFuture<?>> scheduler, Consumer<Throwable> consumer, boolean constructionShouldFail, Runnable asserts) throws InterruptedException {
+ assert constructionShouldFail == (consumer != null);
+ assert constructionShouldFail == (asserts == null);
+ ThreadPool threadPool = null;
+ try {
+ threadPool = new ThreadPool(JvmGcMonitorServiceSettingsTests.class.getCanonicalName()) {
+ @Override
+ public ScheduledFuture<?> scheduleWithFixedDelay(Runnable command, TimeValue interval) {
+ return scheduler.apply(command, interval);
+ }
+ };
+ try {
+ JvmGcMonitorService service = new JvmGcMonitorService(settings, threadPool);
+ if (constructionShouldFail) {
+ fail("construction of jvm gc service should have failed");
+ }
+ service.doStart();
+ asserts.run();
+ service.doStop();
+ } catch (Throwable t) {
+ consumer.accept(t);
+ }
+ } finally {
+ ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS);
+ }
+ }
+
+}
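Editor's note: the tests above revolve around a single settings family: per collector there are warn, info and debug thresholds, and supplying only some of them is rejected with the "missing gc_threshold" error. A hedged example of the keys involved, built with plain strings since the corresponding Setting constants are not shown in this diff; "old" is an example collector name and the values are illustrative.

    import org.elasticsearch.common.settings.Settings;

    // Shape of the settings family the tests exercise. Omitting one of the
    // three thresholds triggers the failure tested in testMissingSetting.
    class GcSettingsSketch {
        static Settings example() {
            return Settings.builder()
                .put("monitor.jvm.gc.enabled", true)
                .put("monitor.jvm.gc.collector.old.warn", "10s")
                .put("monitor.jvm.gc.collector.old.info", "5s")
                .put("monitor.jvm.gc.collector.old.debug", "2s")
                .build();
        }
    }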
diff --git a/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java b/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java
index 4f4319e212..2edaad5c4b 100644
--- a/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java
+++ b/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java
@@ -29,7 +29,6 @@ import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
public class OsProbeTests extends ESTestCase {
@@ -56,22 +55,29 @@ public class OsProbeTests extends ESTestCase {
}
if (Constants.WINDOWS) {
// load average is unavailable on Windows
- if (loadAverage != null) {
- assertThat(loadAverage[0], equalTo((double) -1));
- assertThat(loadAverage[1], equalTo((double) -1));
- assertThat(loadAverage[2], equalTo((double) -1));
- }
+ assertNull(loadAverage);
} else if (Constants.LINUX) {
// we should be able to get the load average
assertNotNull(loadAverage);
assertThat(loadAverage[0], greaterThanOrEqualTo((double) 0));
assertThat(loadAverage[1], greaterThanOrEqualTo((double) 0));
assertThat(loadAverage[2], greaterThanOrEqualTo((double) 0));
- } else {
+ } else if (Constants.FREE_BSD) {
+ // five- and fifteen-minute load averages not available if linprocfs is not mounted at /compat/linux/proc
+ assertNotNull(loadAverage);
+ assertThat(loadAverage[0], greaterThanOrEqualTo((double) 0));
+ assertThat(loadAverage[1], anyOf(equalTo((double) -1), greaterThanOrEqualTo((double) 0)));
+ assertThat(loadAverage[2], anyOf(equalTo((double) -1), greaterThanOrEqualTo((double) 0)));
+ } else if (Constants.MAC_OS_X) {
// one minute load average is available, but 10-minute and 15-minute load averages are not
- // load average can be negative if not available or not computed yet, otherwise it should be >= 0
+ assertNotNull(loadAverage);
+ assertThat(loadAverage[0], greaterThanOrEqualTo((double) 0));
+ assertThat(loadAverage[1], equalTo((double) -1));
+ assertThat(loadAverage[2], equalTo((double) -1));
+ } else {
+ // unknown system, but the best case is that we have the one-minute load average
if (loadAverage != null) {
- assertThat(loadAverage[0], anyOf(lessThan((double) 0), greaterThanOrEqualTo((double) 0)));
+ assertThat(loadAverage[0], anyOf(equalTo((double) -1), greaterThanOrEqualTo((double) 0)));
assertThat(loadAverage[1], equalTo((double) -1));
assertThat(loadAverage[2], equalTo((double) -1));
}
diff --git a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java
index 4dda068ddd..204dcefea7 100644
--- a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java
+++ b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java
@@ -48,7 +48,7 @@ public class InternalSettingsPreparerTests extends ESTestCase {
@Before
public void createBaseEnvSettings() {
baseEnvSettings = settingsBuilder()
- .put("path.home", createTempDir())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.build();
}
@@ -60,24 +60,24 @@ public class InternalSettingsPreparerTests extends ESTestCase {
public void testEmptySettings() {
Settings settings = InternalSettingsPreparer.prepareSettings(Settings.EMPTY);
assertNotNull(settings.get("name")); // a name was set
- assertNotNull(settings.get(ClusterName.SETTING)); // a cluster name was set
+ assertNotNull(settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey())); // a cluster name was set
int size = settings.names().size();
Environment env = InternalSettingsPreparer.prepareEnvironment(baseEnvSettings, null);
settings = env.settings();
assertNotNull(settings.get("name")); // a name was set
- assertNotNull(settings.get(ClusterName.SETTING)); // a cluster name was set
+ assertNotNull(settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey())); // a cluster name was set
assertEquals(settings.toString(), size + 1 /* path.home is in the base settings */, settings.names().size());
- String home = baseEnvSettings.get("path.home");
+ String home = Environment.PATH_HOME_SETTING.get(baseEnvSettings);
String configDir = env.configFile().toString();
assertTrue(configDir, configDir.startsWith(home));
}
public void testClusterNameDefault() {
Settings settings = InternalSettingsPreparer.prepareSettings(Settings.EMPTY);
- assertEquals(ClusterName.DEFAULT.value(), settings.get(ClusterName.SETTING));
+ assertEquals(ClusterName.DEFAULT.value(), settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey()));
settings = InternalSettingsPreparer.prepareEnvironment(baseEnvSettings, null).settings();
- assertEquals(ClusterName.DEFAULT.value(), settings.get(ClusterName.SETTING));
+ assertEquals(ClusterName.DEFAULT.value(), settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey()));
}
public void testReplacePromptPlaceholders() {
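Editor's note: this hunk and several that follow reflect the same migration: raw string keys ("path.home", ClusterName.SETTING, EsExecutors.PROCESSORS, Node.HTTP_ENABLED) are replaced by typed Setting constants, read either via getKey() when building settings or via SETTING.get(settings) with the default applied. The standalone sketch below is a simplified model of that pattern; the real Setting class also carries scopes, parsers, validation and dynamic-update support.

    import java.util.HashMap;
    import java.util.Map;
    import java.util.function.Function;

    // Simplified model of the Setting<T> pattern this commit migrates to.
    class SettingSketch<T> {
        private final String key;
        private final T defaultValue;
        private final Function<String, T> parser;

        SettingSketch(String key, T defaultValue, Function<String, T> parser) {
            this.key = key;
            this.defaultValue = defaultValue;
            this.parser = parser;
        }

        String getKey() { return key; }            // for builder.put(SETTING.getKey(), ...)
        T get(Map<String, String> settings) {      // for SETTING.get(settings)
            String raw = settings.get(key);
            return raw == null ? defaultValue : parser.apply(raw);
        }

        public static void main(String[] args) {
            SettingSketch<String> pathHome =
                new SettingSketch<>("path.home", "", Function.identity());
            Map<String, String> settings = new HashMap<>();
            settings.put(pathHome.getKey(), "/tmp/es");
            System.out.println(pathHome.get(settings)); // /tmp/es
        }
    }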
diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java b/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java
index 93ba861dca..b643ba0d0a 100644
--- a/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java
+++ b/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java
@@ -88,8 +88,8 @@ public class SimpleNodesInfoIT extends ESIntegTestCase {
public void testAllocatedProcessors() throws Exception {
List<String> nodesIds = internalCluster().
startNodesAsync(
- Settings.builder().put(EsExecutors.PROCESSORS, 3).build(),
- Settings.builder().put(EsExecutors.PROCESSORS, 6).build()
+ Settings.builder().put(EsExecutors.PROCESSORS_SETTING.getKey(), 3).build(),
+ Settings.builder().put(EsExecutors.PROCESSORS_SETTING.getKey(), 6).build()
).get();
final String node_1 = nodesIds.get(0);
diff --git a/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsDisabledIT.java b/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsDisabledIT.java
index a0751dffac..d1f25a8fb4 100644
--- a/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsDisabledIT.java
+++ b/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsDisabledIT.java
@@ -20,10 +20,10 @@
package org.elasticsearch.options.detailederrors;
import org.apache.http.impl.client.HttpClients;
+import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.http.HttpServerTransport;
-import org.elasticsearch.http.netty.NettyHttpServerTransport;
-import org.elasticsearch.node.Node;
+import org.elasticsearch.http.HttpTransportSettings;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
@@ -43,8 +43,8 @@ public class DetailedErrorsDisabledIT extends ESIntegTestCase {
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.settingsBuilder()
.put(super.nodeSettings(nodeOrdinal))
- .put(Node.HTTP_ENABLED, true)
- .put(NettyHttpServerTransport.SETTING_HTTP_DETAILED_ERRORS_ENABLED, false)
+ .put(NetworkModule.HTTP_ENABLED.getKey(), true)
+ .put(HttpTransportSettings.SETTING_HTTP_DETAILED_ERRORS_ENABLED.getKey(), false)
.build();
}
diff --git a/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsEnabledIT.java b/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsEnabledIT.java
index 935b4e21ad..4333d81b2e 100644
--- a/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsEnabledIT.java
+++ b/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsEnabledIT.java
@@ -20,6 +20,7 @@
package org.elasticsearch.options.detailederrors;
import org.apache.http.impl.client.HttpClients;
+import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.http.HttpServerTransport;
import org.elasticsearch.node.Node;
@@ -42,7 +43,7 @@ public class DetailedErrorsEnabledIT extends ESIntegTestCase {
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.settingsBuilder()
.put(super.nodeSettings(nodeOrdinal))
- .put(Node.HTTP_ENABLED, true)
+ .put(NetworkModule.HTTP_ENABLED.getKey(), true)
.build();
}
diff --git a/core/src/test/java/org/elasticsearch/percolator/ConcurrentPercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/ConcurrentPercolatorIT.java
index 178f070927..5c04666375 100644
--- a/core/src/test/java/org/elasticsearch/percolator/ConcurrentPercolatorIT.java
+++ b/core/src/test/java/org/elasticsearch/percolator/ConcurrentPercolatorIT.java
@@ -210,7 +210,7 @@ public class ConcurrentPercolatorIT extends ESIntegTestCase {
throw new IllegalStateException("Illegal x=" + x);
}
assertThat(response.getId(), equalTo(id));
- assertThat(response.getVersion(), equalTo(1l));
+ assertThat(response.getVersion(), equalTo(1L));
}
} catch (Throwable t) {
exceptionsHolder.add(t);
diff --git a/core/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java
index abd158788f..31115a2565 100644
--- a/core/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java
+++ b/core/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java
@@ -96,7 +96,7 @@ public class MultiPercolatorIT extends ESIntegTestCase {
.execute().actionGet();
MultiPercolateResponse.Item item = response.getItems()[0];
- assertMatchCount(item.getResponse(), 2l);
+ assertMatchCount(item.getResponse(), 2L);
assertThat(item.getResponse().getMatches(), arrayWithSize(2));
assertThat(item.getErrorMessage(), nullValue());
assertThat(convertFromTextArray(item.getResponse().getMatches(), "test"), arrayContainingInAnyOrder("1", "4"));
@@ -104,18 +104,18 @@ public class MultiPercolatorIT extends ESIntegTestCase {
item = response.getItems()[1];
assertThat(item.getErrorMessage(), nullValue());
- assertMatchCount(item.getResponse(), 2l);
+ assertMatchCount(item.getResponse(), 2L);
assertThat(item.getResponse().getMatches(), arrayWithSize(2));
assertThat(convertFromTextArray(item.getResponse().getMatches(), "test"), arrayContainingInAnyOrder("2", "4"));
item = response.getItems()[2];
assertThat(item.getErrorMessage(), nullValue());
- assertMatchCount(item.getResponse(), 4l);
+ assertMatchCount(item.getResponse(), 4L);
assertThat(convertFromTextArray(item.getResponse().getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4"));
item = response.getItems()[3];
assertThat(item.getErrorMessage(), nullValue());
- assertMatchCount(item.getResponse(), 1l);
+ assertMatchCount(item.getResponse(), 1L);
assertThat(item.getResponse().getMatches(), arrayWithSize(1));
assertThat(convertFromTextArray(item.getResponse().getMatches(), "test"), arrayContaining("4"));
@@ -175,7 +175,7 @@ public class MultiPercolatorIT extends ESIntegTestCase {
.execute().actionGet();
MultiPercolateResponse.Item item = response.getItems()[0];
- assertMatchCount(item.getResponse(), 2l);
+ assertMatchCount(item.getResponse(), 2L);
assertThat(item.getResponse().getMatches(), arrayWithSize(2));
assertThat(item.getErrorMessage(), nullValue());
assertThat(convertFromTextArray(item.getResponse().getMatches(), "test"), arrayContainingInAnyOrder("1", "4"));
@@ -183,18 +183,18 @@ public class MultiPercolatorIT extends ESIntegTestCase {
item = response.getItems()[1];
assertThat(item.getErrorMessage(), nullValue());
- assertMatchCount(item.getResponse(), 2l);
+ assertMatchCount(item.getResponse(), 2L);
assertThat(item.getResponse().getMatches(), arrayWithSize(2));
assertThat(convertFromTextArray(item.getResponse().getMatches(), "test"), arrayContainingInAnyOrder("2", "4"));
item = response.getItems()[2];
assertThat(item.getErrorMessage(), nullValue());
- assertMatchCount(item.getResponse(), 4l);
+ assertMatchCount(item.getResponse(), 4L);
assertThat(convertFromTextArray(item.getResponse().getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4"));
item = response.getItems()[3];
assertThat(item.getErrorMessage(), nullValue());
- assertMatchCount(item.getResponse(), 1l);
+ assertMatchCount(item.getResponse(), 1L);
assertThat(item.getResponse().getMatches(), arrayWithSize(1));
assertThat(convertFromTextArray(item.getResponse().getMatches(), "test"), arrayContaining("4"));
diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java b/core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java
index c50c191f8f..ab0e86fd2a 100644
--- a/core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java
+++ b/core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java
@@ -23,7 +23,6 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.Version;
import org.elasticsearch.action.percolate.PercolateShardRequest;
-import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -90,13 +89,10 @@ public class PercolateDocumentParserTests extends ESTestCase {
HighlightPhase highlightPhase = new HighlightPhase(Settings.EMPTY, new Highlighters());
AggregatorParsers aggregatorParsers = new AggregatorParsers(Collections.emptySet(), Collections.emptySet());
AggregationPhase aggregationPhase = new AggregationPhase(new AggregationParseElement(aggregatorParsers), new AggregationBinaryParseElement(aggregatorParsers));
- MappingUpdatedAction mappingUpdatedAction = Mockito.mock(MappingUpdatedAction.class);
- parser = new PercolateDocumentParser(
- highlightPhase, new SortParseElement(), aggregationPhase, mappingUpdatedAction
- );
+ parser = new PercolateDocumentParser(highlightPhase, new SortParseElement(), aggregationPhase);
request = Mockito.mock(PercolateShardRequest.class);
- Mockito.when(request.shardId()).thenReturn(new ShardId(new Index("_index"), 0));
+ Mockito.when(request.shardId()).thenReturn(new ShardId("_index", "_na_", 0));
Mockito.when(request.documentType()).thenReturn("type");
}
@@ -108,7 +104,7 @@ public class PercolateDocumentParserTests extends ESTestCase {
.endObject();
Mockito.when(request.source()).thenReturn(source.bytes());
- PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", "_index", 0), mapperService);
+ PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", new Index("_index", "_na_"), 0), mapperService);
ParsedDocument parsedDocument = parser.parse(request, context, mapperService, queryShardContext);
assertThat(parsedDocument.rootDoc().get("field1"), equalTo("value1"));
}
@@ -127,7 +123,7 @@ public class PercolateDocumentParserTests extends ESTestCase {
.endObject();
Mockito.when(request.source()).thenReturn(source.bytes());
- PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", "_index", 0), mapperService);
+ PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", new Index("_index", "_na_"), 0), mapperService);
ParsedDocument parsedDocument = parser.parse(request, context, mapperService, queryShardContext);
assertThat(parsedDocument.rootDoc().get("field1"), equalTo("value1"));
assertThat(context.percolateQuery(), equalTo(new TermQuery(new Term("field1", "value1"))));
@@ -151,7 +147,7 @@ public class PercolateDocumentParserTests extends ESTestCase {
Mockito.when(request.source()).thenReturn(source.bytes());
Mockito.when(request.docSource()).thenReturn(docSource.bytes());
- PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", "_index", 0), mapperService);
+ PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", new Index("_index", "_na_"), 0), mapperService);
ParsedDocument parsedDocument = parser.parse(request, context, mapperService, queryShardContext);
assertThat(parsedDocument.rootDoc().get("field1"), equalTo("value1"));
assertThat(context.percolateQuery(), equalTo(new TermQuery(new Term("field1", "value1"))));
@@ -178,7 +174,7 @@ public class PercolateDocumentParserTests extends ESTestCase {
Mockito.when(request.source()).thenReturn(source.bytes());
Mockito.when(request.docSource()).thenReturn(docSource.bytes());
- PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", "_index", 0), mapperService);
+ PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", new Index("_index", "_na_"), 0), mapperService);
try {
parser.parse(request, context, mapperService, queryShardContext);
} catch (IllegalArgumentException e) {
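
Beyond dropping the MappingUpdatedAction dependency, this section switches every Index and ShardId construction to the variants that carry an index UUID, with "_na_" as the not-available placeholder. A hedged sketch of the equivalence the tests rely on, assuming the three-argument ShardId constructor delegates to new Index(name, uuid):

    import org.elasticsearch.index.Index;
    import org.elasticsearch.index.shard.ShardId;

    public class ShardIdDemo {
        public static void main(String[] args) {
            ShardId byParts = new ShardId("_index", "_na_", 0);            // name, uuid, shard
            ShardId byIndex = new ShardId(new Index("_index", "_na_"), 0); // same identity
            System.out.println(byParts.equals(byIndex)); // expected: true
        }
    }
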
diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityIT.java
index cb8ffb8e91..9378eaa754 100644
--- a/core/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityIT.java
+++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityIT.java
@@ -28,6 +28,7 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
@@ -113,7 +114,7 @@ public class PercolatorBackwardsCompatibilityIT extends ESIntegTestCase {
}
Settings.Builder nodeSettings = Settings.builder()
- .put("path.data", dataDir);
+ .put(Environment.PATH_DATA_SETTING.getKey(), dataDir);
internalCluster().startNode(nodeSettings.build());
ensureGreen(INDEX_NAME);
}
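
The path.data change above swaps a raw settings key for the typed constant imported from Environment. A small sketch, assuming Environment.PATH_DATA_SETTING resolves to the key "path.data":

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.env.Environment;

    public class SettingsKeyDemo {
        public static void main(String[] args) {
            Settings settings = Settings.builder()
                    // the constant is compile-checked; a raw "path.data" string is not
                    .put(Environment.PATH_DATA_SETTING.getKey(), "/tmp/data")
                    .build();
            System.out.println(settings.get("path.data")); // /tmp/data
        }
    }
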
diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java
index 0c16c98184..9bd5a74312 100644
--- a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java
+++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.percolator;
+import com.vividsolutions.jts.geom.Coordinate;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
@@ -30,6 +31,7 @@ import org.elasticsearch.action.percolate.PercolateSourceBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.Requests;
+import org.elasticsearch.common.geo.builders.ShapeBuilders;
import org.elasticsearch.common.lucene.search.function.CombineFunction;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.Settings.Builder;
@@ -71,6 +73,7 @@ import static org.elasticsearch.common.xcontent.XContentFactory.yamlBuilder;
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery;
+import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery;
import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
@@ -132,7 +135,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type")
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "b").endObject()))
.execute().actionGet();
- assertMatchCount(response, 2l);
+ assertMatchCount(response, 2L);
assertThat(response.getMatches(), arrayWithSize(2));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "4"));
@@ -141,7 +144,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type")
.setPercolateDoc(docBuilder().setDoc(yamlBuilder().startObject().field("field1", "c").endObject()))
.execute().actionGet();
- assertMatchCount(response, 2l);
+ assertMatchCount(response, 2L);
assertThat(response.getMatches(), arrayWithSize(2));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("2", "4"));
@@ -150,7 +153,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type")
.setPercolateDoc(docBuilder().setDoc(smileBuilder().startObject().field("field1", "b c").endObject()))
.execute().actionGet();
- assertMatchCount(response, 4l);
+ assertMatchCount(response, 4L);
assertThat(response.getMatches(), arrayWithSize(4));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4"));
@@ -159,16 +162,10 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type")
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "d").endObject()))
.execute().actionGet();
- assertMatchCount(response, 1l);
+ assertMatchCount(response, 1L);
assertThat(response.getMatches(), arrayWithSize(1));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContaining("4"));
- logger.info("--> Search dummy doc, percolate queries must not be included");
- SearchResponse searchResponse = client().prepareSearch("test", "test").execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
- assertThat(searchResponse.getHits().getAt(0).type(), equalTo("type"));
- assertThat(searchResponse.getHits().getAt(0).id(), equalTo("1"));
-
logger.info("--> Percolate non existing doc");
try {
client().preparePercolate()
@@ -181,7 +178,7 @@ public class PercolatorIT extends ESIntegTestCase {
}
public void testSimple2() throws Exception {
- assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=long,doc_values=true"));
+ assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=long,doc_values=true", "field2", "type=string"));
ensureGreen();
// introduce the doc
@@ -193,7 +190,7 @@ public class PercolatorIT extends ESIntegTestCase {
PercolateResponse response = client().preparePercolate().setSource(doc)
.setIndices("test").setDocumentType("type1")
.execute().actionGet();
- assertMatchCount(response, 0l);
+ assertMatchCount(response, 0L);
assertThat(response.getMatches(), emptyArray());
// add first query...
@@ -205,7 +202,7 @@ public class PercolatorIT extends ESIntegTestCase {
response = client().preparePercolate()
.setIndices("test").setDocumentType("type1")
.setSource(doc).execute().actionGet();
- assertMatchCount(response, 1l);
+ assertMatchCount(response, 1L);
assertThat(response.getMatches(), arrayWithSize(1));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContaining("test1"));
@@ -219,7 +216,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type1")
.setSource(doc)
.execute().actionGet();
- assertMatchCount(response, 2l);
+ assertMatchCount(response, 2L);
assertThat(response.getMatches(), arrayWithSize(2));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("test1", "test2"));
@@ -229,7 +226,7 @@ public class PercolatorIT extends ESIntegTestCase {
response = client().preparePercolate()
.setIndices("test").setDocumentType("type1")
.setSource(doc).execute().actionGet();
- assertMatchCount(response, 1l);
+ assertMatchCount(response, 1L);
assertThat(response.getMatches(), arrayWithSize(1));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContaining("test1"));
}
@@ -255,7 +252,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().startObject("doc").field("field1", "value").endObject().endObject()))
.setSize(100)
.execute().actionGet();
- assertMatchCount(response, 100l);
+ assertMatchCount(response, 100L);
assertThat(response.getMatches(), arrayWithSize(100));
logger.info("--> Percolate doc with routing=0");
@@ -265,7 +262,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setSize(100)
.setRouting("0")
.execute().actionGet();
- assertMatchCount(response, 50l);
+ assertMatchCount(response, 50L);
assertThat(response.getMatches(), arrayWithSize(50));
logger.info("--> Percolate doc with routing=1");
@@ -275,7 +272,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setSize(100)
.setRouting("1")
.execute().actionGet();
- assertMatchCount(response, 50l);
+ assertMatchCount(response, 50L);
assertThat(response.getMatches(), arrayWithSize(50));
}
@@ -342,7 +339,7 @@ public class PercolatorIT extends ESIntegTestCase {
.field("query", termQuery("source", "productizer"))
.endObject())
.execute().actionGet();
- assertMatchCount(percolate, 1l);
+ assertMatchCount(percolate, 1L);
assertThat(percolate.getMatches(), arrayWithSize(1));
}
@@ -364,7 +361,7 @@ public class PercolatorIT extends ESIntegTestCase {
SearchResponse countResponse = client().prepareSearch().setSize(0)
.setQuery(matchAllQuery()).setTypes(PercolatorService.TYPE_NAME)
.execute().actionGet();
- assertThat(countResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(countResponse.getHits().totalHits(), equalTo(1L));
for (int i = 0; i < 10; i++) {
@@ -372,7 +369,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type1")
.setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value1").endObject().endObject())
.execute().actionGet();
- assertMatchCount(percolate, 1l);
+ assertMatchCount(percolate, 1L);
assertThat(percolate.getMatches(), arrayWithSize(1));
}
@@ -382,7 +379,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setPreference("_local")
.setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value1").endObject().endObject())
.execute().actionGet();
- assertMatchCount(percolate, 1l);
+ assertMatchCount(percolate, 1L);
assertThat(percolate.getMatches(), arrayWithSize(1));
}
@@ -393,7 +390,7 @@ public class PercolatorIT extends ESIntegTestCase {
countResponse = client().prepareSearch().setSize(0)
.setQuery(matchAllQuery()).setTypes(PercolatorService.TYPE_NAME)
.execute().actionGet();
- assertHitCount(countResponse, 0l);
+ assertHitCount(countResponse, 0L);
}
public void testMultiplePercolators() throws Exception {
@@ -422,7 +419,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type1")
.setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value1").endObject().endObject())
.execute().actionGet();
- assertMatchCount(percolate, 1l);
+ assertMatchCount(percolate, 1L);
assertThat(percolate.getMatches(), arrayWithSize(1));
assertThat(convertFromTextArray(percolate.getMatches(), "test"), arrayContaining("kuku"));
@@ -430,7 +427,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type1")
.setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value2").endObject().endObject())
.execute().actionGet();
- assertMatchCount(percolate, 1l);
+ assertMatchCount(percolate, 1L);
assertThat(percolate.getMatches(), arrayWithSize(1));
assertThat(convertFromTextArray(percolate.getMatches(), "test"), arrayContaining("bubu"));
@@ -456,7 +453,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type1")
.setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value1").endObject().endObject())
.execute().actionGet();
- assertMatchCount(percolate, 1l);
+ assertMatchCount(percolate, 1L);
assertThat(percolate.getMatches(), arrayWithSize(1));
assertThat(convertFromTextArray(percolate.getMatches(), "test"), arrayContaining("kuku"));
@@ -473,7 +470,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type1")
.setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value2").endObject().endObject())
.execute().actionGet();
- assertMatchCount(percolate, 1l);
+ assertMatchCount(percolate, 1L);
assertThat(percolate.getMatches(), arrayWithSize(1));
assertThat(convertFromTextArray(percolate.getMatches(), "test"), arrayContaining("bubu"));
@@ -493,7 +490,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type1")
.setSource(sourceBuilder)
.execute().actionGet();
- assertMatchCount(percolate, 1l);
+ assertMatchCount(percolate, 1L);
assertThat(percolate.getMatches(), arrayWithSize(1));
assertThat(convertFromTextArray(percolate.getMatches(), "test"), arrayContaining("susu"));
@@ -506,7 +503,7 @@ public class PercolatorIT extends ESIntegTestCase {
.field("field1", "value1")
.endObject().endObject().endObject())
.execute().actionGet();
- assertMatchCount(percolate, 0l);
+ assertMatchCount(percolate, 0L);
assertThat(percolate.getMatches(), emptyArray());
}
@@ -525,16 +522,16 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type")
.setSource(jsonBuilder().startObject().startObject("doc").field("field", "val").endObject().endObject())
.execute().actionGet();
- assertMatchCount(response, 1l);
+ assertMatchCount(response, 1L);
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContaining("1"));
NumShards numShards = getNumShards("test");
IndicesStatsResponse indicesResponse = client().admin().indices().prepareStats("test").execute().actionGet();
assertThat(indicesResponse.getTotal().getPercolate().getCount(), equalTo((long) numShards.numPrimaries));
- assertThat(indicesResponse.getTotal().getPercolate().getCurrent(), equalTo(0l));
+ assertThat(indicesResponse.getTotal().getPercolate().getCurrent(), equalTo(0L));
assertThat(indicesResponse.getTotal().getPercolate().getNumQueries(), equalTo((long)numShards.dataCopies)); //number of copies
- assertThat(indicesResponse.getTotal().getPercolate().getMemorySizeInBytes(), equalTo(-1l));
+ assertThat(indicesResponse.getTotal().getPercolate().getMemorySizeInBytes(), equalTo(-1L));
NodesStatsResponse nodesResponse = client().admin().cluster().prepareNodesStats().execute().actionGet();
long percolateCount = 0;
@@ -548,15 +545,15 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type")
.setSource(jsonBuilder().startObject().startObject("doc").field("field", "val").endObject().endObject())
.execute().actionGet();
- assertMatchCount(response, 1l);
+ assertMatchCount(response, 1L);
assertThat(response.getMatches(), arrayWithSize(1));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContaining("1"));
indicesResponse = client().admin().indices().prepareStats().setPercolate(true).execute().actionGet();
assertThat(indicesResponse.getTotal().getPercolate().getCount(), equalTo((long) numShards.numPrimaries * 2));
- assertThat(indicesResponse.getTotal().getPercolate().getCurrent(), equalTo(0l));
+ assertThat(indicesResponse.getTotal().getPercolate().getCurrent(), equalTo(0L));
assertThat(indicesResponse.getTotal().getPercolate().getNumQueries(), equalTo((long)numShards.dataCopies)); //number of copies
- assertThat(indicesResponse.getTotal().getPercolate().getMemorySizeInBytes(), equalTo(-1l));
+ assertThat(indicesResponse.getTotal().getPercolate().getMemorySizeInBytes(), equalTo(-1L));
percolateCount = 0;
nodesResponse = client().admin().cluster().prepareNodesStats().execute().actionGet();
@@ -591,7 +588,7 @@ public class PercolatorIT extends ESIntegTestCase {
percolateCount += nodeStats.getIndices().getPercolate().getCount();
percolateSumTime += nodeStats.getIndices().getPercolate().getTimeInMillis();
}
- assertThat(percolateSumTime, greaterThan(0l));
+ assertThat(percolateSumTime, greaterThan(0L));
}
public void testPercolatingExistingDocs() throws Exception {
@@ -627,7 +624,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type")
.setGetRequest(Requests.getRequest("test").type("type").id("1"))
.execute().actionGet();
- assertMatchCount(response, 2l);
+ assertMatchCount(response, 2L);
assertThat(response.getMatches(), arrayWithSize(2));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "4"));
@@ -636,7 +633,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type")
.setGetRequest(Requests.getRequest("test").type("type").id("2"))
.execute().actionGet();
- assertMatchCount(response, 2l);
+ assertMatchCount(response, 2L);
assertThat(response.getMatches(), arrayWithSize(2));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("2", "4"));
@@ -645,7 +642,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type")
.setGetRequest(Requests.getRequest("test").type("type").id("3"))
.execute().actionGet();
- assertMatchCount(response, 4l);
+ assertMatchCount(response, 4L);
assertThat(response.getMatches(), arrayWithSize(4));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4"));
@@ -654,17 +651,9 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type")
.setGetRequest(Requests.getRequest("test").type("type").id("4"))
.execute().actionGet();
- assertMatchCount(response, 1l);
+ assertMatchCount(response, 1L);
assertThat(response.getMatches(), arrayWithSize(1));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContaining("4"));
-
- logger.info("--> Search normals docs, percolate queries must not be included");
- SearchResponse searchResponse = client().prepareSearch("test").execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(4L));
- assertThat(searchResponse.getHits().getAt(0).type(), equalTo("type"));
- assertThat(searchResponse.getHits().getAt(1).type(), equalTo("type"));
- assertThat(searchResponse.getHits().getAt(2).type(), equalTo("type"));
- assertThat(searchResponse.getHits().getAt(3).type(), equalTo("type"));
}
public void testPercolatingExistingDocs_routing() throws Exception {
@@ -700,7 +689,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type")
.setGetRequest(Requests.getRequest("test").type("type").id("1").routing("4"))
.execute().actionGet();
- assertMatchCount(response, 2l);
+ assertMatchCount(response, 2L);
assertThat(response.getMatches(), arrayWithSize(2));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "4"));
@@ -709,7 +698,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type")
.setGetRequest(Requests.getRequest("test").type("type").id("2").routing("3"))
.execute().actionGet();
- assertMatchCount(response, 2l);
+ assertMatchCount(response, 2L);
assertThat(response.getMatches(), arrayWithSize(2));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("2", "4"));
@@ -718,7 +707,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type")
.setGetRequest(Requests.getRequest("test").type("type").id("3").routing("2"))
.execute().actionGet();
- assertMatchCount(response, 4l);
+ assertMatchCount(response, 4L);
assertThat(response.getMatches(), arrayWithSize(4));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4"));
@@ -727,7 +716,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type")
.setGetRequest(Requests.getRequest("test").type("type").id("4").routing("1"))
.execute().actionGet();
- assertMatchCount(response, 1l);
+ assertMatchCount(response, 1L);
assertThat(response.getMatches(), arrayWithSize(1));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContaining("4"));
}
@@ -763,9 +752,9 @@ public class PercolatorIT extends ESIntegTestCase {
logger.info("--> Percolate existing doc with id 2 and version 1");
PercolateResponse response = client().preparePercolate()
.setIndices("test").setDocumentType("type")
- .setGetRequest(Requests.getRequest("test").type("type").id("2").version(1l))
+ .setGetRequest(Requests.getRequest("test").type("type").id("2").version(1L))
.execute().actionGet();
- assertMatchCount(response, 2l);
+ assertMatchCount(response, 2L);
assertThat(response.getMatches(), arrayWithSize(2));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("2", "4"));
@@ -773,7 +762,7 @@ public class PercolatorIT extends ESIntegTestCase {
try {
client().preparePercolate()
.setIndices("test").setDocumentType("type")
- .setGetRequest(Requests.getRequest("test").type("type").id("2").version(2l))
+ .setGetRequest(Requests.getRequest("test").type("type").id("2").version(2L))
.execute().actionGet();
fail("Error should have been thrown");
} catch (VersionConflictEngineException e) {
@@ -785,9 +774,9 @@ public class PercolatorIT extends ESIntegTestCase {
logger.info("--> Percolate existing doc with id 2 and version 2");
response = client().preparePercolate()
.setIndices("test").setDocumentType("type")
- .setGetRequest(Requests.getRequest("test").type("type").id("2").version(2l))
+ .setGetRequest(Requests.getRequest("test").type("type").id("2").version(2L))
.execute().actionGet();
- assertMatchCount(response, 2l);
+ assertMatchCount(response, 2L);
assertThat(response.getMatches(), arrayWithSize(2));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("2", "4"));
}
@@ -810,7 +799,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test1").setDocumentType("type")
.setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value").endObject().endObject())
.execute().actionGet();
- assertMatchCount(response, 5l);
+ assertMatchCount(response, 5L);
assertThat(response.getMatches(), arrayWithSize(5));
logger.info("--> Percolate doc to index test2");
@@ -818,7 +807,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test2").setDocumentType("type")
.setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value").endObject().endObject())
.execute().actionGet();
- assertMatchCount(response, 5l);
+ assertMatchCount(response, 5L);
assertThat(response.getMatches(), arrayWithSize(5));
logger.info("--> Percolate doc to index test1 and test2");
@@ -826,7 +815,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test1", "test2").setDocumentType("type")
.setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value").endObject().endObject())
.execute().actionGet();
- assertMatchCount(response, 10l);
+ assertMatchCount(response, 10L);
assertThat(response.getMatches(), arrayWithSize(10));
logger.info("--> Percolate doc to index test2 and test3, with ignore missing");
@@ -835,7 +824,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndicesOptions(IndicesOptions.lenientExpandOpen())
.setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value").endObject().endObject())
.execute().actionGet();
- assertMatchCount(response, 5l);
+ assertMatchCount(response, 5L);
assertThat(response.getMatches(), arrayWithSize(5));
logger.info("--> Adding aliases");
@@ -852,7 +841,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("my-alias1").setDocumentType("type")
.setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value").endObject().endObject())
.execute().actionGet();
- assertMatchCount(response, 10l);
+ assertMatchCount(response, 10L);
assertThat(response.getMatches(), arrayWithSize(10));
for (PercolateResponse.Match match : response) {
assertThat(match.getIndex().string(), anyOf(equalTo("test1"), equalTo("test2")));
@@ -863,7 +852,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("my-alias2").setDocumentType("type")
.setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value").endObject().endObject())
.execute().actionGet();
- assertMatchCount(response, 5l);
+ assertMatchCount(response, 5L);
assertThat(response.getMatches(), arrayWithSize(5));
for (PercolateResponse.Match match : response) {
assertThat(match.getIndex().string(), equalTo("test2"));
@@ -894,7 +883,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc("{}"))
.get();
assertNoFailures(response);
- assertThat(response.getCount(), equalTo(1l));
+ assertThat(response.getCount(), equalTo(1L));
assertThat(response.getMatches()[0].getId().string(), equalTo("1"));
response = client().preparePercolate()
@@ -903,7 +892,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc("{}"))
.get();
assertNoFailures(response);
- assertThat(response.getCount(), equalTo(1l));
+ assertThat(response.getCount(), equalTo(1L));
assertThat(response.getMatches()[0].getId().string(), equalTo("2"));
@@ -913,7 +902,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc("{}"))
.get();
assertNoFailures(response);
- assertThat(response.getCount(), equalTo(0l));
+ assertThat(response.getCount(), equalTo(0L));
// Testing that the alias filter and the filter specified while percolating are both taken into account.
response = client().preparePercolate()
@@ -923,7 +912,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setPercolateQuery(QueryBuilders.matchAllQuery())
.get();
assertNoFailures(response);
- assertThat(response.getCount(), equalTo(1l));
+ assertThat(response.getCount(), equalTo(1L));
assertThat(response.getMatches()[0].getId().string(), equalTo("1"));
response = client().preparePercolate()
@@ -933,7 +922,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setPercolateQuery(QueryBuilders.matchAllQuery())
.get();
assertNoFailures(response);
- assertThat(response.getCount(), equalTo(1l));
+ assertThat(response.getCount(), equalTo(1L));
assertThat(response.getMatches()[0].getId().string(), equalTo("2"));
@@ -944,7 +933,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setPercolateQuery(QueryBuilders.matchAllQuery())
.get();
assertNoFailures(response);
- assertThat(response.getCount(), equalTo(0l));
+ assertThat(response.getCount(), equalTo(0L));
}
public void testCountPercolation() throws Exception {
@@ -977,7 +966,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type").setOnlyCount(true)
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "b").endObject()))
.execute().actionGet();
- assertMatchCount(response, 2l);
+ assertMatchCount(response, 2L);
assertThat(response.getMatches(), nullValue());
logger.info("--> Count percolate doc with field1=c");
@@ -985,7 +974,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type").setOnlyCount(true)
.setPercolateDoc(docBuilder().setDoc(yamlBuilder().startObject().field("field1", "c").endObject()))
.execute().actionGet();
- assertMatchCount(response, 2l);
+ assertMatchCount(response, 2L);
assertThat(response.getMatches(), nullValue());
logger.info("--> Count percolate doc with field1=b c");
@@ -993,7 +982,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type").setOnlyCount(true)
.setPercolateDoc(docBuilder().setDoc(smileBuilder().startObject().field("field1", "b c").endObject()))
.execute().actionGet();
- assertMatchCount(response, 4l);
+ assertMatchCount(response, 4L);
assertThat(response.getMatches(), nullValue());
logger.info("--> Count percolate doc with field1=d");
@@ -1001,7 +990,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type").setOnlyCount(true)
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "d").endObject()))
.execute().actionGet();
- assertMatchCount(response, 1l);
+ assertMatchCount(response, 1L);
assertThat(response.getMatches(), nullValue());
logger.info("--> Count percolate non existing doc");
@@ -1048,7 +1037,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type").setOnlyCount(true)
.setGetRequest(Requests.getRequest("test").type("type").id("1"))
.execute().actionGet();
- assertMatchCount(response, 2l);
+ assertMatchCount(response, 2L);
assertThat(response.getMatches(), nullValue());
logger.info("--> Count percolate existing doc with id 2");
@@ -1056,7 +1045,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type").setOnlyCount(true)
.setGetRequest(Requests.getRequest("test").type("type").id("2"))
.execute().actionGet();
- assertMatchCount(response, 2l);
+ assertMatchCount(response, 2L);
assertThat(response.getMatches(), nullValue());
logger.info("--> Count percolate existing doc with id 3");
@@ -1064,7 +1053,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type").setOnlyCount(true)
.setGetRequest(Requests.getRequest("test").type("type").id("3"))
.execute().actionGet();
- assertMatchCount(response, 4l);
+ assertMatchCount(response, 4L);
assertThat(response.getMatches(), nullValue());
logger.info("--> Count percolate existing doc with id 4");
@@ -1072,7 +1061,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type").setOnlyCount(true)
.setGetRequest(Requests.getRequest("test").type("type").id("4"))
.execute().actionGet();
- assertMatchCount(response, 1l);
+ assertMatchCount(response, 1L);
assertThat(response.getMatches(), nullValue());
}
@@ -1275,7 +1264,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setPercolateDoc(docBuilder().setDoc("field", "value"))
.setPercolateQuery(QueryBuilders.functionScoreQuery(matchAllQuery(), fieldValueFactorFunction("level")))
.execute().actionGet();
- assertMatchCount(response, 2l);
+ assertMatchCount(response, 2L);
assertThat(response.getMatches()[0].getId().string(), equalTo("2"));
assertThat(response.getMatches()[0].getScore(), equalTo(2f));
assertThat(response.getMatches()[1].getId().string(), equalTo("1"));
@@ -1319,7 +1308,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setPercolateDoc(docBuilder().setDoc("field", "value"))
.setPercolateQuery(QueryBuilders.functionScoreQuery(matchAllQuery(), fieldValueFactorFunction("level")))
.execute().actionGet();
- assertMatchCount(response, 0l);
+ assertMatchCount(response, 0L);
}
public void testPercolatorWithHighlighting() throws Exception {
@@ -1357,7 +1346,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject()))
.setHighlightBuilder(new HighlightBuilder().field("field1"))
.execute().actionGet();
- assertMatchCount(response, 5l);
+ assertMatchCount(response, 5L);
assertThat(response.getMatches(), arrayWithSize(5));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4", "5"));
@@ -1383,7 +1372,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setHighlightBuilder(new HighlightBuilder().field("field1"))
.setPercolateQuery(matchAllQuery())
.execute().actionGet();
- assertMatchCount(response, 5l);
+ assertMatchCount(response, 5L);
assertThat(response.getMatches(), arrayWithSize(5));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4", "5"));
@@ -1442,7 +1431,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setPercolateQuery(functionScoreQuery(new WeightBuilder().setWeight(5.5f)))
.setSortByScore(true)
.execute().actionGet();
- assertMatchCount(response, 5l);
+ assertMatchCount(response, 5L);
assertThat(response.getMatches(), arrayWithSize(5));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4", "5"));
@@ -1474,7 +1463,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setPercolateQuery(functionScoreQuery(new WeightBuilder().setWeight(5.5f)))
.setSortByScore(true)
.execute().actionGet();
- assertMatchCount(response, 5l);
+ assertMatchCount(response, 5L);
assertThat(response.getMatches(), arrayWithSize(5));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4", "5"));
@@ -1512,7 +1501,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setPercolateQuery(functionScoreQuery(new WeightBuilder().setWeight(5.5f)))
.setSortByScore(true)
.execute().actionGet();
- assertMatchCount(response, 5l);
+ assertMatchCount(response, 5L);
assertThat(response.getMatches(), arrayWithSize(5));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "2", "3", "4", "5"));
@@ -1558,7 +1547,7 @@ public class PercolatorIT extends ESIntegTestCase {
.endObject())
.execute().actionGet();
assertNoFailures(percolate);
- assertMatchCount(percolate, 0l);
+ assertMatchCount(percolate, 0L);
}
public void testNestedPercolation() throws IOException {
@@ -1591,92 +1580,6 @@ public class PercolatorIT extends ESIntegTestCase {
assertEquals(response.getMatches()[0].getId().string(), "Q");
}
- public void testPercolationWithDynamicTemplates() throws Exception {
- assertAcked(prepareCreate("idx").addMapping("type", jsonBuilder().startObject().startObject("type")
- .field("dynamic", false)
- .startObject("properties")
- .startObject("custom")
- .field("dynamic", true)
- .field("type", "object")
- .field("include_in_all", false)
- .endObject()
- .endObject()
- .startArray("dynamic_templates")
- .startObject()
- .startObject("custom_fields")
- .field("path_match", "custom.*")
- .startObject("mapping")
- .field("index", "not_analyzed")
- .endObject()
- .endObject()
- .endObject()
- .endArray()
- .endObject().endObject()));
- ensureGreen("idx");
-
- try {
- client().prepareIndex("idx", PercolatorService.TYPE_NAME, "1")
- .setSource(jsonBuilder().startObject().field("query", QueryBuilders.queryStringQuery("color:red")).endObject())
- .get();
- fail();
- } catch (MapperParsingException e) {
- }
- refresh();
-
- PercolateResponse percolateResponse = client().preparePercolate().setDocumentType("type")
- .setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(jsonBuilder().startObject().startObject("custom").field("color", "blue").endObject().endObject()))
- .get();
-
- assertMatchCount(percolateResponse, 0l);
- assertThat(percolateResponse.getMatches(), arrayWithSize(0));
-
- // The previous percolate request introduced the custom.color field, so now we register the query again
- // and the field name `color` will be resolved to the `custom.color` field in the mapping via smart field resolution.
- client().prepareIndex("idx", PercolatorService.TYPE_NAME, "1")
- .setSource(jsonBuilder().startObject().field("query", QueryBuilders.queryStringQuery("custom.color:red")).endObject())
- .get();
- client().prepareIndex("idx", PercolatorService.TYPE_NAME, "2")
- .setSource(jsonBuilder().startObject().field("query", QueryBuilders.queryStringQuery("custom.color:blue")).field("type", "type").endObject())
- .get();
- refresh();
-
- // The second request will yield a match, since the query now resolves to the proper field during parsing.
- percolateResponse = client().preparePercolate().setDocumentType("type")
- .setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(jsonBuilder().startObject().startObject("custom").field("color", "blue").endObject().endObject()))
- .get();
-
- assertMatchCount(percolateResponse, 1l);
- assertThat(percolateResponse.getMatches()[0].getId().string(), equalTo("2"));
- }
-
- public void testUpdateMappingDynamicallyWhilePercolating() throws Exception {
- createIndex("test");
- ensureSearchable();
-
- // percolation source
- XContentBuilder percolateDocumentSource = XContentFactory.jsonBuilder().startObject().startObject("doc")
- .field("field1", 1)
- .field("field2", "value")
- .endObject().endObject();
-
- PercolateResponse response = client().preparePercolate()
- .setIndices("test").setDocumentType("type1")
- .setSource(percolateDocumentSource).execute().actionGet();
- assertAllSuccessful(response);
- assertMatchCount(response, 0l);
- assertThat(response.getMatches(), arrayWithSize(0));
-
- assertMappingOnMaster("test", "type1");
-
- GetMappingsResponse mappingsResponse = client().admin().indices().prepareGetMappings("test").get();
- assertThat(mappingsResponse.getMappings().get("test"), notNullValue());
- assertThat(mappingsResponse.getMappings().get("test").get("type1"), notNullValue());
- assertThat(mappingsResponse.getMappings().get("test").get("type1").getSourceAsMap().isEmpty(), is(false));
- Map<String, Object> properties = (Map<String, Object>) mappingsResponse.getMappings().get("test").get("type1").getSourceAsMap().get("properties");
- assertThat(((Map<String, String>) properties.get("field1")).get("type"), equalTo("long"));
- assertThat(((Map<String, String>) properties.get("field2")).get("type"), equalTo("string"));
- }
-
public void testDontReportDeletedPercolatorDocs() throws Exception {
client().admin().indices().prepareCreate("test").execute().actionGet();
ensureGreen();
@@ -1694,7 +1597,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field", "value").endObject()))
.setPercolateQuery(QueryBuilders.matchAllQuery())
.get();
- assertMatchCount(response, 1l);
+ assertMatchCount(response, 1L);
assertThat(response.getMatches(), arrayWithSize(1));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1"));
}
@@ -1741,7 +1644,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("my-type")
.setPercolateDoc(docBuilder().setDoc("timestamp", System.currentTimeMillis()))
.get();
- assertMatchCount(response, 2l);
+ assertMatchCount(response, 2L);
assertThat(response.getMatches(), arrayWithSize(2));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("1", "2"));
}
@@ -1896,7 +1799,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("doc")
.setPercolateDoc(docBuilder().setDoc(doc))
.get();
- assertMatchCount(response, 3l);
+ assertMatchCount(response, 3L);
Set<String> expectedIds = new HashSet<>();
expectedIds.add("q1");
expectedIds.add("q4");
@@ -1909,12 +1812,12 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("doc")
.setPercolateDoc(docBuilder().setDoc(doc))
.get();
- assertMatchCount(response, 3l);
+ assertMatchCount(response, 3L);
response = client().preparePercolate().setScore(randomBoolean()).setSortByScore(randomBoolean()).setOnlyCount(randomBoolean()).setSize(10).setPercolateQuery(QueryBuilders.termQuery("text", "foo"))
.setIndices("test").setDocumentType("doc")
.setPercolateDoc(docBuilder().setDoc(doc))
.get();
- assertMatchCount(response, 3l);
+ assertMatchCount(response, 3L);
}
public void testMapUnmappedFieldAsString() throws IOException{
@@ -1932,10 +1835,37 @@ public class PercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type")
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "value").endObject()))
.execute().actionGet();
- assertMatchCount(response1, 1l);
+ assertMatchCount(response1, 1L);
assertThat(response1.getMatches(), arrayWithSize(1));
}
+ public void testGeoShapeWithMapUnmappedFieldAsString() throws Exception {
+ // If index.percolator.map_unmapped_fields_as_string is set to true, unmapped fields are mapped as analyzed strings.
+ Settings.Builder settings = Settings.settingsBuilder()
+ .put(indexSettings())
+ .put("index.percolator.map_unmapped_fields_as_string", true);
+ assertAcked(prepareCreate("test")
+ .setSettings(settings)
+ .addMapping("type", "location", "type=geo_shape"));
+ client().prepareIndex("test", PercolatorService.TYPE_NAME, "1")
+ .setSource(jsonBuilder().startObject().field("query", geoShapeQuery("location", ShapeBuilders.newEnvelope(new Coordinate(0d, 50d), new Coordinate(2d, 40d)))).endObject())
+ .get();
+ refresh();
+
+ PercolateResponse response1 = client().preparePercolate()
+ .setIndices("test").setDocumentType("type")
+ .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject()
+ .startObject("location")
+ .field("type", "point")
+ .field("coordinates", Arrays.asList(1.44207d, 43.59959d))
+ .endObject()
+ .endObject()))
+ .execute().actionGet();
+ assertMatchCount(response1, 1L);
+ assertThat(response1.getMatches().length, equalTo(1));
+ assertThat(response1.getMatches()[0].getId().string(), equalTo("1"));
+ }
+
public void testFailNicelyWithInnerHits() throws Exception {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject()
.startObject("mapping")
diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorServiceTests.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorServiceTests.java
index d9c784da2b..05a4a156a0 100644
--- a/core/src/test/java/org/elasticsearch/percolator/PercolatorServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorServiceTests.java
@@ -42,22 +42,15 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.analysis.AnalysisService;
-import org.elasticsearch.index.analysis.AnalyzerProvider;
-import org.elasticsearch.index.analysis.CharFilterFactory;
-import org.elasticsearch.index.analysis.TokenFilterFactory;
-import org.elasticsearch.index.analysis.TokenizerFactory;
import org.elasticsearch.index.engine.Engine;
-import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
+import org.elasticsearch.index.percolator.ExtractQueryTermsService;
import org.elasticsearch.index.percolator.PercolatorFieldMapper;
import org.elasticsearch.index.percolator.PercolatorQueriesRegistry;
-import org.elasticsearch.index.percolator.ExtractQueryTermsService;
+import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
-import org.elasticsearch.index.similarity.SimilarityService;
-import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.test.ESTestCase;
@@ -65,7 +58,6 @@ import org.junit.After;
import org.junit.Before;
import java.io.IOException;
-import java.util.Collections;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.mock;
@@ -93,9 +85,12 @@ public class PercolatorServiceTests extends ESTestCase {
public void testCount() throws Exception {
PercolateContext context = mock(PercolateContext.class);
- when(context.shardTarget()).thenReturn(new SearchShardTarget("_id", "_index", 0));
+ when(context.shardTarget()).thenReturn(new SearchShardTarget("_id", new Index("_index", "_na_"), 0));
when(context.percolatorTypeFilter()).thenReturn(new MatchAllDocsQuery());
when(context.isOnlyCount()).thenReturn(true);
+ IndexShard shard = mock(IndexShard.class);
+ when(shard.shardId()).thenReturn(new ShardId("_index", "_na_", 0));
+ when(context.indexShard()).thenReturn(shard);
PercolatorQueriesRegistry registry = createRegistry();
addPercolatorQuery("1", new TermQuery(new Term("field", "brown")), indexWriter, registry);
@@ -118,9 +113,12 @@ public class PercolatorServiceTests extends ESTestCase {
public void testTopMatching() throws Exception {
PercolateContext context = mock(PercolateContext.class);
- when(context.shardTarget()).thenReturn(new SearchShardTarget("_id", "_index", 0));
+ when(context.shardTarget()).thenReturn(new SearchShardTarget("_id", new Index("_index", "_na_"), 0));
when(context.percolatorTypeFilter()).thenReturn(new MatchAllDocsQuery());
when(context.size()).thenReturn(10);
+ IndexShard shard = mock(IndexShard.class);
+ when(shard.shardId()).thenReturn(new ShardId("_index", "_na_", 0));
+ when(context.indexShard()).thenReturn(shard);
PercolatorQueriesRegistry registry = createRegistry();
addPercolatorQuery("1", new TermQuery(new Term("field", "brown")), indexWriter, registry);
@@ -158,7 +156,7 @@ public class PercolatorServiceTests extends ESTestCase {
}
PercolatorQueriesRegistry createRegistry() {
- Index index = new Index("_index");
+ Index index = new Index("_index", "_na_");
IndexSettings indexSettings = new IndexSettings(new IndexMetaData.Builder("_index").settings(
Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
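
PercolatorServiceTests now has to stub an IndexShard on the mocked PercolateContext, because the service reads the shard id through context.indexShard(). A self-contained sketch of that stubbing pattern (the setup is assumed, not taken verbatim from the service):

    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import org.elasticsearch.index.shard.IndexShard;
    import org.elasticsearch.index.shard.ShardId;

    public class MockShardDemo {
        public static void main(String[] args) {
            IndexShard shard = mock(IndexShard.class);
            when(shard.shardId()).thenReturn(new ShardId("_index", "_na_", 0));
            System.out.println(shard.shardId()); // prints [_index][0]
        }
    }
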
diff --git a/core/src/test/java/org/elasticsearch/percolator/RecoveryPercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/RecoveryPercolatorIT.java
index c93bedf9e8..4005754f31 100644
--- a/core/src/test/java/org/elasticsearch/percolator/RecoveryPercolatorIT.java
+++ b/core/src/test/java/org/elasticsearch/percolator/RecoveryPercolatorIT.java
@@ -99,7 +99,7 @@ public class RecoveryPercolatorIT extends ESIntegTestCase {
.field("field1", "value1")
.endObject().endObject())
.get();
- assertMatchCount(percolate, 1l);
+ assertMatchCount(percolate, 1L);
assertThat(percolate.getMatches(), arrayWithSize(1));
}
@@ -116,7 +116,7 @@ public class RecoveryPercolatorIT extends ESIntegTestCase {
.setRefresh(true)
.get();
- assertThat(client().prepareSearch().setSize(0).setTypes(PercolatorService.TYPE_NAME).setQuery(matchAllQuery()).get().getHits().totalHits(), equalTo(1l));
+ assertThat(client().prepareSearch().setSize(0).setTypes(PercolatorService.TYPE_NAME).setQuery(matchAllQuery()).get().getHits().totalHits(), equalTo(1L));
PercolateResponse percolate = client().preparePercolate()
.setIndices("test").setDocumentType("type1")
@@ -124,7 +124,7 @@ public class RecoveryPercolatorIT extends ESIntegTestCase {
.field("field1", "value1")
.endObject().endObject())
.get();
- assertMatchCount(percolate, 1l);
+ assertMatchCount(percolate, 1L);
assertThat(percolate.getMatches(), arrayWithSize(1));
internalCluster().rollingRestart();
@@ -134,7 +134,7 @@ public class RecoveryPercolatorIT extends ESIntegTestCase {
logger.info("Done Cluster Health, status " + clusterHealth.getStatus());
assertThat(clusterHealth.isTimedOut(), equalTo(false));
SearchResponse countResponse = client().prepareSearch().setSize(0).setTypes(PercolatorService.TYPE_NAME).setQuery(matchAllQuery()).get();
- assertHitCount(countResponse, 1l);
+ assertHitCount(countResponse, 1L);
DeleteIndexResponse actionGet = client().admin().indices().prepareDelete("test").get();
assertThat(actionGet.isAcknowledged(), equalTo(true));
@@ -142,7 +142,7 @@ public class RecoveryPercolatorIT extends ESIntegTestCase {
clusterHealth = client().admin().cluster().health(clusterHealthRequest().waitForYellowStatus().waitForActiveShards(1)).actionGet();
logger.info("Done Cluster Health, status " + clusterHealth.getStatus());
assertThat(clusterHealth.isTimedOut(), equalTo(false));
- assertThat(client().prepareSearch().setSize(0).setTypes(PercolatorService.TYPE_NAME).setQuery(matchAllQuery()).get().getHits().totalHits(), equalTo(0l));
+ assertThat(client().prepareSearch().setSize(0).setTypes(PercolatorService.TYPE_NAME).setQuery(matchAllQuery()).get().getHits().totalHits(), equalTo(0L));
percolate = client().preparePercolate()
.setIndices("test").setDocumentType("type1")
@@ -150,7 +150,7 @@ public class RecoveryPercolatorIT extends ESIntegTestCase {
.field("field1", "value1")
.endObject().endObject())
.get();
- assertMatchCount(percolate, 0l);
+ assertMatchCount(percolate, 0L);
assertThat(percolate.getMatches(), emptyArray());
logger.info("--> register a query");
@@ -162,7 +162,7 @@ public class RecoveryPercolatorIT extends ESIntegTestCase {
.setRefresh(true)
.get();
- assertThat(client().prepareSearch().setSize(0).setTypes(PercolatorService.TYPE_NAME).setQuery(matchAllQuery()).get().getHits().totalHits(), equalTo(1l));
+ assertThat(client().prepareSearch().setSize(0).setTypes(PercolatorService.TYPE_NAME).setQuery(matchAllQuery()).get().getHits().totalHits(), equalTo(1L));
percolate = client().preparePercolate()
.setIndices("test").setDocumentType("type1")
@@ -170,7 +170,7 @@ public class RecoveryPercolatorIT extends ESIntegTestCase {
.field("field1", "value1")
.endObject().endObject())
.get();
- assertMatchCount(percolate, 1l);
+ assertMatchCount(percolate, 1L);
assertThat(percolate.getMatches(), arrayWithSize(1));
}
@@ -202,7 +202,7 @@ public class RecoveryPercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type1")
.setSource(jsonBuilder().startObject().startObject("doc").field("field1", 95).endObject().endObject())
.get();
- assertMatchCount(response, 6l);
+ assertMatchCount(response, 6L);
assertThat(response.getMatches(), arrayWithSize(6));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContainingInAnyOrder("95", "96", "97", "98", "99", "100"));
@@ -216,7 +216,7 @@ public class RecoveryPercolatorIT extends ESIntegTestCase {
.setIndices("test").setDocumentType("type1")
.setSource(jsonBuilder().startObject().startObject("doc").field("field1", 100).endObject().endObject()).get();
- assertMatchCount(response, 1l);
+ assertMatchCount(response, 1L);
assertThat(response.getMatches(), arrayWithSize(1));
assertThat(response.getMatches()[0].getId().string(), equalTo("100"));
}
diff --git a/core/src/test/java/org/elasticsearch/percolator/TTLPercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/TTLPercolatorIT.java
index 43ca89923f..b1cd982e99 100644
--- a/core/src/test/java/org/elasticsearch/percolator/TTLPercolatorIT.java
+++ b/core/src/test/java/org/elasticsearch/percolator/TTLPercolatorIT.java
@@ -148,7 +148,7 @@ public class TTLPercolatorIT extends ESIntegTestCase {
.endObject()
.endObject()
).execute().actionGet();
- assertMatchCount(percolateResponse, 0l);
+ assertMatchCount(percolateResponse, 0L);
assertThat(percolateResponse.getMatches(), emptyArray());
}
diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginManagerCliTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginCliTests.java
index f16f9981d9..99d1c33821 100644
--- a/core/src/test/java/org/elasticsearch/plugins/PluginManagerCliTests.java
+++ b/core/src/test/java/org/elasticsearch/plugins/PluginCliTests.java
@@ -32,25 +32,25 @@ import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.is;
-public class PluginManagerCliTests extends CliToolTestCase {
+public class PluginCliTests extends CliToolTestCase {
public void testHelpWorks() throws IOException {
CliToolTestCase.CaptureOutputTerminal terminal = new CliToolTestCase.CaptureOutputTerminal();
- assertThat(new PluginManagerCliParser(terminal).execute(args("--help")), is(OK_AND_EXIT));
+ assertThat(new PluginCli(terminal).execute(args("--help")), is(OK_AND_EXIT));
assertTerminalOutputContainsHelpFile(terminal, "/org/elasticsearch/plugins/plugin.help");
terminal.getTerminalOutput().clear();
- assertThat(new PluginManagerCliParser(terminal).execute(args("install -h")), is(OK_AND_EXIT));
+ assertThat(new PluginCli(terminal).execute(args("install -h")), is(OK_AND_EXIT));
assertTerminalOutputContainsHelpFile(terminal, "/org/elasticsearch/plugins/plugin-install.help");
- for (String plugin : PluginManager.OFFICIAL_PLUGINS) {
+ for (String plugin : InstallPluginCommand.OFFICIAL_PLUGINS) {
assertThat(terminal.getTerminalOutput(), hasItem(containsString(plugin)));
}
terminal.getTerminalOutput().clear();
- assertThat(new PluginManagerCliParser(terminal).execute(args("remove --help")), is(OK_AND_EXIT));
+ assertThat(new PluginCli(terminal).execute(args("remove --help")), is(OK_AND_EXIT));
assertTerminalOutputContainsHelpFile(terminal, "/org/elasticsearch/plugins/plugin-remove.help");
terminal.getTerminalOutput().clear();
- assertThat(new PluginManagerCliParser(terminal).execute(args("list -h")), is(OK_AND_EXIT));
+ assertThat(new PluginCli(terminal).execute(args("list -h")), is(OK_AND_EXIT));
assertTerminalOutputContainsHelpFile(terminal, "/org/elasticsearch/plugins/plugin-list.help");
}
@@ -58,8 +58,7 @@ public class PluginManagerCliTests extends CliToolTestCase {
CliToolTestCase.CaptureOutputTerminal terminal = new CliToolTestCase.CaptureOutputTerminal();
Path tmpDir = createTempDir().resolve("foo deps");
String finalDir = tmpDir.toAbsolutePath().toUri().toURL().toString();
- logger.warn(finalDir);
- CliTool.ExitStatus execute = new PluginManagerCliParser(terminal).execute(args("install " + finalDir));
+ CliTool.ExitStatus execute = new PluginCli(terminal).execute("install", finalDir);
assertThat(execute.status(), is(IO_ERROR.status()));
}
}
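
A condensed sketch of the CLI rename applied in this file, using only names that appear
in the hunks above (terminal setup elided):

    // Old entry point: one argument string, parsed by the tool itself.
    //   CliTool.ExitStatus status = new PluginManagerCliParser(terminal).execute(args("install " + url));
    // New entry point: subcommand and its argument passed as separate strings.
    CliTool.ExitStatus status = new PluginCli(terminal).execute("install", url);
    // The official plugin list moves from PluginManager.OFFICIAL_PLUGINS
    // to InstallPluginCommand.OFFICIAL_PLUGINS.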
diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java
index deaff46f27..37a0f4e358 100644
--- a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java
+++ b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java
@@ -40,17 +40,13 @@ public class PluginInfoTests extends ESTestCase {
"version", "1.0",
"elasticsearch.version", Version.CURRENT.toString(),
"java.version", System.getProperty("java.specification.version"),
- "jvm", "true",
"classname", "FakePlugin");
PluginInfo info = PluginInfo.readFromProperties(pluginDir);
assertEquals("my_plugin", info.getName());
assertEquals("fake desc", info.getDescription());
assertEquals("1.0", info.getVersion());
assertEquals("FakePlugin", info.getClassname());
- assertTrue(info.isJvm());
assertTrue(info.isIsolated());
- assertFalse(info.isSite());
- assertNull(info.getUrl());
}
public void testReadFromPropertiesNameMissing() throws Exception {
@@ -94,27 +90,12 @@ public class PluginInfoTests extends ESTestCase {
}
}
- public void testReadFromPropertiesJvmAndSiteMissing() throws Exception {
- Path pluginDir = createTempDir().resolve("fake-plugin");
- PluginTestUtil.writeProperties(pluginDir,
- "description", "fake desc",
- "version", "1.0",
- "name", "my_plugin");
- try {
- PluginInfo.readFromProperties(pluginDir);
- fail("expected jvm or site exception");
- } catch (IllegalArgumentException e) {
- assertTrue(e.getMessage().contains("must be at least a jvm or site plugin"));
- }
- }
-
public void testReadFromPropertiesElasticsearchVersionMissing() throws Exception {
Path pluginDir = createTempDir().resolve("fake-plugin");
PluginTestUtil.writeProperties(pluginDir,
"description", "fake desc",
"name", "my_plugin",
- "version", "1.0",
- "jvm", "true");
+ "version", "1.0");
try {
PluginInfo.readFromProperties(pluginDir);
fail("expected missing elasticsearch version exception");
@@ -129,8 +110,7 @@ public class PluginInfoTests extends ESTestCase {
"description", "fake desc",
"name", "my_plugin",
"elasticsearch.version", Version.CURRENT.toString(),
- "version", "1.0",
- "jvm", "true");
+ "version", "1.0");
try {
PluginInfo.readFromProperties(pluginDir);
fail("expected missing java version exception");
@@ -148,8 +128,7 @@ public class PluginInfoTests extends ESTestCase {
"elasticsearch.version", Version.CURRENT.toString(),
"java.version", "1000000.0",
"classname", "FakePlugin",
- "version", "1.0",
- "jvm", "true");
+ "version", "1.0");
try {
PluginInfo.readFromProperties(pluginDir);
fail("expected incompatible java version exception");
@@ -167,8 +146,7 @@ public class PluginInfoTests extends ESTestCase {
"elasticsearch.version", Version.CURRENT.toString(),
"java.version", "1.7.0_80",
"classname", "FakePlugin",
- "version", "1.0",
- "jvm", "true");
+ "version", "1.0");
try {
PluginInfo.readFromProperties(pluginDir);
fail("expected bad java version format exception");
@@ -182,7 +160,6 @@ public class PluginInfoTests extends ESTestCase {
PluginTestUtil.writeProperties(pluginDir,
"description", "fake desc",
"version", "1.0",
- "jvm", "true",
"name", "my_plugin",
"elasticsearch.version", "bogus");
try {
@@ -199,7 +176,6 @@ public class PluginInfoTests extends ESTestCase {
"description", "fake desc",
"name", "my_plugin",
"version", "1.0",
- "jvm", "true",
"elasticsearch.version", Version.V_1_7_0.toString());
try {
PluginInfo.readFromProperties(pluginDir);
@@ -216,8 +192,7 @@ public class PluginInfoTests extends ESTestCase {
"name", "my_plugin",
"version", "1.0",
"elasticsearch.version", Version.CURRENT.toString(),
- "java.version", System.getProperty("java.specification.version"),
- "jvm", "true");
+ "java.version", System.getProperty("java.specification.version"));
try {
PluginInfo.readFromProperties(pluginDir);
fail("expected old elasticsearch version exception");
@@ -226,42 +201,13 @@ public class PluginInfoTests extends ESTestCase {
}
}
- public void testReadFromPropertiesSitePlugin() throws Exception {
- Path pluginDir = createTempDir().resolve("fake-plugin");
- Files.createDirectories(pluginDir.resolve("_site"));
- PluginTestUtil.writeProperties(pluginDir,
- "description", "fake desc",
- "name", "my_plugin",
- "version", "1.0",
- "site", "true");
- PluginInfo info = PluginInfo.readFromProperties(pluginDir);
- assertTrue(info.isSite());
- assertFalse(info.isJvm());
- assertEquals("NA", info.getClassname());
- }
-
- public void testReadFromPropertiesSitePluginWithoutSite() throws Exception {
- Path pluginDir = createTempDir().resolve("fake-plugin");
- PluginTestUtil.writeProperties(pluginDir,
- "description", "fake desc",
- "name", "my_plugin",
- "version", "1.0",
- "site", "true");
- try {
- PluginInfo.readFromProperties(pluginDir);
- fail("didn't get expected exception");
- } catch (IllegalArgumentException e) {
- assertTrue(e.getMessage().contains("site plugin but has no '_site"));
- }
- }
-
public void testPluginListSorted() {
PluginsAndModules pluginsInfo = new PluginsAndModules();
- pluginsInfo.addPlugin(new PluginInfo("c", "foo", true, "dummy", true, "dummyclass", true));
- pluginsInfo.addPlugin(new PluginInfo("b", "foo", true, "dummy", true, "dummyclass", true));
- pluginsInfo.addPlugin(new PluginInfo("e", "foo", true, "dummy", true, "dummyclass", true));
- pluginsInfo.addPlugin(new PluginInfo("a", "foo", true, "dummy", true, "dummyclass", true));
- pluginsInfo.addPlugin(new PluginInfo("d", "foo", true, "dummy", true, "dummyclass", true));
+ pluginsInfo.addPlugin(new PluginInfo("c", "foo", "dummy", "dummyclass", true));
+ pluginsInfo.addPlugin(new PluginInfo("b", "foo", "dummy", "dummyclass", true));
+ pluginsInfo.addPlugin(new PluginInfo("e", "foo", "dummy", "dummyclass", true));
+ pluginsInfo.addPlugin(new PluginInfo("a", "foo", "dummy", "dummyclass", true));
+ pluginsInfo.addPlugin(new PluginInfo("d", "foo", "dummy", "dummyclass", true));
final List<PluginInfo> infos = pluginsInfo.getPluginInfos();
List<String> names = infos.stream().map((input) -> input.getName()).collect(Collectors.toList());
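
The PluginInfo constructor shrinks from seven arguments to five: with site plugins gone,
the jvm/site booleans and the isJvm()/isSite()/getUrl() accessors removed above no longer
exist. Argument order as inferred from the assertions earlier in this file:

    // Old: name, description, site, version, jvm, classname, isolated
    //   new PluginInfo("a", "foo", true, "dummy", true, "dummyclass", true);
    // New: name, description, version, classname, isolated
    PluginInfo info = new PluginInfo("a", "foo", "dummy", "dummyclass", true);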
diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java
index 5d8605ab19..d3c1f1b8ba 100644
--- a/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java
@@ -88,7 +88,7 @@ public class PluginsServiceTests extends ESTestCase {
public void testAdditionalSettings() {
Settings settings = Settings.builder()
- .put("path.home", createTempDir())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put("my.setting", "test")
.put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), IndexModule.Type.SIMPLEFS.getSettingsKey()).build();
PluginsService service = newPluginsService(settings, AdditionalSettingsPlugin1.class);
@@ -100,7 +100,7 @@ public class PluginsServiceTests extends ESTestCase {
public void testAdditionalSettingsClash() {
Settings settings = Settings.builder()
- .put("path.home", createTempDir()).build();
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build();
PluginsService service = newPluginsService(settings, AdditionalSettingsPlugin1.class, AdditionalSettingsPlugin2.class);
try {
service.updatedSettings();
@@ -115,7 +115,7 @@ public class PluginsServiceTests extends ESTestCase {
public void testOnModuleExceptionsArePropagated() {
Settings settings = Settings.builder()
- .put("path.home", createTempDir()).build();
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build();
PluginsService service = newPluginsService(settings, FailOnModule.class);
try {
service.processModule(new BrokenModule());
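
These hunks follow the typed-settings migration visible throughout the patch: raw string
keys such as "path.home" give way to Setting constants whose getKey() supplies the string,
so a renamed setting breaks at compile time rather than silently at runtime. The pattern,
with names from the hunks above:

    Settings settings = Settings.builder()
        .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) // was .put("path.home", ...)
        .build();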
diff --git a/core/src/test/java/org/elasticsearch/plugins/SitePluginIT.java b/core/src/test/java/org/elasticsearch/plugins/SitePluginIT.java
deleted file mode 100644
index e2df2518f1..0000000000
--- a/core/src/test/java/org/elasticsearch/plugins/SitePluginIT.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.plugins;
-
-import org.apache.http.client.config.RequestConfig;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.HttpClients;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.http.HttpServerTransport;
-import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
-import org.elasticsearch.test.ESIntegTestCase.Scope;
-import org.elasticsearch.test.rest.client.http.HttpRequestBuilder;
-import org.elasticsearch.test.rest.client.http.HttpResponse;
-
-import java.nio.file.Path;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Locale;
-
-import static org.elasticsearch.common.settings.Settings.settingsBuilder;
-import static org.elasticsearch.rest.RestStatus.FORBIDDEN;
-import static org.elasticsearch.rest.RestStatus.MOVED_PERMANENTLY;
-import static org.elasticsearch.rest.RestStatus.NOT_FOUND;
-import static org.elasticsearch.rest.RestStatus.OK;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasStatus;
-import static org.hamcrest.Matchers.containsString;
-
-/**
- * We want to test site plugins
- */
-@ClusterScope(scope = Scope.SUITE, numDataNodes = 1)
-public class SitePluginIT extends ESIntegTestCase {
- @Override
- protected Settings nodeSettings(int nodeOrdinal) {
- Path pluginDir = getDataPath("/org/elasticsearch/test_plugins");
- return settingsBuilder()
- .put(super.nodeSettings(nodeOrdinal))
- .put("path.plugins", pluginDir.toAbsolutePath())
- .put("force.http.enabled", true)
- .build();
- }
-
- @Override
- public HttpRequestBuilder httpClient() {
- RequestConfig.Builder builder = RequestConfig.custom().setRedirectsEnabled(false);
- CloseableHttpClient httpClient = HttpClients.custom().setDefaultRequestConfig(builder.build()).build();
- return new HttpRequestBuilder(httpClient).httpTransport(internalCluster().getDataNodeInstance(HttpServerTransport.class));
- }
-
- public void testRedirectSitePlugin() throws Exception {
- // We use an HTTP Client to test redirection
- HttpResponse response = httpClient().method("GET").path("/_plugin/dummy").execute();
- assertThat(response, hasStatus(MOVED_PERMANENTLY));
- assertThat(response.getBody(), containsString("/_plugin/dummy/"));
-
- // We test the real URL
- response = httpClient().method("GET").path("/_plugin/dummy/").execute();
- assertThat(response, hasStatus(OK));
- assertThat(response.getBody(), containsString("<title>Dummy Site Plugin</title>"));
- }
-
- /**
- * Test direct access to an existing file (index.html)
- */
- public void testAnyPage() throws Exception {
- HttpResponse response = httpClient().path("/_plugin/dummy/index.html").execute();
- assertThat(response, hasStatus(OK));
- assertThat(response.getBody(), containsString("<title>Dummy Site Plugin</title>"));
- }
-
- /**
- * Test normalizing of path
- */
- public void testThatPathsAreNormalized() throws Exception {
- // more info: https://www.owasp.org/index.php/Path_Traversal
- List<String> notFoundUris = new ArrayList<>();
- notFoundUris.add("/_plugin/dummy/../../../../../log4j.properties");
- notFoundUris.add("/_plugin/dummy/../../../../../%00log4j.properties");
- notFoundUris.add("/_plugin/dummy/..%c0%af..%c0%af..%c0%af..%c0%af..%c0%aflog4j.properties");
- notFoundUris.add("/_plugin/dummy/%2E%2E/%2E%2E/%2E%2E/%2E%2E/index.html");
- notFoundUris.add("/_plugin/dummy/%2e%2e/%2e%2e/%2e%2e/%2e%2e/index.html");
- notFoundUris.add("/_plugin/dummy/%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2findex.html");
- notFoundUris.add("/_plugin/dummy/%2E%2E/%2E%2E/%2E%2E/%2E%2E/index.html");
- notFoundUris.add("/_plugin/dummy/..%5C..%5C..%5C..%5C..%5Clog4j.properties");
-
- for (String uri : notFoundUris) {
- HttpResponse response = httpClient().path(uri).execute();
- String message = String.format(Locale.ROOT, "URI [%s] expected to be not found", uri);
- assertThat(message, response, hasStatus(NOT_FOUND));
- }
-
- // using relative path inside of the plugin should work
- HttpResponse response = httpClient().path("/_plugin/dummy/dir1/../dir1/../index.html").execute();
- assertThat(response, hasStatus(OK));
- assertThat(response.getBody(), containsString("<title>Dummy Site Plugin</title>"));
- }
-
- /**
- * Test case for #4845: https://github.com/elasticsearch/elasticsearch/issues/4845
- * Serving _site plugins do not pick up on index.html for sub directories
- */
- public void testWelcomePageInSubDirs() throws Exception {
- HttpResponse response = httpClient().path("/_plugin/subdir/dir/").execute();
- assertThat(response, hasStatus(OK));
- assertThat(response.getBody(), containsString("<title>Dummy Site Plugin (subdir)</title>"));
-
- response = httpClient().path("/_plugin/subdir/dir_without_index/").execute();
- assertThat(response, hasStatus(FORBIDDEN));
-
- response = httpClient().path("/_plugin/subdir/dir_without_index/page.html").execute();
- assertThat(response, hasStatus(OK));
- assertThat(response.getBody(), containsString("<title>Dummy Site Plugin (page)</title>"));
- }
-}
diff --git a/core/src/test/java/org/elasticsearch/plugins/SitePluginRelativePathConfigIT.java b/core/src/test/java/org/elasticsearch/plugins/SitePluginRelativePathConfigIT.java
deleted file mode 100644
index 1cde90d698..0000000000
--- a/core/src/test/java/org/elasticsearch/plugins/SitePluginRelativePathConfigIT.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.plugins;
-
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.HttpClients;
-import org.elasticsearch.common.io.PathUtils;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.http.HttpServerTransport;
-import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
-import org.elasticsearch.test.rest.client.http.HttpRequestBuilder;
-import org.elasticsearch.test.rest.client.http.HttpResponse;
-
-import java.nio.file.Path;
-
-import static org.apache.lucene.util.Constants.WINDOWS;
-import static org.elasticsearch.common.settings.Settings.settingsBuilder;
-import static org.elasticsearch.rest.RestStatus.OK;
-import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasStatus;
-
-@ClusterScope(scope = SUITE, numDataNodes = 1)
-public class SitePluginRelativePathConfigIT extends ESIntegTestCase {
- private final Path root = PathUtils.get(".").toAbsolutePath().getRoot();
-
- @Override
- protected Settings nodeSettings(int nodeOrdinal) {
- String cwdToRoot = getRelativePath(PathUtils.get(".").toAbsolutePath());
- Path pluginDir = PathUtils.get(cwdToRoot, relativizeToRootIfNecessary(getDataPath("/org/elasticsearch/test_plugins")).toString());
-
- Path tempDir = createTempDir();
- boolean useRelativeInMiddleOfPath = randomBoolean();
- if (useRelativeInMiddleOfPath) {
- pluginDir = PathUtils.get(tempDir.toString(), getRelativePath(tempDir), pluginDir.toString());
- }
-
- return settingsBuilder()
- .put(super.nodeSettings(nodeOrdinal))
- .put("path.plugins", pluginDir)
- .put("force.http.enabled", true)
- .build();
- }
-
- public void testThatRelativePathsDontAffectPlugins() throws Exception {
- HttpResponse response = httpClient().method("GET").path("/_plugin/dummy/").execute();
- assertThat(response, hasStatus(OK));
- }
-
- private Path relativizeToRootIfNecessary(Path path) {
- if (WINDOWS) {
- return root.relativize(path);
- }
- return path;
- }
-
- private String getRelativePath(Path path) {
- StringBuilder sb = new StringBuilder();
- for (int i = 0; i < path.getNameCount(); i++) {
- sb.append("..");
- sb.append(path.getFileSystem().getSeparator());
- }
-
- return sb.toString();
- }
-
- @Override
- public HttpRequestBuilder httpClient() {
- CloseableHttpClient httpClient = HttpClients.createDefault();
- return new HttpRequestBuilder(httpClient).httpTransport(internalCluster().getDataNodeInstance(HttpServerTransport.class));
- }
-}
diff --git a/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderRestAction.java b/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderRestAction.java
index 4b1645a4ec..39432bd01e 100644
--- a/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderRestAction.java
+++ b/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderRestAction.java
@@ -33,7 +33,7 @@ public class TestResponseHeaderRestAction extends BaseRestHandler {
@Inject
public TestResponseHeaderRestAction(Settings settings, RestController controller, Client client) {
- super(settings, controller, client);
+ super(settings, client);
controller.registerHandler(RestRequest.Method.GET, "/_protected", this);
}
diff --git a/core/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java b/core/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java
index 3b61edf292..61dca3f37a 100644
--- a/core/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java
+++ b/core/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java
@@ -57,7 +57,7 @@ public class FullRollingRestartIT extends ESIntegTestCase {
}
public void testFullRollingRestart() throws Exception {
- Settings settings = Settings.builder().put(ZenDiscovery.SETTING_JOIN_TIMEOUT, "30s").build();
+ Settings settings = Settings.builder().put(ZenDiscovery.JOIN_TIMEOUT_SETTING.getKey(), "30s").build();
internalCluster().startNode(settings);
createIndex("test");
@@ -91,7 +91,7 @@ public class FullRollingRestartIT extends ESIntegTestCase {
logger.info("--> refreshing and checking data");
refresh();
for (int i = 0; i < 10; i++) {
- assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 2000l);
+ assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 2000L);
}
// now start shutting nodes down
@@ -109,7 +109,7 @@ public class FullRollingRestartIT extends ESIntegTestCase {
logger.info("--> stopped two nodes, verifying data");
refresh();
for (int i = 0; i < 10; i++) {
- assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 2000l);
+ assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 2000L);
}
// closing the 3rd node
@@ -127,7 +127,7 @@ public class FullRollingRestartIT extends ESIntegTestCase {
logger.info("--> one node left, verifying data");
refresh();
for (int i = 0; i < 10; i++) {
- assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 2000l);
+ assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 2000L);
}
}
diff --git a/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java
index e9349a97d7..ed07b06a2e 100644
--- a/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java
+++ b/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java
@@ -36,6 +36,7 @@ import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.BackgroundIndexer;
import org.elasticsearch.test.ESIntegTestCase;
+import org.elasticsearch.test.junit.annotations.TestLogging;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;
@@ -49,6 +50,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllS
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout;
+@TestLogging("_root:DEBUG")
public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
private final ESLogger logger = Loggers.getLogger(RecoveryWhileUnderLoadIT.class);
diff --git a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java
index 6542a8ab1c..fac65cc8dc 100644
--- a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java
+++ b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java
@@ -85,7 +85,7 @@ import static org.hamcrest.Matchers.startsWith;
/**
*/
@ClusterScope(scope = Scope.TEST, numDataNodes = 0)
-@TestLogging("indices.recovery:TRACE,index.shard.service:TRACE")
+@TestLogging("_root:DEBUG,indices.recovery:TRACE,index.shard.service:TRACE")
public class RelocationIT extends ESIntegTestCase {
private final TimeValue ACCEPTABLE_RELOCATION_TIME = new TimeValue(5, TimeUnit.MINUTES);
@@ -121,7 +121,7 @@ public class RelocationIT extends ESIntegTestCase {
logger.info("--> verifying count");
client().admin().indices().prepareRefresh().execute().actionGet();
- assertThat(client().prepareSearch("test").setSize(0).execute().actionGet().getHits().totalHits(), equalTo(20l));
+ assertThat(client().prepareSearch("test").setSize(0).execute().actionGet().getHits().totalHits(), equalTo(20L));
logger.info("--> start another node");
final String node_2 = internalCluster().startNode();
@@ -130,7 +130,7 @@ public class RelocationIT extends ESIntegTestCase {
logger.info("--> relocate the shard from node1 to node2");
client().admin().cluster().prepareReroute()
- .add(new MoveAllocationCommand(new ShardId("test", 0), node_1, node_2))
+ .add(new MoveAllocationCommand("test", 0, node_1, node_2))
.execute().actionGet();
clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForRelocatingShards(0).setTimeout(ACCEPTABLE_RELOCATION_TIME).execute().actionGet();
@@ -140,7 +140,7 @@ public class RelocationIT extends ESIntegTestCase {
logger.info("--> verifying count again...");
client().admin().indices().prepareRefresh().execute().actionGet();
- assertThat(client().prepareSearch("test").setSize(0).execute().actionGet().getHits().totalHits(), equalTo(20l));
+ assertThat(client().prepareSearch("test").setSize(0).execute().actionGet().getHits().totalHits(), equalTo(20L));
}
public void testRelocationWhileIndexingRandom() throws Exception {
@@ -190,7 +190,7 @@ public class RelocationIT extends ESIntegTestCase {
indexer.continueIndexing(numDocs);
logger.info("--> START relocate the shard from {} to {}", nodes[fromNode], nodes[toNode]);
client().admin().cluster().prepareReroute()
- .add(new MoveAllocationCommand(new ShardId("test", 0), nodes[fromNode], nodes[toNode]))
+ .add(new MoveAllocationCommand("test", 0, nodes[fromNode], nodes[toNode]))
.get();
if (rarely()) {
logger.debug("--> flushing");
@@ -319,7 +319,7 @@ public class RelocationIT extends ESIntegTestCase {
client().admin().cluster().prepareReroute()
- .add(new MoveAllocationCommand(new ShardId("test", 0), nodes[fromNode], nodes[toNode]))
+ .add(new MoveAllocationCommand("test", 0, nodes[fromNode], nodes[toNode]))
.get();
@@ -406,7 +406,7 @@ public class RelocationIT extends ESIntegTestCase {
logger.info("--> verifying no temporary recoveries are left");
for (String node : internalCluster().getNodeNames()) {
NodeEnvironment nodeEnvironment = internalCluster().getInstance(NodeEnvironment.class, node);
- for (final Path shardLoc : nodeEnvironment.availableShardPaths(new ShardId(indexName, 0))) {
+ for (final Path shardLoc : nodeEnvironment.availableShardPaths(new ShardId(indexName, "_na_", 0))) {
if (Files.exists(shardLoc)) {
assertBusy(new Runnable() {
@Override
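
Two API shifts recur in this file: ShardId now carries an index UUID, with "_na_" as the
not-available placeholder, and MoveAllocationCommand takes the index name and shard number
directly instead of a ShardId. A sketch using the values from the hunks above:

    ShardId shardId = new ShardId("test", "_na_", 0);     // was new ShardId("test", 0)
    new MoveAllocationCommand("test", 0, node_1, node_2); // was new MoveAllocationCommand(new ShardId("test", 0), node_1, node_2)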
diff --git a/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java b/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java
index 0a40da3403..78da0e1e1e 100644
--- a/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java
+++ b/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java
@@ -24,6 +24,7 @@ import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.index.Index;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.rest.FakeRestRequest;
@@ -134,9 +135,9 @@ public class BytesRestResponseTests extends ESTestCase {
RestRequest request = new FakeRestRequest();
RestChannel channel = new DetailedExceptionRestChannel(request);
ShardSearchFailure failure = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null),
- new SearchShardTarget("node_1", "foo", 1));
+ new SearchShardTarget("node_1", new Index("foo", "_na_"), 1));
ShardSearchFailure failure1 = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null),
- new SearchShardTarget("node_1", "foo", 2));
+ new SearchShardTarget("node_1", new Index("foo", "_na_"), 2));
SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[] {failure, failure1});
BytesRestResponse response = new BytesRestResponse(channel, new RemoteTransportException("foo", ex));
String text = response.content().toUtf8();
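
SearchShardTarget makes the same move as ShardId above: the bare index-name string becomes
a typed Index carrying the "_na_" UUID placeholder:

    SearchShardTarget target = new SearchShardTarget("node_1", new Index("foo", "_na_"), 1); // was ("node_1", "foo", 1)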
diff --git a/core/src/test/java/org/elasticsearch/rest/CorsRegexDefaultIT.java b/core/src/test/java/org/elasticsearch/rest/CorsRegexDefaultIT.java
index 2b7533cae1..f2ce16ac85 100644
--- a/core/src/test/java/org/elasticsearch/rest/CorsRegexDefaultIT.java
+++ b/core/src/test/java/org/elasticsearch/rest/CorsRegexDefaultIT.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.rest;
+import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;
import org.elasticsearch.test.ESIntegTestCase;
@@ -35,7 +36,7 @@ public class CorsRegexDefaultIT extends ESIntegTestCase {
@Override
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.builder()
- .put(Node.HTTP_ENABLED, true)
+ .put(NetworkModule.HTTP_ENABLED.getKey(), true)
.put(super.nodeSettings(nodeOrdinal)).build();
}
diff --git a/core/src/test/java/org/elasticsearch/rest/CorsRegexIT.java b/core/src/test/java/org/elasticsearch/rest/CorsRegexIT.java
index 3828ae0ad7..9740032ed7 100644
--- a/core/src/test/java/org/elasticsearch/rest/CorsRegexIT.java
+++ b/core/src/test/java/org/elasticsearch/rest/CorsRegexIT.java
@@ -20,16 +20,16 @@ package org.elasticsearch.rest;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.node.Node;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import org.elasticsearch.test.rest.client.http.HttpResponse;
-import static org.elasticsearch.http.netty.NettyHttpServerTransport.SETTING_CORS_ALLOW_CREDENTIALS;
-import static org.elasticsearch.http.netty.NettyHttpServerTransport.SETTING_CORS_ALLOW_ORIGIN;
-import static org.elasticsearch.http.netty.NettyHttpServerTransport.SETTING_CORS_ENABLED;
+import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_CREDENTIALS;
+import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_ORIGIN;
+import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ENABLED;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
@@ -46,10 +46,10 @@ public class CorsRegexIT extends ESIntegTestCase {
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.settingsBuilder()
.put(super.nodeSettings(nodeOrdinal))
- .put(SETTING_CORS_ALLOW_ORIGIN, "/https?:\\/\\/localhost(:[0-9]+)?/")
- .put(SETTING_CORS_ALLOW_CREDENTIALS, true)
- .put(SETTING_CORS_ENABLED, true)
- .put(Node.HTTP_ENABLED, true)
+ .put(SETTING_CORS_ALLOW_ORIGIN.getKey(), "/https?:\\/\\/localhost(:[0-9]+)?/")
+ .put(SETTING_CORS_ALLOW_CREDENTIALS.getKey(), true)
+ .put(SETTING_CORS_ENABLED.getKey(), true)
+ .put(NetworkModule.HTTP_ENABLED.getKey(), true)
.build();
}
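
The CORS constants move from NettyHttpServerTransport to the transport-agnostic
HttpTransportSettings, and like the other settings in this patch they become typed Setting
objects, hence the added getKey() calls; HTTP enablement likewise moves to NetworkModule:

    Settings s = Settings.settingsBuilder()
        .put(SETTING_CORS_ENABLED.getKey(), true)       // was .put(SETTING_CORS_ENABLED, true)
        .put(NetworkModule.HTTP_ENABLED.getKey(), true) // was .put(Node.HTTP_ENABLED, true)
        .build();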
diff --git a/core/src/test/java/org/elasticsearch/rest/HeadersAndContextCopyClientTests.java b/core/src/test/java/org/elasticsearch/rest/HeadersAndContextCopyClientTests.java
deleted file mode 100644
index 2bcb491bab..0000000000
--- a/core/src/test/java/org/elasticsearch/rest/HeadersAndContextCopyClientTests.java
+++ /dev/null
@@ -1,426 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.rest;
-
-import org.elasticsearch.action.ActionRequest;
-import org.elasticsearch.action.ActionRequestBuilder;
-import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
-import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
-import org.elasticsearch.action.admin.cluster.stats.ClusterStatsRequest;
-import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
-import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
-import org.elasticsearch.action.admin.indices.flush.FlushRequest;
-import org.elasticsearch.action.get.GetRequest;
-import org.elasticsearch.action.index.IndexRequest;
-import org.elasticsearch.action.search.SearchRequest;
-import org.elasticsearch.client.Client;
-import org.elasticsearch.client.Requests;
-import org.elasticsearch.common.collect.ImmutableOpenMap;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.test.client.NoOpClient;
-import org.elasticsearch.test.rest.FakeRestRequest;
-
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
-
-import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.CoreMatchers.notNullValue;
-import static org.hamcrest.Matchers.is;
-
-public class HeadersAndContextCopyClientTests extends ESTestCase {
-
- public void testRegisterRelevantHeaders() throws InterruptedException {
-
- final RestController restController = new RestController(Settings.EMPTY);
-
- int iterations = randomIntBetween(1, 5);
-
- Set<String> headers = new HashSet<>();
- ExecutorService executorService = Executors.newFixedThreadPool(iterations);
- for (int i = 0; i < iterations; i++) {
- int headersCount = randomInt(10);
- final Set<String> newHeaders = new HashSet<>();
- for (int j = 0; j < headersCount; j++) {
- String usefulHeader = randomRealisticUnicodeOfLengthBetween(1, 30);
- newHeaders.add(usefulHeader);
- }
- headers.addAll(newHeaders);
-
- executorService.submit(new Runnable() {
- @Override
- public void run() {
- restController.registerRelevantHeaders(newHeaders.toArray(new String[newHeaders.size()]));
- }
- });
- }
-
- executorService.shutdown();
- assertThat(executorService.awaitTermination(1, TimeUnit.SECONDS), equalTo(true));
- String[] relevantHeaders = restController.relevantHeaders().toArray(new String[restController.relevantHeaders().size()]);
- assertThat(relevantHeaders.length, equalTo(headers.size()));
-
- Arrays.sort(relevantHeaders);
- String[] headersArray = new String[headers.size()];
- headersArray = headers.toArray(headersArray);
- Arrays.sort(headersArray);
- assertThat(relevantHeaders, equalTo(headersArray));
- }
-
- public void testCopyHeadersRequest() {
- Map<String, String> transportHeaders = randomHeaders(randomIntBetween(0, 10));
- Map<String, String> restHeaders = randomHeaders(randomIntBetween(0, 10));
- Map<String, String> copiedHeaders = randomHeadersFrom(restHeaders);
- Set<String> usefulRestHeaders = new HashSet<>(copiedHeaders.keySet());
- usefulRestHeaders.addAll(randomMap(randomIntBetween(0, 10), "useful-").keySet());
- Map<String, String> restContext = randomContext(randomIntBetween(0, 10));
- Map<String, String> transportContext = onlyOnLeft(randomContext(randomIntBetween(0, 10)), restContext);
-
- Map<String, String> expectedHeaders = new HashMap<>();
- expectedHeaders.putAll(transportHeaders);
- expectedHeaders.putAll(copiedHeaders);
-
- Map<String, String> expectedContext = new HashMap<>();
- expectedContext.putAll(transportContext);
- expectedContext.putAll(restContext);
-
- try (Client client = client(new NoOpClient(getTestName()), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) {
-
- SearchRequest searchRequest = Requests.searchRequest();
- putHeaders(searchRequest, transportHeaders);
- putContext(searchRequest, transportContext);
- assertHeaders(searchRequest, transportHeaders);
- client.search(searchRequest);
- assertHeaders(searchRequest, expectedHeaders);
- assertContext(searchRequest, expectedContext);
-
- GetRequest getRequest = Requests.getRequest("index");
- putHeaders(getRequest, transportHeaders);
- putContext(getRequest, transportContext);
- assertHeaders(getRequest, transportHeaders);
- client.get(getRequest);
- assertHeaders(getRequest, expectedHeaders);
- assertContext(getRequest, expectedContext);
-
- IndexRequest indexRequest = Requests.indexRequest();
- putHeaders(indexRequest, transportHeaders);
- putContext(indexRequest, transportContext);
- assertHeaders(indexRequest, transportHeaders);
- client.index(indexRequest);
- assertHeaders(indexRequest, expectedHeaders);
- assertContext(indexRequest, expectedContext);
- }
- }
-
- public void testCopyHeadersClusterAdminRequest() {
- Map<String, String> transportHeaders = randomHeaders(randomIntBetween(0, 10));
- Map<String, String> restHeaders = randomHeaders(randomIntBetween(0, 10));
- Map<String, String> copiedHeaders = randomHeadersFrom(restHeaders);
- Set<String> usefulRestHeaders = new HashSet<>(copiedHeaders.keySet());
- usefulRestHeaders.addAll(randomMap(randomIntBetween(0, 10), "useful-").keySet());
- Map<String, String> restContext = randomContext(randomIntBetween(0, 10));
- Map<String, String> transportContext = onlyOnLeft(randomContext(randomIntBetween(0, 10)), restContext);
-
- HashMap<String, String> expectedHeaders = new HashMap<>();
- expectedHeaders.putAll(transportHeaders);
- expectedHeaders.putAll(copiedHeaders);
-
- Map<String, String> expectedContext = new HashMap<>();
- expectedContext.putAll(transportContext);
- expectedContext.putAll(restContext);
-
- try (Client client = client(new NoOpClient(getTestName()), new FakeRestRequest(restHeaders, expectedContext), usefulRestHeaders)) {
-
- ClusterHealthRequest clusterHealthRequest = Requests.clusterHealthRequest();
- putHeaders(clusterHealthRequest, transportHeaders);
- putContext(clusterHealthRequest, transportContext);
- assertHeaders(clusterHealthRequest, transportHeaders);
- client.admin().cluster().health(clusterHealthRequest);
- assertHeaders(clusterHealthRequest, expectedHeaders);
- assertContext(clusterHealthRequest, expectedContext);
-
- ClusterStateRequest clusterStateRequest = Requests.clusterStateRequest();
- putHeaders(clusterStateRequest, transportHeaders);
- putContext(clusterStateRequest, transportContext);
- assertHeaders(clusterStateRequest, transportHeaders);
- client.admin().cluster().state(clusterStateRequest);
- assertHeaders(clusterStateRequest, expectedHeaders);
- assertContext(clusterStateRequest, expectedContext);
-
- ClusterStatsRequest clusterStatsRequest = Requests.clusterStatsRequest();
- putHeaders(clusterStatsRequest, transportHeaders);
- putContext(clusterStatsRequest, transportContext);
- assertHeaders(clusterStatsRequest, transportHeaders);
- client.admin().cluster().clusterStats(clusterStatsRequest);
- assertHeaders(clusterStatsRequest, expectedHeaders);
- assertContext(clusterStatsRequest, expectedContext);
- }
- }
-
- public void testCopyHeadersIndicesAdminRequest() {
- Map<String, String> transportHeaders = randomHeaders(randomIntBetween(0, 10));
- Map<String, String> restHeaders = randomHeaders(randomIntBetween(0, 10));
- Map<String, String> copiedHeaders = randomHeadersFrom(restHeaders);
- Set<String> usefulRestHeaders = new HashSet<>(copiedHeaders.keySet());
- usefulRestHeaders.addAll(randomMap(randomIntBetween(0, 10), "useful-").keySet());
- Map<String, String> restContext = randomContext(randomIntBetween(0, 10));
- Map<String, String> transportContext = onlyOnLeft(randomContext(randomIntBetween(0, 10)), restContext);
-
- HashMap<String, String> expectedHeaders = new HashMap<>();
- expectedHeaders.putAll(transportHeaders);
- expectedHeaders.putAll(copiedHeaders);
-
- Map<String, String> expectedContext = new HashMap<>();
- expectedContext.putAll(transportContext);
- expectedContext.putAll(restContext);
-
- try (Client client = client(new NoOpClient(getTestName()), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) {
-
- CreateIndexRequest createIndexRequest = Requests.createIndexRequest("test");
- putHeaders(createIndexRequest, transportHeaders);
- putContext(createIndexRequest, transportContext);
- assertHeaders(createIndexRequest, transportHeaders);
- client.admin().indices().create(createIndexRequest);
- assertHeaders(createIndexRequest, expectedHeaders);
- assertContext(createIndexRequest, expectedContext);
-
- CloseIndexRequest closeIndexRequest = Requests.closeIndexRequest("test");
- putHeaders(closeIndexRequest, transportHeaders);
- putContext(closeIndexRequest, transportContext);
- assertHeaders(closeIndexRequest, transportHeaders);
- client.admin().indices().close(closeIndexRequest);
- assertHeaders(closeIndexRequest, expectedHeaders);
- assertContext(closeIndexRequest, expectedContext);
-
- FlushRequest flushRequest = Requests.flushRequest();
- putHeaders(flushRequest, transportHeaders);
- putContext(flushRequest, transportContext);
- assertHeaders(flushRequest, transportHeaders);
- client.admin().indices().flush(flushRequest);
- assertHeaders(flushRequest, expectedHeaders);
- assertContext(flushRequest, expectedContext);
- }
- }
-
- public void testCopyHeadersRequestBuilder() {
- Map<String, String> transportHeaders = randomHeaders(randomIntBetween(0, 10));
- Map<String, String> restHeaders = randomHeaders(randomIntBetween(0, 10));
- Map<String, String> copiedHeaders = randomHeadersFrom(restHeaders);
- Set<String> usefulRestHeaders = new HashSet<>(copiedHeaders.keySet());
- usefulRestHeaders.addAll(randomMap(randomIntBetween(0, 10), "useful-").keySet());
- Map<String, String> restContext = randomContext(randomIntBetween(0, 10));
- Map<String, String> transportContext = onlyOnLeft(randomContext(randomIntBetween(0, 10)), restContext);
-
- HashMap<String, String> expectedHeaders = new HashMap<>();
- expectedHeaders.putAll(transportHeaders);
- expectedHeaders.putAll(copiedHeaders);
-
- Map<String, String> expectedContext = new HashMap<>();
- expectedContext.putAll(transportContext);
- expectedContext.putAll(restContext);
-
- try (Client client = client(new NoOpClient(getTestName()), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) {
-
- ActionRequestBuilder requestBuilders[] = new ActionRequestBuilder[]{
- client.prepareIndex("index", "type"),
- client.prepareGet("index", "type", "id"),
- client.prepareBulk(),
- client.prepareDelete(),
- client.prepareIndex(),
- client.prepareClearScroll(),
- client.prepareMultiGet(),
- };
-
- for (ActionRequestBuilder requestBuilder : requestBuilders) {
- putHeaders(requestBuilder.request(), transportHeaders);
- putContext(requestBuilder.request(), transportContext);
- assertHeaders(requestBuilder.request(), transportHeaders);
- requestBuilder.get();
- assertHeaders(requestBuilder.request(), expectedHeaders);
- assertContext(requestBuilder.request(), expectedContext);
- }
- }
- }
-
- public void testCopyHeadersClusterAdminRequestBuilder() {
- Map<String, String> transportHeaders = randomHeaders(randomIntBetween(0, 10));
- Map<String, String> restHeaders = randomHeaders(randomIntBetween(0, 10));
- Map<String, String> copiedHeaders = randomHeadersFrom(restHeaders);
- Set<String> usefulRestHeaders = new HashSet<>(copiedHeaders.keySet());
- usefulRestHeaders.addAll(randomMap(randomIntBetween(0, 10), "useful-").keySet());
- Map<String, String> restContext = randomContext(randomIntBetween(0, 10));
- Map<String, String> transportContext = onlyOnLeft(randomContext(randomIntBetween(0, 10)), restContext);
-
- HashMap<String, String> expectedHeaders = new HashMap<>();
- expectedHeaders.putAll(transportHeaders);
- expectedHeaders.putAll(copiedHeaders);
-
- Map<String, String> expectedContext = new HashMap<>();
- expectedContext.putAll(transportContext);
- expectedContext.putAll(restContext);
-
- try (Client client = client(new NoOpClient(getTestName()), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) {
-
- ActionRequestBuilder requestBuilders[] = new ActionRequestBuilder[]{
- client.admin().cluster().prepareNodesInfo(),
- client.admin().cluster().prepareClusterStats(),
- client.admin().cluster().prepareState(),
- client.admin().cluster().prepareCreateSnapshot("repo", "name"),
- client.admin().cluster().prepareHealth(),
- client.admin().cluster().prepareReroute()
- };
-
- for (ActionRequestBuilder requestBuilder : requestBuilders) {
- putHeaders(requestBuilder.request(), transportHeaders);
- putContext(requestBuilder.request(), transportContext);
- assertHeaders(requestBuilder.request(), transportHeaders);
- requestBuilder.get();
- assertHeaders(requestBuilder.request(), expectedHeaders);
- assertContext(requestBuilder.request(), expectedContext);
- }
- }
- }
-
- public void testCopyHeadersIndicesAdminRequestBuilder() {
- Map<String, String> transportHeaders = randomHeaders(randomIntBetween(0, 10));
- Map<String, String> restHeaders = randomHeaders(randomIntBetween(0, 10));
- Map<String, String> copiedHeaders = randomHeadersFrom(restHeaders);
- Set<String> usefulRestHeaders = new HashSet<>(copiedHeaders.keySet());
- usefulRestHeaders.addAll(randomMap(randomIntBetween(0, 10), "useful-").keySet());
- Map<String, String> restContext = randomContext(randomIntBetween(0, 10));
- Map<String, String> transportContext = onlyOnLeft(randomContext(randomIntBetween(0, 10)), restContext);
-
- HashMap<String, String> expectedHeaders = new HashMap<>();
- expectedHeaders.putAll(transportHeaders);
- expectedHeaders.putAll(copiedHeaders);
-
- Map<String, String> expectedContext = new HashMap<>();
- expectedContext.putAll(transportContext);
- expectedContext.putAll(restContext);
-
- try (Client client = client(new NoOpClient(getTestName()), new FakeRestRequest(restHeaders, restContext), usefulRestHeaders)) {
-
- ActionRequestBuilder requestBuilders[] = new ActionRequestBuilder[]{
- client.admin().indices().prepareValidateQuery(),
- client.admin().indices().prepareCreate("test"),
- client.admin().indices().prepareAliases(),
- client.admin().indices().prepareAnalyze("text"),
- client.admin().indices().prepareTypesExists("type"),
- client.admin().indices().prepareClose()
- };
-
- for (ActionRequestBuilder requestBuilder : requestBuilders) {
- putHeaders(requestBuilder.request(), transportHeaders);
- putContext(requestBuilder.request(), transportContext);
- assertHeaders(requestBuilder.request(), transportHeaders);
- requestBuilder.get();
- assertHeaders(requestBuilder.request(), expectedHeaders);
- assertContext(requestBuilder.request(), expectedContext);
- }
- }
- }
-
- private static Map<String, String> randomHeaders(int count) {
- return randomMap(count, "header-");
- }
-
- private static Map<String, String> randomContext(int count) {
- return randomMap(count, "context-");
- }
-
- private static Map<String, String> randomMap(int count, String prefix) {
- Map<String, String> headers = new HashMap<>();
- for (int i = 0; i < count; i++) {
- headers.put(prefix + randomInt(30), randomAsciiOfLength(10));
- }
- return headers;
- }
-
- private static Map<String, String> randomHeadersFrom(Map<String, String> headers) {
- Map<String, String> newHeaders = new HashMap<>();
- if (headers.isEmpty()) {
- return newHeaders;
- }
- int i = randomInt(headers.size() - 1);
- for (Map.Entry<String, String> entry : headers.entrySet()) {
- if (randomInt(i) == 0) {
- newHeaders.put(entry.getKey(), entry.getValue());
- }
- }
- return newHeaders;
- }
-
- private static Client client(Client noOpClient, RestRequest restRequest, Set<String> usefulRestHeaders) {
- return new BaseRestHandler.HeadersAndContextCopyClient(noOpClient, restRequest, usefulRestHeaders);
- }
-
- private static void putHeaders(ActionRequest<?> request, Map<String, String> headers) {
- for (Map.Entry<String, String> header : headers.entrySet()) {
- request.putHeader(header.getKey(), header.getValue());
- }
- }
-
- private static void putContext(ActionRequest<?> request, Map<String, String> context) {
- for (Map.Entry<String, String> header : context.entrySet()) {
- request.putInContext(header.getKey(), header.getValue());
- }
- }
-
- private static void assertHeaders(ActionRequest<?> request, Map<String, String> headers) {
- if (headers.size() == 0) {
- assertThat(request.getHeaders() == null || request.getHeaders().size() == 0, equalTo(true));
- } else {
- assertThat(request.getHeaders(), notNullValue());
- assertThat(request.getHeaders().size(), equalTo(headers.size()));
- for (String key : request.getHeaders()) {
- assertThat(headers.get(key), equalTo(request.getHeader(key)));
- }
- }
- }
-
- private static void assertContext(ActionRequest<?> request, Map<String, String> context) {
- if (context.size() == 0) {
- assertThat(request.isContextEmpty(), is(true));
- } else {
- ImmutableOpenMap map = request.getContext();
- assertThat(map, notNullValue());
- assertThat(map.size(), equalTo(context.size()));
- for (Object key : map.keys()) {
- assertThat(context.get(key), equalTo(request.getFromContext(key)));
- }
- }
- }
-
- private static Map<String, String> onlyOnLeft(Map<String, String> left, Map<String, String> right) {
- Map<String, String> map = new HashMap<>();
- for (Map.Entry<String, String> entry : left.entrySet()) {
- if (!right.containsKey(entry.getKey())) {
- map.put(entry.getKey(), entry.getValue());
- }
- }
- return map;
- }
-}
diff --git a/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java
new file mode 100644
index 0000000000..d6e1a97ac8
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.rest;
+
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.rest.FakeRestRequest;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+
+public class RestControllerTests extends ESTestCase {
+
+ public void testRegisterRelevantHeaders() throws InterruptedException {
+
+ final RestController restController = new RestController(Settings.EMPTY);
+
+ int iterations = randomIntBetween(1, 5);
+
+ Set<String> headers = new HashSet<>();
+ ExecutorService executorService = Executors.newFixedThreadPool(iterations);
+ for (int i = 0; i < iterations; i++) {
+ int headersCount = randomInt(10);
+ final Set<String> newHeaders = new HashSet<>();
+ for (int j = 0; j < headersCount; j++) {
+ String usefulHeader = randomRealisticUnicodeOfLengthBetween(1, 30);
+ newHeaders.add(usefulHeader);
+ }
+ headers.addAll(newHeaders);
+
+ executorService.submit((Runnable) () -> restController.registerRelevantHeaders(newHeaders.toArray(new String[newHeaders.size()])));
+ }
+
+ executorService.shutdown();
+ assertThat(executorService.awaitTermination(1, TimeUnit.SECONDS), equalTo(true));
+ String[] relevantHeaders = restController.relevantHeaders().toArray(new String[restController.relevantHeaders().size()]);
+ assertThat(relevantHeaders.length, equalTo(headers.size()));
+
+ Arrays.sort(relevantHeaders);
+ String[] headersArray = new String[headers.size()];
+ headersArray = headers.toArray(headersArray);
+ Arrays.sort(headersArray);
+ assertThat(relevantHeaders, equalTo(headersArray));
+ }
+
+ public void testApplyRelevantHeaders() {
+ final ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
+ final RestController restController = new RestController(Settings.EMPTY) {
+ @Override
+ boolean checkRequestParameters(RestRequest request, RestChannel channel) {
+ return true;
+ }
+
+ @Override
+ void executeHandler(RestRequest request, RestChannel channel) throws Exception {
+ assertEquals("true", threadContext.getHeader("header.1"));
+ assertEquals("true", threadContext.getHeader("header.2"));
+ assertNull(threadContext.getHeader("header.3"));
+
+ }
+ };
+ threadContext.putHeader("header.3", "true");
+ restController.registerRelevantHeaders("header.1", "header.2");
+ Map<String, String> restHeaders = new HashMap<>();
+ restHeaders.put("header.1", "true");
+ restHeaders.put("header.2", "true");
+ restHeaders.put("header.3", "false");
+ restController.dispatchRequest(new FakeRestRequest(restHeaders), null, threadContext);
+ assertNull(threadContext.getHeader("header.1"));
+ assertNull(threadContext.getHeader("header.2"));
+ assertEquals("true", threadContext.getHeader("header.3"));
+ }
+}
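
The new testApplyRelevantHeaders pins down the ThreadContext contract that replaces the
deleted header-copying client: during dispatch only headers registered via
registerRelevantHeaders are copied from the REST request into the context, and once
dispatch returns the context reverts to its pre-dispatch state. Restating the assertions
above as a sketch (request and channel are hypothetical placeholders):

    ThreadContext ctx = new ThreadContext(Settings.EMPTY);
    ctx.putHeader("header.3", "true");                      // pre-existing context header
    restController.registerRelevantHeaders("header.1", "header.2");
    restController.dispatchRequest(request, channel, ctx);  // request carries header.1..3
    // inside the handler: header.1 and header.2 are visible, header.3 is not
    assertNull(ctx.getHeader("header.1"));                  // dropped once dispatch returns
    assertEquals("true", ctx.getHeader("header.3"));        // original context restored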
diff --git a/core/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java b/core/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java
index b66d00cd6a..56ae8e2183 100644
--- a/core/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java
+++ b/core/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java
@@ -23,6 +23,7 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.rest.FakeRestRequest;
@@ -84,7 +85,7 @@ public class RestFilterChainTests extends ESTestCase {
FakeRestRequest fakeRestRequest = new FakeRestRequest();
FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, 1);
- restController.dispatchRequest(fakeRestRequest, fakeRestChannel);
+ restController.dispatchRequest(fakeRestRequest, fakeRestChannel, new ThreadContext(Settings.EMPTY));
assertThat(fakeRestChannel.await(), equalTo(true));
@@ -142,7 +143,7 @@ public class RestFilterChainTests extends ESTestCase {
FakeRestRequest fakeRestRequest = new FakeRestRequest();
FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, additionalContinueCount + 1);
- restController.dispatchRequest(fakeRestRequest, fakeRestChannel);
+ restController.dispatchRequest(fakeRestRequest, fakeRestChannel, new ThreadContext(Settings.EMPTY));
fakeRestChannel.await();
assertThat(testFilter.runs.get(), equalTo(1));
diff --git a/core/src/test/java/org/elasticsearch/rest/RestRequestTests.java b/core/src/test/java/org/elasticsearch/rest/RestRequestTests.java
deleted file mode 100644
index 8e60b28f37..0000000000
--- a/core/src/test/java/org/elasticsearch/rest/RestRequestTests.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.rest;
-
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.collect.ImmutableOpenMap;
-import org.elasticsearch.test.ESTestCase;
-
-import java.util.Map;
-
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.is;
-
-/**
- *
- */
-public class RestRequestTests extends ESTestCase {
- public void testContext() throws Exception {
- int count = randomInt(10);
- Request request = new Request();
- for (int i = 0; i < count; i++) {
- request.putInContext("key" + i, "val" + i);
- }
- assertThat(request.isContextEmpty(), is(count == 0));
- assertThat(request.contextSize(), is(count));
- ImmutableOpenMap<Object, Object> ctx = request.getContext();
- for (int i = 0; i < count; i++) {
- assertThat(request.hasInContext("key" + i), is(true));
- assertThat((String) request.getFromContext("key" + i), equalTo("val" + i));
- assertThat((String) ctx.get("key" + i), equalTo("val" + i));
- }
- }
-
- public static class Request extends RestRequest {
- @Override
- public Method method() {
- return null;
- }
-
- @Override
- public String uri() {
- return null;
- }
-
- @Override
- public String rawPath() {
- return null;
- }
-
- @Override
- public boolean hasContent() {
- return false;
- }
-
- @Override
- public BytesReference content() {
- return null;
- }
-
- @Override
- public String header(String name) {
- return null;
- }
-
- @Override
- public Iterable<Map.Entry<String, String>> headers() {
- return null;
- }
-
- @Override
- public boolean hasParam(String key) {
- return false;
- }
-
- @Override
- public String param(String key) {
- return null;
- }
-
- @Override
- public Map<String, String> params() {
- return null;
- }
-
- @Override
- public String param(String key, String defaultValue) {
- return null;
- }
- }
-}
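RestRequestTests is deleted outright because the context-carrying API it exercised (putInContext, getFromContext, hasInContext and friends) has been removed from RestRequest; request-scoped state now lives in ThreadContext. A sketch of the equivalent round trip, assuming ThreadContext's putTransient/getTransient accessors:

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.common.util.concurrent.ThreadContext;

    ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
    // Transient values are visible only within the current request's scope,
    // taking over the role of RestRequest.putInContext/getFromContext.
    threadContext.putTransient("key0", "val0");
    String value = threadContext.getTransient("key0");
    assert "val0".equals(value);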
diff --git a/core/src/test/java/org/elasticsearch/routing/AliasResolveRoutingIT.java b/core/src/test/java/org/elasticsearch/routing/AliasResolveRoutingIT.java
index e73f7e510f..cb880fc4fe 100644
--- a/core/src/test/java/org/elasticsearch/routing/AliasResolveRoutingIT.java
+++ b/core/src/test/java/org/elasticsearch/routing/AliasResolveRoutingIT.java
@@ -54,7 +54,7 @@ public class AliasResolveRoutingIT extends ESIntegTestCase {
client().prepareIndex("test-0", "type1", "2").setSource("field1", "quick brown"),
client().prepareIndex("test-0", "type1", "3").setSource("field1", "quick"));
refresh("test-*");
- assertHitCount(client().prepareSearch().setIndices("alias-*").setIndicesOptions(IndicesOptions.lenientExpandOpen()).setQuery(matchQuery("_all", "quick")).get(), 3l);
+ assertHitCount(client().prepareSearch().setIndices("alias-*").setIndicesOptions(IndicesOptions.lenientExpandOpen()).setQuery(matchQuery("_all", "quick")).get(), 3L);
}
public void testResolveIndexRouting() throws Exception {
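This hunk, and most of the routing-test hunks that follow, are a mechanical cleanup: lowercase long-literal suffixes (3l) become uppercase (3L). Both forms compile to the same constant; the uppercase suffix is preferred because a lowercase l is easily misread as the digit 1:

    long ambiguous = 3l; // legal Java, but reads like "31" in many fonts
    long clear = 3L;     // same value, unambiguous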
diff --git a/core/src/test/java/org/elasticsearch/routing/AliasRoutingIT.java b/core/src/test/java/org/elasticsearch/routing/AliasRoutingIT.java
index 9fc6bcfb73..a43b481b2e 100644
--- a/core/src/test/java/org/elasticsearch/routing/AliasRoutingIT.java
+++ b/core/src/test/java/org/elasticsearch/routing/AliasRoutingIT.java
@@ -122,23 +122,23 @@ public class AliasRoutingIT extends ESIntegTestCase {
logger.info("--> search with no routing, should fine one");
for (int i = 0; i < 5; i++) {
- assertThat(client().prepareSearch().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
+ assertThat(client().prepareSearch().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1L));
}
logger.info("--> search with wrong routing, should not find");
for (int i = 0; i < 5; i++) {
- assertThat(client().prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(0l));
- assertThat(client().prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(0l));
- assertThat(client().prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(0l));
- assertThat(client().prepareSearch("alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(0l));
+ assertThat(client().prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(0L));
+ assertThat(client().prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(0L));
+ assertThat(client().prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(0L));
+ assertThat(client().prepareSearch("alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(0L));
}
logger.info("--> search with correct routing, should find");
for (int i = 0; i < 5; i++) {
- assertThat(client().prepareSearch().setRouting("0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
- assertThat(client().prepareSearch().setSize(0).setRouting("0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
- assertThat(client().prepareSearch("alias0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
- assertThat(client().prepareSearch("alias0").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
+ assertThat(client().prepareSearch().setRouting("0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1L));
+ assertThat(client().prepareSearch().setSize(0).setRouting("0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1L));
+ assertThat(client().prepareSearch("alias0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1L));
+ assertThat(client().prepareSearch("alias0").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1L));
}
logger.info("--> indexing with id [2], and routing [1] using alias");
@@ -146,50 +146,50 @@ public class AliasRoutingIT extends ESIntegTestCase {
logger.info("--> search with no routing, should fine two");
for (int i = 0; i < 5; i++) {
- assertThat(client().prepareSearch().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
- assertThat(client().prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
+ assertThat(client().prepareSearch().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
+ assertThat(client().prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
}
logger.info("--> search with 0 routing, should find one");
for (int i = 0; i < 5; i++) {
- assertThat(client().prepareSearch().setRouting("0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
- assertThat(client().prepareSearch().setSize(0).setRouting("0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
- assertThat(client().prepareSearch("alias0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
- assertThat(client().prepareSearch("alias0").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
+ assertThat(client().prepareSearch().setRouting("0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1L));
+ assertThat(client().prepareSearch().setSize(0).setRouting("0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1L));
+ assertThat(client().prepareSearch("alias0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1L));
+ assertThat(client().prepareSearch("alias0").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1L));
}
logger.info("--> search with 1 routing, should find one");
for (int i = 0; i < 5; i++) {
- assertThat(client().prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
- assertThat(client().prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
- assertThat(client().prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
- assertThat(client().prepareSearch("alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
+ assertThat(client().prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1L));
+ assertThat(client().prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1L));
+ assertThat(client().prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1L));
+ assertThat(client().prepareSearch("alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1L));
}
logger.info("--> search with 0,1 routings , should find two");
for (int i = 0; i < 5; i++) {
- assertThat(client().prepareSearch().setRouting("0", "1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
- assertThat(client().prepareSearch().setSize(0).setRouting("0", "1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
- assertThat(client().prepareSearch("alias01").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
- assertThat(client().prepareSearch("alias01").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
+ assertThat(client().prepareSearch().setRouting("0", "1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
+ assertThat(client().prepareSearch().setSize(0).setRouting("0", "1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
+ assertThat(client().prepareSearch("alias01").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
+ assertThat(client().prepareSearch("alias01").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
}
logger.info("--> search with two routing aliases , should find two");
for (int i = 0; i < 5; i++) {
- assertThat(client().prepareSearch("alias0", "alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
- assertThat(client().prepareSearch("alias0", "alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
+ assertThat(client().prepareSearch("alias0", "alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
+ assertThat(client().prepareSearch("alias0", "alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
}
logger.info("--> search with alias0, alias1 and alias01, should find two");
for (int i = 0; i < 5; i++) {
- assertThat(client().prepareSearch("alias0", "alias1", "alias01").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
- assertThat(client().prepareSearch("alias0", "alias1", "alias01").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
+ assertThat(client().prepareSearch("alias0", "alias1", "alias01").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
+ assertThat(client().prepareSearch("alias0", "alias1", "alias01").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
}
logger.info("--> search with test, alias0 and alias1, should find two");
for (int i = 0; i < 5; i++) {
- assertThat(client().prepareSearch("test", "alias0", "alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
- assertThat(client().prepareSearch("test", "alias0", "alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
+ assertThat(client().prepareSearch("test", "alias0", "alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
+ assertThat(client().prepareSearch("test", "alias0", "alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
}
}
@@ -231,20 +231,20 @@ public class AliasRoutingIT extends ESIntegTestCase {
logger.info("--> search with alias-a1,alias-b0, should not find");
for (int i = 0; i < 5; i++) {
- assertThat(client().prepareSearch("alias-a1", "alias-b0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(0l));
- assertThat(client().prepareSearch("alias-a1", "alias-b0").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(0l));
+ assertThat(client().prepareSearch("alias-a1", "alias-b0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(0L));
+ assertThat(client().prepareSearch("alias-a1", "alias-b0").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(0L));
}
logger.info("--> search with alias-ab, should find two");
for (int i = 0; i < 5; i++) {
- assertThat(client().prepareSearch("alias-ab").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
- assertThat(client().prepareSearch("alias-ab").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
+ assertThat(client().prepareSearch("alias-ab").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
+ assertThat(client().prepareSearch("alias-ab").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
}
logger.info("--> search with alias-a0,alias-b1 should find two");
for (int i = 0; i < 5; i++) {
- assertThat(client().prepareSearch("alias-a0", "alias-b1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
- assertThat(client().prepareSearch("alias-a0", "alias-b1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
+ assertThat(client().prepareSearch("alias-a0", "alias-b1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
+ assertThat(client().prepareSearch("alias-a0", "alias-b1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
}
}
@@ -268,7 +268,7 @@ public class AliasRoutingIT extends ESIntegTestCase {
logger.info("--> search all on index_* should find two");
for (int i = 0; i < 5; i++) {
- assertThat(client().prepareSearch("index_*").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
+ assertThat(client().prepareSearch("index_*").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
}
}
@@ -313,8 +313,8 @@ public class AliasRoutingIT extends ESIntegTestCase {
logger.info("--> verifying get and search with routing, should find");
for (int i = 0; i < 5; i++) {
assertThat(client().prepareGet("test", "type1", "0").setRouting("3").execute().actionGet().isExists(), equalTo(true));
- assertThat(client().prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
- assertThat(client().prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
+ assertThat(client().prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1L));
+ assertThat(client().prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1L));
}
logger.info("--> creating alias with routing [4]");
@@ -323,8 +323,8 @@ public class AliasRoutingIT extends ESIntegTestCase {
logger.info("--> verifying search with wrong routing should not find");
for (int i = 0; i < 5; i++) {
- assertThat(client().prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(0l));
- assertThat(client().prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(0l));
+ assertThat(client().prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(0L));
+ assertThat(client().prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(0L));
}
logger.info("--> creating alias with search routing [3,4] and index routing 4");
@@ -339,8 +339,8 @@ public class AliasRoutingIT extends ESIntegTestCase {
for (int i = 0; i < 5; i++) {
assertThat(client().prepareGet("test", "type1", "0").setRouting("3").execute().actionGet().isExists(), equalTo(true));
assertThat(client().prepareGet("test", "type1", "1").setRouting("4").execute().actionGet().isExists(), equalTo(true));
- assertThat(client().prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
- assertThat(client().prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
+ assertThat(client().prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
+ assertThat(client().prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
}
}
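These assertions depend on aliases that carry routing values, so a search through the alias only reaches the shards its routing hashes to. A sketch of how such an alias is typically created with this client API (the AliasAction helper and its routing setter are assumed from the same codebase, not shown in this diff):

    client().admin().indices().prepareAliases()
            .addAliasAction(AliasAction.newAddAliasAction("test", "alias0").routing("0"))
            .get();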
diff --git a/core/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java b/core/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java
index 3bbf7146ae..a9c51d6ea9 100644
--- a/core/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java
+++ b/core/src/test/java/org/elasticsearch/routing/SimpleRoutingIT.java
@@ -107,19 +107,19 @@ public class SimpleRoutingIT extends ESIntegTestCase {
logger.info("--> search with no routing, should fine one");
for (int i = 0; i < 5; i++) {
- assertThat(client().prepareSearch().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
+ assertThat(client().prepareSearch().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1L));
}
logger.info("--> search with wrong routing, should not find");
for (int i = 0; i < 5; i++) {
- assertThat(client().prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(0l));
- assertThat(client().prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(0l));
+ assertThat(client().prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(0L));
+ assertThat(client().prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(0L));
}
logger.info("--> search with correct routing, should find");
for (int i = 0; i < 5; i++) {
- assertThat(client().prepareSearch().setRouting("0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
- assertThat(client().prepareSearch().setSize(0).setRouting("0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
+ assertThat(client().prepareSearch().setRouting("0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1L));
+ assertThat(client().prepareSearch().setSize(0).setRouting("0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1L));
}
logger.info("--> indexing with id [2], and routing [1]");
@@ -127,32 +127,32 @@ public class SimpleRoutingIT extends ESIntegTestCase {
logger.info("--> search with no routing, should fine two");
for (int i = 0; i < 5; i++) {
- assertThat(client().prepareSearch().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
- assertThat(client().prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
+ assertThat(client().prepareSearch().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
+ assertThat(client().prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
}
logger.info("--> search with 0 routing, should find one");
for (int i = 0; i < 5; i++) {
- assertThat(client().prepareSearch().setRouting("0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
- assertThat(client().prepareSearch().setSize(0).setRouting("0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
+ assertThat(client().prepareSearch().setRouting("0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1L));
+ assertThat(client().prepareSearch().setSize(0).setRouting("0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1L));
}
logger.info("--> search with 1 routing, should find one");
for (int i = 0; i < 5; i++) {
- assertThat(client().prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
- assertThat(client().prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1l));
+ assertThat(client().prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1L));
+ assertThat(client().prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(1L));
}
logger.info("--> search with 0,1 routings , should find two");
for (int i = 0; i < 5; i++) {
- assertThat(client().prepareSearch().setRouting("0", "1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
- assertThat(client().prepareSearch().setSize(0).setRouting("0", "1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
+ assertThat(client().prepareSearch().setRouting("0", "1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
+ assertThat(client().prepareSearch().setSize(0).setRouting("0", "1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
}
logger.info("--> search with 0,1,0 routings , should find two");
for (int i = 0; i < 5; i++) {
- assertThat(client().prepareSearch().setRouting("0", "1", "0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
- assertThat(client().prepareSearch().setSize(0).setRouting("0", "1", "0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2l));
+ assertThat(client().prepareSearch().setRouting("0", "1", "0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
+ assertThat(client().prepareSearch().setSize(0).setRouting("0", "1", "0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(2L));
}
}
@@ -300,7 +300,7 @@ public class SimpleRoutingIT extends ESIntegTestCase {
UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1").setRouting("0")
.setDoc("field1", "value1").get();
assertThat(updateResponse.getId(), equalTo("1"));
- assertThat(updateResponse.getVersion(), equalTo(2l));
+ assertThat(updateResponse.getVersion(), equalTo(2L));
try {
client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc("field1", "value1").get();
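SimpleRoutingIT relies on the fact that a document's shard is chosen from its routing value (the document id by default), so indexing with routing "0" and then searching with routing "1" finds nothing. Schematically (the hash function and variable names here are illustrative, not the engine's actual implementation):

    // The routing value, not the document itself, picks the primary shard;
    // a fixed routing therefore pins a document to one shard.
    int shardId = Math.floorMod(hash(routing != null ? routing : id), numberOfPrimaryShards);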
diff --git a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java
index 987aef90bc..64f6cc85b3 100644
--- a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java
+++ b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java
@@ -18,7 +18,6 @@
*/
package org.elasticsearch.script;
-import org.elasticsearch.common.ContextAndHeaderHolder;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;
@@ -39,36 +38,38 @@ public class FileScriptTests extends ESTestCase {
Path mockscript = scriptsDir.resolve("script1.mockscript");
Files.write(mockscript, "1".getBytes("UTF-8"));
settings = Settings.builder()
- .put("path.home", homeDir)
+ .put(Environment.PATH_HOME_SETTING.getKey(), homeDir)
// no file watching, so we don't need a ResourceWatcherService
- .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING, false)
+ .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false)
.put(settings)
.build();
Set<ScriptEngineService> engines = new HashSet<>(Collections.singletonList(new MockScriptEngine()));
- return new ScriptService(settings, new Environment(settings), engines, null, new ScriptContextRegistry(Collections.emptyList()));
+ ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class, MockScriptEngine.TYPES)));
+ ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
+ ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
+ return new ScriptService(settings, new Environment(settings), engines, null, scriptEngineRegistry, scriptContextRegistry, scriptSettings);
}
public void testFileScriptFound() throws Exception {
- ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
Settings settings = Settings.builder()
- .put("script.engine." + MockScriptEngine.NAME + ".file.aggs", false).build();
+ .put("script.engine." + MockScriptEngine.NAME + ".file.aggs", "false").build();
ScriptService scriptService = makeScriptService(settings);
Script script = new Script("script1", ScriptService.ScriptType.FILE, MockScriptEngine.NAME, null);
- assertNotNull(scriptService.compile(script, ScriptContext.Standard.SEARCH, contextAndHeaders, Collections.emptyMap()));
+ assertNotNull(scriptService.compile(script, ScriptContext.Standard.SEARCH, Collections.emptyMap()));
}
public void testAllOpsDisabled() throws Exception {
- ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
Settings settings = Settings.builder()
- .put("script.engine." + MockScriptEngine.NAME + ".file.aggs", false)
- .put("script.engine." + MockScriptEngine.NAME + ".file.search", false)
- .put("script.engine." + MockScriptEngine.NAME + ".file.mapping", false)
- .put("script.engine." + MockScriptEngine.NAME + ".file.update", false).build();
+ .put("script.engine." + MockScriptEngine.NAME + ".file.aggs", "false")
+ .put("script.engine." + MockScriptEngine.NAME + ".file.search", "false")
+ .put("script.engine." + MockScriptEngine.NAME + ".file.mapping", "false")
+ .put("script.engine." + MockScriptEngine.NAME + ".file.update", "false")
+ .put("script.engine." + MockScriptEngine.NAME + ".file.ingest", "false").build();
ScriptService scriptService = makeScriptService(settings);
Script script = new Script("script1", ScriptService.ScriptType.FILE, MockScriptEngine.NAME, null);
for (ScriptContext context : ScriptContext.Standard.values()) {
try {
- scriptService.compile(script, context, contextAndHeaders, Collections.emptyMap());
+ scriptService.compile(script, context, Collections.emptyMap());
fail(context.getKey() + " script should have been rejected");
} catch(Exception e) {
assertTrue(e.getMessage(), e.getMessage().contains("scripts of type [file], operation [" + context.getKey() + "] and lang [" + MockScriptEngine.NAME + "] are disabled"));
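FileScriptTests shows the new ScriptService wiring used throughout this commit: engines are declared in a ScriptEngineRegistry, contexts in a ScriptContextRegistry, the two combine into ScriptSettings, and all three are handed to the ScriptService constructor; compile() also loses its HasContextAndHeaders parameter. Condensed from the test above:

    ScriptEngineRegistry engineRegistry = new ScriptEngineRegistry(Collections.singletonList(
            new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class, MockScriptEngine.TYPES)));
    ScriptContextRegistry contextRegistry = new ScriptContextRegistry(Collections.emptyList());
    ScriptSettings scriptSettings = new ScriptSettings(engineRegistry, contextRegistry);
    ScriptService scriptService = new ScriptService(settings, new Environment(settings),
            engines, null /* no ResourceWatcherService */, engineRegistry, contextRegistry, scriptSettings);
    // Context-and-headers plumbing is gone from the compile call:
    CompiledScript compiled = scriptService.compile(script, ScriptContext.Standard.SEARCH, Collections.emptyMap());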
diff --git a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java
index 47adeabe02..144aedb7fd 100644
--- a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java
+++ b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java
@@ -19,7 +19,6 @@
package org.elasticsearch.script;
-import org.elasticsearch.common.ContextAndHeaderHolder;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
@@ -47,12 +46,12 @@ import static org.hamcrest.Matchers.notNullValue;
public class NativeScriptTests extends ESTestCase {
public void testNativeScript() throws InterruptedException {
- ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
Settings settings = Settings.settingsBuilder()
.put("name", "testNativeScript")
- .put("path.home", createTempDir())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.build();
- ScriptModule scriptModule = new ScriptModule(settings);
+ SettingsModule settingsModule = new SettingsModule(settings, new SettingsFilter(settings));
+ ScriptModule scriptModule = new ScriptModule(settingsModule);
scriptModule.registerScript("my", MyNativeScriptFactory.class);
Injector injector = new ModulesBuilder().add(
new EnvironmentModule(new Environment(settings)),
@@ -63,33 +62,34 @@ public class NativeScriptTests extends ESTestCase {
ScriptService scriptService = injector.getInstance(ScriptService.class);
ExecutableScript executable = scriptService.executable(new Script("my", ScriptType.INLINE, NativeScriptEngineService.NAME, null),
- ScriptContext.Standard.SEARCH, contextAndHeaders, Collections.emptyMap());
+ ScriptContext.Standard.SEARCH, Collections.emptyMap());
assertThat(executable.run().toString(), equalTo("test"));
terminate(injector.getInstance(ThreadPool.class));
}
public void testFineGrainedSettingsDontAffectNativeScripts() throws IOException {
- ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
Settings.Builder builder = Settings.settingsBuilder();
if (randomBoolean()) {
ScriptType scriptType = randomFrom(ScriptType.values());
- builder.put(ScriptModes.SCRIPT_SETTINGS_PREFIX + scriptType, randomFrom(ScriptMode.values()));
+ builder.put("script" + "." + scriptType.getScriptType(), randomFrom(ScriptMode.values()));
} else {
- String scriptContext = randomFrom(ScriptContext.Standard.values()).getKey();
- builder.put(ScriptModes.SCRIPT_SETTINGS_PREFIX + scriptContext, randomFrom(ScriptMode.values()));
+ ScriptContext scriptContext = randomFrom(ScriptContext.Standard.values());
+ builder.put("script" + "." + scriptContext.getKey(), randomFrom(ScriptMode.values()));
}
- Settings settings = builder.put("path.home", createTempDir()).build();
+ Settings settings = builder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build();
Environment environment = new Environment(settings);
ResourceWatcherService resourceWatcherService = new ResourceWatcherService(settings, null);
Map<String, NativeScriptFactory> nativeScriptFactoryMap = new HashMap<>();
nativeScriptFactoryMap.put("my", new MyNativeScriptFactory());
Set<ScriptEngineService> scriptEngineServices = singleton(new NativeScriptEngineService(settings, nativeScriptFactoryMap));
- ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(new ArrayList<ScriptContext.Plugin>());
- ScriptService scriptService = new ScriptService(settings, environment, scriptEngineServices, resourceWatcherService, scriptContextRegistry);
+ ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(NativeScriptEngineService.class, NativeScriptEngineService.TYPES)));
+ ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(new ArrayList<>());
+ ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
+ ScriptService scriptService = new ScriptService(settings, environment, scriptEngineServices, resourceWatcherService, scriptEngineRegistry, scriptContextRegistry, scriptSettings);
for (ScriptContext scriptContext : scriptContextRegistry.scriptContexts()) {
assertThat(scriptService.compile(new Script("my", ScriptType.INLINE, NativeScriptEngineService.NAME, null), scriptContext,
- contextAndHeaders, Collections.emptyMap()), notNullValue());
+ Collections.emptyMap()), notNullValue());
}
}
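The pattern in NativeScriptTests: native scripts are registered on the ScriptModule by name and later executed through the NativeScriptEngineService "language", which is why the fine-grained script.* settings deliberately do not affect them. Straight from the test:

    SettingsModule settingsModule = new SettingsModule(settings, new SettingsFilter(settings));
    ScriptModule scriptModule = new ScriptModule(settingsModule);
    scriptModule.registerScript("my", MyNativeScriptFactory.class);

    // Executed by name, with the native engine as the script "lang":
    ExecutableScript executable = scriptService.executable(
            new Script("my", ScriptType.INLINE, NativeScriptEngineService.NAME, null),
            ScriptContext.Standard.SEARCH, Collections.emptyMap());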
diff --git a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java
index 019eb7c74a..b6e8ee6833 100644
--- a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java
+++ b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java
@@ -19,7 +19,6 @@
package org.elasticsearch.script;
-import org.elasticsearch.common.ContextAndHeaderHolder;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;
@@ -38,27 +37,30 @@ public class ScriptContextTests extends ESTestCase {
ScriptService makeScriptService() throws Exception {
Settings settings = Settings.builder()
- .put("path.home", createTempDir())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
// no file watching, so we don't need a ResourceWatcherService
- .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING, false)
- .put("script." + PLUGIN_NAME + "_custom_globally_disabled_op", false)
- .put("script.engine." + MockScriptEngine.NAME + ".inline." + PLUGIN_NAME + "_custom_exp_disabled_op", false)
+ .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), "off")
+ .put("script." + PLUGIN_NAME + "_custom_globally_disabled_op", "false")
+ .put("script.engine." + MockScriptEngine.NAME + ".inline." + PLUGIN_NAME + "_custom_exp_disabled_op", "false")
.build();
Set<ScriptEngineService> engines = new HashSet<>(Collections.singletonList(new MockScriptEngine()));
+ ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class, MockScriptEngine.TYPES)));
List<ScriptContext.Plugin> customContexts = Arrays.asList(
new ScriptContext.Plugin(PLUGIN_NAME, "custom_op"),
new ScriptContext.Plugin(PLUGIN_NAME, "custom_exp_disabled_op"),
new ScriptContext.Plugin(PLUGIN_NAME, "custom_globally_disabled_op"));
- return new ScriptService(settings, new Environment(settings), engines, null, new ScriptContextRegistry(customContexts));
+ ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(customContexts);
+ ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
+
+ return new ScriptService(settings, new Environment(settings), engines, null, scriptEngineRegistry, scriptContextRegistry, scriptSettings);
}
public void testCustomGlobalScriptContextSettings() throws Exception {
- ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
ScriptService scriptService = makeScriptService();
for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) {
try {
Script script = new Script("1", scriptType, MockScriptEngine.NAME, null);
- scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_globally_disabled_op"), contextAndHeaders, Collections.emptyMap());
+ scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_globally_disabled_op"), Collections.emptyMap());
fail("script compilation should have been rejected");
} catch (ScriptException e) {
assertThat(e.getMessage(), containsString("scripts of type [" + scriptType + "], operation [" + PLUGIN_NAME + "_custom_globally_disabled_op] and lang [" + MockScriptEngine.NAME + "] are disabled"));
@@ -67,29 +69,27 @@ public class ScriptContextTests extends ESTestCase {
}
public void testCustomScriptContextSettings() throws Exception {
- ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
ScriptService scriptService = makeScriptService();
Script script = new Script("1", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, null);
try {
- scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_exp_disabled_op"), contextAndHeaders, Collections.emptyMap());
+ scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_exp_disabled_op"), Collections.emptyMap());
fail("script compilation should have been rejected");
} catch (ScriptException e) {
assertTrue(e.getMessage(), e.getMessage().contains("scripts of type [inline], operation [" + PLUGIN_NAME + "_custom_exp_disabled_op] and lang [" + MockScriptEngine.NAME + "] are disabled"));
}
// still works for other script contexts
- assertNotNull(scriptService.compile(script, ScriptContext.Standard.AGGS, contextAndHeaders, Collections.emptyMap()));
- assertNotNull(scriptService.compile(script, ScriptContext.Standard.SEARCH, contextAndHeaders, Collections.emptyMap()));
- assertNotNull(scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_op"), contextAndHeaders, Collections.emptyMap()));
+ assertNotNull(scriptService.compile(script, ScriptContext.Standard.AGGS, Collections.emptyMap()));
+ assertNotNull(scriptService.compile(script, ScriptContext.Standard.SEARCH, Collections.emptyMap()));
+ assertNotNull(scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_op"), Collections.emptyMap()));
}
public void testUnknownPluginScriptContext() throws Exception {
- ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
ScriptService scriptService = makeScriptService();
for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) {
try {
Script script = new Script("1", scriptType, MockScriptEngine.NAME, null);
- scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "unknown"), contextAndHeaders, Collections.emptyMap());
+ scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "unknown"), Collections.emptyMap());
fail("script compilation should have been rejected");
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage(), e.getMessage().contains("script context [" + PLUGIN_NAME + "_unknown] not supported"));
@@ -98,7 +98,6 @@ public class ScriptContextTests extends ESTestCase {
}
public void testUnknownCustomScriptContext() throws Exception {
- ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
ScriptContext context = new ScriptContext() {
@Override
public String getKey() {
@@ -109,7 +108,7 @@ public class ScriptContextTests extends ESTestCase {
for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) {
try {
Script script = new Script("1", scriptType, MockScriptEngine.NAME, null);
- scriptService.compile(script, context, contextAndHeaders, Collections.emptyMap());
+ scriptService.compile(script, context, Collections.emptyMap());
fail("script compilation should have been rejected");
} catch (IllegalArgumentException e) {
assertTrue(e.getMessage(), e.getMessage().contains("script context [test] not supported"));
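ScriptContextTests pins down the settings grammar for plugin-defined contexts: script.<plugin>_<operation> disables an operation globally, while script.engine.<lang>.<type>.<plugin>_<operation> targets one engine and script type, and compiling against a context that was never registered fails fast. For example, with the plugin and operation names taken from the test:

    List<ScriptContext.Plugin> customContexts = Arrays.asList(
            new ScriptContext.Plugin(PLUGIN_NAME, "custom_op"),
            new ScriptContext.Plugin(PLUGIN_NAME, "custom_exp_disabled_op"),
            new ScriptContext.Plugin(PLUGIN_NAME, "custom_globally_disabled_op"));
    Settings settings = Settings.builder()
            .put("script." + PLUGIN_NAME + "_custom_globally_disabled_op", "false")
            .put("script.engine." + MockScriptEngine.NAME + ".inline." + PLUGIN_NAME + "_custom_exp_disabled_op", "false")
            .build();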
diff --git a/core/src/test/java/org/elasticsearch/script/ScriptFieldIT.java b/core/src/test/java/org/elasticsearch/script/ScriptFieldIT.java
index 71a41750c9..8f20d64116 100644
--- a/core/src/test/java/org/elasticsearch/script/ScriptFieldIT.java
+++ b/core/src/test/java/org/elasticsearch/script/ScriptFieldIT.java
@@ -44,7 +44,7 @@ public class ScriptFieldIT extends ESIntegTestCase {
}
static int[] intArray = { Integer.MAX_VALUE, Integer.MIN_VALUE, 3 };
- static long[] longArray = { Long.MAX_VALUE, Long.MIN_VALUE, 9223372036854775807l };
+ static long[] longArray = { Long.MAX_VALUE, Long.MIN_VALUE, 9223372036854775807L };
static float[] floatArray = { Float.MAX_VALUE, Float.MIN_VALUE, 3.3f };
static double[] doubleArray = { Double.MAX_VALUE, Double.MIN_VALUE, 3.3d };
diff --git a/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java b/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java
index 0f00c2dd58..305cfc183e 100644
--- a/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java
+++ b/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java
@@ -27,9 +27,11 @@ import org.elasticsearch.test.ESTestCase;
import org.junit.After;
import org.junit.Before;
+import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
+import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -45,9 +47,10 @@ public class ScriptModesTests extends ESTestCase {
private static final Set<String> ALL_LANGS = unmodifiableSet(
newHashSet("custom", "test"));
- static final String[] ENABLE_VALUES = new String[]{"on", "true", "yes", "1"};
- static final String[] DISABLE_VALUES = new String[]{"off", "false", "no", "0"};
+ static final String[] ENABLE_VALUES = new String[]{"true"};
+ static final String[] DISABLE_VALUES = new String[]{"false"};
+ ScriptSettings scriptSettings;
ScriptContextRegistry scriptContextRegistry;
private ScriptContext[] scriptContexts;
private Map<String, ScriptEngineService> scriptEngines;
@@ -74,6 +77,10 @@ public class ScriptModesTests extends ESTestCase {
//add the native engine just to make sure it gets filtered out
new NativeScriptEngineService(Settings.EMPTY, Collections.<String, NativeScriptFactory>emptyMap()),
new CustomScriptEngineService()));
+ ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Arrays.asList(
+ new ScriptEngineRegistry.ScriptEngineRegistration(NativeScriptEngineService.class, NativeScriptEngineService.TYPES),
+ new ScriptEngineRegistry.ScriptEngineRegistration(CustomScriptEngineService.class, CustomScriptEngineService.TYPES)));
+ scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
checkedSettings = new HashSet<>();
assertAllSettingsWereChecked = true;
assertScriptModesNonNull = true;
@@ -100,14 +107,14 @@ public class ScriptModesTests extends ESTestCase {
}
public void testDefaultSettings() {
- this.scriptModes = new ScriptModes(scriptEngines, scriptContextRegistry, Settings.EMPTY);
+ this.scriptModes = new ScriptModes(scriptSettings, Settings.EMPTY);
assertScriptModesAllOps(ScriptMode.ON, ALL_LANGS, ScriptType.FILE);
assertScriptModesAllOps(ScriptMode.SANDBOX, ALL_LANGS, ScriptType.INDEXED, ScriptType.INLINE);
}
public void testMissingSetting() {
assertAllSettingsWereChecked = false;
- this.scriptModes = new ScriptModes(scriptEngines, scriptContextRegistry, Settings.EMPTY);
+ this.scriptModes = new ScriptModes(scriptSettings, Settings.EMPTY);
try {
scriptModes.getScriptMode("non_existing", randomFrom(ScriptType.values()), randomFrom(scriptContexts));
fail("Expected IllegalArgumentException");
@@ -130,9 +137,9 @@ public class ScriptModesTests extends ESTestCase {
ScriptType[] randomScriptTypes = randomScriptTypesSet.toArray(new ScriptType[randomScriptTypesSet.size()]);
Settings.Builder builder = Settings.builder();
for (int i = 0; i < randomInt; i++) {
- builder.put(ScriptModes.SCRIPT_SETTINGS_PREFIX + randomScriptTypes[i], randomScriptModes[i]);
+ builder.put("script" + "." + randomScriptTypes[i].getScriptType(), randomScriptModes[i]);
}
- this.scriptModes = new ScriptModes(scriptEngines, scriptContextRegistry, builder.build());
+ this.scriptModes = new ScriptModes(scriptSettings, builder.build());
for (int i = 0; i < randomInt; i++) {
assertScriptModesAllOps(randomScriptModes[i], ALL_LANGS, randomScriptTypes[i]);
@@ -162,9 +169,9 @@ public class ScriptModesTests extends ESTestCase {
ScriptContext[] randomScriptContexts = randomScriptContextsSet.toArray(new ScriptContext[randomScriptContextsSet.size()]);
Settings.Builder builder = Settings.builder();
for (int i = 0; i < randomInt; i++) {
- builder.put(ScriptModes.SCRIPT_SETTINGS_PREFIX + randomScriptContexts[i].getKey(), randomScriptModes[i]);
+ builder.put("script" + "." + randomScriptContexts[i].getKey(), randomScriptModes[i]);
}
- this.scriptModes = new ScriptModes(scriptEngines, scriptContextRegistry, builder.build());
+ this.scriptModes = new ScriptModes(scriptSettings, builder.build());
for (int i = 0; i < randomInt; i++) {
assertScriptModesAllTypes(randomScriptModes[i], ALL_LANGS, randomScriptContexts[i]);
@@ -177,10 +184,10 @@ public class ScriptModesTests extends ESTestCase {
public void testConflictingScriptTypeAndOpGenericSettings() {
ScriptContext scriptContext = randomFrom(scriptContexts);
- Settings.Builder builder = Settings.builder().put(ScriptModes.SCRIPT_SETTINGS_PREFIX + scriptContext.getKey(), randomFrom(DISABLE_VALUES))
- .put("script.indexed", randomFrom(ENABLE_VALUES)).put("script.inline", ScriptMode.SANDBOX);
+ Settings.Builder builder = Settings.builder().put("script" + "." + scriptContext.getKey(), randomFrom(DISABLE_VALUES))
+ .put("script.indexed", randomFrom(ENABLE_VALUES)).put("script.inline", "sandbox");
//operations generic settings have precedence over script type generic settings
- this.scriptModes = new ScriptModes(scriptEngines, scriptContextRegistry, builder.build());
+ this.scriptModes = new ScriptModes(scriptSettings, builder.build());
assertScriptModesAllTypes(ScriptMode.OFF, ALL_LANGS, scriptContext);
ScriptContext[] complementOf = complementOf(scriptContext);
assertScriptModes(ScriptMode.ON, ALL_LANGS, new ScriptType[]{ScriptType.FILE, ScriptType.INDEXED}, complementOf);
@@ -220,14 +227,10 @@ public class ScriptModesTests extends ESTestCase {
return copy.values().toArray(new ScriptContext[copy.size()]);
}
- private static String specificEngineOpSettings(String lang, ScriptType scriptType, ScriptContext scriptContext) {
- return ScriptModes.ENGINE_SETTINGS_PREFIX + "." + lang + "." + scriptType + "." + scriptContext.getKey();
- }
-
static Map<String, ScriptEngineService> buildScriptEnginesByLangMap(Set<ScriptEngineService> scriptEngines) {
Map<String, ScriptEngineService> builder = new HashMap<>();
for (ScriptEngineService scriptEngine : scriptEngines) {
- for (String type : scriptEngine.types()) {
+ for (String type : scriptEngine.getTypes()) {
builder.put(type, scriptEngine);
}
}
@@ -235,18 +238,21 @@ public class ScriptModesTests extends ESTestCase {
}
private static class CustomScriptEngineService implements ScriptEngineService {
+
+ public static final List<String> TYPES = Collections.unmodifiableList(Arrays.asList("custom", "test"));
+
@Override
- public String[] types() {
- return new String[]{"custom", "test"};
+ public List<String> getTypes() {
+ return TYPES;
}
@Override
- public String[] extensions() {
- return new String[0];
+ public List<String> getExtensions() {
+ return Collections.singletonList(TYPES.get(0));
}
@Override
- public boolean sandboxed() {
+ public boolean isSandboxed() {
return false;
}
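The ScriptEngineService SPI itself changes shape in this commit: array-returning types()/extensions()/sandboxed() become List-returning getTypes()/getExtensions()/isSandboxed(), which lets registrations share one immutable TYPES constant. The renamed accessors in isolation (the rest of the interface is elided):

    public class CustomScriptEngineService implements ScriptEngineService {

        public static final List<String> TYPES = Collections.unmodifiableList(Arrays.asList("custom", "test"));

        @Override
        public List<String> getTypes() {
            return TYPES;
        }

        @Override
        public List<String> getExtensions() {
            return Collections.singletonList(TYPES.get(0));
        }

        @Override
        public boolean isSandboxed() {
            return false;
        }

        // remaining ScriptEngineService methods elided
    }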
diff --git a/core/src/test/java/org/elasticsearch/script/ScriptParameterParserTests.java b/core/src/test/java/org/elasticsearch/script/ScriptParameterParserTests.java
index c3c80c5085..8cadbb3e95 100644
--- a/core/src/test/java/org/elasticsearch/script/ScriptParameterParserTests.java
+++ b/core/src/test/java/org/elasticsearch/script/ScriptParameterParserTests.java
@@ -891,7 +891,7 @@ public class ScriptParameterParserTests extends ESTestCase {
public void testConfigMultipleParametersInlineWrongType() throws IOException {
Map<String, Object> config = new HashMap<>();
- config.put("foo", 1l);
+ config.put("foo", 1L);
config.put("bar_file", "barScriptValue");
config.put("baz_id", "bazScriptValue");
config.put("lang", "myLang");
@@ -917,7 +917,7 @@ public class ScriptParameterParserTests extends ESTestCase {
public void testConfigMultipleParametersFileWrongType() throws IOException {
Map<String, Object> config = new HashMap<>();
config.put("foo", "fooScriptValue");
- config.put("bar_file", 1l);
+ config.put("bar_file", 1L);
config.put("baz_id", "bazScriptValue");
config.put("lang", "myLang");
Set<String> parameters = new HashSet<>();
@@ -944,7 +944,7 @@ public class ScriptParameterParserTests extends ESTestCase {
Map<String, Object> config = new HashMap<>();
config.put("foo", "fooScriptValue");
config.put("bar_file", "barScriptValue");
- config.put("baz_id", 1l);
+ config.put("baz_id", 1L);
config.put("lang", "myLang");
Set<String> parameters = new HashSet<>();
parameters.add("foo");
@@ -970,7 +970,7 @@ public class ScriptParameterParserTests extends ESTestCase {
config.put("foo", "fooScriptValue");
config.put("bar_file", "barScriptValue");
config.put("baz_id", "bazScriptValue");
- config.put("lang", 1l);
+ config.put("lang", 1L);
Set<String> parameters = new HashSet<>();
parameters.add("foo");
parameters.add("bar");
diff --git a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java
index 3c939e7e91..0825da4d4d 100644
--- a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java
@@ -18,8 +18,6 @@
*/
package org.elasticsearch.script;
-import org.elasticsearch.common.ContextAndHeaderHolder;
-import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.settings.Settings;
@@ -33,8 +31,10 @@ import org.junit.Before;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
+import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
@@ -49,7 +49,9 @@ public class ScriptServiceTests extends ESTestCase {
private ResourceWatcherService resourceWatcherService;
private ScriptEngineService scriptEngineService;
private Map<String, ScriptEngineService> scriptEnginesByLangMap;
+ private ScriptEngineRegistry scriptEngineRegistry;
private ScriptContextRegistry scriptContextRegistry;
+ private ScriptSettings scriptSettings;
private ScriptContext[] scriptContexts;
private ScriptService scriptService;
private Path scriptsFilePath;
@@ -67,8 +69,8 @@ public class ScriptServiceTests extends ESTestCase {
public void setup() throws IOException {
Path genericConfigFolder = createTempDir();
baseSettings = settingsBuilder()
- .put("path.home", createTempDir().toString())
- .put("path.conf", genericConfigFolder)
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
+ .put(Environment.PATH_CONF_SETTING.getKey(), genericConfigFolder)
.build();
resourceWatcherService = new ResourceWatcherService(baseSettings, null);
scriptEngineService = new TestEngineService();
@@ -89,7 +91,9 @@ public class ScriptServiceTests extends ESTestCase {
String context = plugin + "_" + operation;
contexts.put(context, new ScriptContext.Plugin(plugin, operation));
}
+ scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(TestEngineService.class, TestEngineService.TYPES)));
scriptContextRegistry = new ScriptContextRegistry(contexts.values());
+ scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
scriptContexts = scriptContextRegistry.scriptContexts().toArray(new ScriptContext[scriptContextRegistry.scriptContexts().size()]);
logger.info("--> setup script service");
scriptsFilePath = genericConfigFolder.resolve("scripts");
@@ -99,9 +103,9 @@ public class ScriptServiceTests extends ESTestCase {
private void buildScriptService(Settings additionalSettings) throws IOException {
Settings finalSettings = Settings.builder().put(baseSettings).put(additionalSettings).build();
Environment environment = new Environment(finalSettings);
- scriptService = new ScriptService(finalSettings, environment, Collections.singleton(scriptEngineService), resourceWatcherService, scriptContextRegistry) {
+ scriptService = new ScriptService(finalSettings, environment, Collections.singleton(scriptEngineService), resourceWatcherService, scriptEngineRegistry, scriptContextRegistry, scriptSettings) {
@Override
- String getScriptFromIndex(String scriptLang, String id, HasContextAndHeaders headersContext) {
+ String getScriptFromIndex(String scriptLang, String id) {
//mock the script that gets retrieved from an index
return "100";
}
@@ -119,7 +123,6 @@ public class ScriptServiceTests extends ESTestCase {
public void testScriptsWithoutExtensions() throws IOException {
- ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
buildScriptService(Settings.EMPTY);
logger.info("--> setup two test files one with extension and another without");
Path testFileNoExt = scriptsFilePath.resolve("test_no_ext");
@@ -130,7 +133,7 @@ public class ScriptServiceTests extends ESTestCase {
logger.info("--> verify that file with extension was correctly processed");
CompiledScript compiledScript = scriptService.compile(new Script("test_script", ScriptType.FILE, "test", null),
- ScriptContext.Standard.SEARCH, contextAndHeaders, Collections.emptyMap());
+ ScriptContext.Standard.SEARCH, Collections.emptyMap());
assertThat(compiledScript.compiled(), equalTo((Object) "compiled_test_file"));
logger.info("--> delete both files");
@@ -141,7 +144,7 @@ public class ScriptServiceTests extends ESTestCase {
logger.info("--> verify that file with extension was correctly removed");
try {
scriptService.compile(new Script("test_script", ScriptType.FILE, "test", null), ScriptContext.Standard.SEARCH,
- contextAndHeaders, Collections.emptyMap());
+ Collections.emptyMap());
fail("the script test_script should no longer exist");
} catch (IllegalArgumentException ex) {
assertThat(ex.getMessage(), containsString("Unable to find on disk file script [test_script] using lang [test]"));
@@ -149,62 +152,57 @@ public class ScriptServiceTests extends ESTestCase {
}
public void testInlineScriptCompiledOnceCache() throws IOException {
- ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
buildScriptService(Settings.EMPTY);
CompiledScript compiledScript1 = scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null),
- randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
+ randomFrom(scriptContexts), Collections.emptyMap());
CompiledScript compiledScript2 = scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null),
- randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
+ randomFrom(scriptContexts), Collections.emptyMap());
assertThat(compiledScript1.compiled(), sameInstance(compiledScript2.compiled()));
}
public void testInlineScriptCompiledOnceMultipleLangAcronyms() throws IOException {
- ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
buildScriptService(Settings.EMPTY);
CompiledScript compiledScript1 = scriptService.compile(new Script("script", ScriptType.INLINE, "test", null),
- randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
+ randomFrom(scriptContexts), Collections.emptyMap());
CompiledScript compiledScript2 = scriptService.compile(new Script("script", ScriptType.INLINE, "test2", null),
- randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
+ randomFrom(scriptContexts), Collections.emptyMap());
assertThat(compiledScript1.compiled(), sameInstance(compiledScript2.compiled()));
}
public void testFileScriptCompiledOnceMultipleLangAcronyms() throws IOException {
- ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
buildScriptService(Settings.EMPTY);
createFileScripts("test");
CompiledScript compiledScript1 = scriptService.compile(new Script("file_script", ScriptType.FILE, "test", null),
- randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
+ randomFrom(scriptContexts), Collections.emptyMap());
CompiledScript compiledScript2 = scriptService.compile(new Script("file_script", ScriptType.FILE, "test2", null),
- randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
+ randomFrom(scriptContexts), Collections.emptyMap());
assertThat(compiledScript1.compiled(), sameInstance(compiledScript2.compiled()));
}
public void testDefaultBehaviourFineGrainedSettings() throws IOException {
- ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
Settings.Builder builder = Settings.builder();
//rarely inject the default settings, which have no effect
if (rarely()) {
builder.put("script.file", randomFrom(ScriptModesTests.ENABLE_VALUES));
}
if (rarely()) {
- builder.put("script.indexed", ScriptMode.SANDBOX);
+ builder.put("script.indexed", "sandbox");
}
if (rarely()) {
- builder.put("script.inline", ScriptMode.SANDBOX);
+ builder.put("script.inline", "sandbox");
}
buildScriptService(builder.build());
createFileScripts("groovy", "mustache", "test");
for (ScriptContext scriptContext : scriptContexts) {
//custom engine is sandboxed, all scripts are enabled by default
- assertCompileAccepted("test", "script", ScriptType.INLINE, scriptContext, contextAndHeaders);
- assertCompileAccepted("test", "script", ScriptType.INDEXED, scriptContext, contextAndHeaders);
- assertCompileAccepted("test", "file_script", ScriptType.FILE, scriptContext, contextAndHeaders);
+ assertCompileAccepted("test", "script", ScriptType.INLINE, scriptContext);
+ assertCompileAccepted("test", "script", ScriptType.INDEXED, scriptContext);
+ assertCompileAccepted("test", "file_script", ScriptType.FILE, scriptContext);
}
}
public void testFineGrainedSettings() throws IOException {
- ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
//collect the fine-grained settings to set for this run
int numScriptSettings = randomIntBetween(0, ScriptType.values().length);
Map<ScriptType, ScriptMode> scriptSourceSettings = new HashMap<>();
@@ -216,11 +214,11 @@ public class ScriptServiceTests extends ESTestCase {
scriptSourceSettings.put(scriptType, randomFrom(ScriptMode.values()));
}
int numScriptContextSettings = randomIntBetween(0, this.scriptContextRegistry.scriptContexts().size());
- Map<String, ScriptMode> scriptContextSettings = new HashMap<>();
+ Map<ScriptContext, ScriptMode> scriptContextSettings = new HashMap<>();
for (int i = 0; i < numScriptContextSettings; i++) {
- String scriptContext;
+ ScriptContext scriptContext;
do {
- scriptContext = randomFrom(this.scriptContexts).getKey();
+ scriptContext = randomFrom(this.scriptContexts);
} while (scriptContextSettings.containsKey(scriptContext));
scriptContextSettings.put(scriptContext, randomFrom(ScriptMode.values()));
}
@@ -231,7 +229,7 @@ public class ScriptServiceTests extends ESTestCase {
do {
ScriptType scriptType = randomFrom(ScriptType.values());
ScriptContext scriptContext = randomFrom(this.scriptContexts);
- settingKey = scriptEngineService.types()[0] + "." + scriptType + "." + scriptContext.getKey();
+ settingKey = scriptEngineService.getTypes().get(0) + "." + scriptType + "." + scriptContext.getKey();
} while (engineSettings.containsKey(settingKey));
engineSettings.put(settingKey, randomFrom(ScriptMode.values()));
}
@@ -240,26 +238,27 @@ public class ScriptServiceTests extends ESTestCase {
for (Map.Entry<ScriptType, ScriptMode> entry : scriptSourceSettings.entrySet()) {
switch (entry.getValue()) {
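+ // translate each randomized ScriptMode into its settings value: "true", "false", or "sandbox"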
case ON:
- builder.put(ScriptModes.SCRIPT_SETTINGS_PREFIX + entry.getKey(), randomFrom(ScriptModesTests.ENABLE_VALUES));
+ builder.put("script" + "." + entry.getKey().getScriptType(), "true");
break;
case OFF:
- builder.put(ScriptModes.SCRIPT_SETTINGS_PREFIX + entry.getKey(), randomFrom(ScriptModesTests.DISABLE_VALUES));
+ builder.put("script" + "." + entry.getKey().getScriptType(), "false");
break;
case SANDBOX:
- builder.put(ScriptModes.SCRIPT_SETTINGS_PREFIX + entry.getKey(), ScriptMode.SANDBOX);
+ builder.put("script" + "." + entry.getKey().getScriptType(), "sandbox");
break;
}
}
- for (Map.Entry<String, ScriptMode> entry : scriptContextSettings.entrySet()) {
+ for (Map.Entry<ScriptContext, ScriptMode> entry : scriptContextSettings.entrySet()) {
switch (entry.getValue()) {
case ON:
- builder.put(ScriptModes.SCRIPT_SETTINGS_PREFIX + entry.getKey(), randomFrom(ScriptModesTests.ENABLE_VALUES));
+ builder.put("script" + "." + entry.getKey().getKey(), "true");
break;
+
case OFF:
- builder.put(ScriptModes.SCRIPT_SETTINGS_PREFIX + entry.getKey(), randomFrom(ScriptModesTests.DISABLE_VALUES));
+ builder.put("script" + "." + entry.getKey().getKey(), "false");
break;
case SANDBOX:
- builder.put(ScriptModes.SCRIPT_SETTINGS_PREFIX + entry.getKey(), ScriptMode.SANDBOX);
+ builder.put("script" + "." + entry.getKey().getKey(), "sandbox");
break;
}
}
@@ -268,16 +267,16 @@ public class ScriptServiceTests extends ESTestCase {
String part1 = entry.getKey().substring(0, delimiter);
String part2 = entry.getKey().substring(delimiter + 1);
- String lang = randomFrom(scriptEnginesByLangMap.get(part1).types());
+ String lang = randomFrom(scriptEnginesByLangMap.get(part1).getTypes());
switch (entry.getValue()) {
case ON:
- builder.put(ScriptModes.ENGINE_SETTINGS_PREFIX + "." + lang + "." + part2, randomFrom(ScriptModesTests.ENABLE_VALUES));
+ builder.put("script.engine" + "." + lang + "." + part2, "true");
break;
case OFF:
- builder.put(ScriptModes.ENGINE_SETTINGS_PREFIX + "." + lang + "." + part2, randomFrom(ScriptModesTests.DISABLE_VALUES));
+ builder.put("script.engine" + "." + lang + "." + part2, "false");
break;
case SANDBOX:
- builder.put(ScriptModes.ENGINE_SETTINGS_PREFIX + "." + lang + "." + part2, ScriptMode.SANDBOX);
+ builder.put("script.engine" + "." + lang + "." + part2, "sandbox");
break;
}
}
@@ -291,9 +290,9 @@ public class ScriptServiceTests extends ESTestCase {
String script = scriptType == ScriptType.FILE ? "file_script" : "script";
for (ScriptContext scriptContext : this.scriptContexts) {
//fallback mechanism: 1) engine specific settings 2) op based settings 3) source based settings
- ScriptMode scriptMode = engineSettings.get(scriptEngineService.types()[0] + "." + scriptType + "." + scriptContext.getKey());
+ ScriptMode scriptMode = engineSettings.get(scriptEngineService.getTypes().get(0) + "." + scriptType + "." + scriptContext.getKey());
if (scriptMode == null) {
- scriptMode = scriptContextSettings.get(scriptContext.getKey());
+ scriptMode = scriptContextSettings.get(scriptContext);
}
if (scriptMode == null) {
scriptMode = scriptSourceSettings.get(scriptType);
@@ -302,19 +301,19 @@ public class ScriptServiceTests extends ESTestCase {
scriptMode = DEFAULT_SCRIPT_MODES.get(scriptType);
}
- for (String lang : scriptEngineService.types()) {
+ for (String lang : scriptEngineService.getTypes()) {
switch (scriptMode) {
case ON:
- assertCompileAccepted(lang, script, scriptType, scriptContext, contextAndHeaders);
+ assertCompileAccepted(lang, script, scriptType, scriptContext);
break;
case OFF:
- assertCompileRejected(lang, script, scriptType, scriptContext, contextAndHeaders);
+ assertCompileRejected(lang, script, scriptType, scriptContext);
break;
case SANDBOX:
- if (scriptEngineService.sandboxed()) {
- assertCompileAccepted(lang, script, scriptType, scriptContext, contextAndHeaders);
+ if (scriptEngineService.isSandboxed()) {
+ assertCompileAccepted(lang, script, scriptType, scriptContext);
} else {
- assertCompileRejected(lang, script, scriptType, scriptContext, contextAndHeaders);
+ assertCompileRejected(lang, script, scriptType, scriptContext);
}
break;
}
@@ -324,7 +323,6 @@ public class ScriptServiceTests extends ESTestCase {
}
public void testCompileNonRegisteredContext() throws IOException {
- ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
buildScriptService(Settings.EMPTY);
String pluginName;
String unknownContext;
@@ -333,10 +331,10 @@ public class ScriptServiceTests extends ESTestCase {
unknownContext = randomAsciiOfLength(randomIntBetween(1, 30));
} while(scriptContextRegistry.isSupportedContext(new ScriptContext.Plugin(pluginName, unknownContext)));
- for (String type : scriptEngineService.types()) {
+ for (String type : scriptEngineService.getTypes()) {
try {
scriptService.compile(new Script("test", randomFrom(ScriptType.values()), type, null), new ScriptContext.Plugin(
- pluginName, unknownContext), contextAndHeaders, Collections.emptyMap());
+ pluginName, unknownContext), Collections.emptyMap());
fail("script compilation should have been rejected");
} catch(IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("script context [" + pluginName + "_" + unknownContext + "] not supported"));
@@ -345,16 +343,14 @@ public class ScriptServiceTests extends ESTestCase {
}
public void testCompileCountedInCompilationStats() throws IOException {
- ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
buildScriptService(Settings.EMPTY);
- scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
+ scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap());
assertEquals(1L, scriptService.stats().getCompilations());
}
public void testExecutableCountedInCompilationStats() throws IOException {
- ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
buildScriptService(Settings.EMPTY);
- scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
+ scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap());
assertEquals(1L, scriptService.stats().getCompilations());
}
@@ -365,52 +361,56 @@ public class ScriptServiceTests extends ESTestCase {
}
public void testMultipleCompilationsCountedInCompilationStats() throws IOException {
- ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
buildScriptService(Settings.EMPTY);
int numberOfCompilations = randomIntBetween(1, 1024);
for (int i = 0; i < numberOfCompilations; i++) {
scriptService
- .compile(new Script(i + " + " + i, ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
+ .compile(new Script(i + " + " + i, ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap());
}
assertEquals(numberOfCompilations, scriptService.stats().getCompilations());
}
public void testCompilationStatsOnCacheHit() throws IOException {
- ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
Settings.Builder builder = Settings.builder();
- builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING, 1);
+ builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getKey(), 1);
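+ // a cache of size one still serves both calls below from a single compilation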
buildScriptService(builder.build());
- scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
- scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
+ scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap());
+ scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap());
assertEquals(1L, scriptService.stats().getCompilations());
}
public void testFileScriptCountedInCompilationStats() throws IOException {
- ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
buildScriptService(Settings.EMPTY);
createFileScripts("test");
- scriptService.compile(new Script("file_script", ScriptType.FILE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
+ scriptService.compile(new Script("file_script", ScriptType.FILE, "test", null), randomFrom(scriptContexts), Collections.emptyMap());
assertEquals(1L, scriptService.stats().getCompilations());
}
public void testIndexedScriptCountedInCompilationStats() throws IOException {
- ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
buildScriptService(Settings.EMPTY);
- scriptService.compile(new Script("script", ScriptType.INDEXED, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
+ scriptService.compile(new Script("script", ScriptType.INDEXED, "test", null), randomFrom(scriptContexts), Collections.emptyMap());
assertEquals(1L, scriptService.stats().getCompilations());
}
public void testCacheEvictionCountedInCacheEvictionsStats() throws IOException {
- ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
Settings.Builder builder = Settings.builder();
- builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING, 1);
+ builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getKey(), 1);
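+ // with a size-one cache, compiling a second distinct script evicts the first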
buildScriptService(builder.build());
- scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
- scriptService.executable(new Script("2+2", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap());
+ scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap());
+ scriptService.executable(new Script("2+2", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap());
assertEquals(2L, scriptService.stats().getCompilations());
assertEquals(1L, scriptService.stats().getCacheEvictions());
}
+ public void testDefaultLanguage() throws IOException {
+ Settings.Builder builder = Settings.builder();
+ builder.put("script.default_lang", "test");
+ buildScriptService(builder.build());
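+ // the Script below passes null for lang, so compilation should fall back to the "script.default_lang" configured above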
+ CompiledScript script =
+ scriptService.compile(new Script("1 + 1", ScriptType.INLINE, null, null), randomFrom(scriptContexts), Collections.emptyMap());
+ assertEquals(script.lang(), "test");
+ }
+
private void createFileScripts(String... langs) throws IOException {
for (String lang : langs) {
Path scriptPath = scriptsFilePath.resolve("file_script." + lang);
@@ -419,35 +419,37 @@ public class ScriptServiceTests extends ESTestCase {
resourceWatcherService.notifyNow();
}
- private void assertCompileRejected(String lang, String script, ScriptType scriptType, ScriptContext scriptContext,
- HasContextAndHeaders contextAndHeaders) {
+ private void assertCompileRejected(String lang, String script, ScriptType scriptType, ScriptContext scriptContext) {
try {
- scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, contextAndHeaders, Collections.emptyMap());
+ scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, Collections.emptyMap());
fail("compile should have been rejected for lang [" + lang + "], script_type [" + scriptType + "], scripted_op [" + scriptContext + "]");
} catch(ScriptException e) {
//all good
}
}
- private void assertCompileAccepted(String lang, String script, ScriptType scriptType, ScriptContext scriptContext,
- HasContextAndHeaders contextAndHeaders) {
- assertThat(scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, contextAndHeaders, Collections.emptyMap()), notNullValue());
+ private void assertCompileAccepted(String lang, String script, ScriptType scriptType, ScriptContext scriptContext) {
+ assertThat(scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, Collections.emptyMap()), notNullValue());
}
public static class TestEngineService implements ScriptEngineService {
+ public static final List<String> TYPES = Collections.unmodifiableList(Arrays.asList("test", "test2"));
+
+ public static final List<String> EXTENSIONS = Collections.unmodifiableList(Arrays.asList("test", "tst"));
+
@Override
- public String[] types() {
- return new String[] {"test", "test2"};
+ public List<String> getTypes() {
+ return TYPES;
}
@Override
- public String[] extensions() {
- return new String[] {"test", "tst"};
+ public List<String> getExtensions() {
+ return EXTENSIONS;
}
@Override
- public boolean sandboxed() {
+ public boolean isSandboxed() {
return true;
}
diff --git a/core/src/test/java/org/elasticsearch/script/ScriptSettingsTests.java b/core/src/test/java/org/elasticsearch/script/ScriptSettingsTests.java
new file mode 100644
index 0000000000..eeae3622b0
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/script/ScriptSettingsTests.java
@@ -0,0 +1,144 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.script;
+
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.search.lookup.SearchLookup;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.anyOf;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+
+public class ScriptSettingsTests extends ESTestCase {
+
+ public void testConflictingModesForLanguage() {
+ ScriptEngineRegistry scriptEngineRegistry =
+ new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(CustomScriptEngineService.class, CustomScriptEngineService.TYPES)));
+ ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
+ ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
+ ScriptService.ScriptType scriptType = randomFrom(ScriptService.ScriptType.values());
+ ScriptContext scriptContext = randomFrom(ScriptContext.Standard.values());
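+ // give two of the engine's languages opposite modes; ScriptSettings must reject this as a conflict for the third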
+ Settings settings =
+ Settings
+ .builder()
+ .put(ScriptModes.getKey("test1", scriptType, scriptContext), "off")
+ .put(ScriptModes.getKey("test2", scriptType, scriptContext), "on")
+ .build();
+ boolean sawConflictingSettings = false;
+ for (Setting<ScriptMode> scriptModeSetting : scriptSettings.getScriptLanguageSettings()) {
+ if (scriptModeSetting.getKey().startsWith(ScriptModes.getKey("test3", scriptType, scriptContext))) {
+ try {
+ scriptModeSetting.get(settings);
+ fail("should have seen conflicting settings");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), anyOf(containsString("conflicting settings [{off=[test1], on=[test2]}] for language [test3]"), containsString("conflicting settings [{on=[test2], off=[test1]}] for language [test3]")));
+ sawConflictingSettings = true;
+ }
+ }
+ }
+ assertTrue(sawConflictingSettings);
+ }
+
+ public void testDefaultLanguageIsGroovy() {
+ ScriptEngineRegistry scriptEngineRegistry =
+ new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(CustomScriptEngineService.class, CustomScriptEngineService.TYPES)));
+ ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
+ ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
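+ // no "script.default_lang" is configured, so the built-in default should be "groovy"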
+ assertThat(scriptSettings.getDefaultScriptLanguageSetting().get(Settings.EMPTY), equalTo("groovy"));
+ }
+
+ public void testCustomDefaultLanguage() {
+ ScriptEngineRegistry scriptEngineRegistry =
+ new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(CustomScriptEngineService.class, CustomScriptEngineService.TYPES)));
+ ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
+ ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
+ String defaultLanguage = randomFrom(CustomScriptEngineService.TYPES);
+ Settings settings = Settings.builder().put("script.default_lang", defaultLanguage).build();
+ assertThat(scriptSettings.getDefaultScriptLanguageSetting().get(settings), equalTo(defaultLanguage));
+ }
+
+ public void testInvalidDefaultLanguage() {
+ ScriptEngineRegistry scriptEngineRegistry =
+ new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(CustomScriptEngineService.class, CustomScriptEngineService.TYPES)));
+ ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
+ ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
+ Settings settings = Settings.builder().put("script.default_lang", "C++").build();
+ try {
+ scriptSettings.getDefaultScriptLanguageSetting().get(settings);
+ fail("should have seen unregistered default language");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), containsString("unregistered default language [C++]"));
+ }
+ }
+
+ private static class CustomScriptEngineService implements ScriptEngineService {
+
+ public static final List<String> TYPES = Collections.unmodifiableList(Arrays.asList("test1", "test2", "test3"));
+
+ @Override
+ public List<String> getTypes() {
+ return TYPES;
+ }
+
+ @Override
+ public List<String> getExtensions() {
+ return Collections.singletonList(TYPES.get(0));
+ }
+
+ @Override
+ public boolean isSandboxed() {
+ return false;
+ }
+
+ @Override
+ public Object compile(String script, Map<String, String> params) {
+ return null;
+ }
+
+ @Override
+ public ExecutableScript executable(CompiledScript compiledScript, @Nullable Map<String, Object> vars) {
+ return null;
+ }
+
+ @Override
+ public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, @Nullable Map<String, Object> vars) {
+ return null;
+ }
+
+ @Override
+ public void close() {
+
+ }
+
+ @Override
+ public void scriptRemoved(@Nullable CompiledScript script) {
+
+ }
+ }
+
+}
diff --git a/core/src/test/java/org/elasticsearch/search/SearchWithRejectionsIT.java b/core/src/test/java/org/elasticsearch/search/SearchWithRejectionsIT.java
index d3e3de5fda..63cbdb56fd 100644
--- a/core/src/test/java/org/elasticsearch/search/SearchWithRejectionsIT.java
+++ b/core/src/test/java/org/elasticsearch/search/SearchWithRejectionsIT.java
@@ -50,7 +50,7 @@ public class SearchWithRejectionsIT extends ESIntegTestCase {
client().prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value").execute().actionGet();
}
IndicesStatsResponse indicesStats = client().admin().indices().prepareStats().execute().actionGet();
- assertThat(indicesStats.getTotal().getSearch().getOpenContexts(), equalTo(0l));
+ assertThat(indicesStats.getTotal().getSearch().getOpenContexts(), equalTo(0L));
refresh();
int numSearches = 10;
@@ -71,6 +71,6 @@ public class SearchWithRejectionsIT extends ESIntegTestCase {
}
awaitBusy(() -> client().admin().indices().prepareStats().execute().actionGet().getTotal().getSearch().getOpenContexts() == 0, 1, TimeUnit.SECONDS);
indicesStats = client().admin().indices().prepareStats().execute().actionGet();
- assertThat(indicesStats.getTotal().getSearch().getOpenContexts(), equalTo(0l));
+ assertThat(indicesStats.getTotal().getSearch().getOpenContexts(), equalTo(0L));
}
}
diff --git a/core/src/test/java/org/elasticsearch/search/StressSearchServiceReaperIT.java b/core/src/test/java/org/elasticsearch/search/StressSearchServiceReaperIT.java
index addfe14c48..9ea5ec93f1 100644
--- a/core/src/test/java/org/elasticsearch/search/StressSearchServiceReaperIT.java
+++ b/core/src/test/java/org/elasticsearch/search/StressSearchServiceReaperIT.java
@@ -40,7 +40,7 @@ public class StressSearchServiceReaperIT extends ESIntegTestCase {
protected Settings nodeSettings(int nodeOrdinal) {
// very frequent checks
return Settings.builder().put(super.nodeSettings(nodeOrdinal))
- .put(SearchService.KEEPALIVE_INTERVAL_KEY, TimeValue.timeValueMillis(1)).build();
+ .put(SearchService.KEEPALIVE_INTERVAL_SETTING.getKey(), TimeValue.timeValueMillis(1)).build();
}
// see issue #5165 - this test fails each time without the fix in pull #5170
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregationsBinaryIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregationsBinaryIT.java
index 80227ff7f6..1b8d0bb785 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregationsBinaryIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregationsBinaryIT.java
@@ -85,7 +85,7 @@ public class AggregationsBinaryIT extends ESIntegTestCase {
Terms.Bucket bucket = terms.getBucketByKey("val" + i);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo("val" + i));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
Aggregations subAggs = bucket.getAggregations();
assertThat(subAggs, notNullValue());
assertThat(subAggs.asList().size(), equalTo(1));
@@ -95,7 +95,7 @@ public class AggregationsBinaryIT extends ESIntegTestCase {
assertThat(subTermsBuckets, notNullValue());
assertThat(subTermsBuckets.size(), equalTo(1));
assertThat(((Number) subTermsBuckets.get(0).getKey()).intValue(), equalTo(i));
- assertThat(subTermsBuckets.get(0).getDocCount(), equalTo(1l));
+ assertThat(subTermsBuckets.get(0).getDocCount(), equalTo(1L));
}
}
@@ -126,7 +126,7 @@ public class AggregationsBinaryIT extends ESIntegTestCase {
Terms.Bucket bucket = terms.getBucketByKey("val" + i);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo("val" + i));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
Aggregations subAggs = bucket.getAggregations();
assertThat(subAggs, notNullValue());
assertThat(subAggs.asList().size(), equalTo(1));
@@ -136,7 +136,7 @@ public class AggregationsBinaryIT extends ESIntegTestCase {
assertThat(subTermsBuckets, notNullValue());
assertThat(subTermsBuckets.size(), equalTo(1));
assertThat(((Number) subTermsBuckets.get(0).getKey()).intValue(), equalTo(i));
- assertThat(subTermsBuckets.get(0).getDocCount(), equalTo(1l));
+ assertThat(subTermsBuckets.get(0).getDocCount(), equalTo(1L));
}
}
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/CombiIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/CombiIT.java
index 782ac3225f..a07d0714b6 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/CombiIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/CombiIT.java
@@ -131,7 +131,7 @@ public class CombiIT extends ESIntegTestCase {
.collectMode(aggCollectionMode )))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), Matchers.equalTo(0l));
+ assertThat(searchResponse.getHits().getTotalHits(), Matchers.equalTo(0L));
Histogram values = searchResponse.getAggregations().get("values");
assertThat(values, notNullValue());
assertThat(values.getBuckets().isEmpty(), is(true));
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java
index e0c7d2352c..2dcb79b8d5 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java
@@ -195,13 +195,13 @@ public class ChildrenIT extends ESIntegTestCase {
Terms.Bucket categoryBucket = categoryTerms.getBucketByKey("a");
assertThat(categoryBucket.getKeyAsString(), equalTo("a"));
- assertThat(categoryBucket.getDocCount(), equalTo(3l));
+ assertThat(categoryBucket.getDocCount(), equalTo(3L));
Children childrenBucket = categoryBucket.getAggregations().get("to_comment");
assertThat(childrenBucket.getName(), equalTo("to_comment"));
- assertThat(childrenBucket.getDocCount(), equalTo(2l));
+ assertThat(childrenBucket.getDocCount(), equalTo(2L));
TopHits topHits = childrenBucket.getAggregations().get("top_comments");
- assertThat(topHits.getHits().totalHits(), equalTo(2l));
+ assertThat(topHits.getHits().totalHits(), equalTo(2L));
assertThat(topHits.getHits().getAt(0).getId(), equalTo("a"));
assertThat(topHits.getHits().getAt(0).getType(), equalTo("comment"));
assertThat(topHits.getHits().getAt(1).getId(), equalTo("c"));
@@ -209,25 +209,25 @@ public class ChildrenIT extends ESIntegTestCase {
categoryBucket = categoryTerms.getBucketByKey("b");
assertThat(categoryBucket.getKeyAsString(), equalTo("b"));
- assertThat(categoryBucket.getDocCount(), equalTo(2l));
+ assertThat(categoryBucket.getDocCount(), equalTo(2L));
childrenBucket = categoryBucket.getAggregations().get("to_comment");
assertThat(childrenBucket.getName(), equalTo("to_comment"));
- assertThat(childrenBucket.getDocCount(), equalTo(1l));
+ assertThat(childrenBucket.getDocCount(), equalTo(1L));
topHits = childrenBucket.getAggregations().get("top_comments");
- assertThat(topHits.getHits().totalHits(), equalTo(1l));
+ assertThat(topHits.getHits().totalHits(), equalTo(1L));
assertThat(topHits.getHits().getAt(0).getId(), equalTo("c"));
assertThat(topHits.getHits().getAt(0).getType(), equalTo("comment"));
categoryBucket = categoryTerms.getBucketByKey("c");
assertThat(categoryBucket.getKeyAsString(), equalTo("c"));
- assertThat(categoryBucket.getDocCount(), equalTo(2l));
+ assertThat(categoryBucket.getDocCount(), equalTo(2L));
childrenBucket = categoryBucket.getAggregations().get("to_comment");
assertThat(childrenBucket.getName(), equalTo("to_comment"));
- assertThat(childrenBucket.getDocCount(), equalTo(1l));
+ assertThat(childrenBucket.getDocCount(), equalTo(1L));
topHits = childrenBucket.getAggregations().get("top_comments");
- assertThat(topHits.getHits().totalHits(), equalTo(1l));
+ assertThat(topHits.getHits().totalHits(), equalTo(1L));
assertThat(topHits.getHits().getAt(0).getId(), equalTo("c"));
assertThat(topHits.getHits().getAt(0).getType(), equalTo("comment"));
}
@@ -255,7 +255,7 @@ public class ChildrenIT extends ESIntegTestCase {
assertNoFailures(searchResponse);
Children children = searchResponse.getAggregations().get("children");
- assertThat(children.getDocCount(), equalTo(4l));
+ assertThat(children.getDocCount(), equalTo(4L));
Sum count = children.getAggregations().get("counts");
assertThat(count.getValue(), equalTo(4.));
@@ -271,7 +271,7 @@ public class ChildrenIT extends ESIntegTestCase {
.setDoc("count", 1)
.setDetectNoop(false)
.get();
- assertThat(updateResponse.getVersion(), greaterThan(1l));
+ assertThat(updateResponse.getVersion(), greaterThan(1L));
refresh();
}
}
@@ -285,7 +285,7 @@ public class ChildrenIT extends ESIntegTestCase {
Children children = searchResponse.getAggregations().get("non-existing");
assertThat(children.getName(), equalTo("non-existing"));
- assertThat(children.getDocCount(), equalTo(0l));
+ assertThat(children.getDocCount(), equalTo(0L));
}
public void testPostCollection() throws Exception {
@@ -328,23 +328,23 @@ public class ChildrenIT extends ESIntegTestCase {
assertHitCount(response, 1);
Children childrenAgg = response.getAggregations().get("my-refinements");
- assertThat(childrenAgg.getDocCount(), equalTo(7l));
+ assertThat(childrenAgg.getDocCount(), equalTo(7L));
Terms termsAgg = childrenAgg.getAggregations().get("my-colors");
assertThat(termsAgg.getBuckets().size(), equalTo(4));
- assertThat(termsAgg.getBucketByKey("black").getDocCount(), equalTo(3l));
- assertThat(termsAgg.getBucketByKey("blue").getDocCount(), equalTo(2l));
- assertThat(termsAgg.getBucketByKey("green").getDocCount(), equalTo(1l));
- assertThat(termsAgg.getBucketByKey("orange").getDocCount(), equalTo(1l));
+ assertThat(termsAgg.getBucketByKey("black").getDocCount(), equalTo(3L));
+ assertThat(termsAgg.getBucketByKey("blue").getDocCount(), equalTo(2L));
+ assertThat(termsAgg.getBucketByKey("green").getDocCount(), equalTo(1L));
+ assertThat(termsAgg.getBucketByKey("orange").getDocCount(), equalTo(1L));
termsAgg = childrenAgg.getAggregations().get("my-sizes");
assertThat(termsAgg.getBuckets().size(), equalTo(6));
- assertThat(termsAgg.getBucketByKey("36").getDocCount(), equalTo(2l));
- assertThat(termsAgg.getBucketByKey("32").getDocCount(), equalTo(1l));
- assertThat(termsAgg.getBucketByKey("34").getDocCount(), equalTo(1l));
- assertThat(termsAgg.getBucketByKey("38").getDocCount(), equalTo(1l));
- assertThat(termsAgg.getBucketByKey("40").getDocCount(), equalTo(1l));
- assertThat(termsAgg.getBucketByKey("44").getDocCount(), equalTo(1l));
+ assertThat(termsAgg.getBucketByKey("36").getDocCount(), equalTo(2L));
+ assertThat(termsAgg.getBucketByKey("32").getDocCount(), equalTo(1L));
+ assertThat(termsAgg.getBucketByKey("34").getDocCount(), equalTo(1L));
+ assertThat(termsAgg.getBucketByKey("38").getDocCount(), equalTo(1L));
+ assertThat(termsAgg.getBucketByKey("40").getDocCount(), equalTo(1L));
+ assertThat(termsAgg.getBucketByKey("44").getDocCount(), equalTo(1L));
}
public void testHierarchicalChildrenAggs() {
@@ -383,14 +383,14 @@ public class ChildrenIT extends ESIntegTestCase {
Children children = response.getAggregations().get(parentType);
assertThat(children.getName(), equalTo(parentType));
- assertThat(children.getDocCount(), equalTo(1l));
+ assertThat(children.getDocCount(), equalTo(1L));
children = children.getAggregations().get(childType);
assertThat(children.getName(), equalTo(childType));
- assertThat(children.getDocCount(), equalTo(1l));
+ assertThat(children.getDocCount(), equalTo(1L));
Terms terms = children.getAggregations().get("name");
assertThat(terms.getBuckets().size(), equalTo(1));
assertThat(terms.getBuckets().get(0).getKey().toString(), equalTo("brussels"));
- assertThat(terms.getBuckets().get(0).getDocCount(), equalTo(1l));
+ assertThat(terms.getBuckets().get(0).getDocCount(), equalTo(1L));
}
public void testPostCollectAllLeafReaders() throws Exception {
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java
index a39e12f00d..0d28bcebee 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java
@@ -33,6 +33,7 @@ import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.LeafSearchScript;
import org.elasticsearch.script.Script;
+import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService.ScriptType;
@@ -55,6 +56,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
+import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -176,21 +178,21 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
}
public void testSingleValuedFieldWithTimeZone() throws Exception {
@@ -211,42 +213,42 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 2, 1, 23, 0, DateTimeZone.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 2, 14, 23, 0, DateTimeZone.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 3, 1, 23, 0, DateTimeZone.UTC);
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 3, 14, 23, 0, DateTimeZone.UTC);
bucket = buckets.get(4);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 3, 22, 23, 0, DateTimeZone.UTC);
bucket = buckets.get(5);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
}
public void testSingleValuedFieldOrderedByKeyAsc() throws Exception {
@@ -360,12 +362,12 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
Sum sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
assertThat(sum.getValue(), equalTo(1.0));
assertThat((DateTime) propertiesKeys[0], equalTo(key));
- assertThat((long) propertiesDocCounts[0], equalTo(1l));
+ assertThat((long) propertiesDocCounts[0], equalTo(1L));
assertThat((double) propertiesCounts[0], equalTo(1.0));
key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
@@ -373,12 +375,12 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
assertThat(sum.getValue(), equalTo(5.0));
assertThat((DateTime) propertiesKeys[1], equalTo(key));
- assertThat((long) propertiesDocCounts[1], equalTo(2l));
+ assertThat((long) propertiesDocCounts[1], equalTo(2L));
assertThat((double) propertiesCounts[1], equalTo(5.0));
key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
@@ -386,12 +388,12 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
assertThat(sum.getValue(), equalTo(15.0));
assertThat((DateTime) propertiesKeys[2], equalTo(key));
- assertThat((long) propertiesDocCounts[2], equalTo(3l));
+ assertThat((long) propertiesDocCounts[2], equalTo(3L));
assertThat((double) propertiesCounts[2], equalTo(15.0));
}
@@ -414,7 +416,7 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
Max max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat(max.getValue(), equalTo((double) new DateTime(2012, 1, 2, 0, 0, DateTimeZone.UTC).getMillis()));
@@ -424,7 +426,7 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat(max.getValue(), equalTo((double) new DateTime(2012, 2, 15, 0, 0, DateTimeZone.UTC).getMillis()));
@@ -434,7 +436,7 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat(max.getValue(), equalTo((double) new DateTime(2012, 3, 23, 0, 0, DateTimeZone.UTC).getMillis()));
@@ -553,21 +555,21 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
}
/*
@@ -597,28 +599,28 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(5l));
+ assertThat(bucket.getDocCount(), equalTo(5L));
key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
}
public void testMultiValuedFieldOrderedByKeyDesc() throws Exception {
@@ -641,19 +643,19 @@ public class DateHistogramIT extends ESIntegTestCase {
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
- assertThat(bucket.getDocCount(), equalTo(5l));
+ assertThat(bucket.getDocCount(), equalTo(5L));
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
}
/**
@@ -686,28 +688,28 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(5l));
+ assertThat(bucket.getDocCount(), equalTo(5L));
key = new DateTime(2012, 5, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
}
/**
@@ -740,7 +742,7 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
Max max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat((long) max.getValue(), equalTo(new DateTime(2012, 3, 3, 0, 0, DateTimeZone.UTC).getMillis()));
@@ -750,7 +752,7 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat((long) max.getValue(), equalTo(new DateTime(2012, 4, 16, 0, 0, DateTimeZone.UTC).getMillis()));
@@ -760,7 +762,7 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(5l));
+ assertThat(bucket.getDocCount(), equalTo(5L));
max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat((long) max.getValue(), equalTo(new DateTime(2012, 5, 24, 0, 0, DateTimeZone.UTC).getMillis()));
@@ -770,7 +772,7 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat((long) max.getValue(), equalTo(new DateTime(2012, 5, 24, 0, 0, DateTimeZone.UTC).getMillis()));
@@ -802,21 +804,21 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
}
public void testScriptSingleValueWithSubAggregatorInherited() throws Exception {
@@ -838,7 +840,7 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
Max max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat(max.getValue(), equalTo((double) new DateTime(2012, 1, 2, 0, 0, DateTimeZone.UTC).getMillis()));
@@ -848,7 +850,7 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat(max.getValue(), equalTo((double) new DateTime(2012, 2, 15, 0, 0, DateTimeZone.UTC).getMillis()));
@@ -858,7 +860,7 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat(max.getValue(), equalTo((double) new DateTime(2012, 3, 23, 0, 0, DateTimeZone.UTC).getMillis()));
@@ -882,28 +884,28 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(5l));
+ assertThat(bucket.getDocCount(), equalTo(5L));
key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
}
@@ -936,7 +938,7 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
Max max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat((long) max.getValue(), equalTo(new DateTime(2012, 2, 3, 0, 0, DateTimeZone.UTC).getMillis()));
@@ -946,7 +948,7 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat((long) max.getValue(), equalTo(new DateTime(2012, 3, 16, 0, 0, DateTimeZone.UTC).getMillis()));
@@ -956,7 +958,7 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(5l));
+ assertThat(bucket.getDocCount(), equalTo(5L));
max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat((long) max.getValue(), equalTo(new DateTime(2012, 4, 24, 0, 0, DateTimeZone.UTC).getMillis()));
@@ -966,7 +968,7 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat((long) max.getValue(), equalTo(new DateTime(2012, 4, 24, 0, 0, DateTimeZone.UTC).getMillis()));
@@ -1003,30 +1005,30 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
}
public void testEmptyAggregation() throws Exception {
SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
.setQuery(matchAllQuery())
- .addAggregation(histogram("histo").field("value").interval(1l).minDocCount(0).subAggregation(dateHistogram("date_histo").interval(1)))
+ .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(dateHistogram("date_histo").interval(1)))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, Matchers.notNullValue());
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
@@ -1062,7 +1064,7 @@ public class DateHistogramIT extends ESIntegTestCase {
.format("yyyy-MM-dd:HH-mm-ssZZ"))
.execute().actionGet();
- assertThat(response.getHits().getTotalHits(), equalTo(5l));
+ assertThat(response.getHits().getTotalHits(), equalTo(5L));
Histogram histo = response.getAggregations().get("date_histo");
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
@@ -1071,12 +1073,12 @@ public class DateHistogramIT extends ESIntegTestCase {
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo("2014-03-10:00-00-00-02:00"));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo("2014-03-11:00-00-00-02:00"));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
}
public void testSingleValueFieldWithExtendedBounds() throws Exception {
@@ -1232,7 +1234,7 @@ public class DateHistogramIT extends ESIntegTestCase {
).execute().actionGet();
assertSearchResponse(response);
- assertThat("Expected 24 buckets for one day aggregation with hourly interval", response.getHits().totalHits(), equalTo(2l));
+ assertThat("Expected 24 buckets for one day aggregation with hourly interval", response.getHits().totalHits(), equalTo(2L));
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
@@ -1245,9 +1247,9 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat("InternalBucket " + i + " had wrong key", (DateTime) bucket.getKey(), equalTo(new DateTime(timeZoneStartToday.getMillis() + (i * 60 * 60 * 1000), DateTimeZone.UTC)));
if (i == 0 || i == 12) {
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
} else {
- assertThat(bucket.getDocCount(), equalTo(0l));
+ assertThat(bucket.getDocCount(), equalTo(0L));
}
}
internalCluster().wipeIndices("test12278");
@@ -1280,7 +1282,7 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(5l));
+ assertThat(bucket.getDocCount(), equalTo(5L));
}
public void testIssue6965() {
@@ -1303,21 +1305,21 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 1, 31, 23, 0, DateTimeZone.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
key = new DateTime(2012, 2, 29, 23, 0, DateTimeZone.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
}
public void testDSTBoundaryIssue9491() throws InterruptedException, ExecutionException {
@@ -1397,7 +1399,7 @@ public class DateHistogramIT extends ESIntegTestCase {
}
public void onModule(ScriptModule module) {
- module.addScriptEngine(ExtractFieldScriptEngine.class);
+ module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ExtractFieldScriptEngine.class, ExtractFieldScriptEngine.TYPES));
}
}
@@ -1409,22 +1411,24 @@ public class DateHistogramIT extends ESIntegTestCase {
public static final String NAME = "extract_field";
+ public static final List<String> TYPES = Collections.singletonList(NAME);
+
@Override
public void close() throws IOException {
}
@Override
- public String[] types() {
- return new String[] { NAME };
+ public List<String> getTypes() {
+ return TYPES;
}
@Override
- public String[] extensions() {
- return types();
+ public List<String> getExtensions() {
+ return TYPES;
}
@Override
- public boolean sandboxed() {
+ public boolean isSandboxed() {
return true;
}
@@ -1447,12 +1451,6 @@ public class DateHistogramIT extends ESIntegTestCase {
final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context);
return new LeafSearchScript() {
-
- @Override
- public Object unwrap(Object value) {
- return null;
- }
-
@Override
public void setNextVar(String name, Object value) {
}
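The hunk above also drops a no-op unwrap(Object) override from the anonymous LeafSearchScript inside the test's ExtractFieldScriptEngine; judging from this diff alone, the method appears to have been removed from (or made optional on) the LeafSearchScript contract upstream, leaving the override as dead code. A hedged sketch of the slimmer anonymous class, with the ES-internal types inferred from the surrounding context lines rather than taken from the interface itself:

    // Sketch only: LeafSearchScript and LeafSearchLookup are Elasticsearch
    // internals; their shapes are inferred from the context lines of this diff.
    final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context);
    return new LeafSearchScript() {
        @Override
        public void setNextVar(String name, Object value) {
            // intentionally a no-op in this test engine
        }
        // ... remaining overrides are unchanged by this commit ...
    };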
@@ -1523,7 +1521,7 @@ public class DateHistogramIT extends ESIntegTestCase {
}
public void onModule(ScriptModule module) {
- module.addScriptEngine(FieldValueScriptEngine.class);
+ module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(FieldValueScriptEngine.class, FieldValueScriptEngine.TYPES));
}
}
@@ -1535,22 +1533,24 @@ public class DateHistogramIT extends ESIntegTestCase {
public static final String NAME = "field_value";
+ public static final List<String> TYPES = Collections.singletonList(NAME);
+
@Override
public void close() throws IOException {
}
@Override
- public String[] types() {
- return new String[] { NAME };
+ public List<String> getTypes() {
+ return TYPES;
}
@Override
- public String[] extensions() {
- return types();
+ public List<String> getExtensions() {
+ return TYPES;
}
@Override
- public boolean sandboxed() {
+ public boolean isSandboxed() {
return true;
}
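Both test script engines in DateHistogramIT are migrated for the same plugin-API change: types() and extensions() returning String[] become getTypes() and getExtensions() returning List<String>, sandboxed() becomes isSandboxed(), and an engine is now registered through a ScriptEngineRegistry.ScriptEngineRegistration rather than by class alone. A condensed sketch of the migrated surface, using only the names that appear in the hunks above (the full ScriptEngineService interface has further methods not shown here):

    // Sketch of the updated registration/metadata surface reflected in this diff.
    public static final String NAME = "extract_field";
    public static final List<String> TYPES = Collections.singletonList(NAME);

    public void onModule(ScriptModule module) {
        // Registration now pairs the engine class with its type names.
        module.addScriptEngine(
            new ScriptEngineRegistry.ScriptEngineRegistration(ExtractFieldScriptEngine.class, TYPES));
    }

    @Override
    public List<String> getTypes() { return TYPES; }       // was: String[] types()

    @Override
    public List<String> getExtensions() { return TYPES; }  // was: String[] extensions()

    @Override
    public boolean isSandboxed() { return true; }          // was: boolean sandboxed()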
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java
index bb22361ebd..b729692e40 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java
@@ -61,7 +61,7 @@ public class DateHistogramOffsetIT extends ESIntegTestCase {
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal))
- .put(AssertingLocalTransport.ASSERTING_TRANSPORT_MIN_VERSION_KEY, Version.V_1_4_0_Beta1).build();
+ .put(AssertingLocalTransport.ASSERTING_TRANSPORT_MIN_VERSION_KEY.getKey(), Version.V_1_4_0_Beta1).build();
}
@Before
@@ -95,14 +95,14 @@ public class DateHistogramOffsetIT extends ESIntegTestCase {
.interval(DateHistogramInterval.DAY))
.execute().actionGet();
- assertThat(response.getHits().getTotalHits(), equalTo(5l));
+ assertThat(response.getHits().getTotalHits(), equalTo(5L));
Histogram histo = response.getAggregations().get("date_histo");
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(2));
- checkBucketFor(buckets.get(0), new DateTime(2014, 3, 10, 2, 0, DateTimeZone.UTC), 2l);
- checkBucketFor(buckets.get(1), new DateTime(2014, 3, 11, 2, 0, DateTimeZone.UTC), 3l);
+ checkBucketFor(buckets.get(0), new DateTime(2014, 3, 10, 2, 0, DateTimeZone.UTC), 2L);
+ checkBucketFor(buckets.get(1), new DateTime(2014, 3, 11, 2, 0, DateTimeZone.UTC), 3L);
}
public void testSingleValueWithNegativeOffset() throws Exception {
@@ -117,14 +117,14 @@ public class DateHistogramOffsetIT extends ESIntegTestCase {
.interval(DateHistogramInterval.DAY))
.execute().actionGet();
- assertThat(response.getHits().getTotalHits(), equalTo(5l));
+ assertThat(response.getHits().getTotalHits(), equalTo(5L));
Histogram histo = response.getAggregations().get("date_histo");
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(2));
- checkBucketFor(buckets.get(0), new DateTime(2014, 3, 9, 22, 0, DateTimeZone.UTC), 2l);
- checkBucketFor(buckets.get(1), new DateTime(2014, 3, 10, 22, 0, DateTimeZone.UTC), 3l);
+ checkBucketFor(buckets.get(0), new DateTime(2014, 3, 9, 22, 0, DateTimeZone.UTC), 2L);
+ checkBucketFor(buckets.get(1), new DateTime(2014, 3, 10, 22, 0, DateTimeZone.UTC), 3L);
}
/**
@@ -144,7 +144,7 @@ public class DateHistogramOffsetIT extends ESIntegTestCase {
.interval(DateHistogramInterval.DAY))
.execute().actionGet();
- assertThat(response.getHits().getTotalHits(), equalTo(24l));
+ assertThat(response.getHits().getTotalHits(), equalTo(24L));
Histogram histo = response.getAggregations().get("date_histo");
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
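Besides the literal-suffix cleanup, DateHistogramOffsetIT picks up one mechanical but telling change: ASSERTING_TRANSPORT_MIN_VERSION_KEY is now consumed via .getKey(), which suggests the constant was converted elsewhere in this merge from a raw settings-key string to a typed Setting object. A before/after sketch, with the Setting type assumed from the .getKey() call rather than confirmed by this diff:

    // Before: the constant was (apparently) a plain String settings key.
    // builder.put(AssertingLocalTransport.ASSERTING_TRANSPORT_MIN_VERSION_KEY, Version.V_1_4_0_Beta1);

    // After: the constant is a Setting<?>, so its string key must be unwrapped.
    builder.put(AssertingLocalTransport.ASSERTING_TRANSPORT_MIN_VERSION_KEY.getKey(),
                Version.V_1_4_0_Beta1);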
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java
index 6e97a33e93..38cc24afd7 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java
@@ -184,11 +184,11 @@ public class FilterIT extends ESIntegTestCase {
public void testEmptyAggregation() throws Exception {
SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
.setQuery(matchAllQuery())
- .addAggregation(histogram("histo").field("value").interval(1l).minDocCount(0)
+ .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0)
.subAggregation(filter("filter").filter(matchAllQuery())))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, Matchers.notNullValue());
Histogram.Bucket bucket = histo.getBuckets().get(1);
@@ -197,6 +197,6 @@ public class FilterIT extends ESIntegTestCase {
Filter filter = bucket.getAggregations().get("filter");
assertThat(filter, Matchers.notNullValue());
assertThat(filter.getName(), equalTo("filter"));
- assertThat(filter.getDocCount(), is(0l));
+ assertThat(filter.getDocCount(), is(0L));
}
}
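FilterIT's testEmptyAggregation follows a pattern repeated in FiltersIT, GeoDistanceIT, MissingIT, and NestedIT below: search the empty_bucket_idx index with a histogram on value at interval 1 and minDocCount(0), which forces empty buckets to materialize, then assert that the sub-aggregation inside such a bucket still exists and reports a doc count of 0L. A compressed sketch of the shared skeleton, using the index, field, and assertions these tests actually use:

    // Shared shape of the testEmptyAggregation cases in this diff:
    SearchResponse resp = client().prepareSearch("empty_bucket_idx")
        .setQuery(matchAllQuery())
        .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0)
            // the sub-aggregation under test varies per file, e.g. missing("missing")
            .subAggregation(filter("filter").filter(matchAllQuery())))
        .execute().actionGet();
    Histogram histo = resp.getAggregations().get("histo");
    Histogram.Bucket empty = histo.getBuckets().get(1); // a bucket holding no documents
    Filter f = empty.getAggregations().get("filter");
    assertThat(f.getDocCount(), is(0L)); // the sub-aggregation must exist, with zero docs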
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java
index 2235b00c2c..ab44d6cdb6 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java
@@ -250,11 +250,11 @@ public class FiltersIT extends ESIntegTestCase {
public void testEmptyAggregation() throws Exception {
SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
.setQuery(matchAllQuery())
- .addAggregation(histogram("histo").field("value").interval(1l).minDocCount(0)
+ .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0)
.subAggregation(filters("filters").filter("all", matchAllQuery())))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, Matchers.notNullValue());
Histogram.Bucket bucket = histo.getBuckets().get(1);
@@ -265,7 +265,7 @@ public class FiltersIT extends ESIntegTestCase {
Filters.Bucket all = filters.getBucketByKey("all");
assertThat(all, Matchers.notNullValue());
assertThat(all.getKeyAsString(), equalTo("all"));
- assertThat(all.getDocCount(), is(0l));
+ assertThat(all.getDocCount(), is(0L));
}
public void testSimpleNonKeyed() throws Exception {
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java
index 57c4c8bc6f..5d52d1442f 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java
@@ -165,7 +165,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(500.0));
assertThat(bucket.getFromAsString(), equalTo("0.0"));
assertThat(bucket.getToAsString(), equalTo("500.0"));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
@@ -174,7 +174,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(1000.0));
assertThat(bucket.getFromAsString(), equalTo("500.0"));
assertThat(bucket.getToAsString(), equalTo("1000.0"));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
@@ -183,7 +183,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("1000.0"));
assertThat(bucket.getToAsString(), nullValue());
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
}
public void testSimpleWithCustomKeys() throws Exception {
@@ -213,7 +213,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(500.0));
assertThat(bucket.getFromAsString(), equalTo("0.0"));
assertThat(bucket.getToAsString(), equalTo("500.0"));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
@@ -222,7 +222,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(1000.0));
assertThat(bucket.getFromAsString(), equalTo("500.0"));
assertThat(bucket.getToAsString(), equalTo("1000.0"));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
@@ -231,7 +231,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("1000.0"));
assertThat(bucket.getToAsString(), nullValue());
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
}
public void testUnmapped() throws Exception {
@@ -263,7 +263,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(500.0));
assertThat(bucket.getFromAsString(), equalTo("0.0"));
assertThat(bucket.getToAsString(), equalTo("500.0"));
- assertThat(bucket.getDocCount(), equalTo(0l));
+ assertThat(bucket.getDocCount(), equalTo(0L));
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
@@ -272,7 +272,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(1000.0));
assertThat(bucket.getFromAsString(), equalTo("500.0"));
assertThat(bucket.getToAsString(), equalTo("1000.0"));
- assertThat(bucket.getDocCount(), equalTo(0l));
+ assertThat(bucket.getDocCount(), equalTo(0L));
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
@@ -281,7 +281,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("1000.0"));
assertThat(bucket.getToAsString(), nullValue());
- assertThat(bucket.getDocCount(), equalTo(0l));
+ assertThat(bucket.getDocCount(), equalTo(0L));
}
public void testPartiallyUnmapped() throws Exception {
@@ -311,7 +311,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(500.0));
assertThat(bucket.getFromAsString(), equalTo("0.0"));
assertThat(bucket.getToAsString(), equalTo("500.0"));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
@@ -320,7 +320,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(1000.0));
assertThat(bucket.getFromAsString(), equalTo("500.0"));
assertThat(bucket.getToAsString(), equalTo("1000.0"));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
@@ -329,7 +329,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("1000.0"));
assertThat(bucket.getToAsString(), nullValue());
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
}
public void testWithSubAggregation() throws Exception {
@@ -364,7 +364,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(500.0));
assertThat(bucket.getFromAsString(), equalTo("0.0"));
assertThat(bucket.getToAsString(), equalTo("500.0"));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
Terms cities = bucket.getAggregations().get("cities");
assertThat(cities, Matchers.notNullValue());
@@ -374,7 +374,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
}
assertThat(names.contains("utrecht") && names.contains("haarlem"), is(true));
assertThat((String) propertiesKeys[0], equalTo("*-500.0"));
- assertThat((long) propertiesDocCounts[0], equalTo(2l));
+ assertThat((long) propertiesDocCounts[0], equalTo(2L));
assertThat((Terms) propertiesCities[0], sameInstance(cities));
bucket = buckets.get(1);
@@ -384,7 +384,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(1000.0));
assertThat(bucket.getFromAsString(), equalTo("500.0"));
assertThat(bucket.getToAsString(), equalTo("1000.0"));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
cities = bucket.getAggregations().get("cities");
assertThat(cities, Matchers.notNullValue());
@@ -394,7 +394,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
}
assertThat(names.contains("berlin") && names.contains("prague"), is(true));
assertThat((String) propertiesKeys[1], equalTo("500.0-1000.0"));
- assertThat((long) propertiesDocCounts[1], equalTo(2l));
+ assertThat((long) propertiesDocCounts[1], equalTo(2L));
assertThat((Terms) propertiesCities[1], sameInstance(cities));
bucket = buckets.get(2);
@@ -404,7 +404,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("1000.0"));
assertThat(bucket.getToAsString(), nullValue());
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
cities = bucket.getAggregations().get("cities");
assertThat(cities, Matchers.notNullValue());
@@ -414,18 +414,18 @@ public class GeoDistanceIT extends ESIntegTestCase {
}
assertThat(names.contains("tel-aviv"), is(true));
assertThat((String) propertiesKeys[2], equalTo("1000.0-*"));
- assertThat((long) propertiesDocCounts[2], equalTo(1l));
+ assertThat((long) propertiesDocCounts[2], equalTo(1L));
assertThat((Terms) propertiesCities[2], sameInstance(cities));
}
public void testEmptyAggregation() throws Exception {
SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
.setQuery(matchAllQuery())
- .addAggregation(histogram("histo").field("value").interval(1l).minDocCount(0)
+ .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0)
.subAggregation(geoDistance("geo_dist").field("location").point("52.3760, 4.894").addRange("0-100", 0.0, 100.0)))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, Matchers.notNullValue());
Histogram.Bucket bucket = histo.getBuckets().get(1);
@@ -442,7 +442,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) buckets.get(0).getTo()).doubleValue(), equalTo(100.0));
assertThat(buckets.get(0).getFromAsString(), equalTo("0.0"));
assertThat(buckets.get(0).getToAsString(), equalTo("100.0"));
- assertThat(buckets.get(0).getDocCount(), equalTo(0l));
+ assertThat(buckets.get(0).getDocCount(), equalTo(0L));
}
public void testMultiValues() throws Exception {
@@ -472,7 +472,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(500.0));
assertThat(bucket.getFromAsString(), equalTo("0.0"));
assertThat(bucket.getToAsString(), equalTo("500.0"));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
@@ -481,7 +481,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(1000.0));
assertThat(bucket.getFromAsString(), equalTo("500.0"));
assertThat(bucket.getToAsString(), equalTo("1000.0"));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
@@ -490,7 +490,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("1000.0"));
assertThat(bucket.getToAsString(), nullValue());
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/MissingIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/MissingIT.java
index 924ba7283f..85e0c58eda 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/MissingIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/MissingIT.java
@@ -184,11 +184,11 @@ public class MissingIT extends ESIntegTestCase {
public void testEmptyAggregation() throws Exception {
SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
.setQuery(matchAllQuery())
- .addAggregation(histogram("histo").field("value").interval(1l).minDocCount(0)
+ .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0)
.subAggregation(missing("missing")))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, Matchers.notNullValue());
Histogram.Bucket bucket = histo.getBuckets().get(1);
@@ -197,7 +197,7 @@ public class MissingIT extends ESIntegTestCase {
Missing missing = bucket.getAggregations().get("missing");
assertThat(missing, Matchers.notNullValue());
assertThat(missing.getName(), equalTo("missing"));
- assertThat(missing.getDocCount(), is(0l));
+ assertThat(missing.getDocCount(), is(0L));
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java
index 349b61fc37..5317f2e15f 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java
@@ -212,7 +212,7 @@ public class NestedIT extends ESIntegTestCase {
Nested nested = searchResponse.getAggregations().get("nested");
assertThat(nested, Matchers.notNullValue());
assertThat(nested.getName(), equalTo("nested"));
- assertThat(nested.getDocCount(), is(0l));
+ assertThat(nested.getDocCount(), is(0L));
}
public void testNestedWithSubTermsAgg() throws Exception {
@@ -308,23 +308,23 @@ public class NestedIT extends ESIntegTestCase {
Nested level1 = response.getAggregations().get("level1");
assertThat(level1, notNullValue());
assertThat(level1.getName(), equalTo("level1"));
- assertThat(level1.getDocCount(), equalTo(2l));
+ assertThat(level1.getDocCount(), equalTo(2L));
StringTerms a = level1.getAggregations().get("a");
Terms.Bucket bBucket = a.getBucketByKey("a");
- assertThat(bBucket.getDocCount(), equalTo(1l));
+ assertThat(bBucket.getDocCount(), equalTo(1L));
Nested level2 = bBucket.getAggregations().get("level2");
- assertThat(level2.getDocCount(), equalTo(1l));
+ assertThat(level2.getDocCount(), equalTo(1L));
Sum sum = level2.getAggregations().get("sum");
assertThat(sum.getValue(), equalTo(2d));
a = level1.getAggregations().get("a");
bBucket = a.getBucketByKey("b");
- assertThat(bBucket.getDocCount(), equalTo(1l));
+ assertThat(bBucket.getDocCount(), equalTo(1L));
level2 = bBucket.getAggregations().get("level2");
- assertThat(level2.getDocCount(), equalTo(1l));
+ assertThat(level2.getDocCount(), equalTo(1L));
sum = level2.getAggregations().get("sum");
assertThat(sum.getValue(), equalTo(2d));
}
@@ -332,11 +332,11 @@ public class NestedIT extends ESIntegTestCase {
public void testEmptyAggregation() throws Exception {
SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
.setQuery(matchAllQuery())
- .addAggregation(histogram("histo").field("value").interval(1l).minDocCount(0)
+ .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0)
.subAggregation(nested("nested").path("nested")))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, Matchers.notNullValue());
Histogram.Bucket bucket = histo.getBuckets().get(1);
@@ -345,7 +345,7 @@ public class NestedIT extends ESIntegTestCase {
Nested nested = bucket.getAggregations().get("nested");
assertThat(nested, Matchers.notNullValue());
assertThat(nested.getName(), equalTo("nested"));
- assertThat(nested.getDocCount(), is(0l));
+ assertThat(nested.getDocCount(), is(0L));
}
public void testNestedOnObjectField() throws Exception {
@@ -424,36 +424,36 @@ public class NestedIT extends ESIntegTestCase {
Terms startDate = response.getAggregations().get("startDate");
assertThat(startDate.getBuckets().size(), equalTo(2));
Terms.Bucket bucket = startDate.getBucketByKey("2014-11-01T00:00:00.000Z");
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
Terms endDate = bucket.getAggregations().get("endDate");
bucket = endDate.getBucketByKey("2014-11-30T00:00:00.000Z");
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
Terms period = bucket.getAggregations().get("period");
bucket = period.getBucketByKey("2014-11");
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
Nested comments = bucket.getAggregations().get("ctxt_idfier_nested");
- assertThat(comments.getDocCount(), equalTo(2l));
+ assertThat(comments.getDocCount(), equalTo(2L));
Filter filter = comments.getAggregations().get("comment_filter");
- assertThat(filter.getDocCount(), equalTo(1l));
+ assertThat(filter.getDocCount(), equalTo(1L));
Nested nestedTags = filter.getAggregations().get("nested_tags");
- assertThat(nestedTags.getDocCount(), equalTo(0l)); // This must be 0
+ assertThat(nestedTags.getDocCount(), equalTo(0L)); // This must be 0
Terms tags = nestedTags.getAggregations().get("tag");
assertThat(tags.getBuckets().size(), equalTo(0)); // and this must be empty
bucket = startDate.getBucketByKey("2014-12-01T00:00:00.000Z");
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
endDate = bucket.getAggregations().get("endDate");
bucket = endDate.getBucketByKey("2014-12-31T00:00:00.000Z");
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
period = bucket.getAggregations().get("period");
bucket = period.getBucketByKey("2014-12");
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
comments = bucket.getAggregations().get("ctxt_idfier_nested");
- assertThat(comments.getDocCount(), equalTo(2l));
+ assertThat(comments.getDocCount(), equalTo(2L));
filter = comments.getAggregations().get("comment_filter");
- assertThat(filter.getDocCount(), equalTo(1l));
+ assertThat(filter.getDocCount(), equalTo(1L));
nestedTags = filter.getAggregations().get("nested_tags");
- assertThat(nestedTags.getDocCount(), equalTo(0l)); // This must be 0
+ assertThat(nestedTags.getDocCount(), equalTo(0L)); // This must be 0
tags = nestedTags.getAggregations().get("tag");
assertThat(tags.getBuckets().size(), equalTo(0)); // and this must be empty
}
@@ -500,47 +500,47 @@ public class NestedIT extends ESIntegTestCase {
assertThat(category.getBuckets().size(), equalTo(4));
Terms.Bucket bucket = category.getBucketByKey("1");
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
Nested property = bucket.getAggregations().get("property");
- assertThat(property.getDocCount(), equalTo(6l));
+ assertThat(property.getDocCount(), equalTo(6L));
Terms propertyId = property.getAggregations().get("property_id");
assertThat(propertyId.getBuckets().size(), equalTo(5));
- assertThat(propertyId.getBucketByKey("1").getDocCount(), equalTo(2l));
- assertThat(propertyId.getBucketByKey("2").getDocCount(), equalTo(1l));
- assertThat(propertyId.getBucketByKey("3").getDocCount(), equalTo(1l));
- assertThat(propertyId.getBucketByKey("4").getDocCount(), equalTo(1l));
- assertThat(propertyId.getBucketByKey("5").getDocCount(), equalTo(1l));
+ assertThat(propertyId.getBucketByKey("1").getDocCount(), equalTo(2L));
+ assertThat(propertyId.getBucketByKey("2").getDocCount(), equalTo(1L));
+ assertThat(propertyId.getBucketByKey("3").getDocCount(), equalTo(1L));
+ assertThat(propertyId.getBucketByKey("4").getDocCount(), equalTo(1L));
+ assertThat(propertyId.getBucketByKey("5").getDocCount(), equalTo(1L));
bucket = category.getBucketByKey("2");
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
property = bucket.getAggregations().get("property");
- assertThat(property.getDocCount(), equalTo(6l));
+ assertThat(property.getDocCount(), equalTo(6L));
propertyId = property.getAggregations().get("property_id");
assertThat(propertyId.getBuckets().size(), equalTo(5));
- assertThat(propertyId.getBucketByKey("1").getDocCount(), equalTo(2l));
- assertThat(propertyId.getBucketByKey("2").getDocCount(), equalTo(1l));
- assertThat(propertyId.getBucketByKey("3").getDocCount(), equalTo(1l));
- assertThat(propertyId.getBucketByKey("4").getDocCount(), equalTo(1l));
- assertThat(propertyId.getBucketByKey("5").getDocCount(), equalTo(1l));
+ assertThat(propertyId.getBucketByKey("1").getDocCount(), equalTo(2L));
+ assertThat(propertyId.getBucketByKey("2").getDocCount(), equalTo(1L));
+ assertThat(propertyId.getBucketByKey("3").getDocCount(), equalTo(1L));
+ assertThat(propertyId.getBucketByKey("4").getDocCount(), equalTo(1L));
+ assertThat(propertyId.getBucketByKey("5").getDocCount(), equalTo(1L));
bucket = category.getBucketByKey("3");
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
property = bucket.getAggregations().get("property");
- assertThat(property.getDocCount(), equalTo(3l));
+ assertThat(property.getDocCount(), equalTo(3L));
propertyId = property.getAggregations().get("property_id");
assertThat(propertyId.getBuckets().size(), equalTo(3));
- assertThat(propertyId.getBucketByKey("1").getDocCount(), equalTo(1l));
- assertThat(propertyId.getBucketByKey("2").getDocCount(), equalTo(1l));
- assertThat(propertyId.getBucketByKey("3").getDocCount(), equalTo(1l));
+ assertThat(propertyId.getBucketByKey("1").getDocCount(), equalTo(1L));
+ assertThat(propertyId.getBucketByKey("2").getDocCount(), equalTo(1L));
+ assertThat(propertyId.getBucketByKey("3").getDocCount(), equalTo(1L));
bucket = category.getBucketByKey("4");
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
property = bucket.getAggregations().get("property");
- assertThat(property.getDocCount(), equalTo(3l));
+ assertThat(property.getDocCount(), equalTo(3L));
propertyId = property.getAggregations().get("property_id");
assertThat(propertyId.getBuckets().size(), equalTo(3));
- assertThat(propertyId.getBucketByKey("1").getDocCount(), equalTo(1l));
- assertThat(propertyId.getBucketByKey("2").getDocCount(), equalTo(1l));
- assertThat(propertyId.getBucketByKey("3").getDocCount(), equalTo(1l));
+ assertThat(propertyId.getBucketByKey("1").getDocCount(), equalTo(1L));
+ assertThat(propertyId.getBucketByKey("2").getDocCount(), equalTo(1L));
+ assertThat(propertyId.getBucketByKey("3").getDocCount(), equalTo(1L));
}
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ParentIdAggIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ParentIdAggIT.java
index 44bd22af1c..0c01825d7e 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ParentIdAggIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ParentIdAggIT.java
@@ -47,11 +47,11 @@ public class ParentIdAggIT extends ESIntegTestCase {
refresh();
ensureGreen("testidx");
- SearchResponse searchResponse = client().prepareSearch("testidx").setTypes("childtype").setQuery(matchAllQuery()).addAggregation(AggregationBuilders.terms("children").field("_parent")).get();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
+ SearchResponse searchResponse = client().prepareSearch("testidx").setTypes("childtype").setQuery(matchAllQuery()).addAggregation(AggregationBuilders.terms("children").field("_parent#parenttype")).get();
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
assertSearchResponse(searchResponse);
assertThat(searchResponse.getAggregations().getAsMap().get("children"), instanceOf(Terms.class));
Terms terms = (Terms) searchResponse.getAggregations().getAsMap().get("children");
- assertThat(terms.getBuckets().iterator().next().getDocCount(), equalTo(2l));
+ assertThat(terms.getBuckets().iterator().next().getDocCount(), equalTo(2L));
}
-}
\ No newline at end of file
+}
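ParentIdAggIT changes more than a suffix: the terms aggregation now targets _parent#parenttype instead of bare _parent, which suggests the parent link is indexed per parent type in this codebase, and the file also gains its missing trailing newline. A sketch of the updated aggregation call, using only the identifiers present in the hunk above:

    // Aggregate child docs by parent id; the field name now carries the parent type.
    SearchResponse resp = client().prepareSearch("testidx")
        .setTypes("childtype")
        .setQuery(matchAllQuery())
        .addAggregation(AggregationBuilders.terms("children").field("_parent#parenttype"))
        .get();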
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java
index 8800063043..e82a7374b8 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java
@@ -156,7 +156,7 @@ public class ReverseNestedIT extends ESIntegTestCase {
Nested nested = response.getAggregations().get("nested1");
assertThat(nested, notNullValue());
assertThat(nested.getName(), equalTo("nested1"));
- assertThat(nested.getDocCount(), equalTo(25l));
+ assertThat(nested.getDocCount(), equalTo(25L));
assertThat(nested.getAggregations().asList().isEmpty(), is(false));
Terms usernames = nested.getAggregations().get("field2");
@@ -167,161 +167,161 @@ public class ReverseNestedIT extends ESIntegTestCase {
// nested.field2: 1
Terms.Bucket bucket = usernameBuckets.get(0);
assertThat(bucket.getKeyAsString(), equalTo("1"));
- assertThat(bucket.getDocCount(), equalTo(6l));
+ assertThat(bucket.getDocCount(), equalTo(6L));
ReverseNested reverseNested = bucket.getAggregations().get("nested1_to_field1");
- assertThat((long) reverseNested.getProperty("_count"), equalTo(5l));
+ assertThat((long) reverseNested.getProperty("_count"), equalTo(5L));
Terms tags = reverseNested.getAggregations().get("field1");
assertThat((Terms) reverseNested.getProperty("field1"), sameInstance(tags));
List<Terms.Bucket> tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(6));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("c"));
- assertThat(tagsBuckets.get(0).getDocCount(), equalTo(4l));
+ assertThat(tagsBuckets.get(0).getDocCount(), equalTo(4L));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("a"));
- assertThat(tagsBuckets.get(1).getDocCount(), equalTo(3l));
+ assertThat(tagsBuckets.get(1).getDocCount(), equalTo(3L));
assertThat(tagsBuckets.get(2).getKeyAsString(), equalTo("e"));
- assertThat(tagsBuckets.get(2).getDocCount(), equalTo(2l));
+ assertThat(tagsBuckets.get(2).getDocCount(), equalTo(2L));
assertThat(tagsBuckets.get(3).getKeyAsString(), equalTo("b"));
- assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1L));
assertThat(tagsBuckets.get(4).getKeyAsString(), equalTo("d"));
- assertThat(tagsBuckets.get(4).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(4).getDocCount(), equalTo(1L));
assertThat(tagsBuckets.get(5).getKeyAsString(), equalTo("x"));
- assertThat(tagsBuckets.get(5).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(5).getDocCount(), equalTo(1L));
// nested.field2: 4
bucket = usernameBuckets.get(1);
assertThat(bucket.getKeyAsString(), equalTo("4"));
- assertThat(bucket.getDocCount(), equalTo(4l));
+ assertThat(bucket.getDocCount(), equalTo(4L));
reverseNested = bucket.getAggregations().get("nested1_to_field1");
tags = reverseNested.getAggregations().get("field1");
tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(5));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("a"));
- assertThat(tagsBuckets.get(0).getDocCount(), equalTo(3l));
+ assertThat(tagsBuckets.get(0).getDocCount(), equalTo(3L));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("b"));
- assertThat(tagsBuckets.get(1).getDocCount(), equalTo(2l));
+ assertThat(tagsBuckets.get(1).getDocCount(), equalTo(2L));
assertThat(tagsBuckets.get(2).getKeyAsString(), equalTo("c"));
- assertThat(tagsBuckets.get(2).getDocCount(), equalTo(2l));
+ assertThat(tagsBuckets.get(2).getDocCount(), equalTo(2L));
assertThat(tagsBuckets.get(3).getKeyAsString(), equalTo("d"));
- assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1L));
assertThat(tagsBuckets.get(4).getKeyAsString(), equalTo("e"));
- assertThat(tagsBuckets.get(4).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(4).getDocCount(), equalTo(1L));
// nested.field2: 7
bucket = usernameBuckets.get(2);
assertThat(bucket.getKeyAsString(), equalTo("7"));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
reverseNested = bucket.getAggregations().get("nested1_to_field1");
tags = reverseNested.getAggregations().get("field1");
tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(5));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("c"));
- assertThat(tagsBuckets.get(0).getDocCount(), equalTo(2l));
+ assertThat(tagsBuckets.get(0).getDocCount(), equalTo(2L));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("d"));
- assertThat(tagsBuckets.get(1).getDocCount(), equalTo(2l));
+ assertThat(tagsBuckets.get(1).getDocCount(), equalTo(2L));
assertThat(tagsBuckets.get(2).getKeyAsString(), equalTo("e"));
- assertThat(tagsBuckets.get(2).getDocCount(), equalTo(2l));
+ assertThat(tagsBuckets.get(2).getDocCount(), equalTo(2L));
assertThat(tagsBuckets.get(3).getKeyAsString(), equalTo("a"));
- assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1L));
assertThat(tagsBuckets.get(4).getKeyAsString(), equalTo("b"));
- assertThat(tagsBuckets.get(4).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(4).getDocCount(), equalTo(1L));
// nested.field2: 2
bucket = usernameBuckets.get(3);
assertThat(bucket.getKeyAsString(), equalTo("2"));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
reverseNested = bucket.getAggregations().get("nested1_to_field1");
tags = reverseNested.getAggregations().get("field1");
tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(3));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("a"));
- assertThat(tagsBuckets.get(0).getDocCount(), equalTo(2l));
+ assertThat(tagsBuckets.get(0).getDocCount(), equalTo(2L));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("c"));
- assertThat(tagsBuckets.get(1).getDocCount(), equalTo(2l));
+ assertThat(tagsBuckets.get(1).getDocCount(), equalTo(2L));
assertThat(tagsBuckets.get(2).getKeyAsString(), equalTo("b"));
- assertThat(tagsBuckets.get(2).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(2).getDocCount(), equalTo(1L));
// nested.field2: 3
bucket = usernameBuckets.get(4);
assertThat(bucket.getKeyAsString(), equalTo("3"));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
reverseNested = bucket.getAggregations().get("nested1_to_field1");
tags = reverseNested.getAggregations().get("field1");
tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(3));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("a"));
- assertThat(tagsBuckets.get(0).getDocCount(), equalTo(2l));
+ assertThat(tagsBuckets.get(0).getDocCount(), equalTo(2L));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("b"));
- assertThat(tagsBuckets.get(1).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(1).getDocCount(), equalTo(1L));
assertThat(tagsBuckets.get(2).getKeyAsString(), equalTo("c"));
- assertThat(tagsBuckets.get(2).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(2).getDocCount(), equalTo(1L));
// nested.field2: 5
bucket = usernameBuckets.get(5);
assertThat(bucket.getKeyAsString(), equalTo("5"));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
reverseNested = bucket.getAggregations().get("nested1_to_field1");
tags = reverseNested.getAggregations().get("field1");
tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(4));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("b"));
- assertThat(tagsBuckets.get(0).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(0).getDocCount(), equalTo(1L));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("c"));
- assertThat(tagsBuckets.get(1).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(1).getDocCount(), equalTo(1L));
assertThat(tagsBuckets.get(2).getKeyAsString(), equalTo("d"));
- assertThat(tagsBuckets.get(2).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(2).getDocCount(), equalTo(1L));
assertThat(tagsBuckets.get(3).getKeyAsString(), equalTo("z"));
- assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1L));
// nested.field2: 6
bucket = usernameBuckets.get(6);
assertThat(bucket.getKeyAsString(), equalTo("6"));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
reverseNested = bucket.getAggregations().get("nested1_to_field1");
tags = reverseNested.getAggregations().get("field1");
tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(4));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("c"));
- assertThat(tagsBuckets.get(0).getDocCount(), equalTo(2l));
+ assertThat(tagsBuckets.get(0).getDocCount(), equalTo(2L));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("b"));
- assertThat(tagsBuckets.get(1).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(1).getDocCount(), equalTo(1L));
assertThat(tagsBuckets.get(2).getKeyAsString(), equalTo("d"));
- assertThat(tagsBuckets.get(2).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(2).getDocCount(), equalTo(1L));
assertThat(tagsBuckets.get(3).getKeyAsString(), equalTo("y"));
- assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1L));
// nested.field2: 8
bucket = usernameBuckets.get(7);
assertThat(bucket.getKeyAsString(), equalTo("8"));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
reverseNested = bucket.getAggregations().get("nested1_to_field1");
tags = reverseNested.getAggregations().get("field1");
tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(4));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("c"));
- assertThat(tagsBuckets.get(0).getDocCount(), equalTo(2l));
+ assertThat(tagsBuckets.get(0).getDocCount(), equalTo(2L));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("d"));
- assertThat(tagsBuckets.get(1).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(1).getDocCount(), equalTo(1L));
assertThat(tagsBuckets.get(2).getKeyAsString(), equalTo("e"));
- assertThat(tagsBuckets.get(2).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(2).getDocCount(), equalTo(1L));
assertThat(tagsBuckets.get(3).getKeyAsString(), equalTo("x"));
- assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1L));
// nested.field2: 9
bucket = usernameBuckets.get(8);
assertThat(bucket.getKeyAsString(), equalTo("9"));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
reverseNested = bucket.getAggregations().get("nested1_to_field1");
tags = reverseNested.getAggregations().get("field1");
tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(4));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("c"));
- assertThat(tagsBuckets.get(0).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(0).getDocCount(), equalTo(1L));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("d"));
- assertThat(tagsBuckets.get(1).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(1).getDocCount(), equalTo(1L));
assertThat(tagsBuckets.get(2).getKeyAsString(), equalTo("e"));
- assertThat(tagsBuckets.get(2).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(2).getDocCount(), equalTo(1L));
assertThat(tagsBuckets.get(3).getKeyAsString(), equalTo("z"));
- assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1L));
}
public void testSimpleNested1ToRootToNested2() throws Exception {
@@ -337,13 +337,13 @@ public class ReverseNestedIT extends ESIntegTestCase {
assertSearchResponse(response);
Nested nested = response.getAggregations().get("nested1");
assertThat(nested.getName(), equalTo("nested1"));
- assertThat(nested.getDocCount(), equalTo(9l));
+ assertThat(nested.getDocCount(), equalTo(9L));
ReverseNested reverseNested = nested.getAggregations().get("nested1_to_root");
assertThat(reverseNested.getName(), equalTo("nested1_to_root"));
- assertThat(reverseNested.getDocCount(), equalTo(4l));
+ assertThat(reverseNested.getDocCount(), equalTo(4L));
nested = reverseNested.getAggregations().get("root_to_nested2");
assertThat(nested.getName(), equalTo("root_to_nested2"));
- assertThat(nested.getDocCount(), equalTo(27l));
+ assertThat(nested.getDocCount(), equalTo(27L));
}
public void testSimpleReverseNestedToNested1() throws Exception {
@@ -368,7 +368,7 @@ public class ReverseNestedIT extends ESIntegTestCase {
Nested nested = response.getAggregations().get("nested1");
assertThat(nested, notNullValue());
assertThat(nested.getName(), equalTo("nested1"));
- assertThat(nested.getDocCount(), equalTo(27l));
+ assertThat(nested.getDocCount(), equalTo(27L));
assertThat(nested.getAggregations().asList().isEmpty(), is(false));
Terms usernames = nested.getAggregations().get("field2");
@@ -378,73 +378,73 @@ public class ReverseNestedIT extends ESIntegTestCase {
Terms.Bucket bucket = usernameBuckets.get(0);
assertThat(bucket.getKeyAsString(), equalTo("0"));
- assertThat(bucket.getDocCount(), equalTo(12l));
+ assertThat(bucket.getDocCount(), equalTo(12L));
ReverseNested reverseNested = bucket.getAggregations().get("nested1_to_field1");
- assertThat(reverseNested.getDocCount(), equalTo(5l));
+ assertThat(reverseNested.getDocCount(), equalTo(5L));
Terms tags = reverseNested.getAggregations().get("field1");
List<Terms.Bucket> tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(2));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("a"));
- assertThat(tagsBuckets.get(0).getDocCount(), equalTo(3l));
+ assertThat(tagsBuckets.get(0).getDocCount(), equalTo(3L));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("b"));
- assertThat(tagsBuckets.get(1).getDocCount(), equalTo(2l));
+ assertThat(tagsBuckets.get(1).getDocCount(), equalTo(2L));
bucket = usernameBuckets.get(1);
assertThat(bucket.getKeyAsString(), equalTo("1"));
- assertThat(bucket.getDocCount(), equalTo(6l));
+ assertThat(bucket.getDocCount(), equalTo(6L));
reverseNested = bucket.getAggregations().get("nested1_to_field1");
- assertThat(reverseNested.getDocCount(), equalTo(4l));
+ assertThat(reverseNested.getDocCount(), equalTo(4L));
tags = reverseNested.getAggregations().get("field1");
tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(4));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("a"));
- assertThat(tagsBuckets.get(0).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(0).getDocCount(), equalTo(1L));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("b"));
- assertThat(tagsBuckets.get(1).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(1).getDocCount(), equalTo(1L));
assertThat(tagsBuckets.get(2).getKeyAsString(), equalTo("c"));
- assertThat(tagsBuckets.get(2).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(2).getDocCount(), equalTo(1L));
assertThat(tagsBuckets.get(3).getKeyAsString(), equalTo("e"));
- assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1L));
bucket = usernameBuckets.get(2);
assertThat(bucket.getKeyAsString(), equalTo("2"));
- assertThat(bucket.getDocCount(), equalTo(5l));
+ assertThat(bucket.getDocCount(), equalTo(5L));
reverseNested = bucket.getAggregations().get("nested1_to_field1");
- assertThat(reverseNested.getDocCount(), equalTo(4l));
+ assertThat(reverseNested.getDocCount(), equalTo(4L));
tags = reverseNested.getAggregations().get("field1");
tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(4));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("a"));
- assertThat(tagsBuckets.get(0).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(0).getDocCount(), equalTo(1L));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("b"));
- assertThat(tagsBuckets.get(1).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(1).getDocCount(), equalTo(1L));
assertThat(tagsBuckets.get(2).getKeyAsString(), equalTo("c"));
- assertThat(tagsBuckets.get(2).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(2).getDocCount(), equalTo(1L));
assertThat(tagsBuckets.get(3).getKeyAsString(), equalTo("e"));
- assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1L));
bucket = usernameBuckets.get(3);
assertThat(bucket.getKeyAsString(), equalTo("3"));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
reverseNested = bucket.getAggregations().get("nested1_to_field1");
- assertThat(reverseNested.getDocCount(), equalTo(2l));
+ assertThat(reverseNested.getDocCount(), equalTo(2L));
tags = reverseNested.getAggregations().get("field1");
tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(2));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("d"));
- assertThat(tagsBuckets.get(0).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(0).getDocCount(), equalTo(1L));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("f"));
bucket = usernameBuckets.get(4);
assertThat(bucket.getKeyAsString(), equalTo("4"));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
reverseNested = bucket.getAggregations().get("nested1_to_field1");
- assertThat(reverseNested.getDocCount(), equalTo(2l));
+ assertThat(reverseNested.getDocCount(), equalTo(2L));
tags = reverseNested.getAggregations().get("field1");
tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(2));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("d"));
- assertThat(tagsBuckets.get(0).getDocCount(), equalTo(1l));
+ assertThat(tagsBuckets.get(0).getDocCount(), equalTo(1L));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("f"));
}
@@ -478,7 +478,7 @@ public class ReverseNestedIT extends ESIntegTestCase {
assertThat(nested.getName(), equalTo("nested2"));
ReverseNested reverseNested = nested.getAggregations().get("incorrect");
- assertThat(reverseNested.getDocCount(), is(0l));
+ assertThat(reverseNested.getDocCount(), is(0L));
}
public void testSameParentDocHavingMultipleBuckets() throws Exception {
@@ -574,21 +574,21 @@ public class ReverseNestedIT extends ESIntegTestCase {
assertHitCount(response, 1);
Nested nested0 = response.getAggregations().get("nested_0");
- assertThat(nested0.getDocCount(), equalTo(3l));
+ assertThat(nested0.getDocCount(), equalTo(3L));
Terms terms = nested0.getAggregations().get("group_by_category");
assertThat(terms.getBuckets().size(), equalTo(3));
for (String bucketName : new String[]{"abc", "klm", "xyz"}) {
logger.info("Checking results for bucket {}", bucketName);
Terms.Bucket bucket = terms.getBucketByKey(bucketName);
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
ReverseNested toRoot = bucket.getAggregations().get("to_root");
- assertThat(toRoot.getDocCount(), equalTo(1l));
+ assertThat(toRoot.getDocCount(), equalTo(1L));
Nested nested1 = toRoot.getAggregations().get("nested_1");
- assertThat(nested1.getDocCount(), equalTo(5l));
+ assertThat(nested1.getDocCount(), equalTo(5L));
Filter filterByBar = nested1.getAggregations().get("filter_by_sku");
- assertThat(filterByBar.getDocCount(), equalTo(3l));
+ assertThat(filterByBar.getDocCount(), equalTo(3L));
ValueCount barCount = filterByBar.getAggregations().get("sku_count");
- assertThat(barCount.getValue(), equalTo(3l));
+ assertThat(barCount.getValue(), equalTo(3L));
}
response = client().prepareSearch("idx3")
@@ -615,27 +615,27 @@ public class ReverseNestedIT extends ESIntegTestCase {
assertHitCount(response, 1);
nested0 = response.getAggregations().get("nested_0");
- assertThat(nested0.getDocCount(), equalTo(3l));
+ assertThat(nested0.getDocCount(), equalTo(3L));
terms = nested0.getAggregations().get("group_by_category");
assertThat(terms.getBuckets().size(), equalTo(3));
for (String bucketName : new String[]{"abc", "klm", "xyz"}) {
logger.info("Checking results for bucket {}", bucketName);
Terms.Bucket bucket = terms.getBucketByKey(bucketName);
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
ReverseNested toRoot = bucket.getAggregations().get("to_root");
- assertThat(toRoot.getDocCount(), equalTo(1l));
+ assertThat(toRoot.getDocCount(), equalTo(1L));
Nested nested1 = toRoot.getAggregations().get("nested_1");
- assertThat(nested1.getDocCount(), equalTo(5l));
+ assertThat(nested1.getDocCount(), equalTo(5L));
Filter filterByBar = nested1.getAggregations().get("filter_by_sku");
- assertThat(filterByBar.getDocCount(), equalTo(3l));
+ assertThat(filterByBar.getDocCount(), equalTo(3L));
Nested nested2 = filterByBar.getAggregations().get("nested_2");
- assertThat(nested2.getDocCount(), equalTo(8l));
+ assertThat(nested2.getDocCount(), equalTo(8L));
Filter filterBarColor = nested2.getAggregations().get("filter_sku_color");
- assertThat(filterBarColor.getDocCount(), equalTo(2l));
+ assertThat(filterBarColor.getDocCount(), equalTo(2L));
ReverseNested reverseToBar = filterBarColor.getAggregations().get("reverse_to_sku");
- assertThat(reverseToBar.getDocCount(), equalTo(2l));
+ assertThat(reverseToBar.getDocCount(), equalTo(2L));
ValueCount barCount = reverseToBar.getAggregations().get("sku_count");
- assertThat(barCount.getValue(), equalTo(2l));
+ assertThat(barCount.getValue(), equalTo(2L));
}
}
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java
index 2535ca33b7..623d27b2cc 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java
@@ -137,7 +137,7 @@ public class SamplerIT extends ESIntegTestCase {
for (Terms.Bucket testBucket : testBuckets) {
maxBooksPerAuthor = Math.max(testBucket.getDocCount(), maxBooksPerAuthor);
}
- assertThat(maxBooksPerAuthor, equalTo(3l));
+ assertThat(maxBooksPerAuthor, equalTo(3L));
}
public void testSimpleDiversity() throws Exception {
@@ -232,7 +232,7 @@ public class SamplerIT extends ESIntegTestCase {
.actionGet();
assertSearchResponse(response);
Sampler sample = response.getAggregations().get("sample");
- assertThat(sample.getDocCount(), equalTo(0l));
+ assertThat(sample.getDocCount(), equalTo(0L));
Terms authors = sample.getAggregations().get("authors");
assertThat(authors.getBuckets().size(), equalTo(0));
}
@@ -249,7 +249,7 @@ public class SamplerIT extends ESIntegTestCase {
.actionGet();
assertSearchResponse(response);
Sampler sample = response.getAggregations().get("sample");
- assertThat(sample.getDocCount(), greaterThan(0l));
+ assertThat(sample.getDocCount(), greaterThan(0L));
Terms authors = sample.getAggregations().get("authors");
assertThat(authors.getBuckets().size(), greaterThan(0));
}
@@ -264,7 +264,7 @@ public class SamplerIT extends ESIntegTestCase {
.execute().actionGet();
assertSearchResponse(response);
Sampler sample = response.getAggregations().get("sample");
- assertThat(sample.getDocCount(), greaterThan(0l));
+ assertThat(sample.getDocCount(), greaterThan(0L));
Terms authors = sample.getAggregations().get("authors");
assertThat(authors.getBuckets().size(), greaterThan(0));
}
@@ -279,7 +279,7 @@ public class SamplerIT extends ESIntegTestCase {
.setQuery(new TermQueryBuilder("genre", "fantasy")).setFrom(0).setSize(60).addAggregation(sampleAgg).execute().actionGet();
assertSearchResponse(response);
Sampler sample = response.getAggregations().get("sample");
- assertThat(sample.getDocCount(), equalTo(0l));
+ assertThat(sample.getDocCount(), equalTo(0L));
Terms authors = sample.getAggregations().get("authors");
assertNull(authors);
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsIT.java
index 78e4f7a099..0616fa01b1 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsIT.java
@@ -46,9 +46,9 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
Collection<Terms.Bucket> buckets = terms.getBuckets();
assertThat(buckets.size(), equalTo(3));
Map<String, Long> expected = new HashMap<>();
- expected.put("1", 8l);
- expected.put("3", 8l);
- expected.put("2", 5l);
+ expected.put("1", 8L);
+ expected.put("3", 8L);
+ expected.put("2", 5L);
for (Terms.Bucket bucket : buckets) {
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsString())));
}
@@ -69,9 +69,9 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
Collection<Terms.Bucket> buckets = terms.getBuckets();
assertThat(buckets.size(), equalTo(3));
Map<String, Long> expected = new HashMap<>();
- expected.put("1", 8l);
- expected.put("3", 8l);
- expected.put("2", 4l);
+ expected.put("1", 8L);
+ expected.put("3", 8L);
+ expected.put("2", 4L);
for (Terms.Bucket bucket : buckets) {
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsString())));
}
@@ -93,9 +93,9 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
Collection<Terms.Bucket> buckets = terms.getBuckets();
assertThat(buckets.size(), equalTo(3)); // we still only return 3 entries (based on the 'size' param)
Map<String, Long> expected = new HashMap<>();
- expected.put("1", 8l);
- expected.put("3", 8l);
- expected.put("2", 5l); // <-- count is now fixed
+ expected.put("1", 8L);
+ expected.put("3", 8L);
+ expected.put("2", 5L); // <-- count is now fixed
for (Terms.Bucket bucket : buckets) {
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsString())));
}
@@ -117,9 +117,9 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
Collection<Terms.Bucket> buckets = terms.getBuckets();
assertThat(buckets.size(), equalTo(3)); // we still only return 3 entries (based on the 'size' param)
Map<String, Long> expected = new HashMap<>();
- expected.put("1", 5l);
- expected.put("2", 4l);
- expected.put("3", 3l); // <-- count is now fixed
+ expected.put("1", 5L);
+ expected.put("2", 4L);
+ expected.put("3", 3L); // <-- count is now fixed
for (Terms.Bucket bucket: buckets) {
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKey())));
}
@@ -140,9 +140,9 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
Collection<Terms.Bucket> buckets = terms.getBuckets();
assertThat(buckets.size(), equalTo(3));
Map<String, Long> expected = new HashMap<>();
- expected.put("1", 8l);
- expected.put("2", 5l);
- expected.put("3", 8l);
+ expected.put("1", 8L);
+ expected.put("2", 5L);
+ expected.put("3", 8L);
for (Terms.Bucket bucket : buckets) {
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsString())));
}
@@ -163,9 +163,9 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
Collection<Terms.Bucket> buckets = terms.getBuckets();
assertThat(buckets.size(), equalTo(3));
Map<Integer, Long> expected = new HashMap<>();
- expected.put(1, 8l);
- expected.put(3, 8l);
- expected.put(2, 5l);
+ expected.put(1, 8L);
+ expected.put(3, 8L);
+ expected.put(2, 5L);
for (Terms.Bucket bucket : buckets) {
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue())));
}
@@ -186,9 +186,9 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
Collection<Terms.Bucket> buckets = terms.getBuckets();
assertThat(buckets.size(), equalTo(3));
Map<Integer, Long> expected = new HashMap<>();
- expected.put(1, 8l);
- expected.put(3, 8l);
- expected.put(2, 4l);
+ expected.put(1, 8L);
+ expected.put(3, 8L);
+ expected.put(2, 4L);
for (Terms.Bucket bucket : buckets) {
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue())));
}
@@ -209,9 +209,9 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
Collection<Terms.Bucket> buckets = terms.getBuckets();
assertThat(buckets.size(), equalTo(3)); // we still only return 3 entries (based on the 'size' param)
Map<Integer, Long> expected = new HashMap<>();
- expected.put(1, 8l);
- expected.put(3, 8l);
- expected.put(2, 5l); // <-- count is now fixed
+ expected.put(1, 8L);
+ expected.put(3, 8L);
+ expected.put(2, 5L); // <-- count is now fixed
for (Terms.Bucket bucket : buckets) {
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue())));
}
@@ -233,9 +233,9 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
Collection<Terms.Bucket> buckets = terms.getBuckets();
assertThat(buckets.size(), equalTo(3)); // we still only return 3 entries (based on the 'size' param)
Map<Integer, Long> expected = new HashMap<>();
- expected.put(1, 5l);
- expected.put(2, 4l);
- expected.put(3, 3l);
+ expected.put(1, 5L);
+ expected.put(2, 4L);
+ expected.put(3, 3L);
for (Terms.Bucket bucket : buckets) {
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue())));
}
@@ -256,9 +256,9 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
Collection<Terms.Bucket> buckets = terms.getBuckets();
assertThat(buckets.size(), equalTo(3));
Map<Integer, Long> expected = new HashMap<>();
- expected.put(1, 8l);
- expected.put(2, 5l);
- expected.put(3, 8l);
+ expected.put(1, 8L);
+ expected.put(2, 5L);
+ expected.put(3, 8L);
for (Terms.Bucket bucket : buckets) {
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue())));
}
@@ -279,9 +279,9 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
Collection<Terms.Bucket> buckets = terms.getBuckets();
assertThat(buckets.size(), equalTo(3));
Map<Integer, Long> expected = new HashMap<>();
- expected.put(1, 8l);
- expected.put(3, 8l);
- expected.put(2, 5l);
+ expected.put(1, 8L);
+ expected.put(3, 8L);
+ expected.put(2, 5L);
for (Terms.Bucket bucket : buckets) {
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue())));
}
@@ -302,9 +302,9 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
Collection<Terms.Bucket> buckets = terms.getBuckets();
assertThat(buckets.size(), equalTo(3));
Map<Integer, Long> expected = new HashMap<>();
- expected.put(1, 8l);
- expected.put(3, 8l);
- expected.put(2, 4l);
+ expected.put(1, 8L);
+ expected.put(3, 8L);
+ expected.put(2, 4L);
for (Terms.Bucket bucket : buckets) {
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue())));
}
@@ -325,9 +325,9 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
Collection<Terms.Bucket> buckets = terms.getBuckets();
assertThat(buckets.size(), equalTo(3));
Map<Integer, Long> expected = new HashMap<>();
- expected.put(1, 8l);
- expected.put(3, 8l);
- expected.put(2, 5l); // <-- count is now fixed
+ expected.put(1, 8L);
+ expected.put(3, 8L);
+ expected.put(2, 5L); // <-- count is now fixed
for (Terms.Bucket bucket : buckets) {
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue())));
}
@@ -348,9 +348,9 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
Collection<Terms.Bucket> buckets = terms.getBuckets();
assertThat(buckets.size(), equalTo(3));
Map<Integer, Long> expected = new HashMap<>();
- expected.put(1, 5l);
- expected.put(2, 4l);
- expected.put(3, 3l);
+ expected.put(1, 5L);
+ expected.put(2, 4L);
+ expected.put(3, 3L);
for (Terms.Bucket bucket : buckets) {
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue())));
}
@@ -371,9 +371,9 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
Collection<Terms.Bucket> buckets = terms.getBuckets();
assertThat(buckets.size(), equalTo(3));
Map<Integer, Long> expected = new HashMap<>();
- expected.put(1, 8l);
- expected.put(2, 5l);
- expected.put(3, 8l);
+ expected.put(1, 8L);
+ expected.put(2, 5L);
+ expected.put(3, 8L);
for (Terms.Bucket bucket : buckets) {
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue())));
}
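Note on the "count is now fixed" assertions above: with the default shard_size, each shard reports only its own top terms, so a term that is popular overall but not on every shard can come back with a truncated count. Raising shard_size above size makes each shard contribute more candidates, and the coordinating node can then merge exact per-shard counts. A minimal request-side sketch, assuming the 2.x-era TermsBuilder used by these tests with its size and shardSize setters:

    SearchResponse response = client().prepareSearch("idx").setTypes("type")
            .setQuery(matchAllQuery())
            .addAggregation(terms("keys").field("key")
                    .size(3)        // buckets returned to the caller
                    .shardSize(5))  // candidates each shard contributes to the merge
            .execute().actionGet();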
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java
index 607b6902f8..a372342887 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java
@@ -93,11 +93,11 @@ public abstract class ShardSizeTestCase extends ESIntegTestCase {
SearchResponse resp = client().prepareSearch("idx").setTypes("type").setRouting(routing1).setQuery(matchAllQuery()).execute().actionGet();
assertSearchResponse(resp);
long totalOnOne = resp.getHits().getTotalHits();
- assertThat(totalOnOne, is(15l));
+ assertThat(totalOnOne, is(15L));
resp = client().prepareSearch("idx").setTypes("type").setRouting(routing2).setQuery(matchAllQuery()).execute().actionGet();
assertSearchResponse(resp);
long totalOnTwo = resp.getHits().getTotalHits();
- assertThat(totalOnTwo, is(12l));
+ assertThat(totalOnTwo, is(12L));
}
protected List<IndexRequestBuilder> indexDoc(String shard, String key, int times) throws Exception {
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsIT.java
index 7582d75ca0..97a3cfa3ba 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsIT.java
@@ -76,7 +76,7 @@ public class SignificantTermsIT extends ESIntegTestCase {
public void setupSuiteScopeCluster() throws Exception {
assertAcked(prepareCreate("test").setSettings(SETTING_NUMBER_OF_SHARDS, 5, SETTING_NUMBER_OF_REPLICAS, 0).addMapping("fact",
"_routing", "required=true", "routing_id", "type=string,index=not_analyzed", "fact_category",
- "type=integer,index=not_analyzed", "description", "type=string,index=analyzed"));
+ "type=integer,index=true", "description", "type=string,index=analyzed"));
createIndex("idx_unmapped");
ensureGreen();
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsDocCountErrorIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsDocCountErrorIT.java
index 79aa6b2d5c..1780911ccf 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsDocCountErrorIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsDocCountErrorIT.java
@@ -101,12 +101,12 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
Terms accurateTerms = accurateResponse.getAggregations().get("terms");
assertThat(accurateTerms, notNullValue());
assertThat(accurateTerms.getName(), equalTo("terms"));
- assertThat(accurateTerms.getDocCountError(), equalTo(0l));
+ assertThat(accurateTerms.getDocCountError(), equalTo(0L));
Terms testTerms = testResponse.getAggregations().get("terms");
assertThat(testTerms, notNullValue());
assertThat(testTerms.getName(), equalTo("terms"));
- assertThat(testTerms.getDocCountError(), greaterThanOrEqualTo(0l));
+ assertThat(testTerms.getDocCountError(), greaterThanOrEqualTo(0L));
Collection<Bucket> testBuckets = testTerms.getBuckets();
assertThat(testBuckets.size(), lessThanOrEqualTo(size));
assertThat(accurateTerms.getBuckets().size(), greaterThanOrEqualTo(testBuckets.size()));
@@ -115,7 +115,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
assertThat(testBucket, notNullValue());
Terms.Bucket accurateBucket = accurateTerms.getBucketByKey(testBucket.getKeyAsString());
assertThat(accurateBucket, notNullValue());
- assertThat(accurateBucket.getDocCountError(), equalTo(0l));
+ assertThat(accurateBucket.getDocCountError(), equalTo(0L));
assertThat(testBucket.getDocCountError(), lessThanOrEqualTo(testTerms.getDocCountError()));
assertThat(testBucket.getDocCount() + testBucket.getDocCountError(), greaterThanOrEqualTo(accurateBucket.getDocCount()));
assertThat(testBucket.getDocCount() - testBucket.getDocCountError(), lessThanOrEqualTo(accurateBucket.getDocCount()));
@@ -135,12 +135,12 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
Terms accurateTerms = accurateResponse.getAggregations().get("terms");
assertThat(accurateTerms, notNullValue());
assertThat(accurateTerms.getName(), equalTo("terms"));
- assertThat(accurateTerms.getDocCountError(), equalTo(0l));
+ assertThat(accurateTerms.getDocCountError(), equalTo(0L));
Terms testTerms = testResponse.getAggregations().get("terms");
assertThat(testTerms, notNullValue());
assertThat(testTerms.getName(), equalTo("terms"));
- assertThat(testTerms.getDocCountError(), equalTo(0l));
+ assertThat(testTerms.getDocCountError(), equalTo(0L));
Collection<Bucket> testBuckets = testTerms.getBuckets();
assertThat(testBuckets.size(), lessThanOrEqualTo(size));
assertThat(accurateTerms.getBuckets().size(), greaterThanOrEqualTo(testBuckets.size()));
@@ -149,8 +149,8 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
assertThat(testBucket, notNullValue());
Terms.Bucket accurateBucket = accurateTerms.getBucketByKey(testBucket.getKeyAsString());
assertThat(accurateBucket, notNullValue());
- assertThat(accurateBucket.getDocCountError(), equalTo(0l));
- assertThat(testBucket.getDocCountError(), equalTo(0l));
+ assertThat(accurateBucket.getDocCountError(), equalTo(0L));
+ assertThat(testBucket.getDocCountError(), equalTo(0L));
}
}
@@ -158,13 +158,13 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
Terms testTerms = testResponse.getAggregations().get("terms");
assertThat(testTerms, notNullValue());
assertThat(testTerms.getName(), equalTo("terms"));
- assertThat(testTerms.getDocCountError(), equalTo(0l));
+ assertThat(testTerms.getDocCountError(), equalTo(0L));
Collection<Bucket> testBuckets = testTerms.getBuckets();
assertThat(testBuckets.size(), lessThanOrEqualTo(size));
for (Terms.Bucket testBucket : testBuckets) {
assertThat(testBucket, notNullValue());
- assertThat(testBucket.getDocCountError(), equalTo(0l));
+ assertThat(testBucket.getDocCountError(), equalTo(0L));
}
}
@@ -172,12 +172,12 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
Terms accurateTerms = accurateResponse.getAggregations().get("terms");
assertThat(accurateTerms, notNullValue());
assertThat(accurateTerms.getName(), equalTo("terms"));
- assertThat(accurateTerms.getDocCountError(), equalTo(0l));
+ assertThat(accurateTerms.getDocCountError(), equalTo(0L));
Terms testTerms = testResponse.getAggregations().get("terms");
assertThat(testTerms, notNullValue());
assertThat(testTerms.getName(), equalTo("terms"));
- assertThat(testTerms.getDocCountError(),anyOf(equalTo(-1l), equalTo(0l)));
+ assertThat(testTerms.getDocCountError(), anyOf(equalTo(-1L), equalTo(0L)));
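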
Collection<Bucket> testBuckets = testTerms.getBuckets();
assertThat(testBuckets.size(), lessThanOrEqualTo(size));
assertThat(accurateTerms.getBuckets().size(), greaterThanOrEqualTo(testBuckets.size()));
@@ -186,8 +186,8 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
assertThat(testBucket, notNullValue());
Terms.Bucket accurateBucket = accurateTerms.getBucketByKey(testBucket.getKeyAsString());
assertThat(accurateBucket, notNullValue());
- assertThat(accurateBucket.getDocCountError(), equalTo(0l));
- assertThat(testBucket.getDocCountError(), anyOf(equalTo(-1l), equalTo(0l)));
+ assertThat(accurateBucket.getDocCountError(), equalTo(0L));
+ assertThat(testBucket.getDocCountError(), anyOf(equalTo(-1L), equalTo(0L)));
}
}
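The assertions above encode the contract of doc count errors: for every bucket, the accurate count must lie within the reported error bound, i.e.

    testBucket.getDocCount() - testBucket.getDocCountError()
        <= accurateBucket.getDocCount()
        <= testBucket.getDocCount() + testBucket.getDocCountError()

while an exhaustive response reports an aggregation-level error of 0, and orders that cannot bound the error (as in the last hunk) report -1.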
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java
index 0cb799d2ac..a9cdb6ca98 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java
@@ -36,7 +36,6 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.lucene.search.Queries;
-import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
@@ -114,11 +113,11 @@ public class NestedAggregatorTests extends ESSingleNodeTestCase {
indexWriter.commit();
indexWriter.close();
+ IndexService indexService = createIndex("test");
DirectoryReader directoryReader = DirectoryReader.open(directory);
- directoryReader = ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(new Index("test"), 0));
+ directoryReader = ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(indexService.index(), 0));
IndexSearcher searcher = new IndexSearcher(directoryReader);
- IndexService indexService = createIndex("test");
indexService.mapperService().merge("test", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("test", "nested_field", "type=nested").string()), MapperService.MergeReason.MAPPING_UPDATE, false);
SearchContext searchContext = createSearchContext(indexService);
AggregationContext context = new AggregationContext(searchContext);
@@ -141,7 +140,7 @@ public class NestedAggregatorTests extends ESSingleNodeTestCase {
Nested nested = (Nested) aggs[0].buildAggregation(0);
// The bug manifests if 6 docs are returned: because currentRootDoc isn't reset, the previous child docs from the first segment are emitted as hits.
- assertThat(nested.getDocCount(), equalTo(4l));
+ assertThat(nested.getDocCount(), equalTo(4L));
directoryReader.close();
directory.close();
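The reordering in this hunk is load-bearing: as the SignificanceHeuristicTests change below suggests, Index now takes a UUID alongside its name, so the single-argument new Index("test") is gone and the ShardId must be built from the Index of a real IndexService. Condensed, the new sequence is:

    IndexService indexService = createIndex("test");  // supplies an Index carrying a real UUID
    DirectoryReader directoryReader = DirectoryReader.open(directory);
    directoryReader = ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(indexService.index(), 0));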
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java
index 0fe9113e8f..59c110e774 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java
@@ -27,6 +27,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
+import org.elasticsearch.index.Index;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
@@ -75,7 +76,7 @@ public class SignificanceHeuristicTests extends ESTestCase {
@Override
public SearchShardTarget shardTarget() {
- return new SearchShardTarget("no node, this is a unit test", "no index, this is a unit test", 0);
+ return new SearchShardTarget("no node, this is a unit test", new Index("no index, this is a unit test", "_na_"), 0);
}
}
@@ -102,8 +103,8 @@ public class SignificanceHeuristicTests extends ESTestCase {
assertThat(originalBucket.getKeyAsString(), equalTo(streamedBucket.getKeyAsString()));
assertThat(originalBucket.getSupersetDf(), equalTo(streamedBucket.getSupersetDf()));
assertThat(originalBucket.getSubsetDf(), equalTo(streamedBucket.getSubsetDf()));
- assertThat(streamedBucket.getSubsetSize(), equalTo(10l));
- assertThat(streamedBucket.getSupersetSize(), equalTo(20l));
+ assertThat(streamedBucket.getSubsetSize(), equalTo(10L));
+ assertThat(streamedBucket.getSupersetSize(), equalTo(20L));
}
InternalSignificantTerms[] getRandomSignificantTerms(SignificanceHeuristic heuristic) {
@@ -138,14 +139,14 @@ public class SignificanceHeuristicTests extends ESTestCase {
List<InternalAggregation> aggs = createInternalAggregations();
SignificantTerms reducedAgg = (SignificantTerms) aggs.get(0).doReduce(aggs, null);
assertThat(reducedAgg.getBuckets().size(), equalTo(2));
- assertThat(reducedAgg.getBuckets().get(0).getSubsetDf(), equalTo(8l));
- assertThat(reducedAgg.getBuckets().get(0).getSubsetSize(), equalTo(16l));
- assertThat(reducedAgg.getBuckets().get(0).getSupersetDf(), equalTo(10l));
- assertThat(reducedAgg.getBuckets().get(0).getSupersetSize(), equalTo(30l));
- assertThat(reducedAgg.getBuckets().get(1).getSubsetDf(), equalTo(8l));
- assertThat(reducedAgg.getBuckets().get(1).getSubsetSize(), equalTo(16l));
- assertThat(reducedAgg.getBuckets().get(1).getSupersetDf(), equalTo(10l));
- assertThat(reducedAgg.getBuckets().get(1).getSupersetSize(), equalTo(30l));
+ assertThat(reducedAgg.getBuckets().get(0).getSubsetDf(), equalTo(8L));
+ assertThat(reducedAgg.getBuckets().get(0).getSubsetSize(), equalTo(16L));
+ assertThat(reducedAgg.getBuckets().get(0).getSupersetDf(), equalTo(10L));
+ assertThat(reducedAgg.getBuckets().get(0).getSupersetSize(), equalTo(30L));
+ assertThat(reducedAgg.getBuckets().get(1).getSubsetDf(), equalTo(8L));
+ assertThat(reducedAgg.getBuckets().get(1).getSubsetSize(), equalTo(16L));
+ assertThat(reducedAgg.getBuckets().get(1).getSupersetDf(), equalTo(10L));
+ assertThat(reducedAgg.getBuckets().get(1).getSupersetSize(), equalTo(30L));
}
// Create aggregations as they might come from three different shards and return them as a list.
@@ -251,7 +252,7 @@ public class SignificanceHeuristicTests extends ESTestCase {
stParser.nextToken();
SignificantTermsAggregatorFactory aggregatorFactory = (SignificantTermsAggregatorFactory) new SignificantTermsParser(heuristicParserMapper).parse("testagg", stParser, searchContext);
stParser.nextToken();
- assertThat(aggregatorFactory.getBucketCountThresholds().getMinDocCount(), equalTo(200l));
+ assertThat(aggregatorFactory.getBucketCountThresholds().getMinDocCount(), equalTo(200L));
assertThat(stParser.currentToken(), equalTo(null));
stParser.close();
return aggregatorFactory.getSignificanceHeuristic();
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java
index 390e0cf547..695fb87efa 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java
@@ -199,7 +199,7 @@ public abstract class AbstractGeoTestCase extends ESIntegTestCase {
Long value = hitField.getValue();
assertThat("Hit " + i + " has wrong value", value.intValue(), equalTo(i));
}
- assertThat(totalHits, equalTo(2000l));
+ assertThat(totalHits, equalTo(2000L));
}
private void updateGeohashBucketsCentroid(final GeoPoint location) {
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java
index db02d6ccb0..2ce78e451c 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java
@@ -26,6 +26,7 @@ import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.LeafSearchScript;
import org.elasticsearch.script.Script;
+import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService.ScriptType;
@@ -71,10 +72,10 @@ public class AvgIT extends AbstractNumericTestCase {
SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
.setQuery(matchAllQuery())
- .addAggregation(histogram("histo").field("value").interval(1l).minDocCount(0).subAggregation(avg("avg")))
+ .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(avg("avg")))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, notNullValue());
Histogram.Bucket bucket = histo.getBuckets().get(1);
@@ -93,7 +94,7 @@ public class AvgIT extends AbstractNumericTestCase {
.addAggregation(avg("avg").field("value"))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(0l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(0L));
Avg avg = searchResponse.getAggregations().get("avg");
assertThat(avg, notNullValue());
@@ -127,7 +128,7 @@ public class AvgIT extends AbstractNumericTestCase {
Global global = searchResponse.getAggregations().get("global");
assertThat(global, notNullValue());
assertThat(global.getName(), equalTo("global"));
- assertThat(global.getDocCount(), equalTo(10l));
+ assertThat(global.getDocCount(), equalTo(10L));
assertThat(global.getAggregations(), notNullValue());
assertThat(global.getAggregations().asMap().size(), equalTo(1));
@@ -332,7 +333,7 @@ public class AvgIT extends AbstractNumericTestCase {
}
public void onModule(ScriptModule module) {
- module.addScriptEngine(ExtractFieldScriptEngine.class);
+ module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ExtractFieldScriptEngine.class, ExtractFieldScriptEngine.TYPES));
}
}
@@ -344,22 +345,24 @@ public class AvgIT extends AbstractNumericTestCase {
public static final String NAME = "extract_field";
+ public static final List<String> TYPES = Collections.singletonList(NAME);
+
@Override
public void close() throws IOException {
}
@Override
- public String[] types() {
- return new String[] { NAME };
+ public List<String> getTypes() {
+ return TYPES;
}
@Override
- public String[] extensions() {
- return types();
+ public List<String> getExtensions() {
+ return TYPES;
}
@Override
- public boolean sandboxed() {
+ public boolean isSandboxed() {
return true;
}
@@ -388,12 +391,6 @@ public class AvgIT extends AbstractNumericTestCase {
final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context);
return new LeafSearchScript() {
-
- @Override
- public Object unwrap(Object value) {
- return null;
- }
-
@Override
public void setNextVar(String name, Object value) {
}
@@ -468,7 +465,7 @@ public class AvgIT extends AbstractNumericTestCase {
}
public void onModule(ScriptModule module) {
- module.addScriptEngine(FieldValueScriptEngine.class);
+ module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(FieldValueScriptEngine.class, FieldValueScriptEngine.TYPES));
}
}
@@ -480,22 +477,24 @@ public class AvgIT extends AbstractNumericTestCase {
public static final String NAME = "field_value";
+ public static final List<String> TYPES = Collections.singletonList(NAME);
+
@Override
public void close() throws IOException {
}
@Override
- public String[] types() {
- return new String[] { NAME };
+ public List<String> getTypes() {
+ return TYPES;
}
@Override
- public String[] extensions() {
- return types();
+ public List<String> getExtensions() {
+ return TYPES;
}
@Override
- public boolean sandboxed() {
+ public boolean isSandboxed() {
return true;
}
@@ -526,12 +525,6 @@ public class AvgIT extends AbstractNumericTestCase {
final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context);
return new LeafSearchScript() {
-
- @Override
- public Object unwrap(Object value) {
- throw new UnsupportedOperationException();
- }
-
@Override
public void setNextVar(String name, Object value) {
vars.put(name, value);
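The AvgIT changes above follow the ScriptEngineService migration that SumIT and ValueCountIT repeat below: the array-returning types() and extensions() become List-returning getTypes() and getExtensions(), sandboxed() becomes isSandboxed(), and engines are registered through a ScriptEngineRegistry.ScriptEngineRegistration rather than a bare class. The migrated surface, condensed from these hunks:

    public static final String NAME = "extract_field";
    public static final List<String> TYPES = Collections.singletonList(NAME);

    @Override
    public List<String> getTypes() { return TYPES; }

    @Override
    public List<String> getExtensions() { return TYPES; }

    @Override
    public boolean isSandboxed() { return true; }

    // registration, from onModule(ScriptModule):
    module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(
            ExtractFieldScriptEngine.class, ExtractFieldScriptEngine.TYPES));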
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java
index 0f94f14213..d97bc82460 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java
@@ -166,7 +166,7 @@ public class GeoBoundsIT extends AbstractGeoTestCase {
.wrapLongitude(false))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(0l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(0L));
GeoBounds geoBounds = searchResponse.getAggregations().get(aggName);
assertThat(geoBounds, notNullValue());
assertThat(geoBounds.getName(), equalTo(aggName));
@@ -238,7 +238,7 @@ public class GeoBoundsIT extends AbstractGeoTestCase {
for (int i = 0; i < 10; i++) {
Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
- assertThat("InternalBucket " + bucket.getKey() + " has wrong number of documents", bucket.getDocCount(), equalTo(1l));
+ assertThat("InternalBucket " + bucket.getKey() + " has wrong number of documents", bucket.getDocCount(), equalTo(1L));
GeoBounds geoBounds = bucket.getAggregations().get(aggName);
assertThat(geoBounds, notNullValue());
assertThat(geoBounds.getName(), equalTo(aggName));
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidIT.java
index e0d260f543..8c21cbd7a5 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidIT.java
@@ -53,7 +53,7 @@ public class GeoCentroidIT extends AbstractGeoTestCase {
assertSearchResponse(response);
GeoCentroid geoCentroid = response.getAggregations().get(aggName);
- assertThat(response.getHits().getTotalHits(), equalTo(0l));
+ assertThat(response.getHits().getTotalHits(), equalTo(0L));
assertThat(geoCentroid, notNullValue());
assertThat(geoCentroid.getName(), equalTo(aggName));
GeoPoint centroid = geoCentroid.centroid();
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java
index 2c27bde57d..f2c05ee4ee 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java
@@ -26,6 +26,7 @@ import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.LeafSearchScript;
import org.elasticsearch.script.Script;
+import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService.ScriptType;
@@ -40,6 +41,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
+import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -67,10 +69,10 @@ public class SumIT extends AbstractNumericTestCase {
SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
.setQuery(matchAllQuery())
- .addAggregation(histogram("histo").field("value").interval(1l).minDocCount(0).subAggregation(sum("sum")))
+ .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(sum("sum")))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, notNullValue());
Histogram.Bucket bucket = histo.getBuckets().get(1);
@@ -89,7 +91,7 @@ public class SumIT extends AbstractNumericTestCase {
.addAggregation(sum("sum").field("value"))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(0l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(0L));
Sum sum = searchResponse.getAggregations().get("sum");
assertThat(sum, notNullValue());
@@ -136,7 +138,7 @@ public class SumIT extends AbstractNumericTestCase {
Global global = searchResponse.getAggregations().get("global");
assertThat(global, notNullValue());
assertThat(global.getName(), equalTo("global"));
- assertThat(global.getDocCount(), equalTo(10l));
+ assertThat(global.getDocCount(), equalTo(10L));
assertThat(global.getAggregations(), notNullValue());
assertThat(global.getAggregations().asMap().size(), equalTo(1));
@@ -326,7 +328,7 @@ public class SumIT extends AbstractNumericTestCase {
}
public void onModule(ScriptModule module) {
- module.addScriptEngine(ExtractFieldScriptEngine.class);
+ module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ExtractFieldScriptEngine.class, ExtractFieldScriptEngine.TYPES));
}
}
@@ -339,22 +341,24 @@ public class SumIT extends AbstractNumericTestCase {
public static final String NAME = "extract_field";
+ public static final List<String> TYPES = Collections.singletonList(NAME);
+
@Override
public void close() throws IOException {
}
@Override
- public String[] types() {
- return new String[] { NAME };
+ public List<String> getTypes() {
+ return TYPES;
}
@Override
- public String[] extensions() {
- return types();
+ public List<String> getExtensions() {
+ return TYPES;
}
@Override
- public boolean sandboxed() {
+ public boolean isSandboxed() {
return true;
}
@@ -384,12 +388,6 @@ public class SumIT extends AbstractNumericTestCase {
final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context);
return new LeafSearchScript() {
-
- @Override
- public Object unwrap(Object value) {
- return null;
- }
-
@Override
public void setNextVar(String name, Object value) {
}
@@ -464,7 +462,7 @@ public class SumIT extends AbstractNumericTestCase {
}
public void onModule(ScriptModule module) {
- module.addScriptEngine(FieldValueScriptEngine.class);
+ module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(FieldValueScriptEngine.class, FieldValueScriptEngine.TYPES));
}
}
@@ -477,22 +475,24 @@ public class SumIT extends AbstractNumericTestCase {
public static final String NAME = "field_value";
+ public static final List<String> TYPES = Collections.singletonList(NAME);
+
@Override
public void close() throws IOException {
}
@Override
- public String[] types() {
- return new String[] { NAME };
+ public List<String> getTypes() {
+ return TYPES;
}
@Override
- public String[] extensions() {
- return types();
+ public List<String> getExtensions() {
+ return TYPES;
}
@Override
- public boolean sandboxed() {
+ public boolean isSandboxed() {
return true;
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java
index 65e71fe9c0..90a52ed11b 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java
@@ -210,7 +210,7 @@ public class TopHitsIT extends ESIntegTestCase {
client().prepareIndex("articles", "article", "1")
.setSource(jsonBuilder().startObject().field("title", "title 1").field("body", "some text").startArray("comments")
.startObject()
- .field("user", "a").field("date", 1l).field("message", "some comment")
+ .field("user", "a").field("date", 1L).field("message", "some comment")
.startArray("reviewers")
.startObject().field("name", "user a").endObject()
.startObject().field("name", "user b").endObject()
@@ -218,7 +218,7 @@ public class TopHitsIT extends ESIntegTestCase {
.endArray()
.endObject()
.startObject()
- .field("user", "b").field("date", 2l).field("message", "some other comment")
+ .field("user", "b").field("date", 2L).field("message", "some other comment")
.startArray("reviewers")
.startObject().field("name", "user c").endObject()
.startObject().field("name", "user d").endObject()
@@ -231,12 +231,12 @@ public class TopHitsIT extends ESIntegTestCase {
client().prepareIndex("articles", "article", "2")
.setSource(jsonBuilder().startObject().field("title", "title 2").field("body", "some different text").startArray("comments")
.startObject()
- .field("user", "b").field("date", 3l).field("message", "some comment")
+ .field("user", "b").field("date", 3L).field("message", "some comment")
.startArray("reviewers")
.startObject().field("name", "user f").endObject()
.endArray()
.endObject()
- .startObject().field("user", "c").field("date", 4l).field("message", "some other comment").endObject()
+ .startObject().field("user", "c").field("date", 4L).field("message", "some other comment").endObject()
.endArray().endObject())
);
@@ -273,10 +273,10 @@ public class TopHitsIT extends ESIntegTestCase {
Terms.Bucket bucket = terms.getBucketByKey("val" + i);
assertThat(bucket, notNullValue());
assertThat(key(bucket), equalTo("val" + i));
- assertThat(bucket.getDocCount(), equalTo(10l));
+ assertThat(bucket.getDocCount(), equalTo(10L));
TopHits topHits = bucket.getAggregations().get("hits");
SearchHits hits = topHits.getHits();
- assertThat(hits.totalHits(), equalTo(10l));
+ assertThat(hits.totalHits(), equalTo(10L));
assertThat(hits.getHits().length, equalTo(3));
higestSortValue += 10;
assertThat((Long) hits.getAt(0).sortValues()[0], equalTo(higestSortValue));
@@ -299,7 +299,7 @@ public class TopHitsIT extends ESIntegTestCase {
assertSearchResponse(response);
- assertThat(response.getHits().getTotalHits(), equalTo(8l));
+ assertThat(response.getHits().getTotalHits(), equalTo(8L));
assertThat(response.getHits().hits().length, equalTo(0));
assertThat(response.getHits().maxScore(), equalTo(0f));
Terms terms = response.getAggregations().get("terms");
@@ -335,7 +335,7 @@ public class TopHitsIT extends ESIntegTestCase {
assertSearchResponse(response);
- assertThat(response.getHits().getTotalHits(), equalTo(8l));
+ assertThat(response.getHits().getTotalHits(), equalTo(8L));
assertThat(response.getHits().hits().length, equalTo(0));
assertThat(response.getHits().maxScore(), equalTo(0f));
terms = response.getAggregations().get("terms");
@@ -366,10 +366,10 @@ public class TopHitsIT extends ESIntegTestCase {
Terms.Bucket bucket = terms.getBucketByKey("val" + i);
assertThat(bucket, notNullValue());
assertThat(key(bucket), equalTo("val" + i));
- assertThat(bucket.getDocCount(), equalTo(10l));
+ assertThat(bucket.getDocCount(), equalTo(10L));
TopHits topHits = bucket.getAggregations().get("hits");
SearchHits hits = topHits.getHits();
- assertThat(hits.totalHits(), equalTo(10l));
+ assertThat(hits.totalHits(), equalTo(10L));
assertThat(hits.getHits().length, equalTo(3));
assertThat(hits.getAt(0).sourceAsMap().size(), equalTo(4));
@@ -428,7 +428,7 @@ public class TopHitsIT extends ESIntegTestCase {
Terms.Bucket bucket = terms.getBucketByKey("val0");
assertThat(bucket, notNullValue());
- assertThat(bucket.getDocCount(), equalTo(10l));
+ assertThat(bucket.getDocCount(), equalTo(10L));
TopHits topHits = bucket.getAggregations().get("hits");
SearchHits hits = topHits.getHits();
assertThat(hits.totalHits(), equalTo(controlHits.totalHits()));
@@ -465,10 +465,10 @@ public class TopHitsIT extends ESIntegTestCase {
int currentBucket = 4;
for (Terms.Bucket bucket : terms.getBuckets()) {
assertThat(key(bucket), equalTo("val" + currentBucket--));
- assertThat(bucket.getDocCount(), equalTo(10l));
+ assertThat(bucket.getDocCount(), equalTo(10L));
TopHits topHits = bucket.getAggregations().get("hits");
SearchHits hits = topHits.getHits();
- assertThat(hits.totalHits(), equalTo(10l));
+ assertThat(hits.totalHits(), equalTo(10L));
assertThat(hits.getHits().length, equalTo(3));
assertThat((Long) hits.getAt(0).sortValues()[0], equalTo(higestSortValue));
assertThat((Long) hits.getAt(1).sortValues()[0], equalTo(higestSortValue - 1));
@@ -501,7 +501,7 @@ public class TopHitsIT extends ESIntegTestCase {
assertThat(key(bucket), equalTo("b"));
TopHits topHits = bucket.getAggregations().get("hits");
SearchHits hits = topHits.getHits();
- assertThat(hits.totalHits(), equalTo(4l));
+ assertThat(hits.totalHits(), equalTo(4L));
assertThat(hits.getHits().length, equalTo(1));
assertThat(hits.getAt(0).id(), equalTo("6"));
@@ -509,7 +509,7 @@ public class TopHitsIT extends ESIntegTestCase {
assertThat(key(bucket), equalTo("c"));
topHits = bucket.getAggregations().get("hits");
hits = topHits.getHits();
- assertThat(hits.totalHits(), equalTo(3l));
+ assertThat(hits.totalHits(), equalTo(3L));
assertThat(hits.getHits().length, equalTo(1));
assertThat(hits.getAt(0).id(), equalTo("9"));
@@ -517,7 +517,7 @@ public class TopHitsIT extends ESIntegTestCase {
assertThat(key(bucket), equalTo("a"));
topHits = bucket.getAggregations().get("hits");
hits = topHits.getHits();
- assertThat(hits.totalHits(), equalTo(2l));
+ assertThat(hits.totalHits(), equalTo(2L));
assertThat(hits.getHits().length, equalTo(1));
assertThat(hits.getAt(0).id(), equalTo("2"));
}
@@ -550,7 +550,7 @@ public class TopHitsIT extends ESIntegTestCase {
for (Terms.Bucket bucket : terms.getBuckets()) {
TopHits topHits = bucket.getAggregations().get("hits");
SearchHits hits = topHits.getHits();
- assertThat(hits.totalHits(), equalTo(10l));
+ assertThat(hits.totalHits(), equalTo(10L));
assertThat(hits.getHits().length, equalTo(1));
SearchHit hit = hits.getAt(0);
@@ -562,7 +562,7 @@ public class TopHitsIT extends ESIntegTestCase {
assertThat(explanation.toString(), containsString("text:text"));
long version = hit.version();
- assertThat(version, equalTo(1l));
+ assertThat(version, equalTo(1L));
assertThat(hit.matchedQueries()[0], equalTo("test"));
@@ -637,7 +637,7 @@ public class TopHitsIT extends ESIntegTestCase {
TopHits hits = response.getAggregations().get("hits");
assertThat(hits, notNullValue());
assertThat(hits.getName(), equalTo("hits"));
- assertThat(hits.getHits().totalHits(), equalTo(0l));
+ assertThat(hits.getHits().totalHits(), equalTo(0L));
}
public void testTrackScores() throws Exception {
@@ -703,23 +703,23 @@ public class TopHitsIT extends ESIntegTestCase {
.get();
Nested nested = searchResponse.getAggregations().get("to-comments");
- assertThat(nested.getDocCount(), equalTo(4l));
+ assertThat(nested.getDocCount(), equalTo(4L));
Terms terms = nested.getAggregations().get("users");
Terms.Bucket bucket = terms.getBucketByKey("a");
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
TopHits topHits = bucket.getAggregations().get("top-comments");
SearchHits searchHits = topHits.getHits();
- assertThat(searchHits.totalHits(), equalTo(1l));
+ assertThat(searchHits.totalHits(), equalTo(1L));
assertThat(searchHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(searchHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
assertThat((Integer) searchHits.getAt(0).getSource().get("date"), equalTo(1));
bucket = terms.getBucketByKey("b");
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
topHits = bucket.getAggregations().get("top-comments");
searchHits = topHits.getHits();
- assertThat(searchHits.totalHits(), equalTo(2l));
+ assertThat(searchHits.totalHits(), equalTo(2L));
assertThat(searchHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(searchHits.getAt(0).getNestedIdentity().getOffset(), equalTo(1));
assertThat((Integer) searchHits.getAt(0).getSource().get("date"), equalTo(2));
@@ -728,10 +728,10 @@ public class TopHitsIT extends ESIntegTestCase {
assertThat((Integer) searchHits.getAt(1).getSource().get("date"), equalTo(3));
bucket = terms.getBucketByKey("c");
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
topHits = bucket.getAggregations().get("top-comments");
searchHits = topHits.getHits();
- assertThat(searchHits.totalHits(), equalTo(1l));
+ assertThat(searchHits.totalHits(), equalTo(1L));
assertThat(searchHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(searchHits.getAt(0).getNestedIdentity().getOffset(), equalTo(1));
assertThat((Integer) searchHits.getAt(0).getSource().get("date"), equalTo(4));
@@ -754,10 +754,10 @@ public class TopHitsIT extends ESIntegTestCase {
assertNoFailures(searchResponse);
Nested toComments = searchResponse.getAggregations().get("to-comments");
- assertThat(toComments.getDocCount(), equalTo(4l));
+ assertThat(toComments.getDocCount(), equalTo(4L));
TopHits topComments = toComments.getAggregations().get("top-comments");
- assertThat(topComments.getHits().totalHits(), equalTo(4l));
+ assertThat(topComments.getHits().totalHits(), equalTo(4L));
assertThat(topComments.getHits().getHits().length, equalTo(4));
assertThat(topComments.getHits().getAt(0).getId(), equalTo("2"));
@@ -781,10 +781,10 @@ public class TopHitsIT extends ESIntegTestCase {
assertThat(topComments.getHits().getAt(3).getNestedIdentity().getChild(), nullValue());
Nested toReviewers = toComments.getAggregations().get("to-reviewers");
- assertThat(toReviewers.getDocCount(), equalTo(7l));
+ assertThat(toReviewers.getDocCount(), equalTo(7L));
TopHits topReviewers = toReviewers.getAggregations().get("top-reviewers");
- assertThat(topReviewers.getHits().totalHits(), equalTo(7l));
+ assertThat(topReviewers.getHits().totalHits(), equalTo(7L));
assertThat(topReviewers.getHits().getHits().length, equalTo(7));
assertThat(topReviewers.getHits().getAt(0).getId(), equalTo("1"));
@@ -855,10 +855,10 @@ public class TopHitsIT extends ESIntegTestCase {
.setVersion(true).addSort("comments.date", SortOrder.ASC))).get();
assertHitCount(searchResponse, 2);
Nested nested = searchResponse.getAggregations().get("to-comments");
- assertThat(nested.getDocCount(), equalTo(4l));
+ assertThat(nested.getDocCount(), equalTo(4L));
SearchHits hits = ((TopHits) nested.getAggregations().get("top-comments")).getHits();
- assertThat(hits.totalHits(), equalTo(4l));
+ assertThat(hits.totalHits(), equalTo(4L));
SearchHit searchHit = hits.getAt(0);
assertThat(searchHit.getId(), equalTo("1"));
assertThat(searchHit.getNestedIdentity().getField().string(), equalTo("comments"));
@@ -875,7 +875,7 @@ public class TopHitsIT extends ESIntegTestCase {
// Returns the version of the root document. Nested docs don't have a separate version
long version = searchHit.version();
- assertThat(version, equalTo(1l));
+ assertThat(version, equalTo(1L));
assertThat(searchHit.matchedQueries(), arrayContaining("test"));
@@ -909,7 +909,7 @@ public class TopHitsIT extends ESIntegTestCase {
Histogram histogram = searchResponse.getAggregations().get("dates");
for (int i = 0; i < numArticles; i += 5) {
Histogram.Bucket bucket = histogram.getBuckets().get(i / 5);
- assertThat(bucket.getDocCount(), equalTo(5l));
+ assertThat(bucket.getDocCount(), equalTo(5L));
long numNestedDocs = 10 + (5 * i);
Nested nested = bucket.getAggregations().get("to-comments");
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java
index 903c1bab35..381335cb00 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java
@@ -26,6 +26,7 @@ import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.LeafSearchScript;
import org.elasticsearch.script.Script;
+import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService.ScriptType;
@@ -40,6 +41,7 @@ import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
@@ -83,12 +85,12 @@ public class ValueCountIT extends ESIntegTestCase {
.addAggregation(count("count").field("value"))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(0l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(0L));
ValueCount valueCount = searchResponse.getAggregations().get("count");
assertThat(valueCount, notNullValue());
assertThat(valueCount.getName(), equalTo("count"));
- assertThat(valueCount.getValue(), equalTo(0l));
+ assertThat(valueCount.getValue(), equalTo(0L));
}
public void testSingleValuedField() throws Exception {
@@ -102,7 +104,7 @@ public class ValueCountIT extends ESIntegTestCase {
ValueCount valueCount = searchResponse.getAggregations().get("count");
assertThat(valueCount, notNullValue());
assertThat(valueCount.getName(), equalTo("count"));
- assertThat(valueCount.getValue(), equalTo(10l));
+ assertThat(valueCount.getValue(), equalTo(10L));
}
public void testSingleValuedFieldGetProperty() throws Exception {
@@ -114,14 +116,14 @@ public class ValueCountIT extends ESIntegTestCase {
Global global = searchResponse.getAggregations().get("global");
assertThat(global, notNullValue());
assertThat(global.getName(), equalTo("global"));
- assertThat(global.getDocCount(), equalTo(10l));
+ assertThat(global.getDocCount(), equalTo(10L));
assertThat(global.getAggregations(), notNullValue());
assertThat(global.getAggregations().asMap().size(), equalTo(1));
ValueCount valueCount = global.getAggregations().get("count");
assertThat(valueCount, notNullValue());
assertThat(valueCount.getName(), equalTo("count"));
- assertThat(valueCount.getValue(), equalTo(10l));
+ assertThat(valueCount.getValue(), equalTo(10L));
assertThat((ValueCount) global.getProperty("count"), equalTo(valueCount));
assertThat((double) global.getProperty("count.value"), equalTo(10d));
assertThat((double) valueCount.getProperty("value"), equalTo(10d));
@@ -138,7 +140,7 @@ public class ValueCountIT extends ESIntegTestCase {
ValueCount valueCount = searchResponse.getAggregations().get("count");
assertThat(valueCount, notNullValue());
assertThat(valueCount.getName(), equalTo("count"));
- assertThat(valueCount.getValue(), equalTo(10l));
+ assertThat(valueCount.getValue(), equalTo(10L));
}
public void testMultiValuedField() throws Exception {
@@ -152,7 +154,7 @@ public class ValueCountIT extends ESIntegTestCase {
ValueCount valueCount = searchResponse.getAggregations().get("count");
assertThat(valueCount, notNullValue());
assertThat(valueCount.getName(), equalTo("count"));
- assertThat(valueCount.getValue(), equalTo(20l));
+ assertThat(valueCount.getValue(), equalTo(20L));
}
public void testSingleValuedScript() throws Exception {
@@ -164,7 +166,7 @@ public class ValueCountIT extends ESIntegTestCase {
ValueCount valueCount = searchResponse.getAggregations().get("count");
assertThat(valueCount, notNullValue());
assertThat(valueCount.getName(), equalTo("count"));
- assertThat(valueCount.getValue(), equalTo(10l));
+ assertThat(valueCount.getValue(), equalTo(10L));
}
public void testMultiValuedScript() throws Exception {
@@ -176,7 +178,7 @@ public class ValueCountIT extends ESIntegTestCase {
ValueCount valueCount = searchResponse.getAggregations().get("count");
assertThat(valueCount, notNullValue());
assertThat(valueCount.getName(), equalTo("count"));
- assertThat(valueCount.getValue(), equalTo(20l));
+ assertThat(valueCount.getValue(), equalTo(20L));
}
public void testSingleValuedScriptWithParams() throws Exception {
@@ -189,7 +191,7 @@ public class ValueCountIT extends ESIntegTestCase {
ValueCount valueCount = searchResponse.getAggregations().get("count");
assertThat(valueCount, notNullValue());
assertThat(valueCount.getName(), equalTo("count"));
- assertThat(valueCount.getValue(), equalTo(10l));
+ assertThat(valueCount.getValue(), equalTo(10L));
}
public void testMultiValuedScriptWithParams() throws Exception {
@@ -202,7 +204,7 @@ public class ValueCountIT extends ESIntegTestCase {
ValueCount valueCount = searchResponse.getAggregations().get("count");
assertThat(valueCount, notNullValue());
assertThat(valueCount.getName(), equalTo("count"));
- assertThat(valueCount.getValue(), equalTo(20l));
+ assertThat(valueCount.getValue(), equalTo(20L));
}
/**
@@ -221,7 +223,7 @@ public class ValueCountIT extends ESIntegTestCase {
}
public void onModule(ScriptModule module) {
- module.addScriptEngine(FieldValueScriptEngine.class);
+ module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(FieldValueScriptEngine.class, FieldValueScriptEngine.TYPES));
}
}
@@ -233,22 +235,24 @@ public class ValueCountIT extends ESIntegTestCase {
public static final String NAME = "field_value";
+ public static final List<String> TYPES = Collections.singletonList(NAME);
+
@Override
public void close() throws IOException {
}
@Override
- public String[] types() {
- return new String[] { NAME };
+ public List<String> getTypes() {
+ return TYPES;
}
@Override
- public String[] extensions() {
- return types();
+ public List<String> getExtensions() {
+ return TYPES;
}
@Override
- public boolean sandboxed() {
+ public boolean isSandboxed() {
return true;
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java
index ea0eb7fd93..47c0c96312 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java
@@ -189,7 +189,7 @@ public class AvgBucketIT extends ESIntegTestCase {
Terms.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("tag" + (i % interval)));
- assertThat(bucket.getDocCount(), greaterThan(0l));
+ assertThat(bucket.getDocCount(), greaterThan(0L));
Sum sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
count++;
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java
index 3058d1f10d..f2da1db440 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java
@@ -123,7 +123,7 @@ public class DateDerivativeIT extends ESIntegTestCase {
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((DateTime) bucket.getKey(), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
SimpleValue docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, nullValue());
@@ -131,7 +131,7 @@ public class DateDerivativeIT extends ESIntegTestCase {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((DateTime) bucket.getKey(), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, notNullValue());
assertThat(docCountDeriv.value(), equalTo(1d));
@@ -140,7 +140,7 @@ public class DateDerivativeIT extends ESIntegTestCase {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((DateTime) bucket.getKey(), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, notNullValue());
assertThat(docCountDeriv.value(), equalTo(1d));
@@ -166,7 +166,7 @@ public class DateDerivativeIT extends ESIntegTestCase {
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((DateTime) bucket.getKey(), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
Derivative docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, nullValue());
@@ -174,7 +174,7 @@ public class DateDerivativeIT extends ESIntegTestCase {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((DateTime) bucket.getKey(), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, notNullValue());
assertThat(docCountDeriv.value(), closeTo(1d, 0.00001));
@@ -184,7 +184,7 @@ public class DateDerivativeIT extends ESIntegTestCase {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((DateTime) bucket.getKey(), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, notNullValue());
assertThat(docCountDeriv.value(), closeTo(1d, 0.00001));
@@ -214,7 +214,7 @@ public class DateDerivativeIT extends ESIntegTestCase {
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((DateTime) bucket.getKey(), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
Sum sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
@@ -222,14 +222,14 @@ public class DateDerivativeIT extends ESIntegTestCase {
SimpleValue deriv = bucket.getAggregations().get("deriv");
assertThat(deriv, nullValue());
assertThat((DateTime) propertiesKeys[0], equalTo(key));
- assertThat((long) propertiesDocCounts[0], equalTo(1l));
+ assertThat((long) propertiesDocCounts[0], equalTo(1L));
assertThat((double) propertiesCounts[0], equalTo(1.0));
key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((DateTime) bucket.getKey(), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
@@ -239,14 +239,14 @@ public class DateDerivativeIT extends ESIntegTestCase {
assertThat(deriv.value(), equalTo(4.0));
assertThat((double) bucket.getProperty("histo", AggregationPath.parse("deriv.value").getPathElementsAsStringList()), equalTo(4.0));
assertThat((DateTime) propertiesKeys[1], equalTo(key));
- assertThat((long) propertiesDocCounts[1], equalTo(2l));
+ assertThat((long) propertiesDocCounts[1], equalTo(2L));
assertThat((double) propertiesCounts[1], equalTo(5.0));
key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((DateTime) bucket.getKey(), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
@@ -256,7 +256,7 @@ public class DateDerivativeIT extends ESIntegTestCase {
assertThat(deriv.value(), equalTo(10.0));
assertThat((double) bucket.getProperty("histo", AggregationPath.parse("deriv.value").getPathElementsAsStringList()), equalTo(10.0));
assertThat((DateTime) propertiesKeys[2], equalTo(key));
- assertThat((long) propertiesDocCounts[2], equalTo(3l));
+ assertThat((long) propertiesDocCounts[2], equalTo(3L));
assertThat((double) propertiesCounts[2], equalTo(15.0));
}
@@ -279,7 +279,7 @@ public class DateDerivativeIT extends ESIntegTestCase {
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((DateTime) bucket.getKey(), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
assertThat(bucket.getAggregations().asList().isEmpty(), is(true));
SimpleValue docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, nullValue());
@@ -288,7 +288,7 @@ public class DateDerivativeIT extends ESIntegTestCase {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((DateTime) bucket.getKey(), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, notNullValue());
@@ -298,7 +298,7 @@ public class DateDerivativeIT extends ESIntegTestCase {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((DateTime) bucket.getKey(), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(5l));
+ assertThat(bucket.getDocCount(), equalTo(5L));
assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, notNullValue());
@@ -308,7 +308,7 @@ public class DateDerivativeIT extends ESIntegTestCase {
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
assertThat((DateTime) bucket.getKey(), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, notNullValue());
@@ -349,7 +349,7 @@ public class DateDerivativeIT extends ESIntegTestCase {
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((DateTime) bucket.getKey(), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
assertThat(bucket.getAggregations().asList().isEmpty(), is(true));
SimpleValue docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, nullValue());
@@ -358,7 +358,7 @@ public class DateDerivativeIT extends ESIntegTestCase {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((DateTime) bucket.getKey(), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(2l));
+ assertThat(bucket.getDocCount(), equalTo(2L));
assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, notNullValue());
@@ -368,7 +368,7 @@ public class DateDerivativeIT extends ESIntegTestCase {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((DateTime) bucket.getKey(), equalTo(key));
- assertThat(bucket.getDocCount(), equalTo(3l));
+ assertThat(bucket.getDocCount(), equalTo(3L));
assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
docCountDeriv = bucket.getAggregations().get("deriv");
assertThat(docCountDeriv, notNullValue());
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java
index d76f88a1a9..6a4f548978 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java
@@ -123,7 +123,7 @@ public class DerivativeIT extends ESIntegTestCase {
}
// setup for index with empty buckets
- valueCounts_empty = new Long[] { 1l, 1l, 2l, 0l, 2l, 2l, 0l, 0l, 0l, 3l, 2l, 1l };
+ valueCounts_empty = new Long[] { 1L, 1L, 2L, 0L, 2L, 2L, 0L, 0L, 0L, 3L, 2L, 1L };
firstDerivValueCounts_empty = new Double[] { null, 0d, 1d, -2d, 2d, 0d, -2d, 0d, 0d, 3d, -1d, -1d };
assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer"));
@@ -145,7 +145,7 @@ public class DerivativeIT extends ESIntegTestCase {
valueCounts_empty_rnd[i] = (long) randomIntBetween(1, 10);
// make approximately half of the buckets empty
if (randomBoolean())
- valueCounts_empty_rnd[i] = 0l;
+ valueCounts_empty_rnd[i] = 0L;
for (int docs = 0; docs < valueCounts_empty_rnd[i]; docs++) {
builders.add(client().prepareIndex("empty_bucket_idx_rnd", "type").setSource(newDocBuilder(i)));
numDocsEmptyIdx_rnd++;
@@ -411,7 +411,7 @@ public class DerivativeIT extends ESIntegTestCase {
.setQuery(matchAllQuery())
.addAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1)
- .extendedBounds(0l, (long) numBuckets_empty_rnd - 1)
+ .extendedBounds(0L, (long) numBuckets_empty_rnd - 1)
.subAggregation(derivative("deriv").setBucketsPaths("_count").gapPolicy(randomFrom(GapPolicy.values()))))
.execute().actionGet();
@@ -550,7 +550,7 @@ public class DerivativeIT extends ESIntegTestCase {
.setQuery(matchAllQuery())
.addAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1)
- .extendedBounds(0l, (long) numBuckets_empty_rnd - 1)
+ .extendedBounds(0L, (long) numBuckets_empty_rnd - 1)
.subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))
.subAggregation(derivative("deriv").setBucketsPaths("sum").gapPolicy(gapPolicy))).execute().actionGet();
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java
index 6c7ae2383f..3a82b6883c 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java
@@ -212,7 +212,7 @@ public class ExtendedStatsBucketIT extends ESIntegTestCase {
Terms.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("tag" + (i % interval)));
- assertThat(bucket.getDocCount(), greaterThan(0l));
+ assertThat(bucket.getDocCount(), greaterThan(0L));
Sum sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
count++;
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java
index 81b5735012..bf8c23a9e7 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java
@@ -203,7 +203,7 @@ public class MaxBucketIT extends ESIntegTestCase {
Terms.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("tag" + (i % interval)));
- assertThat(bucket.getDocCount(), greaterThan(0l));
+ assertThat(bucket.getDocCount(), greaterThan(0L));
Sum sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
if (sum.value() > maxValue) {
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java
index f02a85f130..ff865e24ed 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java
@@ -200,7 +200,7 @@ public class MinBucketIT extends ESIntegTestCase {
Terms.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("tag" + (i % interval)));
- assertThat(bucket.getDocCount(), greaterThan(0l));
+ assertThat(bucket.getDocCount(), greaterThan(0L));
Sum sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
if (sum.value() < minValue) {
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java
index c4dc267ec5..27f12bd0c0 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java
@@ -203,7 +203,7 @@ public class PercentilesBucketIT extends ESIntegTestCase {
Terms.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("tag" + (i % interval)));
- assertThat(bucket.getDocCount(), greaterThan(0l));
+ assertThat(bucket.getDocCount(), greaterThan(0L));
Sum sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
values[i] = sum.value();
@@ -240,7 +240,7 @@ public class PercentilesBucketIT extends ESIntegTestCase {
Terms.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("tag" + (i % interval)));
- assertThat(bucket.getDocCount(), greaterThan(0l));
+ assertThat(bucket.getDocCount(), greaterThan(0L));
Sum sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
values[i] = sum.value();
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java
index 65d5fbafbe..e962e90830 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java
@@ -109,7 +109,7 @@ public class PipelineAggregationHelperTests extends ESTestCase {
* @param values Array of values to compute metric for
* @param metric A metric builder which defines what kind of metric should be returned for the values
*/
- public static double calculateMetric(double[] values, ValuesSourceMetricsAggregationBuilder<?> metric) {
+ public static double calculateMetric(double[] values, ValuesSourceMetricsAggregationBuilder metric) {
if (metric instanceof MinBuilder) {
double accumulator = Double.POSITIVE_INFINITY;
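The signature change here drops the <?> wildcard from ValuesSourceMetricsAggregationBuilder, leaving the raw type, presumably because the builder's type parameter was removed elsewhere in this merge. Raw types and wildcards are not interchangeable in general; a short sketch of the difference, using List as a stand-in for the builder:

    import java.util.ArrayList;
    import java.util.List;

    public class RawVsWildcard {
        // Wildcard: element type is unknown; reads come back as Object, writes are rejected.
        static void wildcard(List<?> values) {
            Object first = values.isEmpty() ? null : values.get(0);
            System.out.println(first);
        }

        // Raw type: generics checking is switched off entirely (unchecked warning).
        @SuppressWarnings({"rawtypes", "unchecked"})
        static void raw(List values) {
            values.add("anything"); // compiles, but bypasses type safety
        }

        public static void main(String[] args) {
            List<String> names = new ArrayList<>();
            names.add("sum");
            wildcard(names);
            raw(names);
        }
    }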
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java
index 92325ccd81..949350c4cd 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java
@@ -204,7 +204,7 @@ public class StatsBucketIT extends ESIntegTestCase {
Terms.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("tag" + (i % interval)));
- assertThat(bucket.getDocCount(), greaterThan(0l));
+ assertThat(bucket.getDocCount(), greaterThan(0L));
Sum sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
count++;
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java
index ba13b553d8..0d9a324e29 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java
@@ -182,7 +182,7 @@ public class SumBucketIT extends ESIntegTestCase {
Terms.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("tag" + (i % interval)));
- assertThat(bucket.getDocCount(), greaterThan(0l));
+ assertThat(bucket.getDocCount(), greaterThan(0L));
Sum sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
bucketSum += sum.value();
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java
index 6184cb9fd6..3dd578a0af 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java
@@ -77,7 +77,7 @@ public class MovAvgIT extends ESIntegTestCase {
static int period;
static HoltWintersModel.SeasonalityType seasonalityType;
static BucketHelpers.GapPolicy gapPolicy;
- static ValuesSourceMetricsAggregationBuilder<?> metric;
+ static ValuesSourceMetricsAggregationBuilder metric;
static List<PipelineAggregationHelperTests.MockBucket> mockHisto;
static Map<String, ArrayList<Double>> testValues;
@@ -686,7 +686,7 @@ public class MovAvgIT extends ESIntegTestCase {
Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat((long) bucket.getKey(), equalTo((long) i - 10));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
Avg avgAgg = bucket.getAggregations().get("avg");
assertThat(avgAgg, notNullValue());
assertThat(avgAgg.value(), equalTo(10d));
@@ -699,7 +699,7 @@ public class MovAvgIT extends ESIntegTestCase {
Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat((long) bucket.getKey(), equalTo((long) i - 10));
- assertThat(bucket.getDocCount(), equalTo(0l));
+ assertThat(bucket.getDocCount(), equalTo(0L));
Avg avgAgg = bucket.getAggregations().get("avg");
assertThat(avgAgg, nullValue());
SimpleValue movAvgAgg = bucket.getAggregations().get("movavg_values");
@@ -864,7 +864,7 @@ public class MovAvgIT extends ESIntegTestCase {
public void testHoltWintersNotEnoughData() {
try {
- client()
+ SearchResponse response = client()
.prepareSearch("idx").setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD).interval(interval)
@@ -919,7 +919,7 @@ public class MovAvgIT extends ESIntegTestCase {
Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((long) bucket.getKey(), equalTo((long) 0));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
Avg avgAgg = bucket.getAggregations().get("avg");
assertThat(avgAgg, notNullValue());
@@ -938,7 +938,7 @@ public class MovAvgIT extends ESIntegTestCase {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((long) bucket.getKey(), equalTo(1L));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
avgAgg = bucket.getAggregations().get("avg");
assertThat(avgAgg, notNullValue());
@@ -959,7 +959,7 @@ public class MovAvgIT extends ESIntegTestCase {
bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat((long) bucket.getKey(), equalTo((long) i));
- assertThat(bucket.getDocCount(), equalTo(1l));
+ assertThat(bucket.getDocCount(), equalTo(1L));
avgAgg = bucket.getAggregations().get("avg");
assertThat(avgAgg, notNullValue());
@@ -983,7 +983,7 @@ public class MovAvgIT extends ESIntegTestCase {
bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat((long) bucket.getKey(), equalTo((long) i));
- assertThat(bucket.getDocCount(), equalTo(0l));
+ assertThat(bucket.getDocCount(), equalTo(0L));
avgAgg = bucket.getAggregations().get("avg");
assertThat(avgAgg, nullValue());
@@ -1003,7 +1003,7 @@ public class MovAvgIT extends ESIntegTestCase {
public void testBadModelParams() {
try {
- client()
+ SearchResponse response = client()
.prepareSearch("idx").setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD).interval(interval)
@@ -1248,7 +1248,7 @@ public class MovAvgIT extends ESIntegTestCase {
for (MovAvgModelBuilder builder : builders) {
try {
- client()
+ SearchResponse response = client()
.prepareSearch("idx").setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD).interval(interval)
@@ -1265,10 +1265,14 @@ public class MovAvgIT extends ESIntegTestCase {
// All good
}
}
+
+
+
+
}
- private void assertValidIterators(Iterator<?> expectedBucketIter, Iterator<?> expectedCountsIter, Iterator<?> expectedValuesIter) {
+ private void assertValidIterators(Iterator expectedBucketIter, Iterator expectedCountsIter, Iterator expectedValuesIter) {
if (!expectedBucketIter.hasNext()) {
fail("`expectedBucketIter` iterator ended before `actual` iterator, size mismatch");
}
@@ -1351,7 +1355,7 @@ public class MovAvgIT extends ESIntegTestCase {
}
}
- private ValuesSourceMetricsAggregationBuilder<?> randomMetric(String name, String field) {
+ private ValuesSourceMetricsAggregationBuilder randomMetric(String name, String field) {
int rand = randomIntBetween(0,3);
switch (rand) {
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffIT.java
index 145587a4b2..aebd6a7e78 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffIT.java
@@ -60,7 +60,7 @@ public class SerialDiffIT extends ESIntegTestCase {
static int numBuckets;
static int lag;
static BucketHelpers.GapPolicy gapPolicy;
- static ValuesSourceMetricsAggregationBuilder<?> metric;
+ static ValuesSourceMetricsAggregationBuilder metric;
static List<PipelineAggregationHelperTests.MockBucket> mockHisto;
static Map<String, ArrayList<Double>> testValues;
@@ -80,7 +80,7 @@ public class SerialDiffIT extends ESIntegTestCase {
}
}
- private ValuesSourceMetricsAggregationBuilder<?> randomMetric(String name, String field) {
+ private ValuesSourceMetricsAggregationBuilder randomMetric(String name, String field) {
int rand = randomIntBetween(0,3);
switch (rand) {
@@ -95,7 +95,7 @@ public class SerialDiffIT extends ESIntegTestCase {
}
}
- private void assertValidIterators(Iterator<?> expectedBucketIter, Iterator<?> expectedCountsIter, Iterator<?> expectedValuesIter) {
+ private void assertValidIterators(Iterator expectedBucketIter, Iterator expectedCountsIter, Iterator expectedValuesIter) {
if (!expectedBucketIter.hasNext()) {
fail("`expectedBucketIter` iterator ended before `actual` iterator, size mismatch");
}
diff --git a/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java b/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java
index 28874d2e2a..53ac2bc045 100644
--- a/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java
+++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java
@@ -25,15 +25,16 @@ import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.test.ESIntegTestCase;
+import org.elasticsearch.test.junit.annotations.TestLogging;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
-
/**
* This test basically verifies that search with a single shard active (because we indexed to it) and other
* shards possibly not active at all (because they haven't been allocated) will still work.
*/
+@TestLogging("_root:DEBUG")
public class SearchWhileCreatingIndexIT extends ESIntegTestCase {
public void testIndexCausesIndexCreation() throws Exception {
searchWhileCreatingIndex(false, 1); // 1 replica in our default...
@@ -58,39 +59,44 @@ public class SearchWhileCreatingIndexIT extends ESIntegTestCase {
int shardsNo = numberOfReplicas + 1;
int neededNodes = shardsNo <= 2 ? 1 : shardsNo / 2 + 1;
internalCluster().ensureAtLeastNumDataNodes(randomIntBetween(neededNodes, shardsNo));
- for (int i = 0; i < 20; i++) {
- logger.info("running iteration {}", i);
- if (createIndex) {
- createIndex("test");
- }
- client().prepareIndex("test", "type1", randomAsciiOfLength(5)).setSource("field", "test").execute().actionGet();
- RefreshResponse refreshResponse = client().admin().indices().prepareRefresh("test").execute().actionGet();
- assertThat(refreshResponse.getSuccessfulShards(), greaterThanOrEqualTo(1)); // at least one shard should be successful when refreshing
- // we want to make sure that while recovery happens, and a replica gets recovered, its properly refreshed
- ClusterHealthStatus status = ClusterHealthStatus.RED;
- while (status != ClusterHealthStatus.GREEN) {
- // first, verify that search on the primary search works
- SearchResponse searchResponse = client().prepareSearch("test").setPreference("_primary").setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet();
- assertHitCount(searchResponse, 1);
- // now, let it go to primary or replica, though in a randomized re-creatable manner
- String preference = randomAsciiOfLength(5);
- Client client = client();
- searchResponse = client.prepareSearch("test").setPreference(preference).setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet();
- if (searchResponse.getHits().getTotalHits() != 1) {
- refresh();
- SearchResponse searchResponseAfterRefresh = client.prepareSearch("test").setPreference(preference).setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet();
- logger.info("hits count mismatch on any shard search failed, post explicit refresh hits are {}", searchResponseAfterRefresh.getHits().getTotalHits());
- ensureGreen();
- SearchResponse searchResponseAfterGreen = client.prepareSearch("test").setPreference(preference).setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet();
- logger.info("hits count mismatch on any shard search failed, post explicit wait for green hits are {}", searchResponseAfterGreen.getHits().getTotalHits());
- assertHitCount(searchResponse, 1);
- }
+ String id = randomAsciiOfLength(5);
+ // we will go to the primary or the replica, but in a
+ // randomized, re-creatable manner
+ int counter = 0;
+ String preference = randomAsciiOfLength(5);
+
+ logger.info("running iteration for id {}, preference {}", id, preference);
+
+ if (createIndex) {
+ createIndex("test");
+ }
+ client().prepareIndex("test", "type1", id).setSource("field", "test").execute().actionGet();
+ RefreshResponse refreshResponse = client().admin().indices().prepareRefresh("test").execute().actionGet();
+ assertThat(refreshResponse.getSuccessfulShards(), greaterThanOrEqualTo(1)); // at least one shard should be successful when refreshing
+
+ logger.info("using preference {}", preference);
+ // we want to make sure that while recovery happens, and a replica gets recovered, it's properly refreshed
+ ClusterHealthStatus status = ClusterHealthStatus.RED;
+ while (status != ClusterHealthStatus.GREEN) {
+ // first, verify that search on the primary search works
+ SearchResponse searchResponse = client().prepareSearch("test").setPreference("_primary").setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet();
+ assertHitCount(searchResponse, 1);
+ Client client = client();
+ searchResponse = client.prepareSearch("test").setPreference(preference + Integer.toString(counter++)).setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet();
+ if (searchResponse.getHits().getTotalHits() != 1) {
+ refresh();
+ SearchResponse searchResponseAfterRefresh = client.prepareSearch("test").setPreference(preference).setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet();
+ logger.info("hits count mismatch on any shard search failed, post explicit refresh hits are {}", searchResponseAfterRefresh.getHits().getTotalHits());
+ ensureGreen();
+ SearchResponse searchResponseAfterGreen = client.prepareSearch("test").setPreference(preference).setQuery(QueryBuilders.termQuery("field", "test")).execute().actionGet();
+ logger.info("hits count mismatch on any shard search failed, post explicit wait for green hits are {}", searchResponseAfterGreen.getHits().getTotalHits());
assertHitCount(searchResponse, 1);
- status = client().admin().cluster().prepareHealth("test").get().getStatus();
- internalCluster().ensureAtLeastNumDataNodes(numberOfReplicas + 1);
}
- cluster().wipeIndices("test");
+ assertHitCount(searchResponse, 1);
+ status = client().admin().cluster().prepareHealth("test").get().getStatus();
+ internalCluster().ensureAtLeastNumDataNodes(numberOfReplicas + 1);
}
+ cluster().wipeIndices("test");
}
}
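Beyond the @TestLogging annotation, the SearchWhileCreatingIndexIT rewrite collapses the 20-iteration loop into a single indexing pass and appends an incrementing counter to the random preference string, so each search attempt stays individually reproducible but is no longer pinned to the same shard copy on every retry. A toy model of the effect, assuming only that a custom preference string is hashed to choose among shard copies (pickCopy is a hypothetical stand-in, not Elasticsearch's actual routing):

    public class PreferenceRouting {
        // Toy stand-in: a custom preference string hashes to one copy of a shard,
        // so the same string always lands on the same copy.
        static int pickCopy(String preference, int copies) {
            return Math.floorMod(preference.hashCode(), copies);
        }

        public static void main(String[] args) {
            String base = "abcde"; // stands in for randomAsciiOfLength(5)
            for (int counter = 0; counter < 3; counter++) {
                String preference = base + counter; // the test's preference + counter trick
                System.out.println(preference + " -> copy " + pickCopy(preference, 2));
            }
        }
    }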
diff --git a/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java b/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java
index 3d3388b87b..3cd1d26927 100644
--- a/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java
+++ b/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java
@@ -39,7 +39,6 @@ import java.io.IOException;
import static org.elasticsearch.client.Requests.clusterHealthRequest;
import static org.elasticsearch.client.Requests.refreshRequest;
import static org.elasticsearch.client.Requests.searchRequest;
-import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.anyOf;
diff --git a/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java b/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java
index eedc4d9e79..fc1ee7afc8 100644
--- a/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java
@@ -140,7 +140,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("test").setSearchType(DFS_QUERY_THEN_FETCH).setQuery(termQuery("multi", "test")).setSize(60).setExplain(true).setScroll(TimeValue.timeValueSeconds(30)).get();
while (true) {
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(100l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(100L));
SearchHit[] hits = searchResponse.getHits().hits();
if (hits.length == 0) {
break; // finished
@@ -164,7 +164,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("test").setSearchType(DFS_QUERY_THEN_FETCH).setQuery(termQuery("multi", "test")).setSize(60).setExplain(true).addSort("age", SortOrder.ASC).setScroll(TimeValue.timeValueSeconds(30)).get();
while (true) {
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(100l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(100L));
SearchHit[] hits = searchResponse.getHits().hits();
if (hits.length == 0) {
break; // finished
@@ -188,7 +188,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("test").setSearchType(QUERY_THEN_FETCH).setQuery(termQuery("multi", "test")).setSize(60).setExplain(true).addSort("nid", SortOrder.DESC).setScroll(TimeValue.timeValueSeconds(30)).get();
while (true) {
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(100l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(100L));
SearchHit[] hits = searchResponse.getHits().hits();
if (hits.length == 0) {
break; // finished
@@ -216,7 +216,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase {
SearchResponse searchResponse = client().search(searchRequest("test").source(source.from(0).size(60)).searchType(QUERY_THEN_FETCH)).actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(100l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(100L));
assertThat(searchResponse.getHits().hits().length, equalTo(60));
for (int i = 0; i < 60; i++) {
SearchHit hit = searchResponse.getHits().hits()[i];
@@ -224,7 +224,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase {
}
searchResponse = client().search(searchRequest("test").source(source.from(60).size(60)).searchType(QUERY_THEN_FETCH)).actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(100l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(100L));
assertThat(searchResponse.getHits().hits().length, equalTo(40));
for (int i = 0; i < 40; i++) {
SearchHit hit = searchResponse.getHits().hits()[i];
@@ -240,7 +240,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("test").setQuery(termQuery("multi", "test")).setSize(60).setExplain(true).addSort("age", SortOrder.ASC).setScroll(TimeValue.timeValueSeconds(30)).get();
while (true) {
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(100l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(100L));
SearchHit[] hits = searchResponse.getHits().hits();
if (hits.length == 0) {
break; // finished
@@ -271,7 +271,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase {
SearchResponse searchResponse = client().search(searchRequest("test").source(source).searchType(QUERY_AND_FETCH).scroll(new Scroll(timeValueMinutes(10)))).actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(100l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(100L));
assertThat(searchResponse.getHits().hits().length, equalTo(60)); // 20 per shard
for (int i = 0; i < 60; i++) {
SearchHit hit = searchResponse.getHits().hits()[i];
@@ -284,7 +284,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase {
do {
searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll("10m").get();
- assertThat(searchResponse.getHits().totalHits(), equalTo(100l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(100L));
assertThat(searchResponse.getHits().hits().length, lessThanOrEqualTo(40));
for (int i = 0; i < searchResponse.getHits().hits().length; i++) {
SearchHit hit = searchResponse.getHits().hits()[i];
@@ -312,7 +312,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase {
//SearchResponse searchResponse = client().search(searchRequest("test").source(source).searchType(DFS_QUERY_AND_FETCH).scroll(new Scroll(timeValueMinutes(10)))).actionGet();
SearchResponse searchResponse = client().prepareSearch("test").setSearchType(DFS_QUERY_AND_FETCH).setScroll("10m").setSource(source).get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(100l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(100L));
assertThat(searchResponse.getHits().hits().length, equalTo(60)); // 20 per shard
for (int i = 0; i < 60; i++) {
SearchHit hit = searchResponse.getHits().hits()[i];
@@ -325,7 +325,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase {
do {
searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll("10m").get();
- assertThat(searchResponse.getHits().totalHits(), equalTo(100l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(100L));
assertThat(searchResponse.getHits().hits().length, lessThanOrEqualTo(40));
for (int i = 0; i < searchResponse.getHits().hits().length; i++) {
SearchHit hit = searchResponse.getHits().hits()[i];
@@ -349,13 +349,13 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase {
SearchResponse searchResponse = client().search(searchRequest("test").source(sourceBuilder)).actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(100l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(100L));
Global global = searchResponse.getAggregations().get("global");
Filter all = global.getAggregations().get("all");
Filter test1 = searchResponse.getAggregations().get("test1");
- assertThat(test1.getDocCount(), equalTo(1l));
- assertThat(all.getDocCount(), equalTo(100l));
+ assertThat(test1.getDocCount(), equalTo(1L));
+ assertThat(all.getDocCount(), equalTo(100L));
}
public void testFailedSearchWithWrongQuery() throws Exception {
diff --git a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java
index 5a1b99fe05..bb969b90de 100644
--- a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java
@@ -19,11 +19,6 @@
package org.elasticsearch.search.builder;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
@@ -38,12 +33,14 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.common.settings.SettingsModule;
+import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.index.query.AbstractQueryTestCase;
import org.elasticsearch.index.query.EmptyQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
@@ -57,7 +54,8 @@ import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder;
import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder.InnerHit;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
import org.elasticsearch.search.highlight.HighlightBuilderTests;
-import org.elasticsearch.search.rescore.RescoreBuilder;
+import org.elasticsearch.search.rescore.QueryRescoreBuilderTests;
+import org.elasticsearch.search.searchafter.SearchAfterBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.search.suggest.SuggestBuilder;
@@ -68,6 +66,11 @@ import org.elasticsearch.threadpool.ThreadPoolModule;
import org.junit.AfterClass;
import org.junit.BeforeClass;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
import static org.hamcrest.Matchers.equalTo;
public class SearchSourceBuilderTests extends ESTestCase {
@@ -81,7 +84,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
public static void init() throws IOException {
Settings settings = Settings.settingsBuilder()
.put("name", SearchSourceBuilderTests.class.toString())
- .put("path.home", createTempDir())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.build();
namedWriteableRegistry = new NamedWriteableRegistry();
injector = new ModulesBuilder().add(
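Replacing the "path.home" literal with Environment.PATH_HOME_SETTING.getKey() follows the wider migration to typed Setting constants, where each settings key is declared exactly once. A simplified mirror of the pattern (this nested Setting class is a stand-in, not org.elasticsearch.common.settings.Setting):

    public class TypedSettingKey {
        // Simplified stand-in for the typed-Setting pattern: the key lives on one
        // constant, and call sites ask for it via getKey() instead of repeating
        // the raw string literal everywhere.
        static final class Setting<T> {
            private final String key;
            Setting(String key) { this.key = key; }
            String getKey() { return key; }
        }

        static final Setting<String> PATH_HOME_SETTING = new Setting<>("path.home");

        public static void main(String[] args) {
            System.out.println(PATH_HOME_SETTING.getKey()); // "path.home"
        }
    }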
@@ -261,6 +264,56 @@ public class SearchSourceBuilderTests extends ESTestCase {
}
}
}
+
+ if (randomBoolean()) {
+ int numSearchFrom = randomIntBetween(1, 5);
+ // We build a JSON version of the search_from first in order to
+ // ensure that every number type remains the same before/after xcontent (de)serialization.
+ // This is not a problem because the final type of each field value is extracted from the associated sort field.
+ // This little trick ensures that equals and hashCode are the same when using the xcontent serialization.
+ XContentBuilder jsonBuilder = XContentFactory.jsonBuilder();
+ jsonBuilder.startObject();
+ jsonBuilder.startArray("search_from");
+ for (int i = 0; i < numSearchFrom; i++) {
+ int branch = randomInt(8);
+ switch (branch) {
+ case 0:
+ jsonBuilder.value(randomInt());
+ break;
+ case 1:
+ jsonBuilder.value(randomFloat());
+ break;
+ case 2:
+ jsonBuilder.value(randomLong());
+ break;
+ case 3:
+ jsonBuilder.value(randomDouble());
+ break;
+ case 4:
+ jsonBuilder.value(randomAsciiOfLengthBetween(5, 20));
+ break;
+ case 5:
+ jsonBuilder.value(randomBoolean());
+ break;
+ case 6:
+ jsonBuilder.value(randomByte());
+ break;
+ case 7:
+ jsonBuilder.value(randomShort());
+ break;
+ case 8:
+ jsonBuilder.value(new Text(randomAsciiOfLengthBetween(5, 20)));
+ break;
+ }
+ }
+ jsonBuilder.endArray();
+ jsonBuilder.endObject();
+ XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(jsonBuilder.bytes());
+ parser.nextToken();
+ parser.nextToken();
+ parser.nextToken();
+ builder.searchAfter(SearchAfterBuilder.PROTOTYPE.fromXContent(parser, null).getSortValues());
+ }
if (randomBoolean()) {
builder.highlighter(HighlightBuilderTests.randomHighlighterBuilder());
}
@@ -280,10 +333,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
if (randomBoolean()) {
int numRescores = randomIntBetween(1, 5);
for (int i = 0; i < numRescores; i++) {
- // NORELEASE need a random rescore builder method
- RescoreBuilder rescoreBuilder = new RescoreBuilder(RescoreBuilder.queryRescorer(QueryBuilders.termQuery(randomAsciiOfLengthBetween(5, 20),
- randomAsciiOfLengthBetween(5, 20))));
- builder.addRescorer(rescoreBuilder);
+ builder.addRescorer(QueryRescoreBuilderTests.randomRescoreBuilder());
}
}
if (randomBoolean()) {
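The JSON round trip in the new search_after branch above exists because JSON erases Java's finer numeric types: a parser hands tokens back as Integer, Long, or Double, never Byte or Short, so sort values generated directly would fail equals/hashCode comparisons after xcontent (de)serialization. A self-contained illustration of the mismatch:

    public class JsonNumberTypes {
        public static void main(String[] args) {
            // JSON has no byte/short/float: a parser returns "42" as an Integer
            // (or a Long/Double for larger values), so the boxed types can differ
            // even when the numeric values agree.
            Object beforeSerialization = Short.valueOf((short) 42);
            Object afterParsing = Integer.valueOf(42);
            System.out.println(beforeSerialization.equals(afterParsing)); // false
            System.out.println(((Number) beforeSerialization).intValue()
                    == ((Number) afterParsing).intValue());               // true
        }
    }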
diff --git a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java
index 1351d2ed74..67420bdf3b 100644
--- a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java
@@ -67,6 +67,7 @@ import static org.elasticsearch.index.query.QueryBuilders.idsQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery;
+import static org.elasticsearch.index.query.QueryBuilders.parentId;
import static org.elasticsearch.index.query.QueryBuilders.prefixQuery;
import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
@@ -135,33 +136,33 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
boolQuery().must(termQuery("c_field", "c_value1"))
.filter(hasChildQuery("grandchild", termQuery("gc_field", "gc_value1")))))).get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1"));
searchResponse = client().prepareSearch("test")
.setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", "p_value1")))).execute()
.actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("c1"));
searchResponse = client().prepareSearch("test")
.setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("child", termQuery("c_field", "c_value1")))).execute()
.actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("gc1"));
searchResponse = client().prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "p_value1"))).execute()
.actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("c1"));
searchResponse = client().prepareSearch("test").setQuery(hasParentQuery("child", termQuery("c_field", "c_value1"))).execute()
.actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("gc1"));
}
@@ -179,7 +180,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("test", matchQuery("foo", 1))).execute()
.actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("1"));
}
@@ -203,22 +204,22 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("test").setQuery(idsQuery("child").addIds("c1")).fields("_parent").execute()
.actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("c1"));
assertThat(searchResponse.getHits().getAt(0).field("_parent").value().toString(), equalTo("p1"));
// TEST matching on parent
- searchResponse = client().prepareSearch("test").setQuery(termQuery("_parent", "p1")).fields("_parent").get();
+ searchResponse = client().prepareSearch("test").setQuery(termQuery("_parent#parent", "p1")).fields("_parent").get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(2L));
assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("c1"), equalTo("c2")));
assertThat(searchResponse.getHits().getAt(0).field("_parent").value().toString(), equalTo("p1"));
assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("c1"), equalTo("c2")));
assertThat(searchResponse.getHits().getAt(1).field("_parent").value().toString(), equalTo("p1"));
- searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("_parent:p1")).fields("_parent").get();
+ searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("_parent#parent:p1")).fields("_parent").get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(2L));
assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("c1"), equalTo("c2")));
assertThat(searchResponse.getHits().getAt(0).field("_parent").value().toString(), equalTo("p1"));
assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("c1"), equalTo("c2")));
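The two query tweaks above (_parent becoming _parent#parent) reflect the reworked parent/child mapping: the join value is indexed under a type-qualified field, so raw term and query_string lookups must name _parent#<parentType>. A minimal sketch using the same QueryBuilders calls that appear in this diff (assumes the Elasticsearch core jar is on the classpath; the printed JSON is illustrative):

    import org.elasticsearch.index.query.QueryBuilders;

    public class ParentFieldQuery {
        public static void main(String[] args) {
            // Old spelling: the bare _parent field no longer matches anything.
            System.out.println(QueryBuilders.termQuery("_parent", "p1"));
            // New spelling: the type-qualified join field matches children of p1.
            System.out.println(QueryBuilders.termQuery("_parent#parent", "p1"));
        }
    }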
@@ -227,17 +228,17 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
// HAS CHILD
searchResponse = client().prepareSearch("test").setQuery(randomHasChild("child", "c_field", "yellow"))
.get();
- assertHitCount(searchResponse, 1l);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertHitCount(searchResponse, 1L);
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1"));
searchResponse = client().prepareSearch("test").setQuery(randomHasChild("child", "c_field", "blue")).execute()
.actionGet();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p2"));
searchResponse = client().prepareSearch("test").setQuery(randomHasChild("child", "c_field", "red")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("p2"), equalTo("p1")));
assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("p2"), equalTo("p1")));
@@ -245,13 +246,13 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
searchResponse = client().prepareSearch("test")
.setQuery(randomHasParent("parent", "p_field", "p_value2")).get();
assertNoFailures(searchResponse);
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("c3"));
assertThat(searchResponse.getHits().getAt(1).id(), equalTo("c4"));
searchResponse = client().prepareSearch("test")
.setQuery(randomHasParent("parent", "p_field", "p_value1")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("c1"));
assertThat(searchResponse.getHits().getAt(1).id(), equalTo("c2"));
}
@@ -371,18 +372,18 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"))).execute()
.actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1"));
searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "blue"))).execute()
.actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p2"));
searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "red"))).get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(2L));
assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("p2"), equalTo("p1")));
assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("p2"), equalTo("p1")));
@@ -391,19 +392,19 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow")))).get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1"));
searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"))))
.get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p2"));
searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "red"))))
.get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(2L));
assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("p2"), equalTo("p1")));
assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("p2"), equalTo("p1")));
}
@@ -431,7 +432,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
AggregationBuilders.filter("filter").filter(boolQuery().should(termQuery("c_field", "red")).should(termQuery("c_field", "yellow"))).subAggregation(
AggregationBuilders.terms("facet1").field("c_field")))).get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(2L));
assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("p2"), equalTo("p1")));
assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("p2"), equalTo("p1")));
@@ -463,7 +464,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow")))).get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1"));
assertThat(searchResponse.getHits().getAt(0).sourceAsString(), containsString("\"p_value1\""));
@@ -475,7 +476,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow")))).get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1"));
assertThat(searchResponse.getHits().getAt(0).sourceAsString(), containsString("\"p_value1_updated\""));
}
@@ -522,12 +523,12 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", matchAllQuery()))).get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch("test")
.setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", matchAllQuery()))).get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
}
public void testCountApiUsage() throws Exception {
@@ -543,19 +544,19 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
SearchResponse countResponse = client().prepareSearch("test").setSize(0).setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreMode(ScoreMode.Max))
.get();
- assertHitCount(countResponse, 1l);
+ assertHitCount(countResponse, 1L);
countResponse = client().prepareSearch("test").setSize(0).setQuery(hasParentQuery("parent", termQuery("p_field", "1")).score(true))
.get();
- assertHitCount(countResponse, 1l);
+ assertHitCount(countResponse, 1L);
countResponse = client().prepareSearch("test").setSize(0).setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "1"))))
.get();
- assertHitCount(countResponse, 1l);
+ assertHitCount(countResponse, 1L);
countResponse = client().prepareSearch("test").setSize(0).setQuery(constantScoreQuery(hasParentQuery("parent", termQuery("p_field", "1"))))
.get();
- assertHitCount(countResponse, 1l);
+ assertHitCount(countResponse, 1L);
}
public void testExplainUsage() throws Exception {
@@ -573,14 +574,14 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
.setExplain(true)
.setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreMode(ScoreMode.Max))
.get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), equalTo("Score based on join value p1"));
searchResponse = client().prepareSearch("test")
.setExplain(true)
.setQuery(hasParentQuery("parent", termQuery("p_field", "1")).score(true))
.get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), equalTo("Score based on join value p1"));
ExplainResponse explainResponse = client().prepareExplain("test", "parent", parentId)
@@ -663,7 +664,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
fieldValueFactorFunction("c_field1"))
.boostMode(CombineFunction.REPLACE)).scoreMode(ScoreMode.Total)).get();
- assertThat(response.getHits().totalHits(), equalTo(3l));
+ assertThat(response.getHits().totalHits(), equalTo(3L));
assertThat(response.getHits().hits()[0].id(), equalTo("1"));
assertThat(response.getHits().hits()[0].score(), equalTo(6f));
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
@@ -680,7 +681,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
fieldValueFactorFunction("c_field1"))
.boostMode(CombineFunction.REPLACE)).scoreMode(ScoreMode.Max)).get();
- assertThat(response.getHits().totalHits(), equalTo(3l));
+ assertThat(response.getHits().totalHits(), equalTo(3L));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
assertThat(response.getHits().hits()[0].score(), equalTo(4f));
assertThat(response.getHits().hits()[1].id(), equalTo("2"));
@@ -697,7 +698,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
fieldValueFactorFunction("c_field1"))
.boostMode(CombineFunction.REPLACE)).scoreMode(ScoreMode.Avg)).get();
- assertThat(response.getHits().totalHits(), equalTo(3l));
+ assertThat(response.getHits().totalHits(), equalTo(3L));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
assertThat(response.getHits().hits()[0].score(), equalTo(4f));
assertThat(response.getHits().hits()[1].id(), equalTo("2"));
@@ -715,7 +716,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
.boostMode(CombineFunction.REPLACE)).score(true))
.addSort(SortBuilders.fieldSort("c_field3")).addSort(SortBuilders.scoreSort()).get();
- assertThat(response.getHits().totalHits(), equalTo(7l));
+ assertThat(response.getHits().totalHits(), equalTo(7L));
assertThat(response.getHits().hits()[0].id(), equalTo("13"));
assertThat(response.getHits().hits()[0].score(), equalTo(5f));
assertThat(response.getHits().hits()[1].id(), equalTo("14"));
@@ -742,28 +743,28 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
SearchResponse response = client().prepareSearch("test")
.setQuery(QueryBuilders.hasChildQuery("child", matchQuery("text", "value"))).get();
assertNoFailures(response);
- assertThat(response.getHits().totalHits(), equalTo(0l));
+ assertThat(response.getHits().totalHits(), equalTo(0L));
client().prepareIndex("test", "child1").setSource(jsonBuilder().startObject().field("text", "value").endObject()).setRefresh(true)
.get();
response = client().prepareSearch("test").setQuery(QueryBuilders.hasChildQuery("child", matchQuery("text", "value"))).get();
assertNoFailures(response);
- assertThat(response.getHits().totalHits(), equalTo(0l));
+ assertThat(response.getHits().totalHits(), equalTo(0L));
response = client().prepareSearch("test").setQuery(QueryBuilders.hasChildQuery("child", matchQuery("text", "value")).scoreMode(ScoreMode.Max))
.get();
assertNoFailures(response);
- assertThat(response.getHits().totalHits(), equalTo(0l));
+ assertThat(response.getHits().totalHits(), equalTo(0L));
response = client().prepareSearch("test").setQuery(QueryBuilders.hasParentQuery("child", matchQuery("text", "value"))).get();
assertNoFailures(response);
- assertThat(response.getHits().totalHits(), equalTo(0l));
+ assertThat(response.getHits().totalHits(), equalTo(0L));
response = client().prepareSearch("test").setQuery(QueryBuilders.hasParentQuery("child", matchQuery("text", "value")).score(true))
.get();
assertNoFailures(response);
- assertThat(response.getHits().totalHits(), equalTo(0l));
+ assertThat(response.getHits().totalHits(), equalTo(0L));
}
public void testHasChildAndHasParentFilter_withFilter() throws Exception {
@@ -782,13 +783,13 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", termQuery("c_field", 1)))).get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
assertThat(searchResponse.getHits().hits()[0].id(), equalTo("1"));
searchResponse = client().prepareSearch("test")
.setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", 1)))).get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
assertThat(searchResponse.getHits().hits()[0].id(), equalTo("2"));
}
@@ -887,7 +888,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(hasChildQuery("child", termQuery("c_field", "yellow")).scoreMode(ScoreMode.Total)).get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1"));
assertThat(searchResponse.getHits().getAt(0).sourceAsString(), containsString("\"p_value1\""));
@@ -897,7 +898,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
boolQuery().must(matchQuery("c_field", "x")).must(
hasParentQuery("parent", termQuery("p_field", "p_value2")).score(true))).get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(2L));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("c3"));
assertThat(searchResponse.getHits().getAt(1).id(), equalTo("c4"));
@@ -913,7 +914,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow")).scoreMode(ScoreMode.Total))
.get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1"));
assertThat(searchResponse.getHits().getAt(0).sourceAsString(), containsString("\"p_value1\""));
@@ -923,7 +924,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
boolQuery().must(matchQuery("c_field", "x")).must(
hasParentQuery("parent", termQuery("p_field", "p_value2")).score(true))).get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(2L));
assertThat(searchResponse.getHits().getAt(0).id(), Matchers.anyOf(equalTo("c3"), equalTo("c4")));
assertThat(searchResponse.getHits().getAt(1).id(), Matchers.anyOf(equalTo("c3"), equalTo("c4")));
}
@@ -948,75 +949,68 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
.setMinScore(3) // Score needs to be 3 or above!
.get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p2"));
assertThat(searchResponse.getHits().getAt(0).score(), equalTo(3.0f));
}
- public void testParentFieldFilter() throws Exception {
+ public void testParentFieldQuery() throws Exception {
assertAcked(prepareCreate("test")
.setSettings(settingsBuilder().put(indexSettings())
.put("index.refresh_interval", -1))
.addMapping("parent")
- .addMapping("child", "_parent", "type=parent")
- .addMapping("child2", "_parent", "type=parent"));
+ .addMapping("child", "_parent", "type=parent"));
ensureGreen();
- // test term filter
- SearchResponse response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termQuery("_parent", "p1")))
+ SearchResponse response = client().prepareSearch("test").setQuery(termQuery("_parent", "p1"))
.get();
- assertHitCount(response, 0l);
-
- client().prepareIndex("test", "some_type", "1").setSource("field", "value").get();
- client().prepareIndex("test", "parent", "p1").setSource("p_field", "value").get();
- client().prepareIndex("test", "child", "c1").setSource("c_field", "value").setParent("p1").get();
+ assertHitCount(response, 0L);
- response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termQuery("_parent", "p1"))).execute()
- .actionGet();
- assertHitCount(response, 0l);
+ client().prepareIndex("test", "child", "c1").setSource("{}").setParent("p1").get();
refresh();
- response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termQuery("_parent", "p1"))).execute()
- .actionGet();
- assertHitCount(response, 1l);
-
- response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termQuery("_parent", "parent#p1"))).execute()
- .actionGet();
- assertHitCount(response, 1l);
+ response = client().prepareSearch("test").setQuery(termQuery("_parent#parent", "p1")).get();
+ assertHitCount(response, 1L);
- client().prepareIndex("test", "parent2", "p1").setSource("p_field", "value").setRefresh(true).get();
+ response = client().prepareSearch("test").setQuery(queryStringQuery("_parent#parent:p1")).get();
+ assertHitCount(response, 1L);
- response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termQuery("_parent", "p1"))).execute()
- .actionGet();
- assertHitCount(response, 1l);
-
- response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termQuery("_parent", "parent#p1"))).execute()
- .actionGet();
- assertHitCount(response, 1l);
+ client().prepareIndex("test", "child", "c2").setSource("{}").setParent("p2").get();
+ refresh();
+ response = client().prepareSearch("test").setQuery(termsQuery("_parent#parent", "p1", "p2")).get();
+ assertHitCount(response, 2L);
- // test terms filter
- client().prepareIndex("test", "child2", "c1").setSource("c_field", "value").setParent("p1").get();
- response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termsQuery("_parent", "p1"))).execute()
- .actionGet();
- assertHitCount(response, 1l);
+ response = client().prepareSearch("test")
+ .setQuery(boolQuery()
+ .should(termQuery("_parent#parent", "p1"))
+ .should(termQuery("_parent#parent", "p2"))
+ ).get();
+ assertHitCount(response, 2L);
+ }
- response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termsQuery("_parent", "parent#p1"))).execute()
- .actionGet();
- assertHitCount(response, 1l);
+ public void testParentIdQuery() throws Exception {
+ assertAcked(prepareCreate("test")
+ .setSettings(settingsBuilder().put(indexSettings())
+ .put("index.refresh_interval", -1))
+ .addMapping("parent")
+ .addMapping("child", "_parent", "type=parent"));
+ ensureGreen();
+ client().prepareIndex("test", "child", "c1").setSource("{}").setParent("p1").get();
refresh();
- response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termsQuery("_parent", "p1"))).execute()
- .actionGet();
- assertHitCount(response, 2l);
+ SearchResponse response = client().prepareSearch("test").setQuery(parentId("child", "p1")).get();
+ assertHitCount(response, 1L);
+
+ client().prepareIndex("test", "child", "c2").setSource("{}").setParent("p2").get();
refresh();
- response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termsQuery("_parent", "p1", "p1"))).execute()
- .actionGet();
- assertHitCount(response, 2l);
response = client().prepareSearch("test")
- .setQuery(boolQuery().must(matchAllQuery()).filter(termsQuery("_parent", "parent#p1", "parent2#p1"))).get();
- assertHitCount(response, 2l);
+ .setQuery(boolQuery()
+ .should(parentId("child", "p1"))
+ .should(parentId("child", "p2"))
+ ).get();
+ assertHitCount(response, 2L);
}
public void testHasChildNotBeingCached() throws IOException {
@@ -1044,7 +1038,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
.setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"))))
.get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
client().prepareIndex("test", "child", "c2").setParent("p2").setSource("c_field", "blue").get();
client().admin().indices().prepareRefresh("test").get();
@@ -1053,7 +1047,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
.setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"))))
.get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(2L));
}
private QueryBuilder randomHasChild(String type, String field, String value) {
@@ -1113,7 +1107,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
)
)
).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch("grandissue").setQuery(
boolQuery().must(
@@ -1130,7 +1124,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
)
)
).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
}
public void testIndexChildDocWithNoParentMapping() throws IOException {
@@ -1212,13 +1206,13 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
.setQuery(boolQuery().must(QueryBuilders.hasChildQuery("child", termQuery("c_field", "blue")).scoreMode(scoreMode)).filter(boolQuery().mustNot(termQuery("p_field", "3"))))
.get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch("test")
.setQuery(boolQuery().must(QueryBuilders.hasChildQuery("child", termQuery("c_field", "red")).scoreMode(scoreMode)).filter(boolQuery().mustNot(termQuery("p_field", "3"))))
.get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(2L));
}
public void testNamedFilters() throws Exception {
@@ -1234,25 +1228,25 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreMode(ScoreMode.Max).queryName("test"))
.get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test"));
searchResponse = client().prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "1")).score(true).queryName("test"))
.get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test"));
searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "1")).queryName("test")))
.get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test"));
searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(hasParentQuery("parent", termQuery("p_field", "1")).queryName("test")))
.get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test"));
}
@@ -1369,7 +1363,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
.must(QueryBuilders.hasChildQuery("child", matchQuery("c_field", "red")))
.must(matchAllQuery())))
.get();
- assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(2L));
}
@@ -1382,7 +1376,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
.must(matchAllQuery())))
.get();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
}
public void testParentChildQueriesViaScrollApi() throws Exception {
@@ -1414,10 +1408,10 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
.actionGet();
assertNoFailures(scrollResponse);
- assertThat(scrollResponse.getHits().totalHits(), equalTo(10l));
+ assertThat(scrollResponse.getHits().totalHits(), equalTo(10L));
int scannedDocs = 0;
do {
- assertThat(scrollResponse.getHits().totalHits(), equalTo(10l));
+ assertThat(scrollResponse.getHits().totalHits(), equalTo(10L));
scannedDocs += scrollResponse.getHits().getHits().length;
scrollResponse = client()
.prepareSearchScroll(scrollResponse.getScrollId())
@@ -1459,10 +1453,10 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
refresh();
SearchResponse response = client().prepareSearch("test")
- .setQuery(multiMatchQuery("1", "_parent"))
+ .setQuery(multiMatchQuery("1", "_parent#type1"))
.get();
- assertThat(response.getHits().totalHits(), equalTo(1l));
+ assertThat(response.getHits().totalHits(), equalTo(1L));
assertThat(response.getHits().getAt(0).id(), equalTo("1"));
}
@@ -1481,22 +1475,22 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(hasParentQuery("parent", boolQuery().mustNot(termQuery("field1", "a")))))
.get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
searchResponse = client().prepareSearch("test")
.setQuery(hasParentQuery("parent", constantScoreQuery(boolQuery().mustNot(termQuery("field1", "a")))))
.get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(hasParentQuery("parent", termQuery("field1", "a"))))
.get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
searchResponse = client().prepareSearch("test")
.setQuery(hasParentQuery("parent", constantScoreQuery(termQuery("field1", "a"))))
.get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
}
private List<IndexRequestBuilder> createMinMaxDocBuilders() {
@@ -1568,7 +1562,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
// Score mode = NONE
response = minMaxQuery(ScoreMode.None, 0, 0);
- assertThat(response.getHits().totalHits(), equalTo(3l));
+ assertThat(response.getHits().totalHits(), equalTo(3L));
assertThat(response.getHits().hits()[0].id(), equalTo("2"));
assertThat(response.getHits().hits()[0].score(), equalTo(1f));
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
@@ -1578,7 +1572,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = minMaxQuery(ScoreMode.None, 1, 0);
- assertThat(response.getHits().totalHits(), equalTo(3l));
+ assertThat(response.getHits().totalHits(), equalTo(3L));
assertThat(response.getHits().hits()[0].id(), equalTo("2"));
assertThat(response.getHits().hits()[0].score(), equalTo(1f));
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
@@ -1588,7 +1582,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = minMaxQuery(ScoreMode.None, 2, 0);
- assertThat(response.getHits().totalHits(), equalTo(2l));
+ assertThat(response.getHits().totalHits(), equalTo(2L));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
assertThat(response.getHits().hits()[0].score(), equalTo(1f));
assertThat(response.getHits().hits()[1].id(), equalTo("4"));
@@ -1596,17 +1590,17 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = minMaxQuery(ScoreMode.None, 3, 0);
- assertThat(response.getHits().totalHits(), equalTo(1l));
+ assertThat(response.getHits().totalHits(), equalTo(1L));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(1f));
response = minMaxQuery(ScoreMode.None, 4, 0);
- assertThat(response.getHits().totalHits(), equalTo(0l));
+ assertThat(response.getHits().totalHits(), equalTo(0L));
response = minMaxQuery(ScoreMode.None, 0, 4);
- assertThat(response.getHits().totalHits(), equalTo(3l));
+ assertThat(response.getHits().totalHits(), equalTo(3L));
assertThat(response.getHits().hits()[0].id(), equalTo("2"));
assertThat(response.getHits().hits()[0].score(), equalTo(1f));
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
@@ -1616,7 +1610,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = minMaxQuery(ScoreMode.None, 0, 3);
- assertThat(response.getHits().totalHits(), equalTo(3l));
+ assertThat(response.getHits().totalHits(), equalTo(3L));
assertThat(response.getHits().hits()[0].id(), equalTo("2"));
assertThat(response.getHits().hits()[0].score(), equalTo(1f));
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
@@ -1626,7 +1620,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = minMaxQuery(ScoreMode.None, 0, 2);
- assertThat(response.getHits().totalHits(), equalTo(2l));
+ assertThat(response.getHits().totalHits(), equalTo(2L));
assertThat(response.getHits().hits()[0].id(), equalTo("2"));
assertThat(response.getHits().hits()[0].score(), equalTo(1f));
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
@@ -1634,7 +1628,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = minMaxQuery(ScoreMode.None, 2, 2);
- assertThat(response.getHits().totalHits(), equalTo(1l));
+ assertThat(response.getHits().totalHits(), equalTo(1L));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
assertThat(response.getHits().hits()[0].score(), equalTo(1f));
@@ -1648,7 +1642,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
// Score mode = SUM
response = minMaxQuery(ScoreMode.Total, 0, 0);
- assertThat(response.getHits().totalHits(), equalTo(3l));
+ assertThat(response.getHits().totalHits(), equalTo(3L));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(6f));
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
@@ -1658,7 +1652,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = minMaxQuery(ScoreMode.Total, 1, 0);
- assertThat(response.getHits().totalHits(), equalTo(3l));
+ assertThat(response.getHits().totalHits(), equalTo(3L));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(6f));
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
@@ -1668,7 +1662,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = minMaxQuery(ScoreMode.Total, 2, 0);
- assertThat(response.getHits().totalHits(), equalTo(2l));
+ assertThat(response.getHits().totalHits(), equalTo(2L));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(6f));
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
@@ -1676,17 +1670,17 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = minMaxQuery(ScoreMode.Total, 3, 0);
- assertThat(response.getHits().totalHits(), equalTo(1l));
+ assertThat(response.getHits().totalHits(), equalTo(1L));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(6f));
response = minMaxQuery(ScoreMode.Total, 4, 0);
- assertThat(response.getHits().totalHits(), equalTo(0l));
+ assertThat(response.getHits().totalHits(), equalTo(0L));
response = minMaxQuery(ScoreMode.Total, 0, 4);
- assertThat(response.getHits().totalHits(), equalTo(3l));
+ assertThat(response.getHits().totalHits(), equalTo(3L));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(6f));
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
@@ -1696,7 +1690,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = minMaxQuery(ScoreMode.Total, 0, 3);
- assertThat(response.getHits().totalHits(), equalTo(3l));
+ assertThat(response.getHits().totalHits(), equalTo(3L));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(6f));
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
@@ -1706,7 +1700,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = minMaxQuery(ScoreMode.Total, 0, 2);
- assertThat(response.getHits().totalHits(), equalTo(2l));
+ assertThat(response.getHits().totalHits(), equalTo(2L));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
assertThat(response.getHits().hits()[0].score(), equalTo(3f));
assertThat(response.getHits().hits()[1].id(), equalTo("2"));
@@ -1714,7 +1708,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = minMaxQuery(ScoreMode.Total, 2, 2);
- assertThat(response.getHits().totalHits(), equalTo(1l));
+ assertThat(response.getHits().totalHits(), equalTo(1L));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
assertThat(response.getHits().hits()[0].score(), equalTo(3f));
@@ -1728,7 +1722,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
// Score mode = MAX
response = minMaxQuery(ScoreMode.Max, 0, 0);
- assertThat(response.getHits().totalHits(), equalTo(3l));
+ assertThat(response.getHits().totalHits(), equalTo(3L));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(3f));
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
@@ -1738,7 +1732,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = minMaxQuery(ScoreMode.Max, 1, 0);
- assertThat(response.getHits().totalHits(), equalTo(3l));
+ assertThat(response.getHits().totalHits(), equalTo(3L));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(3f));
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
@@ -1748,7 +1742,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = minMaxQuery(ScoreMode.Max, 2, 0);
- assertThat(response.getHits().totalHits(), equalTo(2l));
+ assertThat(response.getHits().totalHits(), equalTo(2L));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(3f));
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
@@ -1756,17 +1750,17 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = minMaxQuery(ScoreMode.Max, 3, 0);
- assertThat(response.getHits().totalHits(), equalTo(1l));
+ assertThat(response.getHits().totalHits(), equalTo(1L));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(3f));
response = minMaxQuery(ScoreMode.Max, 4, 0);
- assertThat(response.getHits().totalHits(), equalTo(0l));
+ assertThat(response.getHits().totalHits(), equalTo(0L));
response = minMaxQuery(ScoreMode.Max, 0, 4);
- assertThat(response.getHits().totalHits(), equalTo(3l));
+ assertThat(response.getHits().totalHits(), equalTo(3L));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(3f));
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
@@ -1776,7 +1770,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = minMaxQuery(ScoreMode.Max, 0, 3);
- assertThat(response.getHits().totalHits(), equalTo(3l));
+ assertThat(response.getHits().totalHits(), equalTo(3L));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(3f));
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
@@ -1786,7 +1780,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = minMaxQuery(ScoreMode.Max, 0, 2);
- assertThat(response.getHits().totalHits(), equalTo(2l));
+ assertThat(response.getHits().totalHits(), equalTo(2L));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
assertThat(response.getHits().hits()[0].score(), equalTo(2f));
assertThat(response.getHits().hits()[1].id(), equalTo("2"));
@@ -1794,7 +1788,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = minMaxQuery(ScoreMode.Max, 2, 2);
- assertThat(response.getHits().totalHits(), equalTo(1l));
+ assertThat(response.getHits().totalHits(), equalTo(1L));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
assertThat(response.getHits().hits()[0].score(), equalTo(2f));
@@ -1808,7 +1802,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
// Score mode = AVG
response = minMaxQuery(ScoreMode.Avg, 0, 0);
- assertThat(response.getHits().totalHits(), equalTo(3l));
+ assertThat(response.getHits().totalHits(), equalTo(3L));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(2f));
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
@@ -1818,7 +1812,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = minMaxQuery(ScoreMode.Avg, 1, 0);
- assertThat(response.getHits().totalHits(), equalTo(3l));
+ assertThat(response.getHits().totalHits(), equalTo(3L));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(2f));
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
@@ -1828,7 +1822,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = minMaxQuery(ScoreMode.Avg, 2, 0);
- assertThat(response.getHits().totalHits(), equalTo(2l));
+ assertThat(response.getHits().totalHits(), equalTo(2L));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(2f));
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
@@ -1836,17 +1830,17 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = minMaxQuery(ScoreMode.Avg, 3, 0);
- assertThat(response.getHits().totalHits(), equalTo(1l));
+ assertThat(response.getHits().totalHits(), equalTo(1L));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(2f));
response = minMaxQuery(ScoreMode.Avg, 4, 0);
- assertThat(response.getHits().totalHits(), equalTo(0l));
+ assertThat(response.getHits().totalHits(), equalTo(0L));
response = minMaxQuery(ScoreMode.Avg, 0, 4);
- assertThat(response.getHits().totalHits(), equalTo(3l));
+ assertThat(response.getHits().totalHits(), equalTo(3L));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(2f));
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
@@ -1856,7 +1850,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = minMaxQuery(ScoreMode.Avg, 0, 3);
- assertThat(response.getHits().totalHits(), equalTo(3l));
+ assertThat(response.getHits().totalHits(), equalTo(3L));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(2f));
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
@@ -1866,7 +1860,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = minMaxQuery(ScoreMode.Avg, 0, 2);
- assertThat(response.getHits().totalHits(), equalTo(2l));
+ assertThat(response.getHits().totalHits(), equalTo(2L));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
assertThat(response.getHits().hits()[0].score(), equalTo(1.5f));
assertThat(response.getHits().hits()[1].id(), equalTo("2"));
@@ -1874,7 +1868,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
response = minMaxQuery(ScoreMode.Avg, 2, 2);
- assertThat(response.getHits().totalHits(), equalTo(1l));
+ assertThat(response.getHits().totalHits(), equalTo(1L));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
assertThat(response.getHits().hits()[0].score(), equalTo(1.5f));
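The hunks above do two things: they migrate long literals from the lowercase 1l suffix to the clearer 1L throughout, and they replace raw _parent term/terms filters with the new parent_id query and the type-joined _parent#parent field. A minimal sketch of the two query styles, assuming the QueryBuilders.parentId(...) factory these tests statically import; the type, field, and id values are illustrative:

    import static org.elasticsearch.index.query.QueryBuilders.parentId;
    import static org.elasticsearch.index.query.QueryBuilders.termQuery;

    // Match children of parent "p1" via the dedicated parent_id query.
    QueryBuilder byParentId = parentId("child", "p1");

    // Equivalent term query against the join field; the parent type now
    // lives in the field name ("_parent#parent") rather than in a
    // "type#id" value as it did before this change.
    QueryBuilder byJoinField = termQuery("_parent#parent", "p1");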
diff --git a/core/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingIT.java b/core/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingIT.java
index 79bd729702..0c7c069ec3 100644
--- a/core/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingIT.java
+++ b/core/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingIT.java
@@ -73,7 +73,7 @@ public class ParentFieldLoadingIT extends ESIntegTestCase {
refresh();
ClusterStatsResponse response = client().admin().cluster().prepareClusterStats().get();
- assertThat(response.getIndicesStats().getFieldData().getMemorySizeInBytes(), equalTo(0l));
+ assertThat(response.getIndicesStats().getFieldData().getMemorySizeInBytes(), equalTo(0L));
logger.info("testing default loading...");
assertAcked(client().admin().indices().prepareDelete("test").get());
@@ -88,7 +88,7 @@ public class ParentFieldLoadingIT extends ESIntegTestCase {
refresh();
response = client().admin().cluster().prepareClusterStats().get();
- assertThat(response.getIndicesStats().getFieldData().getMemorySizeInBytes(), equalTo(0l));
+ assertThat(response.getIndicesStats().getFieldData().getMemorySizeInBytes(), equalTo(0L));
logger.info("testing eager loading...");
assertAcked(client().admin().indices().prepareDelete("test").get());
@@ -103,7 +103,7 @@ public class ParentFieldLoadingIT extends ESIntegTestCase {
refresh();
response = client().admin().cluster().prepareClusterStats().get();
- assertThat(response.getIndicesStats().getFieldData().getMemorySizeInBytes(), equalTo(0l));
+ assertThat(response.getIndicesStats().getFieldData().getMemorySizeInBytes(), equalTo(0L));
logger.info("testing eager global ordinals loading...");
assertAcked(client().admin().indices().prepareDelete("test").get());
@@ -121,7 +121,7 @@ public class ParentFieldLoadingIT extends ESIntegTestCase {
refresh();
response = client().admin().cluster().prepareClusterStats().get();
- assertThat(response.getIndicesStats().getFieldData().getMemorySizeInBytes(), greaterThan(0l));
+ assertThat(response.getIndicesStats().getFieldData().getMemorySizeInBytes(), greaterThan(0L));
}
public void testChangingEagerParentFieldLoadingAtRuntime() throws Exception {
@@ -136,7 +136,7 @@ public class ParentFieldLoadingIT extends ESIntegTestCase {
refresh();
ClusterStatsResponse response = client().admin().cluster().prepareClusterStats().get();
- assertThat(response.getIndicesStats().getFieldData().getMemorySizeInBytes(), equalTo(0l));
+ assertThat(response.getIndicesStats().getFieldData().getMemorySizeInBytes(), equalTo(0L));
PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping("test").setType("child")
.setSource(childMapping(MappedFieldType.Loading.EAGER_GLOBAL_ORDINALS))
@@ -157,7 +157,7 @@ public class ParentFieldLoadingIT extends ESIntegTestCase {
MapperService mapperService = indexService.mapperService();
DocumentMapper documentMapper = mapperService.documentMapper("child");
if (documentMapper != null) {
- verified = documentMapper.parentFieldMapper().getChildJoinFieldType().fieldDataType().getLoading() == MappedFieldType.Loading.EAGER_GLOBAL_ORDINALS;
+ verified = documentMapper.parentFieldMapper().fieldType().fieldDataType().getLoading() == MappedFieldType.Loading.EAGER_GLOBAL_ORDINALS;
}
}
assertTrue(verified);
@@ -169,7 +169,7 @@ public class ParentFieldLoadingIT extends ESIntegTestCase {
client().prepareIndex("test", "dummy", "dummy").setSource("{}").get();
refresh();
response = client().admin().cluster().prepareClusterStats().get();
- assertThat(response.getIndicesStats().getFieldData().getMemorySizeInBytes(), greaterThan(0l));
+ assertThat(response.getIndicesStats().getFieldData().getMemorySizeInBytes(), greaterThan(0L));
}
private XContentBuilder childMapping(MappedFieldType.Loading loading) throws IOException {
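For context on the fielddata assertions above: whether memory is allocated up front depends on the loading mode in the _parent mapping that childMapping(...) builds. A rough sketch of such a mapping, assuming the 2.x-era fielddata syntax this test exercises; only the loading value is significant here:

    // Hypothetical child mapping with eager global-ordinals loading.
    XContentBuilder mapping = jsonBuilder().startObject().startObject("child")
            .startObject("_parent")
                .field("type", "parent")
                .startObject("fielddata")
                    .field("loading", "eager_global_ordinals")
                .endObject()
            .endObject()
        .endObject().endObject();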
diff --git a/core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java b/core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java
index 1eff57a056..97bd158158 100644
--- a/core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java
+++ b/core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java
@@ -165,7 +165,7 @@ public class FetchSubPhasePluginIT extends ESIntegTestCase {
hitField = new InternalSearchHitField(NAMES[0], new ArrayList<>(1));
hitContext.hit().fields().put(NAMES[0], hitField);
}
- TermVectorsResponse termVector = context.indexShard().getTermVectors(new TermVectorsRequest(context.indexShard().shardId().index().getName(), hitContext.hit().type(), hitContext.hit().id()));
+ TermVectorsResponse termVector = context.indexShard().getTermVectors(new TermVectorsRequest(context.indexShard().shardId().getIndex().getName(), hitContext.hit().type(), hitContext.hit().id()));
try {
Map<String, Integer> tv = new HashMap<>();
TermsEnum terms = termVector.getFields().terms(field).iterator();
diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java
index 39ce61f6c7..3267b6b0c8 100644
--- a/core/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java
+++ b/core/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java
@@ -80,7 +80,7 @@ public class ExplainableScriptIT extends ESIntegTestCase {
ElasticsearchAssertions.assertNoFailures(response);
SearchHits hits = response.getHits();
- assertThat(hits.getTotalHits(), equalTo(20l));
+ assertThat(hits.getTotalHits(), equalTo(20L));
int idCounter = 19;
for (SearchHit hit : hits.getHits()) {
assertThat(hit.getId(), equalTo(Integer.toString(idCounter)));
diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreBackwardCompatibilityIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreBackwardCompatibilityIT.java
index db5f1ed70e..2e7a0a2329 100644
--- a/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreBackwardCompatibilityIT.java
+++ b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreBackwardCompatibilityIT.java
@@ -104,7 +104,7 @@ public class FunctionScoreBackwardCompatibilityIT extends ESBackcompatTestCase {
@Override
protected Settings commonNodeSettings(int nodeOrdinal) {
return Settings.builder().put(super.commonNodeSettings(nodeOrdinal))
- .put("script.inline", "on").build();
+ .put("script.inline", "true").build();
}
private void checkFunctionScoreStillWorks(String... ids) throws ExecutionException, InterruptedException, IOException {
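The one-line settings change above tracks script settings moving from on/off strings to booleans. A minimal sketch, assuming the Settings builder used elsewhere in these tests:

    Settings settings = Settings.builder()
            .put("script.inline", "true")  // previously "on"
            .build();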
diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java
index 5644f89360..959adac3da 100644
--- a/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java
+++ b/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java
@@ -37,9 +37,9 @@ import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
-import org.elasticsearch.search.rescore.QueryRescoreMode;
import org.elasticsearch.search.rescore.RescoreBuilder;
-import org.elasticsearch.search.rescore.RescoreBuilder.QueryRescorer;
+import org.elasticsearch.search.rescore.QueryRescoreMode;
+import org.elasticsearch.search.rescore.QueryRescorerBuilder;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.Arrays;
@@ -119,7 +119,7 @@ public class QueryRescorerIT extends ESIntegTestCase {
RescoreBuilder.queryRescorer(QueryBuilders.matchPhraseQuery("field1", "quick brown").slop(2).boost(4.0f))
.setRescoreQueryWeight(2), 5).execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(3l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(3L));
assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1"));
assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("3"));
assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("2"));
@@ -538,7 +538,7 @@ public class QueryRescorerIT extends ESIntegTestCase {
String[] scoreModes = new String[]{ "max", "min", "avg", "total", "multiply", "" };
String[] descriptionModes = new String[]{ "max of:", "min of:", "avg of:", "sum of:", "product of:", "sum of:" };
for (int innerMode = 0; innerMode < scoreModes.length; innerMode++) {
- QueryRescorer innerRescoreQuery = RescoreBuilder.queryRescorer(QueryBuilders.matchQuery("field1", "the quick brown").boost(4.0f))
+ QueryRescorerBuilder innerRescoreQuery = RescoreBuilder.queryRescorer(QueryBuilders.matchQuery("field1", "the quick brown").boost(4.0f))
.setQueryWeight(0.5f).setRescoreQueryWeight(0.4f);
if (!"".equals(scoreModes[innerMode])) {
@@ -561,7 +561,7 @@ public class QueryRescorerIT extends ESIntegTestCase {
}
for (int outerMode = 0; outerMode < scoreModes.length; outerMode++) {
- QueryRescorer outerRescoreQuery = RescoreBuilder.queryRescorer(QueryBuilders.matchQuery("field1", "the quick brown")
+ QueryRescorerBuilder outerRescoreQuery = RescoreBuilder.queryRescorer(QueryBuilders.matchQuery("field1", "the quick brown")
.boost(4.0f)).setQueryWeight(0.5f).setRescoreQueryWeight(0.4f);
if (!"".equals(scoreModes[outerMode])) {
@@ -572,7 +572,7 @@ public class QueryRescorerIT extends ESIntegTestCase {
.prepareSearch()
.setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
.setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR))
- .addRescorer(innerRescoreQuery, 5).addRescorer(outerRescoreQuery, 10)
+ .addRescorer(innerRescoreQuery, 5).addRescorer(outerRescoreQuery.windowSize(10))
.setExplain(true).get();
assertHitCount(searchResponse, 3);
assertFirstHit(searchResponse, hasId("1"));
@@ -599,7 +599,7 @@ public class QueryRescorerIT extends ESIntegTestCase {
for (int i = 0; i < numDocs - 4; i++) {
String[] intToEnglish = new String[] { English.intToEnglish(i), English.intToEnglish(i + 1), English.intToEnglish(i + 2), English.intToEnglish(i + 3) };
- QueryRescorer rescoreQuery = RescoreBuilder
+ QueryRescorerBuilder rescoreQuery = RescoreBuilder
.queryRescorer(
QueryBuilders.boolQuery()
.disableCoord(true)
@@ -682,10 +682,10 @@ public class QueryRescorerIT extends ESIntegTestCase {
public void testMultipleRescores() throws Exception {
int numDocs = indexRandomNumbers("keyword", 1, true);
- QueryRescorer eightIsGreat = RescoreBuilder.queryRescorer(
+ QueryRescorerBuilder eightIsGreat = RescoreBuilder.queryRescorer(
QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", English.intToEnglish(8)),
ScoreFunctionBuilders.weightFactorFunction(1000.0f)).boostMode(CombineFunction.REPLACE)).setScoreMode(QueryRescoreMode.Total);
- QueryRescorer sevenIsBetter = RescoreBuilder.queryRescorer(
+ QueryRescorerBuilder sevenIsBetter = RescoreBuilder.queryRescorer(
QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("field1", English.intToEnglish(7)),
ScoreFunctionBuilders.weightFactorFunction(10000.0f)).boostMode(CombineFunction.REPLACE))
.setScoreMode(QueryRescoreMode.Total);
@@ -703,10 +703,10 @@ public class QueryRescorerIT extends ESIntegTestCase {
// We have no idea what the second hit will be because we didn't get a chance to look for seven
// Now use one rescore to drag the number we're looking for into the window of another
- QueryRescorer ninetyIsGood = RescoreBuilder.queryRescorer(
+ QueryRescorerBuilder ninetyIsGood = RescoreBuilder.queryRescorer(
QueryBuilders.functionScoreQuery(QueryBuilders.queryStringQuery("*ninety*"), ScoreFunctionBuilders.weightFactorFunction(1000.0f))
.boostMode(CombineFunction.REPLACE)).setScoreMode(QueryRescoreMode.Total);
- QueryRescorer oneToo = RescoreBuilder.queryRescorer(
+ QueryRescorerBuilder oneToo = RescoreBuilder.queryRescorer(
QueryBuilders.functionScoreQuery(QueryBuilders.queryStringQuery("*one*"), ScoreFunctionBuilders.weightFactorFunction(1000.0f))
.boostMode(CombineFunction.REPLACE)).setScoreMode(QueryRescoreMode.Total);
request.clearRescorers().addRescorer(ninetyIsGood, numDocs).addRescorer(oneToo, 10);
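The rescorer hunks above rename the concrete type from RescoreBuilder.QueryRescorer to a top-level QueryRescorerBuilder, and the rescore window can now be set on the builder itself via windowSize(...) in addition to the existing two-argument addRescorer overload. A sketch under those assumptions; searchRequestBuilder and the query text are placeholders:

    QueryRescorerBuilder rescorer = RescoreBuilder
            .queryRescorer(QueryBuilders.matchQuery("field1", "quick brown"))
            .setQueryWeight(0.5f)
            .setRescoreQueryWeight(0.4f)
            .setScoreMode(QueryRescoreMode.Total);

    // Window size set on the builder (new form) ...
    searchRequestBuilder.addRescorer(rescorer.windowSize(10));
    // ... or passed alongside it, as the tests above still do.
    searchRequestBuilder.addRescorer(rescorer, 10);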
diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java
index 14d620b9b9..651e1a8d1a 100644
--- a/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java
+++ b/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java
@@ -109,7 +109,7 @@ public class GeoBoundingBoxIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch() // from NY
.setQuery(geoBoundingBoxQuery("location").setCorners(40.73, -74.1, 40.717, -73.99))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
assertThat(searchResponse.getHits().hits().length, equalTo(2));
for (SearchHit hit : searchResponse.getHits()) {
assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("5")));
@@ -118,7 +118,7 @@ public class GeoBoundingBoxIT extends ESIntegTestCase {
searchResponse = client().prepareSearch() // from NY
.setQuery(geoBoundingBoxQuery("location").setCorners(40.73, -74.1, 40.717, -73.99).type("indexed"))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
assertThat(searchResponse.getHits().hits().length, equalTo(2));
for (SearchHit hit : searchResponse.getHits()) {
assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("5")));
@@ -182,52 +182,52 @@ public class GeoBoundingBoxIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch()
.setQuery(geoBoundingBoxQuery("location").setCorners(41, -11, 40, 9))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().hits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("2"));
searchResponse = client().prepareSearch()
.setQuery(geoBoundingBoxQuery("location").setCorners(41, -11, 40, 9).type("indexed"))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().hits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("2"));
searchResponse = client().prepareSearch()
.setQuery(geoBoundingBoxQuery("location").setCorners(41, -9, 40, 11))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().hits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("3"));
searchResponse = client().prepareSearch()
.setQuery(geoBoundingBoxQuery("location").setCorners(41, -9, 40, 11).type("indexed"))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().hits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("3"));
searchResponse = client().prepareSearch()
.setQuery(geoBoundingBoxQuery("location").setCorners(11, 171, 1, -169))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().hits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("5"));
searchResponse = client().prepareSearch()
.setQuery(geoBoundingBoxQuery("location").setCorners(11, 171, 1, -169).type("indexed"))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().hits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("5"));
searchResponse = client().prepareSearch()
.setQuery(geoBoundingBoxQuery("location").setCorners(9, 169, -1, -171))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().hits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("9"));
searchResponse = client().prepareSearch()
.setQuery(geoBoundingBoxQuery("location").setCorners(9, 169, -1, -171).type("indexed"))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().hits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("9"));
}
@@ -265,26 +265,26 @@ public class GeoBoundingBoxIT extends ESIntegTestCase {
boolQuery().must(termQuery("userid", 880)).filter(
geoBoundingBoxQuery("location").setCorners(74.579421999999994, 143.5, -66.668903999999998, 113.96875))
).execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch()
.setQuery(
boolQuery().must(termQuery("userid", 880)).filter(
geoBoundingBoxQuery("location").setCorners(74.579421999999994, 143.5, -66.668903999999998, 113.96875).type("indexed"))
).execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch()
.setQuery(
boolQuery().must(termQuery("userid", 534)).filter(
geoBoundingBoxQuery("location").setCorners(74.579421999999994, 143.5, -66.668903999999998, 113.96875))
).execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch()
.setQuery(
boolQuery().must(termQuery("userid", 534)).filter(
geoBoundingBoxQuery("location").setCorners(74.579421999999994, 143.5, -66.668903999999998, 113.96875).type("indexed"))
).execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
}
public void testCompleteLonRange() throws Exception {
@@ -319,43 +319,43 @@ public class GeoBoundingBoxIT extends ESIntegTestCase {
.setQuery(
geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(50, -180, -50, 180)
).execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch()
.setQuery(
geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(50, -180, -50, 180).type("indexed")
).execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch()
.setQuery(
geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(90, -180, -90, 180)
).execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(2L));
searchResponse = client().prepareSearch()
.setQuery(
geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(90, -180, -90, 180).type("indexed")
).execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(2L));
searchResponse = client().prepareSearch()
.setQuery(
geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(50, 0, -50, 360)
).execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch()
.setQuery(
geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(50, 0, -50, 360).type("indexed")
).execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch()
.setQuery(
geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(90, 0, -90, 360)
).execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(2L));
searchResponse = client().prepareSearch()
.setQuery(
geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(90, 0, -90, 360).type("indexed")
).execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(2L));
}
}
diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java
index c6f05ee23a..7afbeaa9ab 100644
--- a/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java
+++ b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java
@@ -104,7 +104,7 @@ public class GeoShapeQueryTests extends ESSingleNodeTestCase {
.execute().actionGet();
assertSearchResponse(searchResponse);
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().hits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("1"));
@@ -113,7 +113,7 @@ public class GeoShapeQueryTests extends ESSingleNodeTestCase {
.execute().actionGet();
assertSearchResponse(searchResponse);
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().hits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("1"));
}
@@ -150,7 +150,7 @@ public class GeoShapeQueryTests extends ESSingleNodeTestCase {
.execute().actionGet();
assertSearchResponse(searchResponse);
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().hits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("blakely"));
}
@@ -183,7 +183,7 @@ public class GeoShapeQueryTests extends ESSingleNodeTestCase {
.execute().actionGet();
assertSearchResponse(searchResponse);
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().hits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("1"));
@@ -192,7 +192,7 @@ public class GeoShapeQueryTests extends ESSingleNodeTestCase {
.execute().actionGet();
assertSearchResponse(searchResponse);
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().hits().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("1"));
}
diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java
index 383bde00b0..5dc8528c00 100644
--- a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java
@@ -19,13 +19,6 @@
package org.elasticsearch.search.highlight;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.ParseFieldMatcher;
@@ -64,6 +57,13 @@ import org.elasticsearch.test.IndexSettingsModule;
import org.junit.AfterClass;
import org.junit.BeforeClass;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
@@ -275,7 +275,7 @@ public class HighlightBuilderTests extends ESTestCase {
public void testBuildSearchContextHighlight() throws IOException {
Settings indexSettings = Settings.settingsBuilder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
- Index index = new Index(randomAsciiOfLengthBetween(1, 10));
+ Index index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_");
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings);
// shard context will only need indicesQueriesRegistry for building Query objects nested in highlighter
QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, null, null, null, null, indicesQueriesRegistry) {
diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java
index 6f9ef10128..eaad1536e2 100644
--- a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java
@@ -174,13 +174,13 @@ public class HighlighterSearchIT extends ESIntegTestCase {
.field("index_options", "offsets")
.field("term_vector", "with_positions_offsets")
.field("type", "string")
- .field("store", "no")
+ .field("store", false)
.endObject()
.startObject("text")
.field("index_options", "offsets")
.field("term_vector", "with_positions_offsets")
.field("type", "string")
- .field("store", "yes")
+ .field("store", true)
.endObject()
.endObject()
.endObject();
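The `store` mapping parameter is consistently migrated from the string values `"yes"`/`"no"` to proper booleans, both in `XContentBuilder` mappings and in the shorthand `key=value` mapping strings. A small sketch of the updated builder style (field names are illustrative):

    XContentBuilder mapping = XContentFactory.jsonBuilder().startObject()
        .startObject("type1").startObject("properties")
            .startObject("title")
                .field("type", "string")
                .field("store", true)   // boolean, no longer the string "yes"
            .endObject()
        .endObject().endObject()
    .endObject();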
@@ -205,7 +205,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
// see #3486
public void testHighTermFrequencyDoc() throws IOException {
assertAcked(prepareCreate("test")
- .addMapping("test", "name", "type=string,term_vector=with_positions_offsets,store=" + (randomBoolean() ? "yes" : "no")));
+ .addMapping("test", "name", "type=string,term_vector=with_positions_offsets,store=" + randomBoolean()));
ensureYellow();
StringBuilder builder = new StringBuilder();
for (int i = 0; i < 6000; i++) {
@@ -471,8 +471,8 @@ public class HighlighterSearchIT extends ESIntegTestCase {
assertAcked(prepareCreate("test")
.addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
// we don't store title and don't use term vector, now let's see if it works...
- .startObject("title").field("type", "string").field("store", "no").field("term_vector", "no").endObject()
- .startObject("attachments").startObject("properties").startObject("body").field("type", "string").field("store", "no").field("term_vector", "no").endObject().endObject().endObject()
+ .startObject("title").field("type", "string").field("store", false).field("term_vector", "no").endObject()
+ .startObject("attachments").startObject("properties").startObject("body").field("type", "string").field("store", false).field("term_vector", "no").endObject().endObject().endObject()
.endObject().endObject().endObject()));
ensureYellow();
@@ -510,8 +510,8 @@ public class HighlighterSearchIT extends ESIntegTestCase {
assertAcked(prepareCreate("test")
.addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
// we don't store title, now let's see if it works...
- .startObject("title").field("type", "string").field("store", "no").field("term_vector", "with_positions_offsets").endObject()
- .startObject("attachments").startObject("properties").startObject("body").field("type", "string").field("store", "no").field("term_vector", "with_positions_offsets").endObject().endObject().endObject()
+ .startObject("title").field("type", "string").field("store", false).field("term_vector", "with_positions_offsets").endObject()
+ .startObject("attachments").startObject("properties").startObject("body").field("type", "string").field("store", false).field("term_vector", "with_positions_offsets").endObject().endObject().endObject()
.endObject().endObject().endObject()));
ensureYellow();
@@ -549,8 +549,8 @@ public class HighlighterSearchIT extends ESIntegTestCase {
assertAcked(prepareCreate("test")
.addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
// we don't store title, now let's see if it works...
- .startObject("title").field("type", "string").field("store", "no").field("index_options", "offsets").endObject()
- .startObject("attachments").startObject("properties").startObject("body").field("type", "string").field("store", "no").field("index_options", "offsets").endObject().endObject().endObject()
+ .startObject("title").field("type", "string").field("store", false).field("index_options", "offsets").endObject()
+ .startObject("attachments").startObject("properties").startObject("body").field("type", "string").field("store", false).field("index_options", "offsets").endObject().endObject().endObject()
.endObject().endObject().endObject()));
ensureYellow();
@@ -598,7 +598,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public void testHighlightIssue1994() throws Exception {
assertAcked(prepareCreate("test")
- .addMapping("type1", "title", "type=string,store=no", "titleTV", "type=string,store=no,term_vector=with_positions_offsets"));
+ .addMapping("type1", "title", "type=string,store=false", "titleTV", "type=string,store=false,term_vector=with_positions_offsets"));
ensureYellow();
indexRandom(false, client().prepareIndex("test", "type1", "1")
@@ -683,7 +683,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
.addMapping("type1", jsonBuilder().startObject().startObject("type1")
.startObject("_source").field("enabled", false).endObject()
.startObject("properties")
- .startObject("field1").field("type", "string").field("store", "yes").field("index_options", "offsets")
+ .startObject("field1").field("type", "string").field("store", true).field("index_options", "offsets")
.field("term_vector", "with_positions_offsets").endObject()
.endObject().endObject().endObject()));
@@ -915,7 +915,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
.startObject("foo")
.field("type", "string")
.field("termVector", "with_positions_offsets")
- .field("store", "yes")
+ .field("store", true)
.field("analyzer", "english")
.startObject("fields")
.startObject("plain")
@@ -928,7 +928,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
.startObject("bar")
.field("type", "string")
.field("termVector", "with_positions_offsets")
- .field("store", "yes")
+ .field("store", true)
.field("analyzer", "english")
.startObject("fields")
.startObject("plain")
@@ -1101,7 +1101,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public XContentBuilder type1TermVectorMapping() throws IOException {
return XContentFactory.jsonBuilder().startObject().startObject("type1")
- .startObject("_all").field("store", "yes").field("termVector", "with_positions_offsets").endObject()
+ .startObject("_all").field("store", true).field("termVector", "with_positions_offsets").endObject()
.startObject("properties")
.startObject("field1").field("type", "string").field("termVector", "with_positions_offsets").endObject()
.startObject("field2").field("type", "string").field("termVector", "with_positions_offsets").endObject()
@@ -1111,7 +1111,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public void testSameContent() throws Exception {
assertAcked(prepareCreate("test")
- .addMapping("type1", "title", "type=string,store=yes,term_vector=with_positions_offsets"));
+ .addMapping("type1", "title", "type=string,store=true,term_vector=with_positions_offsets"));
ensureYellow();
IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5];
@@ -1133,7 +1133,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public void testFastVectorHighlighterOffsetParameter() throws Exception {
assertAcked(prepareCreate("test")
- .addMapping("type1", "title", "type=string,store=yes,term_vector=with_positions_offsets").get());
+ .addMapping("type1", "title", "type=string,store=true,term_vector=with_positions_offsets").get());
ensureYellow();
IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5];
@@ -1156,7 +1156,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public void testEscapeHtml() throws Exception {
assertAcked(prepareCreate("test")
- .addMapping("type1", "title", "type=string,store=yes"));
+ .addMapping("type1", "title", "type=string,store=true"));
ensureYellow();
IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5];
@@ -1178,7 +1178,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public void testEscapeHtmlVector() throws Exception {
assertAcked(prepareCreate("test")
- .addMapping("type1", "title", "type=string,store=yes,term_vector=with_positions_offsets"));
+ .addMapping("type1", "title", "type=string,store=true,term_vector=with_positions_offsets"));
ensureYellow();
IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5];
@@ -1201,9 +1201,9 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public void testMultiMapperVectorWithStore() throws Exception {
assertAcked(prepareCreate("test")
.addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
- .startObject("title").field("type", "string").field("store", "yes").field("term_vector", "with_positions_offsets").field("analyzer", "classic")
+ .startObject("title").field("type", "string").field("store", true).field("term_vector", "with_positions_offsets").field("analyzer", "classic")
.startObject("fields")
- .startObject("key").field("type", "string").field("store", "yes").field("term_vector", "with_positions_offsets").field("analyzer", "whitespace").endObject()
+ .startObject("key").field("type", "string").field("store", true).field("term_vector", "with_positions_offsets").field("analyzer", "whitespace").endObject()
.endObject().endObject()
.endObject().endObject().endObject()));
ensureGreen();
@@ -1229,9 +1229,9 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public void testMultiMapperVectorFromSource() throws Exception {
assertAcked(prepareCreate("test")
.addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
- .startObject("title").field("type", "string").field("store", "no").field("term_vector", "with_positions_offsets").field("analyzer", "classic")
+ .startObject("title").field("type", "string").field("store", false).field("term_vector", "with_positions_offsets").field("analyzer", "classic")
.startObject("fields")
- .startObject("key").field("type", "string").field("store", "no").field("term_vector", "with_positions_offsets").field("analyzer", "whitespace").endObject()
+ .startObject("key").field("type", "string").field("store", false).field("term_vector", "with_positions_offsets").field("analyzer", "whitespace").endObject()
.endObject().endObject()
.endObject().endObject().endObject()));
ensureGreen();
@@ -1259,9 +1259,9 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public void testMultiMapperNoVectorWithStore() throws Exception {
assertAcked(prepareCreate("test")
.addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
- .startObject("title").field("type", "string").field("store", "yes").field("term_vector", "no").field("analyzer", "classic")
+ .startObject("title").field("type", "string").field("store", true).field("term_vector", "no").field("analyzer", "classic")
.startObject("fields")
- .startObject("key").field("type", "string").field("store", "yes").field("term_vector", "no").field("analyzer", "whitespace").endObject()
+ .startObject("key").field("type", "string").field("store", true).field("term_vector", "no").field("analyzer", "whitespace").endObject()
.endObject().endObject()
.endObject().endObject().endObject()));
@@ -1289,9 +1289,9 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public void testMultiMapperNoVectorFromSource() throws Exception {
assertAcked(prepareCreate("test")
.addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
- .startObject("title").field("type", "string").field("store", "no").field("term_vector", "no").field("analyzer", "classic")
+ .startObject("title").field("type", "string").field("store", false).field("term_vector", "no").field("analyzer", "classic")
.startObject("fields")
- .startObject("key").field("type", "string").field("store", "no").field("term_vector", "no").field("analyzer", "whitespace").endObject()
+ .startObject("key").field("type", "string").field("store", false).field("term_vector", "no").field("analyzer", "whitespace").endObject()
.endObject().endObject()
.endObject().endObject().endObject()));
ensureGreen();
@@ -1317,7 +1317,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public void testFastVectorHighlighterShouldFailIfNoTermVectors() throws Exception {
assertAcked(prepareCreate("test")
- .addMapping("type1", "title", "type=string,store=yes,term_vector=no"));
+ .addMapping("type1", "title", "type=string,store=true,term_vector=no"));
ensureGreen();
IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5];
@@ -1347,7 +1347,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public void testDisableFastVectorHighlighter() throws Exception {
assertAcked(prepareCreate("test")
- .addMapping("type1", "title", "type=string,store=yes,term_vector=with_positions_offsets,analyzer=classic"));
+ .addMapping("type1", "title", "type=string,store=true,term_vector=with_positions_offsets,analyzer=classic"));
ensureGreen();
IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5];
@@ -1485,7 +1485,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
.putArray("index.analysis.filter.synonym.synonyms", "quick => fast");
assertAcked(prepareCreate("test").setSettings(builder.build()).addMapping("type1", type1TermVectorMapping())
- .addMapping("type2", "_all", "store=yes,termVector=with_positions_offsets",
+ .addMapping("type2", "_all", "store=true,termVector=with_positions_offsets",
"field4", "type=string,term_vector=with_positions_offsets,analyzer=synonym",
"field3", "type=string,analyzer=synonym"));
ensureGreen();
@@ -1622,7 +1622,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public void testMissingStoredField() throws Exception {
assertAcked(prepareCreate("test")
- .addMapping("type1", "highlight_field", "type=string,store=yes"));
+ .addMapping("type1", "highlight_field", "type=string,store=true"));
ensureGreen();
client().prepareIndex("test", "type1", "1")
.setSource(jsonBuilder().startObject()
@@ -1659,7 +1659,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
.get();
// Highlighting of numeric fields is not supported, but it should not raise errors
// (this behavior is consistent with version 0.20)
- assertHitCount(response, 1l);
+ assertHitCount(response, 1L);
}
// Issue #3200
@@ -1680,7 +1680,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
.setQuery(QueryBuilders.matchQuery("text", "test").type(MatchQuery.Type.BOOLEAN))
.highlighter(new HighlightBuilder().field("text")).execute().actionGet();
// PatternAnalyzer will throw an exception if it is reset twice
- assertHitCount(response, 1l);
+ assertHitCount(response, 1L);
}
public void testHighlightUsesHighlightQuery() throws IOException {
@@ -1744,7 +1744,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
private static String randomStoreField() {
if (randomBoolean()) {
- return "store=yes,";
+ return "store=true,";
}
return "";
}
@@ -2118,7 +2118,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
.field(new HighlightBuilder.Field("field1").numOfFragments(0).preTags("<field1>").postTags("</field1>")));
searchResponse = client().search(searchRequest("test").source(source)).actionGet();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
for (SearchHit searchHit : searchResponse.getHits()) {
if ("1".equals(searchHit.id())) {
@@ -2136,7 +2136,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public void testMultiMatchQueryHighlight() throws IOException {
String[] highlighterTypes = new String[] {"fvh", "plain", "postings"};
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
- .startObject("_all").field("store", "yes").field("index_options", "offsets").endObject()
+ .startObject("_all").field("store", true).field("index_options", "offsets").endObject()
.startObject("properties")
.startObject("field1").field("type", "string").field("index_options", "offsets").field("term_vector", "with_positions_offsets").endObject()
.startObject("field2").field("type", "string").field("index_options", "offsets").field("term_vector", "with_positions_offsets").endObject()
@@ -2167,7 +2167,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
.field(new Field("field1").requireFieldMatch(true).preTags("<field1>").postTags("</field1>")));
logger.info("Running multi-match type: [" + matchQueryType + "] highlight with type: [" + highlighterType + "]");
SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertHighlight(searchResponse, 0, "field1", 0, anyOf(equalTo("<field1>The quick brown fox</field1> jumps over"),
equalTo("<field1>The</field1> <field1>quick</field1> <field1>brown</field1> <field1>fox</field1> jumps over")));
}
@@ -2226,9 +2226,9 @@ public class HighlighterSearchIT extends ESIntegTestCase {
assertAcked(prepareCreate("test")
.addMapping("type1", jsonBuilder().startObject().startObject("type1")
.startObject("properties")
- .startObject("title").field("type", "string").field("store", "yes").field("index_options", "offsets").field("analyzer", "classic")
+ .startObject("title").field("type", "string").field("store", true).field("index_options", "offsets").field("analyzer", "classic")
.startObject("fields")
- .startObject("key").field("type", "string").field("store", "yes").field("index_options", "offsets").field("analyzer", "whitespace").endObject()
+ .startObject("key").field("type", "string").field("store", true).field("index_options", "offsets").field("analyzer", "whitespace").endObject()
.endObject().endObject()
.endObject().endObject().endObject()));
ensureGreen();
@@ -2240,7 +2240,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
.setQuery(matchQuery("title", "This is a Test"))
.highlighter(new HighlightBuilder().field("title")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
SearchHit hit = searchResponse.getHits().getAt(0);
// stopwords are not highlighted since they are not indexed
assertHighlight(hit, "title", 0, 1, equalTo("this is a <em>test</em> ."));
@@ -2249,7 +2249,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
searchResponse = client().prepareSearch()
.setQuery(matchQuery("title.key", "this is a test"))
.highlighter(new HighlightBuilder().field("title.key")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
// stopwords are now highlighted since we used only the whitespace analyzer here
assertHighlight(searchResponse, 0, "title.key", 0, 1, equalTo("<em>this</em> <em>is</em> <em>a</em> <em>test</em> ."));
@@ -2258,9 +2258,9 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public void testPostingsHighlighterMultiMapperFromSource() throws Exception {
assertAcked(prepareCreate("test")
.addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
- .startObject("title").field("type", "string").field("store", "no").field("index_options", "offsets").field("analyzer", "classic")
+ .startObject("title").field("type", "string").field("store", false).field("index_options", "offsets").field("analyzer", "classic")
.startObject("fields")
- .startObject("key").field("type", "string").field("store", "no").field("index_options", "offsets").field("analyzer", "whitespace").endObject()
+ .startObject("key").field("type", "string").field("store", false).field("index_options", "offsets").field("analyzer", "whitespace").endObject()
.endObject().endObject()
.endObject().endObject().endObject()));
ensureGreen();
@@ -2287,7 +2287,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
public void testPostingsHighlighterShouldFailIfNoOffsets() throws Exception {
assertAcked(prepareCreate("test")
.addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
- .startObject("title").field("type", "string").field("store", "yes").field("index_options", "docs").endObject()
+ .startObject("title").field("type", "string").field("store", true).field("index_options", "docs").endObject()
.endObject().endObject().endObject()));
ensureGreen();
@@ -2349,7 +2349,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
SearchSourceBuilder source = searchSource().query(commonTermsQuery("field2", "quick brown").cutoffFrequency(100))
.highlighter(highlight().field("field2").preTags("<x>").postTags("</x>"));
SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <x>quick</x> <x>brown</x> fox jumps over the lazy dog!"));
}
@@ -2378,6 +2378,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
}
+ @SuppressWarnings("deprecation") // fuzzy queries will be removed in 4.0
public void testPostingsHighlighterFuzzyQuery() throws Exception {
assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping()));
ensureGreen();
@@ -2422,7 +2423,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
source = searchSource().query(wildcardQuery("field2", "qu*k"))
.highlighter(highlight().field("field2"));
searchResponse = client().prepareSearch("test").setSource(source).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <em>quick</em> brown fox jumps over the lazy dog!"));
}
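The `@SuppressWarnings("deprecation")` annotations added in this file and below acknowledge that fuzzy queries are deprecated and slated for removal in 4.0, while the tests still need to exercise them. The pattern, sketched on a hypothetical test method:

    @SuppressWarnings("deprecation") // fuzzy queries will be removed in 4.0
    public void testFuzzyStillWorks() {
        SearchResponse response = client().prepareSearch()
                .setQuery(QueryBuilders.fuzzyQuery("title", "titel1"))
                .get();
        assertHitCount(response, 1L);
    }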
diff --git a/core/src/test/java/org/elasticsearch/search/indicesboost/SimpleIndicesBoostSearchIT.java b/core/src/test/java/org/elasticsearch/search/indicesboost/SimpleIndicesBoostSearchIT.java
index f9245a3d98..606e9a18f2 100644
--- a/core/src/test/java/org/elasticsearch/search/indicesboost/SimpleIndicesBoostSearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/indicesboost/SimpleIndicesBoostSearchIT.java
@@ -63,7 +63,7 @@ public class SimpleIndicesBoostSearchIT extends ESIntegTestCase {
.source(searchSource().explain(true).indexBoost("test1", indexBoost).query(termQuery("test", "value")))
).actionGet();
- assertThat(response.getHits().totalHits(), equalTo(2l));
+ assertThat(response.getHits().totalHits(), equalTo(2L));
logger.info("Hit[0] {} Explanation {}", response.getHits().getAt(0).index(), response.getHits().getAt(0).explanation());
logger.info("Hit[1] {} Explanation {}", response.getHits().getAt(1).index(), response.getHits().getAt(1).explanation());
assertThat(response.getHits().getAt(0).index(), equalTo("test1"));
@@ -75,7 +75,7 @@ public class SimpleIndicesBoostSearchIT extends ESIntegTestCase {
.source(searchSource().explain(true).indexBoost("test2", indexBoost).query(termQuery("test", "value")))
).actionGet();
- assertThat(response.getHits().totalHits(), equalTo(2l));
+ assertThat(response.getHits().totalHits(), equalTo(2L));
logger.info("Hit[0] {} Explanation {}", response.getHits().getAt(0).index(), response.getHits().getAt(0).explanation());
logger.info("Hit[1] {} Explanation {}", response.getHits().getAt(1).index(), response.getHits().getAt(1).explanation());
assertThat(response.getHits().getAt(0).index(), equalTo("test2"));
@@ -89,7 +89,7 @@ public class SimpleIndicesBoostSearchIT extends ESIntegTestCase {
.source(searchSource().explain(true).indexBoost("test1", indexBoost).query(termQuery("test", "value")))
).actionGet();
- assertThat(response.getHits().totalHits(), equalTo(2l));
+ assertThat(response.getHits().totalHits(), equalTo(2L));
logger.info("Hit[0] {} Explanation {}", response.getHits().getAt(0).index(), response.getHits().getAt(0).explanation());
logger.info("Hit[1] {} Explanation {}", response.getHits().getAt(1).index(), response.getHits().getAt(1).explanation());
assertThat(response.getHits().getAt(0).index(), equalTo("test1"));
@@ -101,7 +101,7 @@ public class SimpleIndicesBoostSearchIT extends ESIntegTestCase {
.source(searchSource().explain(true).indexBoost("test2", indexBoost).query(termQuery("test", "value")))
).actionGet();
- assertThat(response.getHits().totalHits(), equalTo(2l));
+ assertThat(response.getHits().totalHits(), equalTo(2L));
logger.info("Hit[0] {} Explanation {}", response.getHits().getAt(0).index(), response.getHits().getAt(0).explanation());
logger.info("Hit[1] {} Explanation {}", response.getHits().getAt(1).index(), response.getHits().getAt(1).explanation());
assertThat(response.getHits().getAt(0).index(), equalTo("test2"));
diff --git a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java
index 1e71b86823..05ff048b42 100644
--- a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java
+++ b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java
@@ -28,6 +28,8 @@ import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.BoolQueryBuilder;
+import org.elasticsearch.index.query.HasChildQueryBuilder;
+import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.support.QueryInnerHits;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.MockScriptEngine;
@@ -126,7 +128,7 @@ public class InnerHitsIT extends ESIntegTestCase {
assertSearchHit(response, 1, hasId("1"));
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
- assertThat(innerHits.totalHits(), equalTo(2l));
+ assertThat(innerHits.totalHits(), equalTo(2L));
assertThat(innerHits.getHits().length, equalTo(2));
assertThat(innerHits.getAt(0).getId(), equalTo("1"));
assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
@@ -158,7 +160,7 @@ public class InnerHitsIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(0).getShard(), notNullValue());
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
- assertThat(innerHits.totalHits(), equalTo(3l));
+ assertThat(innerHits.totalHits(), equalTo(3L));
assertThat(innerHits.getHits().length, equalTo(3));
assertThat(innerHits.getAt(0).getId(), equalTo("2"));
assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
@@ -196,7 +198,7 @@ public class InnerHitsIT extends ESIntegTestCase {
SearchResponse response = client().search(searchRequest).actionGet();
assertNoFailures(response);
SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments");
- assertThat(innerHits.getTotalHits(), equalTo(2l));
+ assertThat(innerHits.getTotalHits(), equalTo(2L));
assertThat(innerHits.getHits().length, equalTo(1));
assertThat(innerHits.getAt(0).getHighlightFields().get("comments.message").getFragments()[0].string(), equalTo("<em>fox</em> eat quick"));
assertThat(innerHits.getAt(0).explanation().toString(), containsString("weight(comments.message:fox in"));
@@ -318,7 +320,7 @@ public class InnerHitsIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
- assertThat(innerHits.totalHits(), equalTo(2l));
+ assertThat(innerHits.totalHits(), equalTo(2L));
assertThat(innerHits.getAt(0).getId(), equalTo("1"));
assertThat(innerHits.getAt(0).type(), equalTo("comment"));
@@ -345,7 +347,7 @@ public class InnerHitsIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
- assertThat(innerHits.totalHits(), equalTo(3l));
+ assertThat(innerHits.totalHits(), equalTo(3L));
assertThat(innerHits.getAt(0).getId(), equalTo("4"));
assertThat(innerHits.getAt(0).type(), equalTo("comment"));
@@ -528,14 +530,14 @@ public class InnerHitsIT extends ESIntegTestCase {
SearchHit searchHit = response.getHits().getAt(0);
assertThat(searchHit.getId(), equalTo("1"));
assertThat(searchHit.getType(), equalTo("answer"));
- assertThat(searchHit.getInnerHits().get("question").getTotalHits(), equalTo(1l));
+ assertThat(searchHit.getInnerHits().get("question").getTotalHits(), equalTo(1L));
assertThat(searchHit.getInnerHits().get("question").getAt(0).getType(), equalTo("question"));
assertThat(searchHit.getInnerHits().get("question").getAt(0).id(), equalTo("1"));
searchHit = response.getHits().getAt(1);
assertThat(searchHit.getId(), equalTo("2"));
assertThat(searchHit.getType(), equalTo("answer"));
- assertThat(searchHit.getInnerHits().get("question").getTotalHits(), equalTo(1l));
+ assertThat(searchHit.getInnerHits().get("question").getTotalHits(), equalTo(1L));
assertThat(searchHit.getInnerHits().get("question").getAt(0).getType(), equalTo("question"));
assertThat(searchHit.getInnerHits().get("question").getAt(0).id(), equalTo("2"));
}
@@ -573,12 +575,12 @@ public class InnerHitsIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
- assertThat(innerHits.totalHits(), equalTo(1l));
+ assertThat(innerHits.totalHits(), equalTo(1L));
assertThat(innerHits.getAt(0).getId(), equalTo("1"));
assertThat(innerHits.getAt(0).type(), equalTo("comment"));
innerHits = innerHits.getAt(0).getInnerHits().get("remark");
- assertThat(innerHits.totalHits(), equalTo(1l));
+ assertThat(innerHits.totalHits(), equalTo(1L));
assertThat(innerHits.getAt(0).getId(), equalTo("1"));
assertThat(innerHits.getAt(0).type(), equalTo("remark"));
@@ -599,12 +601,12 @@ public class InnerHitsIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
- assertThat(innerHits.totalHits(), equalTo(1l));
+ assertThat(innerHits.totalHits(), equalTo(1L));
assertThat(innerHits.getAt(0).getId(), equalTo("2"));
assertThat(innerHits.getAt(0).type(), equalTo("comment"));
innerHits = innerHits.getAt(0).getInnerHits().get("remark");
- assertThat(innerHits.totalHits(), equalTo(1l));
+ assertThat(innerHits.totalHits(), equalTo(1L));
assertThat(innerHits.getAt(0).getId(), equalTo("2"));
assertThat(innerHits.getAt(0).type(), equalTo("remark"));
}
@@ -666,13 +668,13 @@ public class InnerHitsIT extends ESIntegTestCase {
assertSearchHit(response, 1, hasId("1"));
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
- assertThat(innerHits.totalHits(), equalTo(1l));
+ assertThat(innerHits.totalHits(), equalTo(1L));
assertThat(innerHits.getHits().length, equalTo(1));
assertThat(innerHits.getAt(0).getId(), equalTo("1"));
assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
innerHits = innerHits.getAt(0).getInnerHits().get("remark");
- assertThat(innerHits.totalHits(), equalTo(1l));
+ assertThat(innerHits.totalHits(), equalTo(1L));
assertThat(innerHits.getHits().length, equalTo(1));
assertThat(innerHits.getAt(0).getId(), equalTo("1"));
assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
@@ -689,7 +691,7 @@ public class InnerHitsIT extends ESIntegTestCase {
assertSearchHit(response, 1, hasId("2"));
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
innerHits = response.getHits().getAt(0).getInnerHits().get("comments.remarks");
- assertThat(innerHits.totalHits(), equalTo(1l));
+ assertThat(innerHits.totalHits(), equalTo(1L));
assertThat(innerHits.getHits().length, equalTo(1));
assertThat(innerHits.getAt(0).getId(), equalTo("2"));
assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
@@ -712,13 +714,13 @@ public class InnerHitsIT extends ESIntegTestCase {
assertSearchHit(response, 1, hasId("2"));
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
- assertThat(innerHits.totalHits(), equalTo(1l));
+ assertThat(innerHits.totalHits(), equalTo(1L));
assertThat(innerHits.getHits().length, equalTo(1));
assertThat(innerHits.getAt(0).getId(), equalTo("2"));
assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
innerHits = innerHits.getAt(0).getInnerHits().get("remark");
- assertThat(innerHits.totalHits(), equalTo(1l));
+ assertThat(innerHits.totalHits(), equalTo(1L));
assertThat(innerHits.getHits().length, equalTo(1));
assertThat(innerHits.getAt(0).getId(), equalTo("2"));
assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
@@ -744,7 +746,7 @@ public class InnerHitsIT extends ESIntegTestCase {
assertNoFailures(response);
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).id(), equalTo("1"));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1l));
+ assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1L));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).id(), equalTo("1"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0));
@@ -760,7 +762,7 @@ public class InnerHitsIT extends ESIntegTestCase {
.startObject("comments")
.field("type", "nested")
.startObject("properties")
- .startObject("message").field("type", "string").field("store", "yes").endObject()
+ .startObject("message").field("type", "string").field("store", true).endObject()
.endObject()
.endObject()
.endObject()
@@ -781,7 +783,7 @@ public class InnerHitsIT extends ESIntegTestCase {
assertNoFailures(response);
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).id(), equalTo("1"));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1l));
+ assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1L));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).id(), equalTo("1"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0));
@@ -798,7 +800,7 @@ public class InnerHitsIT extends ESIntegTestCase {
.startObject("comments")
.field("type", "nested")
.startObject("properties")
- .startObject("message").field("type", "string").field("store", "yes").endObject()
+ .startObject("message").field("type", "string").field("store", true).endObject()
.endObject()
.endObject()
.endObject()
@@ -820,7 +822,7 @@ public class InnerHitsIT extends ESIntegTestCase {
assertNoFailures(response);
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).id(), equalTo("1"));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1l));
+ assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1L));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).id(), equalTo("1"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0));
@@ -836,7 +838,7 @@ public class InnerHitsIT extends ESIntegTestCase {
.startObject("comments")
.field("type", "nested")
.startObject("properties")
- .startObject("message").field("type", "string").field("store", "yes").endObject()
+ .startObject("message").field("type", "string").field("store", true).endObject()
.endObject()
.endObject()
.endObject()
@@ -859,7 +861,7 @@ public class InnerHitsIT extends ESIntegTestCase {
assertNoFailures(response);
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).id(), equalTo("1"));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1l));
+ assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1L));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).id(), equalTo("1"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0));
@@ -875,7 +877,7 @@ public class InnerHitsIT extends ESIntegTestCase {
.startObject("comments")
.field("type", "nested")
.startObject("properties")
- .startObject("message").field("type", "string").field("store", "yes").endObject()
+ .startObject("message").field("type", "string").field("store", true).endObject()
.endObject()
.endObject()
.endObject()
@@ -897,7 +899,7 @@ public class InnerHitsIT extends ESIntegTestCase {
assertNoFailures(response);
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).id(), equalTo("1"));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1l));
+ assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1L));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).id(), equalTo("1"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0));
@@ -938,7 +940,7 @@ public class InnerHitsIT extends ESIntegTestCase {
assertNoFailures(response);
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).id(), equalTo("1"));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getTotalHits(), equalTo(1l));
+ assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getTotalHits(), equalTo(1L));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).id(), equalTo("1"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getOffset(), equalTo(0));
@@ -950,7 +952,7 @@ public class InnerHitsIT extends ESIntegTestCase {
assertNoFailures(response);
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).id(), equalTo("1"));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getTotalHits(), equalTo(1l));
+ assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getTotalHits(), equalTo(1L));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).id(), equalTo("1"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getOffset(), equalTo(1));
@@ -969,7 +971,7 @@ public class InnerHitsIT extends ESIntegTestCase {
assertNoFailures(response);
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).id(), equalTo("1"));
- assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getTotalHits(), equalTo(1l));
+ assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getTotalHits(), equalTo(1L));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).id(), equalTo("1"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages"));
assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getOffset(), equalTo(0));
@@ -1022,34 +1024,34 @@ public class InnerHitsIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(0).getId(), equalTo("duke"));
SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("earls");
- assertThat(innerHits.getTotalHits(), equalTo(4l));
+ assertThat(innerHits.getTotalHits(), equalTo(4L));
assertThat(innerHits.getAt(0).getId(), equalTo("earl1"));
assertThat(innerHits.getAt(1).getId(), equalTo("earl2"));
assertThat(innerHits.getAt(2).getId(), equalTo("earl3"));
assertThat(innerHits.getAt(3).getId(), equalTo("earl4"));
SearchHits innerInnerHits = innerHits.getAt(0).getInnerHits().get("barons");
- assertThat(innerInnerHits.totalHits(), equalTo(1l));
+ assertThat(innerInnerHits.totalHits(), equalTo(1L));
assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron1"));
innerInnerHits = innerHits.getAt(1).getInnerHits().get("barons");
- assertThat(innerInnerHits.totalHits(), equalTo(1l));
+ assertThat(innerInnerHits.totalHits(), equalTo(1L));
assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron2"));
innerInnerHits = innerHits.getAt(2).getInnerHits().get("barons");
- assertThat(innerInnerHits.totalHits(), equalTo(1l));
+ assertThat(innerInnerHits.totalHits(), equalTo(1L));
assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron3"));
innerInnerHits = innerHits.getAt(3).getInnerHits().get("barons");
- assertThat(innerInnerHits.totalHits(), equalTo(1l));
+ assertThat(innerInnerHits.totalHits(), equalTo(1L));
assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron4"));
innerHits = response.getHits().getAt(0).getInnerHits().get("princes");
- assertThat(innerHits.getTotalHits(), equalTo(1l));
+ assertThat(innerHits.getTotalHits(), equalTo(1L));
assertThat(innerHits.getAt(0).getId(), equalTo("prince"));
innerInnerHits = innerHits.getAt(0).getInnerHits().get("kings");
- assertThat(innerInnerHits.totalHits(), equalTo(1l));
+ assertThat(innerInnerHits.totalHits(), equalTo(1L));
assertThat(innerInnerHits.getAt(0).getId(), equalTo("king"));
}
@@ -1130,7 +1132,7 @@ public class InnerHitsIT extends ESIntegTestCase {
assertAllSuccessful(searchResponse);
assertThat(searchResponse.getHits().totalHits(), equalTo((long) numDocs));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("0"));
- assertThat(searchResponse.getHits().getAt(0).getInnerHits().get("nested1").getTotalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().getAt(0).getInnerHits().get("nested1").getTotalHits(), equalTo(2L));
assertThat(searchResponse.getHits().getAt(0).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(0).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test1"));
assertThat(searchResponse.getHits().getAt(0).getInnerHits().get("nested1").getAt(1).getMatchedQueries().length, equalTo(1));
@@ -1138,13 +1140,13 @@ public class InnerHitsIT extends ESIntegTestCase {
assertThat(searchResponse.getHits().getAt(1).id(), equalTo("1"));
- assertThat(searchResponse.getHits().getAt(1).getInnerHits().get("nested1").getTotalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().getAt(1).getInnerHits().get("nested1").getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(1).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(1).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test2"));
for (int i = 2; i < numDocs; i++) {
assertThat(searchResponse.getHits().getAt(i).id(), equalTo(String.valueOf(i)));
- assertThat(searchResponse.getHits().getAt(i).getInnerHits().get("nested1").getTotalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().getAt(i).getInnerHits().get("nested1").getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(i).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1));
assertThat(searchResponse.getHits().getAt(i).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test3"));
}
@@ -1166,12 +1168,12 @@ public class InnerHitsIT extends ESIntegTestCase {
.get();
assertHitCount(response, 2);
assertThat(response.getHits().getAt(0).id(), equalTo("1"));
- assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits(), equalTo(1l));
+ assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits(), equalTo(1L));
assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1));
assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1"));
assertThat(response.getHits().getAt(1).id(), equalTo("2"));
- assertThat(response.getHits().getAt(1).getInnerHits().get("child").getTotalHits(), equalTo(1l));
+ assertThat(response.getHits().getAt(1).getInnerHits().get("child").getTotalHits(), equalTo(1L));
assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1));
assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1"));
@@ -1181,7 +1183,7 @@ public class InnerHitsIT extends ESIntegTestCase {
.get();
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).id(), equalTo("1"));
- assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits(), equalTo(1l));
+ assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits(), equalTo(1L));
assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1));
assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name2"));
}
@@ -1219,4 +1221,25 @@ public class InnerHitsIT extends ESIntegTestCase {
assertHitCount(response, 1);
}
+ public void testTopLevelInnerHitsWithQueryInnerHits() throws Exception {
+ // top level inner hits shouldn't overwrite query inner hits definitions
+
+ assertAcked(prepareCreate("index1").addMapping("child", "_parent", "type=parent"));
+ List<IndexRequestBuilder> requests = new ArrayList<>();
+ requests.add(client().prepareIndex("index1", "parent", "1").setSource("{}"));
+ requests.add(client().prepareIndex("index1", "child", "2").setParent("1").setSource("{}"));
+ indexRandom(true, requests);
+
+ InnerHitsBuilder innerHitsBuilder = new InnerHitsBuilder();
+ innerHitsBuilder.addParentChildInnerHits("my-inner-hit", "child", new InnerHitsBuilder.InnerHit());
+ SearchResponse response = client().prepareSearch("index1")
+ .setQuery(hasChildQuery("child", new MatchAllQueryBuilder()).innerHit(new QueryInnerHits()))
+ .innerHits(innerHitsBuilder)
+ .get();
+ assertHitCount(response, 1);
+ assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(2));
+ assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getId(), equalTo("2"));
+ assertThat(response.getHits().getAt(0).getInnerHits().get("my-inner-hit").getAt(0).getId(), equalTo("2"));
+ }
+
}
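The new test pins down the contract that top-level inner hits registered through `InnerHitsBuilder` and inner hits attached directly to a query must coexist rather than overwrite each other: both `child` (from the query) and `my-inner-hit` (from the top-level builder) appear on the same hit. Condensed usage, following the test above:

    InnerHitsBuilder topLevel = new InnerHitsBuilder();
    topLevel.addParentChildInnerHits("my-inner-hit", "child", new InnerHitsBuilder.InnerHit());
    SearchResponse response = client().prepareSearch("index1")
            .setQuery(hasChildQuery("child", new MatchAllQueryBuilder())
                    .innerHit(new QueryInnerHits()))   // query-level definition
            .innerHits(topLevel)                       // top-level definition
            .get();
    // the hit now exposes both the "child" and "my-inner-hit" entries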
diff --git a/core/src/test/java/org/elasticsearch/search/internal/DefaultSearchContextTests.java b/core/src/test/java/org/elasticsearch/search/internal/DefaultSearchContextTests.java
new file mode 100644
index 0000000000..d8fe2308bc
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/search/internal/DefaultSearchContextTests.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.internal;
+
+import org.apache.lucene.queries.TermsQuery;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.lucene.search.Queries;
+import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
+import org.elasticsearch.test.ESTestCase;
+
+import static org.apache.lucene.search.BooleanClause.Occur.FILTER;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.nullValue;
+
+public class DefaultSearchContextTests extends ESTestCase {
+
+ public void testCreateSearchFilter() {
+ Query searchFilter = DefaultSearchContext.createSearchFilter(new String[]{"type1", "type2"}, null, randomBoolean());
+ Query expectedQuery = new BooleanQuery.Builder()
+ .add(new TermsQuery(TypeFieldMapper.NAME, new BytesRef("type1"), new BytesRef("type2")), FILTER)
+ .build();
+ assertThat(searchFilter, equalTo(expectedQuery));
+
+ searchFilter = DefaultSearchContext.createSearchFilter(new String[]{"type1", "type2"}, new MatchAllDocsQuery(), randomBoolean());
+ expectedQuery = new BooleanQuery.Builder()
+ .add(new TermsQuery(TypeFieldMapper.NAME, new BytesRef("type1"), new BytesRef("type2")), FILTER)
+ .add(new MatchAllDocsQuery(), FILTER)
+ .build();
+ assertThat(searchFilter, equalTo(expectedQuery));
+
+ searchFilter = DefaultSearchContext.createSearchFilter(null, null, false);
+ assertThat(searchFilter, nullValue());
+
+ searchFilter = DefaultSearchContext.createSearchFilter(null, null, true);
+ expectedQuery = new BooleanQuery.Builder().add(Queries.newNonNestedFilter(), FILTER).build();
+ assertThat(searchFilter, equalTo(expectedQuery));
+
+ searchFilter = DefaultSearchContext.createSearchFilter(null, new MatchAllDocsQuery(), true);
+ expectedQuery = new BooleanQuery.Builder()
+ .add(new MatchAllDocsQuery(), FILTER)
+ .add(Queries.newNonNestedFilter(), FILTER)
+ .build();
+ assertThat(searchFilter, equalTo(expectedQuery));
+
+ searchFilter = DefaultSearchContext.createSearchFilter(null, new MatchAllDocsQuery(), false);
+ expectedQuery = new BooleanQuery.Builder()
+ .add(new MatchAllDocsQuery(), FILTER)
+ .build();
+ assertThat(searchFilter, equalTo(expectedQuery));
+ }
+
+}
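Read together, the assertions above imply the shape of `DefaultSearchContext.createSearchFilter`: type names become a `TermsQuery` on the `_type` field, an alias filter and a non-nested filter are AND-ed in as `FILTER` clauses, and the method returns `null` when nothing applies. A rough reconstruction inferred from the test, not the actual implementation:

    static Query createSearchFilterSketch(String[] types, Query aliasFilter, boolean hasNestedMappings) {
        BooleanQuery.Builder bq = new BooleanQuery.Builder();
        boolean empty = true;
        if (types != null) {
            BytesRef[] typeTerms = new BytesRef[types.length];
            for (int i = 0; i < types.length; i++) {
                typeTerms[i] = new BytesRef(types[i]);
            }
            bq.add(new TermsQuery(TypeFieldMapper.NAME, typeTerms), FILTER);
            empty = false;
        }
        if (aliasFilter != null) {              // e.g. the MatchAllDocsQuery in the test
            bq.add(aliasFilter, FILTER);
            empty = false;
        }
        if (hasNestedMappings) {                // exclude nested docs from top-level hits
            bq.add(Queries.newNonNestedFilter(), FILTER);
            empty = false;
        }
        return empty ? null : bq.build();
    }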
diff --git a/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java b/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java
index 0525fd28db..77fc2f0e6a 100644
--- a/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java
+++ b/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java
@@ -22,6 +22,7 @@ package org.elasticsearch.search.internal;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.InputStreamStreamInput;
import org.elasticsearch.common.text.Text;
+import org.elasticsearch.index.Index;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.test.ESTestCase;
@@ -36,7 +37,7 @@ import static org.hamcrest.Matchers.nullValue;
public class InternalSearchHitTests extends ESTestCase {
public void testSerializeShardTarget() throws Exception {
- SearchShardTarget target = new SearchShardTarget("_node_id", "_index", 0);
+ SearchShardTarget target = new SearchShardTarget("_node_id", new Index("_index", "_na_"), 0);
Map<String, InternalSearchHits> innerHits = new HashMap<>();
InternalSearchHit innerHit1 = new InternalSearchHit(0, "_id", new Text("_type"), null);
diff --git a/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java b/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java
index d8c16282e1..f5427ca677 100644
--- a/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java
+++ b/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java
@@ -54,7 +54,7 @@ public class MatchedQueriesIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch()
.setQuery(boolQuery().must(matchAllQuery()).filter(boolQuery().should(rangeQuery("number").lte(2).queryName("test1")).should(rangeQuery("number").gt(2).queryName("test2")))).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
for (SearchHit hit : searchResponse.getHits()) {
if (hit.id().equals("1") || hit.id().equals("2")) {
assertThat(hit.matchedQueries().length, equalTo(1));
@@ -69,7 +69,7 @@ public class MatchedQueriesIT extends ESIntegTestCase {
searchResponse = client().prepareSearch()
.setQuery(boolQuery().should(rangeQuery("number").lte(2).queryName("test1")).should(rangeQuery("number").gt(2).queryName("test2"))).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
for (SearchHit hit : searchResponse.getHits()) {
if (hit.id().equals("1") || hit.id().equals("2")) {
assertThat(hit.matchedQueries().length, equalTo(1));
@@ -97,7 +97,7 @@ public class MatchedQueriesIT extends ESIntegTestCase {
.setPostFilter(boolQuery().should(
termQuery("name", "test").queryName("name")).should(
termQuery("title", "title1").queryName("title"))).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
for (SearchHit hit : searchResponse.getHits()) {
if (hit.id().equals("1")) {
assertThat(hit.matchedQueries().length, equalTo(2));
@@ -117,7 +117,7 @@ public class MatchedQueriesIT extends ESIntegTestCase {
.should(termQuery("name", "test").queryName("name"))
.should(termQuery("title", "title1").queryName("title"))).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
for (SearchHit hit : searchResponse.getHits()) {
if (hit.id().equals("1")) {
assertThat(hit.matchedQueries().length, equalTo(2));
@@ -144,7 +144,7 @@ public class MatchedQueriesIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch()
.setQuery(boolQuery().must(matchAllQuery()).filter(termsQuery("title", "title1", "title2", "title3").queryName("title")))
.setPostFilter(termQuery("name", "test").queryName("name")).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
for (SearchHit hit : searchResponse.getHits()) {
if (hit.id().equals("1") || hit.id().equals("2") || hit.id().equals("3")) {
assertThat(hit.matchedQueries().length, equalTo(2));
@@ -158,7 +158,7 @@ public class MatchedQueriesIT extends ESIntegTestCase {
searchResponse = client().prepareSearch()
.setQuery(termsQuery("title", "title1", "title2", "title3").queryName("title"))
.setPostFilter(matchQuery("name", "test").queryName("name")).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
for (SearchHit hit : searchResponse.getHits()) {
if (hit.id().equals("1") || hit.id().equals("2") || hit.id().equals("3")) {
assertThat(hit.matchedQueries().length, equalTo(2));
@@ -185,7 +185,7 @@ public class MatchedQueriesIT extends ESIntegTestCase {
indicesQuery(termQuery("title", "title1").queryName("title1"), "test1")
.noMatchQuery(termQuery("title", "title2").queryName("title2")).queryName("indices_filter")).should(
termQuery("title", "title3").queryName("title3")).queryName("or"))).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
for (SearchHit hit : searchResponse.getHits()) {
if (hit.id().equals("1")) {
@@ -217,7 +217,7 @@ public class MatchedQueriesIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch()
.setQuery(QueryBuilders.regexpQuery("title", "title1").queryName("regex")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
for (SearchHit hit : searchResponse.getHits()) {
if (hit.id().equals("1")) {
@@ -238,7 +238,7 @@ public class MatchedQueriesIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch()
.setQuery(QueryBuilders.prefixQuery("title", "title").queryName("prefix")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
for (SearchHit hit : searchResponse.getHits()) {
if (hit.id().equals("1")) {
@@ -250,6 +250,7 @@ public class MatchedQueriesIT extends ESIntegTestCase {
}
}
+ @SuppressWarnings("deprecation") // fuzzy queries will be removed in 4.0
public void testFuzzyQuerySupportsName() {
createIndex("test1");
ensureGreen();
@@ -259,7 +260,7 @@ public class MatchedQueriesIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch()
.setQuery(QueryBuilders.fuzzyQuery("title", "titel1").queryName("fuzzy")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
for (SearchHit hit : searchResponse.getHits()) {
if (hit.id().equals("1")) {
@@ -280,7 +281,7 @@ public class MatchedQueriesIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch()
.setQuery(QueryBuilders.wildcardQuery("title", "titl*").queryName("wildcard")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
for (SearchHit hit : searchResponse.getHits()) {
if (hit.id().equals("1")) {
@@ -301,7 +302,7 @@ public class MatchedQueriesIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch()
.setQuery(QueryBuilders.spanFirstQuery(QueryBuilders.spanTermQuery("title", "title1"), 10).queryName("span")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
for (SearchHit hit : searchResponse.getHits()) {
if (hit.id().equals("1")) {
@@ -337,7 +338,7 @@ public class MatchedQueriesIT extends ESIntegTestCase {
.setPreference("_primary")
.get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
for (SearchHit hit : searchResponse.getHits()) {
if (hit.id().equals("1")) {
assertThat(hit.matchedQueries().length, equalTo(1));
@@ -367,7 +368,7 @@ public class MatchedQueriesIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch()
.setQuery(query)
.get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("abc"));
}
}
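The bulk of the hunks above and below replace Java's lowercase long-literal suffix with the uppercase form. A minimal, self-contained sketch of why the uppercase suffix is preferred (class and variable names here are invented purely for illustration):

    public class LongLiteralDemo {
        public static void main(String[] args) {
            long looksLikeEleven = 1l; // lowercase 'l' is easily misread as the digit '1'
            long clearlyLong = 1L;     // uppercase 'L' makes the long suffix unambiguous
            System.out.println(looksLikeEleven == clearlyLong); // prints: true
        }
    }

Both spellings compile to the same value, so the change is purely about readability and carries no behavioral risk.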
diff --git a/core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java b/core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java
index ae163eaf4a..40608134fc 100644
--- a/core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java
+++ b/core/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java
@@ -79,7 +79,7 @@ public class MoreLikeThisIT extends ESIntegTestCase {
logger.info("Running moreLikeThis");
SearchResponse response = client().prepareSearch().setQuery(
new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type1", "1")}).minTermFreq(1).minDocFreq(1)).get();
- assertHitCount(response, 1l);
+ assertHitCount(response, 1L);
}
public void testSimpleMoreLikeOnLongField() throws Exception {
@@ -89,7 +89,7 @@ public class MoreLikeThisIT extends ESIntegTestCase {
assertThat(ensureGreen(), equalTo(ClusterHealthStatus.GREEN));
logger.info("Indexing...");
- client().index(indexRequest("test").type("type1").id("1").source(jsonBuilder().startObject().field("some_long", 1367484649580l).endObject())).actionGet();
+ client().index(indexRequest("test").type("type1").id("1").source(jsonBuilder().startObject().field("some_long", 1367484649580L).endObject())).actionGet();
client().index(indexRequest("test").type("type2").id("2").source(jsonBuilder().startObject().field("some_long", 0).endObject())).actionGet();
client().index(indexRequest("test").type("type1").id("3").source(jsonBuilder().startObject().field("some_long", -666).endObject())).actionGet();
@@ -98,7 +98,7 @@ public class MoreLikeThisIT extends ESIntegTestCase {
logger.info("Running moreLikeThis");
SearchResponse response = client().prepareSearch().setQuery(
new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type1", "1")}).minTermFreq(1).minDocFreq(1)).get();
- assertHitCount(response, 0l);
+ assertHitCount(response, 0L);
}
public void testMoreLikeThisWithAliases() throws Exception {
@@ -124,24 +124,24 @@ public class MoreLikeThisIT extends ESIntegTestCase {
logger.info("Running moreLikeThis on index");
SearchResponse response = client().prepareSearch().setQuery(
new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type1", "1")}).minTermFreq(1).minDocFreq(1)).get();
- assertHitCount(response, 2l);
+ assertHitCount(response, 2L);
logger.info("Running moreLikeThis on beta shard");
response = client().prepareSearch("beta").setQuery(
new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type1", "1")}).minTermFreq(1).minDocFreq(1)).get();
- assertHitCount(response, 1l);
+ assertHitCount(response, 1L);
assertThat(response.getHits().getAt(0).id(), equalTo("3"));
logger.info("Running moreLikeThis on release shard");
response = client().prepareSearch("release").setQuery(
new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type1", "1")}).minTermFreq(1).minDocFreq(1)).get();
- assertHitCount(response, 1l);
+ assertHitCount(response, 1L);
assertThat(response.getHits().getAt(0).id(), equalTo("2"));
logger.info("Running moreLikeThis on alias with node client");
response = internalCluster().clientNodeClient().prepareSearch("beta").setQuery(
new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type1", "1")}).minTermFreq(1).minDocFreq(1)).get();
- assertHitCount(response, 1l);
+ assertHitCount(response, 1L);
assertThat(response.getHits().getAt(0).id(), equalTo("3"));
}
@@ -234,7 +234,7 @@ public class MoreLikeThisIT extends ESIntegTestCase {
// Implicit list of fields -> ignore numeric fields
SearchResponse searchResponse = client().prepareSearch().setQuery(
new MoreLikeThisQueryBuilder(null, new Item[] {new Item("test", "type", "1")}).minTermFreq(1).minDocFreq(1)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
// Explicit list of fields including numeric fields -> fail
assertThrows(client().prepareSearch().setQuery(
@@ -242,11 +242,11 @@ public class MoreLikeThisIT extends ESIntegTestCase {
// mlt query with no field -> OK
searchResponse = client().prepareSearch().setQuery(moreLikeThisQuery(new String[] {"index"}).minTermFreq(1).minDocFreq(1)).execute().actionGet();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
// mlt query with string fields
searchResponse = client().prepareSearch().setQuery(moreLikeThisQuery(new String[]{"string_value"}, new String[] {"index"}, null).minTermFreq(1).minDocFreq(1)).execute().actionGet();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
// mlt query with at least a numeric field -> fail by default
assertThrows(client().prepareSearch().setQuery(moreLikeThisQuery(new String[] {"string_value", "int_value"}, new String[] {"index"}, null)), SearchPhaseExecutionException.class);
@@ -257,7 +257,7 @@ public class MoreLikeThisIT extends ESIntegTestCase {
// mlt query with at least a numeric field but fail_on_unsupported_field set to false
searchResponse = client().prepareSearch().setQuery(moreLikeThisQuery(new String[] {"string_value", "int_value"}, new String[] {"index"}, null).minTermFreq(1).minDocFreq(1).failOnUnsupportedField(false)).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
// mlt field query on a numeric field -> failure by default
assertThrows(client().prepareSearch().setQuery(moreLikeThisQuery(new String[] {"int_value"}, new String[] {"42"}, null).minTermFreq(1).minDocFreq(1)), SearchPhaseExecutionException.class);
@@ -268,7 +268,7 @@ public class MoreLikeThisIT extends ESIntegTestCase {
// mlt field query on a numeric field but fail_on_unsupported_field set to false
searchResponse = client().prepareSearch().setQuery(moreLikeThisQuery(new String[] {"int_value"}, new String[] {"42"}, null).minTermFreq(1).minDocFreq(1).failOnUnsupportedField(false)).execute().actionGet();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
}
public void testSimpleMoreLikeInclude() throws Exception {
@@ -327,7 +327,7 @@ public class MoreLikeThisIT extends ESIntegTestCase {
logger.info("Running MoreLikeThis");
MoreLikeThisQueryBuilder queryBuilder = QueryBuilders.moreLikeThisQuery(new String[] {"text"}, null, ids("1")).include(true).minTermFreq(1).minDocFreq(1);
SearchResponse mltResponse = client().prepareSearch().setTypes("type1").setQuery(queryBuilder).execute().actionGet();
- assertHitCount(mltResponse, 3l);
+ assertHitCount(mltResponse, 3L);
}
public void testSimpleMoreLikeThisIdsMultipleTypes() throws Exception {
diff --git a/core/src/test/java/org/elasticsearch/search/msearch/SimpleMultiSearchIT.java b/core/src/test/java/org/elasticsearch/search/msearch/SimpleMultiSearchIT.java
index c7454a5c8b..d3ee811be2 100644
--- a/core/src/test/java/org/elasticsearch/search/msearch/SimpleMultiSearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/msearch/SimpleMultiSearchIT.java
@@ -48,9 +48,9 @@ public class SimpleMultiSearchIT extends ESIntegTestCase {
assertNoFailures(item.getResponse());
}
assertThat(response.getResponses().length, equalTo(3));
- assertHitCount(response.getResponses()[0].getResponse(), 1l);
- assertHitCount(response.getResponses()[1].getResponse(), 1l);
- assertHitCount(response.getResponses()[2].getResponse(), 2l);
+ assertHitCount(response.getResponses()[0].getResponse(), 1L);
+ assertHitCount(response.getResponses()[1].getResponse(), 1L);
+ assertHitCount(response.getResponses()[2].getResponse(), 2L);
assertFirstHit(response.getResponses()[0].getResponse(), hasId("1"));
assertFirstHit(response.getResponses()[1].getResponse(), hasId("2"));
}
diff --git a/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java b/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java
index fd9ee9a3f1..f9bae6a1c9 100644
--- a/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java
+++ b/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java
@@ -59,9 +59,9 @@ public class SimpleNestedIT extends ESIntegTestCase {
// check on no data, see that it works
SearchResponse searchResponse = client().prepareSearch("test").setQuery(termQuery("_all", "n_value1_1")).execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(0l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(0L));
searchResponse = client().prepareSearch("test").setQuery(termQuery("n_field1", "n_value1_1")).execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(0l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(0L));
client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
.field("field1", "value1")
@@ -89,24 +89,24 @@ public class SimpleNestedIT extends ESIntegTestCase {
// check that _all is working on nested docs
searchResponse = client().prepareSearch("test").setQuery(termQuery("_all", "n_value1_1")).execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch("test").setQuery(termQuery("n_field1", "n_value1_1")).execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(0l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(0L));
// search for something that matches the nested doc, and see that we don't find the nested doc
searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).get();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch("test").setQuery(termQuery("n_field1", "n_value1_1")).get();
- assertThat(searchResponse.getHits().totalHits(), equalTo(0l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(0L));
// now, do a nested query
searchResponse = client().prepareSearch("test").setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"))).get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch("test").setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"))).setSearchType(SearchType.DFS_QUERY_THEN_FETCH).get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
// add another doc, one that would match if it was not nested...
@@ -131,19 +131,19 @@ public class SimpleNestedIT extends ESIntegTestCase {
searchResponse = client().prepareSearch("test").setQuery(nestedQuery("nested1",
boolQuery().must(termQuery("nested1.n_field1", "n_value1_1")).must(termQuery("nested1.n_field2", "n_value2_1")))).execute().actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
// filter
searchResponse = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).mustNot(nestedQuery("nested1",
boolQuery().must(termQuery("nested1.n_field1", "n_value1_1")).must(termQuery("nested1.n_field2", "n_value2_1"))))).execute().actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
// check with type prefix
searchResponse = client().prepareSearch("test").setQuery(nestedQuery("nested1",
boolQuery().must(termQuery("nested1.n_field1", "n_value1_1")).must(termQuery("nested1.n_field2", "n_value2_1")))).execute().actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
// check delete, so all is gone...
DeleteResponse deleteResponse = client().prepareDelete("test", "type1", "2").execute().actionGet();
@@ -155,11 +155,11 @@ public class SimpleNestedIT extends ESIntegTestCase {
searchResponse = client().prepareSearch("test").setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"))).execute().actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch("test").setTypes("type1", "type2").setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"))).execute().actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
}
public void testMultiNested() throws Exception {
@@ -193,42 +193,42 @@ public class SimpleNestedIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("test").setQuery(nestedQuery("nested1",
termQuery("nested1.field1", "1"))).execute().actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch("test").setQuery(nestedQuery("nested1.nested2",
termQuery("nested1.nested2.field2", "2"))).execute().actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch("test").setQuery(nestedQuery("nested1",
boolQuery().must(termQuery("nested1.field1", "1")).must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "2"))))).execute().actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch("test").setQuery(nestedQuery("nested1",
boolQuery().must(termQuery("nested1.field1", "1")).must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "3"))))).execute().actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch("test").setQuery(nestedQuery("nested1",
boolQuery().must(termQuery("nested1.field1", "1")).must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "4"))))).execute().actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(0l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(0L));
searchResponse = client().prepareSearch("test").setQuery(nestedQuery("nested1",
boolQuery().must(termQuery("nested1.field1", "1")).must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "5"))))).execute().actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(0l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(0L));
searchResponse = client().prepareSearch("test").setQuery(nestedQuery("nested1",
boolQuery().must(termQuery("nested1.field1", "4")).must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "5"))))).execute().actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch("test").setQuery(nestedQuery("nested1",
boolQuery().must(termQuery("nested1.field1", "4")).must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "2"))))).execute().actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(0l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(0L));
}
// When IncludeNestedDocsQuery is wrapped in a FilteredQuery, an infinite loop occurs because of a bug in IncludeNestedDocsQuery#advance()
@@ -314,7 +314,7 @@ public class SimpleNestedIT extends ESIntegTestCase {
.setExplain(true)
.execute().actionGet();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
Explanation explanation = searchResponse.getHits().hits()[0].explanation();
assertThat(explanation.getValue(), equalTo(2f));
assertThat(explanation.toString(), startsWith("2.0 = sum of:\n 2.0 = Score based on child doc range from 0 to 1\n"));
@@ -337,7 +337,7 @@ public class SimpleNestedIT extends ESIntegTestCase {
.startObject("properties")
.startObject("field1")
.field("type", "long")
- .field("store", "yes")
+ .field("store", true)
.endObject()
.endObject()
.endObject()
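This hunk also switches the mapping's store flag from the legacy string form to a boolean. A sketch of the resulting jsonBuilder call as it would sit in a test method declared throws Exception (the field name mirrors the hunk above; assumes the usual imports, org.elasticsearch.common.xcontent.XContentBuilder and the static XContentFactory.jsonBuilder):

    XContentBuilder mapping = jsonBuilder().startObject()
            .startObject("properties")
                .startObject("field1")
                    .field("type", "long")
                    .field("store", true) // boolean replaces the legacy "yes"/"no" strings
                .endObject()
            .endObject()
        .endObject();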
@@ -531,96 +531,96 @@ public class SimpleNestedIT extends ESIntegTestCase {
// sum: 11
client().prepareIndex("test", "type1", Integer.toString(1)).setSource(jsonBuilder().startObject()
- .field("grand_parent_values", 1l)
+ .field("grand_parent_values", 1L)
.startObject("parent")
.field("filter", false)
- .field("parent_values", 1l)
+ .field("parent_values", 1L)
.startObject("child")
.field("filter", true)
- .field("child_values", 1l)
+ .field("child_values", 1L)
.startObject("child_obj")
- .field("value", 1l)
+ .field("value", 1L)
.endObject()
.endObject()
.startObject("child")
.field("filter", false)
- .field("child_values", 6l)
+ .field("child_values", 6L)
.endObject()
.endObject()
.startObject("parent")
.field("filter", true)
- .field("parent_values", 2l)
+ .field("parent_values", 2L)
.startObject("child")
.field("filter", false)
- .field("child_values", -1l)
+ .field("child_values", -1L)
.endObject()
.startObject("child")
.field("filter", false)
- .field("child_values", 5l)
+ .field("child_values", 5L)
.endObject()
.endObject()
.endObject()).execute().actionGet();
// sum: 7
client().prepareIndex("test", "type1", Integer.toString(2)).setSource(jsonBuilder().startObject()
- .field("grand_parent_values", 2l)
+ .field("grand_parent_values", 2L)
.startObject("parent")
.field("filter", false)
- .field("parent_values", 2l)
+ .field("parent_values", 2L)
.startObject("child")
.field("filter", true)
- .field("child_values", 2l)
+ .field("child_values", 2L)
.startObject("child_obj")
- .field("value", 2l)
+ .field("value", 2L)
.endObject()
.endObject()
.startObject("child")
.field("filter", false)
- .field("child_values", 4l)
+ .field("child_values", 4L)
.endObject()
.endObject()
.startObject("parent")
- .field("parent_values", 3l)
+ .field("parent_values", 3L)
.field("filter", true)
.startObject("child")
- .field("child_values", -2l)
+ .field("child_values", -2L)
.field("filter", false)
.endObject()
.startObject("child")
.field("filter", false)
- .field("child_values", 3l)
+ .field("child_values", 3L)
.endObject()
.endObject()
.endObject()).execute().actionGet();
// sum: 2
client().prepareIndex("test", "type1", Integer.toString(3)).setSource(jsonBuilder().startObject()
- .field("grand_parent_values", 3l)
+ .field("grand_parent_values", 3L)
.startObject("parent")
- .field("parent_values", 3l)
+ .field("parent_values", 3L)
.field("filter", false)
.startObject("child")
.field("filter", true)
- .field("child_values", 3l)
+ .field("child_values", 3L)
.startObject("child_obj")
- .field("value", 3l)
+ .field("value", 3L)
.endObject()
.endObject()
.startObject("child")
.field("filter", false)
- .field("child_values", 1l)
+ .field("child_values", 1L)
.endObject()
.endObject()
.startObject("parent")
- .field("parent_values", 4l)
+ .field("parent_values", 4L)
.field("filter", true)
.startObject("child")
.field("filter", false)
- .field("child_values", -3l)
+ .field("child_values", -3L)
.endObject()
.startObject("child")
.field("filter", false)
- .field("child_values", 1l)
+ .field("child_values", 1L)
.endObject()
.endObject()
.endObject()).execute().actionGet();
@@ -1019,7 +1019,7 @@ public class SimpleNestedIT extends ESIntegTestCase {
// No nested mapping yet, there shouldn't be anything in the fixed bit set cache
ClusterStatsResponse clusterStatsResponse = client().admin().cluster().prepareClusterStats().get();
- assertThat(clusterStatsResponse.getIndicesStats().getSegments().getBitsetMemoryInBytes(), equalTo(0l));
+ assertThat(clusterStatsResponse.getIndicesStats().getSegments().getBitsetMemoryInBytes(), equalTo(0L));
// Now add nested mapping
assertAcked(
@@ -1040,21 +1040,21 @@ public class SimpleNestedIT extends ESIntegTestCase {
if (loadFixedBitSeLazily) {
clusterStatsResponse = client().admin().cluster().prepareClusterStats().get();
- assertThat(clusterStatsResponse.getIndicesStats().getSegments().getBitsetMemoryInBytes(), equalTo(0l));
+ assertThat(clusterStatsResponse.getIndicesStats().getSegments().getBitsetMemoryInBytes(), equalTo(0L));
// the fixed bitsets are loaded only when querying with a nested query
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(nestedQuery("array1", termQuery("array1.field1", "value1")))
.get();
assertNoFailures(searchResponse);
- assertThat(searchResponse.getHits().totalHits(), equalTo(5l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(5L));
}
clusterStatsResponse = client().admin().cluster().prepareClusterStats().get();
- assertThat(clusterStatsResponse.getIndicesStats().getSegments().getBitsetMemoryInBytes(), greaterThan(0l));
+ assertThat(clusterStatsResponse.getIndicesStats().getSegments().getBitsetMemoryInBytes(), greaterThan(0L));
assertAcked(client().admin().indices().prepareDelete("test"));
clusterStatsResponse = client().admin().cluster().prepareClusterStats().get();
- assertThat(clusterStatsResponse.getIndicesStats().getSegments().getBitsetMemoryInBytes(), equalTo(0l));
+ assertThat(clusterStatsResponse.getIndicesStats().getSegments().getBitsetMemoryInBytes(), equalTo(0L));
}
/**
diff --git a/core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java b/core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java
index fa9626964e..7b34aec9bd 100644
--- a/core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java
+++ b/core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java
@@ -95,29 +95,29 @@ public class SearchPreferenceIT extends ESIntegTestCase {
refresh();
SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_local").execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_local").execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_primary").execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_primary").execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_replica").execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_replica").execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_replica_first").execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_replica_first").execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("1234").execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("1234").execute().actionGet();
- assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
}
public void testReplicaPreference() throws Exception {
@@ -135,13 +135,13 @@ public class SearchPreferenceIT extends ESIntegTestCase {
}
SearchResponse resp = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_replica_first").execute().actionGet();
- assertThat(resp.getHits().totalHits(), equalTo(1l));
+ assertThat(resp.getHits().totalHits(), equalTo(1L));
client().admin().indices().prepareUpdateSettings("test").setSettings("number_of_replicas=1").get();
ensureGreen("test");
resp = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_replica").execute().actionGet();
- assertThat(resp.getHits().totalHits(), equalTo(1l));
+ assertThat(resp.getHits().totalHits(), equalTo(1L));
}
public void testThatSpecifyingNonExistingNodesReturnsUsefulError() throws Exception {
diff --git a/core/src/test/java/org/elasticsearch/search/profile/RandomQueryGenerator.java b/core/src/test/java/org/elasticsearch/search/profile/RandomQueryGenerator.java
index 9eb41086be..fff0b99c4d 100644
--- a/core/src/test/java/org/elasticsearch/search/profile/RandomQueryGenerator.java
+++ b/core/src/test/java/org/elasticsearch/search/profile/RandomQueryGenerator.java
@@ -72,6 +72,7 @@ public class RandomQueryGenerator {
}
}
+ @SuppressWarnings("deprecation") // fuzzy queries will be removed in 4.0
private static QueryBuilder randomTerminalQuery(List<String> stringFields, List<String> numericFields, int numDocs) {
switch (randomIntBetween(0,6)) {
case 0:
@@ -195,6 +196,8 @@ public class RandomQueryGenerator {
return q;
}
+ @SuppressWarnings("deprecation") // fuzzy queries will be removed in 4.0
+ @Deprecated
private static QueryBuilder randomFuzzyQuery(List<String> fields) {
QueryBuilder q = QueryBuilders.fuzzyQuery(randomField(fields), randomQueryString(1));
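randomFuzzyQuery both suppresses the deprecation warning and is itself marked deprecated, so the warning is silenced at the narrowest scope while still propagating to callers. A hypothetical helper showing the same pattern (class and method names invented for illustration; assumes org.elasticsearch.index.query.QueryBuilder and QueryBuilders are imported):

    class FuzzySearchHelper {
        @SuppressWarnings("deprecation") // fuzzy queries will be removed in 4.0
        @Deprecated // mark the wrapper too, so callers migrate along with it
        static QueryBuilder fuzzyTitle(String text) {
            return QueryBuilders.fuzzyQuery("title", text);
        }
    }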
diff --git a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java
index 1cbdf60a4a..6ad4d990de 100644
--- a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java
+++ b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java
@@ -197,13 +197,13 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
searchResponse = client().prepareSearch("test")
.setQuery(randomizeType(multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category")
.operator(Operator.AND).type(type))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("theone"));
searchResponse = client().prepareSearch("test")
.setQuery(randomizeType(multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category")
.operator(Operator.AND).type(type))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("theone"));
}
@@ -212,18 +212,18 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
.setQuery(randomizeType(multiMatchQuery("Man the Ultimate", "full_name_phrase", "first_name_phrase", "last_name_phrase", "category_phrase")
.operator(Operator.OR).type(MatchQuery.Type.PHRASE))).get();
assertFirstHit(searchResponse, hasId("ultimate2"));
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch("test")
.setQuery(randomizeType(multiMatchQuery("Captain", "full_name_phrase", "first_name_phrase", "last_name_phrase", "category_phrase")
.operator(Operator.OR).type(MatchQuery.Type.PHRASE))).get();
- assertThat(searchResponse.getHits().getTotalHits(), greaterThan(1l));
+ assertThat(searchResponse.getHits().getTotalHits(), greaterThan(1L));
searchResponse = client().prepareSearch("test")
.setQuery(randomizeType(multiMatchQuery("the Ul", "full_name_phrase", "first_name_phrase", "last_name_phrase", "category_phrase")
.operator(Operator.OR).type(MatchQuery.Type.PHRASE_PREFIX))).get();
assertSearchHits(searchResponse, "ultimate2", "ultimate1");
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
}
public void testSingleField() throws NoSuchFieldException, IllegalAccessException {
@@ -318,13 +318,13 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
searchResponse = client().prepareSearch("test")
.setQuery(randomizeType(multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category")
.operator(Operator.AND).cutoffFrequency(cutoffFrequency).type(type))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("theone"));
searchResponse = client().prepareSearch("test")
.setQuery(randomizeType(multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category")
.operator(Operator.AND).cutoffFrequency(cutoffFrequency).type(type))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("theone"));
searchResponse = client().prepareSearch("test")
@@ -332,7 +332,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
.operator(Operator.AND).cutoffFrequency(cutoffFrequency)
.analyzer("category")
.type(MultiMatchQueryBuilder.Type.CROSS_FIELDS))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("theother"));
}
@@ -455,7 +455,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
.setQuery(randomizeType(multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category")
.type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)
.operator(Operator.AND))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("theone"));
searchResponse = client().prepareSearch("test")
@@ -463,7 +463,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
.type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)
.analyzer("category")
.operator(Operator.AND))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("theone"));
searchResponse = client().prepareSearch("test")
@@ -471,7 +471,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
.type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)
.analyzer("category")
.operator(Operator.AND))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("theone"));
searchResponse = client().prepareSearch("test")
@@ -479,7 +479,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
.type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)
.analyzer("category")
.operator(Operator.AND))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("theone"));
@@ -549,21 +549,21 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
.type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)
.analyzer("category")
.operator(Operator.AND))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("theone"));
// counter example
searchResponse = client().prepareSearch("test")
.setQuery(randomizeType(multiMatchQuery("captain america marvel hero", "first_name", "last_name", "category")
.type(randomBoolean() ? MultiMatchQueryBuilder.Type.CROSS_FIELDS : MultiMatchQueryBuilder.DEFAULT_TYPE)
.operator(Operator.AND))).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
// counter example
searchResponse = client().prepareSearch("test")
.setQuery(randomizeType(multiMatchQuery("captain america marvel hero", "first_name", "last_name", "category")
.type(randomBoolean() ? MultiMatchQueryBuilder.Type.CROSS_FIELDS : MultiMatchQueryBuilder.DEFAULT_TYPE)
.operator(Operator.AND))).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
// test if boosts work
searchResponse = client().prepareSearch("test")
diff --git a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java
index ad9ab044ad..faa6b62d2a 100644
--- a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java
+++ b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java
@@ -129,7 +129,7 @@ public class SearchQueryIT extends ESIntegTestCase {
client().prepareIndex("test", "type1", "2").setSource("field1", "quick brown"),
client().prepareIndex("test", "type1", "3").setSource("field1", "quick"));
- assertHitCount(client().prepareSearch().setQuery(matchQuery("_all", "quick")).get(), 3l);
+ assertHitCount(client().prepareSearch().setQuery(matchQuery("_all", "quick")).get(), 3L);
SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("_all", "quick")).setExplain(true).get();
SearchHit[] hits = searchResponse.getHits().hits();
assertThat(hits.length, equalTo(3));
@@ -141,7 +141,7 @@ public class SearchQueryIT extends ESIntegTestCase {
client().prepareIndex("test", "type1", "2").setSource("field1", "quick brown"),
client().prepareIndex("test", "type1", "3").setSource("field1", "quick"));
- assertHitCount(client().prepareSearch().setQuery(matchQuery("_all", "quick")).get(), 3l);
+ assertHitCount(client().prepareSearch().setQuery(matchQuery("_all", "quick")).get(), 3L);
searchResponse = client().prepareSearch().setQuery(matchQuery("_all", "quick")).get();
hits = searchResponse.getHits().hits();
assertThat(hits.length, equalTo(3));
@@ -156,8 +156,8 @@ public class SearchQueryIT extends ESIntegTestCase {
client().prepareIndex("test", "type1", "2").setSource("field1", "quick brown"),
client().prepareIndex("test", "type1", "3").setSource("field1", "quick"));
- assertHitCount(client().prepareSearch().setQuery(queryStringQuery("quick")).get(), 3l);
- assertHitCount(client().prepareSearch().setQuery(queryStringQuery("")).get(), 0l); // return no docs
+ assertHitCount(client().prepareSearch().setQuery(queryStringQuery("quick")).get(), 3L);
+ assertHitCount(client().prepareSearch().setQuery(queryStringQuery("")).get(), 0L); // return no docs
}
// see https://github.com/elasticsearch/elasticsearch/issues/3177
@@ -178,7 +178,7 @@ public class SearchQueryIT extends ESIntegTestCase {
matchAllQuery()).must(
boolQuery().mustNot(boolQuery().must(termQuery("field1", "value1")).must(
termQuery("field1", "value2"))))).get(),
- 3l);
+ 3L);
assertHitCount(
client().prepareSearch()
.setQuery(
@@ -187,10 +187,10 @@ public class SearchQueryIT extends ESIntegTestCase {
.should(termQuery("field1", "value3"))).filter(
boolQuery().mustNot(boolQuery().must(termQuery("field1", "value1")).must(
termQuery("field1", "value2"))))).get(),
- 3l);
+ 3L);
assertHitCount(
client().prepareSearch().setQuery(matchAllQuery()).setPostFilter(boolQuery().mustNot(termQuery("field1", "value3"))).get(),
- 2l);
+ 2L);
}
public void testIndexOptions() throws Exception {
@@ -201,7 +201,7 @@ public class SearchQueryIT extends ESIntegTestCase {
client().prepareIndex("test", "type1", "2").setSource("field1", "quick lazy huge brown fox", "field2", "quick lazy huge brown fox"));
SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("field2", "quick brown").type(Type.PHRASE).slop(0)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFailures(client().prepareSearch().setQuery(matchQuery("field1", "quick brown").type(Type.PHRASE).slop(0)),
RestStatus.INTERNAL_SERVER_ERROR,
@@ -215,7 +215,7 @@ public class SearchQueryIT extends ESIntegTestCase {
indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "quick brown fox", "field2", "quick brown fox"), client().prepareIndex("test", "type1", "2").setSource("field1", "quick lazy huge brown fox", "field2", "quick lazy huge brown fox"));
SearchResponse searchResponse = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("field1", "quick"))).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
for (SearchHit searchHit : searchResponse.getHits().hits()) {
assertSearchHit(searchHit, hasScore(1.0f));
}
@@ -223,17 +223,17 @@ public class SearchQueryIT extends ESIntegTestCase {
searchResponse = client().prepareSearch("test").setQuery(
boolQuery().must(matchAllQuery()).must(
constantScoreQuery(matchQuery("field1", "quick")).boost(1.0f + getRandom().nextFloat()))).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertFirstHit(searchResponse, hasScore(searchResponse.getHits().getAt(1).score()));
client().prepareSearch("test").setQuery(constantScoreQuery(matchQuery("field1", "quick")).boost(1.0f + getRandom().nextFloat())).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertFirstHit(searchResponse, hasScore(searchResponse.getHits().getAt(1).score()));
searchResponse = client().prepareSearch("test").setQuery(
constantScoreQuery(boolQuery().must(matchAllQuery()).must(
constantScoreQuery(matchQuery("field1", "quick")).boost(1.0f + (random.nextBoolean()? 0.0f : random.nextFloat()))))).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertFirstHit(searchResponse, hasScore(searchResponse.getHits().getAt(1).score()));
for (SearchHit searchHit : searchResponse.getHits().hits()) {
assertSearchHit(searchHit, hasScore(1.0f));
@@ -280,11 +280,11 @@ public class SearchQueryIT extends ESIntegTestCase {
int iters = scaledRandomIntBetween(100, 200);
for (int i = 0; i < iters; i++) {
SearchResponse searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("*:*^10.0").boost(10.0f)).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
searchResponse = client().prepareSearch("test").setQuery(
boolQuery().must(matchAllQuery()).must(constantScoreQuery(matchAllQuery()))).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertThat((double)searchResponse.getHits().getAt(0).score(), closeTo(Math.sqrt(2), 0.1));
assertThat((double)searchResponse.getHits().getAt(1).score(),closeTo(Math.sqrt(2), 0.1));
}
@@ -298,7 +298,7 @@ public class SearchQueryIT extends ESIntegTestCase {
client().prepareIndex("test", "type1", "2").setSource("message", "hello world", "comment", "test comment"));
SearchResponse searchResponse = client().prepareSearch().setQuery(commonTermsQuery("_all", "test")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertFirstHit(searchResponse, hasId("2"));
assertSecondHit(searchResponse, hasId("1"));
assertThat(searchResponse.getHits().getHits()[0].getScore(), greaterThan(searchResponse.getHits().getHits()[1].getScore()));
@@ -314,44 +314,44 @@ public class SearchQueryIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch().setQuery(commonTermsQuery("field1", "the quick brown").cutoffFrequency(3).lowFreqOperator(Operator.OR)).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
assertFirstHit(searchResponse, hasId("1"));
assertSecondHit(searchResponse, hasId("2"));
assertThirdHit(searchResponse, hasId("3"));
searchResponse = client().prepareSearch().setQuery(commonTermsQuery("field1", "the quick brown").cutoffFrequency(3).lowFreqOperator(Operator.AND)).get();
- assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(2L));
assertFirstHit(searchResponse, hasId("1"));
assertSecondHit(searchResponse, hasId("2"));
// Default
searchResponse = client().prepareSearch().setQuery(commonTermsQuery("field1", "the quick brown").cutoffFrequency(3)).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
assertFirstHit(searchResponse, hasId("1"));
assertSecondHit(searchResponse, hasId("2"));
assertThirdHit(searchResponse, hasId("3"));
searchResponse = client().prepareSearch().setQuery(commonTermsQuery("field1", "the huge fox").lowFreqMinimumShouldMatch("2")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("2"));
searchResponse = client().prepareSearch().setQuery(commonTermsQuery("field1", "the lazy fox brown").cutoffFrequency(1).highFreqMinimumShouldMatch("3")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertFirstHit(searchResponse, hasId("1"));
assertSecondHit(searchResponse, hasId("2"));
searchResponse = client().prepareSearch().setQuery(commonTermsQuery("field1", "the lazy fox brown").cutoffFrequency(1).highFreqMinimumShouldMatch("4")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("2"));
// Default
searchResponse = client().prepareSearch().setQuery(commonTermsQuery("field1", "the lazy fox brown").cutoffFrequency(1)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("2"));
searchResponse = client().prepareSearch().setQuery(commonTermsQuery("field1", "the quick brown").cutoffFrequency(3).analyzer("stop")).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
// stop drops "the" since its a stopword
assertFirstHit(searchResponse, hasId("1"));
assertSecondHit(searchResponse, hasId("3"));
@@ -359,18 +359,18 @@ public class SearchQueryIT extends ESIntegTestCase {
// try the same with match query
searchResponse = client().prepareSearch().setQuery(matchQuery("field1", "the quick brown").cutoffFrequency(3).operator(Operator.AND)).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertFirstHit(searchResponse, hasId("1"));
assertSecondHit(searchResponse, hasId("2"));
searchResponse = client().prepareSearch().setQuery(matchQuery("field1", "the quick brown").cutoffFrequency(3).operator(Operator.OR)).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
assertFirstHit(searchResponse, hasId("1"));
assertSecondHit(searchResponse, hasId("2"));
assertThirdHit(searchResponse, hasId("3"));
searchResponse = client().prepareSearch().setQuery(matchQuery("field1", "the quick brown").cutoffFrequency(3).operator(Operator.AND).analyzer("stop")).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
// stop drops "the" since its a stopword
assertFirstHit(searchResponse, hasId("1"));
assertSecondHit(searchResponse, hasId("3"));
@@ -378,7 +378,7 @@ public class SearchQueryIT extends ESIntegTestCase {
// try the same with multi match query
searchResponse = client().prepareSearch().setQuery(multiMatchQuery("the quick brown", "field1", "field2").cutoffFrequency(3).operator(Operator.AND)).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
assertFirstHit(searchResponse, hasId("3")); // better score due to different query stats
assertSecondHit(searchResponse, hasId("1"));
assertThirdHit(searchResponse, hasId("2"));
@@ -401,44 +401,44 @@ public class SearchQueryIT extends ESIntegTestCase {
client().prepareIndex("test", "type1", "2").setSource("field1", "the quick lazy huge brown fox jumps over the tree") );
SearchResponse searchResponse = client().prepareSearch().setQuery(commonTermsQuery("field1", "the fast brown").cutoffFrequency(3).lowFreqOperator(Operator.OR)).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
assertFirstHit(searchResponse, hasId("1"));
assertSecondHit(searchResponse, hasId("2"));
assertThirdHit(searchResponse, hasId("3"));
searchResponse = client().prepareSearch().setQuery(commonTermsQuery("field1", "the fast brown").cutoffFrequency(3).lowFreqOperator(Operator.AND)).get();
- assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
+ assertThat(searchResponse.getHits().totalHits(), equalTo(2L));
assertFirstHit(searchResponse, hasId("1"));
assertSecondHit(searchResponse, hasId("2"));
// Default
searchResponse = client().prepareSearch().setQuery(commonTermsQuery("field1", "the fast brown").cutoffFrequency(3)).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
assertFirstHit(searchResponse, hasId("1"));
assertSecondHit(searchResponse, hasId("2"));
assertThirdHit(searchResponse, hasId("3"));
searchResponse = client().prepareSearch().setQuery(commonTermsQuery("field1", "the fast huge fox").lowFreqMinimumShouldMatch("3")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("2"));
searchResponse = client().prepareSearch().setQuery(commonTermsQuery("field1", "the fast lazy fox brown").cutoffFrequency(1).highFreqMinimumShouldMatch("5")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertFirstHit(searchResponse, hasId("1"));
assertSecondHit(searchResponse, hasId("2"));
searchResponse = client().prepareSearch().setQuery(commonTermsQuery("field1", "the fast lazy fox brown").cutoffFrequency(1).highFreqMinimumShouldMatch("6")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("2"));
// Default
searchResponse = client().prepareSearch().setQuery(commonTermsQuery("field1", "the fast lazy fox brown").cutoffFrequency(1)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("2"));
searchResponse = client().prepareSearch().setQuery(commonTermsQuery("field1", "the quick brown").cutoffFrequency(3).analyzer("stop")).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
// stop drops "the" since its a stopword
assertFirstHit(searchResponse, hasId("1"));
assertSecondHit(searchResponse, hasId("3"));
@@ -446,31 +446,31 @@ public class SearchQueryIT extends ESIntegTestCase {
// try the same with match query
searchResponse = client().prepareSearch().setQuery(matchQuery("field1", "the fast brown").cutoffFrequency(3).operator(Operator.AND)).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertFirstHit(searchResponse, hasId("1"));
assertSecondHit(searchResponse, hasId("2"));
searchResponse = client().prepareSearch().setQuery(matchQuery("field1", "the fast brown").cutoffFrequency(3).operator(Operator.OR)).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
assertFirstHit(searchResponse, hasId("1"));
assertSecondHit(searchResponse, hasId("2"));
assertThirdHit(searchResponse, hasId("3"));
searchResponse = client().prepareSearch().setQuery(matchQuery("field1", "the fast brown").cutoffFrequency(3).operator(Operator.AND).analyzer("stop")).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
// stop drops "the" since its a stopword
assertFirstHit(searchResponse, hasId("1"));
assertSecondHit(searchResponse, hasId("3"));
assertThirdHit(searchResponse, hasId("2"));
searchResponse = client().prepareSearch().setQuery(matchQuery("field1", "the fast brown").cutoffFrequency(3).minimumShouldMatch("3")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertFirstHit(searchResponse, hasId("1"));
assertSecondHit(searchResponse, hasId("2"));
// try the same with multi match query
searchResponse = client().prepareSearch().setQuery(multiMatchQuery("the fast brown", "field1", "field2").cutoffFrequency(3).operator(Operator.AND)).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
assertFirstHit(searchResponse, hasId("3")); // better score due to different query stats
assertSecondHit(searchResponse, hasId("1"));
assertThirdHit(searchResponse, hasId("2"));
@@ -483,19 +483,19 @@ public class SearchQueryIT extends ESIntegTestCase {
refresh();
SearchResponse searchResponse = client().prepareSearch().setQuery(queryStringQuery("value*").analyzeWildcard(true)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch().setQuery(queryStringQuery("*ue*").analyzeWildcard(true)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch().setQuery(queryStringQuery("*ue_1").analyzeWildcard(true)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch().setQuery(queryStringQuery("val*e_1").analyzeWildcard(true)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch().setQuery(queryStringQuery("v?l*e?1").analyzeWildcard(true)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
}
public void testLowercaseExpandedTerms() {
@@ -505,17 +505,17 @@ public class SearchQueryIT extends ESIntegTestCase {
refresh();
SearchResponse searchResponse = client().prepareSearch().setQuery(queryStringQuery("VALUE_3~1").lowercaseExpandedTerms(true)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch().setQuery(queryStringQuery("VALUE_3~1").lowercaseExpandedTerms(false)).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
searchResponse = client().prepareSearch().setQuery(queryStringQuery("ValUE_*").lowercaseExpandedTerms(true)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch().setQuery(queryStringQuery("vAl*E_1")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch().setQuery(queryStringQuery("[VALUE_1 TO VALUE_3]")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch().setQuery(queryStringQuery("[VALUE_1 TO VALUE_3]").lowercaseExpandedTerms(false)).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
}
// Issue #3540
@@ -532,10 +532,10 @@ public class SearchQueryIT extends ESIntegTestCase {
refresh();
SearchResponse searchResponse = client().prepareSearch().setQuery(queryStringQuery("past:[now-2M/d TO now/d]")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch().setQuery(queryStringQuery("future:[now/d TO now+2M/d]").lowercaseExpandedTerms(false)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
try {
client().prepareSearch().setQuery(queryStringQuery("future:[now/D TO now+2M/d]").lowercaseExpandedTerms(false)).get();
@@ -562,7 +562,7 @@ public class SearchQueryIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch().setQuery(queryStringQuery("past:[now-1m/m TO now+1m/m]")
.timeZone(timeZone.getID())).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
}
// Issue #10477
@@ -581,25 +581,25 @@ public class SearchQueryIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch()
.setQuery(queryStringQuery("past:[2015-04-06T00:00:00+0200 TO 2015-04-06T23:00:00+0200]"))
.get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
// Same timezone set with time_zone
searchResponse = client().prepareSearch()
.setQuery(queryStringQuery("past:[2015-04-06T00:00:00 TO 2015-04-06T23:00:00]").timeZone("+0200"))
.get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
// We set a timezone that will give no results
searchResponse = client().prepareSearch()
.setQuery(queryStringQuery("past:[2015-04-06T00:00:00-0200 TO 2015-04-06T23:00:00-0200]"))
.get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
// Same timezone set with time_zone, but another timezone is set directly within the dates, which takes precedence
searchResponse = client().prepareSearch()
.setQuery(queryStringQuery("past:[2015-04-06T00:00:00-0200 TO 2015-04-06T23:00:00-0200]").timeZone("+0200"))
.get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
}
public void testTypeFilter() throws Exception {
@@ -610,13 +610,13 @@ public class SearchQueryIT extends ESIntegTestCase {
client().prepareIndex("test", "type2", "2").setSource("field1", "value1"),
client().prepareIndex("test", "type2", "3").setSource("field1", "value1"));
- assertHitCount(client().prepareSearch().setQuery(typeQuery("type1")).get(), 2l);
- assertHitCount(client().prepareSearch().setQuery(typeQuery("type2")).get(), 3l);
+ assertHitCount(client().prepareSearch().setQuery(typeQuery("type1")).get(), 2L);
+ assertHitCount(client().prepareSearch().setQuery(typeQuery("type2")).get(), 3L);
- assertHitCount(client().prepareSearch().setTypes("type1").setQuery(matchAllQuery()).get(), 2l);
- assertHitCount(client().prepareSearch().setTypes("type2").setQuery(matchAllQuery()).get(), 3l);
+ assertHitCount(client().prepareSearch().setTypes("type1").setQuery(matchAllQuery()).get(), 2L);
+ assertHitCount(client().prepareSearch().setTypes("type2").setQuery(matchAllQuery()).get(), 3L);
- assertHitCount(client().prepareSearch().setTypes("type1", "type2").setQuery(matchAllQuery()).get(), 5l);
+ assertHitCount(client().prepareSearch().setTypes("type1", "type2").setQuery(matchAllQuery()).get(), 5L);
}
public void testIdsQueryTestsIdIndexed() throws Exception {
@@ -627,29 +627,29 @@ public class SearchQueryIT extends ESIntegTestCase {
client().prepareIndex("test", "type1", "3").setSource("field1", "value3"));
SearchResponse searchResponse = client().prepareSearch().setQuery(constantScoreQuery(idsQuery("type1").addIds("1", "3"))).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "3");
// no type
searchResponse = client().prepareSearch().setQuery(constantScoreQuery(idsQuery().addIds("1", "3"))).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "3");
searchResponse = client().prepareSearch().setQuery(idsQuery("type1").addIds("1", "3")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "3");
// no type
searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1", "3")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "3");
searchResponse = client().prepareSearch().setQuery(idsQuery("type1").addIds("7", "10")).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
// repeat..., with terms
searchResponse = client().prepareSearch().setTypes("type1").setQuery(constantScoreQuery(termsQuery("_id", "1", "3"))).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "3");
}
@@ -668,19 +668,19 @@ public class SearchQueryIT extends ESIntegTestCase {
for (String indexName : indexNames) {
SearchResponse request = client().prepareSearch().setQuery(constantScoreQuery(termQuery("_index", indexName))).get();
SearchResponse searchResponse = assertSearchResponse(request);
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertSearchHits(searchResponse, indexName + "1");
}
for (String indexName : indexNames) {
SearchResponse request = client().prepareSearch().setQuery(constantScoreQuery(termsQuery("_index", indexName))).get();
SearchResponse searchResponse = assertSearchResponse(request);
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertSearchHits(searchResponse, indexName + "1");
}
for (String indexName : indexNames) {
SearchResponse request = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("_index", indexName))).get();
SearchResponse searchResponse = assertSearchResponse(request);
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertSearchHits(searchResponse, indexName + "1");
}
{
@@ -701,33 +701,33 @@ public class SearchQueryIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch().setQuery(existsQuery("field1")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "2");
searchResponse = client().prepareSearch().setQuery(constantScoreQuery(existsQuery("field1"))).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "2");
searchResponse = client().prepareSearch().setQuery(queryStringQuery("_exists_:field1")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "2");
searchResponse = client().prepareSearch().setQuery(existsQuery("field2")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "3");
searchResponse = client().prepareSearch().setQuery(existsQuery("field3")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("4"));
// wildcard check
searchResponse = client().prepareSearch().setQuery(existsQuery("x*")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "2");
// object check
searchResponse = client().prepareSearch().setQuery(existsQuery("obj1")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "2");
}
@@ -737,13 +737,13 @@ public class SearchQueryIT extends ESIntegTestCase {
client().prepareIndex("test", "type1", "1").setSource("field1", "value1_1", "field2", "value2_1").setRefresh(true).get();
WrapperQueryBuilder wrapper = new WrapperQueryBuilder("{ \"term\" : { \"field1\" : \"value1_1\" } }");
- assertHitCount(client().prepareSearch().setQuery(wrapper).get(), 1l);
+ assertHitCount(client().prepareSearch().setQuery(wrapper).get(), 1L);
BoolQueryBuilder bool = boolQuery().must(wrapper).must(new TermQueryBuilder("field2", "value2_1"));
- assertHitCount(client().prepareSearch().setQuery(bool).get(), 1l);
+ assertHitCount(client().prepareSearch().setQuery(bool).get(), 1L);
WrapperQueryBuilder wrapperFilter = wrapperQuery("{ \"term\" : { \"field1\" : \"value1_1\" } }");
- assertHitCount(client().prepareSearch().setPostFilter(wrapperFilter).get(), 1l);
+ assertHitCount(client().prepareSearch().setPostFilter(wrapperFilter).get(), 1L);
}
public void testFiltersWithCustomCacheKey() throws Exception {
@@ -752,31 +752,31 @@ public class SearchQueryIT extends ESIntegTestCase {
client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get();
refresh();
SearchResponse searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("field1", "value1"))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("field1", "value1"))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("field1", "value1"))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("field1", "value1"))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
}
public void testMatchQueryNumeric() throws Exception {
assertAcked(prepareCreate("test").addMapping("type1", "long", "type=long", "double", "type=double"));
- indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("long", 1l, "double", 1.0d),
- client().prepareIndex("test", "type1", "2").setSource("long", 2l, "double", 2.0d),
- client().prepareIndex("test", "type1", "3").setSource("long", 3l, "double", 3.0d));
+ indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("long", 1L, "double", 1.0d),
+ client().prepareIndex("test", "type1", "2").setSource("long", 2L, "double", 2.0d),
+ client().prepareIndex("test", "type1", "3").setSource("long", 3L, "double", 3.0d));
SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("long", "1")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
searchResponse = client().prepareSearch().setQuery(matchQuery("double", "2")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("2"));
try {
client().prepareSearch().setQuery(matchQuery("double", "2 3 4")).get();
@@ -798,7 +798,7 @@ public class SearchQueryIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch().setQuery(builder)
.addAggregation(AggregationBuilders.terms("field1").field("field1")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
// this uses dismax so scores are equal and the order can be arbitrary
assertSearchHits(searchResponse, "1", "2");
@@ -807,7 +807,7 @@ public class SearchQueryIT extends ESIntegTestCase {
.setQuery(builder)
.get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "2");
client().admin().indices().prepareRefresh("test").get();
@@ -816,21 +816,21 @@ public class SearchQueryIT extends ESIntegTestCase {
searchResponse = client().prepareSearch()
.setQuery(builder)
.get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
refresh();
builder = multiMatchQuery("value1", "field1").field("field3", 1.5f)
.operator(Operator.AND); // Operator only applies on terms inside a field! Fields are always OR-ed together.
searchResponse = client().prepareSearch().setQuery(builder).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "3", "1");
client().admin().indices().prepareRefresh("test").get();
builder = multiMatchQuery("value1").field("field1").field("field3", 1.5f)
.operator(Operator.AND); // Operator only applies on terms inside a field! Fields are always OR-ed together.
searchResponse = client().prepareSearch().setQuery(builder).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "3", "1");
// Test lenient
@@ -845,7 +845,7 @@ public class SearchQueryIT extends ESIntegTestCase {
builder.lenient(true);
searchResponse = client().prepareSearch().setQuery(builder).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
}
@@ -860,17 +860,17 @@ public class SearchQueryIT extends ESIntegTestCase {
.must(matchQuery("field1", "a").zeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE))
.must(matchQuery("field1", "value1").zeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE));
SearchResponse searchResponse = client().prepareSearch().setQuery(boolQuery).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
boolQuery = boolQuery()
.must(matchQuery("field1", "a").zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL))
.must(matchQuery("field1", "value1").zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL));
searchResponse = client().prepareSearch().setQuery(boolQuery).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
boolQuery = boolQuery().must(matchQuery("field1", "a").zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL));
searchResponse = client().prepareSearch().setQuery(boolQuery).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
}
public void testMultiMatchQueryZeroTermsQuery() {
@@ -885,17 +885,17 @@ public class SearchQueryIT extends ESIntegTestCase {
.must(multiMatchQuery("a", "field1", "field2").zeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE))
.must(multiMatchQuery("value1", "field1", "field2").zeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE)); // Fields are ORed together
SearchResponse searchResponse = client().prepareSearch().setQuery(boolQuery).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
boolQuery = boolQuery()
.must(multiMatchQuery("a", "field1", "field2").zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL))
.must(multiMatchQuery("value4", "field1", "field2").zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL));
searchResponse = client().prepareSearch().setQuery(boolQuery).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
boolQuery = boolQuery().must(multiMatchQuery("a", "field1").zeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL));
searchResponse = client().prepareSearch().setQuery(boolQuery).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
}
public void testMultiMatchQueryMinShouldMatch() {
@@ -911,41 +911,41 @@ public class SearchQueryIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch()
.setQuery(multiMatchQuery)
.get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
multiMatchQuery.minimumShouldMatch("30%");
searchResponse = client().prepareSearch().setQuery(multiMatchQuery).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertFirstHit(searchResponse, hasId("1"));
assertSecondHit(searchResponse, hasId("2"));
multiMatchQuery.useDisMax(false);
multiMatchQuery.minimumShouldMatch("70%");
searchResponse = client().prepareSearch().setQuery(multiMatchQuery).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
multiMatchQuery.minimumShouldMatch("30%");
searchResponse = client().prepareSearch().setQuery(multiMatchQuery).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertFirstHit(searchResponse, hasId("1"));
assertSecondHit(searchResponse, hasId("2"));
multiMatchQuery = multiMatchQuery("value1 value2 bar", "field1");
multiMatchQuery.minimumShouldMatch("100%");
searchResponse = client().prepareSearch().setQuery(multiMatchQuery).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
multiMatchQuery.minimumShouldMatch("70%");
searchResponse = client().prepareSearch().setQuery(multiMatchQuery).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
// Min should match > # optional clauses returns no docs.
multiMatchQuery = multiMatchQuery("value1 value2 value3", "field1", "field2");
multiMatchQuery.minimumShouldMatch("4");
searchResponse = client().prepareSearch().setQuery(multiMatchQuery).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
}
public void testBoolQueryMinShouldMatchBiggerThanNumberOfShouldClauses() throws IOException {
@@ -961,7 +961,7 @@ public class SearchQueryIT extends ESIntegTestCase {
.should(termQuery("field1", "value2"))
.minimumNumberShouldMatch(3));
SearchResponse searchResponse = client().prepareSearch().setQuery(boolQuery).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
boolQuery = boolQuery()
@@ -973,7 +973,7 @@ public class SearchQueryIT extends ESIntegTestCase {
// Only one should clause is defined, returns no docs.
.minimumNumberShouldMatch(2);
searchResponse = client().prepareSearch().setQuery(boolQuery).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
boolQuery = boolQuery()
.should(termQuery("field1", "value1"))
@@ -983,7 +983,7 @@ public class SearchQueryIT extends ESIntegTestCase {
.minimumNumberShouldMatch(3))
.minimumNumberShouldMatch(1);
searchResponse = client().prepareSearch().setQuery(boolQuery).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
boolQuery = boolQuery()
@@ -993,7 +993,7 @@ public class SearchQueryIT extends ESIntegTestCase {
.should(termQuery("field1", "value2"))
.minimumNumberShouldMatch(3));
searchResponse = client().prepareSearch().setQuery(boolQuery).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
}
public void testFuzzyQueryString() {
@@ -1004,15 +1004,15 @@ public class SearchQueryIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch().setQuery(queryStringQuery("str:kimcy~1")).get();
assertNoFailures(searchResponse);
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
searchResponse = client().prepareSearch().setQuery(queryStringQuery("num:11~1")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
searchResponse = client().prepareSearch().setQuery(queryStringQuery("date:2012-02-02~1d")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
}
@@ -1026,14 +1026,14 @@ public class SearchQueryIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch()
.setQuery(queryStringQuery("\"phrase match\"").field("important", boost).field("less_important")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertFirstHit(searchResponse, hasId("1"));
assertSecondHit(searchResponse, hasId("2"));
assertThat((double)searchResponse.getHits().getAt(0).score(), closeTo(boost * searchResponse.getHits().getAt(1).score(), .1));
searchResponse = client().prepareSearch()
.setQuery(queryStringQuery("\"phrase match\"").field("important", boost).field("less_important").useDisMax(false)).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertFirstHit(searchResponse, hasId("1"));
assertSecondHit(searchResponse, hasId("2"));
assertThat((double)searchResponse.getHits().getAt(0).score(), closeTo(boost * searchResponse.getHits().getAt(1).score(), .1));
@@ -1046,27 +1046,27 @@ public class SearchQueryIT extends ESIntegTestCase {
refresh();
SearchResponse searchResponse = client().prepareSearch().setQuery(queryStringQuery("num:>19")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("2"));
searchResponse = client().prepareSearch().setQuery(queryStringQuery("num:>20")).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
searchResponse = client().prepareSearch().setQuery(queryStringQuery("num:>=20")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("2"));
searchResponse = client().prepareSearch().setQuery(queryStringQuery("num:>11")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
searchResponse = client().prepareSearch().setQuery(queryStringQuery("num:<20")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch().setQuery(queryStringQuery("num:<=20")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
searchResponse = client().prepareSearch().setQuery(queryStringQuery("+num:>11 +num:<20")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
}
public void testEmptytermsQuery() throws Exception {
@@ -1079,77 +1079,77 @@ public class SearchQueryIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(termsQuery("term", new String[0]))).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
searchResponse = client().prepareSearch("test").setQuery(idsQuery()).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
}
public void testTermsQuery() throws Exception {
assertAcked(prepareCreate("test").addMapping("type", "str", "type=string", "lng", "type=long", "dbl", "type=double"));
indexRandom(true,
- client().prepareIndex("test", "type", "1").setSource("str", "1", "lng", 1l, "dbl", 1.0d),
- client().prepareIndex("test", "type", "2").setSource("str", "2", "lng", 2l, "dbl", 2.0d),
- client().prepareIndex("test", "type", "3").setSource("str", "3", "lng", 3l, "dbl", 3.0d),
- client().prepareIndex("test", "type", "4").setSource("str", "4", "lng", 4l, "dbl", 4.0d));
+ client().prepareIndex("test", "type", "1").setSource("str", "1", "lng", 1L, "dbl", 1.0d),
+ client().prepareIndex("test", "type", "2").setSource("str", "2", "lng", 2L, "dbl", 2.0d),
+ client().prepareIndex("test", "type", "3").setSource("str", "3", "lng", 3L, "dbl", 3.0d),
+ client().prepareIndex("test", "type", "4").setSource("str", "4", "lng", 4L, "dbl", 4.0d));
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(termsQuery("str", "1", "4"))).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "4");
searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(termsQuery("lng", new long[] {2, 3}))).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "2", "3");
searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(termsQuery("dbl", new double[]{2, 3}))).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "2", "3");
searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(termsQuery("lng", new int[] {1, 3}))).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "3");
searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(termsQuery("dbl", new float[] {2, 4}))).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "2", "4");
// test partial matching
searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(termsQuery("str", "2", "5"))).get();
assertNoFailures(searchResponse);
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("2"));
searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(termsQuery("dbl", new double[] {2, 5}))).get();
assertNoFailures(searchResponse);
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("2"));
searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(termsQuery("lng", new long[] {2, 5}))).get();
assertNoFailures(searchResponse);
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("2"));
// test valid type, but no matching terms
searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(termsQuery("str", "5", "6"))).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(termsQuery("dbl", new double[] {5, 6}))).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(termsQuery("lng", new long[] {5, 6}))).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
}
public void testTermsLookupFilter() throws Exception {
@@ -1189,54 +1189,54 @@ public class SearchQueryIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("term" , new TermsLookup("lookup", "type", "1", "terms"))).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "3");
// same as above, just on the _id...
searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("_id", new TermsLookup("lookup", "type", "1", "terms"))
).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "3");
// another search with same parameters...
searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "1", "terms"))).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "3");
searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "2", "terms"))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("2"));
searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "3", "terms"))).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "2", "4");
searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "4", "terms"))).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "type", "1", "arr.term"))).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "3");
searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "type", "2", "arr.term"))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("2"));
searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "type", "3", "arr.term"))).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "2", "4");
searchResponse = client().prepareSearch("test")
.setQuery(termsLookupQuery("not_exists", new TermsLookup("lookup2", "type", "3", "arr.term"))).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
}
public void testBasicQueryById() throws Exception {
@@ -1247,27 +1247,27 @@ public class SearchQueryIT extends ESIntegTestCase {
refresh();
SearchResponse searchResponse = client().prepareSearch().setQuery(idsQuery("type1", "type2").addIds("1", "2")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertThat(searchResponse.getHits().hits().length, equalTo(2));
searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().hits().length, equalTo(1));
searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1", "2")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertThat(searchResponse.getHits().hits().length, equalTo(2));
searchResponse = client().prepareSearch().setQuery(idsQuery("type1").addIds("1", "2")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().hits().length, equalTo(1));
searchResponse = client().prepareSearch().setQuery(idsQuery(Strings.EMPTY_ARRAY).addIds("1")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().hits().length, equalTo(1));
searchResponse = client().prepareSearch().setQuery(idsQuery("type1", "type2", "type3").addIds("1", "2", "3", "4")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertThat(searchResponse.getHits().hits().length, equalTo(2));
}
@@ -1291,82 +1291,82 @@ public class SearchQueryIT extends ESIntegTestCase {
SearchResponse searchResponse;
logger.info("--> term query on 1");
searchResponse = client().prepareSearch("test").setQuery(termQuery("num_byte", 1)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
searchResponse = client().prepareSearch("test").setQuery(termQuery("num_short", 1)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
searchResponse = client().prepareSearch("test").setQuery(termQuery("num_integer", 1)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
searchResponse = client().prepareSearch("test").setQuery(termQuery("num_long", 1)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
searchResponse = client().prepareSearch("test").setQuery(termQuery("num_float", 1)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
searchResponse = client().prepareSearch("test").setQuery(termQuery("num_double", 1)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
logger.info("--> terms query on 1");
searchResponse = client().prepareSearch("test").setQuery(termsQuery("num_byte", new int[]{1})).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
searchResponse = client().prepareSearch("test").setQuery(termsQuery("num_short", new int[]{1})).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
searchResponse = client().prepareSearch("test").setQuery(termsQuery("num_integer", new int[]{1})).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
searchResponse = client().prepareSearch("test").setQuery(termsQuery("num_long", new int[]{1})).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
searchResponse = client().prepareSearch("test").setQuery(termsQuery("num_float", new double[]{1})).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
searchResponse = client().prepareSearch("test").setQuery(termsQuery("num_double", new double[]{1})).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
logger.info("--> term filter on 1");
searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_byte", 1))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_short", 1))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_integer", 1))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_long", 1))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_float", 1))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_double", 1))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
logger.info("--> terms filter on 1");
searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_byte", new int[]{1}))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_short", new int[]{1}))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_integer", new int[]{1}))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_long", new int[]{1}))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_float", new int[]{1}))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_double", new int[]{1}))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
}
@@ -1388,7 +1388,7 @@ public class SearchQueryIT extends ESIntegTestCase {
.should(rangeQuery("num_long").from(1).to(2))
.should(rangeQuery("num_long").from(3).to(4))
).get();
- assertHitCount(searchResponse, 4l);
+ assertHitCount(searchResponse, 4L);
// This made #2826 fail! (only with bit based filters)
searchResponse = client().prepareSearch("test").setPostFilter(
@@ -1396,7 +1396,7 @@ public class SearchQueryIT extends ESIntegTestCase {
.should(rangeQuery("num_long").from(1).to(2))
.should(rangeQuery("num_long").from(3).to(4))
).get();
- assertHitCount(searchResponse, 4l);
+ assertHitCount(searchResponse, 4L);
// This made #2979 fail!
searchResponse = client().prepareSearch("test").setPostFilter(
@@ -1405,7 +1405,7 @@ public class SearchQueryIT extends ESIntegTestCase {
.should(rangeQuery("num_long").from(1).to(2))
.should(rangeQuery("num_long").from(3).to(4))
).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
}
// see #2926
@@ -1422,13 +1422,13 @@ public class SearchQueryIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
.setSearchType(SearchType.DFS_QUERY_THEN_FETCH).get();
- assertHitCount(searchResponse, 4l);
+ assertHitCount(searchResponse, 4L);
searchResponse = client().prepareSearch("test").setQuery(
boolQuery()
.mustNot(matchQuery("description", "anything").type(Type.BOOLEAN))
).setSearchType(SearchType.DFS_QUERY_THEN_FETCH).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
}
// see #2994
@@ -1443,14 +1443,15 @@ public class SearchQueryIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(spanOrQuery(spanTermQuery("description", "bar"))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch("test").setQuery(
spanNearQuery(spanTermQuery("description", "foo"), 3)
.clause(spanTermQuery("description", "other"))).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
}
+ @SuppressWarnings("deprecation") // fuzzy queries will be removed in 4.0
public void testSpanMultiTermQuery() throws IOException {
createIndex("test");
@@ -1492,17 +1493,17 @@ public class SearchQueryIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(spanNotQuery(spanNearQuery(QueryBuilders.spanTermQuery("description", "quick"), 1)
.clause(QueryBuilders.spanTermQuery("description", "fox")), spanTermQuery("description", "brown"))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch("test")
.setQuery(spanNotQuery(spanNearQuery(QueryBuilders.spanTermQuery("description", "quick"), 1)
.clause(QueryBuilders.spanTermQuery("description", "fox")), spanTermQuery("description", "sleeping")).dist(5)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch("test")
.setQuery(spanNotQuery(spanNearQuery(QueryBuilders.spanTermQuery("description", "quick"), 1)
.clause(QueryBuilders.spanTermQuery("description", "fox")), spanTermQuery("description", "jumped")).pre(1).post(1)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
}
public void testSimpleDFSQuery() throws IOException {
@@ -1690,7 +1691,7 @@ public class SearchQueryIT extends ESIntegTestCase {
.setQuery(
queryStringQuery("foo.baz").useDisMax(false).defaultOperator(Operator.AND)
.field("field1").field("field2")).get();
- assertHitCount(response, 1l);
+ assertHitCount(response, 1L);
}
// see #3797
@@ -1702,15 +1703,15 @@ public class SearchQueryIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(multiMatchQuery("value2", "field2").field("field1", 2).lenient(true).useDisMax(false)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch("test")
.setQuery(multiMatchQuery("value2", "field2").field("field1", 2).lenient(true).useDisMax(true)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch("test")
.setQuery(multiMatchQuery("value2").field("field2", 2).lenient(true)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
}
public void testAllFieldEmptyMapping() throws Exception {
@@ -1741,24 +1742,24 @@ public class SearchQueryIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("index1", "index2", "index3")
.setQuery(indicesQuery(matchQuery("text", "value1"), "index1")
.noMatchQuery(matchQuery("text", "value2"))).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "2");
// default no match query is match_all
searchResponse = client().prepareSearch("index1", "index2", "index3")
.setQuery(indicesQuery(matchQuery("text", "value1"), "index1")).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
assertSearchHits(searchResponse, "1", "2", "3");
searchResponse = client().prepareSearch("index1", "index2", "index3")
.setQuery(indicesQuery(matchQuery("text", "value1"), "index1")
.noMatchQuery(QueryBuilders.matchAllQuery())).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
assertSearchHits(searchResponse, "1", "2", "3");
searchResponse = client().prepareSearch("index1", "index2", "index3")
.setQuery(indicesQuery(matchQuery("text", "value1"), "index1")
.noMatchQuery("none")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("1"));
}
@@ -1790,7 +1791,7 @@ public class SearchQueryIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("related", "simple")
.setQuery(indicesQuery(hasChildQuery("child", matchQuery("text", "value2")), "related")
.noMatchQuery(matchQuery("text", "value1"))).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "2");
}
@@ -1811,7 +1812,7 @@ public class SearchQueryIT extends ESIntegTestCase {
indicesQuery(termQuery("field", "missing"), "test1", "test2", "test3")
.noMatchQuery(termQuery("field", "match"))).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
for (SearchHit hit : searchResponse.getHits().getHits()) {
if ("index1".equals(hit.index())) {
@@ -1830,7 +1831,7 @@ public class SearchQueryIT extends ESIntegTestCase {
indicesQuery(termQuery("field", "missing"), "test1")
.noMatchQuery(termQuery("field", "match"))).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
for (SearchHit hit : searchResponse.getHits().getHits()) {
if ("index1".equals(hit.index())) {
@@ -1849,7 +1850,7 @@ public class SearchQueryIT extends ESIntegTestCase {
indicesQuery(termQuery("field", "missing"), "index1", "test1")
.noMatchQuery(termQuery("field", "match"))).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
for (SearchHit hit : searchResponse.getHits().getHits()) {
if ("index2".equals(hit.index())) {
@@ -1932,51 +1933,51 @@ public class SearchQueryIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01T00:00:00").to("2014-01-01T00:59:00"))
.get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getId(), is("1"));
searchResponse = client().prepareSearch("test")
.setQuery(QueryBuilders.rangeQuery("date").from("2013-12-31T23:00:00").to("2013-12-31T23:59:00"))
.get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getId(), is("2"));
searchResponse = client().prepareSearch("test")
.setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01T01:00:00").to("2014-01-01T01:59:00"))
.get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getId(), is("3"));
// We explicitly define a time zone in the from/to dates so whatever the time zone is, it won't be used
searchResponse = client().prepareSearch("test")
.setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01T00:00:00Z").to("2014-01-01T00:59:00Z").timeZone("+10:00"))
.get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getId(), is("1"));
searchResponse = client().prepareSearch("test")
.setQuery(QueryBuilders.rangeQuery("date").from("2013-12-31T23:00:00Z").to("2013-12-31T23:59:00Z").timeZone("+10:00"))
.get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getId(), is("2"));
searchResponse = client().prepareSearch("test")
.setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01T01:00:00Z").to("2014-01-01T01:59:00Z").timeZone("+10:00"))
.get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getId(), is("3"));
// We define a time zone to be applied to the filter and from/to have no time zone
searchResponse = client().prepareSearch("test")
.setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01T03:00:00").to("2014-01-01T03:59:00").timeZone("+03:00"))
.get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getId(), is("1"));
searchResponse = client().prepareSearch("test")
.setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01T02:00:00").to("2014-01-01T02:59:00").timeZone("+03:00"))
.get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getId(), is("2"));
searchResponse = client().prepareSearch("test")
.setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01T04:00:00").to("2014-01-01T04:59:00").timeZone("+03:00"))
.get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getId(), is("3"));
// When we use long values, they are interpreted as ms since epoch (UTC), so we don't apply any transformation
@@ -1992,13 +1993,13 @@ public class SearchQueryIT extends ESIntegTestCase {
searchResponse = client().prepareSearch("test")
.setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01").to("2014-01-01T00:59:00").timeZone("-01:00"))
.get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getId(), is("3"));
searchResponse = client().prepareSearch("test")
.setQuery(QueryBuilders.rangeQuery("date").from("now/d-1d").timeZone("+01:00"))
.get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getId(), is("4"));
// A Range Filter on a numeric field with a TimeZone should raise an exception
@@ -2017,7 +2018,7 @@ public class SearchQueryIT extends ESIntegTestCase {
client().prepareIndex("test", "type1", "1").setSource("{}").get();
refresh();
- assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1l);
+ assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L);
}
// see #5120
@@ -2041,32 +2042,32 @@ public class SearchQueryIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(matchQuery("meta", "1234"))
.get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch("test")
.setQuery(matchQuery("meta", "1234.56"))
.get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch("test")
.setQuery(termQuery("meta", "A1234"))
.get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch("test")
.setQuery(termQuery("meta", "a1234"))
.get();
- assertHitCount(searchResponse, 0l); // it's upper case
+ assertHitCount(searchResponse, 0L); // it's upper case
searchResponse = client().prepareSearch("test")
.setQuery(matchQuery("meta", "A1234").analyzer("my_ngram_analyzer"))
.get(); // force ngram analyzer
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch("test")
.setQuery(matchQuery("meta", "a1234").analyzer("my_ngram_analyzer"))
.get(); // this one returns a hit since its default operator is OR
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
}
public void testMatchPhrasePrefixQuery() throws ExecutionException, InterruptedException {
@@ -2076,13 +2077,13 @@ public class SearchQueryIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("field", "Johnnie la").slop(between(2,5)).type(Type.PHRASE_PREFIX)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertSearchHits(searchResponse, "1");
searchResponse = client().prepareSearch().setQuery(matchQuery("field", "trying").type(Type.PHRASE_PREFIX)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertSearchHits(searchResponse, "2");
searchResponse = client().prepareSearch().setQuery(matchQuery("field", "try").type(Type.PHRASE_PREFIX)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertSearchHits(searchResponse, "2");
}
@@ -2095,7 +2096,7 @@ public class SearchQueryIT extends ESIntegTestCase {
.setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
.setQuery(QueryBuilders.queryStringQuery("xyz").boost(100))
.get();
- assertThat(response.getHits().totalHits(), equalTo(1l));
+ assertThat(response.getHits().totalHits(), equalTo(1L));
assertThat(response.getHits().getAt(0).id(), equalTo("1"));
float first = response.getHits().getAt(0).getScore();
@@ -2105,7 +2106,7 @@ public class SearchQueryIT extends ESIntegTestCase {
.setQuery(QueryBuilders.queryStringQuery("xyz").boost(100))
.get();
- assertThat(response.getHits().totalHits(), equalTo(1l));
+ assertThat(response.getHits().totalHits(), equalTo(1L));
assertThat(response.getHits().getAt(0).id(), equalTo("1"));
float actual = response.getHits().getAt(0).getScore();
assertThat(i + " expected: " + first + " actual: " + actual, Float.compare(first, actual), equalTo(0));
diff --git a/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java b/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java
index 358122f54e..923153a7dd 100644
--- a/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java
+++ b/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java
@@ -62,7 +62,7 @@ public class SimpleQueryStringIT extends ESIntegTestCase {
client().prepareIndex("test", "type1", "6").setSource("otherbody", "spaghetti"));
SearchResponse searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar")).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
assertSearchHits(searchResponse, "1", "2", "3");
// Tests boost value setting. In this case doc 1 should always be ranked above the other
@@ -71,32 +71,32 @@ public class SimpleQueryStringIT extends ESIntegTestCase {
boolQuery()
.should(simpleQueryStringQuery("\"foo bar\"").boost(10.0f))
.should(termQuery("body", "eggplant"))).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertFirstHit(searchResponse, hasId("3"));
searchResponse = client().prepareSearch().setQuery(
simpleQueryStringQuery("foo bar").defaultOperator(Operator.AND)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("3"));
searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("\"quux baz\" +(eggplant | spaghetti)")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "4", "5");
searchResponse = client().prepareSearch().setQuery(
simpleQueryStringQuery("eggplants").analyzer("snowball")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("4"));
searchResponse = client().prepareSearch().setQuery(
simpleQueryStringQuery("spaghetti").field("body", 1000.0f).field("otherbody", 2.0f).queryName("myquery")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertFirstHit(searchResponse, hasId("5"));
assertSearchHits(searchResponse, "5", "6");
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("myquery"));
searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("spaghetti").field("*body")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "5", "6");
}
@@ -112,17 +112,17 @@ public class SimpleQueryStringIT extends ESIntegTestCase {
logger.info("--> query 1");
SearchResponse searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar").minimumShouldMatch("2")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "3", "4");
logger.info("--> query 2");
searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar").field("body").field("body2").minimumShouldMatch("2")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "3", "4");
logger.info("--> query 3");
searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar baz").field("body").field("body2").minimumShouldMatch("70%")).get();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "3", "4");
indexRandom(true, false,
@@ -133,17 +133,17 @@ public class SimpleQueryStringIT extends ESIntegTestCase {
logger.info("--> query 4");
searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar").field("body").field("body2").minimumShouldMatch("2")).get();
- assertHitCount(searchResponse, 4l);
+ assertHitCount(searchResponse, 4L);
assertSearchHits(searchResponse, "3", "4", "7", "8");
logger.info("--> query 5");
searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar").minimumShouldMatch("2")).get();
- assertHitCount(searchResponse, 5l);
+ assertHitCount(searchResponse, 5L);
assertSearchHits(searchResponse, "3", "4", "6", "7", "8");
logger.info("--> query 6");
searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar baz").field("body2").field("other").minimumShouldMatch("70%")).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
assertSearchHits(searchResponse, "6", "7", "8");
}
@@ -153,21 +153,21 @@ public class SimpleQueryStringIT extends ESIntegTestCase {
refresh();
SearchResponse searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("Professio*")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertSearchHits(searchResponse, "1");
searchResponse = client().prepareSearch().setQuery(
simpleQueryStringQuery("Professio*").lowercaseExpandedTerms(false)).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
searchResponse = client().prepareSearch().setQuery(
simpleQueryStringQuery("Professionan~1")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertSearchHits(searchResponse, "1");
searchResponse = client().prepareSearch().setQuery(
simpleQueryStringQuery("Professionan~1").lowercaseExpandedTerms(false)).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
}
public void testQueryStringLocale() {
@@ -176,17 +176,17 @@ public class SimpleQueryStringIT extends ESIntegTestCase {
refresh();
SearchResponse searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("BILL*")).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
searchResponse = client().prepareSearch().setQuery(queryStringQuery("body:BILL*")).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
searchResponse = client().prepareSearch().setQuery(
simpleQueryStringQuery("BILL*").locale(new Locale("tr", "TR"))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertSearchHits(searchResponse, "1");
searchResponse = client().prepareSearch().setQuery(
queryStringQuery("body:BILL*").locale(new Locale("tr", "TR"))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertSearchHits(searchResponse, "1");
}
@@ -210,22 +210,22 @@ public class SimpleQueryStringIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch().setQuery(
simpleQueryStringQuery("foo bar baz").field("body")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertSearchHits(searchResponse, "1");
searchResponse = client().prepareSearch().setTypes("type1").setQuery(
simpleQueryStringQuery("foo bar baz").field("body")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertSearchHits(searchResponse, "1");
searchResponse = client().prepareSearch().setQuery(
simpleQueryStringQuery("foo bar baz").field("body.sub")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertSearchHits(searchResponse, "1");
searchResponse = client().prepareSearch().setTypes("type1").setQuery(
simpleQueryStringQuery("foo bar baz").field("body.sub")).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertSearchHits(searchResponse, "1");
}
@@ -241,42 +241,42 @@ public class SimpleQueryStringIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch().setQuery(
simpleQueryStringQuery("foo bar").flags(SimpleQueryStringFlag.ALL)).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
assertSearchHits(searchResponse, "1", "2", "3");
searchResponse = client().prepareSearch().setQuery(
simpleQueryStringQuery("foo | bar")
.defaultOperator(Operator.AND)
.flags(SimpleQueryStringFlag.OR)).get();
- assertHitCount(searchResponse, 3l);
+ assertHitCount(searchResponse, 3L);
assertSearchHits(searchResponse, "1", "2", "3");
searchResponse = client().prepareSearch().setQuery(
simpleQueryStringQuery("foo | bar")
.defaultOperator(Operator.AND)
.flags(SimpleQueryStringFlag.NONE)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("3"));
searchResponse = client().prepareSearch().setQuery(
simpleQueryStringQuery("baz | egg*")
.defaultOperator(Operator.AND)
.flags(SimpleQueryStringFlag.NONE)).get();
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
searchResponse = client()
.prepareSearch()
.setSource(
new SearchSourceBuilder().query(QueryBuilders.simpleQueryStringQuery("foo|bar").defaultOperator(Operator.AND)
.flags(SimpleQueryStringFlag.NONE))).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client()
.prepareSearch()
.setQuery(
simpleQueryStringQuery("baz | egg*").defaultOperator(Operator.AND).flags(SimpleQueryStringFlag.WHITESPACE,
SimpleQueryStringFlag.PREFIX)).get();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("4"));
}
@@ -288,12 +288,12 @@ public class SimpleQueryStringIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo").field("field")).get();
assertFailures(searchResponse);
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertSearchHits(searchResponse, "1");
searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo").field("field").lenient(true)).get();
assertNoFailures(searchResponse);
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertSearchHits(searchResponse, "1");
}
@@ -332,7 +332,7 @@ public class SimpleQueryStringIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("Köln*").analyzeWildcard(true).field("location")).get();
assertNoFailures(searchResponse);
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
assertSearchHits(searchResponse, "1");
}
}
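Elsewhere the change is purely cosmetic: every lowercase long-literal suffix in these tests becomes the uppercase form, because a lowercase 'l' is easily misread as the digit '1'. A minimal standalone sketch of the hazard (the class name is hypothetical, not part of the patch):

// Hypothetical example, not from the patch: the lowercase suffix is
// legal Java but visually ambiguous in many fonts.
public class LongLiteralExample {
    public static void main(String[] args) {
        long lower = 11l; // reads like the int 111
        long upper = 11L; // same value, unmistakably a long literal
        System.out.println(lower == upper); // prints: true
    }
}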
diff --git a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java
index 2aa55f8b62..01f7e33244 100644
--- a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java
@@ -19,15 +19,37 @@
package org.elasticsearch.search.rescore;
+import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.ParseFieldMatcher;
+import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.mapper.ContentPath;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.Mapper;
+import org.elasticsearch.index.mapper.MapperBuilders;
+import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.search.rescore.RescoreBuilder.QueryRescorer;
-import org.elasticsearch.search.rescore.RescoreBuilder.Rescorer;
+import org.elasticsearch.index.query.QueryParseContext;
+import org.elasticsearch.index.query.QueryShardContext;
+import org.elasticsearch.indices.query.IndicesQueriesRegistry;
+import org.elasticsearch.search.SearchModule;
+import org.elasticsearch.search.rescore.QueryRescorer.QueryRescoreContext;
import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.IndexSettingsModule;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@@ -40,6 +62,7 @@ public class QueryRescoreBuilderTests extends ESTestCase {
private static final int NUMBER_OF_TESTBUILDERS = 20;
private static NamedWriteableRegistry namedWriteableRegistry;
+ private static IndicesQueriesRegistry indicesQueriesRegistry;
/**
* setup for the whole base test class
@@ -47,13 +70,14 @@ public class QueryRescoreBuilderTests extends ESTestCase {
@BeforeClass
public static void init() {
namedWriteableRegistry = new NamedWriteableRegistry();
- namedWriteableRegistry.registerPrototype(Rescorer.class, org.elasticsearch.search.rescore.RescoreBuilder.QueryRescorer.PROTOTYPE);
- namedWriteableRegistry.registerPrototype(QueryBuilder.class, new MatchAllQueryBuilder());
+ namedWriteableRegistry.registerPrototype(RescoreBuilder.class, QueryRescorerBuilder.PROTOTYPE);
+ indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry).buildQueryParserRegistry();
}
@AfterClass
public static void afterClass() throws Exception {
namedWriteableRegistry = null;
+ indicesQueriesRegistry = null;
}
/**
@@ -61,8 +85,8 @@ public class QueryRescoreBuilderTests extends ESTestCase {
*/
public void testSerialization() throws IOException {
for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
- RescoreBuilder original = randomRescoreBuilder();
- RescoreBuilder deserialized = serializedCopy(original);
+ RescoreBuilder<?> original = randomRescoreBuilder();
+ RescoreBuilder<?> deserialized = serializedCopy(original);
assertEquals(deserialized, original);
assertEquals(deserialized.hashCode(), original.hashCode());
assertNotSame(deserialized, original);
@@ -74,7 +98,7 @@ public class QueryRescoreBuilderTests extends ESTestCase {
*/
public void testEqualsAndHashcode() throws IOException {
for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
- RescoreBuilder firstBuilder = randomRescoreBuilder();
+ RescoreBuilder<?> firstBuilder = randomRescoreBuilder();
assertFalse("rescore builder is equal to null", firstBuilder.equals(null));
assertFalse("rescore builder is equal to incompatible type", firstBuilder.equals(""));
assertTrue("rescore builder is not equal to self", firstBuilder.equals(firstBuilder));
@@ -82,13 +106,13 @@ public class QueryRescoreBuilderTests extends ESTestCase {
equalTo(firstBuilder.hashCode()));
assertThat("different rescore builder should not be equal", mutate(firstBuilder), not(equalTo(firstBuilder)));
- RescoreBuilder secondBuilder = serializedCopy(firstBuilder);
+ RescoreBuilder<?> secondBuilder = serializedCopy(firstBuilder);
assertTrue("rescore builder is not equal to self", secondBuilder.equals(secondBuilder));
assertTrue("rescore builder is not equal to its copy", firstBuilder.equals(secondBuilder));
assertTrue("equals is not symmetric", secondBuilder.equals(firstBuilder));
assertThat("rescore builder copy's hashcode is different from original hashcode", secondBuilder.hashCode(), equalTo(firstBuilder.hashCode()));
- RescoreBuilder thirdBuilder = serializedCopy(secondBuilder);
+ RescoreBuilder<?> thirdBuilder = serializedCopy(secondBuilder);
assertTrue("rescore builder is not equal to self", thirdBuilder.equals(thirdBuilder));
assertTrue("rescore builder is not equal to its copy", secondBuilder.equals(thirdBuilder));
assertThat("rescore builder copy's hashcode is different from original hashcode", secondBuilder.hashCode(), equalTo(thirdBuilder.hashCode()));
@@ -99,8 +123,161 @@ public class QueryRescoreBuilderTests extends ESTestCase {
}
}
- private RescoreBuilder mutate(RescoreBuilder original) throws IOException {
- RescoreBuilder mutation = serializedCopy(original);
+ /**
+ * creates a random rescorer, renders it to xContent and parses it back into a new instance that should be equal to the original
+ */
+ public void testFromXContent() throws IOException {
+ QueryParseContext context = new QueryParseContext(indicesQueriesRegistry);
+ context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY));
+ for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
+ RescoreBuilder<?> rescoreBuilder = randomRescoreBuilder();
+
+ XContentParser parser = createParser(rescoreBuilder);
+ context.reset(parser);
+ parser.nextToken();
+ RescoreBuilder<?> secondRescoreBuilder = RescoreBuilder.parseFromXContent(context);
+ assertNotSame(rescoreBuilder, secondRescoreBuilder);
+ assertEquals(rescoreBuilder, secondRescoreBuilder);
+ assertEquals(rescoreBuilder.hashCode(), secondRescoreBuilder.hashCode());
+ }
+ }
+
+ private static XContentParser createParser(RescoreBuilder<?> rescoreBuilder) throws IOException {
+ XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
+ if (randomBoolean()) {
+ builder.prettyPrint();
+ }
+ rescoreBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS);
+ return XContentHelper.createParser(builder.bytes());
+ }
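+ // note: rendering through a randomly picked xcontent type (JSON, SMILE, YAML or CBOR)
+ // keeps the xContent round trips format independent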
+
+ /**
+ * test that build() outputs a {@link RescoreSearchContext} that is similar to the one
+ * we would get when parsing the xContent that the test rescore builder renders out
+ */
+ public void testBuildRescoreSearchContext() throws ElasticsearchParseException, IOException {
+ Settings indexSettings = Settings.settingsBuilder()
+ .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
+ IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAsciiOfLengthBetween(1, 10), indexSettings);
+ // shard context will only need indicesQueriesRegistry for building Query objects nested in query rescorer
+ QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, null, null, null, null, indicesQueriesRegistry) {
+ @Override
+ public MappedFieldType fieldMapper(String name) {
+ StringFieldMapper.Builder builder = MapperBuilders.stringField(name);
+ return builder.build(new Mapper.BuilderContext(idxSettings.getSettings(), new ContentPath(1))).fieldType();
+ }
+ };
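+ // the override above resolves any field name to a default string field type, so
+ // randomly generated rescore queries can always be built against this otherwise
+ // empty mock context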
+
+ for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
+ RescoreBuilder<?> rescoreBuilder = randomRescoreBuilder();
+ QueryRescoreContext rescoreContext = (QueryRescoreContext) rescoreBuilder.build(mockShardContext);
+ XContentParser parser = createParser(rescoreBuilder);
+
+ QueryRescoreContext parsedRescoreContext = (QueryRescoreContext) new RescoreParseElement().parseSingleRescoreContext(parser, mockShardContext);
+ assertNotSame(rescoreContext, parsedRescoreContext);
+ assertEquals(rescoreContext.window(), parsedRescoreContext.window());
+ assertEquals(rescoreContext.query(), parsedRescoreContext.query());
+ assertEquals(rescoreContext.queryWeight(), parsedRescoreContext.queryWeight(), Float.MIN_VALUE);
+ assertEquals(rescoreContext.rescoreQueryWeight(), parsedRescoreContext.rescoreQueryWeight(), Float.MIN_VALUE);
+ assertEquals(rescoreContext.scoreMode(), parsedRescoreContext.scoreMode());
+ }
+ }
+
+ /**
+ * test parsing exceptions for incorrect rescorer syntax
+ */
+ public void testUnknownFieldsException() throws IOException {
+ QueryParseContext context = new QueryParseContext(indicesQueriesRegistry);
+ context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY));
+
+ String rescoreElement = "{\n" +
+ " \"window_size\" : 20,\n" +
+ " \"bad_rescorer_name\" : { }\n" +
+ "}\n";
+ prepareContext(context, rescoreElement);
+ try {
+ RescoreBuilder.parseFromXContent(context);
+ fail("expected a parsing exception");
+ } catch (ParsingException e) {
+ assertEquals("rescore doesn't support rescorer with name [bad_rescorer_name]", e.getMessage());
+ }
+
+ rescoreElement = "{\n" +
+ " \"bad_fieldName\" : 20\n" +
+ "}\n";
+ prepareContext(context, rescoreElement);
+ try {
+ RescoreBuilder.parseFromXContent(context);
+ fail("expected a parsing exception");
+ } catch (ParsingException e) {
+ assertEquals("rescore doesn't support [bad_fieldName]", e.getMessage());
+ }
+
+ rescoreElement = "{\n" +
+ " \"window_size\" : 20,\n" +
+ " \"query\" : [ ]\n" +
+ "}\n";
+ prepareContext(context, rescoreElement);
+ try {
+ RescoreBuilder.parseFromXContent(context);
+ fail("expected a parsing exception");
+ } catch (ParsingException e) {
+ assertEquals("unexpected token [START_ARRAY] after [query]", e.getMessage());
+ }
+
+ rescoreElement = "{ }";
+ prepareContext(context, rescoreElement);
+ try {
+ RescoreBuilder.parseFromXContent(context);
+ fail("expected a parsing exception");
+ } catch (ParsingException e) {
+ assertEquals("missing rescore type", e.getMessage());
+ }
+
+ rescoreElement = "{\n" +
+ " \"window_size\" : 20,\n" +
+ " \"query\" : { \"bad_fieldname\" : 1.0 } \n" +
+ "}\n";
+ prepareContext(context, rescoreElement);
+ try {
+ RescoreBuilder.parseFromXContent(context);
+ fail("expected a parsing exception");
+ } catch (IllegalArgumentException e) {
+ assertEquals("[query] unknown field [bad_fieldname], parser not found", e.getMessage());
+ }
+
+ rescoreElement = "{\n" +
+ " \"window_size\" : 20,\n" +
+ " \"query\" : { \"rescore_query\" : { \"unknown_queryname\" : { } } } \n" +
+ "}\n";
+ prepareContext(context, rescoreElement);
+ try {
+ RescoreBuilder.parseFromXContent(context);
+ fail("expected a parsing exception");
+ } catch (ParsingException e) {
+ assertEquals("[query] failed to parse field [rescore_query]", e.getMessage());
+ }
+
+ rescoreElement = "{\n" +
+ " \"window_size\" : 20,\n" +
+ " \"query\" : { \"rescore_query\" : { \"match_all\" : { } } } \n"
+ + "}\n";
+ prepareContext(context, rescoreElement);
+ RescoreBuilder.parseFromXContent(context);
+ }
+
+ /**
+ * create a new parser from the rescorer string representation and reset the context with it
+ */
+ private static void prepareContext(QueryParseContext context, String rescoreElement) throws IOException {
+ XContentParser parser = XContentFactory.xContent(rescoreElement).createParser(rescoreElement);
+ context.reset(parser);
+ // move to the first token; this is where the internal fromXContent expects the parser to be
+ assertTrue(parser.nextToken() == XContentParser.Token.START_OBJECT);
+ }
+
+ private static RescoreBuilder<?> mutate(RescoreBuilder<?> original) throws IOException {
+ RescoreBuilder<?> mutation = serializedCopy(original);
if (randomBoolean()) {
Integer windowSize = original.windowSize();
if (windowSize != null) {
@@ -109,7 +286,7 @@ public class QueryRescoreBuilderTests extends ESTestCase {
mutation.windowSize(randomIntBetween(0, 100));
}
} else {
- QueryRescorer queryRescorer = (QueryRescorer) mutation.rescorer();
+ QueryRescorerBuilder queryRescorer = (QueryRescorerBuilder) mutation;
switch (randomIntBetween(0, 3)) {
case 0:
queryRescorer.setQueryWeight(queryRescorer.getQueryWeight() + 0.1f);
@@ -138,10 +315,10 @@ public class QueryRescoreBuilderTests extends ESTestCase {
/**
* create a random rescore builder that is put under test
*/
- private static RescoreBuilder randomRescoreBuilder() {
+ public static org.elasticsearch.search.rescore.QueryRescorerBuilder randomRescoreBuilder() {
QueryBuilder<MatchAllQueryBuilder> queryBuilder = new MatchAllQueryBuilder().boost(randomFloat()).queryName(randomAsciiOfLength(20));
- org.elasticsearch.search.rescore.RescoreBuilder.QueryRescorer rescorer = new
- org.elasticsearch.search.rescore.RescoreBuilder.QueryRescorer(queryBuilder);
+ org.elasticsearch.search.rescore.QueryRescorerBuilder rescorer = new
+ org.elasticsearch.search.rescore.QueryRescorerBuilder(queryBuilder);
if (randomBoolean()) {
rescorer.setQueryWeight(randomFloat());
}
@@ -151,18 +328,17 @@ public class QueryRescoreBuilderTests extends ESTestCase {
if (randomBoolean()) {
rescorer.setScoreMode(randomFrom(QueryRescoreMode.values()));
}
- RescoreBuilder builder = new RescoreBuilder(rescorer);
if (randomBoolean()) {
- builder.windowSize(randomIntBetween(0, 100));
+ rescorer.windowSize(randomIntBetween(0, 100));
}
- return builder;
+ return rescorer;
}
- private static RescoreBuilder serializedCopy(RescoreBuilder original) throws IOException {
+ private static RescoreBuilder<?> serializedCopy(RescoreBuilder<?> original) throws IOException {
try (BytesStreamOutput output = new BytesStreamOutput()) {
- original.writeTo(output);
+ output.writeRescorer(original);
try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {
- return RescoreBuilder.PROTOYPE.readFrom(in);
+ return in.readRescorer();
}
}
}
diff --git a/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java b/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java
index bb81f28d15..6341be8037 100644
--- a/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java
+++ b/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java
@@ -92,7 +92,7 @@ public class SearchScrollIT extends ESIntegTestCase {
try {
long counter = 0;
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(100l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L));
assertThat(searchResponse.getHits().hits().length, equalTo(35));
for (SearchHit hit : searchResponse.getHits()) {
assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter++));
@@ -102,7 +102,7 @@ public class SearchScrollIT extends ESIntegTestCase {
.setScroll(TimeValue.timeValueMinutes(2))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(100l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L));
assertThat(searchResponse.getHits().hits().length, equalTo(35));
for (SearchHit hit : searchResponse.getHits()) {
assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter++));
@@ -112,7 +112,7 @@ public class SearchScrollIT extends ESIntegTestCase {
.setScroll(TimeValue.timeValueMinutes(2))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(100l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L));
assertThat(searchResponse.getHits().hits().length, equalTo(30));
for (SearchHit hit : searchResponse.getHits()) {
assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter++));
@@ -150,7 +150,7 @@ public class SearchScrollIT extends ESIntegTestCase {
try {
long counter = 0;
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(100l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L));
assertThat(searchResponse.getHits().hits().length, equalTo(3));
for (SearchHit hit : searchResponse.getHits()) {
assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter++));
@@ -161,7 +161,7 @@ public class SearchScrollIT extends ESIntegTestCase {
.setScroll(TimeValue.timeValueMinutes(2))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(100l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L));
assertThat(searchResponse.getHits().hits().length, equalTo(3));
for (SearchHit hit : searchResponse.getHits()) {
assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter++));
@@ -173,7 +173,7 @@ public class SearchScrollIT extends ESIntegTestCase {
.setScroll(TimeValue.timeValueMinutes(2))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(100l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L));
assertThat(searchResponse.getHits().hits().length, equalTo(1));
for (SearchHit hit : searchResponse.getHits()) {
assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter++));
@@ -184,7 +184,7 @@ public class SearchScrollIT extends ESIntegTestCase {
.setScroll(TimeValue.timeValueMinutes(2))
.execute().actionGet();
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(100l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L));
assertThat(searchResponse.getHits().hits().length, equalTo(0));
for (SearchHit hit : searchResponse.getHits()) {
assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter++));
@@ -206,11 +206,11 @@ public class SearchScrollIT extends ESIntegTestCase {
client().admin().indices().prepareRefresh().execute().actionGet();
- assertThat(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(500l));
- assertThat(client().prepareSearch().setSize(0).setQuery(termQuery("message", "test")).execute().actionGet().getHits().totalHits(), equalTo(500l));
- assertThat(client().prepareSearch().setSize(0).setQuery(termQuery("message", "test")).execute().actionGet().getHits().totalHits(), equalTo(500l));
- assertThat(client().prepareSearch().setSize(0).setQuery(termQuery("message", "update")).execute().actionGet().getHits().totalHits(), equalTo(0l));
- assertThat(client().prepareSearch().setSize(0).setQuery(termQuery("message", "update")).execute().actionGet().getHits().totalHits(), equalTo(0l));
+ assertThat(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(500L));
+ assertThat(client().prepareSearch().setSize(0).setQuery(termQuery("message", "test")).execute().actionGet().getHits().totalHits(), equalTo(500L));
+ assertThat(client().prepareSearch().setSize(0).setQuery(termQuery("message", "test")).execute().actionGet().getHits().totalHits(), equalTo(500L));
+ assertThat(client().prepareSearch().setSize(0).setQuery(termQuery("message", "update")).execute().actionGet().getHits().totalHits(), equalTo(0L));
+ assertThat(client().prepareSearch().setSize(0).setQuery(termQuery("message", "update")).execute().actionGet().getHits().totalHits(), equalTo(0L));
SearchResponse searchResponse = client().prepareSearch()
.setQuery(queryStringQuery("user:kimchy"))
@@ -229,11 +229,11 @@ public class SearchScrollIT extends ESIntegTestCase {
} while (searchResponse.getHits().hits().length > 0);
client().admin().indices().prepareRefresh().execute().actionGet();
- assertThat(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(500l));
- assertThat(client().prepareSearch().setSize(0).setQuery(termQuery("message", "test")).execute().actionGet().getHits().totalHits(), equalTo(0l));
- assertThat(client().prepareSearch().setSize(0).setQuery(termQuery("message", "test")).execute().actionGet().getHits().totalHits(), equalTo(0l));
- assertThat(client().prepareSearch().setSize(0).setQuery(termQuery("message", "update")).execute().actionGet().getHits().totalHits(), equalTo(500l));
- assertThat(client().prepareSearch().setSize(0).setQuery(termQuery("message", "update")).execute().actionGet().getHits().totalHits(), equalTo(500l));
+ assertThat(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).execute().actionGet().getHits().totalHits(), equalTo(500L));
+ assertThat(client().prepareSearch().setSize(0).setQuery(termQuery("message", "test")).execute().actionGet().getHits().totalHits(), equalTo(0L));
+ assertThat(client().prepareSearch().setSize(0).setQuery(termQuery("message", "test")).execute().actionGet().getHits().totalHits(), equalTo(0L));
+ assertThat(client().prepareSearch().setSize(0).setQuery(termQuery("message", "update")).execute().actionGet().getHits().totalHits(), equalTo(500L));
+ assertThat(client().prepareSearch().setSize(0).setQuery(termQuery("message", "update")).execute().actionGet().getHits().totalHits(), equalTo(500L));
} finally {
clearScroll(searchResponse.getScrollId());
}
@@ -270,13 +270,13 @@ public class SearchScrollIT extends ESIntegTestCase {
long counter1 = 0;
long counter2 = 0;
- assertThat(searchResponse1.getHits().getTotalHits(), equalTo(100l));
+ assertThat(searchResponse1.getHits().getTotalHits(), equalTo(100L));
assertThat(searchResponse1.getHits().hits().length, equalTo(35));
for (SearchHit hit : searchResponse1.getHits()) {
assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter1++));
}
- assertThat(searchResponse2.getHits().getTotalHits(), equalTo(100l));
+ assertThat(searchResponse2.getHits().getTotalHits(), equalTo(100L));
assertThat(searchResponse2.getHits().hits().length, equalTo(35));
for (SearchHit hit : searchResponse2.getHits()) {
assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter2++));
@@ -290,13 +290,13 @@ public class SearchScrollIT extends ESIntegTestCase {
.setScroll(TimeValue.timeValueMinutes(2))
.execute().actionGet();
- assertThat(searchResponse1.getHits().getTotalHits(), equalTo(100l));
+ assertThat(searchResponse1.getHits().getTotalHits(), equalTo(100L));
assertThat(searchResponse1.getHits().hits().length, equalTo(35));
for (SearchHit hit : searchResponse1.getHits()) {
assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter1++));
}
- assertThat(searchResponse2.getHits().getTotalHits(), equalTo(100l));
+ assertThat(searchResponse2.getHits().getTotalHits(), equalTo(100L));
assertThat(searchResponse2.getHits().hits().length, equalTo(35));
for (SearchHit hit : searchResponse2.getHits()) {
assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter2++));
@@ -381,13 +381,13 @@ public class SearchScrollIT extends ESIntegTestCase {
long counter1 = 0;
long counter2 = 0;
- assertThat(searchResponse1.getHits().getTotalHits(), equalTo(100l));
+ assertThat(searchResponse1.getHits().getTotalHits(), equalTo(100L));
assertThat(searchResponse1.getHits().hits().length, equalTo(35));
for (SearchHit hit : searchResponse1.getHits()) {
assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter1++));
}
- assertThat(searchResponse2.getHits().getTotalHits(), equalTo(100l));
+ assertThat(searchResponse2.getHits().getTotalHits(), equalTo(100L));
assertThat(searchResponse2.getHits().hits().length, equalTo(35));
for (SearchHit hit : searchResponse2.getHits()) {
assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter2++));
@@ -401,13 +401,13 @@ public class SearchScrollIT extends ESIntegTestCase {
.setScroll(TimeValue.timeValueMinutes(2))
.execute().actionGet();
- assertThat(searchResponse1.getHits().getTotalHits(), equalTo(100l));
+ assertThat(searchResponse1.getHits().getTotalHits(), equalTo(100L));
assertThat(searchResponse1.getHits().hits().length, equalTo(35));
for (SearchHit hit : searchResponse1.getHits()) {
assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter1++));
}
- assertThat(searchResponse2.getHits().getTotalHits(), equalTo(100l));
+ assertThat(searchResponse2.getHits().getTotalHits(), equalTo(100L));
assertThat(searchResponse2.getHits().hits().length, equalTo(35));
for (SearchHit hit : searchResponse2.getHits()) {
assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter2++));
@@ -439,7 +439,7 @@ public class SearchScrollIT extends ESIntegTestCase {
SearchResponse response = builder.execute().actionGet();
try {
- ElasticsearchAssertions.assertHitCount(response, 1l);
+ ElasticsearchAssertions.assertHitCount(response, 1L);
} finally {
String scrollId = response.getScrollId();
if (scrollId != null) {
@@ -586,7 +586,7 @@ public class SearchScrollIT extends ESIntegTestCase {
.addSort("field", SortOrder.ASC)
.execute().actionGet();
long counter = 0;
- assertThat(searchResponse.getHits().getTotalHits(), equalTo(100l));
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L));
assertThat(searchResponse.getHits().hits().length, equalTo(35));
for (SearchHit hit : searchResponse.getHits()) {
assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter++));
diff --git a/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java b/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java
index 20cf8596c4..2a42ec3530 100644
--- a/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java
+++ b/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java
@@ -84,7 +84,7 @@ public class SearchScrollWithFailingNodesIT extends ESIntegTestCase {
.get();
assertAllSuccessful(searchResponse);
} while (searchResponse.getHits().hits().length > 0);
- assertThat(numHits, equalTo(100l));
+ assertThat(numHits, equalTo(100L));
clearScroll("_all");
internalCluster().stopRandomNonMasterNode();
@@ -104,7 +104,7 @@ public class SearchScrollWithFailingNodesIT extends ESIntegTestCase {
.get();
assertThat(searchResponse.getSuccessfulShards(), equalTo(numberOfSuccessfulShards));
} while (searchResponse.getHits().hits().length > 0);
- assertThat(numHits, greaterThan(0l));
+ assertThat(numHits, greaterThan(0L));
clearScroll(searchResponse.getScrollId());
}
diff --git a/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java
new file mode 100644
index 0000000000..7de0c15001
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java
@@ -0,0 +1,252 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.searchafter;
+
+import org.elasticsearch.common.ParseFieldMatcher;
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.text.Text;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.index.query.MatchAllQueryParser;
+import org.elasticsearch.index.query.QueryParseContext;
+import org.elasticsearch.indices.query.IndicesQueriesRegistry;
+import org.elasticsearch.test.ESTestCase;
+import org.hamcrest.Matchers;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+import java.io.IOException;
+import java.util.Collections;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class SearchAfterBuilderTests extends ESTestCase {
+ private static final int NUMBER_OF_TESTBUILDERS = 20;
+ private static NamedWriteableRegistry namedWriteableRegistry;
+ private static IndicesQueriesRegistry indicesQueriesRegistry;
+
+ /**
+ * setup for the whole base test class
+ */
+ @BeforeClass
+ public static void init() {
+ namedWriteableRegistry = new NamedWriteableRegistry();
+ indicesQueriesRegistry = new IndicesQueriesRegistry(Settings.settingsBuilder().build(),
+ Collections.singletonMap("match_all", new MatchAllQueryParser()));
+ }
+
+ @AfterClass
+ public static void afterClass() throws Exception {
+ namedWriteableRegistry = null;
+ indicesQueriesRegistry = null;
+ }
+
+ private SearchAfterBuilder randomSearchFromBuilder() throws IOException {
+ int numSearchFrom = randomIntBetween(1, 10);
+ SearchAfterBuilder searchAfterBuilder = new SearchAfterBuilder();
+ Object[] values = new Object[numSearchFrom];
+ for (int i = 0; i < numSearchFrom; i++) {
+ int branch = randomInt(9);
+ switch (branch) {
+ case 0:
+ values[i] = randomInt();
+ break;
+ case 1:
+ values[i] = randomFloat();
+ break;
+ case 2:
+ values[i] = randomLong();
+ break;
+ case 3:
+ values[i] = randomDouble();
+ break;
+ case 4:
+ values[i] = randomAsciiOfLengthBetween(5, 20);
+ break;
+ case 5:
+ values[i] = randomBoolean();
+ break;
+ case 6:
+ values[i] = randomByte();
+ break;
+ case 7:
+ values[i] = randomShort();
+ break;
+ case 8:
+ values[i] = new Text(randomAsciiOfLengthBetween(5, 20));
+ break;
+ case 9:
+ values[i] = null;
+ break;
+ }
+ }
+ searchAfterBuilder.setSortValues(values);
+ return searchAfterBuilder;
+ }
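+ // nulls are included on purpose: sort values may legitimately be null, which the
+ // builder has to round-trip (see also testWithNullStrings in SearchAfterIT)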
+
+ // We build a json version of the search_after first in order to
+ // ensure that every number type remains the same before/after xcontent (de)serialization.
+ // In production this is not a problem because the final type of each field value is extracted from the associated sort field.
+ // This little trick ensures that equals and hashcode are the same when using the xcontent serialization.
+ private SearchAfterBuilder randomJsonSearchFromBuilder() throws IOException {
+ int numSearchAfter = randomIntBetween(1, 10);
+ XContentBuilder jsonBuilder = XContentFactory.jsonBuilder();
+ jsonBuilder.startObject();
+ jsonBuilder.startArray("search_after");
+ for (int i = 0; i < numSearchAfter; i++) {
+ int branch = randomInt(9);
+ switch (branch) {
+ case 0:
+ jsonBuilder.value(randomInt());
+ break;
+ case 1:
+ jsonBuilder.value(randomFloat());
+ break;
+ case 2:
+ jsonBuilder.value(randomLong());
+ break;
+ case 3:
+ jsonBuilder.value(randomDouble());
+ break;
+ case 4:
+ jsonBuilder.value(randomAsciiOfLengthBetween(5, 20));
+ break;
+ case 5:
+ jsonBuilder.value(randomBoolean());
+ break;
+ case 6:
+ jsonBuilder.value(randomByte());
+ break;
+ case 7:
+ jsonBuilder.value(randomShort());
+ break;
+ case 8:
+ jsonBuilder.value(new Text(randomAsciiOfLengthBetween(5, 20)));
+ break;
+ case 9:
+ jsonBuilder.nullValue();
+ break;
+ }
+ }
+ jsonBuilder.endArray();
+ jsonBuilder.endObject();
+ XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(jsonBuilder.bytes());
+ parser.nextToken();
+ parser.nextToken();
+ parser.nextToken();
+ return SearchAfterBuilder.PROTOTYPE.fromXContent(parser, null);
+ }
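+ // For illustration (hypothetical values, not part of the test): a Long written as 2L
+ // typically comes back from a JSON parser as an Integer when it fits into an int, so
+ // an instance built from the original Long values would not equal one built from the
+ // parsed values. Generating the expected instance from JSON, as above, sidesteps this.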
+
+ private static SearchAfterBuilder serializedCopy(SearchAfterBuilder original) throws IOException {
+ try (BytesStreamOutput output = new BytesStreamOutput()) {
+ original.writeTo(output);
+ try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {
+ return SearchAfterBuilder.PROTOTYPE.readFrom(in);
+ }
+ }
+ }
+
+ public void testSerialization() throws Exception {
+ for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
+ SearchAfterBuilder original = randomSearchFromBuilder();
+ SearchAfterBuilder deserialized = serializedCopy(original);
+ assertEquals(deserialized, original);
+ assertEquals(deserialized.hashCode(), original.hashCode());
+ assertNotSame(deserialized, original);
+ }
+ }
+
+ public void testEqualsAndHashcode() throws Exception {
+ for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
+ SearchAfterBuilder firstBuilder = randomSearchFromBuilder();
+ assertFalse("searchFrom is equal to null", firstBuilder.equals(null));
+ assertFalse("searchFrom is equal to incompatible type", firstBuilder.equals(""));
+ assertTrue("searchFrom is not equal to self", firstBuilder.equals(firstBuilder));
+ assertThat("same searchFrom's hashcode returns different values if called multiple times", firstBuilder.hashCode(),
+ equalTo(firstBuilder.hashCode()));
+
+ SearchAfterBuilder secondBuilder = serializedCopy(firstBuilder);
+ assertTrue("searchFrom is not equal to self", secondBuilder.equals(secondBuilder));
+ assertTrue("searchFrom is not equal to its copy", firstBuilder.equals(secondBuilder));
+ assertTrue("equals is not symmetric", secondBuilder.equals(firstBuilder));
+ assertThat("searchFrom copy's hashcode is different from original hashcode", secondBuilder.hashCode(), equalTo(firstBuilder.hashCode()));
+
+ SearchAfterBuilder thirdBuilder = serializedCopy(secondBuilder);
+ assertTrue("searchFrom is not equal to self", thirdBuilder.equals(thirdBuilder));
+ assertTrue("searchFrom is not equal to its copy", secondBuilder.equals(thirdBuilder));
+ assertThat("searchFrom copy's hashcode is different from original hashcode", secondBuilder.hashCode(), equalTo(thirdBuilder.hashCode()));
+ assertTrue("equals is not transitive", firstBuilder.equals(thirdBuilder));
+ assertThat("searchFrom copy's hashcode is different from original hashcode", firstBuilder.hashCode(), equalTo(thirdBuilder.hashCode()));
+ assertTrue("searchFrom is not symmetric", thirdBuilder.equals(secondBuilder));
+ assertTrue("searchFrom is not symmetric", thirdBuilder.equals(firstBuilder));
+ }
+ }
+
+ public void testFromXContent() throws Exception {
+ QueryParseContext context = new QueryParseContext(indicesQueriesRegistry);
+ context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY));
+ for (int runs = 0; runs < 20; runs++) {
+ SearchAfterBuilder searchAfterBuilder = randomJsonSearchFromBuilder();
+ XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
+ if (randomBoolean()) {
+ builder.prettyPrint();
+ }
+ builder.startObject();
+ searchAfterBuilder.innerToXContent(builder);
+ builder.endObject();
+ XContentParser parser = XContentHelper.createParser(builder.bytes());
+ context.reset(parser);
+ parser.nextToken();
+ parser.nextToken();
+ parser.nextToken();
+ SearchAfterBuilder secondSearchAfterBuilder = SearchAfterBuilder.PROTOTYPE.fromXContent(parser, null);
+ assertNotSame(searchAfterBuilder, secondSearchAfterBuilder);
+ assertEquals(searchAfterBuilder, secondSearchAfterBuilder);
+ assertEquals(searchAfterBuilder.hashCode(), secondSearchAfterBuilder.hashCode());
+ }
+ }
+
+ public void testWithNullArray() throws Exception {
+ SearchAfterBuilder builder = new SearchAfterBuilder();
+ try {
+ builder.setSortValues(null);
+ fail("Should fail on null array.");
+ } catch (NullPointerException e) {
+ assertThat(e.getMessage(), Matchers.equalTo("Values cannot be null."));
+ }
+ }
+
+ public void testWithEmptyArray() throws Exception {
+ SearchAfterBuilder builder = new SearchAfterBuilder();
+ try {
+ builder.setSortValues(new Object[0]);
+ fail("Should fail on empty array.");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), Matchers.equalTo("Values must contains at least one value."));
+ }
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java
new file mode 100644
index 0000000000..e4ac3b728e
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java
@@ -0,0 +1,332 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.searchafter;
+
+import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
+import org.elasticsearch.action.index.IndexRequestBuilder;
+import org.elasticsearch.action.search.SearchPhaseExecutionException;
+import org.elasticsearch.action.search.SearchRequestBuilder;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.text.Text;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.search.SearchContextException;
+import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.sort.SortOrder;
+import org.elasticsearch.test.ESIntegTestCase;
+import org.elasticsearch.transport.RemoteTransportException;
+import org.hamcrest.Matchers;
+
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.Collections;
+import java.util.Arrays;
+import java.util.concurrent.ExecutionException;
+
+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
+import static org.hamcrest.Matchers.equalTo;
+
+public class SearchAfterIT extends ESIntegTestCase {
+ private static final String INDEX_NAME = "test";
+ private static final String TYPE_NAME = "type1";
+ private static final int NUM_DOCS = 100;
+
+ public void testShouldFail() throws Exception {
+ createIndex("test");
+ ensureGreen();
+ indexRandom(true, client().prepareIndex("test", "type1", "0").setSource("field1", 0, "field2", "toto"));
+ try {
+ client().prepareSearch("test")
+ .addSort("field1", SortOrder.ASC)
+ .setQuery(matchAllQuery())
+ .searchAfter(new Object[]{0})
+ .setScroll("1m")
+ .get();
+
+ fail("Should fail because search_after cannot be used with scroll.");
+ } catch (SearchPhaseExecutionException e) {
+ assertThat(e.getCause().getClass(), Matchers.equalTo(RemoteTransportException.class));
+ assertThat(e.getCause().getCause().getClass(), Matchers.equalTo(SearchContextException.class));
+ assertThat(e.getCause().getCause().getMessage(), Matchers.equalTo("`search_after` cannot be used in a scroll context."));
+ }
+ try {
+ client().prepareSearch("test")
+ .addSort("field1", SortOrder.ASC)
+ .setQuery(matchAllQuery())
+ .searchAfter(new Object[]{0})
+ .setFrom(10)
+ .get();
+
+ fail("Should fail because search_after cannot be used with from > 0.");
+ } catch (SearchPhaseExecutionException e) {
+ assertThat(e.getCause().getClass(), Matchers.equalTo(RemoteTransportException.class));
+ assertThat(e.getCause().getCause().getClass(), Matchers.equalTo(SearchContextException.class));
+ assertThat(e.getCause().getCause().getMessage(), Matchers.equalTo("`from` parameter must be set to 0 when `search_after` is used."));
+ }
+
+ try {
+ client().prepareSearch("test")
+ .setQuery(matchAllQuery())
+ .searchAfter(new Object[]{0.75f})
+ .get();
+
+ fail("Should fail because search_after on the score alone is not allowed.");
+ } catch (SearchPhaseExecutionException e) {
+ assertThat(e.getCause().getClass(), Matchers.equalTo(RemoteTransportException.class));
+ assertThat(e.getCause().getCause().getClass(), Matchers.equalTo(IllegalArgumentException.class));
+ assertThat(e.getCause().getCause().getMessage(), Matchers.equalTo("Sort must contain at least one field."));
+ }
+
+ try {
+ client().prepareSearch("test")
+ .addSort("field2", SortOrder.DESC)
+ .addSort("field1", SortOrder.ASC)
+ .setQuery(matchAllQuery())
+ .searchAfter(new Object[]{1})
+ .get();
+ fail("Should fail because the search_after size differs from the number of sort fields.");
+ } catch (SearchPhaseExecutionException e) {
+ assertThat(e.getCause().getClass(), Matchers.equalTo(RemoteTransportException.class));
+ assertThat(e.getCause().getCause().getClass(), Matchers.equalTo(IllegalArgumentException.class));
+ assertThat(e.getCause().getCause().getMessage(), Matchers.equalTo("search_after has 1 value(s) but sort has 2."));
+ }
+
+ try {
+ client().prepareSearch("test")
+ .setQuery(matchAllQuery())
+ .addSort("field1", SortOrder.ASC)
+ .searchAfter(new Object[]{1, 2})
+ .get();
+ fail("Should fail because the search_after size differs from the number of sort fields.");
+ } catch (SearchPhaseExecutionException e) {
+ assertThat(e.getCause().getClass(), Matchers.equalTo(RemoteTransportException.class));
+ assertThat(e.getCause().getCause().getClass(), Matchers.equalTo(IllegalArgumentException.class));
+ assertThat(e.getCause().getCause().getMessage(), Matchers.equalTo("search_after has 2 value(s) but sort has 1."));
+ }
+
+ try {
+ client().prepareSearch("test")
+ .setQuery(matchAllQuery())
+ .addSort("field1", SortOrder.ASC)
+ .searchAfter(new Object[]{"toto"})
+ .get();
+
+ fail("Should fail because the search_after value cannot be parsed for the sort field.");
+ } catch (SearchPhaseExecutionException e) {
+ assertThat(e.getCause().getClass(), Matchers.equalTo(RemoteTransportException.class));
+ assertThat(e.getCause().getCause().getClass(), Matchers.equalTo(IllegalArgumentException.class));
+ assertThat(e.getCause().getCause().getMessage(), Matchers.equalTo("Failed to parse search_after value for field [field1]."));
+ }
+ }
+
+ public void testWithNullStrings() throws ExecutionException, InterruptedException {
+ createIndex("test");
+ ensureGreen();
+ indexRandom(true,
+ client().prepareIndex("test", "type1", "0").setSource("field1", 0),
+ client().prepareIndex("test", "type1", "1").setSource("field1", 100, "field2", "toto"));
+ SearchResponse searchResponse = client().prepareSearch("test")
+ .addSort("field1", SortOrder.ASC)
+ .addSort("field2", SortOrder.ASC)
+ .setQuery(matchAllQuery())
+ .searchAfter(new Object[]{0, null})
+ .get();
+ assertThat(searchResponse.getHits().getTotalHits(), Matchers.equalTo(2L));
+ assertThat(searchResponse.getHits().getHits().length, Matchers.equalTo(1));
+ assertThat(searchResponse.getHits().getHits()[0].sourceAsMap().get("field1"), Matchers.equalTo(100));
+ assertThat(searchResponse.getHits().getHits()[0].sourceAsMap().get("field2"), Matchers.equalTo("toto"));
+ }
+
+ public void testWithSimpleTypes() throws Exception {
+ int numFields = randomInt(20) + 1;
+ int[] types = new int[numFields - 1];
+ for (int i = 0; i < numFields - 1; i++) {
+ types[i] = randomInt(6);
+ }
+ List<List> documents = new ArrayList<>();
+ for (int i = 0; i < NUM_DOCS; i++) {
+ List values = new ArrayList<>();
+ for (int type : types) {
+ switch (type) {
+ case 0:
+ values.add(randomBoolean());
+ break;
+ case 1:
+ values.add(randomByte());
+ break;
+ case 2:
+ values.add(randomShort());
+ break;
+ case 3:
+ values.add(randomInt());
+ break;
+ case 4:
+ values.add(randomFloat());
+ break;
+ case 5:
+ values.add(randomDouble());
+ break;
+ case 6:
+ values.add(new Text(randomAsciiOfLengthBetween(5, 20)));
+ break;
+ }
+ }
+ values.add(new Text(Strings.randomBase64UUID()));
+ documents.add(values);
+ }
+ int reqSize = randomInt(NUM_DOCS - 1);
+ if (reqSize == 0) {
+ reqSize = 1;
+ }
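+ // keep the request size positive: the paging loop in assertSearchFromWithSortValues
+ // reads the sort values of the last hit of each page, so every page must return at
+ // least one hit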
+ assertSearchFromWithSortValues(INDEX_NAME, TYPE_NAME, documents, reqSize);
+ }
+
+ private static class ListComparator implements Comparator<List> {
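+ // orders shorter lists before longer ones, otherwise compares element by element;
+ // every element is expected to implement Comparable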
+ @Override
+ public int compare(List o1, List o2) {
+ if (o1.size() > o2.size()) {
+ return 1;
+ }
+
+ if (o2.size() > o1.size()) {
+ return -1;
+ }
+
+ for (int i = 0; i < o1.size(); i++) {
+ if (!(o1.get(i) instanceof Comparable)) {
+ throw new RuntimeException(o1.get(i).getClass() + " is not comparable");
+ }
+ Object cmp1 = o1.get(i);
+ Object cmp2 = o2.get(i);
+ int cmp = ((Comparable)cmp1).compareTo(cmp2);
+ if (cmp != 0) {
+ return cmp;
+ }
+ }
+ return 0;
+ }
+ }
+ private final ListComparator LST_COMPARATOR = new ListComparator();
+
+ private void assertSearchFromWithSortValues(String indexName, String typeName, List<List> documents, int reqSize) throws Exception {
+ int numFields = documents.get(0).size();
+ {
+ createIndexMappingsFromObjectType(indexName, typeName, documents.get(0));
+ List<IndexRequestBuilder> requests = new ArrayList<>();
+ for (int i = 0; i < documents.size(); i++) {
+ XContentBuilder builder = jsonBuilder();
+ assertThat(documents.get(i).size(), Matchers.equalTo(numFields));
+ builder.startObject();
+ for (int j = 0; j < numFields; j++) {
+ builder.field("field" + Integer.toString(j), documents.get(i).get(j));
+ }
+ builder.endObject();
+ requests.add(client().prepareIndex(indexName, typeName, Integer.toString(i)).setSource(builder));
+ }
+ indexRandom(true, requests);
+ }
+
+ Collections.sort(documents, LST_COMPARATOR);
+ int offset = 0;
+ Object[] sortValues = null;
+ while (offset < documents.size()) {
+ SearchRequestBuilder req = client().prepareSearch(indexName);
+ for (int i = 0; i < documents.get(0).size(); i++) {
+ req.addSort("field" + Integer.toString(i), SortOrder.ASC);
+ }
+ req.setQuery(matchAllQuery()).setSize(reqSize);
+ if (sortValues != null) {
+ req.searchAfter(sortValues);
+ }
+ SearchResponse searchResponse = req.get();
+ for (SearchHit hit : searchResponse.getHits()) {
+ List toCompare = convertSortValues(documents.get(offset++));
+ assertThat(LST_COMPARATOR.compare(toCompare, Arrays.asList(hit.sortValues())), equalTo(0));
+ }
+ sortValues = searchResponse.getHits().hits()[searchResponse.getHits().hits().length-1].getSortValues();
+ }
+ }
+
+ private void createIndexMappingsFromObjectType(String indexName, String typeName, List<Object> types) {
+ CreateIndexRequestBuilder indexRequestBuilder = client().admin().indices().prepareCreate(indexName);
+ List<String> mappings = new ArrayList<>();
+ int numFields = types.size();
+ for (int i = 0; i < numFields; i++) {
+ Class<?> type = types.get(i).getClass();
+ if (type == Integer.class) {
+ mappings.add("field" + Integer.toString(i));
+ mappings.add("type=integer");
+ } else if (type == Long.class) {
+ mappings.add("field" + Integer.toString(i));
+ mappings.add("type=long");
+ } else if (type == Float.class) {
+ mappings.add("field" + Integer.toString(i));
+ mappings.add("type=float");
+ } else if (type == Double.class) {
+ mappings.add("field" + Integer.toString(i));
+ mappings.add("type=double");
+ } else if (type == Byte.class) {
+ mappings.add("field" + Integer.toString(i));
+ mappings.add("type=byte");
+ } else if (type == Short.class) {
+ mappings.add("field" + Integer.toString(i));
+ mappings.add("type=short");
+ } else if (type == Boolean.class) {
+ mappings.add("field" + Integer.toString(i));
+ mappings.add("type=boolean");
+ } else if (types.get(i) instanceof Text) {
+ mappings.add("field" + Integer.toString(i));
+ mappings.add("type=string,index=not_analyzed");
+ } else {
+ fail("Can't match type [" + type + "]");
+ }
+ }
+ indexRequestBuilder.addMapping(typeName, mappings.toArray()).get();
+ ensureGreen();
+ }
+
+ // Convert Integer, Short, Byte and Boolean to Long in order to match the conversion done
+ // by the internal hits when populating the sort values.
+ private List<Object> convertSortValues(List<Object> sortValues) {
+ List<Object> converted = new ArrayList<>();
+ for (int i = 0; i < sortValues.size(); i++) {
+ Object from = sortValues.get(i);
+ if (from instanceof Integer) {
+ converted.add(((Integer) from).longValue());
+ } else if (from instanceof Short) {
+ converted.add(((Short) from).longValue());
+ } else if (from instanceof Byte) {
+ converted.add(((Byte) from).longValue());
+ } else if (from instanceof Boolean) {
+ boolean b = (boolean) from;
+ if (b) {
+ converted.add(1L);
+ } else {
+ converted.add(0L);
+ }
+ } else {
+ converted.add(from);
+ }
+ }
+ return converted;
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java b/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java
index d14ea50838..6c10a1c8ae 100644
--- a/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java
@@ -85,7 +85,7 @@ public class SimpleSearchIT extends ESIntegTestCase {
}
SearchResponse searchResponse = client().prepareSearch().setQuery(QueryBuilders.matchAllQuery()).setPreference(randomPreference).get();
- assertHitCount(searchResponse, 6l);
+ assertHitCount(searchResponse, 6L);
}
}
@@ -106,7 +106,7 @@ public class SimpleSearchIT extends ESIntegTestCase {
.setQuery(boolQuery().must(rangeQuery("from").lt("192.168.0.7")).must(rangeQuery("to").gt("192.168.0.7")))
.execute().actionGet();
- assertHitCount(search, 1l);
+ assertHitCount(search, 1L);
}
public void testIpCidr() throws Exception {
@@ -138,27 +138,27 @@ public class SimpleSearchIT extends ESIntegTestCase {
search = client().prepareSearch()
.setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.0.1/32")))
.execute().actionGet();
- assertHitCount(search, 1l);
+ assertHitCount(search, 1L);
search = client().prepareSearch()
.setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.0.0/24")))
.execute().actionGet();
- assertHitCount(search, 3l);
+ assertHitCount(search, 3L);
search = client().prepareSearch()
.setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.0.0.0/8")))
.execute().actionGet();
- assertHitCount(search, 4l);
+ assertHitCount(search, 4L);
search = client().prepareSearch()
.setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "0.0.0.0/0")))
.execute().actionGet();
- assertHitCount(search, 4l);
+ assertHitCount(search, 4L);
search = client().prepareSearch()
.setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.1.5/32")))
.execute().actionGet();
- assertHitCount(search, 0l);
+ assertHitCount(search, 0L);
assertFailures(client().prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "0/0/0/0/0"))),
RestStatus.BAD_REQUEST,
@@ -171,17 +171,17 @@ public class SimpleSearchIT extends ESIntegTestCase {
client().prepareIndex("test", "type", "XXX1").setSource("field", "value").setRefresh(true).execute().actionGet();
// id is not indexed, but let's see that we automatically convert the query to use _uid
SearchResponse searchResponse = client().prepareSearch().setQuery(QueryBuilders.termQuery("_id", "XXX1")).execute().actionGet();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch().setQuery(QueryBuilders.queryStringQuery("_id:XXX1")).execute().actionGet();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
// id is not indexed, but we can automatically support prefix as well
searchResponse = client().prepareSearch().setQuery(QueryBuilders.prefixQuery("_id", "XXX")).execute().actionGet();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch().setQuery(QueryBuilders.queryStringQuery("_id:XXX*").lowercaseExpandedTerms(false)).execute().actionGet();
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
}
public void testSimpleDateRange() throws Exception {
@@ -192,22 +192,22 @@ public class SimpleSearchIT extends ESIntegTestCase {
refresh();
SearchResponse searchResponse = client().prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gte("2010-01-03||+2d").lte("2010-01-04||+2d/d")).execute().actionGet();
assertNoFailures(searchResponse);
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
searchResponse = client().prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gte("2010-01-05T02:00").lte("2010-01-06T02:00")).execute().actionGet();
assertNoFailures(searchResponse);
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
searchResponse = client().prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gte("2010-01-05T02:00").lt("2010-01-06T02:00")).execute().actionGet();
assertNoFailures(searchResponse);
- assertHitCount(searchResponse, 1l);
+ assertHitCount(searchResponse, 1L);
searchResponse = client().prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gt("2010-01-05T02:00").lt("2010-01-06T02:00")).execute().actionGet();
assertNoFailures(searchResponse);
- assertHitCount(searchResponse, 0l);
+ assertHitCount(searchResponse, 0L);
searchResponse = client().prepareSearch("test").setQuery(QueryBuilders.queryStringQuery("field:[2010-01-03||+2d TO 2010-01-04||+2d/d]")).execute().actionGet();
- assertHitCount(searchResponse, 2l);
+ assertHitCount(searchResponse, 2L);
}
public void testLocaleDependentDate() throws Exception {
@@ -236,13 +236,13 @@ public class SimpleSearchIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(QueryBuilders.rangeQuery("date_field").gte("Di, 05 Dez 2000 02:55:00 -0800").lte("Do, 07 Dez 2000 00:00:00 -0800"))
.execute().actionGet();
- assertHitCount(searchResponse, 10l);
+ assertHitCount(searchResponse, 10L);
searchResponse = client().prepareSearch("test")
.setQuery(QueryBuilders.rangeQuery("date_field").gte("Di, 05 Dez 2000 02:55:00 -0800").lte("Fr, 08 Dez 2000 00:00:00 -0800"))
.execute().actionGet();
- assertHitCount(searchResponse, 20l);
+ assertHitCount(searchResponse, 20L);
}
}
diff --git a/core/src/test/java/org/elasticsearch/search/sort/SortOrderTests.java b/core/src/test/java/org/elasticsearch/search/sort/SortOrderTests.java
new file mode 100644
index 0000000000..e505ec68e6
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/search/sort/SortOrderTests.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.sort;
+
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.test.ESTestCase;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class SortOrderTests extends ESTestCase {
+
+ /** Check that ordinals remain stable as we rely on them for serialisation. */
+ public void testOrdinalOrder() {
+ assertEquals(0, SortOrder.ASC.ordinal());
+ assertEquals(1, SortOrder.DESC.ordinal());
+ }
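+ // (if a constant were ever added or reordered before DESC, streams written by older
+ // versions would decode to the wrong order; the assertions above guard against that)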
+
+ public void testReadWrite() throws Exception {
+ for (SortOrder unit : SortOrder.values()) {
+ try (BytesStreamOutput out = new BytesStreamOutput()) {
+ unit.writeTo(out);
+ try (StreamInput in = StreamInput.wrap(out.bytes())) {
+ assertThat("Roundtrip serialisation failed.", SortOrder.readOrderFrom(in), equalTo(unit));
+ }
+ }
+ }
+ }
+
+ public void testFromString() {
+ for (SortOrder unit : SortOrder.values()) {
+ assertThat("Roundtrip string parsing failed.", SortOrder.fromString(unit.toString()), equalTo(unit));
+ }
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java
index fac7f71446..c910e46cbd 100644
--- a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java
@@ -230,7 +230,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
SuggestResponse suggestResponse = client().suggest(request).get();
assertThat(suggestResponse.getSuccessfulShards(), equalTo(0));
for (ShardOperationFailedException exception : suggestResponse.getShardFailures()) {
- assertThat(exception.reason(), containsString("ParsingException[[completion] failed to parse field [payload]]; nested: IllegalStateException[expected value but got [START_OBJECT]]"));
+ assertThat(exception.reason(), containsString("ParsingException[[completion] failed to parse field [payload]]; nested: IllegalStateException[Can't get text on a START_OBJECT"));
}
}
@@ -293,12 +293,12 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
Map<String, List<Object>> firstPayload = options.get(0).getPayload();
assertThat(firstPayload.keySet(), containsInAnyOrder("title", "count"));
assertThat((String) firstPayload.get("title").get(0), equalTo("title2"));
- assertThat((long) firstPayload.get("count").get(0), equalTo(2l));
+ assertThat((long) firstPayload.get("count").get(0), equalTo(2L));
Map<String, List<Object>> secondPayload = options.get(1).getPayload();
assertThat(secondPayload.keySet(), containsInAnyOrder("title", "count"));
assertThat((String) secondPayload.get("title").get(0), equalTo("title1"));
- assertThat((long) secondPayload.get("count").get(0), equalTo(1l));
+ assertThat((long) secondPayload.get("count").get(0), equalTo(1L));
}
public void testSuggestWithPayload() throws Exception {
@@ -378,7 +378,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
PercolateResponse response = client().preparePercolate().setIndices(INDEX).setDocumentType(TYPE)
.setGetRequest(Requests.getRequest(INDEX).type(TYPE).id("1"))
.execute().actionGet();
- assertThat(response.getCount(), equalTo(1l));
+ assertThat(response.getCount(), equalTo(1L));
}
public void testThatWeightsAreWorking() throws Exception {
@@ -438,7 +438,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
CompletionSuggestion.Entry.Option prefixOption = (CompletionSuggestion.Entry.Option) option;
assertThat(prefixOption.getText().string(), equalTo("testing"));
- assertThat((long) prefixOption.getScore(), equalTo(10l));
+ assertThat((long) prefixOption.getScore(), equalTo(10L));
}
@@ -1045,7 +1045,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
refresh();
assertSuggestions("b");
- assertThat(2l, equalTo(client().prepareSearch(INDEX).setSize(0).get().getHits().totalHits()));
+ assertThat(2L, equalTo(client().prepareSearch(INDEX).setSize(0).get().getHits().totalHits()));
for (IndexShardSegments seg : client().admin().indices().prepareSegments().get().getIndices().get(INDEX)) {
ShardSegments[] shards = seg.getShards();
for (ShardSegments shardSegments : shards) {
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java
index 419316b526..35d495272c 100644
--- a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java
+++ b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java
@@ -54,7 +54,7 @@ public class CustomSuggester extends Suggester<CustomSuggester.CustomSuggestions
@Override
public SuggestContextParser getContextParser() {
- return (parser, mapperService, fieldData, headersContext) -> {
+ return (parser, mapperService, fieldData) -> {
Map<String, Object> options = parser.map();
CustomSuggestionsContext suggestionContext = new CustomSuggestionsContext(CustomSuggester.this, options);
suggestionContext.setField((String) options.get("field"));
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java
new file mode 100644
index 0000000000..02826b9a7e
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java
@@ -0,0 +1,323 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.suggest.phrase;
+
+import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.elasticsearch.common.ParseFieldMatcher;
+import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.analysis.AnalysisService;
+import org.elasticsearch.index.analysis.NamedAnalyzer;
+import org.elasticsearch.index.mapper.ContentPath;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.Mapper;
+import org.elasticsearch.index.mapper.MapperBuilders;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.mapper.core.StringFieldMapper;
+import org.elasticsearch.index.mapper.core.StringFieldMapper.StringFieldType;
+import org.elasticsearch.index.query.QueryParseContext;
+import org.elasticsearch.index.query.QueryShardContext;
+import org.elasticsearch.indices.IndicesModule;
+import org.elasticsearch.indices.query.IndicesQueriesRegistry;
+import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.IndexSettingsModule;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.function.Consumer;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class DirectCandidateGeneratorTests extends ESTestCase {
+
+ private static final int NUMBER_OF_RUNS = 20;
+
+ /**
+ * Test serialization and deserialization of the generator
+ */
+ public void testSerialization() throws IOException {
+ for (int runs = 0; runs < NUMBER_OF_RUNS; runs++) {
+ DirectCandidateGeneratorBuilder original = randomCandidateGenerator();
+ DirectCandidateGeneratorBuilder deserialized = serializedCopy(original);
+ assertEquals(deserialized, original);
+ assertEquals(deserialized.hashCode(), original.hashCode());
+ assertNotSame(deserialized, original);
+ }
+ }
+
+ /**
+ * Test equality and hashCode properties
+ */
+ public void testEqualsAndHashcode() throws IOException {
+ for (int runs = 0; runs < NUMBER_OF_RUNS; runs++) {
+ DirectCandidateGeneratorBuilder first = randomCandidateGenerator();
+ assertFalse("generator is equal to null", first.equals(null));
+ assertFalse("generator is equal to incompatible type", first.equals(""));
+ assertTrue("generator is not equal to self", first.equals(first));
+ assertThat("same generator's hashcode returns different values if called multiple times", first.hashCode(),
+ equalTo(first.hashCode()));
+
+ DirectCandidateGeneratorBuilder second = serializedCopy(first);
+ assertTrue("generator is not equal to self", second.equals(second));
+ assertTrue("generator is not equal to its copy", first.equals(second));
+ assertTrue("equals is not symmetric", second.equals(first));
+ assertThat("generator copy's hashcode is different from original hashcode", second.hashCode(), equalTo(first.hashCode()));
+
+ DirectCandidateGeneratorBuilder third = serializedCopy(second);
+ assertTrue("generator is not equal to self", third.equals(third));
+ assertTrue("generator is not equal to its copy", second.equals(third));
+ assertThat("generator copy's hashcode is different from original hashcode", second.hashCode(), equalTo(third.hashCode()));
+ assertTrue("equals is not transitive", first.equals(third));
+ assertThat("generator copy's hashcode is different from original hashcode", first.hashCode(), equalTo(third.hashCode()));
+ assertTrue("equals is not symmetric", third.equals(second));
+ assertTrue("equals is not symmetric", third.equals(first));
+
+ // test for non-equality, check that all fields are covered by changing one by one
+ first = new DirectCandidateGeneratorBuilder("aaa");
+ assertEquals(first, serializedCopy(first));
+ second = new DirectCandidateGeneratorBuilder("bbb");
+ assertNotEquals(first, second);
+ assertNotEquals(first.accuracy(0.1f), serializedCopy(first).accuracy(0.2f));
+ assertNotEquals(first.maxEdits(1), serializedCopy(first).maxEdits(2));
+ assertNotEquals(first.maxInspections(1), serializedCopy(first).maxInspections(2));
+ assertNotEquals(first.maxTermFreq(0.1f), serializedCopy(first).maxTermFreq(0.2f));
+ assertNotEquals(first.minDocFreq(0.1f), serializedCopy(first).minDocFreq(0.2f));
+ assertNotEquals(first.minWordLength(1), serializedCopy(first).minWordLength(2));
+ assertNotEquals(first.postFilter("postFilter"), serializedCopy(first).postFilter("postFilter_other"));
+ assertNotEquals(first.preFilter("preFilter"), serializedCopy(first).preFilter("preFilter_other"));
+ assertNotEquals(first.prefixLength(1), serializedCopy(first).prefixLength(2));
+ assertNotEquals(first.size(1), serializedCopy(first).size(2));
+ assertNotEquals(first.sort("score"), serializedCopy(first).sort("frequency"));
+ assertNotEquals(first.stringDistance("levenstein"), serializedCopy(first).stringDistance("ngram"));
+ assertNotEquals(first.suggestMode("missing"), serializedCopy(first).suggestMode("always"));
+ }
+ }
+
+ /**
+ * creates random candidate generator, renders it to xContent and back to new instance that should be equal to original
+ */
+ public void testFromXContent() throws IOException {
+ QueryParseContext context = new QueryParseContext(new IndicesQueriesRegistry(Settings.EMPTY, Collections.emptyMap()));
+ context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY));
+ for (int runs = 0; runs < NUMBER_OF_RUNS; runs++) {
+ DirectCandidateGeneratorBuilder generator = randomCandidateGenerator();
+ XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
+ if (randomBoolean()) {
+ builder.prettyPrint();
+ }
+ generator.toXContent(builder, ToXContent.EMPTY_PARAMS);
+
+ XContentParser parser = XContentHelper.createParser(builder.bytes());
+ context.reset(parser);
+ parser.nextToken();
+ DirectCandidateGeneratorBuilder secondGenerator = DirectCandidateGeneratorBuilder.PROTOTYPE
+ .fromXContent(context);
+ assertNotSame(generator, secondGenerator);
+ assertEquals(generator, secondGenerator);
+ assertEquals(generator.hashCode(), secondGenerator.hashCode());
+ }
+ }
+
+ /**
+ * test that build() outputs a {@link DirectCandidateGenerator} that is similar to the one
+ * we would get when parsing the xContent the test generator is rendering out
+ */
+ public void testBuild() throws IOException {
+
+ IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAsciiOfLengthBetween(1, 10), Settings.EMPTY);
+
+ AnalysisService mockAnalysisService = new AnalysisService(idxSettings, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()) {
+ @Override
+ public NamedAnalyzer analyzer(String name) {
+ return new NamedAnalyzer(name, new WhitespaceAnalyzer());
+ }
+ };
+
+ MapperService mockMapperService = new MapperService(idxSettings, mockAnalysisService, null, new IndicesModule().getMapperRegistry(), null) {
+ @Override
+ public MappedFieldType fullName(String fullName) {
+ return new StringFieldType();
+ }
+ };
+
+ QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, null, mockMapperService, null, null, null) {
+ @Override
+ public MappedFieldType fieldMapper(String name) {
+ StringFieldMapper.Builder builder = MapperBuilders.stringField(name);
+ return builder.build(new Mapper.BuilderContext(idxSettings.getSettings(), new ContentPath(1))).fieldType();
+ }
+ };
+ mockShardContext.setMapUnmappedFieldAsString(true);
+
+ for (int runs = 0; runs < NUMBER_OF_RUNS; runs++) {
+ DirectCandidateGeneratorBuilder generator = randomCandidateGenerator();
+ // first, build via DirectCandidateGenerator#build()
+ DirectCandidateGenerator contextGenerator = generator.build(mockShardContext);
+
+ // second, render random test generator to xContent and parse using
+ // PhraseSuggestParser
+ XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
+ if (randomBoolean()) {
+ builder.prettyPrint();
+ }
+ generator.toXContent(builder, ToXContent.EMPTY_PARAMS);
+ XContentParser parser = XContentHelper.createParser(builder.bytes());
+
+ DirectCandidateGenerator secondGenerator = PhraseSuggestParser.parseCandidateGenerator(parser,
+ mockShardContext.getMapperService(), mockShardContext.parseFieldMatcher());
+
+ // compare their properties
+ assertNotSame(contextGenerator, secondGenerator);
+ assertEquals(contextGenerator.field(), secondGenerator.field());
+ assertEquals(contextGenerator.accuracy(), secondGenerator.accuracy(), Float.MIN_VALUE);
+ assertEquals(contextGenerator.maxTermFreq(), secondGenerator.maxTermFreq(), Float.MIN_VALUE);
+ assertEquals(contextGenerator.maxEdits(), secondGenerator.maxEdits());
+ assertEquals(contextGenerator.maxInspections(), secondGenerator.maxInspections());
+ assertEquals(contextGenerator.minDocFreq(), secondGenerator.minDocFreq(), Float.MIN_VALUE);
+ assertEquals(contextGenerator.minWordLength(), secondGenerator.minWordLength());
+ assertEquals(contextGenerator.postFilter(), secondGenerator.postFilter());
+ assertEquals(contextGenerator.prefixLength(), secondGenerator.prefixLength());
+ assertEquals(contextGenerator.preFilter(), secondGenerator.preFilter());
+ assertEquals(contextGenerator.sort(), secondGenerator.sort());
+ assertEquals(contextGenerator.size(), secondGenerator.size());
+ // some instances of StringDistance don't support equals, just checking the class here
+ assertEquals(contextGenerator.stringDistance().getClass(), secondGenerator.stringDistance().getClass());
+ assertEquals(contextGenerator.suggestMode(), secondGenerator.suggestMode());
+ }
+ }
+
+ /**
+ * test that bad xContent throws exception
+ */
+ public void testIllegalXContent() throws IOException {
+ QueryParseContext context = new QueryParseContext(new IndicesQueriesRegistry(Settings.EMPTY, Collections.emptyMap()));
+ context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY));
+
+ // test missing fieldname
+ String directGenerator = "{ }";
+ XContentParser parser = XContentFactory.xContent(directGenerator).createParser(directGenerator);
+
+ context.reset(parser);
+ try {
+ DirectCandidateGeneratorBuilder.PROTOTYPE.fromXContent(context);
+ fail("expected an exception");
+ } catch (IllegalArgumentException e) {
+ assertEquals("[direct_generator] expects exactly one field parameter, but found []", e.getMessage());
+ }
+
+ // test two fieldnames
+ directGenerator = "{ \"field\" : \"f1\", \"field\" : \"f2\" }";
+ parser = XContentFactory.xContent(directGenerator).createParser(directGenerator);
+
+ context.reset(parser);
+ try {
+ DirectCandidateGeneratorBuilder.PROTOTYPE.fromXContent(context);
+ fail("expected an exception");
+ } catch (IllegalArgumentException e) {
+ assertEquals("[direct_generator] expects exactly one field parameter, but found [f2, f1]", e.getMessage());
+ }
+
+ // test unknown field
+ directGenerator = "{ \"unknown_param\" : \"f1\" }";
+ parser = XContentFactory.xContent(directGenerator).createParser(directGenerator);
+
+ context.reset(parser);
+ try {
+ DirectCandidateGeneratorBuilder.PROTOTYPE.fromXContent(context);
+ fail("expected an exception");
+ } catch (IllegalArgumentException e) {
+ assertEquals("[direct_generator] unknown field [unknown_param], parser not found", e.getMessage());
+ }
+
+ // test bad value for field (e.g. size expects an int)
+ directGenerator = "{ \"size\" : \"xxl\" }";
+ parser = XContentFactory.xContent(directGenerator).createParser(directGenerator);
+
+ context.reset(parser);
+ try {
+ DirectCandidateGeneratorBuilder.PROTOTYPE.fromXContent(context);
+ fail("expected an exception");
+ } catch (ParsingException e) {
+ assertEquals("[direct_generator] failed to parse field [size]", e.getMessage());
+ }
+
+ // test unexpected token
+ directGenerator = "{ \"size\" : [ \"xxl\" ] }";
+ parser = XContentFactory.xContent(directGenerator).createParser(directGenerator);
+
+ context.reset(parser);
+ try {
+ DirectCandidateGeneratorBuilder.PROTOTYPE.fromXContent(context);
+ fail("expected an exception");
+ } catch (IllegalArgumentException e) {
+ assertEquals("[direct_generator] size doesn't support values of type: START_ARRAY", e.getMessage());
+ }
+ }
+
+ /**
+ * create random {@link DirectCandidateGeneratorBuilder}
+ */
+ public static DirectCandidateGeneratorBuilder randomCandidateGenerator() {
+ DirectCandidateGeneratorBuilder generator = new DirectCandidateGeneratorBuilder(randomAsciiOfLength(10));
+ maybeSet(generator::accuracy, randomFloat());
+ maybeSet(generator::maxEdits, randomIntBetween(1, 2));
+ maybeSet(generator::maxInspections, randomIntBetween(1, 20));
+ maybeSet(generator::maxTermFreq, randomFloat());
+ maybeSet(generator::minDocFreq, randomFloat());
+ maybeSet(generator::minWordLength, randomIntBetween(1, 20));
+ maybeSet(generator::prefixLength, randomIntBetween(1, 20));
+ maybeSet(generator::preFilter, randomAsciiOfLengthBetween(1, 20));
+ maybeSet(generator::postFilter, randomAsciiOfLengthBetween(1, 20));
+ maybeSet(generator::size, randomIntBetween(1, 20));
+ maybeSet(generator::sort, randomFrom("score", "frequency"));
+ maybeSet(generator::stringDistance, randomFrom("internal", "damerau_levenshtein", "levenstein", "jarowinkler", "ngram"));
+ maybeSet(generator::suggestMode, randomFrom("missing", "popular", "always"));
+ return generator;
+ }
+
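+ /** Randomly decides whether to set the given value on the consumer, so generated builders cover optional fields both set and unset. */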
+ private static <T> void maybeSet(Consumer<T> consumer, T value) {
+ if (randomBoolean()) {
+ consumer.accept(value);
+ }
+ }
+
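+ /** Creates a copy of the given generator by serialising it to a stream and reading it back. */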
+ private static DirectCandidateGeneratorBuilder serializedCopy(DirectCandidateGeneratorBuilder original) throws IOException {
+ try (BytesStreamOutput output = new BytesStreamOutput()) {
+ original.writeTo(output);
+ try (StreamInput in = StreamInput.wrap(output.bytes())) {
+ return DirectCandidateGeneratorBuilder.PROTOTYPE.readFrom(in);
+ }
+ }
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/LaplaceModelTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/LaplaceModelTests.java
new file mode 100644
index 0000000000..87ad654e0c
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/LaplaceModelTests.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.suggest.phrase;
+
+import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.Laplace;
+import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.SmoothingModel;
+
+import static org.hamcrest.Matchers.instanceOf;
+
+public class LaplaceModelTests extends SmoothingModelTestCase {
+
+ @Override
+ protected SmoothingModel createTestModel() {
+ return new Laplace(randomDoubleBetween(0.0, 10.0, false));
+ }
+
+ /**
+ * mutate the given model so the returned smoothing model is different
+ */
+ @Override
+ protected Laplace createMutation(SmoothingModel input) {
+ Laplace original = (Laplace) input;
+ return new Laplace(original.getAlpha() + 0.1);
+ }
+
+ @Override
+ void assertWordScorer(WordScorer wordScorer, SmoothingModel input) {
+ Laplace model = (Laplace) input;
+ assertThat(wordScorer, instanceOf(LaplaceScorer.class));
+ assertEquals(model.getAlpha(), ((LaplaceScorer) wordScorer).alpha(), Double.MIN_VALUE);
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/LinearInterpolationModelTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/LinearInterpolationModelTests.java
new file mode 100644
index 0000000000..1112b7a5ed
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/LinearInterpolationModelTests.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.suggest.phrase;
+
+import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.LinearInterpolation;
+import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.SmoothingModel;
+
+import static org.hamcrest.Matchers.instanceOf;
+
+public class LinearInterpolationModelTests extends SmoothingModelTestCase {
+
+ @Override
+ protected SmoothingModel createTestModel() {
+ double trigramLambda = randomDoubleBetween(0.0, 10.0, false);
+ double bigramLambda = randomDoubleBetween(0.0, 10.0, false);
+ double unigramLambda = randomDoubleBetween(0.0, 10.0, false);
+ // normalize so parameters sum to 1
+ double sum = trigramLambda + bigramLambda + unigramLambda;
+ return new LinearInterpolation(trigramLambda / sum, bigramLambda / sum, unigramLambda / sum);
+ }
+
+ /**
+ * mutate the given model so the returned smoothing model is different
+ */
+ @Override
+ protected LinearInterpolation createMutation(SmoothingModel input) {
+ LinearInterpolation original = (LinearInterpolation) input;
+ // swap two values to permute the original lambda values
+ switch (randomIntBetween(0, 2)) {
+ case 0:
+ // swap first two
+ return new LinearInterpolation(original.getBigramLambda(), original.getTrigramLambda(), original.getUnigramLambda());
+ case 1:
+ // swap last two
+ return new LinearInterpolation(original.getTrigramLambda(), original.getUnigramLambda(), original.getBigramLambda());
+ case 2:
+ default:
+ // swap first and last
+ return new LinearInterpolation(original.getUnigramLambda(), original.getBigramLambda(), original.getTrigramLambda());
+ }
+ }
+
+ @Override
+ void assertWordScorer(WordScorer wordScorer, SmoothingModel in) {
+ LinearInterpolation testModel = (LinearInterpolation) in;
+ assertThat(wordScorer, instanceOf(LinearInterpoatingScorer.class));
+ LinearInterpoatingScorer testScorer = (LinearInterpoatingScorer) wordScorer;
+ assertEquals(testModel.getTrigramLambda(), testScorer.trigramLambda(), 1e-15);
+ assertEquals(testModel.getBigramLambda(), testScorer.bigramLambda(), 1e-15);
+ assertEquals(testModel.getUnigramLambda(), testScorer.unigramLambda(), 1e-15);
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java
new file mode 100644
index 0000000000..e4a8ae72b9
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java
@@ -0,0 +1,196 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.suggest.phrase;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.analysis.miscellaneous.PerFieldAnalyzerWrapper;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.MultiFields;
+import org.apache.lucene.store.RAMDirectory;
+import org.elasticsearch.common.ParseFieldMatcher;
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.lucene.BytesRefs;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.query.QueryParseContext;
+import org.elasticsearch.indices.query.IndicesQueriesRegistry;
+import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.Laplace;
+import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.LinearInterpolation;
+import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.SmoothingModel;
+import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.StupidBackoff;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.not;
+
+public abstract class SmoothingModelTestCase extends ESTestCase {
+
+ private static NamedWriteableRegistry namedWriteableRegistry;
+
+ /**
+ * setup for the whole base test class
+ */
+ @BeforeClass
+ public static void init() {
+ if (namedWriteableRegistry == null) {
+ namedWriteableRegistry = new NamedWriteableRegistry();
+ namedWriteableRegistry.registerPrototype(SmoothingModel.class, Laplace.PROTOTYPE);
+ namedWriteableRegistry.registerPrototype(SmoothingModel.class, LinearInterpolation.PROTOTYPE);
+ namedWriteableRegistry.registerPrototype(SmoothingModel.class, StupidBackoff.PROTOTYPE);
+ }
+ }
+
+ @AfterClass
+ public static void afterClass() throws Exception {
+ namedWriteableRegistry = null;
+ }
+
+ /**
+ * create random model that is put under test
+ */
+ protected abstract SmoothingModel createTestModel();
+
+ /**
+ * mutate the given model so the returned smoothing model is different
+ */
+ protected abstract SmoothingModel createMutation(SmoothingModel original) throws IOException;
+
+ /**
+ * Test that creates new smoothing model from a random test smoothing model and checks both for equality
+ */
+ public void testFromXContent() throws IOException {
+ QueryParseContext context = new QueryParseContext(new IndicesQueriesRegistry(Settings.EMPTY, Collections.emptyMap()));
+ context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY));
+
+ SmoothingModel testModel = createTestModel();
+ XContentBuilder contentBuilder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
+ if (randomBoolean()) {
+ contentBuilder.prettyPrint();
+ }
+ contentBuilder.startObject();
+ testModel.innerToXContent(contentBuilder, ToXContent.EMPTY_PARAMS);
+ contentBuilder.endObject();
+ XContentParser parser = XContentHelper.createParser(contentBuilder.bytes());
+ context.reset(parser);
+ parser.nextToken(); // go to start token, real parsing would do that in the outer element parser
+ SmoothingModel prototype = (SmoothingModel) namedWriteableRegistry.getPrototype(SmoothingModel.class,
+ testModel.getWriteableName());
+ SmoothingModel parsedModel = prototype.fromXContent(context);
+ assertNotSame(testModel, parsedModel);
+ assertEquals(testModel, parsedModel);
+ assertEquals(testModel.hashCode(), parsedModel.hashCode());
+ }
+
+ /**
+ * Test the WordScorer emitted by the smoothing model
+ */
+ public void testBuildWordScorer() throws IOException {
+ SmoothingModel testModel = createTestModel();
+
+ Map<String, Analyzer> mapping = new HashMap<>();
+ mapping.put("field", new WhitespaceAnalyzer());
+ PerFieldAnalyzerWrapper wrapper = new PerFieldAnalyzerWrapper(new WhitespaceAnalyzer(), mapping);
+ IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(wrapper));
+ Document doc = new Document();
+ doc.add(new Field("field", "someText", TextField.TYPE_NOT_STORED));
+ writer.addDocument(doc);
+ DirectoryReader ir = DirectoryReader.open(writer, false);
+
+ WordScorer wordScorer = testModel.buildWordScorerFactory().newScorer(ir, MultiFields.getTerms(ir, "field"), "field", 0.9d, BytesRefs.toBytesRef(" "));
+ assertWordScorer(wordScorer, testModel);
+ }
+
+ /**
+ * implementation dependant assertions on the wordScorer produced by the smoothing model under test
+ */
+ abstract void assertWordScorer(WordScorer wordScorer, SmoothingModel testModel);
+
+ /**
+ * Test serialization and deserialization of the tested model.
+ */
+ public void testSerialization() throws IOException {
+ SmoothingModel testModel = createTestModel();
+ SmoothingModel deserializedModel = copyModel(testModel);
+ assertEquals(testModel, deserializedModel);
+ assertEquals(testModel.hashCode(), deserializedModel.hashCode());
+ assertNotSame(testModel, deserializedModel);
+ }
+
+ /**
+ * Test equality and hashCode properties
+ */
+ @SuppressWarnings("unchecked")
+ public void testEqualsAndHashcode() throws IOException {
+ SmoothingModel firstModel = createTestModel();
+ assertFalse("smoothing model is equal to null", firstModel.equals(null));
+ assertFalse("smoothing model is equal to incompatible type", firstModel.equals(""));
+ assertTrue("smoothing model is not equal to self", firstModel.equals(firstModel));
+ assertThat("same smoothing model's hashcode returns different values if called multiple times", firstModel.hashCode(),
+ equalTo(firstModel.hashCode()));
+ assertThat("different smoothing models should not be equal", createMutation(firstModel), not(equalTo(firstModel)));
+
+ SmoothingModel secondModel = copyModel(firstModel);
+ assertTrue("smoothing model is not equal to self", secondModel.equals(secondModel));
+ assertTrue("smoothing model is not equal to its copy", firstModel.equals(secondModel));
+ assertTrue("equals is not symmetric", secondModel.equals(firstModel));
+ assertThat("smoothing model copy's hashcode is different from original hashcode", secondModel.hashCode(), equalTo(firstModel.hashCode()));
+
+ SmoothingModel thirdModel = copyModel(secondModel);
+ assertTrue("smoothing model is not equal to self", thirdModel.equals(thirdModel));
+ assertTrue("smoothing model is not equal to its copy", secondModel.equals(thirdModel));
+ assertThat("smoothing model copy's hashcode is different from original hashcode", secondModel.hashCode(), equalTo(thirdModel.hashCode()));
+ assertTrue("equals is not transitive", firstModel.equals(thirdModel));
+ assertThat("smoothing model copy's hashcode is different from original hashcode", firstModel.hashCode(), equalTo(thirdModel.hashCode()));
+ assertTrue("equals is not symmetric", thirdModel.equals(secondModel));
+ assertTrue("equals is not symmetric", thirdModel.equals(firstModel));
+ }
+
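+ /** Copies the given model through transport serialisation, using the registry to resolve the matching prototype. */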
+ static SmoothingModel copyModel(SmoothingModel original) throws IOException {
+ try (BytesStreamOutput output = new BytesStreamOutput()) {
+ original.writeTo(output);
+ try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {
+ SmoothingModel prototype = (SmoothingModel) namedWriteableRegistry.getPrototype(SmoothingModel.class, original.getWriteableName());
+ return prototype.readFrom(in);
+ }
+ }
+ }
+
+}
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/StupidBackoffModelTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/StupidBackoffModelTests.java
new file mode 100644
index 0000000000..c3bd66d2a8
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/StupidBackoffModelTests.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.suggest.phrase;
+
+import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.SmoothingModel;
+import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.StupidBackoff;
+
+import static org.hamcrest.Matchers.instanceOf;
+
+public class StupidBackoffModelTests extends SmoothingModelTestCase {
+
+ @Override
+ protected SmoothingModel createTestModel() {
+ return new StupidBackoff(randomDoubleBetween(0.0, 10.0, false));
+ }
+
+ /**
+ * mutate the given model so the returned smoothing model is different
+ */
+ @Override
+ protected StupidBackoff createMutation(SmoothingModel input) {
+ StupidBackoff original = (StupidBackoff) input;
+ return new StupidBackoff(original.getDiscount() + 0.1);
+ }
+
+ @Override
+ void assertWordScorer(WordScorer wordScorer, SmoothingModel input) {
+ assertThat(wordScorer, instanceOf(StupidBackoffScorer.class));
+ StupidBackoff testModel = (StupidBackoff) input;
+ assertEquals(testModel.getDiscount(), ((StupidBackoffScorer) wordScorer).discount(), Double.MIN_VALUE);
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java b/core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java
index 8912956489..f6fa1fc621 100644
--- a/core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java
+++ b/core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java
@@ -55,7 +55,7 @@ public class SimilarityIT extends ESIntegTestCase {
.put("index.number_of_replicas", 0)
.put("similarity.custom.type", "BM25")
.put("similarity.custom.k1", 2.0f)
- .put("similarity.custom.b", 1.5f)
+ .put("similarity.custom.b", 0.5f)
).execute().actionGet();
client().prepareIndex("test", "type1", "1").setSource("field1", "the quick brown fox jumped over the lazy dog",
@@ -63,11 +63,11 @@ public class SimilarityIT extends ESIntegTestCase {
.setRefresh(true).execute().actionGet();
SearchResponse bm25SearchResponse = client().prepareSearch().setQuery(matchQuery("field1", "quick brown fox")).execute().actionGet();
- assertThat(bm25SearchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(bm25SearchResponse.getHits().totalHits(), equalTo(1L));
float bm25Score = bm25SearchResponse.getHits().hits()[0].score();
SearchResponse defaultSearchResponse = client().prepareSearch().setQuery(matchQuery("field2", "quick brown fox")).execute().actionGet();
- assertThat(defaultSearchResponse.getHits().totalHits(), equalTo(1l));
+ assertThat(defaultSearchResponse.getHits().totalHits(), equalTo(1L));
float defaultScore = defaultSearchResponse.getHits().hits()[0].score();
assertThat(bm25Score, not(equalTo(defaultScore)));
diff --git a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java
index ad554b7628..bd6c253365 100644
--- a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java
+++ b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java
@@ -47,6 +47,7 @@ import org.elasticsearch.discovery.zen.elect.ElectMasterService;
import org.elasticsearch.index.store.IndexStore;
import org.elasticsearch.indices.recovery.RecoveryState;
import org.elasticsearch.indices.ttl.IndicesTTLService;
+import org.elasticsearch.node.Node;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.repositories.RepositoryMissingException;
import org.elasticsearch.rest.RestChannel;
@@ -100,7 +101,7 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest
logger.info("--> start 2 nodes");
Settings nodeSettings = settingsBuilder()
.put("discovery.type", "zen")
- .put(ZenDiscovery.SETTING_PING_TIMEOUT, "200ms")
+ .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "200ms")
.put("discovery.initial_state_timeout", "500ms")
.build();
internalCluster().startNode(nodeSettings);
@@ -609,7 +610,7 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest
internalCluster().startNode();
logger.info("--> start second node");
// Make sure the first node is elected as master
- internalCluster().startNode(settingsBuilder().put("node.master", false));
+ internalCluster().startNode(settingsBuilder().put(Node.NODE_MASTER_SETTING.getKey(), false));
// Register mock repositories
for (int i = 0; i < 5; i++) {
client().admin().cluster().preparePutRepository("test-repo" + i)
@@ -784,8 +785,8 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest
}
public void testMasterShutdownDuringSnapshot() throws Exception {
- Settings masterSettings = settingsBuilder().put("node.data", false).build();
- Settings dataSettings = settingsBuilder().put("node.master", false).build();
+ Settings masterSettings = settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).build();
+ Settings dataSettings = settingsBuilder().put(Node.NODE_MASTER_SETTING.getKey(), false).build();
logger.info("--> starting two master nodes and two data nodes");
internalCluster().startNode(masterSettings);
diff --git a/core/src/test/java/org/elasticsearch/snapshots/FsBlobStoreRepositoryIT.java b/core/src/test/java/org/elasticsearch/snapshots/FsBlobStoreRepositoryIT.java
new file mode 100644
index 0000000000..22a3ecac8e
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/snapshots/FsBlobStoreRepositoryIT.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.snapshots;
+
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.test.ESBlobStoreRepositoryIntegTestCase;
+
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+
+public class FsBlobStoreRepositoryIT extends ESBlobStoreRepositoryIntegTestCase {
+ @Override
+ protected void createTestRepository(String name) {
+ assertAcked(client().admin().cluster().preparePutRepository(name)
+ .setType("fs").setSettings(Settings.settingsBuilder()
+ .put("location", randomRepoPath())
+ .put("compress", randomBoolean())
+ .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES)));
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
index 4cbf436a74..02db67cd15 100644
--- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
+++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
@@ -52,7 +52,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.metadata.MetaDataIndexStateService;
import org.elasticsearch.cluster.metadata.SnapshotId;
-import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.settings.Settings;
@@ -63,7 +62,6 @@ import org.elasticsearch.index.store.IndexStore;
import org.elasticsearch.indices.InvalidIndexNameException;
import org.elasticsearch.repositories.RepositoriesService;
import org.elasticsearch.repositories.RepositoryException;
-import org.elasticsearch.test.junit.annotations.TestLogging;
import java.nio.channels.SeekableByteChannel;
import java.nio.file.Files;
@@ -1540,7 +1538,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas
// Update settings to back to normal
assertAcked(client.admin().indices().prepareUpdateSettings("test-idx").setSettings(Settings.builder()
- .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "none")
+ .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "node")
));
logger.info("--> wait for snapshot to complete");
@@ -1898,9 +1896,9 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas
public ClusterState execute(ClusterState currentState) {
// Simulate orphan snapshot
ImmutableOpenMap.Builder<ShardId, ShardSnapshotStatus> shards = ImmutableOpenMap.builder();
- shards.put(new ShardId("test-idx", 0), new ShardSnapshotStatus("unknown-node", State.ABORTED));
- shards.put(new ShardId("test-idx", 1), new ShardSnapshotStatus("unknown-node", State.ABORTED));
- shards.put(new ShardId("test-idx", 2), new ShardSnapshotStatus("unknown-node", State.ABORTED));
+ shards.put(new ShardId("test-idx", "_na_", 0), new ShardSnapshotStatus("unknown-node", State.ABORTED));
+ shards.put(new ShardId("test-idx", "_na_", 1), new ShardSnapshotStatus("unknown-node", State.ABORTED));
+ shards.put(new ShardId("test-idx", "_na_", 2), new ShardSnapshotStatus("unknown-node", State.ABORTED));
List<Entry> entries = new ArrayList<>();
entries.add(new Entry(new SnapshotId("test-repo", "test-snap"), true, State.ABORTED, Collections.singletonList("test-idx"), System.currentTimeMillis(), shards.build()));
return ClusterState.builder(currentState).putCustom(SnapshotsInProgress.TYPE, new SnapshotsInProgress(Collections.unmodifiableList(entries))).build();
@@ -1932,7 +1930,6 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas
return awaitBusy(() -> client().admin().cluster().prepareHealth(index).execute().actionGet().getRelocatingShards() > 0, timeout.millis(), TimeUnit.MILLISECONDS);
}
- @TestLogging("cluster:DEBUG")
public void testBatchingShardUpdateTask() throws Exception {
final Client client = client();
diff --git a/core/src/test/java/org/elasticsearch/snapshots/mockstore/BlobStoreWrapper.java b/core/src/test/java/org/elasticsearch/snapshots/mockstore/BlobStoreWrapper.java
index 086aac209b..5ac1e82dba 100644
--- a/core/src/test/java/org/elasticsearch/snapshots/mockstore/BlobStoreWrapper.java
+++ b/core/src/test/java/org/elasticsearch/snapshots/mockstore/BlobStoreWrapper.java
@@ -46,7 +46,7 @@ public class BlobStoreWrapper implements BlobStore {
}
@Override
- public void close() {
+ public void close() throws IOException {
delegate.close();
}
diff --git a/core/src/test/java/org/elasticsearch/test/ESBlobStoreContainerTestCase.java b/core/src/test/java/org/elasticsearch/test/ESBlobStoreContainerTestCase.java
new file mode 100644
index 0000000000..291d00a8dd
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/test/ESBlobStoreContainerTestCase.java
@@ -0,0 +1,115 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.test;
+
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.BytesRefBuilder;
+import org.elasticsearch.common.blobstore.BlobContainer;
+import org.elasticsearch.common.blobstore.BlobMetaData;
+import org.elasticsearch.common.blobstore.BlobPath;
+import org.elasticsearch.common.blobstore.BlobStore;
+import org.elasticsearch.common.bytes.BytesArray;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.elasticsearch.test.ESBlobStoreTestCase.writeRandomBlob;
+import static org.elasticsearch.test.ESBlobStoreTestCase.randomBytes;
+import static org.elasticsearch.test.ESBlobStoreTestCase.readBlobFully;
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.notNullValue;
+
+/**
+ * Generic test case for blob store container implementation.
+ * These tests check basic blob store functionality.
+ */
+public abstract class ESBlobStoreContainerTestCase extends ESTestCase {
+
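+ /** Writes a random blob and checks that it can be read back correctly in randomly sized chunks. */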
+ public void testWriteRead() throws IOException {
+ try (final BlobStore store = newBlobStore()) {
+ final BlobContainer container = store.blobContainer(new BlobPath());
+ byte[] data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16)));
+ container.writeBlob("foobar", new BytesArray(data));
+ try (InputStream stream = container.readBlob("foobar")) {
+ BytesRefBuilder target = new BytesRefBuilder();
+ while (target.length() < data.length) {
+ byte[] buffer = new byte[scaledRandomIntBetween(1, data.length - target.length())];
+ int offset = scaledRandomIntBetween(0, buffer.length - 1);
+ int read = stream.read(buffer, offset, buffer.length - offset);
+ target.append(new BytesRef(buffer, offset, read));
+ }
+ assertEquals(data.length, target.length());
+ assertArrayEquals(data, Arrays.copyOfRange(target.bytes(), 0, target.length()));
+ }
+ }
+ }
+
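+ /** Checks listing blobs by prefix and moving a blob to a new name. */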
+ public void testMoveAndList() throws IOException {
+ try (final BlobStore store = newBlobStore()) {
+ final BlobContainer container = store.blobContainer(new BlobPath());
+ assertThat(container.listBlobs().size(), equalTo(0));
+ int numberOfFooBlobs = randomIntBetween(0, 10);
+ int numberOfBarBlobs = randomIntBetween(3, 20);
+ Map<String, Long> generatedBlobs = new HashMap<>();
+ for (int i = 0; i < numberOfFooBlobs; i++) {
+ int length = randomIntBetween(10, 100);
+ String name = "foo-" + i + "-";
+ generatedBlobs.put(name, (long) length);
+ writeRandomBlob(container, name, length);
+ }
+ for (int i = 1; i < numberOfBarBlobs; i++) {
+ int length = randomIntBetween(10, 100);
+ String name = "bar-" + i + "-";
+ generatedBlobs.put(name, (long) length);
+ writeRandomBlob(container, name, length);
+ }
+ int length = randomIntBetween(10, 100);
+ String name = "bar-0-";
+ generatedBlobs.put(name, (long) length);
+ byte[] data = writeRandomBlob(container, name, length);
+
+ Map<String, BlobMetaData> blobs = container.listBlobs();
+ assertThat(blobs.size(), equalTo(numberOfFooBlobs + numberOfBarBlobs));
+ for (Map.Entry<String, Long> generated : generatedBlobs.entrySet()) {
+ BlobMetaData blobMetaData = blobs.get(generated.getKey());
+ assertThat(generated.getKey(), blobMetaData, notNullValue());
+ assertThat(blobMetaData.name(), equalTo(generated.getKey()));
+ assertThat(blobMetaData.length(), equalTo(generated.getValue()));
+ }
+
+ assertThat(container.listBlobsByPrefix("foo-").size(), equalTo(numberOfFooBlobs));
+ assertThat(container.listBlobsByPrefix("bar-").size(), equalTo(numberOfBarBlobs));
+ assertThat(container.listBlobsByPrefix("baz-").size(), equalTo(0));
+
+ String newName = "bar-new";
+ // Move to a new location
+ container.move(name, newName);
+ assertThat(container.listBlobsByPrefix(name).size(), equalTo(0));
+ blobs = container.listBlobsByPrefix(newName);
+ assertThat(blobs.size(), equalTo(1));
+ assertThat(blobs.get(newName).length(), equalTo(generatedBlobs.get(name)));
+ assertThat(data, equalTo(readBlobFully(container, newName, length)));
+ }
+ }
+
+ protected abstract BlobStore newBlobStore() throws IOException;
+}
diff --git a/core/src/test/java/org/elasticsearch/test/ESBlobStoreRepositoryIntegTestCase.java b/core/src/test/java/org/elasticsearch/test/ESBlobStoreRepositoryIntegTestCase.java
new file mode 100644
index 0000000000..dc49683de6
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/test/ESBlobStoreRepositoryIntegTestCase.java
@@ -0,0 +1,197 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.test;
+
+import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequestBuilder;
+import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
+import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequestBuilder;
+import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse;
+import org.elasticsearch.action.index.IndexRequestBuilder;
+
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Set;
+import java.util.concurrent.ExecutionException;
+
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+
+/**
+ * Basic integration tests for blob-based repository validation.
+ */
+public abstract class ESBlobStoreRepositoryIntegTestCase extends ESIntegTestCase {
+
+ protected abstract void createTestRepository(String name);
+
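+ /** Creates random indices, snapshots them, then randomly deletes, modifies or closes them and verifies that the restore brings the documents back. */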
+ public void testSnapshotAndRestore() throws Exception {
+ String repoName = randomAsciiName();
+ logger.info("--> creating repository {}", repoName);
+ createTestRepository(repoName);
+ int indexCount = randomIntBetween(1, 5);
+ int[] docCounts = new int[indexCount];
+ String[] indexNames = generateRandomNames(indexCount);
+ for (int i = 0; i < indexCount; i++) {
+ logger.info("--> create random index {} with {} records", indexNames[i], docCounts[i]);
+ docCounts[i] = iterations(10, 1000);
+ addRandomDocuments(indexNames[i], docCounts[i]);
+ assertHitCount(client().prepareSearch(indexNames[i]).setSize(0).get(), docCounts[i]);
+ }
+
+ String snapshotName = randomAsciiName();
+ logger.info("--> create snapshot {}:{}", repoName, snapshotName);
+ assertSuccessfulSnapshot(client().admin().cluster().prepareCreateSnapshot(repoName, snapshotName).setWaitForCompletion(true).setIndices(indexNames));
+
+ List<String> deleteIndices = randomSubsetOf(randomIntBetween(0, indexCount), indexNames);
+ if (deleteIndices.size() > 0) {
+ logger.info("--> delete indices {}", deleteIndices);
+ assertAcked(client().admin().indices().prepareDelete(deleteIndices.toArray(new String[deleteIndices.size()])));
+ }
+
+ Set<String> closeIndices = new HashSet<>(Arrays.asList(indexNames));
+ closeIndices.removeAll(deleteIndices);
+
+ if (closeIndices.size() > 0) {
+ for (String index : closeIndices) {
+ if (randomBoolean()) {
+ logger.info("--> add random documents to {}", index);
+ addRandomDocuments(index, randomIntBetween(10, 1000));
+ } else {
+ int docCount = (int) client().prepareSearch(index).setSize(0).get().getHits().totalHits();
+ int deleteCount = randomIntBetween(1, docCount);
+ logger.info("--> delete {} random documents from {}", deleteCount, index);
+ for (int i = 0; i < deleteCount; i++) {
+ int doc = randomIntBetween(0, docCount - 1);
+ client().prepareDelete(index, index, Integer.toString(doc)).get();
+ }
+ client().admin().indices().prepareRefresh(index).get();
+ }
+ }
+
+ logger.info("--> close indices {}", closeIndices);
+ assertAcked(client().admin().indices().prepareClose(closeIndices.toArray(new String[closeIndices.size()])));
+ }
+
+ logger.info("--> restore all indices from the snapshot");
+ assertSuccessfulRestore(client().admin().cluster().prepareRestoreSnapshot(repoName, snapshotName).setWaitForCompletion(true));
+
+ ensureGreen();
+
+ for (int i = 0; i < indexCount; i++) {
+ assertHitCount(client().prepareSearch(indexNames[i]).setSize(0).get(), docCounts[i]);
+ }
+ }
+
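+ /** Takes several snapshots of an evolving index and verifies that restoring any of them rolls the index back to that state. */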
+ public void testMultipleSnapshotAndRollback() throws Exception {
+ String repoName = randomAsciiName();
+ logger.info("--> creating repository {}", repoName);
+ createTestRepository(repoName);
+ int iterationCount = randomIntBetween(2, 5);
+ int[] docCounts = new int[iterationCount];
+ String indexName = randomAsciiName();
+ String snapshotName = randomAsciiName();
+ assertAcked(client().admin().indices().prepareCreate(indexName).get());
+ for (int i = 0; i < iterationCount; i++) {
+ if (randomBoolean() && i > 0) { // don't delete on the first iteration
+ int docCount = docCounts[i - 1];
+ if (docCount > 0) {
+ int deleteCount = randomIntBetween(1, docCount);
+ logger.info("--> delete {} random documents from {}", deleteCount, indexName);
+ for (int j = 0; j < deleteCount; j++) {
+ int doc = randomIntBetween(0, docCount - 1);
+ client().prepareDelete(indexName, indexName, Integer.toString(doc)).get();
+ }
+ client().admin().indices().prepareRefresh(indexName).get();
+ }
+ } else {
+ int docCount = randomIntBetween(10, 1000);
+ logger.info("--> add {} random documents to {}", docCount, indexName);
+ addRandomDocuments(indexName, docCount);
+ }
+ // Check number of documents in this iteration
+ docCounts[i] = (int) client().prepareSearch(indexName).setSize(0).get().getHits().totalHits();
+ logger.info("--> create snapshot {}:{} with {} documents", repoName, snapshotName + "-" + i, docCounts[i]);
+ assertSuccessfulSnapshot(client().admin().cluster().prepareCreateSnapshot(repoName, snapshotName + "-" + i).setWaitForCompletion(true).setIndices(indexName));
+ }
+
+ int restoreOperations = randomIntBetween(1, 3);
+ for (int i = 0; i < restoreOperations; i++) {
+ int iterationToRestore = randomIntBetween(0, iterationCount - 1);
+ logger.info("--> performing restore of the iteration {}", iterationToRestore);
+
+ logger.info("--> close index");
+ assertAcked(client().admin().indices().prepareClose(indexName));
+
+ logger.info("--> restore index from the snapshot");
+ assertSuccessfulRestore(client().admin().cluster().prepareRestoreSnapshot(repoName, snapshotName + "-" + iterationToRestore).setWaitForCompletion(true));
+ ensureGreen();
+ assertHitCount(client().prepareSearch(indexName).setSize(0).get(), docCounts[iterationToRestore]);
+ }
+ }
+
+ protected void addRandomDocuments(String name, int numDocs) throws ExecutionException, InterruptedException {
+ IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[numDocs];
+ for (int i = 0; i < numDocs; i++) {
+ indexRequestBuilders[i] = client().prepareIndex(name, name, Integer.toString(i))
+ .setRouting(randomAsciiOfLength(randomIntBetween(1, 10))).setSource("field", "value");
+ }
+ indexRandom(true, indexRequestBuilders);
+ }
+
+ protected String[] generateRandomNames(int num) {
+ Set<String> names = new HashSet<>();
+ for (int i = 0; i < num; i++) {
+ String name;
+ do {
+ name = randomAsciiName();
+ } while (names.contains(name));
+ names.add(name);
+ }
+ return names.toArray(new String[num]);
+ }
+
+ public static CreateSnapshotResponse assertSuccessfulSnapshot(CreateSnapshotRequestBuilder requestBuilder) {
+ CreateSnapshotResponse response = requestBuilder.get();
+ assertSuccessfulSnapshot(response);
+ return response;
+ }
+
+ public static void assertSuccessfulSnapshot(CreateSnapshotResponse response) {
+ assertThat(response.getSnapshotInfo().successfulShards(), greaterThan(0));
+ assertThat(response.getSnapshotInfo().successfulShards(), equalTo(response.getSnapshotInfo().totalShards()));
+ }
+
+ public static RestoreSnapshotResponse assertSuccessfulRestore(RestoreSnapshotRequestBuilder requestBuilder) {
+ RestoreSnapshotResponse response = requestBuilder.get();
+ assertSuccessfulRestore(response);
+ return response;
+ }
+
+ public static void assertSuccessfulRestore(RestoreSnapshotResponse response) {
+ assertThat(response.getRestoreInfo().successfulShards(), greaterThan(0));
+ assertThat(response.getRestoreInfo().successfulShards(), equalTo(response.getRestoreInfo().totalShards()));
+ }
+
+ public static String randomAsciiName() {
+ return randomAsciiOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT);
+ }
+}
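
The randomized delete loops in these tests draw document ids with replacement, so fewer than deleteCount unique documents may actually be removed; that is why the live doc count is re-read from a search before each snapshot instead of being derived arithmetically. A standalone sketch of the effect (class name hypothetical):

    import java.util.HashSet;
    import java.util.Random;
    import java.util.Set;

    public class DeleteWithReplacementDemo {
        public static void main(String[] args) {
            Random random = new Random();
            int docCount = 100;
            int deleteCount = 50;
            Set<Integer> uniqueIds = new HashSet<>();
            for (int i = 0; i < deleteCount; i++) {
                uniqueIds.add(random.nextInt(docCount)); // the same id may be drawn more than once
            }
            // usually prints fewer unique ids than requested deletions
            System.out.println("requested " + deleteCount + ", unique " + uniqueIds.size());
        }
    }
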
diff --git a/core/src/test/java/org/elasticsearch/test/ESBlobStoreTestCase.java b/core/src/test/java/org/elasticsearch/test/ESBlobStoreTestCase.java
new file mode 100644
index 0000000000..80432d628e
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/test/ESBlobStoreTestCase.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.test;
+
+import org.elasticsearch.common.blobstore.BlobContainer;
+import org.elasticsearch.common.blobstore.BlobPath;
+import org.elasticsearch.common.blobstore.BlobStore;
+import org.elasticsearch.common.bytes.BytesArray;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+
+/**
+ * Generic test case for blob store implementation.
+ * These tests check basic blob store functionality.
+ */
+public abstract class ESBlobStoreTestCase extends ESTestCase {
+
+ public void testContainerCreationAndDeletion() throws IOException {
+ try (final BlobStore store = newBlobStore()) {
+ final BlobContainer containerFoo = store.blobContainer(new BlobPath().add("foo"));
+ final BlobContainer containerBar = store.blobContainer(new BlobPath().add("bar"));
+ byte[] data1 = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16)));
+ byte[] data2 = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16)));
+ containerFoo.writeBlob("test", new BytesArray(data1));
+ containerBar.writeBlob("test", new BytesArray(data2));
+
+ assertArrayEquals(readBlobFully(containerFoo, "test", data1.length), data1);
+ assertArrayEquals(readBlobFully(containerBar, "test", data2.length), data2);
+
+ assertTrue(containerFoo.blobExists("test"));
+ assertTrue(containerBar.blobExists("test"));
+ store.delete(new BlobPath());
+ assertFalse(containerFoo.blobExists("test"));
+ assertFalse(containerBar.blobExists("test"));
+ }
+ }
+
+ public static byte[] writeRandomBlob(BlobContainer container, String name, int length) throws IOException {
+ byte[] data = randomBytes(length);
+ container.writeBlob(name, new BytesArray(data));
+ return data;
+ }
+
+ public static byte[] readBlobFully(BlobContainer container, String name, int length) throws IOException {
+ byte[] data = new byte[length];
+ try (InputStream inputStream = container.readBlob(name)) {
+ // InputStream#read may return fewer bytes than requested, so loop until the blob is fully read
+ int offset = 0;
+ while (offset < length) {
+ int read = inputStream.read(data, offset, length - offset);
+ assertTrue("blob ended after " + offset + " of " + length + " bytes", read >= 0);
+ offset += read;
+ }
+ assertThat(inputStream.read(), equalTo(-1));
+ }
+ return data;
+ }
+
+ public static byte[] randomBytes(int length) {
+ byte[] data = new byte[length];
+ for (int i = 0; i < data.length; i++) {
+ data[i] = (byte) randomInt();
+ }
+ return data;
+ }
+
+ protected abstract BlobStore newBlobStore() throws IOException;
+}
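
ESBlobStoreTestCase leaves only newBlobStore() to implementors. A minimal sketch of a concrete subclass, assuming the filesystem store's (Settings, Path) constructor from this era of the codebase (class name and use of FsBlobStore are illustrative):

    package org.elasticsearch.test;

    import org.elasticsearch.common.blobstore.BlobStore;
    import org.elasticsearch.common.blobstore.fs.FsBlobStore;
    import org.elasticsearch.common.settings.Settings;

    import java.io.IOException;

    public class FsBlobStoreTests extends ESBlobStoreTestCase {
        @Override
        protected BlobStore newBlobStore() throws IOException {
            // createTempDir() comes from the base test infrastructure;
            // the FsBlobStore(Settings, Path) constructor is assumed here
            return new FsBlobStore(Settings.EMPTY, createTempDir());
        }
    }
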
diff --git a/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java b/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java
index 60f1bad608..e36ac66234 100644
--- a/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java
+++ b/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java
@@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.node.Node;
import org.elasticsearch.test.ESIntegTestCase;
@@ -114,7 +115,7 @@ public class SimpleThreadPoolIT extends ESIntegTestCase {
for (String threadName : threadNames) {
// ignore some shared threads we know are created within the same VM, like the shared discovery one
// or the ones that occasionally come up from ESSingleNodeTestCase
- if (threadName.contains("[" + ESSingleNodeTestCase.nodeName() + "]")
+ if (threadName.contains("[node_s_0]") // TODO: this can't possibly be right! single node and integ test are unrelated!
|| threadName.contains("Keep-Alive-Timer")) {
continue;
}
@@ -186,7 +187,7 @@ public class SimpleThreadPoolIT extends ESIntegTestCase {
public void testThreadPoolLeakingThreadsWithTribeNode() {
Settings settings = Settings.builder()
.put("node.name", "thread_pool_leaking_threads_tribe_node")
- .put("path.home", createTempDir())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put("tribe.t1.cluster.name", "non_existing_cluster")
//trigger initialization failure of one of the tribes (doesn't require starting the node)
.put("tribe.t1.plugin.mandatory", "non_existing").build();
diff --git a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java
index e655f45268..c30954f731 100644
--- a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java
+++ b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java
@@ -63,7 +63,7 @@ public class ThreadPoolSerializationTests extends ESTestCase {
ThreadPool.Info newInfo = new ThreadPool.Info();
newInfo.readFrom(input);
- assertThat(newInfo.getQueueSize().singles(), is(10000l));
+ assertThat(newInfo.getQueueSize().singles(), is(10000L));
}
public void testThatNegativeQueueSizesCanBeSerialized() throws Exception {
diff --git a/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java b/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java
index 09653c12e0..01fae3036d 100644
--- a/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java
+++ b/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java
@@ -29,6 +29,7 @@ import org.elasticsearch.threadpool.ThreadPool.Names;
import java.lang.reflect.Field;
import java.util.Arrays;
import java.util.HashSet;
+import java.util.Map;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
@@ -47,6 +48,7 @@ import static org.hamcrest.Matchers.sameInstance;
/**
*/
public class UpdateThreadPoolSettingsTests extends ESTestCase {
+
public void testCorrectThreadPoolTypePermittedInSettings() throws InterruptedException {
String threadPoolName = randomThreadPoolName();
ThreadPool.ThreadPoolType correctThreadPoolType = ThreadPool.THREAD_POOL_TYPES.get(threadPoolName);
@@ -323,7 +325,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase {
try {
Settings nodeSettings = Settings.settingsBuilder()
.put("threadpool." + threadPoolName + ".queue_size", 1000)
- .put("name", "testCachedExecutorType").build();
+ .put("name", "testShutdownNowInterrupts").build();
threadPool = new ThreadPool(nodeSettings);
ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
threadPool.setClusterSettings(clusterSettings);
@@ -377,7 +379,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase {
assertEquals(info.getThreadPoolType(), ThreadPool.ThreadPoolType.FIXED);
assertThat(info.getMin(), equalTo(1));
assertThat(info.getMax(), equalTo(1));
- assertThat(info.getQueueSize().singles(), equalTo(1l));
+ assertThat(info.getQueueSize().singles(), equalTo(1L));
} else {
for (Field field : Names.class.getFields()) {
if (info.getName().equalsIgnoreCase(field.getName())) {
@@ -409,7 +411,7 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase {
foundPool2 = true;
assertThat(info.getMax(), equalTo(10));
assertThat(info.getMin(), equalTo(10));
- assertThat(info.getQueueSize().singles(), equalTo(1l));
+ assertThat(info.getQueueSize().singles(), equalTo(1L));
assertEquals(info.getThreadPoolType(), ThreadPool.ThreadPoolType.FIXED);
} else {
for (Field field : Names.class.getFields()) {
@@ -452,11 +454,10 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase {
Set<ThreadPool.ThreadPoolType> set = new HashSet<>();
set.addAll(Arrays.asList(ThreadPool.ThreadPoolType.values()));
set.remove(ThreadPool.THREAD_POOL_TYPES.get(threadPoolName));
- ThreadPool.ThreadPoolType invalidThreadPoolType = randomFrom(set.toArray(new ThreadPool.ThreadPoolType[set.size()]));
- return invalidThreadPoolType;
+ return randomFrom(set.toArray(new ThreadPool.ThreadPoolType[set.size()]));
}
private String randomThreadPool(ThreadPool.ThreadPoolType type) {
- return randomFrom(ThreadPool.THREAD_POOL_TYPES.entrySet().stream().filter(t -> t.getValue().equals(type)).map(t -> t.getKey()).collect(Collectors.toList()));
+ return randomFrom(ThreadPool.THREAD_POOL_TYPES.entrySet().stream().filter(t -> t.getValue().equals(type)).map(Map.Entry::getKey).collect(Collectors.toList()));
}
}
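
The final hunk replaces a lambda with the equivalent Map.Entry::getKey method reference; the same cleanup in isolation (helper class hypothetical):

    import java.util.List;
    import java.util.Map;
    import java.util.stream.Collectors;

    public final class MethodRefExample {
        private MethodRefExample() {}

        public static List<String> keys(Map<String, ?> map) {
            return map.entrySet().stream()
                    .map(Map.Entry::getKey) // was: .map(t -> t.getKey())
                    .collect(Collectors.toList());
        }
    }
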
diff --git a/core/src/test/java/org/elasticsearch/timestamp/SimpleTimestampIT.java b/core/src/test/java/org/elasticsearch/timestamp/SimpleTimestampIT.java
index 30ed8fe25c..1bb51fab09 100644
--- a/core/src/test/java/org/elasticsearch/timestamp/SimpleTimestampIT.java
+++ b/core/src/test/java/org/elasticsearch/timestamp/SimpleTimestampIT.java
@@ -74,7 +74,7 @@ public class SimpleTimestampIT extends ESIntegTestCase {
getResponse = client().prepareGet("test", "type1", "1").setFields("_timestamp").setRealtime(false).execute().actionGet();
timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue();
- assertThat(timestamp, equalTo(10l));
+ assertThat(timestamp, equalTo(10L));
// verify it's the same timestamp when going to the replica
getResponse = client().prepareGet("test", "type1", "1").setFields("_timestamp").setRealtime(false).execute().actionGet();
assertThat(((Number) getResponse.getField("_timestamp").getValue()).longValue(), equalTo(timestamp));
@@ -84,7 +84,7 @@ public class SimpleTimestampIT extends ESIntegTestCase {
getResponse = client().prepareGet("test", "type1", "1").setFields("_timestamp").setRealtime(false).execute().actionGet();
timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue();
- assertThat(timestamp, equalTo(20l));
+ assertThat(timestamp, equalTo(20L));
// verify it's the same timestamp when going to the replica
getResponse = client().prepareGet("test", "type1", "1").setFields("_timestamp").setRealtime(false).execute().actionGet();
assertThat(((Number) getResponse.getField("_timestamp").getValue()).longValue(), equalTo(timestamp));
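
As elsewhere in this merge, the 10l/20l edits only change the long-literal suffix; both forms compile to the same value, but the lowercase suffix is easy to misread:

    public final class LongLiteralExample {
        static final long CONFUSING = 10l; // easily misread as the int 101
        static final long CLEAR = 10L;     // same value; the uppercase suffix is unambiguous
    }
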
diff --git a/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
index a5b6e08de3..4688daec7d 100644
--- a/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
+++ b/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
@@ -37,6 +37,7 @@ import org.junit.Before;
import java.io.IOException;
import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutionException;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
@@ -204,6 +205,61 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
serviceA.removeHandler("sayHello");
}
+ public void testThreadContext() throws ExecutionException, InterruptedException {
+
+ serviceA.registerRequestHandler("ping_pong", StringMessageRequest::new, ThreadPool.Names.GENERIC, (request, channel) -> {
+ assertEquals("ping_user", threadPool.getThreadContext().getHeader("test.ping.user"));
+ assertNull(threadPool.getThreadContext().getTransient("my_private_context"));
+ try {
+ StringMessageResponse response = new StringMessageResponse("pong");
+ threadPool.getThreadContext().putHeader("test.pong.user", "pong_user");
+ channel.sendResponse(response);
+ } catch (IOException e) {
+ assertThat(e.getMessage(), false, equalTo(true));
+ }
+ });
+ final Object context = new Object();
+ final String executor = randomFrom(ThreadPool.THREAD_POOL_TYPES.keySet().toArray(new String[0]));
+ BaseTransportResponseHandler<StringMessageResponse> baseTransportResponseHandler = new BaseTransportResponseHandler<StringMessageResponse>() {
+ @Override
+ public StringMessageResponse newInstance() {
+ return new StringMessageResponse();
+ }
+
+ @Override
+ public String executor() {
+ return executor;
+ }
+
+ @Override
+ public void handleResponse(StringMessageResponse response) {
+ assertThat("pong", equalTo(response.message));
+ assertEquals("ping_user", threadPool.getThreadContext().getHeader("test.ping.user"));
+ assertNull(threadPool.getThreadContext().getHeader("test.pong.user"));
+ assertSame(context, threadPool.getThreadContext().getTransient("my_private_context"));
+ threadPool.getThreadContext().putHeader("some.temp.header", "booooom");
+ }
+
+ @Override
+ public void handleException(TransportException exp) {
+ assertThat("got exception instead of a response: " + exp.getMessage(), false, equalTo(true));
+ }
+ };
+ StringMessageRequest ping = new StringMessageRequest("ping");
+ threadPool.getThreadContext().putHeader("test.ping.user", "ping_user");
+ threadPool.getThreadContext().putTransient("my_private_context", context);
+
+ TransportFuture<StringMessageResponse> res = serviceB.submitRequest(nodeA, "ping_pong", ping, baseTransportResponseHandler);
+
+ StringMessageResponse message = res.get();
+ assertThat("pong", equalTo(message.message));
+ assertEquals("ping_user", threadPool.getThreadContext().getHeader("test.ping.user"));
+ assertSame(context, threadPool.getThreadContext().getTransient("my_private_context"));
+ assertNull("this header is only visible in the handler context", threadPool.getThreadContext().getHeader("some.temp.header"));
+
+ serviceA.removeHandler("sayHello");
+ }
+
public void testLocalNodeConnection() throws InterruptedException {
assertTrue("serviceA is not connected to nodeA", serviceA.nodeConnected(nodeA));
if (((TransportService) serviceA).getLocalNode() != null) {
@@ -373,7 +429,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
@Override
public void handleException(TransportException exp) {
- assertThat("bad message !!!", equalTo(exp.getCause().getMessage()));
+ assertThat("runtime_exception: bad message !!!", equalTo(exp.getCause().getMessage()));
}
});
@@ -381,7 +437,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
res.txGet();
fail("exception should be thrown");
} catch (Exception e) {
- assertThat(e.getCause().getMessage(), equalTo("bad message !!!"));
+ assertThat(e.getCause().getMessage(), equalTo("runtime_exception: bad message !!!"));
}
serviceA.removeHandler("sayHelloException");
diff --git a/core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java b/core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java
new file mode 100644
index 0000000000..e2ff218a94
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java
@@ -0,0 +1,377 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.transport;
+
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionModule;
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
+import org.elasticsearch.action.get.GetRequest;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.percolate.PercolateResponse;
+import org.elasticsearch.action.search.SearchRequest;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.support.ActionFilter;
+import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.common.inject.AbstractModule;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.inject.Module;
+import org.elasticsearch.common.network.NetworkModule;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.http.HttpServerTransport;
+import org.elasticsearch.index.query.BoolQueryBuilder;
+import org.elasticsearch.index.query.GeoShapeQueryBuilder;
+import org.elasticsearch.index.query.MoreLikeThisQueryBuilder;
+import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item;
+import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.index.query.TermsQueryBuilder;
+import org.elasticsearch.indices.cache.query.terms.TermsLookup;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.rest.RestController;
+import org.elasticsearch.test.ESIntegTestCase;
+import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
+import org.elasticsearch.test.rest.client.http.HttpRequestBuilder;
+import org.elasticsearch.test.rest.client.http.HttpResponse;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.junit.After;
+import org.junit.Before;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.concurrent.CopyOnWriteArrayList;
+
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
+import static org.elasticsearch.common.settings.Settings.settingsBuilder;
+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.elasticsearch.rest.RestStatus.OK;
+import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasStatus;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.is;
+
+@ClusterScope(scope = SUITE)
+public class ContextAndHeaderTransportIT extends ESIntegTestCase {
+ private static final List<RequestAndHeaders> requests = new CopyOnWriteArrayList<>();
+ private String randomHeaderKey = randomAsciiOfLength(10);
+ private String randomHeaderValue = randomAsciiOfLength(20);
+ private String queryIndex = "query-" + randomAsciiOfLength(10).toLowerCase(Locale.ROOT);
+ private String lookupIndex = "lookup-" + randomAsciiOfLength(10).toLowerCase(Locale.ROOT);
+
+ @Override
+ protected Settings nodeSettings(int nodeOrdinal) {
+ return settingsBuilder()
+ .put(super.nodeSettings(nodeOrdinal))
+ .put("script.indexed", "true")
+ .put(NetworkModule.HTTP_ENABLED.getKey(), true)
+ .build();
+ }
+
+ @Override
+ protected Collection<Class<? extends Plugin>> nodePlugins() {
+ return pluginList(ActionLoggingPlugin.class);
+ }
+
+ @Before
+ public void createIndices() throws Exception {
+ String mapping = jsonBuilder().startObject().startObject("type")
+ .startObject("properties")
+ .startObject("location").field("type", "geo_shape").endObject()
+ .startObject("name").field("type", "string").endObject()
+ .endObject()
+ .endObject().endObject().string();
+
+ Settings settings = settingsBuilder()
+ .put(indexSettings())
+ .put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable.
+ .build();
+ assertAcked(transportClient().admin().indices().prepareCreate(lookupIndex)
+ .setSettings(settings).addMapping("type", mapping));
+ assertAcked(transportClient().admin().indices().prepareCreate(queryIndex)
+ .setSettings(settings).addMapping("type", mapping));
+ ensureGreen(queryIndex, lookupIndex);
+ requests.clear();
+ }
+
+ @After
+ public void checkAllRequestsContainHeaders() {
+ assertRequestsContainHeader(IndexRequest.class);
+ assertRequestsContainHeader(RefreshRequest.class);
+ }
+
+ public void testThatTermsLookupGetRequestContainsContextAndHeaders() throws Exception {
+ transportClient().prepareIndex(lookupIndex, "type", "1")
+ .setSource(jsonBuilder().startObject().array("followers", "foo", "bar", "baz").endObject()).get();
+ transportClient().prepareIndex(queryIndex, "type", "1")
+ .setSource(jsonBuilder().startObject().field("username", "foo").endObject()).get();
+ transportClient().admin().indices().prepareRefresh(queryIndex, lookupIndex).get();
+
+ TermsQueryBuilder termsLookupFilterBuilder = QueryBuilders.termsLookupQuery("username", new TermsLookup(lookupIndex, "type", "1", "followers"));
+ BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery().must(QueryBuilders.matchAllQuery()).must(termsLookupFilterBuilder);
+
+ SearchResponse searchResponse = transportClient()
+ .prepareSearch(queryIndex)
+ .setQuery(queryBuilder)
+ .get();
+ assertNoFailures(searchResponse);
+ assertHitCount(searchResponse, 1);
+
+ assertGetRequestsContainHeaders();
+ }
+
+ public void testThatGeoShapeQueryGetRequestContainsContextAndHeaders() throws Exception {
+ transportClient().prepareIndex(lookupIndex, "type", "1").setSource(jsonBuilder().startObject()
+ .field("name", "Munich Suburban Area")
+ .startObject("location")
+ .field("type", "polygon")
+ .startArray("coordinates").startArray()
+ .startArray().value(11.34).value(48.25).endArray()
+ .startArray().value(11.68).value(48.25).endArray()
+ .startArray().value(11.65).value(48.06).endArray()
+ .startArray().value(11.37).value(48.13).endArray()
+ .startArray().value(11.34).value(48.25).endArray() // close the polygon
+ .endArray().endArray()
+ .endObject()
+ .endObject())
+ .get();
+ // second document
+ transportClient().prepareIndex(queryIndex, "type", "1").setSource(jsonBuilder().startObject()
+ .field("name", "Munich Center")
+ .startObject("location")
+ .field("type", "point")
+ .startArray("coordinates").value(11.57).value(48.13).endArray()
+ .endObject()
+ .endObject())
+ .get();
+ transportClient().admin().indices().prepareRefresh(lookupIndex, queryIndex).get();
+
+ GeoShapeQueryBuilder queryBuilder = QueryBuilders.geoShapeQuery("location", "1", "type")
+ .indexedShapeIndex(lookupIndex)
+ .indexedShapePath("location");
+
+ SearchResponse searchResponse = transportClient()
+ .prepareSearch(queryIndex)
+ .setQuery(queryBuilder)
+ .get();
+ assertNoFailures(searchResponse);
+ assertHitCount(searchResponse, 1);
+ assertThat(requests, hasSize(greaterThan(0)));
+
+ assertGetRequestsContainHeaders();
+ }
+
+ public void testThatMoreLikeThisQueryMultiTermVectorRequestContainsContextAndHeaders() throws Exception {
+ transportClient().prepareIndex(lookupIndex, "type", "1")
+ .setSource(jsonBuilder().startObject().field("name", "Star Wars - The new republic").endObject())
+ .get();
+ transportClient().prepareIndex(queryIndex, "type", "1")
+ .setSource(jsonBuilder().startObject().field("name", "Jar Jar Binks - A horrible mistake").endObject())
+ .get();
+ transportClient().prepareIndex(queryIndex, "type", "2")
+ .setSource(jsonBuilder().startObject().field("name", "Star Wars - Return of the jedi").endObject())
+ .get();
+ transportClient().admin().indices().prepareRefresh(lookupIndex, queryIndex).get();
+
+ MoreLikeThisQueryBuilder moreLikeThisQueryBuilder = QueryBuilders.moreLikeThisQuery(new String[]{"name"}, null,
+ new Item[]{new Item(lookupIndex, "type", "1")})
+ .minTermFreq(1)
+ .minDocFreq(1);
+
+ SearchResponse searchResponse = transportClient()
+ .prepareSearch(queryIndex)
+ .setQuery(moreLikeThisQueryBuilder)
+ .get();
+ assertNoFailures(searchResponse);
+ assertHitCount(searchResponse, 1);
+
+ assertRequestsContainHeader(MultiTermVectorsRequest.class);
+ }
+
+ public void testThatPercolatingExistingDocumentGetRequestContainsContextAndHeaders() throws Exception {
+ Client client = transportClient();
+ client.prepareIndex(lookupIndex, ".percolator", "1")
+ .setSource(jsonBuilder().startObject().startObject("query").startObject("match").field("name", "star wars").endObject().endObject().endObject())
+ .get();
+ client.prepareIndex(lookupIndex, "type", "1")
+ .setSource(jsonBuilder().startObject().field("name", "Star Wars - The new republic").endObject())
+ .get();
+ client.admin().indices().prepareRefresh(lookupIndex).get();
+
+ GetRequest getRequest = client.prepareGet(lookupIndex, "type", "1").request();
+ PercolateResponse response = client.preparePercolate().setDocumentType("type").setGetRequest(getRequest).get();
+ assertThat(response.getCount(), is(1L));
+
+ assertGetRequestsContainHeaders();
+ }
+
+ public void testThatRelevantHttpHeadersBecomeRequestHeaders() throws Exception {
+ String relevantHeaderName = "relevant_" + randomHeaderKey;
+ for (RestController restController : internalCluster().getDataNodeInstances(RestController.class)) {
+ restController.registerRelevantHeaders(relevantHeaderName);
+ }
+
+ CloseableHttpClient httpClient = HttpClients.createDefault();
+ HttpResponse response = new HttpRequestBuilder(httpClient)
+ .httpTransport(internalCluster().getDataNodeInstance(HttpServerTransport.class))
+ .addHeader(randomHeaderKey, randomHeaderValue)
+ .addHeader(relevantHeaderName, randomHeaderValue)
+ .path("/" + queryIndex + "/_search")
+ .execute();
+
+ assertThat(response, hasStatus(OK));
+ List<RequestAndHeaders> searchRequests = getRequests(SearchRequest.class);
+ assertThat(searchRequests, hasSize(greaterThan(0)));
+ for (RequestAndHeaders requestAndHeaders : searchRequests) {
+ assertThat(requestAndHeaders.headers.containsKey(relevantHeaderName), is(true));
+ // this header was never registered as relevant, thus it is not included
+ assertThat(requestAndHeaders.headers.containsKey(randomHeaderKey), is(false));
+ }
+ }
+
+ private List<RequestAndHeaders> getRequests(Class<?> clazz) {
+ List<RequestAndHeaders> results = new ArrayList<>();
+ for (RequestAndHeaders request : requests) {
+ if (request.request.getClass().equals(clazz)) {
+ results.add(request);
+ }
+ }
+
+ return results;
+ }
+
+ private void assertRequestsContainHeader(Class<? extends ActionRequest> clazz) {
+ List<RequestAndHeaders> classRequests = getRequests(clazz);
+ for (RequestAndHeaders request : classRequests) {
+ assertRequestContainsHeader(request.request, request.headers);
+ }
+ }
+
+ private void assertGetRequestsContainHeaders() {
+ assertGetRequestsContainHeaders(this.lookupIndex);
+ }
+
+ private void assertGetRequestsContainHeaders(String index) {
+ List<RequestAndHeaders> getRequests = getRequests(GetRequest.class);
+ assertThat(getRequests, hasSize(greaterThan(0)));
+
+ for (RequestAndHeaders request : getRequests) {
+ if (!((GetRequest)request.request).index().equals(index)) {
+ continue;
+ }
+ assertRequestContainsHeader(request.request, request.headers);
+ }
+ }
+
+ private void assertRequestContainsHeader(ActionRequest request, Map<String, String> context) {
+ String msg = String.format(Locale.ROOT, "Expected header %s to be in request %s", randomHeaderKey, request.getClass().getName());
+ if (request instanceof IndexRequest) {
+ IndexRequest indexRequest = (IndexRequest) request;
+ msg = String.format(Locale.ROOT, "Expected header %s to be in index request %s/%s/%s", randomHeaderKey,
+ indexRequest.index(), indexRequest.type(), indexRequest.id());
+ }
+ assertThat(msg, context.containsKey(randomHeaderKey), is(true));
+ assertThat(context.get(randomHeaderKey).toString(), is(randomHeaderValue));
+ }
+
+ /**
+ * a transport client that adds our random header
+ */
+ private Client transportClient() {
+ return internalCluster().transportClient().filterWithHeader(Collections.singletonMap(randomHeaderKey, randomHeaderValue));
+ }
+
+ public static class ActionLoggingPlugin extends Plugin {
+
+ @Override
+ public String name() {
+ return "test-action-logging";
+ }
+
+ @Override
+ public String description() {
+ return "Test action logging";
+ }
+
+ @Override
+ public Collection<Module> nodeModules() {
+ return Collections.<Module>singletonList(new ActionLoggingModule());
+ }
+
+ public void onModule(ActionModule module) {
+ module.registerFilter(LoggingFilter.class);
+ }
+ }
+
+ public static class ActionLoggingModule extends AbstractModule {
+ @Override
+ protected void configure() {
+ bind(LoggingFilter.class).asEagerSingleton();
+ }
+
+ }
+
+ public static class LoggingFilter extends ActionFilter.Simple {
+
+ private final ThreadPool threadPool;
+
+ @Inject
+ public LoggingFilter(Settings settings, ThreadPool pool) {
+ super(settings);
+ this.threadPool = pool;
+ }
+
+ @Override
+ public int order() {
+ return 999;
+ }
+
+ @Override
+ protected boolean apply(String action, ActionRequest request, ActionListener listener) {
+ requests.add(new RequestAndHeaders(threadPool.getThreadContext().getHeaders(), request));
+ return true;
+ }
+
+ @Override
+ protected boolean apply(String action, ActionResponse response, ActionListener listener) {
+ return true;
+ }
+ }
+
+ private static class RequestAndHeaders {
+ final Map<String, String> headers;
+ final ActionRequest request;
+
+ private RequestAndHeaders(Map<String, String> headers, ActionRequest request) {
+ this.headers = headers;
+ this.request = request;
+ }
+ }
+}
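
The transportClient() helper above relies on Client.filterWithHeader, which wraps a client so that every request it sends carries the given headers; a generic sketch (helper class hypothetical):

    import java.util.Collections;

    import org.elasticsearch.client.Client;

    public final class HeaderedClientExample {
        private HeaderedClientExample() {}

        public static Client withHeader(Client client, String key, String value) {
            // the returned client attaches the header to every outgoing request
            return client.filterWithHeader(Collections.singletonMap(key, value));
        }
    }
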
diff --git a/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java b/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java
index 33d0f6ef52..def9a119ac 100644
--- a/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java
+++ b/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java
@@ -51,10 +51,10 @@ import static org.hamcrest.Matchers.is;
public class NettySizeHeaderFrameDecoderTests extends ESTestCase {
private final Settings settings = settingsBuilder()
- .put("name", "foo")
- .put("transport.host", "127.0.0.1")
- .put("transport.tcp.port", "0")
- .build();
+ .put("name", "foo")
+ .put(TransportSettings.BIND_HOST.getKey(), "127.0.0.1")
+ .put(TransportSettings.PORT.getKey(), "0")
+ .build();
private ThreadPool threadPool;
private NettyTransport nettyTransport;
diff --git a/core/src/test/java/org/elasticsearch/transport/TransportMessageTests.java b/core/src/test/java/org/elasticsearch/transport/TransportMessageTests.java
deleted file mode 100644
index a94b06f6f0..0000000000
--- a/core/src/test/java/org/elasticsearch/transport/TransportMessageTests.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.transport;
-
-import org.elasticsearch.Version;
-import org.elasticsearch.common.io.stream.BytesStreamOutput;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.test.ESTestCase;
-
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.is;
-
-/**
- *
- */
-public class TransportMessageTests extends ESTestCase {
- public void testSerialization() throws Exception {
- Message message = new Message();
- message.putHeader("key1", "value1");
- message.putHeader("key2", "value2");
- message.putInContext("key3", "value3");
-
- BytesStreamOutput out = new BytesStreamOutput();
- out.setVersion(Version.CURRENT);
- message.writeTo(out);
- StreamInput in = StreamInput.wrap(out.bytes());
- in.setVersion(Version.CURRENT);
- message = new Message();
- message.readFrom(in);
- assertThat(message.getHeaders().size(), is(2));
- assertThat((String) message.getHeader("key1"), equalTo("value1"));
- assertThat((String) message.getHeader("key2"), equalTo("value2"));
- assertThat(message.isContextEmpty(), is(true));
-
- // ensure that casting is not needed
- String key1 = message.getHeader("key1");
- assertThat(key1, is("value1"));
- }
-
- public void testCopyHeadersAndContext() throws Exception {
- Message m1 = new Message();
- m1.putHeader("key1", "value1");
- m1.putHeader("key2", "value2");
- m1.putInContext("key3", "value3");
-
- Message m2 = new Message(m1);
-
- assertThat(m2.getHeaders().size(), is(2));
- assertThat((String) m2.getHeader("key1"), equalTo("value1"));
- assertThat((String) m2.getHeader("key2"), equalTo("value2"));
- assertThat((String) m2.getFromContext("key3"), equalTo("value3"));
-
- // ensure that casting is not needed
- String key3 = m2.getFromContext("key3");
- assertThat(key3, is("value3"));
- testContext(m2, "key3", "value3");
- }
-
- // ensure that generic arg like this is not needed: TransportMessage<?> transportMessage
- private void testContext(TransportMessage transportMessage, String key, String expectedValue) {
- String result = transportMessage.getFromContext(key);
- assertThat(result, is(expectedValue));
-
- }
-
- private static class Message extends TransportMessage<Message> {
-
- private Message() {
- }
-
- private Message(Message message) {
- super(message);
- }
- }
-}
diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyScheduledPingTests.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyScheduledPingTests.java
index 7a939a5a1b..d720706b77 100644
--- a/core/src/test/java/org/elasticsearch/transport/netty/NettyScheduledPingTests.java
+++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyScheduledPingTests.java
@@ -36,6 +36,7 @@ import org.elasticsearch.transport.TransportRequestHandler;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportResponseOptions;
+import org.elasticsearch.transport.TransportSettings;
import java.io.IOException;
@@ -49,7 +50,7 @@ public class NettyScheduledPingTests extends ESTestCase {
public void testScheduledPing() throws Exception {
ThreadPool threadPool = new ThreadPool(getClass().getName());
- Settings settings = Settings.builder().put(NettyTransport.PING_SCHEDULE, "5ms").put("transport.tcp.port", 0).build();
+ Settings settings = Settings.builder().put(NettyTransport.PING_SCHEDULE.getKey(), "5ms").put(TransportSettings.PORT.getKey(), 0).build();
final NettyTransport nettyA = new NettyTransport(settings, threadPool, new NetworkService(settings), BigArrays.NON_RECYCLING_INSTANCE, Version.CURRENT, new NamedWriteableRegistry());
MockTransportService serviceA = new MockTransportService(settings, nettyA, threadPool);
@@ -68,12 +69,12 @@ public class NettyScheduledPingTests extends ESTestCase {
assertBusy(new Runnable() {
@Override
public void run() {
- assertThat(nettyA.scheduledPing.successfulPings.count(), greaterThan(100l));
- assertThat(nettyB.scheduledPing.successfulPings.count(), greaterThan(100l));
+ assertThat(nettyA.scheduledPing.successfulPings.count(), greaterThan(100L));
+ assertThat(nettyB.scheduledPing.successfulPings.count(), greaterThan(100L));
}
});
- assertThat(nettyA.scheduledPing.failedPings.count(), equalTo(0l));
- assertThat(nettyB.scheduledPing.failedPings.count(), equalTo(0l));
+ assertThat(nettyA.scheduledPing.failedPings.count(), equalTo(0L));
+ assertThat(nettyB.scheduledPing.failedPings.count(), equalTo(0L));
serviceA.registerRequestHandler("sayHello", TransportRequest.Empty::new, ThreadPool.Names.GENERIC, new TransportRequestHandler<TransportRequest.Empty>() {
@Override
@@ -117,12 +118,12 @@ public class NettyScheduledPingTests extends ESTestCase {
assertBusy(new Runnable() {
@Override
public void run() {
- assertThat(nettyA.scheduledPing.successfulPings.count(), greaterThan(200l));
- assertThat(nettyB.scheduledPing.successfulPings.count(), greaterThan(200l));
+ assertThat(nettyA.scheduledPing.successfulPings.count(), greaterThan(200L));
+ assertThat(nettyB.scheduledPing.successfulPings.count(), greaterThan(200L));
}
});
- assertThat(nettyA.scheduledPing.failedPings.count(), equalTo(0l));
- assertThat(nettyB.scheduledPing.failedPings.count(), equalTo(0l));
+ assertThat(nettyA.scheduledPing.failedPings.count(), equalTo(0L));
+ assertThat(nettyB.scheduledPing.failedPings.count(), equalTo(0L));
Releasables.close(serviceA, serviceB);
terminate(threadPool);
diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java
index 55f9bc49df..a8d06d39f5 100644
--- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java
+++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java
@@ -34,6 +34,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
+import org.elasticsearch.node.Node;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
@@ -42,6 +43,7 @@ import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.ActionNotFoundTransportException;
import org.elasticsearch.transport.RequestHandlerRegistry;
import org.elasticsearch.transport.TransportRequest;
+import org.elasticsearch.transport.TransportSettings;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
@@ -49,6 +51,7 @@ import org.jboss.netty.channel.ChannelPipelineFactory;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Collection;
+import java.util.Collections;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.hamcrest.Matchers.containsString;
@@ -65,7 +68,7 @@ public class NettyTransportIT extends ESIntegTestCase {
@Override
protected Settings nodeSettings(int nodeOrdinal) {
return settingsBuilder().put(super.nodeSettings(nodeOrdinal))
- .put("node.mode", "network")
+ .put(Node.NODE_MODE_SETTING.getKey(), "network")
.put(NetworkModule.TRANSPORT_TYPE_KEY, "exception-throwing").build();
}
@@ -78,13 +81,12 @@ public class NettyTransportIT extends ESIntegTestCase {
Client transportClient = internalCluster().transportClient();
ClusterHealthResponse clusterIndexHealths = transportClient.admin().cluster().prepareHealth().get();
assertThat(clusterIndexHealths.getStatus(), is(ClusterHealthStatus.GREEN));
-
try {
- transportClient.admin().cluster().prepareHealth().putHeader("ERROR", "MY MESSAGE").get();
+ transportClient.filterWithHeader(Collections.singletonMap("ERROR", "MY MESSAGE")).admin().cluster().prepareHealth().get();
fail("Expected exception, but didnt happen");
} catch (ElasticsearchException e) {
assertThat(e.getMessage(), containsString("MY MESSAGE"));
- assertThat(channelProfileName, is(NettyTransport.DEFAULT_PROFILE));
+ assertThat(channelProfileName, is(TransportSettings.DEFAULT_PROFILE));
}
}
@@ -126,7 +128,7 @@ public class NettyTransportIT extends ESIntegTestCase {
@Override
public ChannelPipeline getPipeline() throws Exception {
ChannelPipeline pipeline = super.getPipeline();
- pipeline.replace("dispatcher", "dispatcher", new MessageChannelHandler(nettyTransport, logger, NettyTransport.DEFAULT_PROFILE) {
+ pipeline.replace("dispatcher", "dispatcher", new MessageChannelHandler(nettyTransport, logger, TransportSettings.DEFAULT_PROFILE) {
@Override
protected String handleRequest(Channel channel, StreamInput buffer, long requestId, Version version) throws IOException {
@@ -141,8 +143,9 @@ public class NettyTransportIT extends ESIntegTestCase {
final TransportRequest request = reg.newRequest();
request.remoteAddress(new InetSocketTransportAddress((InetSocketAddress) channel.getRemoteAddress()));
request.readFrom(buffer);
- if (request.hasHeader("ERROR")) {
- throw new ElasticsearchException((String) request.getHeader("ERROR"));
+ String error = threadPool.getThreadContext().getHeader("ERROR");
+ if (error != null) {
+ throw new ElasticsearchException(error);
}
if (reg.getExecutor() == ThreadPool.Names.SAME) {
//noinspection unchecked
diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java
index ee49012291..f936b5f675 100644
--- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java
+++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java
@@ -28,6 +28,8 @@ import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.node.Node;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
@@ -61,7 +63,7 @@ public class NettyTransportMultiPortIntegrationIT extends ESIntegTestCase {
.put(super.nodeSettings(nodeOrdinal))
.put("network.host", "127.0.0.1")
.put(NetworkModule.TRANSPORT_TYPE_KEY, "netty")
- .put("node.mode", "network")
+ .put(Node.NODE_MODE_SETTING.getKey(), "network")
.put("transport.profiles.client1.port", randomPortRange)
.put("transport.profiles.client1.publish_host", "127.0.0.7")
.put("transport.profiles.client1.publish_port", "4321")
@@ -73,7 +75,7 @@ public class NettyTransportMultiPortIntegrationIT extends ESIntegTestCase {
Settings settings = settingsBuilder()
.put("cluster.name", internalCluster().getClusterName())
.put(NetworkModule.TRANSPORT_TYPE_KEY, "netty")
- .put("path.home", createTempDir().toString())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
try (TransportClient transportClient = TransportClient.builder().settings(settings).build()) {
transportClient.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("127.0.0.1"), randomPort));
diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortTests.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortTests.java
index 1c8869772e..6eaf738761 100644
--- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortTests.java
+++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortTests.java
@@ -31,6 +31,7 @@ import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.transport.TransportSettings;
import org.junit.Before;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
@@ -52,7 +53,7 @@ public class NettyTransportMultiPortTests extends ESTestCase {
public void testThatNettyCanBindToMultiplePorts() throws Exception {
Settings settings = settingsBuilder()
.put("network.host", host)
- .put("transport.tcp.port", 22) // will not actually bind to this
+ .put(TransportSettings.PORT.getKey(), 22) // will not actually bind to this
.put("transport.profiles.default.port", 0)
.put("transport.profiles.client1.port", 0)
.build();
@@ -69,7 +70,7 @@ public class NettyTransportMultiPortTests extends ESTestCase {
public void testThatDefaultProfileInheritsFromStandardSettings() throws Exception {
Settings settings = settingsBuilder()
.put("network.host", host)
- .put("transport.tcp.port", 0)
+ .put(TransportSettings.PORT.getKey(), 0)
.put("transport.profiles.client1.port", 0)
.build();
@@ -86,7 +87,7 @@ public class NettyTransportMultiPortTests extends ESTestCase {
Settings settings = settingsBuilder()
.put("network.host", host)
- .put("transport.tcp.port", 0)
+ .put(TransportSettings.PORT.getKey(), 0)
.put("transport.profiles.client1.whatever", "foo")
.build();
@@ -102,8 +103,7 @@ public class NettyTransportMultiPortTests extends ESTestCase {
public void testThatDefaultProfilePortOverridesGeneralConfiguration() throws Exception {
Settings settings = settingsBuilder()
.put("network.host", host)
- .put("transport.tcp.port", 22) // will not actually bind to this
- .put("transport.netty.port", 23) // will not actually bind to this
+ .put(TransportSettings.PORT.getKey(), 22) // will not actually bind to this
.put("transport.profiles.default.port", 0)
.build();
@@ -119,7 +119,7 @@ public class NettyTransportMultiPortTests extends ESTestCase {
public void testThatProfileWithoutValidNameIsIgnored() throws Exception {
Settings settings = settingsBuilder()
.put("network.host", host)
- .put("transport.tcp.port", 0)
+ .put(TransportSettings.PORT.getKey(), 0)
// mimics someone trying to define a profile for .local which is the profile for a node request to itself
.put("transport.profiles." + TransportService.DIRECT_RESPONSE_PROFILE + ".port", 22) // will not actually bind to this
.put("transport.profiles..port", 23) // will not actually bind to this
diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportPublishAddressIT.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportPublishAddressIT.java
index ea67ce3271..0fceda3166 100644
--- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportPublishAddressIT.java
+++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportPublishAddressIT.java
@@ -27,6 +27,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
+import org.elasticsearch.node.Node;
import org.elasticsearch.test.ESIntegTestCase;
import java.net.Inet4Address;
@@ -46,7 +47,7 @@ public class NettyTransportPublishAddressIT extends ESIntegTestCase {
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal))
.put(NetworkModule.TRANSPORT_TYPE_KEY, "netty")
- .put("node.mode", "network").build();
+ .put(Node.NODE_MODE_SETTING.getKey(), "network").build();
}
public void testDifferentPorts() throws Exception {
diff --git a/core/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java b/core/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java
index bd26319f4a..8364dea224 100644
--- a/core/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java
+++ b/core/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java
@@ -29,6 +29,7 @@ import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.transport.AbstractSimpleTransportTestCase;
import org.elasticsearch.transport.ConnectTransportException;
+import org.elasticsearch.transport.TransportSettings;
import java.net.InetAddress;
import java.net.UnknownHostException;
@@ -39,7 +40,7 @@ public class SimpleNettyTransportTests extends AbstractSimpleTransportTestCase {
@Override
protected MockTransportService build(Settings settings, Version version, NamedWriteableRegistry namedWriteableRegistry) {
- settings = Settings.builder().put(settings).put("transport.tcp.port", "0").build();
+ settings = Settings.builder().put(settings).put(TransportSettings.PORT.getKey(), "0").build();
MockTransportService transportService = new MockTransportService(settings, new NettyTransport(settings, threadPool, new NetworkService(settings), BigArrays.NON_RECYCLING_INSTANCE, version, namedWriteableRegistry), threadPool);
transportService.start();
return transportService;
diff --git a/core/src/test/java/org/elasticsearch/tribe/TribeIT.java b/core/src/test/java/org/elasticsearch/tribe/TribeIT.java
index 1350dcbb8e..ae4555b891 100644
--- a/core/src/test/java/org/elasticsearch/tribe/TribeIT.java
+++ b/core/src/test/java/org/elasticsearch/tribe/TribeIT.java
@@ -32,6 +32,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.discovery.MasterNotDiscoveredException;
@@ -51,6 +52,7 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
+import java.util.function.Function;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
@@ -78,7 +80,7 @@ public class TribeIT extends ESIntegTestCase {
NodeConfigurationSource nodeConfigurationSource = new NodeConfigurationSource() {
@Override
public Settings nodeSettings(int nodeOrdinal) {
- return Settings.builder().put(Node.HTTP_ENABLED, false).build();
+ return Settings.builder().put(NetworkModule.HTTP_ENABLED.getKey(), false).build();
}
@Override
@@ -93,7 +95,7 @@ public class TribeIT extends ESIntegTestCase {
};
cluster2 = new InternalTestCluster(InternalTestCluster.configuredNodeMode(), randomLong(), createTempDir(), 2, 2,
- Strings.randomBase64UUID(getRandom()), nodeConfigurationSource, 0, false, SECOND_CLUSTER_NODE_PREFIX, Collections.emptyList());
+ Strings.randomBase64UUID(getRandom()), nodeConfigurationSource, 0, false, SECOND_CLUSTER_NODE_PREFIX, Collections.emptyList(), Function.identity());
cluster2.beforeTest(getRandom(), 0.1);
cluster2.ensureAtLeastNumDataNodes(2);
@@ -134,8 +136,8 @@ public class TribeIT extends ESIntegTestCase {
tribe2Defaults.put("tribe.t2." + entry.getKey(), entry.getValue());
}
// give each tribe its unicast hosts to connect to
- tribe1Defaults.putArray("tribe.t1." + UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS, getUnicastHosts(internalCluster().client()));
- tribe1Defaults.putArray("tribe.t2." + UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS, getUnicastHosts(cluster2.client()));
+ tribe1Defaults.putArray("tribe.t1." + UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.getKey(), getUnicastHosts(internalCluster().client()));
+ tribe1Defaults.putArray("tribe.t2." + UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.getKey(), getUnicastHosts(cluster2.client()));
Settings merged = Settings.builder()
.put("tribe.t1.cluster.name", internalCluster().getClusterName())
@@ -241,8 +243,8 @@ public class TribeIT extends ESIntegTestCase {
logger.info("wait till test1 and test2 exists in the tribe node state");
awaitIndicesInClusterState("test1", "test2");
- assertThat(tribeClient.admin().cluster().prepareState().get().getState().getMetaData().index("test1").getSettings().get(TribeService.TRIBE_NAME), equalTo("t1"));
- assertThat(tribeClient.admin().cluster().prepareState().get().getState().getMetaData().index("test2").getSettings().get(TribeService.TRIBE_NAME), equalTo("t2"));
+ assertThat(tribeClient.admin().cluster().prepareState().get().getState().getMetaData().index("test1").getSettings().get("tribe.name"), equalTo("t1"));
+ assertThat(tribeClient.admin().cluster().prepareState().get().getState().getMetaData().index("test2").getSettings().get("tribe.name"), equalTo("t2"));
assertThat(tribeClient.admin().cluster().prepareState().get().getState().getMetaData().hasIndex("conflict"), equalTo(false));
}
@@ -268,9 +270,9 @@ public class TribeIT extends ESIntegTestCase {
logger.info("wait till test1 and test2 exists in the tribe node state");
awaitIndicesInClusterState("test1", "test2", "conflict");
- assertThat(tribeClient.admin().cluster().prepareState().get().getState().getMetaData().index("test1").getSettings().get(TribeService.TRIBE_NAME), equalTo("t1"));
- assertThat(tribeClient.admin().cluster().prepareState().get().getState().getMetaData().index("test2").getSettings().get(TribeService.TRIBE_NAME), equalTo("t2"));
- assertThat(tribeClient.admin().cluster().prepareState().get().getState().getMetaData().index("conflict").getSettings().get(TribeService.TRIBE_NAME), equalTo(tribe));
+ assertThat(tribeClient.admin().cluster().prepareState().get().getState().getMetaData().index("test1").getSettings().get("tribe.name"), equalTo("t1"));
+ assertThat(tribeClient.admin().cluster().prepareState().get().getState().getMetaData().index("test2").getSettings().get("tribe.name"), equalTo("t2"));
+ assertThat(tribeClient.admin().cluster().prepareState().get().getState().getMetaData().index("conflict").getSettings().get("tribe.name"), equalTo(tribe));
}
public void testTribeOnOneCluster() throws Exception {
@@ -295,8 +297,8 @@ public class TribeIT extends ESIntegTestCase {
tribeClient.admin().indices().prepareRefresh().get();
logger.info("verify they are there");
- assertHitCount(tribeClient.prepareSearch().setSize(0).get(), 2l);
- assertHitCount(tribeClient.prepareSearch().get(), 2l);
+ assertHitCount(tribeClient.prepareSearch().setSize(0).get(), 2L);
+ assertHitCount(tribeClient.prepareSearch().get(), 2L);
assertBusy(new Runnable() {
@Override
public void run() {
@@ -314,8 +316,8 @@ public class TribeIT extends ESIntegTestCase {
logger.info("verify they are there");
- assertHitCount(tribeClient.prepareSearch().setSize(0).get(), 4l);
- assertHitCount(tribeClient.prepareSearch().get(), 4l);
+ assertHitCount(tribeClient.prepareSearch().setSize(0).get(), 4L);
+ assertHitCount(tribeClient.prepareSearch().get(), 4L);
assertBusy(new Runnable() {
@Override
public void run() {
@@ -435,7 +437,7 @@ public class TribeIT extends ESIntegTestCase {
if (!node.dataNode()) {
continue;
}
- if (tribeName.equals(node.getAttributes().get(TribeService.TRIBE_NAME))) {
+ if (tribeName.equals(node.getAttributes().get("tribe.name"))) {
count++;
}
}
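The TribeIT hunks above are mostly one migration: raw settings-key strings (Node.HTTP_ENABLED, UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS) give way to typed Setting constants, with the string key obtained via getKey(). A minimal sketch of the new pattern, using only constants that appear in the hunks (the import path for NetworkModule is assumed to be its usual location):

    import org.elasticsearch.common.network.NetworkModule;
    import org.elasticsearch.common.settings.Settings;

    // getKey() yields the canonical settings key, so the builder stays in
    // sync with the Setting definition instead of a hand-typed string literal.
    Settings nodeSettings = Settings.builder()
            .put(NetworkModule.HTTP_ENABLED.getKey(), false)
            .build();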
diff --git a/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java b/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java
index f871995cc2..cb8165b4aa 100644
--- a/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java
+++ b/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java
@@ -65,8 +65,6 @@ public class SimpleTTLIT extends ESIntegTestCase {
return settingsBuilder()
.put(super.nodeSettings(nodeOrdinal))
.put("indices.ttl.interval", PURGE_INTERVAL, TimeUnit.MILLISECONDS)
- .put("cluster.routing.operation.use_type", false) // make sure we control the shard computation
- .put("cluster.routing.operation.hash.type", "djb")
.build();
}
@@ -113,7 +111,7 @@ public class SimpleTTLIT extends ESIntegTestCase {
ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue();
assertThat(ttl0, lessThanOrEqualTo(providedTTLValue - (currentTime - now)));
} else {
- assertThat(providedTTLValue - (currentTime - now), lessThanOrEqualTo(0l));
+ assertThat(providedTTLValue - (currentTime - now), lessThanOrEqualTo(0L));
}
// verify the ttl is still decreasing when going to the replica
currentTime = System.currentTimeMillis();
@@ -122,7 +120,7 @@ public class SimpleTTLIT extends ESIntegTestCase {
ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue();
assertThat(ttl0, lessThanOrEqualTo(providedTTLValue - (currentTime - now)));
} else {
- assertThat(providedTTLValue - (currentTime - now), lessThanOrEqualTo(0l));
+ assertThat(providedTTLValue - (currentTime - now), lessThanOrEqualTo(0L));
}
// non realtime get (stored)
currentTime = System.currentTimeMillis();
@@ -131,7 +129,7 @@ public class SimpleTTLIT extends ESIntegTestCase {
ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue();
assertThat(ttl0, lessThanOrEqualTo(providedTTLValue - (currentTime - now)));
} else {
- assertThat(providedTTLValue - (currentTime - now), lessThanOrEqualTo(0l));
+ assertThat(providedTTLValue - (currentTime - now), lessThanOrEqualTo(0L));
}
// non realtime get going to the replica
currentTime = System.currentTimeMillis();
@@ -140,7 +138,7 @@ public class SimpleTTLIT extends ESIntegTestCase {
ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue();
assertThat(ttl0, lessThanOrEqualTo(providedTTLValue - (currentTime - now)));
} else {
- assertThat(providedTTLValue - (currentTime - now), lessThanOrEqualTo(0l));
+ assertThat(providedTTLValue - (currentTime - now), lessThanOrEqualTo(0L));
}
// no TTL provided so no TTL fetched
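The 0l -> 0L churn in this file (and throughout the diff) is cosmetic but worthwhile: both suffixes denote a long literal, yet a lowercase l is easily misread as the digit 1. A tiny illustration:

    long misleading = 10l;  // reads like the int 101 at a glance
    long clear = 10L;       // same value, unambiguous suffix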
diff --git a/core/src/test/java/org/elasticsearch/update/UpdateIT.java b/core/src/test/java/org/elasticsearch/update/UpdateIT.java
index 7c72014532..b6d785a497 100644
--- a/core/src/test/java/org/elasticsearch/update/UpdateIT.java
+++ b/core/src/test/java/org/elasticsearch/update/UpdateIT.java
@@ -33,14 +33,15 @@ import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.index.MergePolicyConfig;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.engine.DocumentMissingException;
import org.elasticsearch.index.engine.VersionConflictEngineException;
-import org.elasticsearch.index.MergePolicyConfig;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.Script;
+import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
@@ -92,7 +93,7 @@ public class UpdateIT extends ESIntegTestCase {
}
public void onModule(ScriptModule module) {
- module.addScriptEngine(PutFieldValuesScriptEngine.class);
+ module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(PutFieldValuesScriptEngine.class, PutFieldValuesScriptEngine.TYPES));
}
}
@@ -101,22 +102,24 @@ public class UpdateIT extends ESIntegTestCase {
public static final String NAME = "put_values";
+ public static final List<String> TYPES = Collections.singletonList(NAME);
+
@Override
public void close() throws IOException {
}
@Override
- public String[] types() {
- return new String[] { NAME };
+ public List<String> getTypes() {
+ return TYPES;
}
@Override
- public String[] extensions() {
- return types();
+ public List<String> getExtensions() {
+ return TYPES;
}
@Override
- public boolean sandboxed() {
+ public boolean isSandboxed() {
return true;
}
@@ -154,12 +157,6 @@ public class UpdateIT extends ESIntegTestCase {
return ctx;
}
-
- @Override
- public Object unwrap(Object value) {
- return value;
- }
-
};
}
@@ -190,7 +187,7 @@ public class UpdateIT extends ESIntegTestCase {
}
public void onModule(ScriptModule module) {
- module.addScriptEngine(FieldIncrementScriptEngine.class);
+ module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(FieldIncrementScriptEngine.class, FieldIncrementScriptEngine.TYPES));
}
}
@@ -199,22 +196,24 @@ public class UpdateIT extends ESIntegTestCase {
public static final String NAME = "field_inc";
+ public static final List<String> TYPES = Collections.singletonList(NAME);
+
@Override
public void close() throws IOException {
}
@Override
- public String[] types() {
- return new String[] { NAME };
+ public List<String> getTypes() {
+ return TYPES;
}
@Override
- public String[] extensions() {
- return types();
+ public List<String> getExtensions() {
+ return TYPES;
}
@Override
- public boolean sandboxed() {
+ public boolean isSandboxed() {
return true;
}
@@ -245,12 +244,6 @@ public class UpdateIT extends ESIntegTestCase {
source.put(field, currentValue.longValue() + inc.longValue());
return ctx;
}
-
- @Override
- public Object unwrap(Object value) {
- return value;
- }
-
};
}
@@ -281,7 +274,7 @@ public class UpdateIT extends ESIntegTestCase {
}
public void onModule(ScriptModule module) {
- module.addScriptEngine(ScriptedUpsertScriptEngine.class);
+ module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ScriptedUpsertScriptEngine.class, ScriptedUpsertScriptEngine.TYPES));
}
}
@@ -290,22 +283,24 @@ public class UpdateIT extends ESIntegTestCase {
public static final String NAME = "scripted_upsert";
+ public static final List<String> TYPES = Collections.singletonList(NAME);
+
@Override
public void close() throws IOException {
}
@Override
- public String[] types() {
- return new String[] { NAME };
+ public List<String> getTypes() {
+ return TYPES;
}
@Override
- public String[] extensions() {
- return types();
+ public List<String> getExtensions() {
+ return TYPES;
}
@Override
- public boolean sandboxed() {
+ public boolean isSandboxed() {
return true;
}
@@ -336,12 +331,6 @@ public class UpdateIT extends ESIntegTestCase {
source.put("balance", oldBalance.intValue() - deduction);
return ctx;
}
-
- @Override
- public Object unwrap(Object value) {
- return value;
- }
-
};
}
@@ -372,7 +361,7 @@ public class UpdateIT extends ESIntegTestCase {
}
public void onModule(ScriptModule module) {
- module.addScriptEngine(ExtractContextInSourceScriptEngine.class);
+ module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ExtractContextInSourceScriptEngine.class, ExtractContextInSourceScriptEngine.TYPES));
}
}
@@ -381,22 +370,24 @@ public class UpdateIT extends ESIntegTestCase {
public static final String NAME = "extract_ctx";
+ public static final List<String> TYPES = Collections.singletonList(NAME);
+
@Override
public void close() throws IOException {
}
@Override
- public String[] types() {
- return new String[] { NAME };
+ public List<String> getTypes() {
+ return TYPES;
}
@Override
- public String[] extensions() {
- return types();
+ public List<String> getExtensions() {
+ return TYPES;
}
@Override
- public boolean sandboxed() {
+ public boolean isSandboxed() {
return true;
}
@@ -428,12 +419,6 @@ public class UpdateIT extends ESIntegTestCase {
return ctx;
}
-
- @Override
- public Object unwrap(Object value) {
- return value;
- }
-
};
}
@@ -613,13 +598,13 @@ public class UpdateIT extends ESIntegTestCase {
client().prepareUpdate(indexOrAlias(), "type", "1")
.setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values", Collections.singletonMap("text", "v2"))).setVersion(1).get();
- assertThat(client().prepareGet("test", "type", "1").get().getVersion(), equalTo(2l));
+ assertThat(client().prepareGet("test", "type", "1").get().getVersion(), equalTo(2L));
// and again with a higher version..
client().prepareUpdate(indexOrAlias(), "type", "1")
.setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values", Collections.singletonMap("text", "v3"))).setVersion(2).get();
- assertThat(client().prepareGet("test", "type", "1").get().getVersion(), equalTo(3l));
+ assertThat(client().prepareGet("test", "type", "1").get().getVersion(), equalTo(3L));
// after delete
client().prepareDelete("test", "type", "1").get();
@@ -643,7 +628,7 @@ public class UpdateIT extends ESIntegTestCase {
.setVersion(10).setVersionType(VersionType.FORCE).get();
GetResponse get = get("test", "type", "2");
- assertThat(get.getVersion(), equalTo(10l));
+ assertThat(get.getVersion(), equalTo(10L));
assertThat((String) get.getSource().get("text"), equalTo("v10"));
// upserts - the combination with versions is a bit weird. Tests are here to ensure we do not change our behavior unintentionally
@@ -653,7 +638,7 @@ public class UpdateIT extends ESIntegTestCase {
.setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values", Collections.singletonMap("text", "v2")))
.setVersion(10).setUpsert("{ \"text\": \"v0\" }").get();
get = get("test", "type", "3");
- assertThat(get.getVersion(), equalTo(1l));
+ assertThat(get.getVersion(), equalTo(1L));
assertThat((String) get.getSource().get("text"), equalTo("v0"));
// retry on conflict is rejected:
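The UpdateIT hunks above all track one API change in ScriptEngineService: the array-returning accessors become List-returning getters, sandboxed() is renamed, unwrap() is dropped, and engines now register through ScriptEngineRegistry. A side-by-side sketch of the changed surface, taken directly from the hunks (everything else on the interface is unchanged):

    // old surface                          // new surface
    // String[] types();                    List<String> getTypes();
    // String[] extensions();               List<String> getExtensions();
    // boolean sandboxed();                 boolean isSandboxed();
    // Object unwrap(Object value);         (removed)

    // Registration, from a test plugin's onModule hook:
    public void onModule(ScriptModule module) {
        module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(
                PutFieldValuesScriptEngine.class, PutFieldValuesScriptEngine.TYPES));
    }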
diff --git a/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java b/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java
index 1657116991..8bc69d4c17 100644
--- a/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java
+++ b/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java
@@ -92,7 +92,7 @@ public class SimpleValidateQueryIT extends ESIntegTestCase {
refresh();
- for (Client client : internalCluster()) {
+ for (Client client : internalCluster().getClients()) {
ValidateQueryResponse response = client.admin().indices().prepareValidateQuery("test")
.setQuery(QueryBuilders.wrapperQuery("foo".getBytes(StandardCharsets.UTF_8)))
.setExplain(true)
@@ -104,7 +104,7 @@ public class SimpleValidateQueryIT extends ESIntegTestCase {
}
- for (Client client : internalCluster()) {
+ for (Client client : internalCluster().getClients()) {
ValidateQueryResponse response = client.admin().indices().prepareValidateQuery("test")
.setQuery(QueryBuilders.queryStringQuery("foo"))
.setExplain(true)
@@ -212,6 +212,7 @@ public class SimpleValidateQueryIT extends ESIntegTestCase {
assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), containsString("field:\"foo (one* two*)\""));
}
+ @SuppressWarnings("deprecation") // fuzzy queries will be removed in 4.0
public void testExplainWithRewriteValidateQuery() throws Exception {
client().admin().indices().prepareCreate("test")
.addMapping("type1", "field", "type=string,analyzer=whitespace")
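The only functional change in this file is the iteration style: tests no longer iterate the InternalTestCluster itself but ask it for its clients explicitly. A minimal sketch of the new loop, mirroring the hunks above:

    // One client per node in the internal test cluster.
    for (Client client : internalCluster().getClients()) {
        ValidateQueryResponse response = client.admin().indices()
                .prepareValidateQuery("test")
                .setQuery(QueryBuilders.queryStringQuery("foo"))
                .setExplain(true)
                .get();
        // ... assertions on response
    }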
diff --git a/core/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java b/core/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java
index edbbebbbc4..3432411b22 100644
--- a/core/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java
+++ b/core/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java
@@ -75,23 +75,23 @@ public class SimpleVersioningIT extends ESIntegTestCase {
createIndex("test");
ensureGreen("test"); // we are testing force here which doesn't work if we are recovering at the same time - zzzzz...
IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setVersion(12).setVersionType(VersionType.FORCE).get();
- assertThat(indexResponse.getVersion(), equalTo(12l));
+ assertThat(indexResponse.getVersion(), equalTo(12L));
indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").setVersion(12).setVersionType(VersionType.FORCE).get();
- assertThat(indexResponse.getVersion(), equalTo(12l));
+ assertThat(indexResponse.getVersion(), equalTo(12L));
indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").setVersion(14).setVersionType(VersionType.FORCE).get();
- assertThat(indexResponse.getVersion(), equalTo(14l));
+ assertThat(indexResponse.getVersion(), equalTo(14L));
indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setVersion(13).setVersionType(VersionType.FORCE).get();
- assertThat(indexResponse.getVersion(), equalTo(13l));
+ assertThat(indexResponse.getVersion(), equalTo(13L));
client().admin().indices().prepareRefresh().execute().actionGet();
if (randomBoolean()) {
refresh();
}
for (int i = 0; i < 10; i++) {
- assertThat(client().prepareGet("test", "type", "1").get().getVersion(), equalTo(13l));
+ assertThat(client().prepareGet("test", "type", "1").get().getVersion(), equalTo(13L));
}
// deleting with a lower version works.
@@ -105,13 +105,13 @@ public class SimpleVersioningIT extends ESIntegTestCase {
createIndex("test");
IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setVersion(12).setVersionType(VersionType.EXTERNAL_GTE).get();
- assertThat(indexResponse.getVersion(), equalTo(12l));
+ assertThat(indexResponse.getVersion(), equalTo(12L));
indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").setVersion(12).setVersionType(VersionType.EXTERNAL_GTE).get();
- assertThat(indexResponse.getVersion(), equalTo(12l));
+ assertThat(indexResponse.getVersion(), equalTo(12L));
indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").setVersion(14).setVersionType(VersionType.EXTERNAL_GTE).get();
- assertThat(indexResponse.getVersion(), equalTo(14l));
+ assertThat(indexResponse.getVersion(), equalTo(14L));
assertThrows(client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setVersion(13).setVersionType(VersionType.EXTERNAL_GTE),
VersionConflictEngineException.class);
@@ -121,7 +121,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
refresh();
}
for (int i = 0; i < 10; i++) {
- assertThat(client().prepareGet("test", "type", "1").get().getVersion(), equalTo(14l));
+ assertThat(client().prepareGet("test", "type", "1").get().getVersion(), equalTo(14L));
}
// deleting with a lower version fails.
@@ -144,7 +144,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
// But delete with a higher version is OK.
deleteResponse = client().prepareDelete("test", "type", "1").setVersion(18).setVersionType(VersionType.EXTERNAL_GTE).execute().actionGet();
assertThat(deleteResponse.isFound(), equalTo(false));
- assertThat(deleteResponse.getVersion(), equalTo(18l));
+ assertThat(deleteResponse.getVersion(), equalTo(18L));
}
public void testExternalVersioning() throws Exception {
@@ -152,10 +152,10 @@ public class SimpleVersioningIT extends ESIntegTestCase {
ensureGreen();
IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setVersion(12).setVersionType(VersionType.EXTERNAL).execute().actionGet();
- assertThat(indexResponse.getVersion(), equalTo(12l));
+ assertThat(indexResponse.getVersion(), equalTo(12L));
indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setVersion(14).setVersionType(VersionType.EXTERNAL).execute().actionGet();
- assertThat(indexResponse.getVersion(), equalTo(14l));
+ assertThat(indexResponse.getVersion(), equalTo(14L));
assertThrows(client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setVersion(13).setVersionType(VersionType.EXTERNAL).execute(),
VersionConflictEngineException.class);
@@ -164,7 +164,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
refresh();
}
for (int i = 0; i < 10; i++) {
- assertThat(client().prepareGet("test", "type", "1").execute().actionGet().getVersion(), equalTo(14l));
+ assertThat(client().prepareGet("test", "type", "1").execute().actionGet().getVersion(), equalTo(14L));
}
// deleting with a lower version fails.
@@ -175,7 +175,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
// Delete with a higher version deletes all versions up to the given one.
DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1").setVersion(17).setVersionType(VersionType.EXTERNAL).execute().actionGet();
assertThat(deleteResponse.isFound(), equalTo(true));
- assertThat(deleteResponse.getVersion(), equalTo(17l));
+ assertThat(deleteResponse.getVersion(), equalTo(17L));
// Deleting with a lower version keeps on failing after a delete.
assertThrows(
@@ -186,17 +186,17 @@ public class SimpleVersioningIT extends ESIntegTestCase {
// But delete with a higher version is OK.
deleteResponse = client().prepareDelete("test", "type", "1").setVersion(18).setVersionType(VersionType.EXTERNAL).execute().actionGet();
assertThat(deleteResponse.isFound(), equalTo(false));
- assertThat(deleteResponse.getVersion(), equalTo(18l));
+ assertThat(deleteResponse.getVersion(), equalTo(18L));
// TODO: This behavior breaks the REST API by returning HTTP status 201; the good news is that this is only the case until the deletes GC kicks in.
indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setVersion(19).setVersionType(VersionType.EXTERNAL).execute().actionGet();
- assertThat(indexResponse.getVersion(), equalTo(19l));
+ assertThat(indexResponse.getVersion(), equalTo(19L));
deleteResponse = client().prepareDelete("test", "type", "1").setVersion(20).setVersionType(VersionType.EXTERNAL).execute().actionGet();
assertThat(deleteResponse.isFound(), equalTo(true));
- assertThat(deleteResponse.getVersion(), equalTo(20l));
+ assertThat(deleteResponse.getVersion(), equalTo(20L));
// Make sure that the next delete will be GC'ed. Note we do it on the index settings so it will be cleaned up
HashMap<String, Object> newSettings = new HashMap<>();
@@ -207,7 +207,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
// And now the previous version returns -1
indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setVersion(20).setVersionType(VersionType.EXTERNAL).execute().actionGet();
- assertThat(indexResponse.getVersion(), equalTo(20l));
+ assertThat(indexResponse.getVersion(), equalTo(20L));
}
public void testRequireUnitsOnUpdateSettings() throws Exception {
@@ -233,7 +233,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1")
.setCreate(true).execute().actionGet();
- assertThat(indexResponse.getVersion(), equalTo(1l));
+ assertThat(indexResponse.getVersion(), equalTo(1L));
}
public void testInternalVersioning() throws Exception {
@@ -241,10 +241,10 @@ public class SimpleVersioningIT extends ESIntegTestCase {
ensureGreen();
IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").execute().actionGet();
- assertThat(indexResponse.getVersion(), equalTo(1l));
+ assertThat(indexResponse.getVersion(), equalTo(1L));
indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").setVersion(1).execute().actionGet();
- assertThat(indexResponse.getVersion(), equalTo(2l));
+ assertThat(indexResponse.getVersion(), equalTo(2L));
assertThrows(
client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setVersion(1).execute(),
@@ -264,13 +264,13 @@ public class SimpleVersioningIT extends ESIntegTestCase {
client().admin().indices().prepareRefresh().execute().actionGet();
for (int i = 0; i < 10; i++) {
- assertThat(client().prepareGet("test", "type", "1").execute().actionGet().getVersion(), equalTo(2l));
+ assertThat(client().prepareGet("test", "type", "1").execute().actionGet().getVersion(), equalTo(2L));
}
// search with versioning
for (int i = 0; i < 10; i++) {
SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setVersion(true).execute().actionGet();
- assertThat(searchResponse.getHits().getAt(0).version(), equalTo(2l));
+ assertThat(searchResponse.getHits().getAt(0).version(), equalTo(2L));
}
// search without versioning
@@ -281,7 +281,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1").setVersion(2).execute().actionGet();
assertThat(deleteResponse.isFound(), equalTo(true));
- assertThat(deleteResponse.getVersion(), equalTo(3l));
+ assertThat(deleteResponse.getVersion(), equalTo(3L));
assertThrows(client().prepareDelete("test", "type", "1").setVersion(2).execute(), VersionConflictEngineException.class);
@@ -290,7 +290,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
// and thus the transaction is increased.
deleteResponse = client().prepareDelete("test", "type", "1").setVersion(3).execute().actionGet();
assertThat(deleteResponse.isFound(), equalTo(false));
- assertThat(deleteResponse.getVersion(), equalTo(4l));
+ assertThat(deleteResponse.getVersion(), equalTo(4L));
}
public void testSimpleVersioningWithFlush() throws Exception {
@@ -298,12 +298,12 @@ public class SimpleVersioningIT extends ESIntegTestCase {
ensureGreen();
IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").execute().actionGet();
- assertThat(indexResponse.getVersion(), equalTo(1l));
+ assertThat(indexResponse.getVersion(), equalTo(1L));
client().admin().indices().prepareFlush().execute().actionGet();
indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").setVersion(1).execute().actionGet();
- assertThat(indexResponse.getVersion(), equalTo(2l));
+ assertThat(indexResponse.getVersion(), equalTo(2L));
client().admin().indices().prepareFlush().execute().actionGet();
@@ -321,12 +321,12 @@ public class SimpleVersioningIT extends ESIntegTestCase {
client().admin().indices().prepareRefresh().execute().actionGet();
for (int i = 0; i < 10; i++) {
- assertThat(client().prepareGet("test", "type", "1").execute().actionGet().getVersion(), equalTo(2l));
+ assertThat(client().prepareGet("test", "type", "1").execute().actionGet().getVersion(), equalTo(2L));
}
for (int i = 0; i < 10; i++) {
SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setVersion(true).execute().actionGet();
- assertThat(searchResponse.getHits().getAt(0).version(), equalTo(2l));
+ assertThat(searchResponse.getHits().getAt(0).version(), equalTo(2L));
}
}
@@ -338,7 +338,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
assertThat(bulkResponse.hasFailures(), equalTo(false));
assertThat(bulkResponse.getItems().length, equalTo(1));
IndexResponse indexResponse = bulkResponse.getItems()[0].getResponse();
- assertThat(indexResponse.getVersion(), equalTo(1l));
+ assertThat(indexResponse.getVersion(), equalTo(1L));
}
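These hunks exercise three version types with different conflict rules: internal versioning auto-increments, EXTERNAL requires a strictly higher version, EXTERNAL_GTE also accepts an equal one, and FORCE overwrites unconditionally. A compressed sketch of the semantics the assertions above rely on (a summary of the tests, not new behavior):

    // EXTERNAL: a write must carry a strictly higher version than the stored doc.
    client().prepareIndex("test", "type", "1").setSource("field1", "v1")
            .setVersion(12).setVersionType(VersionType.EXTERNAL).get();      // version == 12
    // setVersion(12) again would throw VersionConflictEngineException (not strictly higher)

    // EXTERNAL_GTE: an equal version is accepted and overwrites.
    client().prepareIndex("test", "type", "2").setSource("field1", "v1")
            .setVersion(12).setVersionType(VersionType.EXTERNAL_GTE).get();  // version == 12
    client().prepareIndex("test", "type", "2").setSource("field1", "v2")
            .setVersion(12).setVersionType(VersionType.EXTERNAL_GTE).get();  // still 12, doc replaced

    // FORCE: any version wins, even a lower one; used here to control versions in tests.
    client().prepareIndex("test", "type", "3").setSource("field1", "v1")
            .setVersion(14).setVersionType(VersionType.FORCE).get();         // version == 14
    client().prepareIndex("test", "type", "3").setSource("field1", "v2")
            .setVersion(13).setVersionType(VersionType.FORCE).get();         // version == 13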
diff --git a/core/src/test/java/org/elasticsearch/watcher/ResourceWatcherServiceTests.java b/core/src/test/java/org/elasticsearch/watcher/ResourceWatcherServiceTests.java
index fe36b74962..6c6c45e9cf 100644
--- a/core/src/test/java/org/elasticsearch/watcher/ResourceWatcherServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/watcher/ResourceWatcherServiceTests.java
@@ -79,7 +79,7 @@ public class ResourceWatcherServiceTests extends ESTestCase {
};
// checking default freq
- WatcherHandle<?> handle = service.add(watcher);
+ WatcherHandle handle = service.add(watcher);
assertThat(handle, notNullValue());
assertThat(handle.frequency(), equalTo(ResourceWatcherService.Frequency.MEDIUM));
assertThat(service.lowMonitor.watchers.size(), is(0));
diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping.json b/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping.json
index f956b84f95..eb9b7833c6 100644
--- a/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping.json
+++ b/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping.json
@@ -11,7 +11,7 @@
"properties":{
"first":{
"type":"string",
- "store":"yes",
+ "store":true,
"include_in_all":false
},
"last":{
@@ -29,7 +29,7 @@
"properties":{
"location":{
"type":"string",
- "store":"yes"
+ "store":true
}
}
},
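The mapping-resource hunks here and in the files that follow are one mechanical cleanup: the legacy "yes"/"no" strings for store become real booleans. The same mapping can also be built programmatically; a sketch with XContentBuilder (field names are illustrative, taken from the resource above):

    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.common.xcontent.XContentFactory;

    XContentBuilder mapping = XContentFactory.jsonBuilder()
            .startObject()
                .startObject("person")
                    .startObject("properties")
                        .startObject("first")
                            .field("type", "string")
                            .field("store", true)            // boolean, not "yes"
                            .field("include_in_all", false)
                        .endObject()
                    .endObject()
                .endObject()
            .endObject();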
diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_boost_omit_positions_on_all.json b/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_boost_omit_positions_on_all.json
index 452ef9f083..42bba4354c 100644
--- a/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_boost_omit_positions_on_all.json
+++ b/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_boost_omit_positions_on_all.json
@@ -11,7 +11,7 @@
"properties":{
"first":{
"type":"string",
- "store":"yes",
+ "store":true,
"include_in_all":false
},
"last":{
@@ -29,7 +29,7 @@
"properties":{
"location":{
"type":"string",
- "store":"yes"
+ "store":true
}
}
},
diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_offsets_on_all.json b/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_offsets_on_all.json
index f6b0699bee..388ac13e9e 100644
--- a/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_offsets_on_all.json
+++ b/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_offsets_on_all.json
@@ -11,7 +11,7 @@
"properties":{
"first":{
"type":"string",
- "store":"yes",
+ "store":true,
"include_in_all":false
},
"last":{
@@ -29,7 +29,7 @@
"properties":{
"location":{
"type":"string",
- "store":"yes"
+ "store":true
}
}
},
diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_omit_positions_on_all.json b/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_omit_positions_on_all.json
index f8e418ce8e..57aad9e8fa 100644
--- a/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_omit_positions_on_all.json
+++ b/core/src/test/resources/org/elasticsearch/index/mapper/all/mapping_omit_positions_on_all.json
@@ -11,7 +11,7 @@
"properties":{
"first":{
"type":"string",
- "store":"yes",
+ "store":true,
"include_in_all":false
},
"last":{
@@ -28,7 +28,7 @@
"properties":{
"location":{
"type":"string",
- "store":"yes"
+ "store":true
}
}
},
diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/all/noboost-mapping.json b/core/src/test/resources/org/elasticsearch/index/mapper/all/noboost-mapping.json
index 799a3ab460..9b1119f1a2 100644
--- a/core/src/test/resources/org/elasticsearch/index/mapper/all/noboost-mapping.json
+++ b/core/src/test/resources/org/elasticsearch/index/mapper/all/noboost-mapping.json
@@ -10,7 +10,7 @@
"properties":{
"first":{
"type":"string",
- "store":"yes",
+ "store":true,
"include_in_all":false
},
"last":{
@@ -27,7 +27,7 @@
"properties":{
"location":{
"type":"string",
- "store":"yes"
+ "store":true
}
}
},
diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/all/store-mapping.json b/core/src/test/resources/org/elasticsearch/index/mapper/all/store-mapping.json
index 8f653a3484..66fed5928a 100644
--- a/core/src/test/resources/org/elasticsearch/index/mapper/all/store-mapping.json
+++ b/core/src/test/resources/org/elasticsearch/index/mapper/all/store-mapping.json
@@ -2,7 +2,7 @@
"person":{
"_all":{
"enabled":true,
- "store":"yes"
+ "store":true
},
"properties":{
"name":{
@@ -11,7 +11,7 @@
"properties":{
"first":{
"type":"string",
- "store":"yes",
+ "store":true,
"include_in_all":false
},
"last":{
@@ -29,7 +29,7 @@
"properties":{
"location":{
"type":"string",
- "store":"yes"
+ "store":true
}
}
},
diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/genericstore/test-mapping.json b/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/genericstore/test-mapping.json
index d99067c2b5..70bf6dc7b5 100644
--- a/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/genericstore/test-mapping.json
+++ b/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/genericstore/test-mapping.json
@@ -5,7 +5,7 @@
"template_1":{
"match":"*",
"mapping":{
- "store":"yes"
+ "store":true
}
}
}
diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/test-mapping.json b/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/test-mapping.json
index dce33dadfc..3c273e6ed9 100644
--- a/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/test-mapping.json
+++ b/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/test-mapping.json
@@ -5,7 +5,7 @@
"template_1":{
"path_match":"obj1.obj2.*",
"mapping":{
- "store":"no"
+ "store":false
}
}
},
@@ -13,7 +13,7 @@
"template_2":{
"path_match":"obj1.*",
"mapping":{
- "store":"yes"
+ "store":true
}
}
},
@@ -27,4 +27,4 @@
}
]
}
-}
\ No newline at end of file
+}
diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/simple/test-mapping.json b/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/simple/test-mapping.json
index 9c8f8d8e6a..7a7e96d12a 100644
--- a/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/simple/test-mapping.json
+++ b/core/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/simple/test-mapping.json
@@ -7,12 +7,12 @@
"mapping":{
"type":"{dynamic_type}",
"index":"analyzed",
- "store":"yes",
+ "store":true,
"fields":{
"org":{
"type":"{dynamic_type}",
"index":"not_analyzed",
- "store":"yes"
+ "store":true
}
}
}
diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json
index 61f08af57e..0c2f9ab3f8 100644
--- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json
+++ b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json
@@ -4,7 +4,7 @@
"name":{
type:"string",
index:"analyzed",
- store:"yes"
+ store:true
}
}
}
diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json
index 02ce8957a5..37064a038b 100644
--- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json
+++ b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json
@@ -4,12 +4,12 @@
"name":{
"type" :"string",
"index" :"analyzed",
- "store" :"yes",
+ "store" :true,
"fields":{
"name":{
"type" :"string",
"index" :"analyzed",
- "store" :"yes"
+ "store" :true
},
"indexed":{
"type" :"string",
@@ -18,7 +18,7 @@
"not_indexed":{
"type" :"string",
"index" :"no",
- "store" :"yes"
+ "store" :true
}
}
}
diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json
index ea07675446..564d4b5ac0 100644
--- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json
+++ b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json
@@ -4,12 +4,12 @@
"name" : {
"type" : "string",
"index" : "analyzed",
- "store" : "yes",
+ "store" : true,
"fields": {
"name" : {
"type" : "string",
"index" : "analyzed",
- "store" : "yes"
+ "store" : true
},
"indexed":{
type:"string",
@@ -18,12 +18,12 @@
"not_indexed":{
type:"string",
index:"no",
- store:"yes"
+ store:true
},
"not_indexed2":{
type:"string",
index:"no",
- store:"yes"
+ store:true
}
}
}
diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping4.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping4.json
index 384c2634cb..7d2fea2d2f 100644
--- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping4.json
+++ b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/test-mapping4.json
@@ -4,12 +4,12 @@
"name":{
type:"string",
index:"analyzed",
- store:"yes",
+ store:true,
"fields":{
"not_indexed3":{
type:"string",
index:"no",
- store:"yes"
+ store:true
}
}
}
diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json
index 595f62210d..8224cd69cf 100644
--- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json
+++ b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json
@@ -4,7 +4,7 @@
"name":{
type:"string",
index:"analyzed",
- store:"yes",
+ store:true,
"fields":{
"indexed":{
type:"string",
@@ -13,7 +13,7 @@
"not_indexed":{
type:"string",
index:"no",
- store:"yes"
+ store:true
}
}
}
diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json
index 3cfca9c313..42315cc8e3 100644
--- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json
+++ b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json
@@ -4,7 +4,7 @@
"name":{
type:"string",
index:"analyzed",
- store:"yes",
+ store:true,
"fields":{
"indexed":{
type:"string",
@@ -13,12 +13,12 @@
"not_indexed":{
type:"string",
index:"no",
- store:"yes"
+ store:true
},
"not_indexed2":{
type:"string",
index:"no",
- store:"yes"
+ store:true
}
}
}
diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json
index 046b0c234d..4d42947526 100644
--- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json
+++ b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json
@@ -8,7 +8,7 @@
"not_indexed3":{
type:"string",
index:"no",
- store:"yes"
+ store:true
}
}
}
diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-field-type-no-default-field.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-field-type-no-default-field.json
index 99b74c0167..19d7e45ad4 100644
--- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-field-type-no-default-field.json
+++ b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-field-type-no-default-field.json
@@ -11,7 +11,7 @@
"not_indexed": {
"type": "string",
"index": "no",
- "store": "yes"
+ "store": true
}
}
},
@@ -23,7 +23,7 @@
},
"stored": {
"type": "long",
- "store": "yes"
+ "store": true
}
}
}
diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-fields.json b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-fields.json
index b116665829..3be34a9839 100644
--- a/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-fields.json
+++ b/core/src/test/resources/org/elasticsearch/index/mapper/multifield/test-multi-fields.json
@@ -4,30 +4,30 @@
"name": {
"type": "string",
"index": "analyzed",
- "store": "yes",
+ "store": true,
"fields": {
"indexed": {
"type": "string",
"index": "analyzed",
- "store": "no"
+ "store": false
},
"not_indexed": {
"type": "string",
"index": "no",
- "store": "yes"
+ "store": true
},
"test1": {
"type": "string",
"index": "analyzed",
- "store": "yes",
+ "store": true,
"fielddata": {
"loading": "eager"
}
},
"test2": {
"type": "token_count",
- "index": "not_analyzed",
- "store": "yes",
+ "index": true,
+ "store": true,
"analyzer": "simple"
}
}
@@ -47,4 +47,4 @@
}
}
}
-}
\ No newline at end of file
+}
diff --git a/core/src/test/resources/org/elasticsearch/index/mapper/simple/test-mapping.json b/core/src/test/resources/org/elasticsearch/index/mapper/simple/test-mapping.json
index e001673758..a37946bf2e 100644
--- a/core/src/test/resources/org/elasticsearch/index/mapper/simple/test-mapping.json
+++ b/core/src/test/resources/org/elasticsearch/index/mapper/simple/test-mapping.json
@@ -15,7 +15,7 @@
properties:{
first:{
type:"string",
- store:"yes"
+ store:true
},
last:{
type:"string",
@@ -30,7 +30,7 @@
properties:{
location:{
type:"string",
- store:"yes"
+ store:true
}
}
},
diff --git a/core/src/test/resources/org/elasticsearch/plugins/loading/classpath/es-plugin-test.properties b/core/src/test/resources/org/elasticsearch/plugins/loading/classpath/es-plugin-test.properties
deleted file mode 100644
index f57bea58cf..0000000000
--- a/core/src/test/resources/org/elasticsearch/plugins/loading/classpath/es-plugin-test.properties
+++ /dev/null
@@ -1,19 +0,0 @@
-################################################################
-# Licensed to Elasticsearch under one or more contributor
-# license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright
-# ownership. Elasticsearch licenses this file to you under
-# the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-################################################################
-plugin=org.elasticsearch.plugins.loading.classpath.InClassPathPlugin
\ No newline at end of file