summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy4
-rw-r--r--buildSrc/version.properties2
-rw-r--r--client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkBenchmarkTask.java4
-rw-r--r--core/build.gradle120
-rw-r--r--core/src/main/java/org/apache/log4j/Java9Hack.java37
-rw-r--r--core/src/main/java/org/apache/log4j/package-info.java23
-rw-r--r--core/src/main/java/org/elasticsearch/ElasticsearchException.java1
-rw-r--r--core/src/main/java/org/elasticsearch/ExceptionsHelper.java4
-rw-r--r--core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java3
-rw-r--r--core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java11
-rw-r--r--core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java5
-rw-r--r--core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java3
-rw-r--r--core/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java3
-rw-r--r--core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java5
-rw-r--r--core/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java3
-rw-r--r--core/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java3
-rw-r--r--core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java4
-rw-r--r--core/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteIndexTemplateAction.java3
-rw-r--r--core/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java3
-rw-r--r--core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeSettingsAction.java3
-rw-r--r--core/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java13
-rw-r--r--core/src/main/java/org/elasticsearch/action/bulk/Retry.java4
-rw-r--r--core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java7
-rw-r--r--core/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java3
-rw-r--r--core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java5
-rw-r--r--core/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java44
-rw-r--r--core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryAndFetchAsyncAction.java7
-rw-r--r--core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java9
-rw-r--r--core/src/main/java/org/elasticsearch/action/search/SearchQueryAndFetchAsyncAction.java4
-rw-r--r--core/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java7
-rw-r--r--core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryAndFetchAsyncAction.java9
-rw-r--r--core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java9
-rw-r--r--core/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java3
-rw-r--r--core/src/main/java/org/elasticsearch/action/support/AbstractListenableActionFuture.java4
-rw-r--r--core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java6
-rw-r--r--core/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java10
-rw-r--r--core/src/main/java/org/elasticsearch/action/support/TransportAction.java10
-rw-r--r--core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java7
-rw-r--r--core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java9
-rw-r--r--core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java5
-rw-r--r--core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java5
-rw-r--r--core/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java19
-rw-r--r--core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java27
-rw-r--r--core/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java6
-rw-r--r--core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java7
-rw-r--r--core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java5
-rw-r--r--core/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java3
-rw-r--r--core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java10
-rw-r--r--core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java13
-rw-r--r--core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java1
-rw-r--r--core/src/main/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandler.java11
-rw-r--r--core/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java4
-rw-r--r--core/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java4
-rw-r--r--core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java4
-rw-r--r--core/src/main/java/org/elasticsearch/bootstrap/JarHell.java6
-rw-r--r--core/src/main/java/org/elasticsearch/bootstrap/Natives.java4
-rw-r--r--core/src/main/java/org/elasticsearch/bootstrap/Seccomp.java4
-rw-r--r--core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java24
-rw-r--r--core/src/main/java/org/elasticsearch/cluster/ClusterModule.java4
-rw-r--r--core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java8
-rw-r--r--core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java6
-rw-r--r--core/src/main/java/org/elasticsearch/cluster/NodeConnectionsService.java5
-rw-r--r--core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java39
-rw-r--r--core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java4
-rw-r--r--core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java5
-rw-r--r--core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java3
-rw-r--r--core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java5
-rw-r--r--core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java6
-rw-r--r--core/src/main/java/org/elasticsearch/cluster/routing/RoutingService.java7
-rw-r--r--core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java6
-rw-r--r--core/src/main/java/org/elasticsearch/cluster/service/ClusterService.java75
-rw-r--r--core/src/main/java/org/elasticsearch/common/SuppressLoggerChecks.java3
-rw-r--r--core/src/main/java/org/elasticsearch/common/breaker/ChildMemoryCircuitBreaker.java8
-rw-r--r--core/src/main/java/org/elasticsearch/common/breaker/MemoryCircuitBreaker.java8
-rw-r--r--core/src/main/java/org/elasticsearch/common/component/AbstractComponent.java11
-rw-r--r--core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java13
-rw-r--r--core/src/main/java/org/elasticsearch/common/inject/spi/Elements.java4
-rw-r--r--core/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java4
-rw-r--r--core/src/main/java/org/elasticsearch/common/logging/ConsoleAppender.java260
-rw-r--r--core/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java27
-rw-r--r--core/src/main/java/org/elasticsearch/common/logging/ESLogger.java208
-rw-r--r--core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java47
-rw-r--r--core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java174
-rw-r--r--core/src/main/java/org/elasticsearch/common/logging/Loggers.java90
-rw-r--r--core/src/main/java/org/elasticsearch/common/logging/PrefixMessageFactory.java221
-rw-r--r--core/src/main/java/org/elasticsearch/common/logging/TerminalAppender.java44
-rw-r--r--core/src/main/java/org/elasticsearch/common/lucene/LoggerInfoStream.java10
-rw-r--r--core/src/main/java/org/elasticsearch/common/lucene/Lucene.java7
-rw-r--r--core/src/main/java/org/elasticsearch/common/network/IfConfig.java4
-rw-r--r--core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java10
-rw-r--r--core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java15
-rw-r--r--core/src/main/java/org/elasticsearch/common/settings/Setting.java16
-rw-r--r--core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java4
-rw-r--r--core/src/main/java/org/elasticsearch/common/util/IndexFolderUpgrader.java4
-rw-r--r--core/src/main/java/org/elasticsearch/common/util/concurrent/AbstractLifecycleRunnable.java6
-rw-r--r--core/src/main/java/org/elasticsearch/common/util/concurrent/AsyncIOProcessor.java6
-rw-r--r--core/src/main/java/org/elasticsearch/common/util/concurrent/LoggingRunnable.java13
-rw-r--r--core/src/main/java/org/elasticsearch/discovery/AckClusterStatePublishResponseHandler.java7
-rw-r--r--core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java11
-rw-r--r--core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java13
-rw-r--r--core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java43
-rw-r--r--core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java12
-rw-r--r--core/src/main/java/org/elasticsearch/discovery/zen/fd/NodesFaultDetection.java18
-rw-r--r--core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java20
-rw-r--r--core/src/main/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueue.java6
-rw-r--r--core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java13
-rw-r--r--core/src/main/java/org/elasticsearch/env/NodeEnvironment.java13
-rw-r--r--core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java9
-rw-r--r--core/src/main/java/org/elasticsearch/gateway/Gateway.java5
-rw-r--r--core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java4
-rw-r--r--core/src/main/java/org/elasticsearch/gateway/GatewayService.java3
-rw-r--r--core/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java6
-rw-r--r--core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java10
-rw-r--r--core/src/main/java/org/elasticsearch/gateway/MetaStateService.java3
-rw-r--r--core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java5
-rw-r--r--core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java10
-rw-r--r--core/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java7
-rw-r--r--core/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java25
-rw-r--r--core/src/main/java/org/elasticsearch/index/IndexService.java20
-rw-r--r--core/src/main/java/org/elasticsearch/index/IndexSettings.java4
-rw-r--r--core/src/main/java/org/elasticsearch/index/IndexWarmer.java6
-rw-r--r--core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java8
-rw-r--r--core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java7
-rw-r--r--core/src/main/java/org/elasticsearch/index/SearchSlowLog.java12
-rw-r--r--core/src/main/java/org/elasticsearch/index/analysis/Analysis.java12
-rw-r--r--core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java3
-rw-r--r--core/src/main/java/org/elasticsearch/index/codec/CodecService.java4
-rw-r--r--core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java6
-rw-r--r--core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java6
-rw-r--r--core/src/main/java/org/elasticsearch/index/engine/Engine.java28
-rw-r--r--core/src/main/java/org/elasticsearch/index/engine/EngineSearcher.java6
-rw-r--r--core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java6
-rw-r--r--core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java4
-rw-r--r--core/src/main/java/org/elasticsearch/index/fielddata/plain/DocValuesIndexFieldData.java6
-rw-r--r--core/src/main/java/org/elasticsearch/index/mapper/LegacyIpIndexFieldData.java16
-rwxr-xr-xcore/src/main/java/org/elasticsearch/index/mapper/MapperService.java3
-rw-r--r--core/src/main/java/org/elasticsearch/index/mapper/StringFieldMapper.java4
-rw-r--r--core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java2
-rw-r--r--core/src/main/java/org/elasticsearch/index/shard/AbstractIndexShardComponent.java7
-rw-r--r--core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java4
-rw-r--r--core/src/main/java/org/elasticsearch/index/shard/IndexShard.java5
-rw-r--r--core/src/main/java/org/elasticsearch/index/shard/IndexShardOperationsLock.java6
-rw-r--r--core/src/main/java/org/elasticsearch/index/shard/IndexingOperationListener.java19
-rw-r--r--core/src/main/java/org/elasticsearch/index/shard/RefreshListeners.java6
-rw-r--r--core/src/main/java/org/elasticsearch/index/shard/SearchOperationListener.java27
-rw-r--r--core/src/main/java/org/elasticsearch/index/shard/ShardPath.java6
-rw-r--r--core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java6
-rw-r--r--core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java6
-rw-r--r--core/src/main/java/org/elasticsearch/index/store/IndexStore.java4
-rw-r--r--core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java4
-rw-r--r--core/src/main/java/org/elasticsearch/index/store/Store.java32
-rw-r--r--core/src/main/java/org/elasticsearch/index/translog/Translog.java3
-rw-r--r--core/src/main/java/org/elasticsearch/index/translog/TranslogToolCli.java4
-rw-r--r--core/src/main/java/org/elasticsearch/index/warmer/ShardIndexWarmerService.java6
-rw-r--r--core/src/main/java/org/elasticsearch/indices/IndexingMemoryController.java3
-rw-r--r--core/src/main/java/org/elasticsearch/indices/IndicesService.java29
-rw-r--r--core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java5
-rw-r--r--core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java26
-rw-r--r--core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java6
-rw-r--r--core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java9
-rw-r--r--core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java30
-rw-r--r--core/src/main/java/org/elasticsearch/indices/recovery/RecoveriesCollection.java9
-rw-r--r--core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java24
-rw-r--r--core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java7
-rw-r--r--core/src/main/java/org/elasticsearch/indices/recovery/SharedFSRecoverySourceHandler.java4
-rw-r--r--core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java11
-rw-r--r--core/src/main/java/org/elasticsearch/monitor/fs/FsProbe.java3
-rw-r--r--core/src/main/java/org/elasticsearch/monitor/fs/FsService.java4
-rw-r--r--core/src/main/java/org/elasticsearch/monitor/jvm/JvmGcMonitorService.java8
-rw-r--r--core/src/main/java/org/elasticsearch/node/Node.java36
-rw-r--r--core/src/main/java/org/elasticsearch/plugins/PluginCli.java3
-rw-r--r--core/src/main/java/org/elasticsearch/plugins/PluginsService.java49
-rw-r--r--core/src/main/java/org/elasticsearch/repositories/RepositoriesService.java13
-rw-r--r--core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java17
-rw-r--r--core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java40
-rw-r--r--core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java9
-rw-r--r--core/src/main/java/org/elasticsearch/rest/RestController.java10
-rw-r--r--core/src/main/java/org/elasticsearch/rest/action/RestActionListener.java4
-rw-r--r--core/src/main/java/org/elasticsearch/script/ScriptService.java6
-rw-r--r--core/src/main/java/org/elasticsearch/search/suggest/completion2x/Completion090PostingsFormat.java4
-rw-r--r--core/src/main/java/org/elasticsearch/snapshots/RestoreService.java10
-rw-r--r--core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java7
-rw-r--r--core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java19
-rw-r--r--core/src/main/java/org/elasticsearch/tasks/LoggingTaskListener.java7
-rw-r--r--core/src/main/java/org/elasticsearch/tasks/TaskManager.java10
-rw-r--r--core/src/main/java/org/elasticsearch/tasks/TaskResultsService.java3
-rw-r--r--core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java7
-rw-r--r--core/src/main/java/org/elasticsearch/transport/TcpTransport.java36
-rw-r--r--core/src/main/java/org/elasticsearch/transport/TransportChannelResponseHandler.java10
-rw-r--r--core/src/main/java/org/elasticsearch/transport/TransportService.java42
-rw-r--r--core/src/main/java/org/elasticsearch/transport/local/LocalTransport.java13
-rw-r--r--core/src/main/java/org/elasticsearch/tribe/TribeService.java7
-rw-r--r--core/src/main/java/org/elasticsearch/watcher/FileWatcher.java4
-rw-r--r--core/src/test/java/org/apache/log4j/Java9HackTests.java28
-rw-r--r--core/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java8
-rw-r--r--core/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java9
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java6
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/ConcurrentRebalanceRoutingTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/ElectReplicaAsPrimaryDuringRelocationTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/FilterRoutingTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/PreferPrimaryAllocationTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryElectionRoutingTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryNotRelocatedWhileBeingRecoveredTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/ReplicaAllocatedAfterPrimaryTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesIntegrityTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/SameShardRoutingTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardOneReplicaRoutingTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/TenShardsOneReplicaRoutingTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/UpdateNumberOfReplicasTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java9
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java23
-rw-r--r--core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java31
-rw-r--r--core/src/test/java/org/elasticsearch/common/logging/ESLoggerTests.java169
-rw-r--r--core/src/test/java/org/elasticsearch/common/logging/LoggingConfigurationTests.java196
-rw-r--r--core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java41
-rw-r--r--core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java9
-rw-r--r--core/src/test/java/org/elasticsearch/common/util/concurrent/AbstractLifecycleRunnableTests.java5
-rw-r--r--core/src/test/java/org/elasticsearch/discovery/BlockingClusterStatePublishResponseHandlerTests.java6
-rw-r--r--core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java5
-rw-r--r--core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java3
-rw-r--r--core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java6
-rw-r--r--core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java5
-rw-r--r--core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java4
-rw-r--r--core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java84
-rw-r--r--core/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java8
-rw-r--r--core/src/test/java/org/elasticsearch/index/shard/CommitPointsTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java4
-rw-r--r--core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java5
-rw-r--r--core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java58
-rw-r--r--core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java4
-rw-r--r--core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java6
-rw-r--r--core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceTests.java6
-rw-r--r--core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java6
-rw-r--r--core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java3
-rw-r--r--core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java1
-rw-r--r--core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java14
-rw-r--r--core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java3
-rw-r--r--core/src/test/java/org/elasticsearch/test/MockLogAppender.java34
-rw-r--r--core/src/test/resources/org/elasticsearch/common/logging/config/logging.yml13
-rw-r--r--core/src/test/resources/org/elasticsearch/common/logging/config/test2/logging.yml10
-rw-r--r--core/src/test/resources/org/elasticsearch/common/logging/config/test2/test3/logging.yml10
-rw-r--r--distribution/build.gradle4
-rw-r--r--distribution/licenses/apache-log4j-extras-1.2.17.jar.sha11
-rw-r--r--distribution/licenses/apache-log4j-extras-NOTICE5
-rw-r--r--distribution/licenses/log4j-1.2-api-2.6.2.jar.sha11
-rw-r--r--distribution/licenses/log4j-1.2.17.jar.sha11
-rw-r--r--distribution/licenses/log4j-LICENSE.txt (renamed from distribution/licenses/apache-log4j-extras-LICENSE)0
-rw-r--r--distribution/licenses/log4j-NOTICE.txt (renamed from distribution/licenses/log4j-NOTICE)0
-rw-r--r--distribution/licenses/log4j-api-2.6.2.jar.sha11
-rw-r--r--distribution/licenses/log4j-api-LICENSE.txt (renamed from distribution/licenses/log4j-LICENSE)0
-rw-r--r--distribution/licenses/log4j-api-NOTICE.txt5
-rw-r--r--distribution/licenses/log4j-core-2.6.2.jar.sha11
-rw-r--r--distribution/licenses/log4j-core-LICENSE.txt202
-rw-r--r--distribution/licenses/log4j-core-NOTICE.txt5
-rw-r--r--distribution/src/main/resources/config/jvm.options4
-rw-r--r--distribution/src/main/resources/config/log4j2.properties73
-rw-r--r--distribution/src/main/resources/config/logging.yml86
-rw-r--r--modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java13
-rw-r--r--modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java3
-rw-r--r--modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java1
-rw-r--r--modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java6
-rw-r--r--modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java4
-rw-r--r--modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java4
-rw-r--r--modules/reindex/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java14
-rw-r--r--modules/reindex/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java6
-rw-r--r--modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java4
-rw-r--r--modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java4
-rw-r--r--modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java4
-rw-r--r--modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java9
-rw-r--r--modules/transport-netty3/build.gradle2
-rw-r--r--modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpServerTransport.java13
-rw-r--r--modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3InternalESLogger.java6
-rw-r--r--modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3OpenChannelsHandler.java6
-rw-r--r--modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3Transport.java5
-rw-r--r--modules/transport-netty4/build.gradle2
-rw-r--r--modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java9
-rw-r--r--modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4InternalESLogger.java4
-rw-r--r--modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4OpenChannelsHandler.java8
-rw-r--r--modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java5
-rw-r--r--plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/AzureDiscoveryModule.java6
-rw-r--r--plugins/discovery-azure-classic/src/main/java/org/elasticsearch/plugin/discovery/azure/classic/AzureDiscoveryPlugin.java4
-rw-r--r--plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java10
-rw-r--r--plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsSigner.java4
-rw-r--r--plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java3
-rw-r--r--plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java35
-rw-r--r--plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java4
-rw-r--r--plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java3
-rw-r--r--plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceModule.java4
-rw-r--r--plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java3
-rw-r--r--plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java5
-rw-r--r--plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java4
-rw-r--r--plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoverTests.java5
-rw-r--r--plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMockUtils.java4
-rw-r--r--plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java5
-rw-r--r--plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/MapperAttachmentsPlugin.java15
-rw-r--r--plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobContainer.java4
-rw-r--r--plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java3
-rw-r--r--plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java12
-rw-r--r--plugins/repository-hdfs/build.gradle5
-rw-r--r--plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsSigner.java4
-rw-r--r--plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java8
-rw-r--r--plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/DefaultS3OutputStream.java4
-rw-r--r--plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/TestAmazonS3.java4
-rw-r--r--plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java3
-rw-r--r--qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerConfigurationTests.java126
-rw-r--r--qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java98
-rw-r--r--qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/log4j2.properties32
-rw-r--r--qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/second/log4j2.properties8
-rw-r--r--qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/third/log4j2.properties8
-rw-r--r--qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java4
-rw-r--r--test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java9
-rw-r--r--test/framework/src/main/java/org/elasticsearch/common/logging/TestLoggers.java57
-rw-r--r--test/framework/src/main/java/org/elasticsearch/index/store/EsBaseDirectoryTestCase.java4
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java7
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java4
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java14
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java4
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java4
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java4
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/OldIndexUtils.java8
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/TestCluster.java4
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruption.java4
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkPartition.java204
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/disruption/SingleNodeDisruption.java4
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java6
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java4
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/junit/listeners/LoggingListener.java14
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java4
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java5
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java4
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Stash.java4
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java4
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanAssertion.java4
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanEqualToAssertion.java4
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/IsFalseAssertion.java4
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/IsTrueAssertion.java4
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LengthAssertion.java4
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LessThanAssertion.java4
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LessThanOrEqualToAssertion.java4
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java4
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java5
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java5
-rw-r--r--test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java18
-rw-r--r--test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java7
-rw-r--r--test/framework/src/test/java/org/elasticsearch/test/test/LoggingListenerTests.java99
-rw-r--r--test/logger-usage/src/test/java/org/elasticsearch/test/loggerusage/ESLoggerUsageTests.java24
368 files changed, 2906 insertions, 2651 deletions
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy
index b7b563bf15..0a454ee100 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy
@@ -97,8 +97,8 @@ public class PluginBuildPlugin extends BuildPlugin {
// with a full elasticsearch server that includes optional deps
provided "org.locationtech.spatial4j:spatial4j:${project.versions.spatial4j}"
provided "com.vividsolutions:jts:${project.versions.jts}"
- provided "log4j:log4j:${project.versions.log4j}"
- provided "log4j:apache-log4j-extras:${project.versions.log4j}"
+ provided "org.apache.logging.log4j:log4j-api:${project.versions.log4j}"
+ provided "org.apache.logging.log4j:log4j-core:${project.versions.log4j}"
provided "net.java.dev.jna:jna:${project.versions.jna}"
}
}
diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index 7ac4d1f7ed..e96f982459 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -6,7 +6,7 @@ spatial4j = 0.6
jts = 1.13
jackson = 2.8.1
snakeyaml = 1.15
-log4j = 1.2.17
+log4j = 2.6.2
slf4j = 1.6.2
jna = 4.2.2
diff --git a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkBenchmarkTask.java b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkBenchmarkTask.java
index daf7213ed5..214a75d12c 100644
--- a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkBenchmarkTask.java
+++ b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkBenchmarkTask.java
@@ -18,13 +18,13 @@
*/
package org.elasticsearch.client.benchmark.ops.bulk;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.client.benchmark.BenchmarkTask;
import org.elasticsearch.client.benchmark.metrics.Sample;
import org.elasticsearch.client.benchmark.metrics.SampleRecorder;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import java.io.BufferedReader;
@@ -135,7 +135,7 @@ public class BulkBenchmarkTask implements BenchmarkTask {
private static final class BulkIndexer implements Runnable {
- private static final ESLogger logger = ESLoggerFactory.getLogger(BulkIndexer.class.getName());
+ private static final Logger logger = ESLoggerFactory.getLogger(BulkIndexer.class.getName());
private final BlockingQueue<List<String>> bulkData;
private final int warmupIterations;
diff --git a/core/build.gradle b/core/build.gradle
index 0e87c21757..4eab7ed5d5 100644
--- a/core/build.gradle
+++ b/core/build.gradle
@@ -85,8 +85,10 @@ dependencies {
compile "com.vividsolutions:jts:${versions.jts}", optional
// logging
- compile "log4j:log4j:${versions.log4j}", optional
- compile "log4j:apache-log4j-extras:${versions.log4j}", optional
+ compile "org.apache.logging.log4j:log4j-api:${versions.log4j}", optional
+ compile "org.apache.logging.log4j:log4j-core:${versions.log4j}", optional
+ // to bridge dependencies that are still on Log4j 1 to Log4j 2
+ compile "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}", optional
compile "net.java.dev.jna:jna:${versions.jna}"
@@ -154,32 +156,94 @@ thirdPartyAudit.excludes = [
// classes are missing!
// from com.fasterxml.jackson.dataformat.yaml.YAMLMapper (jackson-dataformat-yaml)
- 'com.fasterxml.jackson.databind.ObjectMapper',
-
- // from org.apache.log4j.receivers.net.JMSReceiver (log4j-extras)
- 'javax.jms.Message',
- 'javax.jms.MessageListener',
- 'javax.jms.ObjectMessage',
- 'javax.jms.TopicConnection',
- 'javax.jms.TopicConnectionFactory',
- 'javax.jms.TopicPublisher',
- 'javax.jms.TopicSession',
- 'javax.jms.TopicSubscriber',
-
- // from org.apache.log4j.net.SMTPAppender (log4j)
- 'javax.mail.Authenticator',
- 'javax.mail.Message$RecipientType',
- 'javax.mail.Message',
- 'javax.mail.Multipart',
- 'javax.mail.PasswordAuthentication',
- 'javax.mail.Session',
- 'javax.mail.Transport',
- 'javax.mail.internet.InternetAddress',
- 'javax.mail.internet.InternetHeaders',
- 'javax.mail.internet.MimeBodyPart',
- 'javax.mail.internet.MimeMessage',
- 'javax.mail.internet.MimeMultipart',
- 'javax.mail.internet.MimeUtility',
+ 'com.fasterxml.jackson.databind.ObjectMapper',
+
+ // from log4j
+ 'com.fasterxml.jackson.annotation.JsonInclude$Include',
+ 'com.fasterxml.jackson.databind.DeserializationContext',
+ 'com.fasterxml.jackson.databind.JsonMappingException',
+ 'com.fasterxml.jackson.databind.JsonNode',
+ 'com.fasterxml.jackson.databind.Module$SetupContext',
+ 'com.fasterxml.jackson.databind.ObjectReader',
+ 'com.fasterxml.jackson.databind.ObjectWriter',
+ 'com.fasterxml.jackson.databind.SerializerProvider',
+ 'com.fasterxml.jackson.databind.deser.std.StdDeserializer',
+ 'com.fasterxml.jackson.databind.deser.std.StdScalarDeserializer',
+ 'com.fasterxml.jackson.databind.module.SimpleModule',
+ 'com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter',
+ 'com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider',
+ 'com.fasterxml.jackson.databind.ser.std.StdScalarSerializer',
+ 'com.fasterxml.jackson.databind.ser.std.StdSerializer',
+ 'com.fasterxml.jackson.dataformat.xml.JacksonXmlModule',
+ 'com.fasterxml.jackson.dataformat.xml.XmlMapper',
+ 'com.fasterxml.jackson.dataformat.xml.util.DefaultXmlPrettyPrinter',
+ 'com.lmax.disruptor.BlockingWaitStrategy',
+ 'com.lmax.disruptor.BusySpinWaitStrategy',
+ 'com.lmax.disruptor.EventFactory',
+ 'com.lmax.disruptor.EventTranslator',
+ 'com.lmax.disruptor.EventTranslatorTwoArg',
+ 'com.lmax.disruptor.EventTranslatorVararg',
+ 'com.lmax.disruptor.ExceptionHandler',
+ 'com.lmax.disruptor.LifecycleAware',
+ 'com.lmax.disruptor.RingBuffer',
+ 'com.lmax.disruptor.Sequence',
+ 'com.lmax.disruptor.SequenceReportingEventHandler',
+ 'com.lmax.disruptor.SleepingWaitStrategy',
+ 'com.lmax.disruptor.TimeoutBlockingWaitStrategy',
+ 'com.lmax.disruptor.WaitStrategy',
+ 'com.lmax.disruptor.YieldingWaitStrategy',
+ 'com.lmax.disruptor.dsl.Disruptor',
+ 'com.lmax.disruptor.dsl.ProducerType',
+ 'javax.jms.Connection',
+ 'javax.jms.ConnectionFactory',
+ 'javax.jms.Destination',
+ 'javax.jms.Message',
+ 'javax.jms.MessageConsumer',
+ 'javax.jms.MessageListener',
+ 'javax.jms.MessageProducer',
+ 'javax.jms.ObjectMessage',
+ 'javax.jms.Session',
+ 'javax.mail.Authenticator',
+ 'javax.mail.Message$RecipientType',
+ 'javax.mail.PasswordAuthentication',
+ 'javax.mail.Session',
+ 'javax.mail.Transport',
+ 'javax.mail.internet.InternetAddress',
+ 'javax.mail.internet.InternetHeaders',
+ 'javax.mail.internet.MimeBodyPart',
+ 'javax.mail.internet.MimeMessage',
+ 'javax.mail.internet.MimeMultipart',
+ 'javax.mail.internet.MimeUtility',
+ 'javax.mail.util.ByteArrayDataSource',
+ 'javax.persistence.AttributeConverter',
+ 'javax.persistence.EntityManager',
+ 'javax.persistence.EntityManagerFactory',
+ 'javax.persistence.EntityTransaction',
+ 'javax.persistence.Persistence',
+ 'javax.persistence.PersistenceException',
+ 'org.apache.commons.compress.compressors.CompressorStreamFactory',
+ 'org.apache.commons.compress.utils.IOUtils',
+ 'org.apache.commons.csv.CSVFormat',
+ 'org.apache.commons.csv.QuoteMode',
+ 'org.apache.kafka.clients.producer.KafkaProducer',
+ 'org.apache.kafka.clients.producer.Producer',
+ 'org.apache.kafka.clients.producer.ProducerRecord',
+ 'org.codehaus.stax2.XMLStreamWriter2',
+ 'org.osgi.framework.AdaptPermission',
+ 'org.osgi.framework.AdminPermission',
+ 'org.osgi.framework.Bundle',
+ 'org.osgi.framework.BundleActivator',
+ 'org.osgi.framework.BundleContext',
+ 'org.osgi.framework.BundleEvent',
+ 'org.osgi.framework.BundleReference',
+ 'org.osgi.framework.FrameworkUtil',
+ 'org.osgi.framework.SynchronousBundleListener',
+ 'org.osgi.framework.wiring.BundleWire',
+ 'org.osgi.framework.wiring.BundleWiring',
+ 'org.zeromq.ZMQ$Context',
+ 'org.zeromq.ZMQ$Socket',
+ 'org.zeromq.ZMQ',
+
// from org.locationtech.spatial4j.io.GeoJSONReader (spatial4j)
'org.noggit.JSONParser',
]
diff --git a/core/src/main/java/org/apache/log4j/Java9Hack.java b/core/src/main/java/org/apache/log4j/Java9Hack.java
deleted file mode 100644
index 831cf5b35a..0000000000
--- a/core/src/main/java/org/apache/log4j/Java9Hack.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.log4j;
-
-import org.apache.log4j.helpers.ThreadLocalMap;
-
-/**
- * Log4j 1.2 MDC breaks because it parses java.version incorrectly (does not handle new java9 versioning).
- *
- * This hack fixes up the pkg private members as if it had detected the java version correctly.
- */
-public class Java9Hack {
-
- public static void fixLog4j() {
- if (MDC.mdc.tlm == null) {
- MDC.mdc.java1 = false;
- MDC.mdc.tlm = new ThreadLocalMap();
- }
- }
-}
diff --git a/core/src/main/java/org/apache/log4j/package-info.java b/core/src/main/java/org/apache/log4j/package-info.java
deleted file mode 100644
index f628016aa6..0000000000
--- a/core/src/main/java/org/apache/log4j/package-info.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * Hack to fix Log4j 1.2 in Java 9.
- */
-package org.apache.log4j;
diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchException.java b/core/src/main/java/org/elasticsearch/ElasticsearchException.java
index 7830d521ff..750f133ea1 100644
--- a/core/src/main/java/org/elasticsearch/ElasticsearchException.java
+++ b/core/src/main/java/org/elasticsearch/ElasticsearchException.java
@@ -19,6 +19,7 @@
package org.elasticsearch;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.support.replication.ReplicationOperation;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.common.io.stream.StreamInput;
diff --git a/core/src/main/java/org/elasticsearch/ExceptionsHelper.java b/core/src/main/java/org/elasticsearch/ExceptionsHelper.java
index 772daab2c7..c30662a093 100644
--- a/core/src/main/java/org/elasticsearch/ExceptionsHelper.java
+++ b/core/src/main/java/org/elasticsearch/ExceptionsHelper.java
@@ -19,12 +19,12 @@
package org.elasticsearch;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexFormatTooNewException;
import org.apache.lucene.index.IndexFormatTooOldException;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.index.Index;
import org.elasticsearch.rest.RestStatus;
@@ -39,7 +39,7 @@ import java.util.Set;
public final class ExceptionsHelper {
- private static final ESLogger logger = Loggers.getLogger(ExceptionsHelper.class);
+ private static final Logger logger = Loggers.getLogger(ExceptionsHelper.class);
public static RuntimeException convertToRuntime(Exception e) {
if (e instanceof RuntimeException) {
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java
index 7d0a628892..20c88e8f43 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.action.admin.cluster.health;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.IndicesOptions;
@@ -105,7 +106,7 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction<
@Override
public void onFailure(String source, Exception e) {
- logger.error("unexpected failure during [{}]", e, source);
+ logger.error(new ParameterizedMessage("unexpected failure during [{}]", source), e);
listener.onFailure(e);
}
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java
index 875562ad64..10c0b25b70 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java
@@ -19,6 +19,8 @@
package org.elasticsearch.action.admin.cluster.reroute;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
@@ -33,7 +35,6 @@ import org.elasticsearch.cluster.routing.allocation.RoutingExplanations;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@@ -77,13 +78,13 @@ public class TransportClusterRerouteAction extends TransportMasterNodeAction<Clu
private final ClusterRerouteRequest request;
private final ActionListener<ClusterRerouteResponse> listener;
- private final ESLogger logger;
+ private final Logger logger;
private final AllocationService allocationService;
private volatile ClusterState clusterStateToSend;
private volatile RoutingExplanations explanations;
- ClusterRerouteResponseAckedClusterStateUpdateTask(ESLogger logger, AllocationService allocationService, ClusterRerouteRequest request,
- ActionListener<ClusterRerouteResponse> listener) {
+ ClusterRerouteResponseAckedClusterStateUpdateTask(Logger logger, AllocationService allocationService, ClusterRerouteRequest request,
+ ActionListener<ClusterRerouteResponse> listener) {
super(Priority.IMMEDIATE, request, listener);
this.request = request;
this.listener = listener;
@@ -103,7 +104,7 @@ public class TransportClusterRerouteAction extends TransportMasterNodeAction<Clu
@Override
public void onFailure(String source, Exception e) {
- logger.debug("failed to perform [{}]", e, source);
+ logger.debug(new ParameterizedMessage("failed to perform [{}]", source), e);
super.onFailure(source, e);
}
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java
index 4464b5d793..9f200ef9cf 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.action.admin.cluster.settings;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
@@ -148,7 +149,7 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct
@Override
public void onFailure(String source, Exception e) {
//if the reroute fails we only log
- logger.debug("failed to perform [{}]", e, source);
+ logger.debug(new ParameterizedMessage("failed to perform [{}]", source), e);
listener.onFailure(new ElasticsearchException("reroute after update settings failed", e));
}
@@ -166,7 +167,7 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct
@Override
public void onFailure(String source, Exception e) {
- logger.debug("failed to perform [{}]", e, source);
+ logger.debug(new ParameterizedMessage("failed to perform [{}]", source), e);
super.onFailure(source, e);
}
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java
index 06810c4dcd..6b526dae2b 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/close/TransportCloseIndexAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.action.admin.indices.close;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.DestructiveOperations;
@@ -108,7 +109,7 @@ public class TransportCloseIndexAction extends TransportMasterNodeAction<CloseIn
@Override
public void onFailure(Exception t) {
- logger.debug("failed to close indices [{}]", t, (Object)concreteIndices);
+ logger.debug(new ParameterizedMessage("failed to close indices [{}]", (Object) concreteIndices), t);
listener.onFailure(t);
}
});
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java
index 947936bddc..a10a901dd5 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/delete/TransportDeleteIndexAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.action.admin.indices.delete;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.DestructiveOperations;
@@ -100,7 +101,7 @@ public class TransportDeleteIndexAction extends TransportMasterNodeAction<Delete
@Override
public void onFailure(Exception t) {
- logger.debug("failed to delete indices [{}]", t, concreteIndices);
+ logger.debug(new ParameterizedMessage("failed to delete indices [{}]", concreteIndices), t);
listener.onFailure(t);
}
});
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java
index a03472262e..5bfd8c2cbb 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/TransportPutMappingAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.action.admin.indices.mapping.put;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
@@ -92,12 +93,12 @@ public class TransportPutMappingAction extends TransportMasterNodeAction<PutMapp
@Override
public void onFailure(Exception t) {
- logger.debug("failed to put mappings on indices [{}], type [{}]", t, concreteIndices, request.type());
+ logger.debug(new ParameterizedMessage("failed to put mappings on indices [{}], type [{}]", concreteIndices, request.type()), t);
listener.onFailure(t);
}
});
} catch (IndexNotFoundException ex) {
- logger.debug("failed to put mappings on indices [{}], type [{}]", ex, request.indices(), request.type());
+ logger.debug(new ParameterizedMessage("failed to put mappings on indices [{}], type [{}]", request.indices(), request.type()), ex);
throw ex;
}
}
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java
index d672d55b32..c89f1f856d 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.action.admin.indices.open;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.DestructiveOperations;
@@ -93,7 +94,7 @@ public class TransportOpenIndexAction extends TransportMasterNodeAction<OpenInde
@Override
public void onFailure(Exception t) {
- logger.debug("failed to open indices [{}]", t, (Object)concreteIndices);
+ logger.debug(new ParameterizedMessage("failed to open indices [{}]", (Object) concreteIndices), t);
listener.onFailure(t);
}
});
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java
index 5655400465..0f5fb1e188 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.action.admin.indices.settings.put;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
@@ -92,7 +93,7 @@ public class TransportUpdateSettingsAction extends TransportMasterNodeAction<Upd
@Override
public void onFailure(Exception t) {
- logger.debug("failed to update settings on indices [{}]", t, (Object)concreteIndices);
+ logger.debug(new ParameterizedMessage("failed to update settings on indices [{}]", (Object) concreteIndices), t);
listener.onFailure(t);
}
});
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java
index 2de65e090e..e13578d66d 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.action.admin.indices.shards;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.FailedNodeException;
@@ -41,7 +42,6 @@ import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.collect.ImmutableOpenIntMap;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.CountDown;
import org.elasticsearch.gateway.AsyncShardFetch;
@@ -150,7 +150,7 @@ public class TransportIndicesShardStoresAction extends TransportMasterNodeReadAc
private class InternalAsyncFetch extends AsyncShardFetch<NodeGatewayStartedShards> {
- InternalAsyncFetch(ESLogger logger, String type, ShardId shardId, TransportNodesListGatewayStartedShards action) {
+ InternalAsyncFetch(Logger logger, String type, ShardId shardId, TransportNodesListGatewayStartedShards action) {
super(logger, type, shardId, action);
}
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteIndexTemplateAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteIndexTemplateAction.java
index 33addcb844..9ef0f57389 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteIndexTemplateAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteIndexTemplateAction.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.action.admin.indices.template.delete;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
@@ -73,7 +74,7 @@ public class TransportDeleteIndexTemplateAction extends TransportMasterNodeActio
@Override
public void onFailure(Exception e) {
- logger.debug("failed to delete templates [{}]", e, request.name());
+ logger.debug(new ParameterizedMessage("failed to delete templates [{}]", request.name()), e);
listener.onFailure(e);
}
});
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java
index 0d14c4d24d..a772ecb406 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.action.admin.indices.template.put;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
@@ -94,7 +95,7 @@ public class TransportPutIndexTemplateAction extends TransportMasterNodeAction<P
@Override
public void onFailure(Exception e) {
- logger.debug("failed to put template [{}]", e, request.name());
+ logger.debug(new ParameterizedMessage("failed to put template [{}]", request.name()), e);
listener.onFailure(e);
}
});
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeSettingsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeSettingsAction.java
index 3b77892086..8732e5cb28 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeSettingsAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeSettingsAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.action.admin.indices.upgrade.post;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
@@ -79,7 +80,7 @@ public class TransportUpgradeSettingsAction extends TransportMasterNodeAction<Up
@Override
public void onFailure(Exception t) {
- logger.debug("failed to upgrade minimum compatibility version settings on indices [{}]", t, request.versions().keySet());
+ logger.debug(new ParameterizedMessage("failed to upgrade minimum compatibility version settings on indices [{}]", request.versions().keySet()), t);
listener.onFailure(t);
}
});
diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java
index a829e4b029..2dba45e876 100644
--- a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java
+++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java
@@ -18,9 +18,10 @@
*/
package org.elasticsearch.action.bulk;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
@@ -31,7 +32,7 @@ import java.util.concurrent.TimeUnit;
* Abstracts the low-level details of bulk request handling
*/
abstract class BulkRequestHandler {
- protected final ESLogger logger;
+ protected final Logger logger;
protected final Client client;
protected BulkRequestHandler(Client client) {
@@ -76,12 +77,12 @@ abstract class BulkRequestHandler {
listener.afterBulk(executionId, bulkRequest, bulkResponse);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
- logger.info("Bulk request {} has been cancelled.", e, executionId);
+ logger.info(new ParameterizedMessage("Bulk request {} has been cancelled.", executionId), e);
if (!afterCalled) {
listener.afterBulk(executionId, bulkRequest, e);
}
} catch (Exception e) {
- logger.warn("Failed to execute bulk request {}.", e, executionId);
+ logger.warn(new ParameterizedMessage("Failed to execute bulk request {}.", executionId), e);
if (!afterCalled) {
listener.afterBulk(executionId, bulkRequest, e);
}
@@ -142,10 +143,10 @@ abstract class BulkRequestHandler {
bulkRequestSetupSuccessful = true;
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
- logger.info("Bulk request {} has been cancelled.", e, executionId);
+ logger.info(new ParameterizedMessage("Bulk request {} has been cancelled.", executionId), e);
listener.afterBulk(executionId, bulkRequest, e);
} catch (Exception e) {
- logger.warn("Failed to execute bulk request {}.", e, executionId);
+ logger.warn(new ParameterizedMessage("Failed to execute bulk request {}.", executionId), e);
listener.afterBulk(executionId, bulkRequest, e);
} finally {
if (!bulkRequestSetupSuccessful && acquired) { // if we fail on client.bulk() release the semaphore
diff --git a/core/src/main/java/org/elasticsearch/action/bulk/Retry.java b/core/src/main/java/org/elasticsearch/action/bulk/Retry.java
index 95778785ab..375796ae80 100644
--- a/core/src/main/java/org/elasticsearch/action/bulk/Retry.java
+++ b/core/src/main/java/org/elasticsearch/action/bulk/Retry.java
@@ -18,12 +18,12 @@
*/
package org.elasticsearch.action.bulk;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.client.Client;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.FutureUtils;
@@ -89,7 +89,7 @@ public class Retry {
}
static class AbstractRetryHandler implements ActionListener<BulkResponse> {
- private final ESLogger logger;
+ private final Logger logger;
private final Client client;
private final ActionListener<BulkResponse> listener;
private final Iterator<TimeValue> backoff;
diff --git a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java
index 745449c0a7..5538a2bc5c 100644
--- a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java
+++ b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.action.bulk;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionRequest;
@@ -30,8 +31,8 @@ import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.index.TransportIndexAction;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.replication.ReplicationRequest;
-import org.elasticsearch.action.support.replication.TransportWriteAction;
import org.elasticsearch.action.support.replication.ReplicationResponse.ShardInfo;
+import org.elasticsearch.action.support.replication.TransportWriteAction;
import org.elasticsearch.action.update.UpdateHelper;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
@@ -183,9 +184,9 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
private <ReplicationRequestT extends ReplicationRequest<ReplicationRequestT>> void logFailure(Throwable t, String operation, ShardId shardId, ReplicationRequest<ReplicationRequestT> request) {
if (ExceptionsHelper.status(t) == RestStatus.CONFLICT) {
- logger.trace("{} failed to execute bulk item ({}) {}", t, shardId, operation, request);
+ logger.trace(new ParameterizedMessage("{} failed to execute bulk item ({}) {}", shardId, operation, request), t);
} else {
- logger.debug("{} failed to execute bulk item ({}) {}", t, shardId, operation, request);
+ logger.debug(new ParameterizedMessage("{} failed to execute bulk item ({}) {}", shardId, operation, request), t);
}
}
diff --git a/core/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java b/core/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java
index d9e5eaa2e1..59d05a228b 100644
--- a/core/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java
+++ b/core/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.action.get;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.TransportActions;
@@ -92,7 +93,7 @@ public class TransportShardMultiGetAction extends TransportSingleShardAction<Mul
if (TransportActions.isShardNotAvailableException(e)) {
throw (ElasticsearchException) e;
} else {
- logger.debug("{} failed to execute multi_get for [{}]/[{}]", e, shardId, item.type(), item.id());
+ logger.debug(new ParameterizedMessage("{} failed to execute multi_get for [{}]/[{}]", shardId, item.type(), item.id()), e);
response.add(request.locations.get(i), new MultiGetResponse.Failure(request.index(), item.type(), item.id(), e));
}
}
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java
index cf7d2cf1e5..0927debd9f 100644
--- a/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java
+++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java
@@ -19,6 +19,7 @@
package org.elasticsearch.action.ingest;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
@@ -90,7 +91,7 @@ public final class IngestActionFilter extends AbstractComponent implements Actio
void processIndexRequest(Task task, String action, ActionListener listener, ActionFilterChain chain, IndexRequest indexRequest) {
executionService.executeIndexRequest(indexRequest, t -> {
- logger.error("failed to execute pipeline [{}]", t, indexRequest.getPipeline());
+ logger.error(new ParameterizedMessage("failed to execute pipeline [{}]", indexRequest.getPipeline()), t);
listener.onFailure(t);
}, success -> {
// TransportIndexAction uses IndexRequest and same action name on the node that receives the request and the node that
@@ -105,7 +106,7 @@ public final class IngestActionFilter extends AbstractComponent implements Actio
long ingestStartTimeInNanos = System.nanoTime();
BulkRequestModifier bulkRequestModifier = new BulkRequestModifier(original);
executionService.executeBulkRequest(() -> bulkRequestModifier, (indexRequest, exception) -> {
- logger.debug("failed to execute pipeline [{}] for document [{}/{}/{}]", exception, indexRequest.getPipeline(), indexRequest.index(), indexRequest.type(), indexRequest.id());
+ logger.debug(new ParameterizedMessage("failed to execute pipeline [{}] for document [{}/{}/{}]", indexRequest.getPipeline(), indexRequest.index(), indexRequest.type(), indexRequest.id()), exception);
bulkRequestModifier.markCurrentItemAsFailed(exception);
}, (exception) -> {
if (exception != null) {
diff --git a/core/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java
index a61384f959..3292562d15 100644
--- a/core/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java
+++ b/core/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java
@@ -20,6 +20,8 @@
package org.elasticsearch.action.search;
import com.carrotsearch.hppc.IntArrayList;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.elasticsearch.action.ActionListener;
@@ -35,7 +37,6 @@ import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.search.SearchPhaseResult;
import org.elasticsearch.search.SearchShardTarget;
@@ -58,7 +59,7 @@ import static org.elasticsearch.action.search.TransportSearchHelper.internalSear
abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult> extends AbstractAsyncAction {
- protected final ESLogger logger;
+ protected final Logger logger;
protected final SearchTransportService searchTransportService;
private final IndexNameExpressionResolver indexNameExpressionResolver;
protected final SearchPhaseController searchPhaseController;
@@ -77,7 +78,7 @@ abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult>
private final Object shardFailuresMutex = new Object();
protected volatile ScoreDoc[] sortedShardDocs;
- protected AbstractSearchAsyncAction(ESLogger logger, SearchTransportService searchTransportService, ClusterService clusterService,
+ protected AbstractSearchAsyncAction(Logger logger, SearchTransportService searchTransportService, ClusterService clusterService,
IndexNameExpressionResolver indexNameExpressionResolver,
SearchPhaseController searchPhaseController, ThreadPool threadPool, SearchRequest request,
ActionListener<SearchResponse> listener) {
@@ -191,7 +192,9 @@ abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult>
innerMoveToSecondPhase();
} catch (Exception e) {
if (logger.isDebugEnabled()) {
- logger.debug("{}: Failed to execute [{}] while moving to second phase", e, shardIt.shardId(), request);
+ logger.debug(
+ new ParameterizedMessage("{}: Failed to execute [{}] while moving to second phase", shardIt.shardId(), request),
+ e);
}
raiseEarlyFailure(new ReduceSearchPhaseException(firstPhaseName(), "", e, buildShardFailures()));
}
@@ -211,15 +214,21 @@ abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult>
if (totalOps.incrementAndGet() == expectedTotalOps) {
if (logger.isDebugEnabled()) {
if (e != null && !TransportActions.isShardNotAvailableException(e)) {
- logger.debug("{}: Failed to execute [{}]", e, shard != null ? shard.shortSummary() : shardIt.shardId(), request);
+ logger.debug(
+ new ParameterizedMessage(
+ "{}: Failed to execute [{}]",
+ shard != null ? shard.shortSummary() :
+ shardIt.shardId(),
+ request),
+ e);
} else if (logger.isTraceEnabled()) {
- logger.trace("{}: Failed to execute [{}]", e, shard, request);
+ logger.trace(new ParameterizedMessage("{}: Failed to execute [{}]", shard, request), e);
}
}
final ShardSearchFailure[] shardSearchFailures = buildShardFailures();
if (successfulOps.get() == 0) {
if (logger.isDebugEnabled()) {
- logger.debug("All shards failed for phase: [{}]", e, firstPhaseName());
+ logger.debug(new ParameterizedMessage("All shards failed for phase: [{}]", firstPhaseName()), e);
}
// no successful ops, raise an exception
@@ -236,10 +245,13 @@ abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult>
final ShardRouting nextShard = shardIt.nextOrNull();
final boolean lastShard = nextShard == null;
// trace log this exception
- if (logger.isTraceEnabled()) {
- logger.trace("{}: Failed to execute [{}] lastShard [{}]", e, shard != null ? shard.shortSummary() : shardIt.shardId(),
- request, lastShard);
- }
+ logger.trace(
+ () -> new ParameterizedMessage(
+ "{}: Failed to execute [{}] lastShard [{}]",
+ shard != null ? shard.shortSummary() : shardIt.shardId(),
+ request,
+ lastShard),
+ e);
if (!lastShard) {
try {
performFirstPhase(shardIndex, shardIt, nextShard);
@@ -251,8 +263,14 @@ abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult>
// no more shards active, add a failure
if (logger.isDebugEnabled() && !logger.isTraceEnabled()) { // do not double log this exception
if (e != null && !TransportActions.isShardNotAvailableException(e)) {
- logger.debug("{}: Failed to execute [{}] lastShard [{}]", e,
- shard != null ? shard.shortSummary() : shardIt.shardId(), request, lastShard);
+ logger.debug(
+ new ParameterizedMessage(
+ "{}: Failed to execute [{}] lastShard [{}]",
+ shard != null ? shard.shortSummary() :
+ shardIt.shardId(),
+ request,
+ lastShard),
+ e);
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryAndFetchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryAndFetchAsyncAction.java
index 8614d7b118..f769b416f4 100644
--- a/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryAndFetchAsyncAction.java
+++ b/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryAndFetchAsyncAction.java
@@ -19,12 +19,13 @@
package org.elasticsearch.action.search;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRunnable;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.search.action.SearchTransportService;
import org.elasticsearch.search.controller.SearchPhaseController;
@@ -43,7 +44,7 @@ class SearchDfsQueryAndFetchAsyncAction extends AbstractSearchAsyncAction<DfsSea
private final AtomicArray<QueryFetchSearchResult> queryFetchResults;
- SearchDfsQueryAndFetchAsyncAction(ESLogger logger, SearchTransportService searchTransportService,
+ SearchDfsQueryAndFetchAsyncAction(Logger logger, SearchTransportService searchTransportService,
ClusterService clusterService, IndexNameExpressionResolver indexNameExpressionResolver,
SearchPhaseController searchPhaseController, ThreadPool threadPool,
SearchRequest request, ActionListener<SearchResponse> listener) {
@@ -105,7 +106,7 @@ class SearchDfsQueryAndFetchAsyncAction extends AbstractSearchAsyncAction<DfsSea
void onSecondPhaseFailure(Exception e, QuerySearchRequest querySearchRequest, int shardIndex, DfsSearchResult dfsResult,
AtomicInteger counter) {
if (logger.isDebugEnabled()) {
- logger.debug("[{}] Failed to execute query phase", e, querySearchRequest.id());
+ logger.debug(new ParameterizedMessage("[{}] Failed to execute query phase", querySearchRequest.id()), e);
}
this.addShardFailure(shardIndex, dfsResult.shardTarget(), e);
successfulOps.decrementAndGet();
diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java
index 9d8305cf6b..efffcef05c 100644
--- a/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java
+++ b/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java
@@ -20,13 +20,14 @@
package org.elasticsearch.action.search;
import com.carrotsearch.hppc.IntArrayList;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.search.ScoreDoc;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRunnable;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.action.SearchTransportService;
@@ -50,7 +51,7 @@ class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<DfsSe
final AtomicArray<FetchSearchResult> fetchResults;
final AtomicArray<IntArrayList> docIdsToLoad;
- SearchDfsQueryThenFetchAsyncAction(ESLogger logger, SearchTransportService searchTransportService,
+ SearchDfsQueryThenFetchAsyncAction(Logger logger, SearchTransportService searchTransportService,
ClusterService clusterService, IndexNameExpressionResolver indexNameExpressionResolver,
SearchPhaseController searchPhaseController, ThreadPool threadPool,
SearchRequest request, ActionListener<SearchResponse> listener) {
@@ -113,7 +114,7 @@ class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<DfsSe
void onQueryFailure(Exception e, QuerySearchRequest querySearchRequest, int shardIndex, DfsSearchResult dfsResult,
AtomicInteger counter) {
if (logger.isDebugEnabled()) {
- logger.debug("[{}] Failed to execute query phase", e, querySearchRequest.id());
+ logger.debug(new ParameterizedMessage("[{}] Failed to execute query phase", querySearchRequest.id()), e);
}
this.addShardFailure(shardIndex, dfsResult.shardTarget(), e);
successfulOps.decrementAndGet();
@@ -182,7 +183,7 @@ class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<DfsSe
void onFetchFailure(Exception e, ShardFetchSearchRequest fetchSearchRequest, int shardIndex,
SearchShardTarget shardTarget, AtomicInteger counter) {
if (logger.isDebugEnabled()) {
- logger.debug("[{}] Failed to execute fetch phase", e, fetchSearchRequest.id());
+ logger.debug(new ParameterizedMessage("[{}] Failed to execute fetch phase", fetchSearchRequest.id()), e);
}
this.addShardFailure(shardIndex, shardTarget, e);
successfulOps.decrementAndGet();
diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchQueryAndFetchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchQueryAndFetchAsyncAction.java
index fad4d60275..2e13a0d26e 100644
--- a/core/src/main/java/org/elasticsearch/action/search/SearchQueryAndFetchAsyncAction.java
+++ b/core/src/main/java/org/elasticsearch/action/search/SearchQueryAndFetchAsyncAction.java
@@ -19,12 +19,12 @@
package org.elasticsearch.action.search;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRunnable;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.search.action.SearchTransportService;
import org.elasticsearch.search.controller.SearchPhaseController;
import org.elasticsearch.search.fetch.QueryFetchSearchResult;
@@ -36,7 +36,7 @@ import java.io.IOException;
class SearchQueryAndFetchAsyncAction extends AbstractSearchAsyncAction<QueryFetchSearchResult> {
- SearchQueryAndFetchAsyncAction(ESLogger logger, SearchTransportService searchTransportService,
+ SearchQueryAndFetchAsyncAction(Logger logger, SearchTransportService searchTransportService,
ClusterService clusterService, IndexNameExpressionResolver indexNameExpressionResolver,
SearchPhaseController searchPhaseController, ThreadPool threadPool,
SearchRequest request, ActionListener<SearchResponse> listener) {
diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java
index 5f90d291dd..327050d1db 100644
--- a/core/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java
+++ b/core/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java
@@ -20,13 +20,14 @@
package org.elasticsearch.action.search;
import com.carrotsearch.hppc.IntArrayList;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.search.ScoreDoc;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRunnable;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.action.SearchTransportService;
@@ -46,7 +47,7 @@ class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<QuerySea
final AtomicArray<FetchSearchResult> fetchResults;
final AtomicArray<IntArrayList> docIdsToLoad;
- SearchQueryThenFetchAsyncAction(ESLogger logger, SearchTransportService searchService,
+ SearchQueryThenFetchAsyncAction(Logger logger, SearchTransportService searchService,
ClusterService clusterService, IndexNameExpressionResolver indexNameExpressionResolver,
SearchPhaseController searchPhaseController, ThreadPool threadPool,
SearchRequest request, ActionListener<SearchResponse> listener) {
@@ -115,7 +116,7 @@ class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<QuerySea
void onFetchFailure(Exception e, ShardFetchSearchRequest fetchSearchRequest, int shardIndex, SearchShardTarget shardTarget,
AtomicInteger counter) {
if (logger.isDebugEnabled()) {
- logger.debug("[{}] Failed to execute fetch phase", e, fetchSearchRequest.id());
+ logger.debug(new ParameterizedMessage("[{}] Failed to execute fetch phase", fetchSearchRequest.id()), e);
}
this.addShardFailure(shardIndex, shardTarget, e);
successfulOps.decrementAndGet();
diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryAndFetchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryAndFetchAsyncAction.java
index 72154f224d..7f1b0d5447 100644
--- a/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryAndFetchAsyncAction.java
+++ b/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryAndFetchAsyncAction.java
@@ -19,12 +19,13 @@
package org.elasticsearch.action.search;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.search.ScoreDoc;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.search.action.SearchTransportService;
import org.elasticsearch.search.controller.SearchPhaseController;
@@ -40,7 +41,7 @@ import static org.elasticsearch.action.search.TransportSearchHelper.internalScro
class SearchScrollQueryAndFetchAsyncAction extends AbstractAsyncAction {
- private final ESLogger logger;
+ private final Logger logger;
private final SearchPhaseController searchPhaseController;
private final SearchTransportService searchTransportService;
private final SearchScrollRequest request;
@@ -52,7 +53,7 @@ class SearchScrollQueryAndFetchAsyncAction extends AbstractAsyncAction {
private final AtomicInteger successfulOps;
private final AtomicInteger counter;
- SearchScrollQueryAndFetchAsyncAction(ESLogger logger, ClusterService clusterService,
+ SearchScrollQueryAndFetchAsyncAction(Logger logger, ClusterService clusterService,
SearchTransportService searchTransportService, SearchPhaseController searchPhaseController,
SearchScrollRequest request, ParsedScrollId scrollId, ActionListener<SearchResponse> listener) {
this.logger = logger;
@@ -146,7 +147,7 @@ class SearchScrollQueryAndFetchAsyncAction extends AbstractAsyncAction {
private void onPhaseFailure(Exception e, long searchId, int shardIndex) {
if (logger.isDebugEnabled()) {
- logger.debug("[{}] Failed to execute query phase", e, searchId);
+ logger.debug(new ParameterizedMessage("[{}] Failed to execute query phase", searchId), e);
}
addShardFailure(shardIndex, new ShardSearchFailure(e));
successfulOps.decrementAndGet();
diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java
index d9f649a7a5..cddb244327 100644
--- a/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java
+++ b/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java
@@ -20,12 +20,13 @@
package org.elasticsearch.action.search;
import com.carrotsearch.hppc.IntArrayList;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.search.ScoreDoc;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.search.action.SearchTransportService;
import org.elasticsearch.search.controller.SearchPhaseController;
@@ -43,7 +44,7 @@ import static org.elasticsearch.action.search.TransportSearchHelper.internalScro
class SearchScrollQueryThenFetchAsyncAction extends AbstractAsyncAction {
- private final ESLogger logger;
+ private final Logger logger;
private final SearchTransportService searchTransportService;
private final SearchPhaseController searchPhaseController;
private final SearchScrollRequest request;
@@ -56,7 +57,7 @@ class SearchScrollQueryThenFetchAsyncAction extends AbstractAsyncAction {
private volatile ScoreDoc[] sortedShardDocs;
private final AtomicInteger successfulOps;
- SearchScrollQueryThenFetchAsyncAction(ESLogger logger, ClusterService clusterService,
+ SearchScrollQueryThenFetchAsyncAction(Logger logger, ClusterService clusterService,
SearchTransportService searchTransportService, SearchPhaseController searchPhaseController,
SearchScrollRequest request, ParsedScrollId scrollId, ActionListener<SearchResponse> listener) {
this.logger = logger;
@@ -146,7 +147,7 @@ class SearchScrollQueryThenFetchAsyncAction extends AbstractAsyncAction {
void onQueryPhaseFailure(final int shardIndex, final AtomicInteger counter, final long searchId, Exception failure) {
if (logger.isDebugEnabled()) {
- logger.debug("[{}] Failed to execute query phase", failure, searchId);
+ logger.debug(new ParameterizedMessage("[{}] Failed to execute query phase", searchId), failure);
}
addShardFailure(shardIndex, new ShardSearchFailure(failure));
successfulOps.decrementAndGet();
diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java b/core/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java
index 092b69fc93..20ce4a8c1d 100644
--- a/core/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java
+++ b/core/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.action.search;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
@@ -144,7 +145,7 @@ public class TransportClearScrollAction extends HandledTransportAction<ClearScro
}
void onFailedFreedContext(Throwable e, DiscoveryNode node) {
- logger.warn("Clear SC failed on node[{}]", e, node);
+ logger.warn(new ParameterizedMessage("Clear SC failed on node[{}]", node), e);
if (expectedOps.countDown()) {
listener.onResponse(new ClearScrollResponse(false, numberOfFreedSearchContexts.get()));
} else {
diff --git a/core/src/main/java/org/elasticsearch/action/support/AbstractListenableActionFuture.java b/core/src/main/java/org/elasticsearch/action/support/AbstractListenableActionFuture.java
index 78a61ebfe1..ee260ddd1e 100644
--- a/core/src/main/java/org/elasticsearch/action/support/AbstractListenableActionFuture.java
+++ b/core/src/main/java/org/elasticsearch/action/support/AbstractListenableActionFuture.java
@@ -19,9 +19,9 @@
package org.elasticsearch.action.support;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ListenableActionFuture;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.threadpool.ThreadPool;
@@ -33,7 +33,7 @@ import java.util.List;
*/
public abstract class AbstractListenableActionFuture<T, L> extends AdapterActionFuture<T, L> implements ListenableActionFuture<T> {
- private static final ESLogger logger = Loggers.getLogger(AbstractListenableActionFuture.class);
+ private static final Logger logger = Loggers.getLogger(AbstractListenableActionFuture.class);
final ThreadPool threadPool;
volatile Object listeners;
diff --git a/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java b/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java
index b4033d6a9f..521f4b21a0 100644
--- a/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.action.support;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
@@ -75,8 +76,9 @@ public abstract class HandledTransportAction<Request extends ActionRequest<Reque
try {
channel.sendResponse(e);
} catch (Exception e1) {
- logger.warn("Failed to send error response for action [{}] and request [{}]", e1,
- actionName, request);
+ logger.warn(
+ new ParameterizedMessage("Failed to send error response for action [{}] and request [{}]", actionName, request),
+ e1);
}
}
});
diff --git a/core/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java b/core/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java
index 4ff7cdaa7b..3b14c838b0 100644
--- a/core/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java
+++ b/core/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java
@@ -19,10 +19,10 @@
package org.elasticsearch.action.support;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.threadpool.ThreadPool;
@@ -39,12 +39,12 @@ public final class ThreadedActionListener<Response> implements ActionListener<Re
*/
public static class Wrapper {
- private final ESLogger logger;
+ private final Logger logger;
private final ThreadPool threadPool;
private final boolean threadedListener;
- public Wrapper(ESLogger logger, Settings settings, ThreadPool threadPool) {
+ public Wrapper(Logger logger, Settings settings, ThreadPool threadPool) {
this.logger = logger;
this.threadPool = threadPool;
// Should the action listener be threaded or not by default. Action listeners are automatically threaded for
@@ -68,13 +68,13 @@ public final class ThreadedActionListener<Response> implements ActionListener<Re
}
}
- private final ESLogger logger;
+ private final Logger logger;
private final ThreadPool threadPool;
private final String executor;
private final ActionListener<Response> listener;
private final boolean forceExecution;
- public ThreadedActionListener(ESLogger logger, ThreadPool threadPool, String executor, ActionListener<Response> listener,
+ public ThreadedActionListener(Logger logger, ThreadPool threadPool, String executor, ActionListener<Response> listener,
boolean forceExecution) {
this.logger = logger;
this.threadPool = threadPool;
diff --git a/core/src/main/java/org/elasticsearch/action/support/TransportAction.java b/core/src/main/java/org/elasticsearch/action/support/TransportAction.java
index 582878a427..7d1a091d6b 100644
--- a/core/src/main/java/org/elasticsearch/action/support/TransportAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/TransportAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.action.support;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
@@ -27,7 +28,6 @@ import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskListener;
@@ -165,9 +165,9 @@ public abstract class TransportAction<Request extends ActionRequest<Request>, Re
private final TransportAction<Request, Response> action;
private final AtomicInteger index = new AtomicInteger();
- private final ESLogger logger;
+ private final Logger logger;
- private RequestFilterChain(TransportAction<Request, Response> action, ESLogger logger) {
+ private RequestFilterChain(TransportAction<Request, Response> action, Logger logger) {
this.action = action;
this.logger = logger;
}
@@ -201,9 +201,9 @@ public abstract class TransportAction<Request extends ActionRequest<Request>, Re
private final ActionFilter[] filters;
private final AtomicInteger index;
- private final ESLogger logger;
+ private final Logger logger;
- private ResponseFilterChain(ActionFilter[] filters, ESLogger logger) {
+ private ResponseFilterChain(ActionFilter[] filters, Logger logger) {
this.filters = filters;
this.index = new AtomicInteger(filters.length);
this.logger = logger;
diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java
index 826d76de83..39165b9a9b 100644
--- a/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.action.support.broadcast;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.NoShardAvailableActionException;
import org.elasticsearch.action.support.ActionFilters;
@@ -37,10 +38,10 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequestHandler;
+import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportService;
import java.util.concurrent.atomic.AtomicInteger;
@@ -224,7 +225,7 @@ public abstract class TransportBroadcastAction<Request extends BroadcastRequest<
if (e != null) {
if (logger.isTraceEnabled()) {
if (!TransportActions.isShardNotAvailableException(e)) {
- logger.trace("{}: failed to execute [{}]", e, shard != null ? shard.shortSummary() : shardIt.shardId(), request);
+ logger.trace(new ParameterizedMessage("{}: failed to execute [{}]", shard != null ? shard.shortSummary() : shardIt.shardId(), request), e);
}
}
}
@@ -233,7 +234,7 @@ public abstract class TransportBroadcastAction<Request extends BroadcastRequest<
if (logger.isDebugEnabled()) {
if (e != null) {
if (!TransportActions.isShardNotAvailableException(e)) {
- logger.debug("{}: failed to execute [{}]", e, shard != null ? shard.shortSummary() : shardIt.shardId(), request);
+ logger.debug(new ParameterizedMessage("{}: failed to execute [{}]", shard != null ? shard.shortSummary() : shardIt.shardId(), request), e);
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java
index cb5ba8788c..ec1dfa1706 100644
--- a/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.action.support.broadcast.node;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.IndicesRequest;
@@ -46,13 +47,13 @@ import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.NodeShouldNotConnectException;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestHandler;
import org.elasticsearch.transport.TransportResponse;
+import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
@@ -363,7 +364,7 @@ public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRe
protected void onNodeFailure(DiscoveryNode node, int nodeIndex, Throwable t) {
String nodeId = node.getId();
if (logger.isDebugEnabled() && !(t instanceof NodeShouldNotConnectException)) {
- logger.debug("failed to execute [{}] on node [{}]", t, actionName, nodeId);
+ logger.debug(new ParameterizedMessage("failed to execute [{}] on node [{}]", actionName, nodeId), t);
}
// this is defensive to protect against the possibility of double invocation
@@ -441,11 +442,11 @@ public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRe
shardResults[shardIndex] = failure;
if (TransportActions.isShardNotAvailableException(e)) {
if (logger.isTraceEnabled()) {
- logger.trace("[{}] failed to execute operation for shard [{}]", e, actionName, shardRouting.shortSummary());
+ logger.trace(new ParameterizedMessage("[{}] failed to execute operation for shard [{}]", actionName, shardRouting.shortSummary()), e);
}
} else {
if (logger.isDebugEnabled()) {
- logger.debug("[{}] failed to execute operation for shard [{}]", e, actionName, shardRouting.shortSummary());
+ logger.debug(new ParameterizedMessage("[{}] failed to execute operation for shard [{}]", actionName, shardRouting.shortSummary()), e);
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java
index 7d6d437573..73de3b2f81 100644
--- a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.action.support.master;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionListenerResponseHandler;
import org.elasticsearch.action.ActionResponse;
@@ -155,7 +156,7 @@ public abstract class TransportMasterNodeAction<Request extends MasterNodeReques
public void onFailure(Exception t) {
if (t instanceof Discovery.FailedToCommitClusterStateException
|| (t instanceof NotMasterException)) {
- logger.debug("master could not publish cluster state or stepped down before publishing action [{}], scheduling a retry", t, actionName);
+ logger.debug(new ParameterizedMessage("master could not publish cluster state or stepped down before publishing action [{}], scheduling a retry", actionName), t);
retry(t, MasterNodeChangePredicate.INSTANCE);
} else {
listener.onFailure(t);
@@ -209,7 +210,7 @@ public abstract class TransportMasterNodeAction<Request extends MasterNodeReques
@Override
public void onTimeout(TimeValue timeout) {
- logger.debug("timed out while retrying [{}] after failure (timeout [{}])", failure, actionName, timeout);
+ logger.debug(new ParameterizedMessage("timed out while retrying [{}] after failure (timeout [{}])", actionName, timeout), failure);
listener.onFailure(new MasterNotDiscoveredException(failure));
}
}, changePredicate
diff --git a/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java
index 276484286b..59bfa416df 100644
--- a/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.action.support.nodes;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.NoSuchNodeException;
@@ -31,13 +32,13 @@ import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.NodeShouldNotConnectException;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestHandler;
import org.elasticsearch.transport.TransportRequestOptions;
+import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportService;
import java.util.ArrayList;
@@ -238,7 +239,7 @@ public abstract class TransportNodesAction<NodesRequest extends BaseNodesRequest
private void onFailure(int idx, String nodeId, Throwable t) {
if (logger.isDebugEnabled() && !(t instanceof NodeShouldNotConnectException)) {
- logger.debug("failed to execute on node [{}]", t, nodeId);
+ logger.debug(new ParameterizedMessage("failed to execute on node [{}]", nodeId), t);
}
if (accumulateExceptions()) {
responses.set(idx, new FailedNodeException(nodeId, "Failed node [" + nodeId + "]", t));
diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java b/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java
index 27f0504413..639743e144 100644
--- a/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java
+++ b/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java
@@ -18,6 +18,8 @@
*/
package org.elasticsearch.action.support.replication;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
@@ -31,7 +33,6 @@ import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.index.engine.VersionConflictEngineException;
import org.elasticsearch.index.shard.ShardId;
@@ -56,7 +57,7 @@ public class ReplicationOperation<
ReplicaRequest extends ReplicationRequest<ReplicaRequest>,
PrimaryResultT extends ReplicationOperation.PrimaryResult<ReplicaRequest>
> {
- private final ESLogger logger;
+ private final Logger logger;
private final Request request;
private final Supplier<ClusterState> clusterStateSupplier;
private final String opType;
@@ -86,7 +87,7 @@ public class ReplicationOperation<
public ReplicationOperation(Request request, Primary<Request, ReplicaRequest, PrimaryResultT> primary,
ActionListener<PrimaryResultT> listener,
boolean executeOnReplicas, Replicas<ReplicaRequest> replicas,
- Supplier<ClusterState> clusterStateSupplier, ESLogger logger, String opType) {
+ Supplier<ClusterState> clusterStateSupplier, Logger logger, String opType) {
this.executeOnReplicas = executeOnReplicas;
this.replicasProxy = replicas;
this.primary = primary;
@@ -189,8 +190,14 @@ public class ReplicationOperation<
@Override
public void onFailure(Exception replicaException) {
- logger.trace("[{}] failure while performing [{}] on replica {}, request [{}]", replicaException, shard.shardId(), opType,
- shard, replicaRequest);
+ logger.trace(
+ new ParameterizedMessage(
+ "[{}] failure while performing [{}] on replica {}, request [{}]",
+ shard.shardId(),
+ opType,
+ shard,
+ replicaRequest),
+ replicaException);
if (ignoreReplicaException(replicaException)) {
decPendingAndFinishIfNeeded();
} else {
@@ -198,7 +205,7 @@ public class ReplicationOperation<
shardReplicaFailures.add(new ReplicationResponse.ShardInfo.Failure(
shard.shardId(), shard.currentNodeId(), replicaException, restStatus, false));
String message = String.format(Locale.ROOT, "failed to perform %s on replica %s", opType, shard);
- logger.warn("[{}] {}", replicaException, shard.shardId(), message);
+ logger.warn(new ParameterizedMessage("[{}] {}", shard.shardId(), message), replicaException);
replicasProxy.failShard(shard, replicaRequest.primaryTerm(), message, replicaException,
ReplicationOperation.this::decPendingAndFinishIfNeeded,
ReplicationOperation.this::onPrimaryDemoted,
diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java
index 592edbe6f5..65de45f759 100644
--- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.action.support.replication;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionListenerResponseHandler;
@@ -56,7 +57,6 @@ import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.node.NodeClosedException;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.ConnectTransportException;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportChannelResponseHandler;
@@ -65,6 +65,7 @@ import org.elasticsearch.transport.TransportRequestHandler;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportResponse.Empty;
+import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
@@ -215,7 +216,7 @@ public abstract class TransportReplicationAction<
channel.sendResponse(e);
} catch (Exception inner) {
inner.addSuppressed(e);
- logger.warn("Failed to send response for {}", inner, actionName);
+ logger.warn(new ParameterizedMessage("Failed to send response for {}", actionName), inner);
}
}
});
@@ -444,7 +445,9 @@ public abstract class TransportReplicationAction<
@Override
public void onFailure(Exception e) {
if (e instanceof RetryOnReplicaException) {
- logger.trace("Retrying operation on replica, action [{}], request [{}]", e, transportReplicaAction, request);
+ logger.trace(
+ new ParameterizedMessage("Retrying operation on replica, action [{}], request [{}]", transportReplicaAction, request),
+ e);
final ThreadContext.StoredContext context = threadPool.getThreadContext().newStoredContext();
observer.waitForNextChange(new ClusterStateObserver.Listener() {
@Override
@@ -479,7 +482,9 @@ public abstract class TransportReplicationAction<
channel.sendResponse(e);
} catch (IOException responseException) {
responseException.addSuppressed(e);
- logger.warn("failed to send error message back to client for action [{}]", responseException, transportReplicaAction);
+ logger.warn(
+ new ParameterizedMessage("failed to send error message back to client for action [{}]", transportReplicaAction),
+ responseException);
}
}
@@ -682,8 +687,12 @@ public abstract class TransportReplicationAction<
final Throwable cause = exp.unwrapCause();
if (cause instanceof ConnectTransportException || cause instanceof NodeClosedException ||
(isPrimaryAction && retryPrimaryException(cause))) {
- logger.trace("received an error from node [{}] for request [{}], scheduling a retry", exp, node.getId(),
- request);
+ logger.trace(
+ new ParameterizedMessage(
+ "received an error from node [{}] for request [{}], scheduling a retry",
+ node.getId(),
+ request),
+ exp);
retry(exp);
} else {
finishAsFailed(exp);
@@ -729,7 +738,7 @@ public abstract class TransportReplicationAction<
void finishAsFailed(Exception failure) {
if (finished.compareAndSet(false, true)) {
setPhase(task, "failed");
- logger.trace("operation failed. action [{}], request [{}]", failure, actionName, request);
+ logger.trace(new ParameterizedMessage("operation failed. action [{}], request [{}]", actionName, request), failure);
listener.onFailure(failure);
} else {
assert false : "finishAsFailed called but operation is already finished";
@@ -737,7 +746,9 @@ public abstract class TransportReplicationAction<
}
void finishWithUnexpectedFailure(Exception failure) {
- logger.warn("unexpected error during the primary phase for action [{}], request [{}]", failure, actionName, request);
+ logger.warn(
+ new ParameterizedMessage("unexpected error during the primary phase for action [{}], request [{}]", actionName, request),
+ failure);
if (finished.compareAndSet(false, true)) {
setPhase(task, "failed");
listener.onFailure(failure);
diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java
index c472c7454a..39b49a4a40 100644
--- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.action.support.replication;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.WriteRequest;
@@ -27,7 +28,6 @@ import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.shard.IndexShard;
@@ -241,13 +241,13 @@ public abstract class TransportWriteAction<
private final RespondingWriteResult respond;
private final IndexShard indexShard;
private final WriteRequest<?> request;
- private final ESLogger logger;
+ private final Logger logger;
AsyncAfterWriteAction(final IndexShard indexShard,
final WriteRequest<?> request,
@Nullable final Translog.Location location,
final RespondingWriteResult respond,
- final ESLogger logger) {
+ final Logger logger) {
this.indexShard = indexShard;
this.request = request;
boolean waitUntilRefresh = false;
diff --git a/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java b/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java
index a3964b6bbf..500bacbd51 100644
--- a/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.action.support.single.shard;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.NoShardAvailableActionException;
@@ -39,10 +40,10 @@ import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequestHandler;
+import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportService;
import java.util.function.Supplier;
@@ -187,7 +188,7 @@ public abstract class TransportSingleShardAction<Request extends SingleShardRequ
private void onFailure(ShardRouting shardRouting, Exception e) {
if (logger.isTraceEnabled() && e != null) {
- logger.trace("{}: failed to execute [{}]", e, shardRouting, internalRequest.request());
+ logger.trace(new ParameterizedMessage("{}: failed to execute [{}]", shardRouting, internalRequest.request()), e);
}
perform(e);
}
@@ -205,7 +206,7 @@ public abstract class TransportSingleShardAction<Request extends SingleShardRequ
failure = new NoShardAvailableActionException(null, LoggerMessageFormat.format("No shard available for [{}]", internalRequest.request()), failure);
} else {
if (logger.isDebugEnabled()) {
- logger.debug("{}: failed to execute [{}]", failure, null, internalRequest.request());
+ logger.debug(new ParameterizedMessage("{}: failed to execute [{}]", null, internalRequest.request()), failure);
}
}
listener.onFailure(failure);
diff --git a/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java b/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java
index a30d9c1f25..93cfee0cfc 100644
--- a/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.action.support.tasks;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.FailedNodeException;
@@ -38,7 +39,6 @@ import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.NodeShouldNotConnectException;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportException;
@@ -46,6 +46,7 @@ import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestHandler;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportResponse;
+import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
@@ -275,7 +276,7 @@ public abstract class TransportTasksAction<
private void onFailure(int idx, String nodeId, Throwable t) {
if (logger.isDebugEnabled() && !(t instanceof NodeShouldNotConnectException)) {
- logger.debug("failed to execute on node [{}]", t, nodeId);
+ logger.debug(new ParameterizedMessage("failed to execute on node [{}]", nodeId), t);
}
if (accumulateExceptions()) {
responses.set(idx, new FailedNodeException(nodeId, "Failed node [" + nodeId + "]", t));
diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java b/core/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java
index da12831f1c..f89bba93dd 100644
--- a/core/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java
+++ b/core/src/main/java/org/elasticsearch/action/termvectors/TransportShardMultiTermsVectorAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.action.termvectors;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.TransportActions;
@@ -87,7 +88,7 @@ public class TransportShardMultiTermsVectorAction extends TransportSingleShardAc
if (TransportActions.isShardNotAvailableException(t)) {
throw (ElasticsearchException) t;
} else {
- logger.debug("{} failed to execute multi term vectors for [{}]/[{}]", t, shardId, termVectorsRequest.type(), termVectorsRequest.id());
+ logger.debug(new ParameterizedMessage("{} failed to execute multi term vectors for [{}]/[{}]", shardId, termVectorsRequest.type(), termVectorsRequest.id()), t);
response.add(request.locations.get(i),
new MultiTermVectorsResponse.Failure(request.index(), termVectorsRequest.type(), termVectorsRequest.id(), t));
}
diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java
index e2c2e3dce7..f342cd2879 100644
--- a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java
+++ b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java
@@ -19,6 +19,7 @@
package org.elasticsearch.bootstrap;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.Constants;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.StringHelper;
@@ -28,7 +29,6 @@ import org.elasticsearch.cli.Terminal;
import org.elasticsearch.common.PidFile;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.inject.CreationException;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.LogConfigurator;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
@@ -81,7 +81,7 @@ final class Bootstrap {
/** initialize native resources */
public static void initializeNatives(Path tmpFile, boolean mlockAll, boolean seccomp, boolean ctrlHandler) {
- final ESLogger logger = Loggers.getLogger(Bootstrap.class);
+ final Logger logger = Loggers.getLogger(Bootstrap.class);
// check if the user is running as root, and bail
if (Natives.definitelyRunningAsRoot()) {
@@ -227,7 +227,7 @@ final class Bootstrap {
INSTANCE = new Bootstrap();
Environment environment = initialEnvironment(foreground, pidFile, esSettings);
- LogConfigurator.configure(environment.settings(), true);
+ LogConfigurator.configure(environment);
checkForCustomConfFile();
if (environment.pidFile() != null) {
@@ -264,7 +264,7 @@ final class Bootstrap {
if (foreground) {
Loggers.disableConsoleLogging();
}
- ESLogger logger = Loggers.getLogger(Bootstrap.class);
+ Logger logger = Loggers.getLogger(Bootstrap.class);
if (INSTANCE.node != null) {
logger = Loggers.getLogger(Bootstrap.class, Node.NODE_NAME_SETTING.get(INSTANCE.node.settings()));
}
@@ -310,7 +310,7 @@ final class Bootstrap {
private static void checkUnsetAndMaybeExit(String confFileSetting, String settingName) {
if (confFileSetting != null && confFileSetting.isEmpty() == false) {
- ESLogger logger = Loggers.getLogger(Bootstrap.class);
+ Logger logger = Loggers.getLogger(Bootstrap.class);
logger.info("{} is no longer supported. elasticsearch.yml must be placed in the config directory and cannot be renamed.", settingName);
exit(1);
}
diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java
index 94143f9986..b604bfb078 100644
--- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java
+++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java
@@ -19,10 +19,11 @@
package org.elasticsearch.bootstrap;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.util.Constants;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
@@ -100,7 +101,7 @@ final class BootstrapCheck {
final boolean enforceLimits,
final boolean ignoreSystemChecks,
final List<Check> checks,
- final ESLogger logger) {
+ final Logger logger) {
final List<String> errors = new ArrayList<>();
final List<String> ignoredErrors = new ArrayList<>();
@@ -136,7 +137,7 @@ final class BootstrapCheck {
}
- static void log(final ESLogger logger, final String error) {
+ static void log(final Logger logger, final String error) {
logger.warn(error);
}
@@ -417,7 +418,7 @@ final class BootstrapCheck {
}
// visible for testing
- long getMaxMapCount(ESLogger logger) {
+ long getMaxMapCount(Logger logger) {
final Path path = getProcSysVmMaxMapCountPath();
try (final BufferedReader bufferedReader = getBufferedReader(path)) {
final String rawProcSysVmMaxMapCount = readProcSysVmMaxMapCount(bufferedReader);
@@ -425,11 +426,11 @@ final class BootstrapCheck {
try {
return parseProcSysVmMaxMapCount(rawProcSysVmMaxMapCount);
} catch (final NumberFormatException e) {
- logger.warn("unable to parse vm.max_map_count [{}]", e, rawProcSysVmMaxMapCount);
+ logger.warn(new ParameterizedMessage("unable to parse vm.max_map_count [{}]", rawProcSysVmMaxMapCount), e);
}
}
} catch (final IOException e) {
- logger.warn("I/O exception while trying to read [{}]", e, path);
+ logger.warn(new ParameterizedMessage("I/O exception while trying to read [{}]", path), e);
}
return -1;
}
diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java
index de6a0339cb..fa9b153047 100644
--- a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java
+++ b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java
@@ -116,4 +116,5 @@ class Elasticsearch extends SettingCommand {
static void close(String[] args) throws IOException {
Bootstrap.stop();
}
+
}
diff --git a/core/src/main/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandler.java b/core/src/main/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandler.java
index 45d54ed4a6..57580d33fc 100644
--- a/core/src/main/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandler.java
+++ b/core/src/main/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandler.java
@@ -19,9 +19,10 @@
package org.elasticsearch.bootstrap;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.index.MergePolicy;
import org.elasticsearch.common.SuppressForbidden;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import java.io.IOError;
@@ -76,14 +77,14 @@ class ElasticsearchUncaughtExceptionHandler implements Thread.UncaughtExceptionH
// visible for testing
void onFatalUncaught(final String threadName, final Throwable t) {
- final ESLogger logger = Loggers.getLogger(ElasticsearchUncaughtExceptionHandler.class, loggingPrefixSupplier.get());
- logger.error("fatal error in thread [{}], exiting", t, threadName);
+ final Logger logger = Loggers.getLogger(ElasticsearchUncaughtExceptionHandler.class, loggingPrefixSupplier.get());
+ logger.error(new ParameterizedMessage("fatal error in thread [{}], exiting", threadName), t);
}
// visible for testing
void onNonFatalUncaught(final String threadName, final Throwable t) {
- final ESLogger logger = Loggers.getLogger(ElasticsearchUncaughtExceptionHandler.class, loggingPrefixSupplier.get());
- logger.warn("uncaught exception in thread [{}]", t, threadName);
+ final Logger logger = Loggers.getLogger(ElasticsearchUncaughtExceptionHandler.class, loggingPrefixSupplier.get());
+ logger.warn(new ParameterizedMessage("uncaught exception in thread [{}]", threadName), t);
}
// visible for testing
diff --git a/core/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java b/core/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java
index 5d1369b21f..fe0f400698 100644
--- a/core/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java
+++ b/core/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java
@@ -22,8 +22,8 @@ package org.elasticsearch.bootstrap;
import com.sun.jna.Native;
import com.sun.jna.NativeLong;
import com.sun.jna.Structure;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.Constants;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import java.util.Arrays;
@@ -34,7 +34,7 @@ import java.util.List;
*/
final class JNACLibrary {
- private static final ESLogger logger = Loggers.getLogger(JNACLibrary.class);
+ private static final Logger logger = Loggers.getLogger(JNACLibrary.class);
public static final int MCL_CURRENT = 1;
public static final int ENOMEM = 12;
diff --git a/core/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java b/core/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java
index 50dab6888b..747ba2e458 100644
--- a/core/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java
+++ b/core/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java
@@ -25,8 +25,8 @@ import com.sun.jna.NativeLong;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import com.sun.jna.win32.StdCallLibrary;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.Constants;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import java.util.ArrayList;
@@ -40,7 +40,7 @@ import java.util.List;
*/
final class JNAKernel32Library {
- private static final ESLogger logger = Loggers.getLogger(JNAKernel32Library.class);
+ private static final Logger logger = Loggers.getLogger(JNAKernel32Library.class);
// Callbacks must be kept around in order to be able to be called later,
// when the Windows ConsoleCtrlHandler sends an event.
diff --git a/core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java b/core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java
index 5a8693b313..5f3e357ff5 100644
--- a/core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java
+++ b/core/src/main/java/org/elasticsearch/bootstrap/JNANatives.java
@@ -21,8 +21,8 @@ package org.elasticsearch.bootstrap;
import com.sun.jna.Native;
import com.sun.jna.Pointer;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.Constants;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.monitor.jvm.JvmInfo;
@@ -39,7 +39,7 @@ class JNANatives {
/** no instantiation */
private JNANatives() {}
- private static final ESLogger logger = Loggers.getLogger(JNANatives.class);
+ private static final Logger logger = Loggers.getLogger(JNANatives.class);
// Set to true, in case native mlockall call was successful
static boolean LOCAL_MLOCKALL = false;
diff --git a/core/src/main/java/org/elasticsearch/bootstrap/JarHell.java b/core/src/main/java/org/elasticsearch/bootstrap/JarHell.java
index 3f77f6bcee..f8cf62164e 100644
--- a/core/src/main/java/org/elasticsearch/bootstrap/JarHell.java
+++ b/core/src/main/java/org/elasticsearch/bootstrap/JarHell.java
@@ -19,10 +19,10 @@
package org.elasticsearch.bootstrap;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import java.io.IOException;
@@ -76,7 +76,7 @@ public class JarHell {
*/
public static void checkJarHell() throws Exception {
ClassLoader loader = JarHell.class.getClassLoader();
- ESLogger logger = Loggers.getLogger(JarHell.class);
+ Logger logger = Loggers.getLogger(JarHell.class);
if (logger.isDebugEnabled()) {
logger.debug("java.class.path: {}", System.getProperty("java.class.path"));
logger.debug("sun.boot.class.path: {}", System.getProperty("sun.boot.class.path"));
@@ -150,7 +150,7 @@ public class JarHell {
*/
@SuppressForbidden(reason = "needs JarFile for speed, just reading entries")
public static void checkJarHell(URL urls[]) throws Exception {
- ESLogger logger = Loggers.getLogger(JarHell.class);
+ Logger logger = Loggers.getLogger(JarHell.class);
// we don't try to be sneaky and use deprecated/internal/not portable stuff
// like sun.boot.class.path, and with jigsaw we don't yet have a way to get
// a "list" at all. So just exclude any elements underneath the java home
diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Natives.java b/core/src/main/java/org/elasticsearch/bootstrap/Natives.java
index 6dba1f3a1b..9fad34e329 100644
--- a/core/src/main/java/org/elasticsearch/bootstrap/Natives.java
+++ b/core/src/main/java/org/elasticsearch/bootstrap/Natives.java
@@ -19,7 +19,7 @@
package org.elasticsearch.bootstrap;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import java.nio.file.Path;
@@ -32,7 +32,7 @@ final class Natives {
/** no instantiation */
private Natives() {}
- private static final ESLogger logger = Loggers.getLogger(Natives.class);
+ private static final Logger logger = Loggers.getLogger(Natives.class);
// marker to determine if the JNA class files are available to the JVM
static final boolean JNA_AVAILABLE;
diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Seccomp.java b/core/src/main/java/org/elasticsearch/bootstrap/Seccomp.java
index 6f6c3dc557..88c618d445 100644
--- a/core/src/main/java/org/elasticsearch/bootstrap/Seccomp.java
+++ b/core/src/main/java/org/elasticsearch/bootstrap/Seccomp.java
@@ -26,9 +26,9 @@ import com.sun.jna.NativeLong;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import com.sun.jna.ptr.PointerByReference;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.Constants;
import org.apache.lucene.util.IOUtils;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import java.io.IOException;
@@ -92,7 +92,7 @@ import java.util.Map;
*/
// not an example of how to write code!!!
final class Seccomp {
- private static final ESLogger logger = Loggers.getLogger(Seccomp.class);
+ private static final Logger logger = Loggers.getLogger(Seccomp.class);
// Linux implementation, based on seccomp(2) or prctl(2) with bpf filtering
diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java
index 7bc0f54648..81d2245721 100644
--- a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java
+++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java
@@ -20,7 +20,7 @@
package org.elasticsearch.client.transport;
import com.carrotsearch.hppc.cursors.ObjectCursor;
-
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
@@ -32,9 +32,8 @@ import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.client.Requests;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.Randomness;
+import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
@@ -43,11 +42,11 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.ConnectTransportException;
import org.elasticsearch.transport.FutureTransportResponseHandler;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequestOptions;
+import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportService;
import java.io.Closeable;
@@ -340,7 +339,7 @@ public class TransportClientNodesService extends AbstractComponent implements Cl
transportService.connectToNode(node);
} catch (Exception e) {
it.remove();
- logger.debug("failed to connect to discovered node [{}]", e, node);
+ logger.debug(new ParameterizedMessage("failed to connect to discovered node [{}]", node), e);
}
}
}
@@ -377,7 +376,7 @@ public class TransportClientNodesService extends AbstractComponent implements Cl
logger.trace("connecting to listed node (light) [{}]", listedNode);
transportService.connectToNodeLight(listedNode);
} catch (Exception e) {
- logger.debug("failed to connect to node [{}], removed from nodes list", e, listedNode);
+ logger.debug(new ParameterizedMessage("failed to connect to node [{}], removed from nodes list", listedNode), e);
newFilteredNodes.add(listedNode);
continue;
}
@@ -409,7 +408,7 @@ public class TransportClientNodesService extends AbstractComponent implements Cl
newNodes.add(listedNode);
}
} catch (Exception e) {
- logger.info("failed to get node info for {}, disconnecting...", e, listedNode);
+ logger.info(new ParameterizedMessage("failed to get node info for {}, disconnecting...", listedNode), e);
transportService.disconnectFromNode(listedNode);
}
}
@@ -453,7 +452,7 @@ public class TransportClientNodesService extends AbstractComponent implements Cl
transportService.connectToNodeLight(listedNode);
}
} catch (Exception e) {
- logger.debug("failed to connect to node [{}], ignoring...", e, listedNode);
+ logger.debug(new ParameterizedMessage("failed to connect to node [{}], ignoring...", listedNode), e);
latch.countDown();
return;
}
@@ -482,13 +481,18 @@ public class TransportClientNodesService extends AbstractComponent implements Cl
@Override
public void handleException(TransportException e) {
- logger.info("failed to get local cluster state for {}, disconnecting...", e, listedNode);
+ logger.info(
+ new ParameterizedMessage(
+ "failed to get local cluster state for {}, disconnecting...",
+ listedNode),
+ e);
transportService.disconnectFromNode(listedNode);
latch.countDown();
}
});
} catch (Exception e) {
- logger.info("failed to get local cluster state info for {}, disconnecting...", e, listedNode);
+ logger.info(
+ new ParameterizedMessage("failed to get local cluster state info for {}, disconnecting...", listedNode), e);
transportService.disconnectFromNode(listedNode);
latch.countDown();
}
diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java
index 2b00c57537..4e582cb32c 100644
--- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java
+++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
import org.elasticsearch.cluster.action.index.NodeMappingRefreshAction;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
@@ -44,17 +45,16 @@ import org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceA
import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider;
import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider;
+import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.NodeVersionAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.RebalanceOnlyWhenActiveAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ReplicaAfterPrimaryActiveAllocationDecider;
-import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.SnapshotInProgressAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.AbstractModule;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java b/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java
index 228ac3f41b..e18ec5543d 100644
--- a/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java
+++ b/core/src/main/java/org/elasticsearch/cluster/ClusterStateObserver.java
@@ -19,10 +19,10 @@
package org.elasticsearch.cluster;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ThreadContext;
@@ -35,7 +35,7 @@ import java.util.concurrent.atomic.AtomicReference;
*/
public class ClusterStateObserver {
- protected final ESLogger logger;
+ protected final Logger logger;
public final ChangePredicate MATCH_ALL_CHANGES_PREDICATE = new EventPredicate() {
@@ -58,7 +58,7 @@ public class ClusterStateObserver {
volatile boolean timedOut;
- public ClusterStateObserver(ClusterService clusterService, ESLogger logger, ThreadContext contextHolder) {
+ public ClusterStateObserver(ClusterService clusterService, Logger logger, ThreadContext contextHolder) {
this(clusterService, new TimeValue(60000), logger, contextHolder);
}
@@ -67,7 +67,7 @@ public class ClusterStateObserver {
* will fail any existing or new #waitForNextChange calls. Set to null
* to wait indefinitely
*/
- public ClusterStateObserver(ClusterService clusterService, @Nullable TimeValue timeout, ESLogger logger, ThreadContext contextHolder) {
+ public ClusterStateObserver(ClusterService clusterService, @Nullable TimeValue timeout, Logger logger, ThreadContext contextHolder) {
this.clusterService = clusterService;
this.lastObservedState = new AtomicReference<>(new ObservedState(clusterService.state()));
this.timeOutValue = timeout;
diff --git a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java
index 534f007e8b..c2cdaf90bc 100644
--- a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
@@ -39,7 +40,6 @@ import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
@@ -379,7 +379,7 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu
return clusterInfo;
}
- static void buildShardLevelInfo(ESLogger logger, ShardStats[] stats, ImmutableOpenMap.Builder<String, Long> newShardSizes,
+ static void buildShardLevelInfo(Logger logger, ShardStats[] stats, ImmutableOpenMap.Builder<String, Long> newShardSizes,
ImmutableOpenMap.Builder<ShardRouting, String> newShardRoutingToDataPath, ClusterState state) {
MetaData meta = state.getMetaData();
for (ShardStats s : stats) {
@@ -402,7 +402,7 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu
}
}
- static void fillDiskUsagePerNode(ESLogger logger, List<NodeStats> nodeStatsArray,
+ static void fillDiskUsagePerNode(Logger logger, List<NodeStats> nodeStatsArray,
ImmutableOpenMap.Builder<String, DiskUsage> newLeastAvaiableUsages,
ImmutableOpenMap.Builder<String, DiskUsage> newMostAvaiableUsages) {
for (NodeStats nodeStats : nodeStatsArray) {
diff --git a/core/src/main/java/org/elasticsearch/cluster/NodeConnectionsService.java b/core/src/main/java/org/elasticsearch/cluster/NodeConnectionsService.java
index a487bda0db..e58881ad59 100644
--- a/core/src/main/java/org/elasticsearch/cluster/NodeConnectionsService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/NodeConnectionsService.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.cluster;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
@@ -91,7 +92,7 @@ public class NodeConnectionsService extends AbstractLifecycleComponent {
try {
transportService.disconnectFromNode(node);
} catch (Exception e) {
- logger.warn("failed to disconnect to node [{}]", e, node);
+ logger.warn(new ParameterizedMessage("failed to disconnect to node [{}]", node), e);
}
}
}
@@ -113,7 +114,7 @@ public class NodeConnectionsService extends AbstractLifecycleComponent {
nodeFailureCount = nodeFailureCount + 1;
// log every 6th failure
if ((nodeFailureCount % 6) == 1) {
- logger.warn("failed to connect to node {} (tried [{}] times)", e, node, nodeFailureCount);
+ logger.warn(new ParameterizedMessage("failed to connect to node {} (tried [{}] times)", node, nodeFailureCount), e);
}
nodes.put(node, nodeFailureCount);
}
diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java
index c21d10382a..239251edfc 100644
--- a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java
+++ b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java
@@ -19,6 +19,8 @@
package org.elasticsearch.cluster.action.shard;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.cluster.ClusterChangedEvent;
@@ -43,7 +45,6 @@ import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.discovery.Discovery;
@@ -108,7 +109,7 @@ public class ShardStateAction extends AbstractComponent {
if (isMasterChannelException(exp)) {
waitForNewMasterAndRetry(actionName, observer, shardEntry, listener);
} else {
- logger.warn("{} unexpected failure while sending request [{}] to [{}] for shard entry [{}]", exp, shardEntry.shardId, actionName, masterNode, shardEntry);
+ logger.warn(new ParameterizedMessage("{} unexpected failure while sending request [{}] to [{}] for shard entry [{}]", shardEntry.shardId, actionName, masterNode, shardEntry), exp);
listener.onFailure(exp instanceof RemoteTransportException ? (Exception) (exp.getCause() instanceof Exception ? exp.getCause() : new ElasticsearchException(exp.getCause())) : exp);
}
}
@@ -169,7 +170,7 @@ public class ShardStateAction extends AbstractComponent {
@Override
public void onClusterServiceClose() {
- logger.warn("{} node closed while execution action [{}] for shard entry [{}]", shardEntry.failure, shardEntry.shardId, actionName, shardEntry);
+ logger.warn(new ParameterizedMessage("{} node closed while execution action [{}] for shard entry [{}]", shardEntry.shardId, actionName, shardEntry), shardEntry.failure);
listener.onFailure(new NodeClosedException(clusterService.localNode()));
}
@@ -184,9 +185,9 @@ public class ShardStateAction extends AbstractComponent {
private static class ShardFailedTransportHandler implements TransportRequestHandler<ShardEntry> {
private final ClusterService clusterService;
private final ShardFailedClusterStateTaskExecutor shardFailedClusterStateTaskExecutor;
- private final ESLogger logger;
+ private final Logger logger;
- public ShardFailedTransportHandler(ClusterService clusterService, ShardFailedClusterStateTaskExecutor shardFailedClusterStateTaskExecutor, ESLogger logger) {
+ public ShardFailedTransportHandler(ClusterService clusterService, ShardFailedClusterStateTaskExecutor shardFailedClusterStateTaskExecutor, Logger logger) {
this.clusterService = clusterService;
this.shardFailedClusterStateTaskExecutor = shardFailedClusterStateTaskExecutor;
this.logger = logger;
@@ -194,7 +195,7 @@ public class ShardStateAction extends AbstractComponent {
@Override
public void messageReceived(ShardEntry request, TransportChannel channel) throws Exception {
- logger.warn("{} received shard failed for {}", request.failure, request.shardId, request);
+ logger.warn(new ParameterizedMessage("{} received shard failed for {}", request.shardId, request), request.failure);
clusterService.submitStateUpdateTask(
"shard-failed",
request,
@@ -203,12 +204,12 @@ public class ShardStateAction extends AbstractComponent {
new ClusterStateTaskListener() {
@Override
public void onFailure(String source, Exception e) {
- logger.error("{} unexpected failure while failing shard [{}]", e, request.shardId, request);
+ logger.error(new ParameterizedMessage("{} unexpected failure while failing shard [{}]", request.shardId, request), e);
try {
channel.sendResponse(e);
} catch (Exception channelException) {
channelException.addSuppressed(e);
- logger.warn("{} failed to send failure [{}] while failing shard [{}]", channelException, request.shardId, e, request);
+ logger.warn(new ParameterizedMessage("{} failed to send failure [{}] while failing shard [{}]", request.shardId, e, request), channelException);
}
}
@@ -218,7 +219,7 @@ public class ShardStateAction extends AbstractComponent {
try {
channel.sendResponse(new NotMasterException(source));
} catch (Exception channelException) {
- logger.warn("{} failed to send no longer master while failing shard [{}]", channelException, request.shardId, request);
+ logger.warn(new ParameterizedMessage("{} failed to send no longer master while failing shard [{}]", request.shardId, request), channelException);
}
}
@@ -227,7 +228,7 @@ public class ShardStateAction extends AbstractComponent {
try {
channel.sendResponse(TransportResponse.Empty.INSTANCE);
} catch (Exception channelException) {
- logger.warn("{} failed to send response while failing shard [{}]", channelException, request.shardId, request);
+ logger.warn(new ParameterizedMessage("{} failed to send response while failing shard [{}]", request.shardId, request), channelException);
}
}
}
@@ -238,9 +239,9 @@ public class ShardStateAction extends AbstractComponent {
public static class ShardFailedClusterStateTaskExecutor implements ClusterStateTaskExecutor<ShardEntry> {
private final AllocationService allocationService;
private final RoutingService routingService;
- private final ESLogger logger;
+ private final Logger logger;
- public ShardFailedClusterStateTaskExecutor(AllocationService allocationService, RoutingService routingService, ESLogger logger) {
+ public ShardFailedClusterStateTaskExecutor(AllocationService allocationService, RoutingService routingService, Logger logger) {
this.allocationService = allocationService;
this.routingService = routingService;
this.logger = logger;
@@ -315,7 +316,7 @@ public class ShardStateAction extends AbstractComponent {
}
batchResultBuilder.successes(tasksToBeApplied);
} catch (Exception e) {
- logger.warn("failed to apply failed shards {}", e, shardRoutingsToBeApplied);
+ logger.warn(new ParameterizedMessage("failed to apply failed shards {}", shardRoutingsToBeApplied), e);
// failures are communicated back to the requester
// cluster state will not be updated in this case
batchResultBuilder.failures(tasksToBeApplied, e);
@@ -352,9 +353,9 @@ public class ShardStateAction extends AbstractComponent {
private static class ShardStartedTransportHandler implements TransportRequestHandler<ShardEntry> {
private final ClusterService clusterService;
private final ShardStartedClusterStateTaskExecutor shardStartedClusterStateTaskExecutor;
- private final ESLogger logger;
+ private final Logger logger;
- public ShardStartedTransportHandler(ClusterService clusterService, ShardStartedClusterStateTaskExecutor shardStartedClusterStateTaskExecutor, ESLogger logger) {
+ public ShardStartedTransportHandler(ClusterService clusterService, ShardStartedClusterStateTaskExecutor shardStartedClusterStateTaskExecutor, Logger logger) {
this.clusterService = clusterService;
this.shardStartedClusterStateTaskExecutor = shardStartedClusterStateTaskExecutor;
this.logger = logger;
@@ -375,9 +376,9 @@ public class ShardStateAction extends AbstractComponent {
public static class ShardStartedClusterStateTaskExecutor implements ClusterStateTaskExecutor<ShardEntry>, ClusterStateTaskListener {
private final AllocationService allocationService;
- private final ESLogger logger;
+ private final Logger logger;
- public ShardStartedClusterStateTaskExecutor(AllocationService allocationService, ESLogger logger) {
+ public ShardStartedClusterStateTaskExecutor(AllocationService allocationService, Logger logger) {
this.allocationService = allocationService;
this.logger = logger;
}
@@ -431,7 +432,7 @@ public class ShardStateAction extends AbstractComponent {
}
builder.successes(tasksToBeApplied);
} catch (Exception e) {
- logger.warn("failed to apply started shards {}", e, shardRoutingsToBeApplied);
+ logger.warn(new ParameterizedMessage("failed to apply started shards {}", shardRoutingsToBeApplied), e);
builder.failures(tasksToBeApplied, e);
}
@@ -440,7 +441,7 @@ public class ShardStateAction extends AbstractComponent {
@Override
public void onFailure(String source, Exception e) {
- logger.error("unexpected failure during [{}]", e, source);
+ logger.error(new ParameterizedMessage("unexpected failure during [{}]", source), e);
}
}
diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java
index 29bb55f810..fd7e08fec3 100644
--- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java
+++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java
@@ -22,6 +22,7 @@ package org.elasticsearch.cluster.metadata;
import com.carrotsearch.hppc.ObjectHashSet;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.cluster.Diff;
import org.elasticsearch.cluster.Diffable;
@@ -38,7 +39,6 @@ import org.elasticsearch.common.collect.HppcMaps;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
@@ -758,7 +758,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, Fr
/** As of 2.0 we require units for time and byte-sized settings. This methods adds default units to any cluster settings that don't
* specify a unit. */
- public static MetaData addDefaultUnitsIfNeeded(ESLogger logger, MetaData metaData) {
+ public static MetaData addDefaultUnitsIfNeeded(Logger logger, MetaData metaData) {
Settings.Builder newPersistentSettings = null;
for(Map.Entry<String,String> ent : metaData.persistentSettings().getAsMap().entrySet()) {
String settingName = ent.getKey();
diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java
index 041850a0b2..6023d6bbaf 100644
--- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java
@@ -21,6 +21,7 @@ package org.elasticsearch.cluster.metadata;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
@@ -446,9 +447,9 @@ public class MetaDataCreateIndexService extends AbstractComponent {
@Override
public void onFailure(String source, Exception e) {
if (e instanceof IndexAlreadyExistsException) {
- logger.trace("[{}] failed to create", e, request.index());
+ logger.trace(new ParameterizedMessage("[{}] failed to create", request.index()), e);
} else {
- logger.debug("[{}] failed to create", e, request.index());
+ logger.debug(new ParameterizedMessage("[{}] failed to create", request.index()), e);
}
super.onFailure(source, e);
}
diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java
index 04631df57a..c21454a09a 100644
--- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java
@@ -20,7 +20,6 @@
package org.elasticsearch.cluster.metadata;
import com.carrotsearch.hppc.cursors.ObjectCursor;
-
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesClusterStateUpdateRequest;
@@ -64,7 +63,7 @@ public class MetaDataIndexAliasesService extends AbstractComponent {
private final AliasValidator aliasValidator;
private final NodeServicesProvider nodeServicesProvider;
-
+
private final MetaDataDeleteIndexService deleteIndexService;
@Inject
diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java
index c44fee0fb2..1e218902e2 100644
--- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java
@@ -20,6 +20,7 @@
package org.elasticsearch.cluster.metadata;
import com.carrotsearch.hppc.cursors.ObjectCursor;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingClusterStateUpdateRequest;
import org.elasticsearch.cluster.AckedClusterStateTaskListener;
@@ -193,7 +194,7 @@ public class MetaDataMappingService extends AbstractComponent {
}
}
} catch (Exception e) {
- logger.warn("[{}] failed to refresh-mapping in cluster state", e, index);
+ logger.warn(new ParameterizedMessage("[{}] failed to refresh-mapping in cluster state", index), e);
}
return dirty;
}
@@ -207,7 +208,7 @@ public class MetaDataMappingService extends AbstractComponent {
refreshTask,
ClusterStateTaskConfig.build(Priority.HIGH),
refreshExecutor,
- (source, e) -> logger.warn("failure during [{}]", e, source)
+ (source, e) -> logger.warn(new ParameterizedMessage("failure during [{}]", source), e)
);
}
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java
index 4eeb433951..553e3b7324 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java
@@ -21,6 +21,7 @@ package org.elasticsearch.cluster.routing;
import com.carrotsearch.hppc.ObjectIntHashMap;
import com.carrotsearch.hppc.cursors.ObjectCursor;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -30,7 +31,6 @@ import org.elasticsearch.cluster.routing.UnassignedInfo.AllocationStatus;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.collect.Tuple;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
@@ -452,7 +452,7 @@ public class RoutingNodes implements Iterable<RoutingNode> {
*
* @return the started shard
*/
- public ShardRouting startShard(ESLogger logger, ShardRouting initializingShard, RoutingChangesObserver routingChangesObserver) {
+ public ShardRouting startShard(Logger logger, ShardRouting initializingShard, RoutingChangesObserver routingChangesObserver) {
ensureMutable();
ShardRouting startedShard = started(initializingShard);
logger.trace("{} marked shard as started (routing: {})", initializingShard.shardId(), initializingShard);
@@ -484,7 +484,7 @@ public class RoutingNodes implements Iterable<RoutingNode> {
* - If shard is a (primary or replica) relocation target, this also clears the relocation information on the source shard.
*
*/
- public void failShard(ESLogger logger, ShardRouting failedShard, UnassignedInfo unassignedInfo, IndexMetaData indexMetaData,
+ public void failShard(Logger logger, ShardRouting failedShard, UnassignedInfo unassignedInfo, IndexMetaData indexMetaData,
RoutingChangesObserver routingChangesObserver) {
ensureMutable();
assert failedShard.assignedToNode() : "only assigned shards can be failed";
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingService.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingService.java
index cfe48dd711..641fdacc63 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingService.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateUpdateTask;
@@ -113,16 +114,16 @@ public class RoutingService extends AbstractLifecycleComponent {
rerouting.set(false);
ClusterState state = clusterService.state();
if (logger.isTraceEnabled()) {
- logger.error("unexpected failure during [{}], current state:\n{}", e, source, state.prettyPrint());
+ logger.error(new ParameterizedMessage("unexpected failure during [{}], current state:\n{}", source, state.prettyPrint()), e);
} else {
- logger.error("unexpected failure during [{}], current state version [{}]", e, source, state.version());
+ logger.error(new ParameterizedMessage("unexpected failure during [{}], current state version [{}]", source, state.version()), e);
}
}
});
} catch (Exception e) {
rerouting.set(false);
ClusterState state = clusterService.state();
- logger.warn("failed to reroute routing table, current state:\n{}", e, state.prettyPrint());
+ logger.warn(new ParameterizedMessage("failed to reroute routing table, current state:\n{}", state.prettyPrint()), e);
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java
index c86e256bd6..7087ae57c4 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation.allocator;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.IntroSorter;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -37,7 +38,6 @@ import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
@@ -209,7 +209,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards
* A {@link Balancer}
*/
public static class Balancer {
- private final ESLogger logger;
+ private final Logger logger;
private final Map<String, ModelNode> nodes = new HashMap<>();
private final RoutingAllocation allocation;
private final RoutingNodes routingNodes;
@@ -219,7 +219,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards
private final MetaData metaData;
private final float avgShardsPerNode;
- public Balancer(ESLogger logger, RoutingAllocation allocation, WeightFunction weight, float threshold) {
+ public Balancer(Logger logger, RoutingAllocation allocation, WeightFunction weight, float threshold) {
this.logger = logger;
this.allocation = allocation;
this.weight = weight;
diff --git a/core/src/main/java/org/elasticsearch/cluster/service/ClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/ClusterService.java
index 21e2defd9b..83aa231f35 100644
--- a/core/src/main/java/org/elasticsearch/cluster/service/ClusterService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/service/ClusterService.java
@@ -19,6 +19,8 @@
package org.elasticsearch.cluster.service;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.cluster.AckedClusterStateTaskListener;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterName;
@@ -43,7 +45,6 @@ import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
@@ -554,9 +555,16 @@ public class ClusterService extends AbstractLifecycleComponent {
} catch (Exception e) {
TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(currentTimeInNanos() - startTimeNS)));
if (logger.isTraceEnabled()) {
- logger.trace("failed to execute cluster state update in [{}], state:\nversion [{}], source [{}]\n{}{}{}", e, executionTime,
- previousClusterState.version(), tasksSummary, previousClusterState.nodes().prettyPrint(),
- previousClusterState.routingTable().prettyPrint(), previousClusterState.getRoutingNodes().prettyPrint());
+ logger.trace(
+ new ParameterizedMessage(
+ "failed to execute cluster state update in [{}], state:\nversion [{}], source [{}]\n{}{}{}",
+ executionTime,
+ previousClusterState.version(),
+ tasksSummary,
+ previousClusterState.nodes().prettyPrint(),
+ previousClusterState.routingTable().prettyPrint(),
+ previousClusterState.getRoutingNodes().prettyPrint()),
+ e);
}
warnAboutSlowTaskIfNeeded(executionTime, tasksSummary);
batchResult = ClusterStateTaskExecutor.BatchResult.<T>builder()
@@ -587,7 +595,7 @@ public class ClusterService extends AbstractLifecycleComponent {
executionResult.handle(
() -> proccessedListeners.add(updateTask),
ex -> {
- logger.debug("cluster state update task {} failed", ex, updateTask.toString(executor));
+ logger.debug(new ParameterizedMessage("cluster state update task {} failed", updateTask.toString(executor)), ex);
updateTask.listener.onFailure(updateTask.source, ex);
}
);
@@ -670,7 +678,12 @@ public class ClusterService extends AbstractLifecycleComponent {
try {
clusterStatePublisher.accept(clusterChangedEvent, ackListener);
} catch (Discovery.FailedToCommitClusterStateException t) {
- logger.warn("failing [{}]: failed to commit cluster state version [{}]", t, tasksSummary, newClusterState.version());
+ logger.warn(
+ new ParameterizedMessage(
+ "failing [{}]: failed to commit cluster state version [{}]",
+ tasksSummary,
+ newClusterState.version()),
+ t);
proccessedListeners.forEach(task -> task.listener.onFailure(task.source, t));
return;
}
@@ -713,7 +726,9 @@ public class ClusterService extends AbstractLifecycleComponent {
try {
ackListener.onNodeAck(newClusterState.nodes().getLocalNode(), null);
} catch (Exception e) {
- logger.debug("error while processing ack for master node [{}]", e, newClusterState.nodes().getLocalNode());
+ logger.debug(
+ new ParameterizedMessage("error while processing ack for master node [{}]", newClusterState.nodes().getLocalNode()),
+ e);
}
}
@@ -724,7 +739,11 @@ public class ClusterService extends AbstractLifecycleComponent {
try {
executor.clusterStatePublished(clusterChangedEvent);
} catch (Exception e) {
- logger.error("exception thrown while notifying executor of new cluster state publication [{}]", e, tasksSummary);
+ logger.error(
+ new ParameterizedMessage(
+ "exception thrown while notifying executor of new cluster state publication [{}]",
+ tasksSummary),
+ e);
}
TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(currentTimeInNanos() - startTimeNS)));
@@ -733,8 +752,15 @@ public class ClusterService extends AbstractLifecycleComponent {
warnAboutSlowTaskIfNeeded(executionTime, tasksSummary);
} catch (Exception e) {
TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(currentTimeInNanos() - startTimeNS)));
- logger.warn("failed to apply updated cluster state in [{}]:\nversion [{}], uuid [{}], source [{}]\n{}", e, executionTime,
- newClusterState.version(), newClusterState.stateUUID(), tasksSummary, newClusterState.prettyPrint());
+ logger.warn(
+ new ParameterizedMessage(
+ "failed to apply updated cluster state in [{}]:\nversion [{}], uuid [{}], source [{}]\n{}",
+ executionTime,
+ newClusterState.version(),
+ newClusterState.stateUUID(),
+ tasksSummary,
+ newClusterState.prettyPrint()),
+ e);
// TODO: do we want to call updateTask.onFailure here?
}
@@ -743,7 +769,7 @@ public class ClusterService extends AbstractLifecycleComponent {
// this one is overridden in tests so we can control time
protected long currentTimeInNanos() {return System.nanoTime();}
- private static SafeClusterStateTaskListener safe(ClusterStateTaskListener listener, ESLogger logger) {
+ private static SafeClusterStateTaskListener safe(ClusterStateTaskListener listener, Logger logger) {
if (listener instanceof AckedClusterStateTaskListener) {
return new SafeAckedClusterStateTaskListener((AckedClusterStateTaskListener) listener, logger);
} else {
@@ -753,9 +779,9 @@ public class ClusterService extends AbstractLifecycleComponent {
private static class SafeClusterStateTaskListener implements ClusterStateTaskListener {
private final ClusterStateTaskListener listener;
- private final ESLogger logger;
+ private final Logger logger;
- public SafeClusterStateTaskListener(ClusterStateTaskListener listener, ESLogger logger) {
+ public SafeClusterStateTaskListener(ClusterStateTaskListener listener, Logger logger) {
this.listener = listener;
this.logger = logger;
}
@@ -766,7 +792,7 @@ public class ClusterService extends AbstractLifecycleComponent {
listener.onFailure(source, e);
} catch (Exception inner) {
inner.addSuppressed(e);
- logger.error("exception thrown by listener notifying of failure from [{}]", inner, source);
+ logger.error(new ParameterizedMessage("exception thrown by listener notifying of failure from [{}]", source), inner);
}
}
@@ -775,7 +801,9 @@ public class ClusterService extends AbstractLifecycleComponent {
try {
listener.onNoLongerMaster(source);
} catch (Exception e) {
- logger.error("exception thrown by listener while notifying no longer master from [{}]", e, source);
+ logger.error(
+ new ParameterizedMessage("exception thrown by listener while notifying no longer master from [{}]", source),
+ e);
}
}
@@ -785,21 +813,22 @@ public class ClusterService extends AbstractLifecycleComponent {
listener.clusterStateProcessed(source, oldState, newState);
} catch (Exception e) {
logger.error(
+ new ParameterizedMessage(
"exception thrown by listener while notifying of cluster state processed from [{}], old cluster state:\n" +
- "{}\nnew cluster state:\n{}",
- e,
+ "{}\nnew cluster state:\n{}",
source,
oldState.prettyPrint(),
- newState.prettyPrint());
+ newState.prettyPrint()),
+ e);
}
}
}
private static class SafeAckedClusterStateTaskListener extends SafeClusterStateTaskListener implements AckedClusterStateTaskListener {
private final AckedClusterStateTaskListener listener;
- private final ESLogger logger;
+ private final Logger logger;
- public SafeAckedClusterStateTaskListener(AckedClusterStateTaskListener listener, ESLogger logger) {
+ public SafeAckedClusterStateTaskListener(AckedClusterStateTaskListener listener, Logger logger) {
super(listener, logger);
this.listener = listener;
this.logger = logger;
@@ -996,7 +1025,7 @@ public class ClusterService extends AbstractLifecycleComponent {
private static class AckCountDownListener implements Discovery.AckListener {
- private static final ESLogger logger = Loggers.getLogger(AckCountDownListener.class);
+ private static final Logger logger = Loggers.getLogger(AckCountDownListener.class);
private final AckedClusterStateTaskListener ackedTaskListener;
private final CountDown countDown;
@@ -1040,7 +1069,9 @@ public class ClusterService extends AbstractLifecycleComponent {
logger.trace("ack received from node [{}], cluster_state update (version: {})", node, clusterStateVersion);
} else {
this.lastFailure = e;
- logger.debug("ack received from node [{}], cluster_state update (version: {})", e, node, clusterStateVersion);
+ logger.debug(
+ new ParameterizedMessage("ack received from node [{}], cluster_state update (version: {})", node, clusterStateVersion),
+ e);
}
if (countDown.countDown()) {
diff --git a/core/src/main/java/org/elasticsearch/common/SuppressLoggerChecks.java b/core/src/main/java/org/elasticsearch/common/SuppressLoggerChecks.java
index c6f23f72f9..dc6a88447b 100644
--- a/core/src/main/java/org/elasticsearch/common/SuppressLoggerChecks.java
+++ b/core/src/main/java/org/elasticsearch/common/SuppressLoggerChecks.java
@@ -16,13 +16,14 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.common;
+package org.elasticsearch.common;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
+
/**
* Annotation to suppress logging usage checks errors inside a whole class or a method.
*/
diff --git a/core/src/main/java/org/elasticsearch/common/breaker/ChildMemoryCircuitBreaker.java b/core/src/main/java/org/elasticsearch/common/breaker/ChildMemoryCircuitBreaker.java
index e6c67dbe9b..68bf52e9e0 100644
--- a/core/src/main/java/org/elasticsearch/common/breaker/ChildMemoryCircuitBreaker.java
+++ b/core/src/main/java/org/elasticsearch/common/breaker/ChildMemoryCircuitBreaker.java
@@ -19,7 +19,7 @@
package org.elasticsearch.common.breaker;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.indices.breaker.BreakerSettings;
import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService;
@@ -36,7 +36,7 @@ public class ChildMemoryCircuitBreaker implements CircuitBreaker {
private final double overheadConstant;
private final AtomicLong used;
private final AtomicLong trippedCount;
- private final ESLogger logger;
+ private final Logger logger;
private final HierarchyCircuitBreakerService parent;
private final String name;
@@ -48,7 +48,7 @@ public class ChildMemoryCircuitBreaker implements CircuitBreaker {
* @param parent parent circuit breaker service to delegate tripped breakers to
* @param name the name of the breaker
*/
- public ChildMemoryCircuitBreaker(BreakerSettings settings, ESLogger logger,
+ public ChildMemoryCircuitBreaker(BreakerSettings settings, Logger logger,
HierarchyCircuitBreakerService parent, String name) {
this(settings, null, logger, parent, name);
}
@@ -64,7 +64,7 @@ public class ChildMemoryCircuitBreaker implements CircuitBreaker {
* @param oldBreaker the previous circuit breaker to inherit the used value from (starting offset)
*/
public ChildMemoryCircuitBreaker(BreakerSettings settings, ChildMemoryCircuitBreaker oldBreaker,
- ESLogger logger, HierarchyCircuitBreakerService parent, String name) {
+ Logger logger, HierarchyCircuitBreakerService parent, String name) {
this.name = name;
this.settings = settings;
this.memoryBytesLimit = settings.getLimit();
diff --git a/core/src/main/java/org/elasticsearch/common/breaker/MemoryCircuitBreaker.java b/core/src/main/java/org/elasticsearch/common/breaker/MemoryCircuitBreaker.java
index 7dbdd7d6a6..3ac4a52994 100644
--- a/core/src/main/java/org/elasticsearch/common/breaker/MemoryCircuitBreaker.java
+++ b/core/src/main/java/org/elasticsearch/common/breaker/MemoryCircuitBreaker.java
@@ -18,7 +18,7 @@
*/
package org.elasticsearch.common.breaker;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.unit.ByteSizeValue;
import java.util.concurrent.atomic.AtomicLong;
@@ -33,7 +33,7 @@ public class MemoryCircuitBreaker implements CircuitBreaker {
private final double overheadConstant;
private final AtomicLong used;
private final AtomicLong trippedCount;
- private final ESLogger logger;
+ private final Logger logger;
/**
@@ -43,7 +43,7 @@ public class MemoryCircuitBreaker implements CircuitBreaker {
* @param limit circuit breaker limit
* @param overheadConstant constant multiplier for byte estimations
*/
- public MemoryCircuitBreaker(ByteSizeValue limit, double overheadConstant, ESLogger logger) {
+ public MemoryCircuitBreaker(ByteSizeValue limit, double overheadConstant, Logger logger) {
this(limit, overheadConstant, null, logger);
}
@@ -56,7 +56,7 @@ public class MemoryCircuitBreaker implements CircuitBreaker {
* @param overheadConstant constant multiplier for byte estimations
* @param oldBreaker the previous circuit breaker to inherit the used value from (starting offset)
*/
- public MemoryCircuitBreaker(ByteSizeValue limit, double overheadConstant, MemoryCircuitBreaker oldBreaker, ESLogger logger) {
+ public MemoryCircuitBreaker(ByteSizeValue limit, double overheadConstant, MemoryCircuitBreaker oldBreaker, Logger logger) {
this.memoryBytesLimit = limit.bytes();
this.overheadConstant = overheadConstant;
if (oldBreaker == null) {
diff --git a/core/src/main/java/org/elasticsearch/common/component/AbstractComponent.java b/core/src/main/java/org/elasticsearch/common/component/AbstractComponent.java
index fa49a80123..8cb51f2b06 100644
--- a/core/src/main/java/org/elasticsearch/common/component/AbstractComponent.java
+++ b/core/src/main/java/org/elasticsearch/common/component/AbstractComponent.java
@@ -19,19 +19,17 @@
package org.elasticsearch.common.component;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;
-/**
- *
- */
public abstract class AbstractComponent {
- protected final ESLogger logger;
+ protected final Logger logger;
protected final DeprecationLogger deprecationLogger;
protected final Settings settings;
@@ -42,7 +40,7 @@ public abstract class AbstractComponent {
}
public AbstractComponent(Settings settings, Class customClass) {
- this.logger = Loggers.getLogger(customClass, settings);
+ this.logger = LogManager.getLogger(customClass);
this.deprecationLogger = new DeprecationLogger(logger);
this.settings = settings;
}
@@ -71,4 +69,5 @@ public abstract class AbstractComponent {
deprecationLogger.deprecated("Setting [{}] has been removed, use [{}] instead", settingName, alternativeName);
}
}
+
}
diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java
index 84d02eb4ec..cb2f8bb4e7 100644
--- a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java
+++ b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java
@@ -19,20 +19,15 @@
package org.elasticsearch.common.geo.builders;
-import org.locationtech.spatial4j.context.jts.JtsSpatialContext;
-import org.locationtech.spatial4j.exception.InvalidShapeException;
-import org.locationtech.spatial4j.shape.Shape;
-import org.locationtech.spatial4j.shape.jts.JtsGeometry;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
-
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.unit.DistanceUnit.Distance;
import org.elasticsearch.common.xcontent.ToXContent;
@@ -40,6 +35,10 @@ import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.GeoShapeFieldMapper;
+import org.locationtech.spatial4j.context.jts.JtsSpatialContext;
+import org.locationtech.spatial4j.exception.InvalidShapeException;
+import org.locationtech.spatial4j.shape.Shape;
+import org.locationtech.spatial4j.shape.jts.JtsGeometry;
import java.io.IOException;
import java.util.ArrayList;
@@ -53,7 +52,7 @@ import java.util.Locale;
*/
public abstract class ShapeBuilder extends ToXContentToBytes implements NamedWriteable {
- protected static final ESLogger LOGGER = ESLoggerFactory.getLogger(ShapeBuilder.class.getName());
+ protected static final Logger LOGGER = ESLoggerFactory.getLogger(ShapeBuilder.class.getName());
private static final boolean DEBUG;
static {
diff --git a/core/src/main/java/org/elasticsearch/common/inject/spi/Elements.java b/core/src/main/java/org/elasticsearch/common/inject/spi/Elements.java
index 30911def55..bfb084dd47 100644
--- a/core/src/main/java/org/elasticsearch/common/inject/spi/Elements.java
+++ b/core/src/main/java/org/elasticsearch/common/inject/spi/Elements.java
@@ -16,6 +16,7 @@
package org.elasticsearch.common.inject.spi;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Binder;
import org.elasticsearch.common.inject.Binding;
@@ -40,7 +41,6 @@ import org.elasticsearch.common.inject.internal.PrivateElementsImpl;
import org.elasticsearch.common.inject.internal.ProviderMethodsModule;
import org.elasticsearch.common.inject.internal.SourceProvider;
import org.elasticsearch.common.inject.matcher.Matcher;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import java.lang.annotation.Annotation;
@@ -351,7 +351,7 @@ public final class Elements {
return builder;
}
- private static ESLogger logger = Loggers.getLogger(Elements.class);
+ private static Logger logger = Loggers.getLogger(Elements.class);
protected Object getSource() {
Object ret;
diff --git a/core/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java b/core/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java
index 46175c0f66..943368059d 100644
--- a/core/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java
+++ b/core/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java
@@ -19,8 +19,8 @@
package org.elasticsearch.common.io;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.IOUtils;
-import org.elasticsearch.common.logging.ESLogger;
import java.io.BufferedReader;
import java.io.IOException;
@@ -101,7 +101,7 @@ public final class FileSystemUtils {
* Check that a directory exists, is a directory and is readable
* by the current user
*/
- public static boolean isAccessibleDirectory(Path directory, ESLogger logger) {
+ public static boolean isAccessibleDirectory(Path directory, Logger logger) {
assert directory != null && logger != null;
if (!Files.exists(directory)) {
diff --git a/core/src/main/java/org/elasticsearch/common/logging/ConsoleAppender.java b/core/src/main/java/org/elasticsearch/common/logging/ConsoleAppender.java
deleted file mode 100644
index 7c33389974..0000000000
--- a/core/src/main/java/org/elasticsearch/common/logging/ConsoleAppender.java
+++ /dev/null
@@ -1,260 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common.logging;
-
-import org.apache.log4j.Layout;
-import org.apache.log4j.WriterAppender;
-import org.apache.log4j.helpers.LogLog;
-import org.elasticsearch.common.SuppressForbidden;
-
-import java.io.IOException;
-import java.io.OutputStream;
-
-/**
- * ConsoleAppender appends log events to <code>System.out</code> or
- * <code>System.err</code> using a layout specified by the user. The
- * default target is <code>System.out</code>.
- * <p>Elasticsearch: Adapter from log4j to allow to disable console logging...</p>
- *
- * @author Ceki G&uuml;lc&uuml;
- * @author Curt Arnold
- * @since 1.1
- */
-public class ConsoleAppender extends WriterAppender {
-
- public static final String SYSTEM_OUT = "System.out";
- public static final String SYSTEM_ERR = "System.err";
-
- protected String target = SYSTEM_OUT;
-
- /**
- * Determines if the appender honors reassignments of System.out
- * or System.err made after configuration.
- */
- private boolean follow = true;
-
- /**
- * Constructs an unconfigured appender.
- */
- public ConsoleAppender() {
- }
-
- /**
- * Creates a configured appender.
- *
- * @param layout layout, may not be null.
- */
- public ConsoleAppender(Layout layout) {
- this(layout, SYSTEM_OUT);
- }
-
- /**
- * Creates a configured appender.
- *
- * @param layout layout, may not be null.
- * @param target target, either "System.err" or "System.out".
- */
- public ConsoleAppender(Layout layout, String target) {
- setLayout(layout);
- setTarget(target);
- activateOptions();
- }
-
- /**
- * Sets the value of the <b>Target</b> option. Recognized values
- * are "System.out" and "System.err". Any other value will be
- * ignored.
- */
- public void setTarget(String value) {
- String v = value.trim();
-
- if (SYSTEM_OUT.equalsIgnoreCase(v)) {
- target = SYSTEM_OUT;
- } else if (SYSTEM_ERR.equalsIgnoreCase(v)) {
- target = SYSTEM_ERR;
- } else {
- targetWarn(value);
- }
- }
-
- /**
- * Returns the current value of the <b>Target</b> property. The
- * default value of the option is "System.out".
- * <p>
- * See also {@link #setTarget}.
- */
- public String getTarget() {
- return target;
- }
-
- /**
- * Sets whether the appender honors reassignments of System.out
- * or System.err made after configuration.
- *
- * @param newValue if true, appender will use value of System.out or
- * System.err in force at the time when logging events are appended.
- * @since 1.2.13
- */
- public final void setFollow(final boolean newValue) {
- follow = newValue;
- }
-
- /**
- * Gets whether the appender honors reassignments of System.out
- * or System.err made after configuration.
- *
- * @return true if appender will use value of System.out or
- * System.err in force at the time when logging events are appended.
- * @since 1.2.13
- */
- public final boolean getFollow() {
- return follow;
- }
-
- void targetWarn(String val) {
- LogLog.warn("[" + val + "] should be System.out or System.err.");
- LogLog.warn("Using previously set target, System.out by default.");
- }
-
- /**
- * Prepares the appender for use.
- */
- @Override
- @SuppressForbidden(reason = "System#out")
- public void activateOptions() {
- if (follow) {
- if (target.equals(SYSTEM_ERR)) {
- setWriter(createWriter(new SystemErrStream()));
- } else {
- setWriter(createWriter(new SystemOutStream()));
- }
- } else {
- if (target.equals(SYSTEM_ERR)) {
- setWriter(createWriter(System.err));
- } else {
- setWriter(createWriter(System.out));
- }
- }
-
- super.activateOptions();
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
- protected
- final void closeWriter() {
- if (follow) {
- super.closeWriter();
- }
- }
-
-
- /**
- * An implementation of OutputStream that redirects to the
- * current System.err.
- */
- @SuppressForbidden(reason = "System#err")
- private static class SystemErrStream extends OutputStream {
- public SystemErrStream() {
- }
-
- @Override
- public void close() {
- }
-
- @Override
- public void flush() {
- System.err.flush();
- }
-
- @Override
- public void write(final byte[] b) throws IOException {
- if (!Loggers.consoleLoggingEnabled()) {
- return;
- }
- System.err.write(b);
- }
-
- @Override
-
- public void write(final byte[] b, final int off, final int len)
- throws IOException {
- if (!Loggers.consoleLoggingEnabled()) {
- return;
- }
- System.err.write(b, off, len);
- }
-
- @Override
- public void write(final int b) throws IOException {
- if (!Loggers.consoleLoggingEnabled()) {
- return;
- }
- System.err.write(b);
- }
- }
-
- /**
- * An implementation of OutputStream that redirects to the
- * current System.out.
- */
- @SuppressForbidden(reason = "System#err")
- private static class SystemOutStream extends OutputStream {
- public SystemOutStream() {
- }
-
- @Override
- public void close() {
- }
-
- @Override
- public void flush() {
- System.out.flush();
- }
-
- @Override
- public void write(final byte[] b) throws IOException {
- if (!Loggers.consoleLoggingEnabled()) {
- return;
- }
- System.out.write(b);
- }
-
- @Override
- public void write(final byte[] b, final int off, final int len)
- throws IOException {
- if (!Loggers.consoleLoggingEnabled()) {
- return;
- }
- System.out.write(b, off, len);
- }
-
- @Override
- public void write(final int b) throws IOException {
- if (!Loggers.consoleLoggingEnabled()) {
- return;
- }
- System.out.write(b);
- }
- }
-
-}
diff --git a/core/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java b/core/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java
index 5970f91732..294632dbdc 100644
--- a/core/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java
+++ b/core/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java
@@ -19,6 +19,9 @@
package org.elasticsearch.common.logging;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.common.SuppressLoggerChecks;
import org.elasticsearch.common.util.concurrent.ThreadContext;
@@ -31,6 +34,12 @@ import java.util.concurrent.CopyOnWriteArraySet;
*/
public class DeprecationLogger {
+ private final Logger logger;
+
+ public Logger getLogger() {
+ return logger;
+ }
+
/**
* The "Warning" Header comes from RFC-7234. As the RFC describes, it's generally used for caching purposes, but it can be
* used for <em>any</em> warning.
@@ -84,22 +93,20 @@ public class DeprecationLogger {
}
}
- private final ESLogger logger;
-
/**
* Creates a new deprecation logger based on the parent logger. Automatically
* prefixes the logger name with "deprecation", if it starts with "org.elasticsearch.",
* it replaces "org.elasticsearch" with "org.elasticsearch.deprecation" to maintain
* the "org.elasticsearch" namespace.
*/
- public DeprecationLogger(ESLogger parentLogger) {
+ public DeprecationLogger(Logger parentLogger) {
String name = parentLogger.getName();
if (name.startsWith("org.elasticsearch")) {
name = name.replace("org.elasticsearch.", "org.elasticsearch.deprecation.");
} else {
name = "deprecation." + name;
}
- this.logger = ESLoggerFactory.getLogger(parentLogger.getPrefix(), name);
+ this.logger = LogManager.getLogger(name, parentLogger.getMessageFactory());
}
/**
@@ -113,27 +120,27 @@ public class DeprecationLogger {
* Logs a deprecated message to the deprecation log, as well as to the local {@link ThreadContext}.
*
* @param threadContexts The node's {@link ThreadContext} (outside of concurrent tests, this should only ever have one context).
- * @param msg The deprecation message.
+ * @param message The deprecation message.
* @param params The parameters used to fill in the message, if any exist.
*/
@SuppressLoggerChecks(reason = "safely delegates to logger")
- void deprecated(Set<ThreadContext> threadContexts, String msg, Object... params) {
+ void deprecated(Set<ThreadContext> threadContexts, String message, Object... params) {
Iterator<ThreadContext> iterator = threadContexts.iterator();
if (iterator.hasNext()) {
- final String formattedMsg = LoggerMessageFormat.format(msg, params);
+ final String formattedMessage = LoggerMessageFormat.format(message, params);
while (iterator.hasNext()) {
try {
- iterator.next().addResponseHeader(DEPRECATION_HEADER, formattedMsg);
+ iterator.next().addResponseHeader(DEPRECATION_HEADER, formattedMessage);
} catch (IllegalStateException e) {
// ignored; it should be removed shortly
}
}
- logger.debug(formattedMsg);
+ logger.warn(formattedMessage);
} else {
- logger.debug(msg, params);
+ logger.warn(message, params);
}
}
diff --git a/core/src/main/java/org/elasticsearch/common/logging/ESLogger.java b/core/src/main/java/org/elasticsearch/common/logging/ESLogger.java
deleted file mode 100644
index b2a2aa333c..0000000000
--- a/core/src/main/java/org/elasticsearch/common/logging/ESLogger.java
+++ /dev/null
@@ -1,208 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common.logging;
-
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
-import org.elasticsearch.common.SuppressLoggerChecks;
-
-import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
-
-/**
- * Elasticsearch's logger wrapper.
- */
-@SuppressLoggerChecks(reason = "safely delegates to itself")
-public class ESLogger {
- private static final String FQCN = ESLogger.class.getName();
-
- private final String prefix;
- private final Logger logger;
-
- public ESLogger(String prefix, Logger logger) {
- this.prefix = prefix;
- this.logger = logger;
- }
-
- /**
- * The prefix of the log.
- */
- public String getPrefix() {
- return this.prefix;
- }
-
- /**
- * Fetch the underlying logger so we can look at it. Only exists for testing.
- */
- Logger getLogger() {
- return logger;
- }
-
- /**
- * Set the level of the logger. If the new level is null, the logger will inherit it's level from its nearest ancestor with a non-null
- * level.
- */
- public void setLevel(String level) {
- if (level == null) {
- logger.setLevel(null);
- } else if ("error".equalsIgnoreCase(level)) {
- logger.setLevel(Level.ERROR);
- } else if ("warn".equalsIgnoreCase(level)) {
- logger.setLevel(Level.WARN);
- } else if ("info".equalsIgnoreCase(level)) {
- logger.setLevel(Level.INFO);
- } else if ("debug".equalsIgnoreCase(level)) {
- logger.setLevel(Level.DEBUG);
- } else if ("trace".equalsIgnoreCase(level)) {
- logger.setLevel(Level.TRACE);
- }
- }
-
- /**
- * The level of this logger. If null then the logger is inheriting it's level from its nearest ancestor with a non-null level.
- */
- public String getLevel() {
- if (logger.getLevel() == null) {
- return null;
- }
- return logger.getLevel().toString();
- }
-
- /**
- * The name of this logger.
- */
- public String getName() {
- return logger.getName();
- }
-
- /**
- * Returns {@code true} if a TRACE level message should be logged.
- */
- public boolean isTraceEnabled() {
- return logger.isTraceEnabled();
- }
-
- /**
- * Returns {@code true} if a DEBUG level message should be logged.
- */
- public boolean isDebugEnabled() {
- return logger.isDebugEnabled();
- }
-
- /**
- * Returns {@code true} if an INFO level message should be logged.
- */
- public boolean isInfoEnabled() {
- return logger.isInfoEnabled();
- }
-
- /**
- * Returns {@code true} if a WARN level message should be logged.
- */
- public boolean isWarnEnabled() {
- return logger.isEnabledFor(Level.WARN);
- }
-
- /**
- * Returns {@code true} if an ERROR level message should be logged.
- */
- public boolean isErrorEnabled() {
- return logger.isEnabledFor(Level.ERROR);
- }
-
- /**
- * Logs a TRACE level message.
- */
- public void trace(String msg, Object... params) {
- trace(msg, null, params);
- }
-
- /**
- * Logs a TRACE level message with an exception.
- */
- public void trace(String msg, Throwable cause, Object... params) {
- if (isTraceEnabled()) {
- logger.log(FQCN, Level.TRACE, format(prefix, msg, params), cause);
- }
- }
-
- /**
- * Logs a DEBUG level message.
- */
- public void debug(String msg, Object... params) {
- debug(msg, null, params);
- }
-
- /**
- * Logs a DEBUG level message with an exception.
- */
- public void debug(String msg, Throwable cause, Object... params) {
- if (isDebugEnabled()) {
- logger.log(FQCN, Level.DEBUG, format(prefix, msg, params), cause);
- }
- }
-
- /**
- * Logs a INFO level message.
- */
- public void info(String msg, Object... params) {
- info(msg, null, params);
- }
-
- /**
- * Logs a INFO level message with an exception.
- */
- public void info(String msg, Throwable cause, Object... params) {
- if (isInfoEnabled()) {
- logger.log(FQCN, Level.INFO, format(prefix, msg, params), cause);
- }
- }
-
- /**
- * Logs a WARN level message.
- */
- public void warn(String msg, Object... params) {
- warn(msg, null, params);
- }
-
- /**
- * Logs a WARN level message with an exception.
- */
- public void warn(String msg, Throwable cause, Object... params) {
- if (isWarnEnabled()) {
- logger.log(FQCN, Level.WARN, format(prefix, msg, params), cause);
- }
- }
-
- /**
- * Logs a ERROR level message.
- */
- public void error(String msg, Object... params) {
- error(msg, null, params);
- }
-
- /**
- * Logs a ERROR level message with an exception.
- */
- public void error(String msg, Throwable cause, Object... params) {
- if (isErrorEnabled()) {
- logger.log(FQCN, Level.ERROR, format(prefix, msg, params), cause);
- }
- }
-}
diff --git a/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java b/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java
index c0951c47df..853df3d31a 100644
--- a/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java
+++ b/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java
@@ -19,31 +19,46 @@
package org.elasticsearch.common.logging;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.MessageFactory;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import java.util.Locale;
+import java.util.function.Function;
/**
- * Factory to get {@link ESLogger}s
+ * Factory to get {@link Logger}s
*/
public abstract class ESLoggerFactory {
- public static final Setting<LogLevel> LOG_DEFAULT_LEVEL_SETTING =
- new Setting<>("logger.level", LogLevel.INFO.name(), LogLevel::parse, Property.NodeScope);
- public static final Setting<LogLevel> LOG_LEVEL_SETTING =
- Setting.prefixKeySetting("logger.", LogLevel.INFO.name(), LogLevel::parse,
+ public static final Setting<Level> LOG_DEFAULT_LEVEL_SETTING =
+ new Setting<>("logger.level", Level.INFO.name(), Level::valueOf, Property.NodeScope);
+ public static final Setting<Level> LOG_LEVEL_SETTING =
+ Setting.prefixKeySetting("logger.", Level.INFO.name(), Level::valueOf,
Property.Dynamic, Property.NodeScope);
- public static ESLogger getLogger(String prefix, String name) {
- prefix = prefix == null ? null : prefix.intern();
+ public static Logger getLogger(String prefix, String name) {
name = name.intern();
- return new ESLogger(prefix, Logger.getLogger(name));
+ final Logger logger = getLogger(new PrefixMessageFactory(), name);
+ final MessageFactory factory = logger.getMessageFactory();
+ // in some cases, we initialize the logger before we are ready to set the prefix
+ // we can not re-initialize the logger, so the above getLogger might return an existing
+ // instance without the prefix set; thus, we hack around this by resetting the prefix
+ if (prefix != null && factory instanceof PrefixMessageFactory) {
+ ((PrefixMessageFactory) factory).setPrefix(prefix.intern());
+ }
+ return logger;
+ }
+
+ public static Logger getLogger(MessageFactory messageFactory, String name) {
+ return LogManager.getLogger(name, messageFactory);
}
- public static ESLogger getLogger(String name) {
- return getLogger(null, name);
+ public static Logger getLogger(String name) {
+ return getLogger((String)null, name);
}
public static DeprecationLogger getDeprecationLogger(String name) {
@@ -54,18 +69,12 @@ public abstract class ESLoggerFactory {
return new DeprecationLogger(getLogger(prefix, name));
}
- public static ESLogger getRootLogger() {
- return new ESLogger(null, Logger.getRootLogger());
+ public static Logger getRootLogger() {
+ return LogManager.getRootLogger();
}
private ESLoggerFactory() {
// Utility class can't be built.
}
- public enum LogLevel {
- WARN, TRACE, INFO, DEBUG, ERROR;
- public static LogLevel parse(String level) {
- return valueOf(level.toUpperCase(Locale.ROOT));
- }
- }
}
diff --git a/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java b/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java
index e203999d33..dbd31637ae 100644
--- a/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java
+++ b/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java
@@ -19,12 +19,20 @@
package org.elasticsearch.common.logging;
-import org.apache.log4j.Java9Hack;
-import org.apache.log4j.PropertyConfigurator;
-import org.apache.lucene.util.Constants;
-import org.elasticsearch.ElasticsearchException;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.config.AbstractConfiguration;
+import org.apache.logging.log4j.core.config.Configurator;
+import org.apache.logging.log4j.core.config.builder.api.ConfigurationBuilder;
+import org.apache.logging.log4j.core.config.builder.api.ConfigurationBuilderFactory;
+import org.apache.logging.log4j.core.config.builder.impl.BuiltConfiguration;
+import org.apache.logging.log4j.core.config.composite.CompositeConfiguration;
+import org.apache.logging.log4j.core.config.properties.PropertiesConfiguration;
+import org.apache.logging.log4j.core.config.properties.PropertiesConfigurationFactory;
+import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.env.Environment;
import java.io.IOException;
@@ -34,144 +42,62 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
-import java.util.Arrays;
+import java.util.ArrayList;
import java.util.EnumSet;
-import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.Properties;
import java.util.Set;
-import static java.util.Collections.unmodifiableMap;
-import static org.elasticsearch.common.Strings.cleanPath;
-
-/**
- * Configures log4j with a special set of replacements.
- */
public class LogConfigurator {
- static final List<String> ALLOWED_SUFFIXES = Arrays.asList(".yml", ".yaml", ".json", ".properties");
-
- private static final Map<String, String> REPLACEMENTS;
static {
- Map<String, String> replacements = new HashMap<>();
- // Appenders
- replacements.put("async", "org.apache.log4j.AsyncAppender");
- replacements.put("console", ConsoleAppender.class.getName());
- replacements.put("dailyRollingFile", "org.apache.log4j.DailyRollingFileAppender");
- replacements.put("externallyRolledFile", "org.apache.log4j.ExternallyRolledFileAppender");
- replacements.put("extrasRollingFile", "org.apache.log4j.rolling.RollingFileAppender");
- replacements.put("file", "org.apache.log4j.FileAppender");
- replacements.put("jdbc", "org.apache.log4j.jdbc.JDBCAppender");
- replacements.put("jms", "org.apache.log4j.net.JMSAppender");
- replacements.put("lf5", "org.apache.log4j.lf5.LF5Appender");
- replacements.put("ntevent", "org.apache.log4j.nt.NTEventLogAppender");
- replacements.put("null", "org.apache.log4j.NullAppender");
- replacements.put("rollingFile", "org.apache.log4j.RollingFileAppender");
- replacements.put("smtp", "org.apache.log4j.net.SMTPAppender");
- replacements.put("socket", "org.apache.log4j.net.SocketAppender");
- replacements.put("socketHub", "org.apache.log4j.net.SocketHubAppender");
- replacements.put("syslog", "org.apache.log4j.net.SyslogAppender");
- replacements.put("telnet", "org.apache.log4j.net.TelnetAppender");
- replacements.put("terminal", TerminalAppender.class.getName());
-
- // Policies
- replacements.put("timeBased", "org.apache.log4j.rolling.TimeBasedRollingPolicy");
- replacements.put("sizeBased", "org.apache.log4j.rolling.SizeBasedTriggeringPolicy");
-
- // Layouts
- replacements.put("simple", "org.apache.log4j.SimpleLayout");
- replacements.put("html", "org.apache.log4j.HTMLLayout");
- replacements.put("pattern", "org.apache.log4j.PatternLayout");
- replacements.put("consolePattern", "org.apache.log4j.PatternLayout");
- replacements.put("enhancedPattern", "org.apache.log4j.EnhancedPatternLayout");
- replacements.put("ttcc", "org.apache.log4j.TTCCLayout");
- replacements.put("xml", "org.apache.log4j.XMLLayout");
- REPLACEMENTS = unmodifiableMap(replacements);
-
- if (Constants.JRE_IS_MINIMUM_JAVA9) {
- Java9Hack.fixLog4j();
- }
+ // we initialize the status logger immediately otherwise Log4j will complain when we try to get the context
+ final ConfigurationBuilder<BuiltConfiguration> builder = ConfigurationBuilderFactory.newConfigurationBuilder();
+ builder.setStatusLevel(Level.ERROR);
+ Configurator.initialize(builder.build());
}
- private static boolean loaded;
-
/**
- * Consolidates settings and converts them into actual log4j settings, then initializes loggers and appenders.
- * @param settings custom settings that should be applied
- * @param resolveConfig controls whether the logging conf file should be read too or not.
+ * for triggering class initialization
*/
- public static void configure(Settings settings, boolean resolveConfig) {
- if (loaded) {
- return;
- }
- loaded = true;
- // TODO: this is partly a copy of InternalSettingsPreparer...we should pass in Environment and not do all this...
- Environment environment = new Environment(settings);
-
- Settings.Builder settingsBuilder = Settings.builder();
- if (resolveConfig) {
- resolveConfig(environment, settingsBuilder);
- }
-
- // add custom settings after config was added so that they are not overwritten by config
- settingsBuilder.put(settings);
- settingsBuilder.replacePropertyPlaceholders();
- Properties props = new Properties();
- for (Map.Entry<String, String> entry : settingsBuilder.build().getAsMap().entrySet()) {
- String key = "log4j." + entry.getKey();
- String value = entry.getValue();
- value = REPLACEMENTS.getOrDefault(value, value);
- if (key.endsWith(".value")) {
- props.setProperty(key.substring(0, key.length() - ".value".length()), value);
- } else if (key.endsWith(".type")) {
- props.setProperty(key.substring(0, key.length() - ".type".length()), value);
- } else {
- props.setProperty(key, value);
- }
- }
- // ensure explicit path to logs dir exists
- props.setProperty("log4j.path.logs", cleanPath(environment.logsFile().toAbsolutePath().toString()));
- PropertyConfigurator.configure(props);
+ public static void init() {
}
- /**
- * sets the loaded flag to false so that logging configuration can be
- * overridden. Should only be used in tests.
- */
- static void reset() {
- loaded = false;
- }
+ public static void configure(final Environment environment) throws IOException {
+ final Settings settings = environment.settings();
+
+ setLogConfigurationSystemProperty(environment, settings);
- static void resolveConfig(Environment env, final Settings.Builder settingsBuilder) {
+ final LoggerContext context = (LoggerContext) LogManager.getContext(false);
- try {
- Set<FileVisitOption> options = EnumSet.of(FileVisitOption.FOLLOW_LINKS);
- Files.walkFileTree(env.configFile(), options, Integer.MAX_VALUE, new SimpleFileVisitor<Path>() {
- @Override
- public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
- String fileName = file.getFileName().toString();
- if (fileName.startsWith("logging.")) {
- for (String allowedSuffix : ALLOWED_SUFFIXES) {
- if (fileName.endsWith(allowedSuffix)) {
- loadConfig(file, settingsBuilder);
- break;
- }
- }
- }
- return FileVisitResult.CONTINUE;
+ final Set<FileVisitOption> options = EnumSet.of(FileVisitOption.FOLLOW_LINKS);
+ final List<AbstractConfiguration> configurations = new ArrayList<>();
+ final PropertiesConfigurationFactory factory = new PropertiesConfigurationFactory();
+ Files.walkFileTree(environment.configFile(), options, Integer.MAX_VALUE, new SimpleFileVisitor<Path>() {
+ @Override
+ public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
+ if (file.getFileName().toString().equals("log4j2.properties")) {
+ configurations.add((PropertiesConfiguration) factory.getConfiguration(file.toString(), file.toUri()));
}
- });
- } catch (IOException ioe) {
- throw new ElasticsearchException("Failed to load logging configuration", ioe);
+ return FileVisitResult.CONTINUE;
+ }
+ });
+ context.start(new CompositeConfiguration(configurations));
+
+ if (ESLoggerFactory.LOG_DEFAULT_LEVEL_SETTING.exists(settings)) {
+ Loggers.setLevel(ESLoggerFactory.getRootLogger(), ESLoggerFactory.LOG_DEFAULT_LEVEL_SETTING.get(settings));
}
- }
- static void loadConfig(Path file, Settings.Builder settingsBuilder) {
- try {
- settingsBuilder.loadFromPath(file);
- } catch (IOException | SettingsException | NoClassDefFoundError e) {
- // ignore
+ final Map<String, String> levels = settings.filter(ESLoggerFactory.LOG_LEVEL_SETTING::match).getAsMap();
+ for (String key : levels.keySet()) {
+ final Level level = ESLoggerFactory.LOG_LEVEL_SETTING.getConcreteSetting(key).get(settings);
+ Loggers.setLevel(Loggers.getLogger(key.substring("logger.".length())), level);
}
}
+
+    @SuppressForbidden(reason = "sets system property for logging configuration")
+ private static void setLogConfigurationSystemProperty(Environment environment, Settings settings) {
+ System.setProperty("es.logs", environment.logsFile().resolve(ClusterName.CLUSTER_NAME_SETTING.get(settings).value()).toString());
+ }
+
}
diff --git a/core/src/main/java/org/elasticsearch/common/logging/Loggers.java b/core/src/main/java/org/elasticsearch/common/logging/Loggers.java
index 4a938e38a2..fb96639fb1 100644
--- a/core/src/main/java/org/elasticsearch/common/logging/Loggers.java
+++ b/core/src/main/java/org/elasticsearch/common/logging/Loggers.java
@@ -19,15 +19,19 @@
package org.elasticsearch.common.logging;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.config.Configuration;
+import org.apache.logging.log4j.core.config.Configurator;
+import org.apache.logging.log4j.core.config.LoggerConfig;
+import org.apache.logging.log4j.message.MessageFactory;
import org.elasticsearch.common.Classes;
-import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.node.Node;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
@@ -39,6 +43,11 @@ import static org.elasticsearch.common.util.CollectionUtils.asArrayList;
*/
public class Loggers {
+ static {
+ // ensure that the status logger is configured before we touch any loggers
+ LogConfigurator.init();
+ }
+
private static final String commonPrefix = System.getProperty("es.logger.prefix", "org.elasticsearch.");
public static final String SPACE = " ";
@@ -57,51 +66,29 @@ public class Loggers {
return consoleLoggingEnabled;
}
- public static ESLogger getLogger(Class<?> clazz, Settings settings, ShardId shardId, String... prefixes) {
+ public static Logger getLogger(Class<?> clazz, Settings settings, ShardId shardId, String... prefixes) {
return getLogger(clazz, settings, shardId.getIndex(), asArrayList(Integer.toString(shardId.id()), prefixes).toArray(new String[0]));
}
/**
- * Just like {@link #getLogger(Class, org.elasticsearch.common.settings.Settings,ShardId,String...)} but String loggerName instead of
+ * Just like {@link #getLogger(Class, org.elasticsearch.common.settings.Settings, ShardId, String...)} but String loggerName instead of
* Class.
*/
- public static ESLogger getLogger(String loggerName, Settings settings, ShardId shardId, String... prefixes) {
+ public static Logger getLogger(String loggerName, Settings settings, ShardId shardId, String... prefixes) {
return getLogger(loggerName, settings,
- asArrayList(shardId.getIndexName(), Integer.toString(shardId.id()), prefixes).toArray(new String[0]));
+ asArrayList(shardId.getIndexName(), Integer.toString(shardId.id()), prefixes).toArray(new String[0]));
}
- public static ESLogger getLogger(Class<?> clazz, Settings settings, Index index, String... prefixes) {
+ public static Logger getLogger(Class<?> clazz, Settings settings, Index index, String... prefixes) {
return getLogger(clazz, settings, asArrayList(SPACE, index.getName(), prefixes).toArray(new String[0]));
}
- public static ESLogger getLogger(Class<?> clazz, Settings settings, String... prefixes) {
+ public static Logger getLogger(Class<?> clazz, Settings settings, String... prefixes) {
return getLogger(buildClassLoggerName(clazz), settings, prefixes);
}
- @SuppressForbidden(reason = "using localhost for logging on which host it is is fine")
- private static InetAddress getHostAddress() {
- try {
- return InetAddress.getLocalHost();
- } catch (UnknownHostException e) {
- return null;
- }
- }
-
- @SuppressForbidden(reason = "do not know what this method does")
- public static ESLogger getLogger(String loggerName, Settings settings, String... prefixes) {
+ public static Logger getLogger(String loggerName, Settings settings, String... prefixes) {
List<String> prefixesList = new ArrayList<>();
- if (settings.getAsBoolean("logger.logHostAddress", false)) {
- final InetAddress addr = getHostAddress();
- if (addr != null) {
- prefixesList.add(addr.getHostAddress());
- }
- }
- if (settings.getAsBoolean("logger.logHostName", false)) {
- final InetAddress addr = getHostAddress();
- if (addr != null) {
- prefixesList.add(addr.getHostName());
- }
- }
if (Node.NODE_NAME_SETTING.exists(settings)) {
prefixesList.add(Node.NODE_NAME_SETTING.get(settings));
}
@@ -111,23 +98,23 @@ public class Loggers {
return getLogger(getLoggerName(loggerName), prefixesList.toArray(new String[prefixesList.size()]));
}
- public static ESLogger getLogger(ESLogger parentLogger, String s) {
- return ESLoggerFactory.getLogger(parentLogger.getPrefix(), getLoggerName(parentLogger.getName() + s));
+ public static Logger getLogger(Logger parentLogger, String s) {
+ return ESLoggerFactory.getLogger(parentLogger.<MessageFactory>getMessageFactory(), getLoggerName(parentLogger.getName() + s));
}
- public static ESLogger getLogger(String s) {
+ public static Logger getLogger(String s) {
return ESLoggerFactory.getLogger(getLoggerName(s));
}
- public static ESLogger getLogger(Class<?> clazz) {
+ public static Logger getLogger(Class<?> clazz) {
return ESLoggerFactory.getLogger(getLoggerName(buildClassLoggerName(clazz)));
}
- public static ESLogger getLogger(Class<?> clazz, String... prefixes) {
+ public static Logger getLogger(Class<?> clazz, String... prefixes) {
return getLogger(buildClassLoggerName(clazz), prefixes);
}
- public static ESLogger getLogger(String name, String... prefixes) {
+ public static Logger getLogger(String name, String... prefixes) {
String prefix = null;
if (prefixes != null && prefixes.length > 0) {
StringBuilder sb = new StringBuilder();
@@ -148,6 +135,32 @@ public class Loggers {
return ESLoggerFactory.getLogger(prefix, getLoggerName(name));
}
+ /**
+     * Set the level of the logger. If the new level is null, the logger will inherit its level from its nearest ancestor with a non-null
+ * level.
+ */
+ public static void setLevel(Logger logger, String level) {
+ final Level l;
+ if (level == null) {
+ l = null;
+ } else {
+ l = Level.valueOf(level);
+ }
+ setLevel(logger, l);
+ }
+
+ public static void setLevel(Logger logger, Level level) {
+ if (!"".equals(logger.getName())) {
+ Configurator.setLevel(logger.getName(), level);
+ } else {
+ LoggerContext ctx = LoggerContext.getContext(false);
+ Configuration config = ctx.getConfiguration();
+ LoggerConfig loggerConfig = config.getLoggerConfig(logger.getName());
+ loggerConfig.setLevel(level);
+ ctx.updateLoggers();
+ }
+ }
+
private static String buildClassLoggerName(Class<?> clazz) {
String name = clazz.getName();
if (name.startsWith("org.elasticsearch.")) {
@@ -162,4 +175,5 @@ public class Loggers {
}
return commonPrefix + name;
}
+
}
diff --git a/core/src/main/java/org/elasticsearch/common/logging/PrefixMessageFactory.java b/core/src/main/java/org/elasticsearch/common/logging/PrefixMessageFactory.java
new file mode 100644
index 0000000000..a141ceb75a
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/common/logging/PrefixMessageFactory.java
@@ -0,0 +1,221 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.logging;
+
+import org.apache.logging.log4j.message.Message;
+import org.apache.logging.log4j.message.MessageFactory2;
+import org.apache.logging.log4j.message.ObjectMessage;
+import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.message.SimpleMessage;
+
+public class PrefixMessageFactory implements MessageFactory2 {
+
+ private String prefix = "";
+
+ public String getPrefix() {
+ return prefix;
+ }
+
+ public void setPrefix(String prefix) {
+ this.prefix = prefix;
+ }
+
+ @Override
+ public Message newMessage(Object message) {
+ return new PrefixObjectMessage(prefix, message);
+ }
+
+ private static class PrefixObjectMessage extends ObjectMessage {
+
+ private final String prefix;
+ private final Object object;
+ private String prefixObjectString;
+
+ private PrefixObjectMessage(String prefix, Object object) {
+ super(object);
+ this.prefix = prefix;
+ this.object = object;
+ }
+
+ @Override
+ public String getFormattedMessage() {
+ if (prefixObjectString == null) {
+ prefixObjectString = prefix + super.getFormattedMessage();
+ }
+ return prefixObjectString;
+ }
+
+ @Override
+ public void formatTo(StringBuilder buffer) {
+ buffer.append(prefix);
+ super.formatTo(buffer);
+ }
+
+ @Override
+ public Object[] getParameters() {
+ return new Object[]{prefix, object};
+ }
+
+ }
+
+ @Override
+ public Message newMessage(String message) {
+ return new PrefixSimpleMessage(prefix, message);
+ }
+
+ private static class PrefixSimpleMessage extends SimpleMessage {
+
+ private final String prefix;
+ private String prefixMessage;
+
+ PrefixSimpleMessage(String prefix, String message) {
+ super(message);
+ this.prefix = prefix;
+ }
+
+ PrefixSimpleMessage(String prefix, CharSequence charSequence) {
+ super(charSequence);
+ this.prefix = prefix;
+ }
+
+ @Override
+ public String getFormattedMessage() {
+ if (prefixMessage == null) {
+ prefixMessage = prefix + super.getFormattedMessage();
+ }
+ return prefixMessage;
+ }
+
+ @Override
+ public void formatTo(StringBuilder buffer) {
+ buffer.append(prefix);
+ super.formatTo(buffer);
+ }
+
+ @Override
+ public int length() {
+ return prefixMessage.length();
+ }
+
+ @Override
+ public char charAt(int index) {
+ return prefixMessage.charAt(index);
+ }
+
+ @Override
+ public CharSequence subSequence(int start, int end) {
+ return prefixMessage.subSequence(start, end);
+ }
+
+ }
+
+ @Override
+ public Message newMessage(String message, Object... params) {
+ return new PrefixParameterizedMessage(prefix, message, params);
+ }
+
+ private static class PrefixParameterizedMessage extends ParameterizedMessage {
+
+ private static ThreadLocal<StringBuilder> threadLocalStringBuilder = ThreadLocal.withInitial(StringBuilder::new);
+
+ private final String prefix;
+ private String formattedMessage;
+
+ private PrefixParameterizedMessage(String prefix, String messagePattern, Object... arguments) {
+ super(messagePattern, arguments);
+ this.prefix = prefix;
+ }
+
+ @Override
+ public String getFormattedMessage() {
+ if (formattedMessage == null) {
+ final StringBuilder buffer = threadLocalStringBuilder.get();
+ buffer.setLength(0);
+ formatTo(buffer);
+ formattedMessage = buffer.toString();
+ }
+ return formattedMessage;
+ }
+
+ @Override
+ public void formatTo(StringBuilder buffer) {
+ buffer.append(prefix);
+ super.formatTo(buffer);
+ }
+
+ }
+
+ @Override
+ public Message newMessage(CharSequence charSequence) {
+ return new PrefixSimpleMessage(prefix, charSequence);
+ }
+
+ @Override
+ public Message newMessage(String message, Object p0) {
+ return new PrefixParameterizedMessage(prefix, message, p0);
+ }
+
+ @Override
+ public Message newMessage(String message, Object p0, Object p1) {
+ return new PrefixParameterizedMessage(prefix, message, p0, p1);
+ }
+
+ @Override
+ public Message newMessage(String message, Object p0, Object p1, Object p2) {
+ return new PrefixParameterizedMessage(prefix, message, p0, p1, p2);
+ }
+
+ @Override
+ public Message newMessage(String message, Object p0, Object p1, Object p2, Object p3) {
+ return new PrefixParameterizedMessage(prefix, message, p0, p1, p2, p3);
+ }
+
+ @Override
+ public Message newMessage(String message, Object p0, Object p1, Object p2, Object p3, Object p4) {
+ return new PrefixParameterizedMessage(prefix, message, p0, p1, p2, p3, p4);
+ }
+
+ @Override
+ public Message newMessage(String message, Object p0, Object p1, Object p2, Object p3, Object p4, Object p5) {
+ return new PrefixParameterizedMessage(prefix, message, p0, p1, p2, p3, p4, p5);
+ }
+
+ @Override
+ public Message newMessage(String message, Object p0, Object p1, Object p2, Object p3, Object p4, Object p5, Object p6) {
+ return new PrefixParameterizedMessage(prefix, message, p0, p1, p2, p3, p4, p5, p6);
+ }
+
+ @Override
+ public Message newMessage(String message, Object p0, Object p1, Object p2, Object p3, Object p4, Object p5, Object p6, Object p7) {
+ return new PrefixParameterizedMessage(prefix, message, p0, p1, p2, p3, p4, p5, p6, p7);
+ }
+
+ @Override
+ public Message newMessage(
+ String message, Object p0, Object p1, Object p2, Object p3, Object p4, Object p5, Object p6, Object p7, Object p8) {
+ return new PrefixParameterizedMessage(prefix, message, p0, p1, p2, p3, p4, p5, p6, p7, p8);
+ }
+
+ @Override
+ public Message newMessage(
+ String message, Object p0, Object p1, Object p2, Object p3, Object p4, Object p5, Object p6, Object p7, Object p8, Object p9) {
+ return new PrefixParameterizedMessage(prefix, message, p0, p1, p2, p3, p4, p5, p6, p7, p8, p9);
+ }
+}
diff --git a/core/src/main/java/org/elasticsearch/common/logging/TerminalAppender.java b/core/src/main/java/org/elasticsearch/common/logging/TerminalAppender.java
deleted file mode 100644
index e967ad9d79..0000000000
--- a/core/src/main/java/org/elasticsearch/common/logging/TerminalAppender.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-package org.elasticsearch.common.logging;
-
-import org.apache.log4j.AppenderSkeleton;
-import org.apache.log4j.spi.LoggingEvent;
-import org.elasticsearch.cli.Terminal;
-
-/**
- * TerminalAppender logs event to Terminal.DEFAULT. It is used for example by the PluginCli.
- * */
-public class TerminalAppender extends AppenderSkeleton {
- @Override
- protected void append(LoggingEvent event) {
- Terminal.DEFAULT.println(event.getRenderedMessage());
- }
-
- @Override
- public void close() {
- }
-
- @Override
- public boolean requiresLayout() {
- return false;
- }
-}
diff --git a/core/src/main/java/org/elasticsearch/common/lucene/LoggerInfoStream.java b/core/src/main/java/org/elasticsearch/common/lucene/LoggerInfoStream.java
index 0000a138bd..c4ef2ef8c7 100644
--- a/core/src/main/java/org/elasticsearch/common/lucene/LoggerInfoStream.java
+++ b/core/src/main/java/org/elasticsearch/common/lucene/LoggerInfoStream.java
@@ -19,8 +19,8 @@
package org.elasticsearch.common.lucene;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.InfoStream;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
/** An InfoStream (for Lucene's IndexWriter) that redirects
@@ -30,12 +30,12 @@ public final class LoggerInfoStream extends InfoStream {
/** Used for component-specific logging: */
/** Logger for everything */
- private final ESLogger logger;
+ private final Logger logger;
/** Logger for IndexFileDeleter */
- private final ESLogger ifdLogger;
+ private final Logger ifdLogger;
- public LoggerInfoStream(ESLogger parentLogger) {
+ public LoggerInfoStream(Logger parentLogger) {
logger = Loggers.getLogger(parentLogger, ".lucene.iw");
ifdLogger = Loggers.getLogger(parentLogger, ".lucene.iw.ifd");
}
@@ -52,7 +52,7 @@ public final class LoggerInfoStream extends InfoStream {
return getLogger(component).isTraceEnabled() && component.equals("TP") == false;
}
- private ESLogger getLogger(String component) {
+ private Logger getLogger(String component) {
if (component.equals("IFD")) {
return ifdLogger;
} else {
diff --git a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java
index d555e199d0..02a097f04e 100644
--- a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java
+++ b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java
@@ -19,6 +19,8 @@
package org.elasticsearch.common.lucene;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.codecs.CodecUtil;
@@ -67,7 +69,6 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.NamedAnalyzer;
@@ -104,14 +105,14 @@ public class Lucene {
public static final TopDocs EMPTY_TOP_DOCS = new TopDocs(0, EMPTY_SCORE_DOCS, 0.0f);
- public static Version parseVersion(@Nullable String version, Version defaultVersion, ESLogger logger) {
+ public static Version parseVersion(@Nullable String version, Version defaultVersion, Logger logger) {
if (version == null) {
return defaultVersion;
}
try {
return Version.parse(version);
} catch (ParseException e) {
- logger.warn("no version match {}, default to {}", e, version, defaultVersion);
+ logger.warn(new ParameterizedMessage("no version match {}, default to {}", version, defaultVersion), e);
return defaultVersion;
}
}
diff --git a/core/src/main/java/org/elasticsearch/common/network/IfConfig.java b/core/src/main/java/org/elasticsearch/common/network/IfConfig.java
index e32129c938..7fd4cc6d2f 100644
--- a/core/src/main/java/org/elasticsearch/common/network/IfConfig.java
+++ b/core/src/main/java/org/elasticsearch/common/network/IfConfig.java
@@ -19,7 +19,7 @@
package org.elasticsearch.common.network;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import java.io.IOException;
@@ -36,7 +36,7 @@ import java.util.Locale;
*/
final class IfConfig {
- private static final ESLogger logger = Loggers.getLogger(IfConfig.class);
+ private static final Logger logger = Loggers.getLogger(IfConfig.class);
private static final String INDENT = " ";
/** log interface configuration at debug level, if its enabled */
diff --git a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java
index 152a5629dd..3f8f23953d 100644
--- a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java
@@ -19,8 +19,8 @@
package org.elasticsearch.common.settings;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.search.spell.LevensteinDistance;
-import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.collect.Tuple;
@@ -35,7 +35,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
-import java.util.SortedSet;
import java.util.TreeMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.function.BiConsumer;
@@ -129,7 +128,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
settingUpdater.getValue(current, previous);
} catch (RuntimeException ex) {
exceptions.add(ex);
- logger.debug("failed to prepareCommit settings for [{}]", ex, settingUpdater);
+ logger.debug(new ParameterizedMessage("failed to prepareCommit settings for [{}]", settingUpdater), ex);
}
}
// here we are exhaustive and record all settings that failed.
@@ -157,7 +156,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
try {
applyRunnables.add(settingUpdater.updater(current, previous));
} catch (Exception ex) {
- logger.warn("failed to prepareCommit settings for [{}]", ex, settingUpdater);
+ logger.warn(new ParameterizedMessage("failed to prepareCommit settings for [{}]", settingUpdater), ex);
throw ex;
}
}
@@ -521,7 +520,8 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
}
} catch (IllegalArgumentException ex) {
changed = true;
- logger.warn("found invalid setting: {} value: {} - archiving",ex , entry.getKey(), entry.getValue());
+ logger.warn(
+ new ParameterizedMessage("found invalid setting: {} value: {} - archiving", entry.getKey(), entry.getValue()), ex);
/*
* We put them back in here such that tools can check from the outside if there are any indices with broken settings. The
* setting can remain there but we want users to be aware that some of their setting are broken and they can research why
diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java
index 094530dac6..1ce156b853 100644
--- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java
@@ -45,6 +45,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.SnapshotInProgressAl
import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.logging.ESLoggerFactory;
+import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Setting.Property;
@@ -132,7 +133,7 @@ public final class ClusterSettings extends AbstractScopedSettings {
if (ESLoggerFactory.LOG_LEVEL_SETTING.getConcreteSetting(key).exists(settings) == false) {
builder.putNull(key);
} else {
- builder.put(key, ESLoggerFactory.LOG_LEVEL_SETTING.getConcreteSetting(key).get(settings).name());
+ builder.put(key, ESLoggerFactory.LOG_LEVEL_SETTING.getConcreteSetting(key).get(settings));
}
}
}
@@ -144,12 +145,18 @@ public final class ClusterSettings extends AbstractScopedSettings {
for (String key : value.getAsMap().keySet()) {
assert loggerPredicate.test(key);
String component = key.substring("logger.".length());
+ if ("level".equals(component)) {
+ continue;
+ }
if ("_root".equals(component)) {
final String rootLevel = value.get(key);
- ESLoggerFactory.getRootLogger().setLevel(rootLevel == null ? ESLoggerFactory.LOG_DEFAULT_LEVEL_SETTING.get(settings)
- .name() : rootLevel);
+ if (rootLevel == null) {
+ Loggers.setLevel(ESLoggerFactory.getRootLogger(), ESLoggerFactory.LOG_DEFAULT_LEVEL_SETTING.get(settings));
+ } else {
+ Loggers.setLevel(ESLoggerFactory.getRootLogger(), rootLevel);
+ }
} else {
- ESLoggerFactory.getLogger(component).setLevel(value.get(key));
+ Loggers.setLevel(ESLoggerFactory.getLogger(component), value.get(key));
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java
index 1d67008ab0..12f4805ba2 100644
--- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.common.settings;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.support.ToXContentToBytes;
@@ -26,7 +27,6 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.unit.ByteSizeValue;
@@ -374,7 +374,7 @@ public class Setting<T> extends ToXContentToBytes {
/**
* Build a new updater with a noop validator.
*/
- final AbstractScopedSettings.SettingUpdater<T> newUpdater(Consumer<T> consumer, ESLogger logger) {
+ final AbstractScopedSettings.SettingUpdater<T> newUpdater(Consumer<T> consumer, Logger logger) {
return newUpdater(consumer, logger, (s) -> {});
}
@@ -382,7 +382,7 @@ public class Setting<T> extends ToXContentToBytes {
* Build the updater responsible for validating new values, logging the new
* value, and eventually setting the value where it belongs.
*/
- AbstractScopedSettings.SettingUpdater<T> newUpdater(Consumer<T> consumer, ESLogger logger, Consumer<T> validator) {
+ AbstractScopedSettings.SettingUpdater<T> newUpdater(Consumer<T> consumer, Logger logger, Consumer<T> validator) {
if (isDynamic()) {
return new Updater(consumer, logger, validator);
} else {
@@ -395,7 +395,7 @@ public class Setting<T> extends ToXContentToBytes {
* and its usage for details.
*/
static <A, B> AbstractScopedSettings.SettingUpdater<Tuple<A, B>> compoundUpdater(final BiConsumer<A, B> consumer,
- final Setting<A> aSetting, final Setting<B> bSetting, ESLogger logger) {
+ final Setting<A> aSetting, final Setting<B> bSetting, Logger logger) {
final AbstractScopedSettings.SettingUpdater<A> aSettingUpdater = aSetting.newUpdater(null, logger);
final AbstractScopedSettings.SettingUpdater<B> bSettingUpdater = bSetting.newUpdater(null, logger);
return new AbstractScopedSettings.SettingUpdater<Tuple<A, B>>() {
@@ -424,10 +424,10 @@ public class Setting<T> extends ToXContentToBytes {
private final class Updater implements AbstractScopedSettings.SettingUpdater<T> {
private final Consumer<T> consumer;
- private final ESLogger logger;
+ private final Logger logger;
private final Consumer<T> accept;
- public Updater(Consumer<T> consumer, ESLogger logger, Consumer<T> accept) {
+ public Updater(Consumer<T> consumer, Logger logger, Consumer<T> accept) {
this.consumer = consumer;
this.logger = logger;
this.accept = accept;
@@ -707,7 +707,7 @@ public class Setting<T> extends ToXContentToBytes {
}
@Override
- public AbstractScopedSettings.SettingUpdater<Settings> newUpdater(Consumer<Settings> consumer, ESLogger logger,
+ public AbstractScopedSettings.SettingUpdater<Settings> newUpdater(Consumer<Settings> consumer, Logger logger,
Consumer<Settings> validator) {
if (isDynamic() == false) {
throw new IllegalStateException("setting [" + getKey() + "] is not dynamic");
@@ -831,7 +831,7 @@ public class Setting<T> extends ToXContentToBytes {
}
@Override
- AbstractScopedSettings.SettingUpdater<T> newUpdater(Consumer<T> consumer, ESLogger logger, Consumer<T> validator) {
+ AbstractScopedSettings.SettingUpdater<T> newUpdater(Consumer<T> consumer, Logger logger, Consumer<T> validator) {
throw new UnsupportedOperationException("Affix settings can't be updated. Use #getConcreteSetting for updating.");
}
diff --git a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java
index 5fd19c4fc1..60276ce14f 100644
--- a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java
@@ -19,9 +19,9 @@
package org.elasticsearch.common.settings;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.inject.Binder;
import org.elasticsearch.common.inject.Module;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -51,7 +51,7 @@ public class SettingsModule implements Module {
private final Map<String, Setting<?>> indexSettings = new HashMap<>();
private static final Predicate<String> TRIBE_CLIENT_NODE_SETTINGS_PREDICATE = (s) -> s.startsWith("tribe.")
&& TribeService.TRIBE_SETTING_KEYS.contains(s) == false;
- private final ESLogger logger;
+ private final Logger logger;
private final IndexScopedSettings indexScopedSettings;
private final ClusterSettings clusterSettings;
diff --git a/core/src/main/java/org/elasticsearch/common/util/IndexFolderUpgrader.java b/core/src/main/java/org/elasticsearch/common/util/IndexFolderUpgrader.java
index 221dc23451..7f550bc1c2 100644
--- a/core/src/main/java/org/elasticsearch/common/util/IndexFolderUpgrader.java
+++ b/core/src/main/java/org/elasticsearch/common/util/IndexFolderUpgrader.java
@@ -19,9 +19,9 @@
package org.elasticsearch.common.util;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.NodeEnvironment;
@@ -41,7 +41,7 @@ import java.nio.file.StandardCopyOption;
public class IndexFolderUpgrader {
private final NodeEnvironment nodeEnv;
private final Settings settings;
- private final ESLogger logger = Loggers.getLogger(IndexFolderUpgrader.class);
+ private final Logger logger = Loggers.getLogger(IndexFolderUpgrader.class);
/**
* Creates a new upgrader instance
diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/AbstractLifecycleRunnable.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/AbstractLifecycleRunnable.java
index 30d7e63ec8..e7a38f1eb6 100644
--- a/core/src/main/java/org/elasticsearch/common/util/concurrent/AbstractLifecycleRunnable.java
+++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/AbstractLifecycleRunnable.java
@@ -18,8 +18,8 @@
*/
package org.elasticsearch.common.util.concurrent;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.component.Lifecycle;
-import org.elasticsearch.common.logging.ESLogger;
import java.util.Objects;
@@ -36,7 +36,7 @@ public abstract class AbstractLifecycleRunnable extends AbstractRunnable {
/**
* The service's logger (note: this is passed in!).
*/
- private final ESLogger logger;
+ private final Logger logger;
/**
* {@link AbstractLifecycleRunnable} must be aware of the actual {@code lifecycle} to react properly.
@@ -45,7 +45,7 @@ public abstract class AbstractLifecycleRunnable extends AbstractRunnable {
* @param logger The logger to use when logging
* @throws NullPointerException if any parameter is {@code null}
*/
- public AbstractLifecycleRunnable(Lifecycle lifecycle, ESLogger logger) {
+ public AbstractLifecycleRunnable(Lifecycle lifecycle, Logger logger) {
this.lifecycle = Objects.requireNonNull(lifecycle, "lifecycle must not be null");
this.logger = Objects.requireNonNull(logger, "logger must not be null");
}
diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/AsyncIOProcessor.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/AsyncIOProcessor.java
index d201cf94f9..ad68471041 100644
--- a/core/src/main/java/org/elasticsearch/common/util/concurrent/AsyncIOProcessor.java
+++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/AsyncIOProcessor.java
@@ -18,8 +18,8 @@
*/
package org.elasticsearch.common.util.concurrent;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.collect.Tuple;
-import org.elasticsearch.common.logging.ESLogger;
import java.io.IOException;
import java.util.ArrayList;
@@ -37,11 +37,11 @@ import java.util.function.Consumer;
* might be blocked until other items are processed
*/
public abstract class AsyncIOProcessor<Item> {
- private final ESLogger logger;
+ private final Logger logger;
private final ArrayBlockingQueue<Tuple<Item, Consumer<Exception>>> queue;
private final Semaphore promiseSemaphore = new Semaphore(1);
- protected AsyncIOProcessor(ESLogger logger, int queueSize) {
+ protected AsyncIOProcessor(Logger logger, int queueSize) {
this.logger = logger;
this.queue = new ArrayBlockingQueue<>(queueSize);
}
diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/LoggingRunnable.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/LoggingRunnable.java
index 878645eddf..7c5245bbac 100644
--- a/core/src/main/java/org/elasticsearch/common/util/concurrent/LoggingRunnable.java
+++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/LoggingRunnable.java
@@ -19,17 +19,15 @@
package org.elasticsearch.common.util.concurrent;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
-/**
- */
public class LoggingRunnable implements Runnable {
private final Runnable runnable;
+ private final Logger logger;
- private final ESLogger logger;
-
- public LoggingRunnable(ESLogger logger, Runnable runnable) {
+ public LoggingRunnable(Logger logger, Runnable runnable) {
this.runnable = runnable;
this.logger = logger;
}
@@ -39,7 +37,8 @@ public class LoggingRunnable implements Runnable {
try {
runnable.run();
} catch (Exception e) {
- logger.warn("failed to execute [{}]", e, runnable.toString());
+ logger.warn(new ParameterizedMessage("failed to execute [{}]", runnable.toString()), e);
}
}
+
}
diff --git a/core/src/main/java/org/elasticsearch/discovery/AckClusterStatePublishResponseHandler.java b/core/src/main/java/org/elasticsearch/discovery/AckClusterStatePublishResponseHandler.java
index 26e7898959..71dc5fb3f0 100644
--- a/core/src/main/java/org/elasticsearch/discovery/AckClusterStatePublishResponseHandler.java
+++ b/core/src/main/java/org/elasticsearch/discovery/AckClusterStatePublishResponseHandler.java
@@ -18,8 +18,9 @@
*/
package org.elasticsearch.discovery;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import java.util.Set;
@@ -31,7 +32,7 @@ import java.util.Set;
*/
public class AckClusterStatePublishResponseHandler extends BlockingClusterStatePublishResponseHandler {
- private static final ESLogger logger = ESLoggerFactory.getLogger(AckClusterStatePublishResponseHandler.class.getName());
+ private static final Logger logger = ESLoggerFactory.getLogger(AckClusterStatePublishResponseHandler.class.getName());
private final Discovery.AckListener ackListener;
@@ -68,7 +69,7 @@ public class AckClusterStatePublishResponseHandler extends BlockingClusterStateP
ackListener.onNodeAck(node, e);
} catch (Exception inner) {
inner.addSuppressed(e);
- logger.debug("error while processing ack for node [{}]", inner, node);
+ logger.debug(new ParameterizedMessage("error while processing ack for node [{}]", node), inner);
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java
index c544db4047..d96afd2a93 100644
--- a/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java
+++ b/core/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java
@@ -19,6 +19,7 @@
package org.elasticsearch.discovery.local;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
@@ -144,7 +145,7 @@ public class LocalDiscovery extends AbstractLifecycleComponent implements Discov
@Override
public void onFailure(String source, Exception e) {
- logger.error("unexpected failure during [{}]", e, source);
+ logger.error(new ParameterizedMessage("unexpected failure during [{}]", source), e);
}
});
} else if (firstMaster != null) {
@@ -173,7 +174,7 @@ public class LocalDiscovery extends AbstractLifecycleComponent implements Discov
@Override
public void onFailure(String source, Exception e) {
- logger.error("unexpected failure during [{}]", e, source);
+ logger.error(new ParameterizedMessage("unexpected failure during [{}]", source), e);
}
});
@@ -238,7 +239,7 @@ public class LocalDiscovery extends AbstractLifecycleComponent implements Discov
@Override
public void onFailure(String source, Exception e) {
- logger.error("unexpected failure during [{}]", e, source);
+ logger.error(new ParameterizedMessage("unexpected failure during [{}]", source), e);
}
});
}
@@ -329,7 +330,7 @@ public class LocalDiscovery extends AbstractLifecycleComponent implements Discov
newNodeSpecificClusterState = discovery.lastProcessedClusterState.readDiffFrom(StreamInput.wrap(clusterStateDiffBytes)).apply(discovery.lastProcessedClusterState);
logger.trace("sending diff cluster state version [{}] with size {} to [{}]", clusterState.version(), clusterStateDiffBytes.length, discovery.localNode().getName());
} catch (IncompatibleClusterStateVersionException ex) {
- logger.warn("incompatible cluster state version [{}] - resending complete cluster state", ex, clusterState.version());
+ logger.warn(new ParameterizedMessage("incompatible cluster state version [{}] - resending complete cluster state", clusterState.version()), ex);
}
}
if (newNodeSpecificClusterState == null) {
@@ -380,7 +381,7 @@ public class LocalDiscovery extends AbstractLifecycleComponent implements Discov
@Override
public void onFailure(String source, Exception e) {
- logger.error("unexpected failure during [{}]", e, source);
+ logger.error(new ParameterizedMessage("unexpected failure during [{}]", source), e);
publishResponseHandler.onFailure(discovery.localNode(), e);
}
diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java b/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java
index 1f4f57c4ed..a34f4eb29e 100644
--- a/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java
+++ b/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java
@@ -18,6 +18,8 @@
*/
package org.elasticsearch.discovery.zen;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.store.AlreadyClosedException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterChangedEvent;
@@ -34,7 +36,6 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.unit.TimeValue;
@@ -348,13 +349,13 @@ public class NodeJoinController extends AbstractComponent {
static class JoinTaskListener implements ClusterStateTaskListener {
final List<MembershipAction.JoinCallback> callbacks;
- private final ESLogger logger;
+ private final Logger logger;
- JoinTaskListener(MembershipAction.JoinCallback callback, ESLogger logger) {
+ JoinTaskListener(MembershipAction.JoinCallback callback, Logger logger) {
this(Collections.singletonList(callback), logger);
}
- JoinTaskListener(List<MembershipAction.JoinCallback> callbacks, ESLogger logger) {
+ JoinTaskListener(List<MembershipAction.JoinCallback> callbacks, Logger logger) {
this.callbacks = callbacks;
this.logger = logger;
}
@@ -365,7 +366,7 @@ public class NodeJoinController extends AbstractComponent {
try {
callback.onFailure(e);
} catch (Exception inner) {
- logger.error("error handling task failure [{}]", inner, e);
+ logger.error(new ParameterizedMessage("error handling task failure [{}]", e), inner);
}
}
}
@@ -376,7 +377,7 @@ public class NodeJoinController extends AbstractComponent {
try {
callback.onSuccess();
} catch (Exception e) {
- logger.error("unexpected error during [{}]", e, source);
+ logger.error(new ParameterizedMessage("unexpected error during [{}]", source), e);
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java
index d380b1fd60..59957ffc7a 100644
--- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java
+++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java
@@ -19,6 +19,8 @@
package org.elasticsearch.discovery.zen;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.Version;
@@ -45,7 +47,6 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.internal.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
@@ -259,7 +260,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
try {
membership.sendLeaveRequestBlocking(nodes.getMasterNode(), nodes.getLocalNode(), TimeValue.timeValueSeconds(1));
} catch (Exception e) {
- logger.debug("failed to send leave request to master [{}]", e, nodes.getMasterNode());
+ logger.debug(new ParameterizedMessage("failed to send leave request to master [{}]", nodes.getMasterNode()), e);
}
} else {
// we're master -> let other potential master we left and start a master election now rather then wait for masterFD
@@ -271,7 +272,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
try {
membership.sendLeaveRequest(nodes.getLocalNode(), possibleMaster);
} catch (Exception e) {
- logger.debug("failed to send leave request from master [{}] to possible master [{}]", e, nodes.getMasterNode(), possibleMaster);
+ logger.debug(new ParameterizedMessage("failed to send leave request from master [{}] to possible master [{}]", nodes.getMasterNode(), possibleMaster), e);
}
}
}
@@ -330,7 +331,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
@Override
public void onFailure(String source, Exception e) {
- logger.error("unexpected failure during [{}]", e, source);
+ logger.error(new ParameterizedMessage("unexpected failure during [{}]", source), e);
}
});
@@ -467,7 +468,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
// first, make sure we can connect to the master
transportService.connectToNode(masterNode);
} catch (Exception e) {
- logger.warn("failed to connect to master [{}], retrying...", e, masterNode);
+ logger.warn(new ParameterizedMessage("failed to connect to master [{}], retrying...", masterNode), e);
return false;
}
int joinAttempt = 0; // we retry on illegal state if the master is not yet ready
@@ -487,7 +488,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
}
} else {
if (logger.isTraceEnabled()) {
- logger.trace("failed to send join request to master [{}]", e, masterNode);
+ logger.trace(new ParameterizedMessage("failed to send join request to master [{}]", masterNode), e);
} else {
logger.info("failed to send join request to master [{}], reason [{}]", masterNode, ExceptionsHelper.detailedMessage(e));
}
@@ -509,7 +510,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
private final AllocationService allocationService;
private final ElectMasterService electMasterService;
private final BiFunction<ClusterState, String, ClusterState> rejoin;
- private final ESLogger logger;
+ private final Logger logger;
static class Task {
@@ -539,7 +540,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
final AllocationService allocationService,
final ElectMasterService electMasterService,
final BiFunction<ClusterState, String, ClusterState> rejoin,
- final ESLogger logger) {
+ final Logger logger) {
this.allocationService = allocationService;
this.electMasterService = electMasterService;
this.rejoin = rejoin;
@@ -585,7 +586,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
@Override
public void onFailure(final String source, final Exception e) {
- logger.error("unexpected failure during [{}]", e, source);
+ logger.error(new ParameterizedMessage("unexpected failure during [{}]", source), e);
}
@Override
@@ -657,7 +658,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
@Override
public void onFailure(String source, Exception e) {
- logger.error("unexpected failure during [{}]", e, source);
+ logger.error(new ParameterizedMessage("unexpected failure during [{}]", source), e);
}
@Override
@@ -677,7 +678,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
return;
}
- logger.info("master_left [{}], reason [{}]", cause, masterNode, reason);
+ logger.info(new ParameterizedMessage("master_left [{}], reason [{}]", masterNode, reason), cause);
clusterService.submitStateUpdateTask("master_failed (" + masterNode + ")", new ClusterStateUpdateTask(Priority.IMMEDIATE) {
@@ -706,7 +707,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
@Override
public void onFailure(String source, Exception e) {
- logger.error("unexpected failure during [{}]", e, source);
+ logger.error(new ParameterizedMessage("unexpected failure during [{}]", source), e);
}
@Override
@@ -791,13 +792,13 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
@Override
public void onFailure(String source, Exception e) {
- logger.error("unexpected failure during [{}]", e, source);
+ logger.error(new ParameterizedMessage("unexpected failure during [{}]", source), e);
if (newClusterState != null) {
try {
publishClusterState.pendingStatesQueue().markAsFailed(newClusterState, e);
} catch (Exception inner) {
inner.addSuppressed(e);
- logger.error("unexpected exception while failing [{}]", inner, source);
+ logger.error(new ParameterizedMessage("unexpected exception while failing [{}]", source), inner);
}
}
}
@@ -821,7 +822,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
* If the first condition fails we reject the cluster state and throw an error.
* If the second condition fails we ignore the cluster state.
*/
- public static boolean shouldIgnoreOrRejectNewClusterState(ESLogger logger, ClusterState currentState, ClusterState newClusterState) {
+ public static boolean shouldIgnoreOrRejectNewClusterState(Logger logger, ClusterState currentState, ClusterState newClusterState) {
validateStateIsFromCurrentMaster(logger, currentState.nodes(), newClusterState);
// reject cluster states that are not new from the same master
@@ -845,7 +846,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
* This method checks for this and throws an exception if needed
*/
- public static void validateStateIsFromCurrentMaster(ESLogger logger, DiscoveryNodes currentNodes, ClusterState newClusterState) {
+ public static void validateStateIsFromCurrentMaster(Logger logger, DiscoveryNodes currentNodes, ClusterState newClusterState) {
if (currentNodes.getMasterNodeId() == null) {
return;
}
@@ -880,7 +881,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
try {
membership.sendValidateJoinRequestBlocking(node, state, joinTimeout);
} catch (Exception e) {
- logger.warn("failed to validate incoming join request from node [{}]", e, node);
+ logger.warn(new ParameterizedMessage("failed to validate incoming join request from node [{}]", node), e);
callback.onFailure(new IllegalStateException("failure when sending a validation request to node", e));
return;
}
@@ -964,7 +965,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
}
}
- static List<ZenPing.PingResponse> filterPingResponses(ZenPing.PingResponse[] fullPingResponses, boolean masterElectionIgnoreNonMasters, ESLogger logger) {
+ static List<ZenPing.PingResponse> filterPingResponses(ZenPing.PingResponse[] fullPingResponses, boolean masterElectionIgnoreNonMasters, Logger logger) {
List<ZenPing.PingResponse> pingResponses;
if (masterElectionIgnoreNonMasters) {
pingResponses = Arrays.stream(fullPingResponses).filter(ping -> ping.node().isMasterNode()).collect(Collectors.toList());
@@ -1034,11 +1035,11 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
@Override
public void handleException(TransportException exp) {
- logger.warn("failed to send rejoin request to [{}]", exp, otherMaster);
+ logger.warn(new ParameterizedMessage("failed to send rejoin request to [{}]", otherMaster), exp);
}
});
} catch (Exception e) {
- logger.warn("failed to send rejoin request to [{}]", e, otherMaster);
+ logger.warn(new ParameterizedMessage("failed to send rejoin request to [{}]", otherMaster), e);
}
return localClusterState;
}
@@ -1157,7 +1158,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover
@Override
public void onFailure(String source, Exception e) {
- logger.error("unexpected failure during [{}]", e, source);
+ logger.error(new ParameterizedMessage("unexpected failure during [{}]", source), e);
}
});
}
diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java b/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java
index bca13211c6..a97f8bbbac 100644
--- a/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java
+++ b/core/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java
@@ -19,6 +19,7 @@
package org.elasticsearch.discovery.zen.fd;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
@@ -34,7 +35,6 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.ConnectTransportException;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportException;
@@ -42,6 +42,7 @@ import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestHandler;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportResponse;
+import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
@@ -283,8 +284,13 @@ public class MasterFaultDetection extends FaultDetection {
}
int retryCount = ++MasterFaultDetection.this.retryCount;
- logger.trace("[master] failed to ping [{}], retry [{}] out of [{}]", exp, masterNode, retryCount,
- pingRetryCount);
+ logger.trace(
+ new ParameterizedMessage(
+ "[master] failed to ping [{}], retry [{}] out of [{}]",
+ masterNode,
+ retryCount,
+ pingRetryCount),
+ exp);
if (retryCount >= pingRetryCount) {
logger.debug("[master] failed to ping [{}], tried [{}] times, each with maximum [{}] timeout",
masterNode, pingRetryCount, pingRetryTimeout);
diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/fd/NodesFaultDetection.java b/core/src/main/java/org/elasticsearch/discovery/zen/fd/NodesFaultDetection.java
index 4f17b14ff6..3f01cde45f 100644
--- a/core/src/main/java/org/elasticsearch/discovery/zen/fd/NodesFaultDetection.java
+++ b/core/src/main/java/org/elasticsearch/discovery/zen/fd/NodesFaultDetection.java
@@ -19,6 +19,7 @@
package org.elasticsearch.discovery.zen.fd;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNode;
@@ -28,7 +29,6 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.ConnectTransportException;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportException;
@@ -36,6 +36,7 @@ import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestHandler;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportResponse;
+import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
@@ -166,7 +167,12 @@ public class NodesFaultDetection extends FaultDetection {
}
});
} catch (EsRejectedExecutionException ex) {
- logger.trace("[node ] [{}] ignoring node failure (reason [{}]). Local node is shutting down", ex, node, reason);
+ logger.trace(
+ new ParameterizedMessage(
+ "[node ] [{}] ignoring node failure (reason [{}]). Local node is shutting down",
+ node,
+ reason),
+ ex);
}
}
@@ -231,7 +237,13 @@ public class NodesFaultDetection extends FaultDetection {
}
retryCount++;
- logger.trace("[node ] failed to ping [{}], retry [{}] out of [{}]", exp, node, retryCount, pingRetryCount);
+ logger.trace(
+ new ParameterizedMessage(
+ "[node ] failed to ping [{}], retry [{}] out of [{}]",
+ node,
+ retryCount,
+ pingRetryCount),
+ exp);
if (retryCount >= pingRetryCount) {
logger.debug("[node ] failed to ping [{}], tried [{}] times, each with maximum [{}] timeout", node,
pingRetryCount, pingRetryTimeout);
diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java b/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java
index e1466651da..00c2308606 100644
--- a/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java
+++ b/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java
@@ -20,6 +20,7 @@
package org.elasticsearch.discovery.zen.ping.unicast;
import com.carrotsearch.hppc.cursors.ObjectCursor;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
@@ -46,7 +47,6 @@ import org.elasticsearch.discovery.zen.elect.ElectMasterService;
import org.elasticsearch.discovery.zen.ping.PingContextProvider;
import org.elasticsearch.discovery.zen.ping.ZenPing;
import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.ConnectTransportException;
import org.elasticsearch.transport.RemoteTransportException;
import org.elasticsearch.transport.TransportChannel;
@@ -55,6 +55,7 @@ import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestHandler;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportResponse;
+import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportService;
import java.io.Closeable;
@@ -413,13 +414,18 @@ public class UnicastZenPing extends AbstractLifecycleComponent implements ZenPin
success = true;
} catch (ConnectTransportException e) {
// can't connect to the node - this is a more common path!
- logger.trace("[{}] failed to connect to {}", e, sendPingsHandler.id(), finalNodeToSend);
+ logger.trace(
+ new ParameterizedMessage("[{}] failed to connect to {}", sendPingsHandler.id(), finalNodeToSend), e);
} catch (RemoteTransportException e) {
// something went wrong on the other side
- logger.debug("[{}] received a remote error as a response to ping {}", e,
- sendPingsHandler.id(), finalNodeToSend);
+ logger.debug(
+ new ParameterizedMessage(
+ "[{}] received a remote error as a response to ping {}",
+ sendPingsHandler.id(),
+ finalNodeToSend),
+ e);
} catch (Exception e) {
- logger.warn("[{}] failed send ping to {}", e, sendPingsHandler.id(), finalNodeToSend);
+ logger.warn(new ParameterizedMessage("[{}] failed send ping to {}", sendPingsHandler.id(), finalNodeToSend), e);
} finally {
if (!success) {
latch.countDown();
@@ -486,9 +492,9 @@ public class UnicastZenPing extends AbstractLifecycleComponent implements ZenPin
latch.countDown();
if (exp instanceof ConnectTransportException) {
// ok, not connected...
- logger.trace("failed to connect to {}", exp, nodeToSend);
+ logger.trace(new ParameterizedMessage("failed to connect to {}", nodeToSend), exp);
} else {
- logger.warn("failed to send ping to [{}]", exp, node);
+ logger.warn(new ParameterizedMessage("failed to send ping to [{}]", node), exp);
}
}
});
diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueue.java b/core/src/main/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueue.java
index 24b093627b..01fb96b713 100644
--- a/core/src/main/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueue.java
+++ b/core/src/main/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueue.java
@@ -18,10 +18,10 @@
*/
package org.elasticsearch.discovery.zen.publish;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.common.logging.ESLogger;
import java.util.ArrayList;
import java.util.Locale;
@@ -55,10 +55,10 @@ public class PendingClusterStatesQueue {
}
final ArrayList<ClusterStateContext> pendingStates = new ArrayList<>();
- final ESLogger logger;
+ final Logger logger;
final int maxQueueSize;
- public PendingClusterStatesQueue(ESLogger logger, int maxQueueSize) {
+ public PendingClusterStatesQueue(Logger logger, int maxQueueSize) {
this.logger = logger;
this.maxQueueSize = maxQueueSize;
}
diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java b/core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java
index 10f874923d..21c6da7dd4 100644
--- a/core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java
+++ b/core/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.discovery.zen.publish;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterChangedEvent;
@@ -244,7 +245,7 @@ public class PublishClusterStateAction extends AbstractComponent {
bytes = serializeFullClusterState(clusterState, node.getVersion());
serializedStates.put(node.getVersion(), bytes);
} catch (Exception e) {
- logger.warn("failed to serialize cluster_state before publishing it to node {}", e, node);
+ logger.warn(new ParameterizedMessage("failed to serialize cluster_state before publishing it to node {}", node), e);
sendingController.onNodeSendFailed(node, e);
return;
}
@@ -290,13 +291,13 @@ public class PublishClusterStateAction extends AbstractComponent {
logger.debug("resending full cluster state to node {} reason {}", node, exp.getDetailedMessage());
sendFullClusterState(clusterState, serializedStates, node, publishTimeout, sendingController);
} else {
- logger.debug("failed to send cluster state to {}", exp, node);
+ logger.debug(new ParameterizedMessage("failed to send cluster state to {}", node), exp);
sendingController.onNodeSendFailed(node, exp);
}
}
});
} catch (Exception e) {
- logger.warn("error sending cluster state to {}", e, node);
+ logger.warn(new ParameterizedMessage("error sending cluster state to {}", node), e);
sendingController.onNodeSendFailed(node, e);
}
}
@@ -322,12 +323,12 @@ public class PublishClusterStateAction extends AbstractComponent {
@Override
public void handleException(TransportException exp) {
- logger.debug("failed to commit cluster state (uuid [{}], version [{}]) to {}", exp, clusterState.stateUUID(), clusterState.version(), node);
+ logger.debug(new ParameterizedMessage("failed to commit cluster state (uuid [{}], version [{}]) to {}", clusterState.stateUUID(), clusterState.version(), node), exp);
sendingController.getPublishResponseHandler().onFailure(node, exp);
}
});
} catch (Exception t) {
- logger.warn("error sending cluster state commit (uuid [{}], version [{}]) to {}", t, clusterState.stateUUID(), clusterState.version(), node);
+ logger.warn(new ParameterizedMessage("error sending cluster state commit (uuid [{}], version [{}]) to {}", clusterState.stateUUID(), clusterState.version(), node), t);
sendingController.getPublishResponseHandler().onFailure(node, t);
}
}
@@ -626,7 +627,7 @@ public class PublishClusterStateAction extends AbstractComponent {
if (committedOrFailed()) {
return committed == false;
}
- logger.trace("failed to commit version [{}]. {}", reason, clusterState.version(), details);
+ logger.trace(new ParameterizedMessage("failed to commit version [{}]. {}", clusterState.version(), details), reason);
committed = false;
committedOrFailedLatch.countDown();
return true;
diff --git a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java
index 4ddf0e38b7..89bbdfff72 100644
--- a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java
+++ b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java
@@ -19,6 +19,8 @@
package org.elasticsearch.env;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.store.Directory;
@@ -36,7 +38,6 @@ import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
@@ -83,7 +84,7 @@ import static java.util.Collections.unmodifiableSet;
*/
public final class NodeEnvironment implements Closeable {
- private final ESLogger logger;
+ private final Logger logger;
public static class NodePath {
/* ${data.paths}/nodes/{node.id} */
@@ -196,7 +197,7 @@ public final class NodeEnvironment implements Closeable {
boolean success = false;
// trace logger to debug issues before the default node name is derived from the node id
- ESLogger startupTraceLogger = Loggers.getLogger(getClass(), settings);
+ Logger startupTraceLogger = Loggers.getLogger(getClass(), settings);
try {
sharedDataPath = environment.sharedDataFile();
@@ -231,7 +232,7 @@ public final class NodeEnvironment implements Closeable {
}
} catch (IOException e) {
- startupTraceLogger.trace("failed to obtain node lock on {}", e, dir.toAbsolutePath());
+ startupTraceLogger.trace(new ParameterizedMessage("failed to obtain node lock on {}", dir.toAbsolutePath()), e);
lastException = new IOException("failed to obtain lock on " + dir.toAbsolutePath(), e);
// release all the ones that were obtained up until now
releaseAndNullLocks(locks);
@@ -392,7 +393,7 @@ public final class NodeEnvironment implements Closeable {
* scans the node paths and loads existing metaData file. If not found a new meta data will be generated
* and persisted into the nodePaths
*/
- private static NodeMetaData loadOrCreateNodeMetaData(Settings settings, ESLogger logger,
+ private static NodeMetaData loadOrCreateNodeMetaData(Settings settings, Logger logger,
NodePath... nodePaths) throws IOException {
final Path[] paths = Arrays.stream(nodePaths).map(np -> np.path).toArray(Path[]::new);
NodeMetaData metaData = NodeMetaData.FORMAT.loadLatestState(logger, paths);
@@ -884,7 +885,7 @@ public final class NodeEnvironment implements Closeable {
logger.trace("releasing lock [{}]", lock);
lock.close();
} catch (IOException e) {
- logger.trace("failed to release lock [{}]", e, lock);
+ logger.trace(new ParameterizedMessage("failed to release lock [{}]", lock), e);
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java b/core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java
index dc7194b949..37b7235928 100644
--- a/core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java
+++ b/core/src/main/java/org/elasticsearch/gateway/AsyncShardFetch.java
@@ -19,6 +19,8 @@
package org.elasticsearch.gateway;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
@@ -30,7 +32,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lease.Releasable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.transport.ReceiveTimeoutTransportException;
@@ -63,7 +64,7 @@ public abstract class AsyncShardFetch<T extends BaseNodeResponse> implements Rel
void list(ShardId shardId, DiscoveryNode[] nodes, ActionListener<NodesResponse> listener);
}
- protected final ESLogger logger;
+ protected final Logger logger;
protected final String type;
private final ShardId shardId;
private final Lister<BaseNodesResponse<T>, T> action;
@@ -72,7 +73,7 @@ public abstract class AsyncShardFetch<T extends BaseNodeResponse> implements Rel
private boolean closed;
@SuppressWarnings("unchecked")
- protected AsyncShardFetch(ESLogger logger, String type, ShardId shardId, Lister<? extends BaseNodesResponse<T>, T> action) {
+ protected AsyncShardFetch(Logger logger, String type, ShardId shardId, Lister<? extends BaseNodesResponse<T>, T> action) {
this.logger = logger;
this.type = type;
this.shardId = shardId;
@@ -200,7 +201,7 @@ public abstract class AsyncShardFetch<T extends BaseNodeResponse> implements Rel
if (unwrappedCause instanceof EsRejectedExecutionException || unwrappedCause instanceof ReceiveTimeoutTransportException || unwrappedCause instanceof ElasticsearchTimeoutException) {
nodeEntry.restartFetching();
} else {
- logger.warn("{}: failed to list shard for {} on node [{}]", failure, shardId, type, failure.nodeId());
+ logger.warn(new ParameterizedMessage("{}: failed to list shard for {} on node [{}]", shardId, type, failure.nodeId()), failure);
nodeEntry.doneFetching(failure.getCause());
}
}
diff --git a/core/src/main/java/org/elasticsearch/gateway/Gateway.java b/core/src/main/java/org/elasticsearch/gateway/Gateway.java
index ee2abc17ab..2b832d4691 100644
--- a/core/src/main/java/org/elasticsearch/gateway/Gateway.java
+++ b/core/src/main/java/org/elasticsearch/gateway/Gateway.java
@@ -21,6 +21,7 @@ package org.elasticsearch.gateway;
import com.carrotsearch.hppc.ObjectFloatHashMap;
import com.carrotsearch.hppc.cursors.ObjectCursor;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
@@ -138,7 +139,9 @@ public class Gateway extends AbstractComponent implements ClusterStateListener {
indicesService.verifyIndexMetadata(nodeServicesProvider, electedIndexMetaData, electedIndexMetaData);
}
} catch (Exception e) {
- logger.warn("recovering index {} failed - recovering as closed", e, electedIndexMetaData.getIndex());
+ logger.warn(
+ new ParameterizedMessage("recovering index {} failed - recovering as closed", electedIndexMetaData.getIndex()),
+ e);
electedIndexMetaData = IndexMetaData.builder(electedIndexMetaData).state(IndexMetaData.State.CLOSE).build();
}
diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java b/core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java
index a9fbe0ac82..c84a9c3378 100644
--- a/core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java
+++ b/core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java
@@ -19,6 +19,7 @@
package org.elasticsearch.gateway;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.support.nodes.BaseNodeResponse;
import org.elasticsearch.action.support.nodes.BaseNodesResponse;
import org.elasticsearch.cluster.ClusterChangedEvent;
@@ -34,7 +35,6 @@ import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lease.Releasables;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.index.shard.ShardId;
@@ -140,7 +140,7 @@ public class GatewayAllocator extends AbstractComponent {
class InternalAsyncFetch<T extends BaseNodeResponse> extends AsyncShardFetch<T> {
- public InternalAsyncFetch(ESLogger logger, String type, ShardId shardId, Lister<? extends BaseNodesResponse<T>, T> action) {
+ public InternalAsyncFetch(Logger logger, String type, ShardId shardId, Lister<? extends BaseNodesResponse<T>, T> action) {
super(logger, type, shardId, action);
}
diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java
index 3282a8f2a4..fcc2aab9c6 100644
--- a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java
+++ b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java
@@ -20,6 +20,7 @@
package org.elasticsearch.gateway;
import com.carrotsearch.hppc.cursors.ObjectCursor;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateListener;
@@ -289,7 +290,7 @@ public class GatewayService extends AbstractLifecycleComponent implements Cluste
@Override
public void onFailure(String source, Exception e) {
- logger.error("unexpected failure during [{}]", e, source);
+ logger.error(new ParameterizedMessage("unexpected failure during [{}]", source), e);
GatewayRecoveryListener.this.onFailure("failed to updated cluster state");
}
diff --git a/core/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java b/core/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java
index ee987b8665..5ed58e2868 100644
--- a/core/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java
+++ b/core/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java
@@ -19,6 +19,7 @@
package org.elasticsearch.gateway;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.block.ClusterBlocks;
@@ -148,8 +149,7 @@ public class LocalAllocateDangledIndices extends AbstractComponent {
upgradedIndexMetaData = metaDataIndexUpgradeService.upgradeIndexMetaData(indexMetaData);
} catch (Exception ex) {
// upgrade failed - adding index as closed
- logger.warn("found dangled index [{}] on node [{}]. This index cannot be upgraded to the latest version, adding as closed", ex,
- indexMetaData.getIndex(), request.fromNode);
+ logger.warn(new ParameterizedMessage("found dangled index [{}] on node [{}]. This index cannot be upgraded to the latest version, adding as closed", indexMetaData.getIndex(), request.fromNode), ex);
upgradedIndexMetaData = IndexMetaData.builder(indexMetaData).state(IndexMetaData.State.CLOSE).version(indexMetaData.getVersion() + 1).build();
}
metaData.put(upgradedIndexMetaData, false);
@@ -176,7 +176,7 @@ public class LocalAllocateDangledIndices extends AbstractComponent {
@Override
public void onFailure(String source, Exception e) {
- logger.error("unexpected failure during [{}]", e, source);
+ logger.error(new ParameterizedMessage("unexpected failure during [{}]", source), e);
try {
channel.sendResponse(e);
} catch (Exception inner) {
diff --git a/core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java b/core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java
index cc6a48b855..1535c1b21d 100644
--- a/core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java
+++ b/core/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java
@@ -18,6 +18,8 @@
*/
package org.elasticsearch.gateway;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexFormatTooNewException;
@@ -30,7 +32,6 @@ import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.store.IndexOutputOutputStream;
import org.elasticsearch.common.lucene.store.InputStreamIndexInput;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -254,11 +255,11 @@ public abstract class MetaDataStateFormat<T> {
* the states version from one or more data directories and if none of the latest states can be loaded an exception
* is thrown to prevent accidentally loading a previous state and silently omitting the latest state.
*
- * @param logger an elasticsearch logger instance
+ * @param logger a logger instance
* @param dataLocations the data-locations to try.
* @return the latest state or <code>null</code> if no state was found.
*/
- public T loadLatestState(ESLogger logger, Path... dataLocations) throws IOException {
+ public T loadLatestState(Logger logger, Path... dataLocations) throws IOException {
List<PathAndStateId> files = new ArrayList<>();
long maxStateId = -1;
boolean maxStateIdIsLegacy = true;
@@ -322,7 +323,8 @@ public abstract class MetaDataStateFormat<T> {
return state;
} catch (Exception e) {
exceptions.add(new IOException("failed to read " + pathAndStateId.toString(), e));
- logger.debug("{}: failed to read [{}], ignoring...", e, pathAndStateId.file.toAbsolutePath(), prefix);
+ logger.debug(
+ new ParameterizedMessage("{}: failed to read [{}], ignoring...", pathAndStateId.file.toAbsolutePath(), prefix), e);
}
}
// if we reach this something went wrong
diff --git a/core/src/main/java/org/elasticsearch/gateway/MetaStateService.java b/core/src/main/java/org/elasticsearch/gateway/MetaStateService.java
index b5ec8466c2..a2ba447887 100644
--- a/core/src/main/java/org/elasticsearch/gateway/MetaStateService.java
+++ b/core/src/main/java/org/elasticsearch/gateway/MetaStateService.java
@@ -19,6 +19,7 @@
package org.elasticsearch.gateway;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.Nullable;
@@ -127,7 +128,7 @@ public class MetaStateService extends AbstractComponent {
IndexMetaData.FORMAT.write(indexMetaData,
nodeEnv.indexPaths(indexMetaData.getIndex()));
} catch (Exception ex) {
- logger.warn("[{}]: failed to write index state", ex, index);
+ logger.warn(new ParameterizedMessage("[{}]: failed to write index state", index), ex);
throw new IOException("failed to write state for [" + index + "]", ex);
}
}
diff --git a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java
index 9c5debe25e..d686bb5fe0 100644
--- a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java
+++ b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java
@@ -19,6 +19,7 @@
package org.elasticsearch.gateway;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -226,7 +227,7 @@ public abstract class PrimaryShardAllocator extends AbstractComponent {
logger.trace("[{}] on node [{}] has no allocation id, out-dated shard (shard state version: [{}])", shard, nodeShardState.getNode(), nodeShardState.legacyVersion());
}
} else {
- logger.trace("[{}] on node [{}] has allocation id [{}] but the store can not be opened, treating as no allocation id", nodeShardState.storeException(), shard, nodeShardState.getNode(), allocationId);
+ logger.trace(new ParameterizedMessage("[{}] on node [{}] has allocation id [{}] but the store can not be opened, treating as no allocation id", shard, nodeShardState.getNode(), allocationId), nodeShardState.storeException());
allocationId = null;
}
@@ -351,7 +352,7 @@ public abstract class PrimaryShardAllocator extends AbstractComponent {
}
} else {
// when there is an store exception, we disregard the reported version and assign it as no version (same as shard does not exist)
- logger.trace("[{}] on node [{}] has version [{}] but the store can not be opened, treating no version", nodeShardState.storeException(), shard, nodeShardState.getNode(), version);
+ logger.trace(new ParameterizedMessage("[{}] on node [{}] has version [{}] but the store can not be opened, treating no version", shard, nodeShardState.getNode(), version), nodeShardState.storeException());
version = ShardStateMetaData.NO_VERSION;
}
diff --git a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java
index fc23ef1358..8640221401 100644
--- a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java
+++ b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java
@@ -19,6 +19,7 @@
package org.elasticsearch.gateway;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.FailedNodeException;
@@ -140,8 +141,13 @@ public class TransportNodesListGatewayStartedShards extends
}
Store.tryOpenIndex(shardPath.resolveIndex(), shardId, nodeEnv::shardLock, logger);
} catch (Exception exception) {
- logger.trace("{} can't open index for shard [{}] in path [{}]", exception, shardId,
- shardStateMetaData, (shardPath != null) ? shardPath.resolveIndex() : "");
+ logger.trace(
+ new ParameterizedMessage(
+ "{} can't open index for shard [{}] in path [{}]",
+ shardId,
+ shardStateMetaData,
+ (shardPath != null) ? shardPath.resolveIndex() : ""),
+ exception);
String allocationId = shardStateMetaData.allocationId != null ?
shardStateMetaData.allocationId.getId() : null;
return new NodeGatewayStartedShards(clusterService.localNode(), shardStateMetaData.legacyVersion,
diff --git a/core/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java b/core/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java
index b155a43610..25acdd06b4 100644
--- a/core/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java
+++ b/core/src/main/java/org/elasticsearch/index/AbstractIndexComponent.java
@@ -19,16 +19,13 @@
package org.elasticsearch.index;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
-/**
- *
- */
public abstract class AbstractIndexComponent implements IndexComponent {
- protected final ESLogger logger;
+ protected final Logger logger;
protected final DeprecationLogger deprecationLogger;
protected final IndexSettings indexSettings;
diff --git a/core/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java b/core/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java
index 97e00b98df..c0495f37a7 100644
--- a/core/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java
+++ b/core/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java
@@ -19,9 +19,10 @@
package org.elasticsearch.index;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.shard.IndexEventListener;
@@ -40,7 +41,7 @@ import java.util.List;
final class CompositeIndexEventListener implements IndexEventListener {
private final List<IndexEventListener> listeners;
- private final ESLogger logger;
+ private final Logger logger;
CompositeIndexEventListener(IndexSettings indexSettings, Collection<IndexEventListener> listeners) {
for (IndexEventListener listener : listeners) {
@@ -58,7 +59,7 @@ final class CompositeIndexEventListener implements IndexEventListener {
try {
listener.shardRoutingChanged(indexShard, oldRouting, newRouting);
} catch (Exception e) {
- logger.warn("[{}] failed to invoke shard touring changed callback", e, indexShard.shardId().getId());
+ logger.warn(new ParameterizedMessage("[{}] failed to invoke shard touring changed callback", indexShard.shardId().getId()), e);
}
}
}
@@ -69,7 +70,7 @@ final class CompositeIndexEventListener implements IndexEventListener {
try {
listener.afterIndexShardCreated(indexShard);
} catch (Exception e) {
- logger.warn("[{}] failed to invoke after shard created callback", e, indexShard.shardId().getId());
+ logger.warn(new ParameterizedMessage("[{}] failed to invoke after shard created callback", indexShard.shardId().getId()), e);
throw e;
}
}
@@ -81,7 +82,7 @@ final class CompositeIndexEventListener implements IndexEventListener {
try {
listener.afterIndexShardStarted(indexShard);
} catch (Exception e) {
- logger.warn("[{}] failed to invoke after shard started callback", e, indexShard.shardId().getId());
+ logger.warn(new ParameterizedMessage("[{}] failed to invoke after shard started callback", indexShard.shardId().getId()), e);
throw e;
}
}
@@ -94,7 +95,7 @@ final class CompositeIndexEventListener implements IndexEventListener {
try {
listener.beforeIndexShardClosed(shardId, indexShard, indexSettings);
} catch (Exception e) {
- logger.warn("[{}] failed to invoke before shard closed callback", e, shardId.getId());
+ logger.warn(new ParameterizedMessage("[{}] failed to invoke before shard closed callback", shardId.getId()), e);
throw e;
}
}
@@ -107,7 +108,7 @@ final class CompositeIndexEventListener implements IndexEventListener {
try {
listener.afterIndexShardClosed(shardId, indexShard, indexSettings);
} catch (Exception e) {
- logger.warn("[{}] failed to invoke after shard closed callback", e, shardId.getId());
+ logger.warn(new ParameterizedMessage("[{}] failed to invoke after shard closed callback", shardId.getId()), e);
throw e;
}
}
@@ -119,7 +120,7 @@ final class CompositeIndexEventListener implements IndexEventListener {
try {
listener.onShardInactive(indexShard);
} catch (Exception e) {
- logger.warn("[{}] failed to invoke on shard inactive callback", e, indexShard.shardId().getId());
+ logger.warn(new ParameterizedMessage("[{}] failed to invoke on shard inactive callback", indexShard.shardId().getId()), e);
throw e;
}
}
@@ -131,7 +132,7 @@ final class CompositeIndexEventListener implements IndexEventListener {
try {
listener.indexShardStateChanged(indexShard, previousState, indexShard.state(), reason);
} catch (Exception e) {
- logger.warn("[{}] failed to invoke index shard state changed callback", e, indexShard.shardId().getId());
+ logger.warn(new ParameterizedMessage("[{}] failed to invoke index shard state changed callback", indexShard.shardId().getId()), e);
throw e;
}
}
@@ -167,7 +168,7 @@ final class CompositeIndexEventListener implements IndexEventListener {
try {
listener.beforeIndexShardCreated(shardId, indexSettings);
} catch (Exception e) {
- logger.warn("[{}] failed to invoke before shard created callback", e, shardId);
+ logger.warn(new ParameterizedMessage("[{}] failed to invoke before shard created callback", shardId), e);
throw e;
}
}
@@ -228,7 +229,7 @@ final class CompositeIndexEventListener implements IndexEventListener {
try {
listener.beforeIndexShardDeleted(shardId, indexSettings);
} catch (Exception e) {
- logger.warn("[{}] failed to invoke before shard deleted callback", e, shardId.getId());
+ logger.warn(new ParameterizedMessage("[{}] failed to invoke before shard deleted callback", shardId.getId()), e);
throw e;
}
}
@@ -241,7 +242,7 @@ final class CompositeIndexEventListener implements IndexEventListener {
try {
listener.afterIndexShardDeleted(shardId, indexSettings);
} catch (Exception e) {
- logger.warn("[{}] failed to invoke after shard deleted callback", e, shardId.getId());
+ logger.warn(new ParameterizedMessage("[{}] failed to invoke after shard deleted callback", shardId.getId()), e);
throw e;
}
}
diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java
index b9f93bf2ac..281c4de5f8 100644
--- a/core/src/main/java/org/elasticsearch/index/IndexService.java
+++ b/core/src/main/java/org/elasticsearch/index/IndexService.java
@@ -19,6 +19,7 @@
package org.elasticsearch.index;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
@@ -92,9 +93,6 @@ import static java.util.Collections.emptyMap;
import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
-/**
- *
- */
public class IndexService extends AbstractIndexComponent implements IndicesClusterStateService.AllocatedIndex<IndexShard> {
private final IndexEventListener eventListener;
@@ -397,7 +395,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
final boolean flushEngine = deleted.get() == false && closed.get();
indexShard.close(reason, flushEngine);
} catch (Exception e) {
- logger.debug("[{}] failed to close index shard", e, shardId);
+ logger.debug(new ParameterizedMessage("[{}] failed to close index shard", shardId), e);
// ignore
}
}
@@ -408,7 +406,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
try {
store.close();
} catch (Exception e) {
- logger.warn("[{}] failed to close store on shard removal (reason: [{}])", e, shardId, reason);
+ logger.warn(new ParameterizedMessage("[{}] failed to close store on shard removal (reason: [{}])", shardId, reason), e);
}
}
}
@@ -427,7 +425,8 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
}
} catch (IOException e) {
shardStoreDeleter.addPendingDelete(lock.getShardId(), indexSettings);
- logger.debug("[{}] failed to delete shard content - scheduled a retry", e, lock.getShardId().id());
+ logger.debug(
+ new ParameterizedMessage("[{}] failed to delete shard content - scheduled a retry", lock.getShardId().id()), e);
}
}
}
@@ -639,7 +638,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
try {
shard.onSettingsChanged();
} catch (Exception e) {
- logger.warn("[{}] failed to notify shard about setting change", e, shard.shardId().id());
+ logger.warn(new ParameterizedMessage("[{}] failed to notify shard about setting change", shard.shardId().id()), e);
}
}
if (refreshTask.getInterval().equals(indexSettings.getRefreshInterval()) == false) {
@@ -781,8 +780,11 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
} catch (Exception ex) {
if (lastThrownException == null || sameException(lastThrownException, ex) == false) {
// prevent the annoying fact of logging the same stuff all the time with an interval of 1 sec will spam all your logs
- indexService.logger.warn("failed to run task {} - suppressing re-occurring exceptions unless the exception changes",
- ex, toString());
+ indexService.logger.warn(
+ new ParameterizedMessage(
+ "failed to run task {} - suppressing re-occurring exceptions unless the exception changes",
+ toString()),
+ ex);
lastThrownException = ex;
}
} finally {
diff --git a/core/src/main/java/org/elasticsearch/index/IndexSettings.java b/core/src/main/java/org/elasticsearch/index/IndexSettings.java
index dd0551aa5b..5666fb416f 100644
--- a/core/src/main/java/org/elasticsearch/index/IndexSettings.java
+++ b/core/src/main/java/org/elasticsearch/index/IndexSettings.java
@@ -18,11 +18,11 @@
*/
package org.elasticsearch.index;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.MergePolicy;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.IndexScopedSettings;
@@ -130,7 +130,7 @@ public final class IndexSettings {
private final Index index;
private final Version version;
- private final ESLogger logger;
+ private final Logger logger;
private final String nodeName;
private final Settings nodeSettings;
private final int numberOfShards;
diff --git a/core/src/main/java/org/elasticsearch/index/IndexWarmer.java b/core/src/main/java/org/elasticsearch/index/IndexWarmer.java
index ba48adb71a..8e479d8a41 100644
--- a/core/src/main/java/org/elasticsearch/index/IndexWarmer.java
+++ b/core/src/main/java/org/elasticsearch/index/IndexWarmer.java
@@ -19,6 +19,7 @@
package org.elasticsearch.index;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.index.DirectoryReader;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
@@ -147,7 +148,10 @@ public final class IndexWarmer extends AbstractComponent {
TimeValue.timeValueNanos(System.nanoTime() - start));
}
} catch (Exception e) {
- indexShard.warmerService().logger().warn("failed to warm-up global ordinals for [{}]", e, fieldType.name());
+ indexShard
+ .warmerService()
+ .logger()
+ .warn(new ParameterizedMessage("failed to warm-up global ordinals for [{}]", fieldType.name()), e);
} finally {
latch.countDown();
}
diff --git a/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java b/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java
index a145012dd2..513e87878d 100644
--- a/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java
+++ b/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java
@@ -19,9 +19,9 @@
package org.elasticsearch.index;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
@@ -34,8 +34,6 @@ import org.elasticsearch.index.shard.IndexingOperationListener;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
-/**
- */
public final class IndexingSlowLog implements IndexingOperationListener {
private final Index index;
private boolean reformat;
@@ -52,7 +50,7 @@ public final class IndexingSlowLog implements IndexingOperationListener {
private SlowLogLevel level;
- private final ESLogger indexLogger;
+ private final Logger indexLogger;
private static final String INDEX_INDEXING_SLOWLOG_PREFIX = "index.indexing.slowlog";
public static final Setting<TimeValue> INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING =
@@ -112,7 +110,7 @@ public final class IndexingSlowLog implements IndexingOperationListener {
private void setLevel(SlowLogLevel level) {
this.level = level;
- this.indexLogger.setLevel(level.name());
+ Loggers.setLevel(this.indexLogger, level.name());
}
private void setWarnThreshold(TimeValue warnThreshold) {
diff --git a/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java b/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java
index b5856d606e..52b98e2bd0 100644
--- a/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java
+++ b/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java
@@ -19,11 +19,10 @@
package org.elasticsearch.index;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.TieredMergePolicy;
-import org.elasticsearch.common.logging.ESLogger;
-import org.elasticsearch.common.settings.IndexScopedSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.unit.ByteSizeUnit;
@@ -117,7 +116,7 @@ import org.elasticsearch.common.unit.ByteSizeValue;
public final class MergePolicyConfig {
private final TieredMergePolicy mergePolicy = new TieredMergePolicy();
- private final ESLogger logger;
+ private final Logger logger;
private final boolean mergesEnabled;
public static final double DEFAULT_EXPUNGE_DELETES_ALLOWED = 10d;
@@ -155,7 +154,7 @@ public final class MergePolicyConfig {
public static final String INDEX_MERGE_ENABLED = "index.merge.enabled"; // don't convert to Setting<> and register... we only set this in tests and register via a plugin
- MergePolicyConfig(ESLogger logger, IndexSettings indexSettings) {
+ MergePolicyConfig(Logger logger, IndexSettings indexSettings) {
this.logger = logger;
double forceMergeDeletesPctAllowed = indexSettings.getValue(INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING); // percentage
ByteSizeValue floorSegment = indexSettings.getValue(INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING);
diff --git a/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java b/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java
index 148b676331..19086416b8 100644
--- a/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java
+++ b/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java
@@ -19,8 +19,8 @@
package org.elasticsearch.index;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
@@ -30,8 +30,6 @@ import org.elasticsearch.search.internal.SearchContext;
import java.util.concurrent.TimeUnit;
-/**
- */
public final class SearchSlowLog implements SearchOperationListener {
private boolean reformat;
@@ -47,8 +45,8 @@ public final class SearchSlowLog implements SearchOperationListener {
private SlowLogLevel level;
- private final ESLogger queryLogger;
- private final ESLogger fetchLogger;
+ private final Logger queryLogger;
+ private final Logger fetchLogger;
private static final String INDEX_SEARCH_SLOWLOG_PREFIX = "index.search.slowlog";
public static final Setting<TimeValue> INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING =
@@ -113,8 +111,8 @@ public final class SearchSlowLog implements SearchOperationListener {
private void setLevel(SlowLogLevel level) {
this.level = level;
- this.queryLogger.setLevel(level.name());
- this.fetchLogger.setLevel(level.name());
+ Loggers.setLevel(queryLogger, level.name());
+ Loggers.setLevel(fetchLogger, level.name());
}
@Override
public void onQueryPhase(SearchContext context, long tookInNanos) {
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java b/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java
index d2158f707c..aded2bb4ee 100644
--- a/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java
+++ b/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java
@@ -19,10 +19,8 @@
package org.elasticsearch.index.analysis;
-import org.apache.lucene.analysis.Analyzer;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.analysis.CharArraySet;
-import org.apache.lucene.analysis.LegacyNumericTokenStream;
-import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.ar.ArabicAnalyzer;
import org.apache.lucene.analysis.bg.BulgarianAnalyzer;
import org.apache.lucene.analysis.br.BrazilianAnalyzer;
@@ -54,12 +52,10 @@ import org.apache.lucene.analysis.ro.RomanianAnalyzer;
import org.apache.lucene.analysis.ru.RussianAnalyzer;
import org.apache.lucene.analysis.sv.SwedishAnalyzer;
import org.apache.lucene.analysis.th.ThaiAnalyzer;
-import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tr.TurkishAnalyzer;
import org.apache.lucene.util.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.FileSystemUtils;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
@@ -70,7 +66,6 @@ import java.io.Reader;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
-import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -82,12 +77,9 @@ import java.util.Set;
import static java.util.Collections.unmodifiableMap;
-/**
- *
- */
public class Analysis {
- public static Version parseAnalysisVersion(Settings indexSettings, Settings settings, ESLogger logger) {
+ public static Version parseAnalysisVersion(Settings indexSettings, Settings settings, Logger logger) {
// check for explicit version on the specific analyzer component
String sVersion = settings.get("version");
if (sVersion != null) {
diff --git a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java
index cc3a90c4ab..23e0803eed 100644
--- a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java
+++ b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java
@@ -19,6 +19,7 @@
package org.elasticsearch.index.cache.bitset;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
@@ -258,7 +259,7 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L
indexShard.warmerService().logger().trace("warmed bitset for [{}], took [{}]", filterToWarm, TimeValue.timeValueNanos(System.nanoTime() - start));
}
} catch (Exception e) {
- indexShard.warmerService().logger().warn("failed to load bitset for [{}]", e, filterToWarm);
+ indexShard.warmerService().logger().warn(new ParameterizedMessage("failed to load bitset for [{}]", filterToWarm), e);
} finally {
latch.countDown();
}
diff --git a/core/src/main/java/org/elasticsearch/index/codec/CodecService.java b/core/src/main/java/org/elasticsearch/index/codec/CodecService.java
index 65fab8d254..59be64a85d 100644
--- a/core/src/main/java/org/elasticsearch/index/codec/CodecService.java
+++ b/core/src/main/java/org/elasticsearch/index/codec/CodecService.java
@@ -19,12 +19,12 @@
package org.elasticsearch.index.codec;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
import org.apache.lucene.codecs.lucene62.Lucene62Codec;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.MapBuilder;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.index.mapper.MapperService;
import java.util.Map;
@@ -44,7 +44,7 @@ public class CodecService {
/** the raw unfiltered lucene default. useful for testing */
public static final String LUCENE_DEFAULT_CODEC = "lucene_default";
- public CodecService(@Nullable MapperService mapperService, ESLogger logger) {
+ public CodecService(@Nullable MapperService mapperService, Logger logger) {
final MapBuilder<String, Codec> codecs = MapBuilder.<String, Codec>newMapBuilder();
if (mapperService == null) {
codecs.put(DEFAULT_CODEC, new Lucene62Codec());
diff --git a/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java b/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java
index ec4636e396..54f15feaa7 100644
--- a/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java
+++ b/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java
@@ -19,11 +19,11 @@
package org.elasticsearch.index.codec;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
import org.apache.lucene.codecs.lucene62.Lucene62Codec;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.index.mapper.CompletionFieldMapper;
import org.elasticsearch.index.mapper.CompletionFieldMapper2x;
@@ -40,14 +40,14 @@ import org.elasticsearch.index.mapper.MapperService;
*/
// LUCENE UPGRADE: make sure to move to a new codec depending on the lucene version
public class PerFieldMappingPostingFormatCodec extends Lucene62Codec {
- private final ESLogger logger;
+ private final Logger logger;
private final MapperService mapperService;
static {
assert Codec.forName(Lucene.LATEST_CODEC).getClass().isAssignableFrom(PerFieldMappingPostingFormatCodec.class) : "PerFieldMappingPostingFormatCodec must subclass the latest lucene codec: " + Lucene.LATEST_CODEC;
}
- public PerFieldMappingPostingFormatCodec(Lucene50StoredFieldsFormat.Mode compressionMode, MapperService mapperService, ESLogger logger) {
+ public PerFieldMappingPostingFormatCodec(Lucene50StoredFieldsFormat.Mode compressionMode, MapperService mapperService, Logger logger) {
super(compressionMode);
this.mapperService = mapperService;
this.logger = logger;
diff --git a/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java b/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java
index 965a2e58f9..466da06dec 100644
--- a/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java
+++ b/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java
@@ -19,12 +19,12 @@
package org.elasticsearch.index.engine;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.MergeScheduler;
import org.apache.lucene.index.OneMergeHelper;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.metrics.CounterMetric;
import org.elasticsearch.common.metrics.MeanMetric;
@@ -34,9 +34,9 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.MergeSchedulerConfig;
import org.elasticsearch.index.merge.MergeStats;
import org.elasticsearch.index.merge.OnGoingMerge;
-import org.elasticsearch.index.MergeSchedulerConfig;
import org.elasticsearch.index.shard.ShardId;
import java.io.IOException;
@@ -50,7 +50,7 @@ import java.util.Set;
*/
class ElasticsearchConcurrentMergeScheduler extends ConcurrentMergeScheduler {
- protected final ESLogger logger;
+ protected final Logger logger;
private final Settings indexSettings;
private final ShardId shardId;
diff --git a/core/src/main/java/org/elasticsearch/index/engine/Engine.java b/core/src/main/java/org/elasticsearch/index/engine/Engine.java
index 4ac09db8dc..c5be47bab3 100644
--- a/core/src/main/java/org/elasticsearch/index/engine/Engine.java
+++ b/core/src/main/java/org/elasticsearch/index/engine/Engine.java
@@ -19,6 +19,8 @@
package org.elasticsearch.index.engine;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FilterLeafReader;
import org.apache.lucene.index.IndexCommit;
@@ -48,7 +50,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.uid.Versions;
@@ -92,7 +93,7 @@ public abstract class Engine implements Closeable {
public static final String SYNC_COMMIT_ID = "sync_id";
protected final ShardId shardId;
- protected final ESLogger logger;
+ protected final Logger logger;
protected final EngineConfig engineConfig;
protected final Store store;
protected final AtomicBoolean isClosed = new AtomicBoolean(false);
@@ -364,7 +365,7 @@ public abstract class Engine implements Closeable {
throw ex;
} catch (Exception ex) {
ensureOpen(); // throw EngineCloseException here if we are already closed
- logger.error("failed to acquire searcher, source {}", ex, source);
+ logger.error(new ParameterizedMessage("failed to acquire searcher, source {}", source), ex);
throw new EngineException(shardId, "failed to acquire searcher, source " + source, ex);
} finally {
if (!success) { // release the ref in the case of an error...
@@ -443,8 +444,7 @@ public abstract class Engine implements Closeable {
try {
directory = engineConfig.getCodec().compoundFormat().getCompoundReader(segmentReader.directory(), segmentCommitInfo.info, IOContext.READ);
} catch (IOException e) {
- logger.warn("Error when opening compound reader for Directory [{}] and SegmentCommitInfo [{}]", e,
- segmentReader.directory(), segmentCommitInfo);
+ logger.warn(new ParameterizedMessage("Error when opening compound reader for Directory [{}] and SegmentCommitInfo [{}]", segmentReader.directory(), segmentCommitInfo), e);
return ImmutableOpenMap.of();
}
@@ -459,14 +459,14 @@ public abstract class Engine implements Closeable {
try {
files = directory.listAll();
} catch (IOException e) {
- logger.warn("Couldn't list Compound Reader Directory [{}]", e, directory);
+ logger.warn(new ParameterizedMessage("Couldn't list Compound Reader Directory [{}]", directory), e);
return ImmutableOpenMap.of();
}
} else {
try {
files = segmentReader.getSegmentInfo().files().toArray(new String[]{});
} catch (IOException e) {
- logger.warn("Couldn't list Directory from SegmentReader [{}] and SegmentInfo [{}]", e, segmentReader, segmentReader.getSegmentInfo());
+ logger.warn(new ParameterizedMessage("Couldn't list Directory from SegmentReader [{}] and SegmentInfo [{}]", segmentReader, segmentReader.getSegmentInfo()), e);
return ImmutableOpenMap.of();
}
}
@@ -480,7 +480,7 @@ public abstract class Engine implements Closeable {
            } catch (NoSuchFileException | FileNotFoundException e) {
-                logger.warn("Tried to query fileLength but file is gone [{}] [{}]", e, directory, file);
+                logger.warn(new ParameterizedMessage("Tried to query fileLength but file is gone [{}] [{}]", directory, file), e);
} catch (IOException e) {
- logger.warn("Error when trying to query fileLength [{}] [{}]", e, directory, file);
+ logger.warn(new ParameterizedMessage("Error when trying to query fileLength [{}] [{}]", directory, file), e);
}
if (length == 0L) {
continue;
@@ -492,7 +492,7 @@ public abstract class Engine implements Closeable {
try {
directory.close();
} catch (IOException e) {
- logger.warn("Error when closing compound reader on Directory [{}]", e, directory);
+ logger.warn(new ParameterizedMessage("Error when closing compound reader on Directory [{}]", directory), e);
}
}
@@ -527,7 +527,7 @@ public abstract class Engine implements Closeable {
try {
segment.sizeInBytes = info.sizeInBytes();
} catch (IOException e) {
- logger.trace("failed to get size for [{}]", e, info.info.name);
+ logger.trace(new ParameterizedMessage("failed to get size for [{}]", info.info.name), e);
}
final SegmentReader segmentReader = segmentReader(reader.reader());
segment.memoryInBytes = segmentReader.ramBytesUsed();
@@ -557,7 +557,7 @@ public abstract class Engine implements Closeable {
try {
segment.sizeInBytes = info.sizeInBytes();
} catch (IOException e) {
- logger.trace("failed to get size for [{}]", e, info.info.name);
+ logger.trace(new ParameterizedMessage("failed to get size for [{}]", info.info.name), e);
}
segments.put(info.info.name, segment);
} else {
@@ -669,10 +669,10 @@ public abstract class Engine implements Closeable {
closeNoLock("engine failed on: [" + reason + "]");
} finally {
if (failedEngine != null) {
- logger.debug("tried to fail engine but engine is already failed. ignoring. [{}]", failure, reason);
+ logger.debug(new ParameterizedMessage("tried to fail engine but engine is already failed. ignoring. [{}]", reason), failure);
return;
}
- logger.warn("failed engine [{}]", failure, reason);
+ logger.warn(new ParameterizedMessage("failed engine [{}]", reason), failure);
// we must set a failure exception, generate one if not supplied
failedEngine = (failure != null) ? failure : new IllegalStateException(reason);
// we first mark the store as corrupted before we notify any listeners
@@ -696,7 +696,7 @@ public abstract class Engine implements Closeable {
store.decRef();
}
} else {
- logger.debug("tried to fail engine but could not acquire lock - engine should be failed by now [{}]", failure, reason);
+ logger.debug(new ParameterizedMessage("tried to fail engine but could not acquire lock - engine should be failed by now [{}]", reason), failure);
}
}
diff --git a/core/src/main/java/org/elasticsearch/index/engine/EngineSearcher.java b/core/src/main/java/org/elasticsearch/index/engine/EngineSearcher.java
index b32d4aa0bb..a53ac1dd41 100644
--- a/core/src/main/java/org/elasticsearch/index/engine/EngineSearcher.java
+++ b/core/src/main/java/org/elasticsearch/index/engine/EngineSearcher.java
@@ -19,10 +19,10 @@
package org.elasticsearch.index.engine;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.SearcherManager;
import org.apache.lucene.store.AlreadyClosedException;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.index.store.Store;
import java.io.IOException;
@@ -35,9 +35,9 @@ public class EngineSearcher extends Engine.Searcher {
private final SearcherManager manager;
private final AtomicBoolean released = new AtomicBoolean(false);
private final Store store;
- private final ESLogger logger;
+ private final Logger logger;
- public EngineSearcher(String source, IndexSearcher searcher, SearcherManager manager, Store store, ESLogger logger) {
+ public EngineSearcher(String source, IndexSearcher searcher, SearcherManager manager, Store store, Logger logger) {
super(source, searcher);
this.manager = manager;
this.store = store;
diff --git a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
index b12bfe98bb..614528e637 100644
--- a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
+++ b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
@@ -19,6 +19,7 @@
package org.elasticsearch.index.engine;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexFormatTooOldException;
@@ -43,7 +44,6 @@ import org.apache.lucene.util.InfoStream;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lease.Releasable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.LoggerInfoStream;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
@@ -1034,10 +1034,10 @@ public class InternalEngine extends Engine {
/** Extended SearcherFactory that warms the segments if needed when acquiring a new searcher */
static final class SearchFactory extends EngineSearcherFactory {
private final Engine.Warmer warmer;
- private final ESLogger logger;
+ private final Logger logger;
private final AtomicBoolean isEngineClosed;
- SearchFactory(ESLogger logger, AtomicBoolean isEngineClosed, EngineConfig engineConfig) {
+ SearchFactory(Logger logger, AtomicBoolean isEngineClosed, EngineConfig engineConfig) {
super(engineConfig);
warmer = engineConfig.getWarmer();
this.logger = logger;
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java
index 2fa4476c0d..aaecf2fa89 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java
@@ -19,6 +19,7 @@
package org.elasticsearch.index.fielddata.ordinals;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiDocValues.OrdinalMap;
@@ -26,7 +27,6 @@ import org.apache.lucene.index.RandomAccessOrds;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.packed.PackedInts;
import org.elasticsearch.common.breaker.CircuitBreaker;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
@@ -48,7 +48,7 @@ public enum GlobalOrdinalsBuilder {
/**
* Build global ordinals for the provided {@link IndexReader}.
*/
- public static IndexOrdinalsFieldData build(final IndexReader indexReader, IndexOrdinalsFieldData indexFieldData, IndexSettings indexSettings, CircuitBreakerService breakerService, ESLogger logger) throws IOException {
+ public static IndexOrdinalsFieldData build(final IndexReader indexReader, IndexOrdinalsFieldData indexFieldData, IndexSettings indexSettings, CircuitBreakerService breakerService, Logger logger) throws IOException {
assert indexReader.leaves().size() > 1;
long startTimeNS = System.nanoTime();
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/DocValuesIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/DocValuesIndexFieldData.java
index 83bdaf221b..4621876399 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/DocValuesIndexFieldData.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/DocValuesIndexFieldData.java
@@ -19,10 +19,9 @@
package org.elasticsearch.index.fielddata.plain;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.IndexReader;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
-import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.IndexFieldData;
@@ -34,7 +33,6 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.UidFieldMapper;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
-import java.util.Map;
import java.util.Set;
import static java.util.Collections.unmodifiableSet;
@@ -45,7 +43,7 @@ public abstract class DocValuesIndexFieldData {
protected final Index index;
protected final String fieldName;
- protected final ESLogger logger;
+ protected final Logger logger;
public DocValuesIndexFieldData(Index index, String fieldName) {
super();
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyIpIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyIpIndexFieldData.java
index ffad4deeb5..feb3328227 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyIpIndexFieldData.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/LegacyIpIndexFieldData.java
@@ -18,33 +18,33 @@
*/
package org.elasticsearch.index.mapper;
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.nio.ByteBuffer;
-
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.document.InetAddressPoint;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.AtomicFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
+import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
-import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
import org.elasticsearch.search.MultiValueMode;
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.nio.ByteBuffer;
+
final class LegacyIpIndexFieldData implements IndexFieldData<AtomicFieldData> {
protected final Index index;
protected final String fieldName;
- protected final ESLogger logger;
+ protected final Logger logger;
public LegacyIpIndexFieldData(Index index, String fieldName) {
this.index = index;
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java
index 43bf505da4..2e082f78fa 100755
--- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java
@@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper;
import com.carrotsearch.hppc.ObjectHashSet;
import com.carrotsearch.hppc.cursors.ObjectCursor;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;
import org.elasticsearch.ElasticsearchGenerationException;
@@ -216,7 +217,7 @@ public class MapperService extends AbstractIndexComponent {
}
}
} catch (Exception e) {
- logger.warn("[{}] failed to add mapping [{}], source [{}]", e, index(), mappingType, mappingSource);
+ logger.warn(new ParameterizedMessage("[{}] failed to add mapping [{}], source [{}]", index(), mappingType, mappingSource), e);
throw e;
}
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/StringFieldMapper.java
index 5dc6efb28f..ec7a90148a 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/StringFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/StringFieldMapper.java
@@ -19,6 +19,7 @@
package org.elasticsearch.index.mapper;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.index.IndexOptions;
@@ -26,7 +27,6 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -187,7 +187,7 @@ public class StringFieldMapper extends FieldMapper {
private final DeprecationLogger deprecationLogger;
public TypeParser() {
- ESLogger logger = Loggers.getLogger(getClass());
+ Logger logger = Loggers.getLogger(getClass());
this.deprecationLogger = new DeprecationLogger(logger);
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java b/core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java
index e388c8ea57..eaa97ac510 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java
@@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
-import org.elasticsearch.common.Strings;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.logging.DeprecationLogger;
@@ -31,7 +30,6 @@ import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.similarity.SimilarityProvider;
-import org.elasticsearch.index.similarity.SimilarityService;
import java.util.Arrays;
import java.util.Collections;
diff --git a/core/src/main/java/org/elasticsearch/index/shard/AbstractIndexShardComponent.java b/core/src/main/java/org/elasticsearch/index/shard/AbstractIndexShardComponent.java
index c8719a610e..0e46a56248 100644
--- a/core/src/main/java/org/elasticsearch/index/shard/AbstractIndexShardComponent.java
+++ b/core/src/main/java/org/elasticsearch/index/shard/AbstractIndexShardComponent.java
@@ -19,17 +19,14 @@
package org.elasticsearch.index.shard;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.index.IndexSettings;
-/**
- *
- */
public abstract class AbstractIndexShardComponent implements IndexShardComponent {
- protected final ESLogger logger;
+ protected final Logger logger;
protected final DeprecationLogger deprecationLogger;
protected final ShardId shardId;
protected final IndexSettings indexSettings;
diff --git a/core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java b/core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java
index 3cc4ea1152..a98384cee1 100644
--- a/core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java
+++ b/core/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java
@@ -19,13 +19,13 @@
package org.elasticsearch.index.shard;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.MergeTrigger;
import org.apache.lucene.index.SegmentCommitInfo;
import org.apache.lucene.index.SegmentInfos;
import org.elasticsearch.Version;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import java.io.IOException;
@@ -46,7 +46,7 @@ import java.util.Map;
*/
public final class ElasticsearchMergePolicy extends MergePolicy {
- private static ESLogger logger = Loggers.getLogger(ElasticsearchMergePolicy.class);
+ private static Logger logger = Loggers.getLogger(ElasticsearchMergePolicy.class);
private final MergePolicy delegate;
diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java
index 6089f42a39..244ed5a2fd 100644
--- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java
+++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java
@@ -19,6 +19,7 @@
package org.elasticsearch.index.shard;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.index.CheckIndex;
import org.apache.lucene.index.CorruptIndexException;
@@ -53,7 +54,6 @@ import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.metrics.MeanMetric;
@@ -134,6 +134,7 @@ import java.nio.file.NoSuchFileException;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
@@ -1779,7 +1780,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
private class IndexShardRecoveryPerformer extends TranslogRecoveryPerformer {
- protected IndexShardRecoveryPerformer(ShardId shardId, MapperService mapperService, ESLogger logger) {
+ protected IndexShardRecoveryPerformer(ShardId shardId, MapperService mapperService, Logger logger) {
super(shardId, mapperService, logger);
}
diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShardOperationsLock.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShardOperationsLock.java
index a0f2714a3a..cde14dec17 100644
--- a/core/src/main/java/org/elasticsearch/index/shard/IndexShardOperationsLock.java
+++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShardOperationsLock.java
@@ -18,11 +18,11 @@
*/
package org.elasticsearch.index.shard;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ThreadedActionListener;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lease.Releasable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.Closeable;
@@ -35,7 +35,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
public class IndexShardOperationsLock implements Closeable {
private final ShardId shardId;
- private final ESLogger logger;
+ private final Logger logger;
private final ThreadPool threadPool;
private static final int TOTAL_PERMITS = Integer.MAX_VALUE;
@@ -44,7 +44,7 @@ public class IndexShardOperationsLock implements Closeable {
@Nullable private List<ActionListener<Releasable>> delayedOperations; // operations that are delayed due to relocation hand-off
private volatile boolean closed;
- public IndexShardOperationsLock(ShardId shardId, ESLogger logger, ThreadPool threadPool) {
+ public IndexShardOperationsLock(ShardId shardId, Logger logger, ThreadPool threadPool) {
this.shardId = shardId;
this.logger = logger;
this.threadPool = threadPool;
diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexingOperationListener.java b/core/src/main/java/org/elasticsearch/index/shard/IndexingOperationListener.java
index 13ff87d418..de3af64dd4 100644
--- a/core/src/main/java/org/elasticsearch/index/shard/IndexingOperationListener.java
+++ b/core/src/main/java/org/elasticsearch/index/shard/IndexingOperationListener.java
@@ -18,7 +18,8 @@
*/
package org.elasticsearch.index.shard;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.index.engine.Engine;
import java.util.List;
@@ -68,9 +69,9 @@ public interface IndexingOperationListener {
*/
final class CompositeListener implements IndexingOperationListener{
private final List<IndexingOperationListener> listeners;
- private final ESLogger logger;
+ private final Logger logger;
- public CompositeListener(List<IndexingOperationListener> listeners, ESLogger logger) {
+ public CompositeListener(List<IndexingOperationListener> listeners, Logger logger) {
this.listeners = listeners;
this.logger = logger;
}
@@ -82,7 +83,7 @@ public interface IndexingOperationListener {
try {
listener.preIndex(operation);
} catch (Exception e) {
- logger.warn("preIndex listener [{}] failed", e, listener);
+ logger.warn(new ParameterizedMessage("preIndex listener [{}] failed", listener), e);
}
}
return operation;
@@ -95,7 +96,7 @@ public interface IndexingOperationListener {
try {
listener.postIndex(index, created);
} catch (Exception e) {
- logger.warn("postIndex listener [{}] failed", e, listener);
+ logger.warn(new ParameterizedMessage("postIndex listener [{}] failed", listener), e);
}
}
}
@@ -108,7 +109,7 @@ public interface IndexingOperationListener {
listener.postIndex(index, ex);
} catch (Exception inner) {
inner.addSuppressed(ex);
- logger.warn("postIndex listener [{}] failed", inner, listener);
+ logger.warn(new ParameterizedMessage("postIndex listener [{}] failed", listener), inner);
}
}
}
@@ -120,7 +121,7 @@ public interface IndexingOperationListener {
try {
listener.preDelete(delete);
} catch (Exception e) {
- logger.warn("preDelete listener [{}] failed", e, listener);
+ logger.warn(new ParameterizedMessage("preDelete listener [{}] failed", listener), e);
}
}
return delete;
@@ -133,7 +134,7 @@ public interface IndexingOperationListener {
try {
listener.postDelete(delete);
} catch (Exception e) {
- logger.warn("postDelete listener [{}] failed", e, listener);
+ logger.warn(new ParameterizedMessage("postDelete listener [{}] failed", listener), e);
}
}
}
@@ -146,7 +147,7 @@ public interface IndexingOperationListener {
listener.postDelete(delete, ex);
} catch (Exception inner) {
inner.addSuppressed(ex);
- logger.warn("postDelete listener [{}] failed", inner, listener);
+ logger.warn(new ParameterizedMessage("postDelete listener [{}] failed", listener), inner);
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/index/shard/RefreshListeners.java b/core/src/main/java/org/elasticsearch/index/shard/RefreshListeners.java
index 76352e79bb..ca94f1ea96 100644
--- a/core/src/main/java/org/elasticsearch/index/shard/RefreshListeners.java
+++ b/core/src/main/java/org/elasticsearch/index/shard/RefreshListeners.java
@@ -19,9 +19,9 @@
package org.elasticsearch.index.shard;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.search.ReferenceManager;
import org.elasticsearch.common.collect.Tuple;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.index.translog.Translog;
import java.io.IOException;
@@ -41,7 +41,7 @@ public final class RefreshListeners implements ReferenceManager.RefreshListener
private final IntSupplier getMaxRefreshListeners;
private final Runnable forceRefresh;
private final Executor listenerExecutor;
- private final ESLogger logger;
+ private final Logger logger;
/**
* List of refresh listeners. Defaults to null and built on demand because most refresh cycles won't need it. Entries are never removed
@@ -54,7 +54,7 @@ public final class RefreshListeners implements ReferenceManager.RefreshListener
*/
private volatile Translog.Location lastRefreshedLocation;
- public RefreshListeners(IntSupplier getMaxRefreshListeners, Runnable forceRefresh, Executor listenerExecutor, ESLogger logger) {
+ public RefreshListeners(IntSupplier getMaxRefreshListeners, Runnable forceRefresh, Executor listenerExecutor, Logger logger) {
this.getMaxRefreshListeners = getMaxRefreshListeners;
this.forceRefresh = forceRefresh;
this.listenerExecutor = listenerExecutor;
diff --git a/core/src/main/java/org/elasticsearch/index/shard/SearchOperationListener.java b/core/src/main/java/org/elasticsearch/index/shard/SearchOperationListener.java
index 5a4ac1297f..c0c3deec2c 100644
--- a/core/src/main/java/org/elasticsearch/index/shard/SearchOperationListener.java
+++ b/core/src/main/java/org/elasticsearch/index/shard/SearchOperationListener.java
@@ -18,7 +18,8 @@
*/
package org.elasticsearch.index.shard;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.search.internal.SearchContext;
import java.util.List;
@@ -107,9 +108,9 @@ public interface SearchOperationListener {
*/
final class CompositeListener implements SearchOperationListener {
private final List<SearchOperationListener> listeners;
- private final ESLogger logger;
+ private final Logger logger;
- public CompositeListener(List<SearchOperationListener> listeners, ESLogger logger) {
+ public CompositeListener(List<SearchOperationListener> listeners, Logger logger) {
this.listeners = listeners;
this.logger = logger;
}
@@ -120,7 +121,7 @@ public interface SearchOperationListener {
try {
listener.onPreQueryPhase(searchContext);
} catch (Exception e) {
- logger.warn("onPreQueryPhase listener [{}] failed", e, listener);
+ logger.warn(new ParameterizedMessage("onPreQueryPhase listener [{}] failed", listener), e);
}
}
}
@@ -131,7 +132,7 @@ public interface SearchOperationListener {
try {
listener.onFailedQueryPhase(searchContext);
} catch (Exception e) {
- logger.warn("onFailedQueryPhase listener [{}] failed", e, listener);
+ logger.warn(new ParameterizedMessage("onFailedQueryPhase listener [{}] failed", listener), e);
}
}
}
@@ -142,7 +143,7 @@ public interface SearchOperationListener {
try {
listener.onQueryPhase(searchContext, tookInNanos);
} catch (Exception e) {
- logger.warn("onQueryPhase listener [{}] failed", e, listener);
+ logger.warn(new ParameterizedMessage("onQueryPhase listener [{}] failed", listener), e);
}
}
}
@@ -153,7 +154,7 @@ public interface SearchOperationListener {
try {
listener.onPreFetchPhase(searchContext);
} catch (Exception e) {
- logger.warn("onPreFetchPhase listener [{}] failed", e, listener);
+ logger.warn(new ParameterizedMessage("onPreFetchPhase listener [{}] failed", listener), e);
}
}
}
@@ -164,7 +165,7 @@ public interface SearchOperationListener {
try {
listener.onFailedFetchPhase(searchContext);
} catch (Exception e) {
- logger.warn("onFailedFetchPhase listener [{}] failed", e, listener);
+ logger.warn(new ParameterizedMessage("onFailedFetchPhase listener [{}] failed", listener), e);
}
}
}
@@ -175,7 +176,7 @@ public interface SearchOperationListener {
try {
listener.onFetchPhase(searchContext, tookInNanos);
} catch (Exception e) {
- logger.warn("onFetchPhase listener [{}] failed", e, listener);
+ logger.warn(new ParameterizedMessage("onFetchPhase listener [{}] failed", listener), e);
}
}
}
@@ -186,7 +187,7 @@ public interface SearchOperationListener {
try {
listener.onNewContext(context);
} catch (Exception e) {
- logger.warn("onNewContext listener [{}] failed", e, listener);
+ logger.warn(new ParameterizedMessage("onNewContext listener [{}] failed", listener), e);
}
}
}
@@ -197,7 +198,7 @@ public interface SearchOperationListener {
try {
listener.onFreeContext(context);
} catch (Exception e) {
- logger.warn("onFreeContext listener [{}] failed", e, listener);
+ logger.warn(new ParameterizedMessage("onFreeContext listener [{}] failed", listener), e);
}
}
}
@@ -208,7 +209,7 @@ public interface SearchOperationListener {
try {
listener.onNewScrollContext(context);
} catch (Exception e) {
- logger.warn("onNewScrollContext listener [{}] failed", e, listener);
+ logger.warn(new ParameterizedMessage("onNewScrollContext listener [{}] failed", listener), e);
}
}
}
@@ -219,7 +220,7 @@ public interface SearchOperationListener {
try {
listener.onFreeScrollContext(context);
} catch (Exception e) {
- logger.warn("onFreeScrollContext listener [{}] failed", e, listener);
+ logger.warn(new ParameterizedMessage("onFreeScrollContext listener [{}] failed", listener), e);
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/index/shard/ShardPath.java b/core/src/main/java/org/elasticsearch/index/shard/ShardPath.java
index 154619951f..23b17c290f 100644
--- a/core/src/main/java/org/elasticsearch/index/shard/ShardPath.java
+++ b/core/src/main/java/org/elasticsearch/index/shard/ShardPath.java
@@ -18,9 +18,9 @@
*/
package org.elasticsearch.index.shard;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.env.ShardLock;
import org.elasticsearch.index.IndexSettings;
@@ -108,7 +108,7 @@ public final class ShardPath {
* directories with a valid shard state exist the one with the highest version will be used.
* <b>Note:</b> this method resolves custom data locations for the shard.
*/
- public static ShardPath loadShardPath(ESLogger logger, NodeEnvironment env, ShardId shardId, IndexSettings indexSettings) throws IOException {
+ public static ShardPath loadShardPath(Logger logger, NodeEnvironment env, ShardId shardId, IndexSettings indexSettings) throws IOException {
final String indexUUID = indexSettings.getUUID();
final Path[] paths = env.availableShardPaths(shardId);
Path loadedPath = null;
@@ -146,7 +146,7 @@ public final class ShardPath {
* This method tries to delete left-over shards where the index name has been reused but the UUID is different
* to allow the new shard to be allocated.
*/
- public static void deleteLeftoverShardDirectory(ESLogger logger, NodeEnvironment env, ShardLock lock, IndexSettings indexSettings) throws IOException {
+ public static void deleteLeftoverShardDirectory(Logger logger, NodeEnvironment env, ShardLock lock, IndexSettings indexSettings) throws IOException {
final String indexUUID = indexSettings.getUUID();
final Path[] paths = env.availableShardPaths(lock.getShardId());
for (Path path : paths) {
diff --git a/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java b/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java
index a986e30f2d..6ff897a8c8 100644
--- a/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java
+++ b/core/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java
@@ -20,6 +20,7 @@
package org.elasticsearch.index.shard;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.NoMergePolicy;
@@ -33,7 +34,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.routing.RecoverySource;
import org.elasticsearch.cluster.routing.RecoverySource.SnapshotRecoverySource;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
@@ -62,10 +62,10 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
*/
final class StoreRecovery {
- private final ESLogger logger;
+ private final Logger logger;
private final ShardId shardId;
- StoreRecovery(ShardId shardId, ESLogger logger) {
+ StoreRecovery(ShardId shardId, Logger logger) {
this.logger = logger;
this.shardId = shardId;
}
diff --git a/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java b/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java
index 78628a02c4..2771bab29b 100644
--- a/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java
+++ b/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java
@@ -18,10 +18,10 @@
*/
package org.elasticsearch.index.shard;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.IgnoreOnRecoveryEngineException;
import org.elasticsearch.index.mapper.DocumentMapperForType;
@@ -44,11 +44,11 @@ import static org.elasticsearch.index.mapper.SourceToParse.source;
*/
public class TranslogRecoveryPerformer {
private final MapperService mapperService;
- private final ESLogger logger;
+ private final Logger logger;
private final Map<String, Mapping> recoveredTypes = new HashMap<>();
private final ShardId shardId;
- protected TranslogRecoveryPerformer(ShardId shardId, MapperService mapperService, ESLogger logger) {
+ protected TranslogRecoveryPerformer(ShardId shardId, MapperService mapperService, Logger logger) {
this.shardId = shardId;
this.mapperService = mapperService;
this.logger = logger;
diff --git a/core/src/main/java/org/elasticsearch/index/store/IndexStore.java b/core/src/main/java/org/elasticsearch/index/store/IndexStore.java
index 9e01d87176..783bd9af58 100644
--- a/core/src/main/java/org/elasticsearch/index/store/IndexStore.java
+++ b/core/src/main/java/org/elasticsearch/index/store/IndexStore.java
@@ -26,9 +26,7 @@ import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.shard.ShardPath;
-/**
- *
- */
+
public class IndexStore extends AbstractIndexComponent {
public static final Setting<IndexRateLimitingType> INDEX_STORE_THROTTLE_TYPE_SETTING =
new Setting<>("index.store.throttle.type", "none", IndexRateLimitingType::fromString,
diff --git a/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java b/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java
index 12558bb955..ff1f624070 100644
--- a/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java
+++ b/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java
@@ -18,8 +18,8 @@
*/
package org.elasticsearch.index.store;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.store.StoreRateLimiting;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
@@ -49,7 +49,7 @@ public class IndexStoreConfig {
private volatile StoreRateLimiting.Type rateLimitingType;
private volatile ByteSizeValue rateLimitingThrottle;
private final StoreRateLimiting rateLimiting = new StoreRateLimiting();
- private final ESLogger logger;
+ private final Logger logger;
public IndexStoreConfig(Settings settings) {
logger = Loggers.getLogger(IndexStoreConfig.class, settings);
// we don't limit by default (we default to CMS's auto throttle instead):
diff --git a/core/src/main/java/org/elasticsearch/index/store/Store.java b/core/src/main/java/org/elasticsearch/index/store/Store.java
index ff3d89d9ff..f4ab3a9d1b 100644
--- a/core/src/main/java/org/elasticsearch/index/store/Store.java
+++ b/core/src/main/java/org/elasticsearch/index/store/Store.java
@@ -19,6 +19,8 @@
package org.elasticsearch.index.store;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexCommit;
@@ -54,7 +56,6 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.store.ByteArrayIndexInput;
@@ -217,7 +218,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
* Note that this method requires the caller verify it has the right to access the store and
* no concurrent file changes are happening. If in doubt, you probably want to use one of the following:
*
- * {@link #readMetadataSnapshot(Path, ShardId, NodeEnvironment.ShardLocker, ESLogger)} to read a meta data while locking
+ * {@link #readMetadataSnapshot(Path, ShardId, NodeEnvironment.ShardLocker, Logger)} to read a meta data while locking
* {@link IndexShard#snapshotStoreMetadata()} to safely read from an existing shard
* {@link IndexShard#acquireIndexCommit(boolean)} to get an {@link IndexCommit} which is safe to use but has to be freed
*
@@ -279,7 +280,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
directory.deleteFile(origFile);
} catch (FileNotFoundException | NoSuchFileException e) {
} catch (Exception ex) {
- logger.debug("failed to delete file [{}]", ex, origFile);
+ logger.debug(new ParameterizedMessage("failed to delete file [{}]", origFile), ex);
}
// now, rename the files... and fail it it won't work
directory.rename(tempFile, origFile);
@@ -376,7 +377,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
* @throws IOException if the index we try to read is corrupted
*/
public static MetadataSnapshot readMetadataSnapshot(Path indexLocation, ShardId shardId, NodeEnvironment.ShardLocker shardLocker,
- ESLogger logger) throws IOException {
+ Logger logger) throws IOException {
try (ShardLock lock = shardLocker.lock(shardId, TimeUnit.SECONDS.toMillis(5));
Directory dir = new SimpleFSDirectory(indexLocation)) {
failIfCorrupted(dir, shardId);
@@ -396,11 +397,11 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
* can be successfully opened. This includes reading the segment infos and possible
* corruption markers.
*/
- public static boolean canOpenIndex(ESLogger logger, Path indexLocation, ShardId shardId, NodeEnvironment.ShardLocker shardLocker) throws IOException {
+ public static boolean canOpenIndex(Logger logger, Path indexLocation, ShardId shardId, NodeEnvironment.ShardLocker shardLocker) throws IOException {
try {
tryOpenIndex(indexLocation, shardId, shardLocker, logger);
} catch (Exception ex) {
- logger.trace("Can't open index for path [{}]", ex, indexLocation);
+ logger.trace(new ParameterizedMessage("Can't open index for path [{}]", indexLocation), ex);
return false;
}
return true;
@@ -411,7 +412,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
* segment infos and possible corruption markers. If the index can not
* be opened, an exception is thrown
*/
- public static void tryOpenIndex(Path indexLocation, ShardId shardId, NodeEnvironment.ShardLocker shardLocker, ESLogger logger) throws IOException {
+ public static void tryOpenIndex(Path indexLocation, ShardId shardId, NodeEnvironment.ShardLocker shardLocker, Logger logger) throws IOException {
try (ShardLock lock = shardLocker.lock(shardId, TimeUnit.SECONDS.toMillis(5));
Directory dir = new SimpleFSDirectory(indexLocation)) {
failIfCorrupted(dir, shardId);
@@ -605,7 +606,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
// if one of those files can't be deleted we better fail the cleanup otherwise we might leave an old commit point around?
throw new IllegalStateException("Can't delete " + existingFile + " - cleanup failed", ex);
}
- logger.debug("failed to delete file [{}]", ex, existingFile);
+ logger.debug(new ParameterizedMessage("failed to delete file [{}]", existingFile), ex);
// ignore, we don't really care, will get deleted later on
}
}
@@ -652,9 +653,9 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
static final class StoreDirectory extends FilterDirectory {
- private final ESLogger deletesLogger;
+ private final Logger deletesLogger;
- StoreDirectory(Directory delegateDirectory, ESLogger deletesLogger) throws IOException {
+ StoreDirectory(Directory delegateDirectory, Logger deletesLogger) throws IOException {
super(delegateDirectory);
this.deletesLogger = deletesLogger;
}
@@ -717,7 +718,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
numDocs = 0;
}
- MetadataSnapshot(IndexCommit commit, Directory directory, ESLogger logger) throws IOException {
+ MetadataSnapshot(IndexCommit commit, Directory directory, Logger logger) throws IOException {
LoadedMetadata loadedMetadata = loadMetadata(commit, directory, logger);
metadata = loadedMetadata.fileMetadata;
commitUserData = loadedMetadata.userData;
@@ -780,7 +781,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
}
}
- static LoadedMetadata loadMetadata(IndexCommit commit, Directory directory, ESLogger logger) throws IOException {
+ static LoadedMetadata loadMetadata(IndexCommit commit, Directory directory, Logger logger) throws IOException {
long numDocs;
Map<String, StoreFileMetaData> builder = new HashMap<>();
Map<String, String> commitUserDataBuilder = new HashMap<>();
@@ -823,8 +824,8 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
// Lucene checks the checksum after it tries to lookup the codec etc.
// in that case we might get only IAE or similar exceptions while we are really corrupt...
// TODO we should check the checksum in lucene if we hit an exception
- logger.warn("failed to build store metadata. checking segment info integrity (with commit [{}])",
- ex, commit == null ? "no" : "yes");
+ logger.warn(new ParameterizedMessage(
+ "failed to build store metadata. checking segment info integrity (with commit [{}])", commit == null ? "no" : "yes"), ex);
Lucene.checkSegmentInfoIntegrity(directory);
} catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException cex) {
cex.addSuppressed(ex);
@@ -839,7 +839,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
}
private static void checksumFromLuceneFile(Directory directory, String file, Map<String, StoreFileMetaData> builder,
- ESLogger logger, Version version, boolean readFileAsHash) throws IOException {
+ Logger logger, Version version, boolean readFileAsHash) throws IOException {
final String checksum;
final BytesRefBuilder fileHash = new BytesRefBuilder();
try (final IndexInput in = directory.openInput(file, IOContext.READONCE)) {
@@ -859,7 +859,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
}
} catch (Exception ex) {
- logger.debug("Can retrieve checksum from file [{}]", ex, file);
+ logger.debug(new ParameterizedMessage("Can't retrieve checksum from file [{}]", file), ex);
throw ex;
}
builder.put(file, new StoreFileMetaData(file, length, checksum, version, fileHash.get()));
diff --git a/core/src/main/java/org/elasticsearch/index/translog/Translog.java b/core/src/main/java/org/elasticsearch/index/translog/Translog.java
index 68a1dd1aa3..3a76c99a90 100644
--- a/core/src/main/java/org/elasticsearch/index/translog/Translog.java
+++ b/core/src/main/java/org/elasticsearch/index/translog/Translog.java
@@ -19,6 +19,7 @@
package org.elasticsearch.index.translog;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TwoPhaseCommit;
import org.apache.lucene.store.AlreadyClosedException;
@@ -257,7 +258,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
try {
Files.delete(tempFile);
} catch (IOException ex) {
- logger.warn("failed to delete temp file {}", ex, tempFile);
+ logger.warn(new ParameterizedMessage("failed to delete temp file {}", tempFile), ex);
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogToolCli.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogToolCli.java
index eaf50f25a0..a25e5d4cc7 100644
--- a/core/src/main/java/org/elasticsearch/index/translog/TranslogToolCli.java
+++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogToolCli.java
@@ -21,10 +21,8 @@ package org.elasticsearch.index.translog;
import org.elasticsearch.cli.MultiCommand;
import org.elasticsearch.cli.Terminal;
-import org.elasticsearch.common.logging.LogConfigurator;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
-import org.elasticsearch.index.translog.TruncateTranslogCommand;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
/**
@@ -49,7 +47,7 @@ public class TranslogToolCli extends MultiCommand {
.put("rootLogger", "${logger.level}, terminal")
.put("logger.level", loggerLevel)
.build(), Terminal.DEFAULT);
- LogConfigurator.configure(loggingEnvironment.settings(), false);
+ // LogConfigurator.configure(loggingEnvironment.settings(), false); // TODO: restore once LogConfigurator is ported to Log4j 2
exit(new TranslogToolCli().main(args, Terminal.DEFAULT));
}
diff --git a/core/src/main/java/org/elasticsearch/index/warmer/ShardIndexWarmerService.java b/core/src/main/java/org/elasticsearch/index/warmer/ShardIndexWarmerService.java
index 57decb25f5..d10a951937 100644
--- a/core/src/main/java/org/elasticsearch/index/warmer/ShardIndexWarmerService.java
+++ b/core/src/main/java/org/elasticsearch/index/warmer/ShardIndexWarmerService.java
@@ -19,7 +19,7 @@
package org.elasticsearch.index.warmer;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.metrics.CounterMetric;
import org.elasticsearch.common.metrics.MeanMetric;
import org.elasticsearch.index.IndexSettings;
@@ -28,8 +28,6 @@ import org.elasticsearch.index.shard.ShardId;
import java.util.concurrent.TimeUnit;
-/**
- */
public class ShardIndexWarmerService extends AbstractIndexShardComponent {
private final CounterMetric current = new CounterMetric();
@@ -39,7 +37,7 @@ public class ShardIndexWarmerService extends AbstractIndexShardComponent {
super(shardId, indexSettings);
}
- public ESLogger logger() {
+ public Logger logger() {
return this.logger;
}
diff --git a/core/src/main/java/org/elasticsearch/indices/IndexingMemoryController.java b/core/src/main/java/org/elasticsearch/indices/IndexingMemoryController.java
index 2e82e81959..73d0f0eb82 100644
--- a/core/src/main/java/org/elasticsearch/indices/IndexingMemoryController.java
+++ b/core/src/main/java/org/elasticsearch/indices/IndexingMemoryController.java
@@ -19,6 +19,7 @@
package org.elasticsearch.indices;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
@@ -177,7 +178,7 @@ public class IndexingMemoryController extends AbstractComponent implements Index
@Override
public void onFailure(Exception e) {
- logger.warn("failed to write indexing buffer for shard [{}]; ignoring", e, shard.shardId());
+ logger.warn(new ParameterizedMessage("failed to write indexing buffer for shard [{}]; ignoring", shard.shardId()), e);
}
});
}
diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java
index b951eaefaa..880b80aff7 100644
--- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java
+++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java
@@ -20,6 +20,8 @@
package org.elasticsearch.indices;
import com.carrotsearch.hppc.cursors.ObjectCursor;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.util.CollectionUtil;
@@ -49,7 +51,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.lease.Releasable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.IndexScopedSettings;
import org.elasticsearch.common.settings.Setting;
@@ -218,7 +219,7 @@ public class IndicesService extends AbstractLifecycleComponent
try {
removeIndex(index, "shutdown", false);
} catch (Exception e) {
- logger.warn("failed to remove index on stop [{}]", e, index);
+ logger.warn(new ParameterizedMessage("failed to remove index on stop [{}]", index), e);
} finally {
latch.countDown();
}
@@ -296,7 +297,7 @@ public class IndicesService extends AbstractLifecycleComponent
}
} catch (IllegalIndexShardStateException e) {
// we can safely ignore illegal state on ones that are closing for example
- logger.trace("{} ignoring shard stats", e, indexShard.shardId());
+ logger.trace(new ParameterizedMessage("{} ignoring shard stats", indexShard.shardId()), e);
}
}
}
@@ -474,7 +475,7 @@ public class IndicesService extends AbstractLifecycleComponent
try {
removeIndex(index, reason, false);
} catch (Exception e) {
- logger.warn("failed to remove index ({})", e, reason);
+ logger.warn(new ParameterizedMessage("failed to remove index ({})", reason), e);
}
}
@@ -565,7 +566,7 @@ public class IndicesService extends AbstractLifecycleComponent
try {
removeIndex(index, reason, true);
} catch (Exception e) {
- logger.warn("failed to delete index ({})", e, reason);
+ logger.warn(new ParameterizedMessage("failed to delete index ({})", reason), e);
}
}
@@ -585,7 +586,7 @@ public class IndicesService extends AbstractLifecycleComponent
}
deleteIndexStore(reason, metaData, clusterState);
} catch (IOException e) {
- logger.warn("[{}] failed to delete unassigned index (reason [{}])", e, metaData.getIndex(), reason);
+ logger.warn(new ParameterizedMessage("[{}] failed to delete unassigned index (reason [{}])", metaData.getIndex(), reason), e);
}
}
}
@@ -637,9 +638,9 @@ public class IndicesService extends AbstractLifecycleComponent
}
success = true;
} catch (LockObtainFailedException ex) {
- logger.debug("{} failed to delete index store - at least one shards is still locked", ex, index);
+ logger.debug(new ParameterizedMessage("{} failed to delete index store - at least one shard is still locked", index), ex);
} catch (Exception ex) {
- logger.warn("{} failed to delete index", ex, index);
+ logger.warn(new ParameterizedMessage("{} failed to delete index", index), ex);
} finally {
if (success == false) {
addPendingDelete(index, indexSettings);
@@ -746,7 +747,7 @@ public class IndicesService extends AbstractLifecycleComponent
try {
metaData = metaStateService.loadIndexState(index);
} catch (IOException e) {
- logger.warn("[{}] failed to load state file from a stale deleted index, folders will be left on disk", e, index);
+ logger.warn(new ParameterizedMessage("[{}] failed to load state file from a stale deleted index, folders will be left on disk", index), e);
return null;
}
final IndexSettings indexSettings = buildIndexSettings(metaData);
@@ -755,7 +756,7 @@ public class IndicesService extends AbstractLifecycleComponent
} catch (IOException e) {
// we just warn about the exception here because if deleteIndexStoreIfDeletionAllowed
// throws an exception, it gets added to the list of pending deletes to be tried again
- logger.warn("[{}] failed to delete index on disk", e, metaData.getIndex());
+ logger.warn(new ParameterizedMessage("[{}] failed to delete index on disk", metaData.getIndex()), e);
}
return metaData;
}
@@ -927,7 +928,7 @@ public class IndicesService extends AbstractLifecycleComponent
nodeEnv.deleteIndexDirectoryUnderLock(index, indexSettings);
iterator.remove();
} catch (IOException ex) {
- logger.debug("{} retry pending delete", ex, index);
+ logger.debug(new ParameterizedMessage("{} retry pending delete", index), ex);
}
} else {
assert delete.shardId != -1;
@@ -937,7 +938,7 @@ public class IndicesService extends AbstractLifecycleComponent
deleteShardStore("pending delete", shardLock, delete.settings);
iterator.remove();
} catch (IOException ex) {
- logger.debug("{} retry pending delete", ex, shardLock.getShardId());
+ logger.debug(new ParameterizedMessage("{} retry pending delete", shardLock.getShardId()), ex);
}
} else {
logger.warn("{} no shard lock for pending delete", delete.shardId);
@@ -1000,13 +1001,13 @@ public class IndicesService extends AbstractLifecycleComponent
private static final class CacheCleaner implements Runnable, Releasable {
private final IndicesFieldDataCache cache;
- private final ESLogger logger;
+ private final Logger logger;
private final ThreadPool threadPool;
private final TimeValue interval;
private final AtomicBoolean closed = new AtomicBoolean(false);
private final IndicesRequestCache requestCache;
- public CacheCleaner(IndicesFieldDataCache cache, IndicesRequestCache requestCache, ESLogger logger, ThreadPool threadPool, TimeValue interval) {
+ public CacheCleaner(IndicesFieldDataCache cache, IndicesRequestCache requestCache, Logger logger, ThreadPool threadPool, TimeValue interval) {
this.cache = cache;
this.requestCache = requestCache;
this.logger = logger;
diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java
index f3812f6900..2f0ed23194 100644
--- a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java
+++ b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.indices.analysis;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.analysis.hunspell.Dictionary;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.SimpleFSDirectory;
@@ -138,7 +139,7 @@ public class HunspellService extends AbstractComponent {
} catch (Exception e) {
// The cache loader throws unchecked exception (see #loadDictionary()),
// here we simply report the exception and continue loading the dictionaries
- logger.error("exception while loading dictionary {}", e, file.getFileName());
+ logger.error(new ParameterizedMessage("exception while loading dictionary {}", file.getFileName()), e);
}
}
}
@@ -196,7 +197,7 @@ public class HunspellService extends AbstractComponent {
}
} catch (Exception e) {
- logger.error("Could not load hunspell dictionary [{}]", e, locale);
+ logger.error(new ParameterizedMessage("Could not load hunspell dictionary [{}]", locale), e);
throw e;
} finally {
IOUtils.close(affixStream);
diff --git a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java
index eb1d9f07dc..1b73be4e09 100644
--- a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java
+++ b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java
@@ -19,6 +19,8 @@
package org.elasticsearch.indices.cluster;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.store.LockObtainFailedException;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
@@ -37,7 +39,6 @@ import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
@@ -269,7 +270,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent imple
threadPool.generic().execute(new AbstractRunnable() {
@Override
public void onFailure(Exception e) {
- logger.warn("[{}] failed to complete pending deletion for index", e, index);
+ logger.warn(new ParameterizedMessage("[{}] failed to complete pending deletion for index", index), e);
}
@Override
@@ -559,7 +560,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent imple
* routing to *require* peer recovery, use {@link ShardRouting#recoverySource()} to
* check if its needed or not.
*/
- private static DiscoveryNode findSourceNodeForPeerRecovery(ESLogger logger, RoutingTable routingTable, DiscoveryNodes nodes,
+ private static DiscoveryNode findSourceNodeForPeerRecovery(Logger logger, RoutingTable routingTable, DiscoveryNodes nodes,
ShardRouting shardRouting) {
DiscoveryNode sourceNode = null;
if (!shardRouting.primary()) {
@@ -637,11 +638,12 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent imple
} catch (Exception inner) {
inner.addSuppressed(failure);
logger.warn(
- "[{}][{}] failed to remove shard after failure ([{}])",
- inner,
- shardRouting.getIndexName(),
- shardRouting.getId(),
- message);
+ new ParameterizedMessage(
+ "[{}][{}] failed to remove shard after failure ([{}])",
+ shardRouting.getIndexName(),
+ shardRouting.getId(),
+ message),
+ inner);
}
if (sendShardFailure) {
sendFailShard(shardRouting, message, failure);
@@ -650,17 +652,19 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent imple
private void sendFailShard(ShardRouting shardRouting, String message, @Nullable Exception failure) {
try {
- logger.warn("[{}] marking and sending shard failed due to [{}]", failure, shardRouting.shardId(), message);
+ logger.warn(
+ new ParameterizedMessage("[{}] marking and sending shard failed due to [{}]", shardRouting.shardId(), message), failure);
failedShardsCache.put(shardRouting.shardId(), shardRouting);
shardStateAction.localShardFailed(shardRouting, message, failure, SHARD_STATE_ACTION_LISTENER);
} catch (Exception inner) {
if (failure != null) inner.addSuppressed(failure);
logger.warn(
+ new ParameterizedMessage(
"[{}][{}] failed to mark shard as failed (because of [{}])",
- inner,
shardRouting.getIndexName(),
shardRouting.getId(),
- message);
+ message),
+ inner);
}
}
diff --git a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java
index 3ab18dd1bd..81e9f3fac5 100644
--- a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java
+++ b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java
@@ -19,6 +19,7 @@
package org.elasticsearch.indices.fielddata.cache;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
@@ -31,7 +32,6 @@ import org.elasticsearch.common.cache.RemovalListener;
import org.elasticsearch.common.cache.RemovalNotification;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.lease.Releasable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
@@ -108,13 +108,13 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL
* A specific cache instance for the relevant parameters of it (index, fieldNames, fieldType).
*/
static class IndexFieldCache implements IndexFieldDataCache, SegmentReader.CoreClosedListener, IndexReader.ReaderClosedListener {
- private final ESLogger logger;
+ private final Logger logger;
final Index index;
final String fieldName;
private final Cache<Key, Accountable> cache;
private final Listener[] listeners;
- IndexFieldCache(ESLogger logger,final Cache<Key, Accountable> cache, Index index, String fieldName, Listener... listeners) {
+ IndexFieldCache(Logger logger,final Cache<Key, Accountable> cache, Index index, String fieldName, Listener... listeners) {
this.logger = logger;
this.listeners = listeners;
this.index = index;
diff --git a/core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java b/core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java
index 236b376871..f5c208b39b 100644
--- a/core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java
+++ b/core/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.indices.flush;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.flush.FlushRequest;
@@ -50,12 +51,12 @@ import org.elasticsearch.index.shard.ShardNotFoundException;
import org.elasticsearch.indices.IndexClosedException;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestHandler;
import org.elasticsearch.transport.TransportResponse;
+import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
@@ -100,7 +101,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL
@Override
public void onFailure(Exception e) {
- logger.debug("{} sync flush on inactive shard failed", e, indexShard.shardId());
+ logger.debug(new ParameterizedMessage("{} sync flush on inactive shard failed", indexShard.shardId()), e);
}
});
}
@@ -335,7 +336,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL
@Override
public void handleException(TransportException exp) {
- logger.trace("{} error while performing synced flush on [{}], skipping", exp, shardId, shard);
+ logger.trace(new ParameterizedMessage("{} error while performing synced flush on [{}], skipping", shardId, shard), exp);
results.put(shard, new ShardSyncedFlushResponse(exp.getMessage()));
contDownAndSendResponseIfDone(syncId, shards, shardId, totalShards, listener, countDown, results);
}
@@ -391,7 +392,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL
@Override
public void handleException(TransportException exp) {
- logger.trace("{} error while performing pre synced flush on [{}], skipping", exp, shardId, shard);
+ logger.trace(new ParameterizedMessage("{} error while performing pre synced flush on [{}], skipping", shardId, shard), exp);
if (countDown.countDown()) {
listener.onResponse(commitIds);
}
diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java b/core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java
index 37fe07e16a..f5a0e1cfd7 100644
--- a/core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java
+++ b/core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java
@@ -19,6 +19,7 @@
package org.elasticsearch.indices.recovery;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.RateLimiter;
import org.elasticsearch.ElasticsearchException;
@@ -141,7 +142,7 @@ public class PeerRecoveryTargetService extends AbstractComponent implements Inde
protected void retryRecovery(final RecoveryTarget recoveryTarget, final Throwable reason, TimeValue retryAfter, final
StartRecoveryRequest currentRequest) {
- logger.trace("will retry recovery with id [{}] in [{}]", reason, recoveryTarget.recoveryId(), retryAfter);
+ logger.trace(new ParameterizedMessage("will retry recovery with id [{}] in [{}]", recoveryTarget.recoveryId(), retryAfter), reason);
retryRecovery(recoveryTarget, retryAfter, currentRequest);
}
@@ -233,7 +234,12 @@ public class PeerRecoveryTargetService extends AbstractComponent implements Inde
logger.trace("recovery cancelled", e);
} catch (Exception e) {
if (logger.isTraceEnabled()) {
- logger.trace("[{}][{}] Got exception on recovery", e, request.shardId().getIndex().getName(), request.shardId().id());
+ logger.trace(
+ new ParameterizedMessage(
+ "[{}][{}] Got exception on recovery",
+ request.shardId().getIndex().getName(),
+ request.shardId().id()),
+ e);
}
Throwable cause = ExceptionsHelper.unwrapCause(e);
if (cause instanceof CancellableThreads.ExecutionCancelledException) {
@@ -345,8 +351,11 @@ public class PeerRecoveryTargetService extends AbstractComponent implements Inde
// which causes local mapping changes since the mapping (clusterstate) might not have arrived on this node.
// we want to wait until these mappings are processed but also need to do some maintenance and roll back the
// number of processed (completed) operations in this batch to ensure accounting is correct.
- logger.trace("delaying recovery due to missing mapping changes (rolling back stats for [{}] ops)", exception, exception
- .completedOperations());
+ logger.trace(
+ new ParameterizedMessage(
+ "delaying recovery due to missing mapping changes (rolling back stats for [{}] ops)",
+ exception.completedOperations()),
+ exception);
final RecoveryState.Translog translog = recoveryTarget.state().getTranslog();
translog.decrementRecoveredOperations(exception.completedOperations()); // do the maintainance and rollback competed ops
// we do not need to use a timeout here since the entire recovery mechanism has an inactivity protection (it will be
@@ -425,8 +434,12 @@ public class PeerRecoveryTargetService extends AbstractComponent implements Inde
logger.trace("successfully waited for cluster state with version {} (current: {})", clusterStateVersion,
observer.observedState().getVersion());
} catch (Exception e) {
- logger.debug("failed waiting for cluster state with version {} (current: {})", e, clusterStateVersion,
- observer.observedState());
+ logger.debug(
+ new ParameterizedMessage(
+ "failed waiting for cluster state with version {} (current: {})",
+ clusterStateVersion,
+ observer.observedState()),
+ e);
throw ExceptionsHelper.convertToRuntime(e);
}
}
@@ -504,13 +517,14 @@ public class PeerRecoveryTargetService extends AbstractComponent implements Inde
public void onFailure(Exception e) {
try (RecoveriesCollection.RecoveryRef recoveryRef = onGoingRecoveries.getRecovery(recoveryId)) {
if (recoveryRef != null) {
- logger.error("unexpected error during recovery [{}], failing shard", e, recoveryId);
+ logger.error(new ParameterizedMessage("unexpected error during recovery [{}], failing shard", recoveryId), e);
onGoingRecoveries.failRecovery(recoveryId,
new RecoveryFailedException(recoveryRef.status().state(), "unexpected error", e),
true // be safe
);
} else {
- logger.debug("unexpected error during recovery, but recovery id [{}] is finished", e, recoveryId);
+ logger.debug(
+ new ParameterizedMessage("unexpected error during recovery, but recovery id [{}] is finished", recoveryId), e);
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveriesCollection.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveriesCollection.java
index 65da26eb67..d5f06299cb 100644
--- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveriesCollection.java
+++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveriesCollection.java
@@ -19,9 +19,10 @@
package org.elasticsearch.indices.recovery;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.Callback;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
@@ -46,11 +47,11 @@ public class RecoveriesCollection {
/** This is the single source of truth for ongoing recoveries. If it's not here, it was canceled or done */
private final ConcurrentMap<Long, RecoveryTarget> onGoingRecoveries = ConcurrentCollections.newConcurrentMap();
- private final ESLogger logger;
+ private final Logger logger;
private final ThreadPool threadPool;
private final Callback<Long> ensureClusterStateVersionCallback;
- public RecoveriesCollection(ESLogger logger, ThreadPool threadPool, Callback<Long> ensureClusterStateVersionCallback) {
+ public RecoveriesCollection(Logger logger, ThreadPool threadPool, Callback<Long> ensureClusterStateVersionCallback) {
this.logger = logger;
this.threadPool = threadPool;
this.ensureClusterStateVersionCallback = ensureClusterStateVersionCallback;
@@ -222,7 +223,7 @@ public class RecoveriesCollection {
@Override
public void onFailure(Exception e) {
- logger.error("unexpected error while monitoring recovery [{}]", e, recoveryId);
+ logger.error(new ParameterizedMessage("unexpected error while monitoring recovery [{}]", recoveryId), e);
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java
index f78929e516..143b00522a 100644
--- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java
+++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java
@@ -19,6 +19,8 @@
package org.elasticsearch.indices.recovery;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexFormatTooNewException;
@@ -36,7 +38,6 @@ import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.lease.Releasable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.store.InputStreamIndexInput;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.util.CancellableThreads;
@@ -72,7 +73,7 @@ import java.util.stream.StreamSupport;
*/
public class RecoverySourceHandler {
- protected final ESLogger logger;
+ protected final Logger logger;
// Shard that is going to be recovered (the "source")
private final IndexShard shard;
private final String indexName;
@@ -107,7 +108,7 @@ public class RecoverySourceHandler {
final Supplier<Long> currentClusterStateVersionSupplier,
Function<String, Releasable> delayNewRecoveries,
final int fileChunkSizeInBytes,
- final ESLogger logger) {
+ final Logger logger) {
this.shard = shard;
this.recoveryTarget = recoveryTarget;
this.request = request;
@@ -314,8 +315,12 @@ public class RecoverySourceHandler {
RemoteTransportException exception = new RemoteTransportException("File corruption occurred on recovery but " +
"checksums are ok", null);
exception.addSuppressed(targetException);
- logger.warn("{} Remote file corruption during finalization of recovery on node {}. local checksum OK",
- corruptIndexException, shard.shardId(), request.targetNode());
+ logger.warn(
+ new ParameterizedMessage(
+ "{} Remote file corruption during finalization of recovery on node {}. local checksum OK",
+ shard.shardId(),
+ request.targetNode()),
+ corruptIndexException);
throw exception;
} else {
throw targetException;
@@ -557,8 +562,13 @@ public class RecoverySourceHandler {
RemoteTransportException exception = new RemoteTransportException("File corruption occurred on recovery but " +
"checksums are ok", null);
exception.addSuppressed(e);
- logger.warn("{} Remote file corruption on node {}, recovering {}. local checksum OK",
- corruptIndexException, shardId, request.targetNode(), md);
+ logger.warn(
+ new ParameterizedMessage(
+ "{} Remote file corruption on node {}, recovering {}. local checksum OK",
+ shardId,
+ request.targetNode(),
+ md),
+ corruptIndexException);
throw exception;
}
} else {
diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java
index 2cb1d89c15..4b8bdf6dcb 100644
--- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java
+++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java
@@ -19,6 +19,8 @@
package org.elasticsearch.indices.recovery;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexFormatTooNewException;
import org.apache.lucene.index.IndexFormatTooOldException;
@@ -31,7 +33,6 @@ import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.util.Callback;
@@ -62,7 +63,7 @@ import java.util.concurrent.atomic.AtomicLong;
*/
public class RecoveryTarget extends AbstractRefCounted implements RecoveryTargetHandler {
- private final ESLogger logger;
+ private final Logger logger;
private static final AtomicLong idGenerator = new AtomicLong();
@@ -293,7 +294,7 @@ public class RecoveryTarget extends AbstractRefCounted implements RecoveryTarget
try {
entry.getValue().close();
} catch (Exception e) {
- logger.debug("error while closing recovery output [{}]", e, entry.getValue());
+ logger.debug(new ParameterizedMessage("error while closing recovery output [{}]", entry.getValue()), e);
}
iterator.remove();
}
diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/SharedFSRecoverySourceHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/SharedFSRecoverySourceHandler.java
index 3865696326..591176f047 100644
--- a/core/src/main/java/org/elasticsearch/indices/recovery/SharedFSRecoverySourceHandler.java
+++ b/core/src/main/java/org/elasticsearch/indices/recovery/SharedFSRecoverySourceHandler.java
@@ -19,8 +19,8 @@
package org.elasticsearch.indices.recovery;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.lease.Releasable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.translog.Translog;
@@ -39,7 +39,7 @@ public class SharedFSRecoverySourceHandler extends RecoverySourceHandler {
public SharedFSRecoverySourceHandler(IndexShard shard, RecoveryTargetHandler recoveryTarget, StartRecoveryRequest request,
Supplier<Long> currentClusterStateVersionSupplier,
- Function<String, Releasable> delayNewRecoveries, ESLogger logger) {
+ Function<String, Releasable> delayNewRecoveries, Logger logger) {
super(shard, recoveryTarget, request, currentClusterStateVersionSupplier, delayNewRecoveries, -1, logger);
this.shard = shard;
this.request = request;
diff --git a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java
index bc7e7f59fc..c86d721e73 100644
--- a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java
+++ b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java
@@ -19,6 +19,7 @@
package org.elasticsearch.indices.store;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
@@ -228,7 +229,7 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe
@Override
public void handleException(TransportException exp) {
- logger.debug("shards active request failed for {}", exp, shardId);
+ logger.debug(new ParameterizedMessage("shards active request failed for {}", shardId), exp);
if (awaitingResponses.decrementAndGet() == 0) {
allNodesResponded();
}
@@ -266,14 +267,14 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe
try {
indicesService.deleteShardStore("no longer used", shardId, currentState);
} catch (Exception ex) {
- logger.debug("{} failed to delete unallocated shard, ignoring", ex, shardId);
+ logger.debug(new ParameterizedMessage("{} failed to delete unallocated shard, ignoring", shardId), ex);
}
return currentState;
}
@Override
public void onFailure(String source, Exception e) {
- logger.error("{} unexpected error during deletion of unallocated shard", e, shardId);
+ logger.error(new ParameterizedMessage("{} unexpected error during deletion of unallocated shard", shardId), e);
}
});
}
@@ -323,9 +324,9 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe
try {
channel.sendResponse(new ShardActiveResponse(shardActive, clusterService.localNode()));
} catch (IOException e) {
- logger.error("failed send response for shard active while trying to delete shard {} - shard will probably not be removed", e, request.shardId);
+ logger.error(new ParameterizedMessage("failed send response for shard active while trying to delete shard {} - shard will probably not be removed", request.shardId), e);
} catch (EsRejectedExecutionException e) {
- logger.error("failed send response for shard active while trying to delete shard {} - shard will probably not be removed", e, request.shardId);
+ logger.error(new ParameterizedMessage("failed send response for shard active while trying to delete shard {} - shard will probably not be removed", request.shardId), e);
}
}
}, new ClusterStateObserver.ValidationPredicate() {
diff --git a/core/src/main/java/org/elasticsearch/monitor/fs/FsProbe.java b/core/src/main/java/org/elasticsearch/monitor/fs/FsProbe.java
index f0b6155692..66fa055787 100644
--- a/core/src/main/java/org/elasticsearch/monitor/fs/FsProbe.java
+++ b/core/src/main/java/org/elasticsearch/monitor/fs/FsProbe.java
@@ -19,6 +19,7 @@
package org.elasticsearch.monitor.fs;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.util.Constants;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.collect.Tuple;
@@ -112,7 +113,7 @@ public class FsProbe extends AbstractComponent {
} catch (Exception e) {
// do not fail Elasticsearch if something unexpected
// happens here
- logger.debug("unexpected exception processing /proc/diskstats for devices {}", e, devicesNumbers);
+ logger.debug(new ParameterizedMessage("unexpected exception processing /proc/diskstats for devices {}", devicesNumbers), e);
return null;
}
}
diff --git a/core/src/main/java/org/elasticsearch/monitor/fs/FsService.java b/core/src/main/java/org/elasticsearch/monitor/fs/FsService.java
index 9f38538f26..96467b4d40 100644
--- a/core/src/main/java/org/elasticsearch/monitor/fs/FsService.java
+++ b/core/src/main/java/org/elasticsearch/monitor/fs/FsService.java
@@ -19,8 +19,8 @@
package org.elasticsearch.monitor.fs;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
@@ -55,7 +55,7 @@ public class FsService extends AbstractComponent {
return cache.getOrRefresh();
}
- private static FsInfo stats(FsProbe probe, FsInfo initialValue, ESLogger logger) {
+ private static FsInfo stats(FsProbe probe, FsInfo initialValue, Logger logger) {
try {
return probe.stats(initialValue);
} catch (IOException e) {
diff --git a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmGcMonitorService.java b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmGcMonitorService.java
index a842ba2849..3a19fe5bd0 100644
--- a/core/src/main/java/org/elasticsearch/monitor/jvm/JvmGcMonitorService.java
+++ b/core/src/main/java/org/elasticsearch/monitor/jvm/JvmGcMonitorService.java
@@ -19,14 +19,13 @@
package org.elasticsearch.monitor.jvm;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.monitor.jvm.JvmStats.GarbageCollector;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPool.Cancellable;
@@ -36,7 +35,6 @@ import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
-import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.function.BiFunction;
@@ -207,7 +205,7 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent {
"[gc][{}][{}][{}] duration [{}], collections [{}]/[{}], total [{}]/[{}], memory [{}]->[{}]/[{}], all_pools {}";
static void logSlowGc(
- final ESLogger logger,
+ final Logger logger,
final JvmMonitor.Threshold threshold,
final long seq,
final JvmMonitor.SlowGcEvent slowGcEvent,
@@ -307,7 +305,7 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent {
private static final String OVERHEAD_LOG_MESSAGE = "[gc][{}] overhead, spent [{}] collecting in the last [{}]";
static void logGcOverhead(
- final ESLogger logger,
+ final Logger logger,
final JvmMonitor.Threshold threshold,
final long current,
final long elapsed,
diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java
index 523e6faefb..ff1f5c2594 100644
--- a/core/src/main/java/org/elasticsearch/node/Node.java
+++ b/core/src/main/java/org/elasticsearch/node/Node.java
@@ -19,6 +19,10 @@
package org.elasticsearch.node;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.config.Configurator;
import org.apache.lucene.util.Constants;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.Build;
@@ -51,7 +55,6 @@ import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.common.network.NetworkModule;
@@ -119,6 +122,7 @@ import org.elasticsearch.transport.TransportService;
import org.elasticsearch.tribe.TribeService;
import org.elasticsearch.watcher.ResourceWatcherService;
+import javax.management.MBeanServerPermission;
import java.io.BufferedWriter;
import java.io.Closeable;
import java.io.IOException;
@@ -129,11 +133,13 @@ import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
+import java.security.AccessControlException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
@@ -216,7 +222,7 @@ public class Node implements Closeable {
boolean success = false;
{
// use temp logger just to say we are starting. we can't use it later on because the node name might not be set
- ESLogger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(environment.settings()));
+ Logger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(environment.settings()));
logger.info("initializing ...");
}
@@ -236,7 +242,7 @@ public class Node implements Closeable {
final boolean hadPredefinedNodeName = NODE_NAME_SETTING.exists(tmpSettings);
tmpSettings = addNodeNameIfNeeded(tmpSettings, nodeEnvironment.nodeId());
- ESLogger logger = Loggers.getLogger(Node.class, tmpSettings);
+ Logger logger = Loggers.getLogger(Node.class, tmpSettings);
if (hadPredefinedNodeName == false) {
logger.info("node name [{}] derived from node ID; set [{}] to override",
NODE_NAME_SETTING.get(tmpSettings), NODE_NAME_SETTING.getKey());
@@ -454,7 +460,7 @@ public class Node implements Closeable {
return this;
}
- ESLogger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(settings));
+ Logger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(settings));
logger.info("starting ...");
// hack around dependency injection problem (for now...)
injector.getInstance(Discovery.class).setAllocationService(injector.getInstance(AllocationService.class));
@@ -569,7 +575,7 @@ public class Node implements Closeable {
if (!lifecycle.moveToStopped()) {
return this;
}
- ESLogger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(settings));
+ Logger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(settings));
logger.info("stopping ...");
injector.getInstance(TribeService.class).stop();
@@ -600,6 +606,24 @@ public class Node implements Closeable {
injector.getInstance(IndicesService.class).stop();
logger.info("stopped");
+ final String log4jShutdownEnabled = System.getProperty("es.log4j.shutdownEnabled", "true");
+ final boolean shutdownEnabled;
+ switch (log4jShutdownEnabled) {
+ case "true":
+ shutdownEnabled = true;
+ break;
+ case "false":
+ shutdownEnabled = false;
+ break;
+ default:
+ throw new IllegalArgumentException(
+ "invalid value for [es.log4j.shutdownEnabled], was [" + log4jShutdownEnabled + "] but must be [true] or [false]");
+ }
+ if (shutdownEnabled) {
+ LoggerContext context = (LoggerContext) LogManager.getContext(false);
+ Configurator.shutdown(context);
+ }
+
return this;
}
@@ -615,7 +639,7 @@ public class Node implements Closeable {
return;
}
- ESLogger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(settings));
+ Logger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(settings));
logger.info("closing ...");
List<Closeable> toClose = new ArrayList<>();
StopWatch stopWatch = new StopWatch("node_close");
diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginCli.java b/core/src/main/java/org/elasticsearch/plugins/PluginCli.java
index 3ce60882cc..a49aab3003 100644
--- a/core/src/main/java/org/elasticsearch/plugins/PluginCli.java
+++ b/core/src/main/java/org/elasticsearch/plugins/PluginCli.java
@@ -21,7 +21,6 @@ package org.elasticsearch.plugins;
import org.elasticsearch.cli.MultiCommand;
import org.elasticsearch.cli.Terminal;
-import org.elasticsearch.common.logging.LogConfigurator;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
@@ -54,7 +53,7 @@ public class PluginCli extends MultiCommand {
.put("rootLogger", "${logger.level}, terminal")
.put("logger.level", loggerLevel)
.build(), Terminal.DEFAULT);
- LogConfigurator.configure(loggingEnvironment.settings(), false);
+        // TODO: reinstate logging configuration once LogConfigurator is ported to Log4j 2 (call removed during ESLogger migration)
exit(new PluginCli().main(args, Terminal.DEFAULT));
}
diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java
index ccbde1310d..a4c32722f7 100644
--- a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java
+++ b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java
@@ -19,26 +19,8 @@
package org.elasticsearch.plugins;
-import java.io.IOException;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.nio.file.DirectoryStream;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.function.Function;
-import java.util.stream.Collectors;
-
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.analysis.util.CharFilterFactory;
import org.apache.lucene.analysis.util.TokenFilterFactory;
import org.apache.lucene.analysis.util.TokenizerFactory;
@@ -54,7 +36,6 @@ import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.io.FileSystemUtils;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
@@ -62,6 +43,26 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.threadpool.ExecutorBuilder;
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.nio.file.DirectoryStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
import static org.elasticsearch.common.io.FileSystemUtils.isAccessibleDirectory;
/**
@@ -211,7 +212,7 @@ public class PluginsService extends AbstractComponent {
this.onModuleReferences = Collections.unmodifiableMap(onModuleReferences);
}
- private static void logPluginInfo(final List<PluginInfo> pluginInfos, final String type, final ESLogger logger) {
+ private static void logPluginInfo(final List<PluginInfo> pluginInfos, final String type, final Logger logger) {
assert pluginInfos != null;
if (pluginInfos.isEmpty()) {
logger.info("no " + type + "s loaded");
@@ -245,7 +246,7 @@ public class PluginsService extends AbstractComponent {
-                logger.warn("plugin {}, failed to invoke custom onModule method", e, plugin.v1().getName());
+                logger.warn(new ParameterizedMessage("plugin {}, failed to invoke custom onModule method", plugin.v1().getName()), e);
                 throw new ElasticsearchException("failed to invoke onModule", e);
             } catch (Exception e) {
-                logger.warn("plugin {}, failed to invoke custom onModule method", e, plugin.v1().getName());
+                logger.warn(new ParameterizedMessage("plugin {}, failed to invoke custom onModule method", plugin.v1().getName()), e);
                 throw e;
             }
@@ -346,7 +347,7 @@ public class PluginsService extends AbstractComponent {
}
static List<Bundle> getPluginBundles(Path pluginsDirectory) throws IOException {
- ESLogger logger = Loggers.getLogger(PluginsService.class);
+ Logger logger = Loggers.getLogger(PluginsService.class);
// TODO: remove this leniency, but tests bogusly rely on it
if (!isAccessibleDirectory(pluginsDirectory, logger)) {
diff --git a/core/src/main/java/org/elasticsearch/repositories/RepositoriesService.java b/core/src/main/java/org/elasticsearch/repositories/RepositoriesService.java
index 076853fd75..8df4412b45 100644
--- a/core/src/main/java/org/elasticsearch/repositories/RepositoriesService.java
+++ b/core/src/main/java/org/elasticsearch/repositories/RepositoriesService.java
@@ -19,6 +19,7 @@
package org.elasticsearch.repositories;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
import org.elasticsearch.cluster.ClusterChangedEvent;
@@ -140,7 +141,7 @@ public class RepositoriesService extends AbstractComponent implements ClusterSta
@Override
public void onFailure(String source, Exception e) {
- logger.warn("failed to create repository [{}]", e, request.name);
+ logger.warn(new ParameterizedMessage("failed to create repository [{}]", request.name), e);
super.onFailure(source, e);
}
@@ -214,7 +215,7 @@ public class RepositoriesService extends AbstractComponent implements ClusterSta
try {
repository.endVerification(verificationToken);
} catch (Exception e) {
- logger.warn("[{}] failed to finish repository verification", e, repositoryName);
+ logger.warn(new ParameterizedMessage("[{}] failed to finish repository verification", repositoryName), e);
listener.onFailure(e);
return;
}
@@ -231,7 +232,7 @@ public class RepositoriesService extends AbstractComponent implements ClusterSta
repository.endVerification(verificationToken);
} catch (Exception inner) {
inner.addSuppressed(e);
- logger.warn("[{}] failed to finish repository verification", inner, repositoryName);
+ logger.warn(new ParameterizedMessage("[{}] failed to finish repository verification", repositoryName), inner);
}
listener.onFailure(e);
}
@@ -293,14 +294,14 @@ public class RepositoriesService extends AbstractComponent implements ClusterSta
} catch (RepositoryException ex) {
// TODO: this catch is bogus, it means the old repo is already closed,
// but we have nothing to replace it
- logger.warn("failed to change repository [{}]", ex, repositoryMetaData.name());
+ logger.warn(new ParameterizedMessage("failed to change repository [{}]", repositoryMetaData.name()), ex);
}
}
} else {
try {
repository = createRepository(repositoryMetaData);
} catch (RepositoryException ex) {
- logger.warn("failed to create repository [{}]", ex, repositoryMetaData.name());
+ logger.warn(new ParameterizedMessage("failed to create repository [{}]", repositoryMetaData.name()), ex);
}
}
if (repository != null) {
@@ -382,7 +383,7 @@ public class RepositoriesService extends AbstractComponent implements ClusterSta
repository.start();
return repository;
} catch (Exception e) {
- logger.warn("failed to create repository [{}][{}]", e, repositoryMetaData.type(), repositoryMetaData.name());
+ logger.warn(new ParameterizedMessage("failed to create repository [{}][{}]", repositoryMetaData.type(), repositoryMetaData.name()), e);
throw new RepositoryException(repositoryMetaData.name(), "failed to create repository", e);
}
}
diff --git a/core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java b/core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java
index 65544421c8..a040cf8349 100644
--- a/core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java
+++ b/core/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java
@@ -19,14 +19,9 @@
package org.elasticsearch.repositories;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.CopyOnWriteArrayList;
-import java.util.concurrent.atomic.AtomicInteger;
-
import com.carrotsearch.hppc.ObjectContainer;
import com.carrotsearch.hppc.cursors.ObjectCursor;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
@@ -45,6 +40,12 @@ import org.elasticsearch.transport.TransportRequestHandler;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportService;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.atomic.AtomicInteger;
+
public class VerifyNodeRepositoryAction extends AbstractComponent {
public static final String ACTION_NAME = "internal:admin/repository/verify";
@@ -83,7 +84,7 @@ public class VerifyNodeRepositoryAction extends AbstractComponent {
try {
doVerify(repository, verificationToken, localNode);
} catch (Exception e) {
- logger.warn("[{}] failed to verify repository", e, repository);
+ logger.warn(new ParameterizedMessage("[{}] failed to verify repository", repository), e);
errors.add(new VerificationFailure(node.getId(), e));
}
if (counter.decrementAndGet() == 0) {
@@ -154,7 +155,7 @@ public class VerifyNodeRepositoryAction extends AbstractComponent {
try {
doVerify(request.repository, request.verificationToken, localNode);
} catch (Exception ex) {
- logger.warn("[{}] failed to verify repository", ex, request.repository);
+ logger.warn(new ParameterizedMessage("[{}] failed to verify repository", request.repository), ex);
throw ex;
}
channel.sendResponse(TransportResponse.Empty.INSTANCE);
diff --git a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
index c6b4634e1c..f4dfc48500 100644
--- a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
+++ b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
@@ -19,6 +19,7 @@
package org.elasticsearch.repositories.blobstore;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexFormatTooNewException;
@@ -353,10 +354,9 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
try {
snapshotInfo = getSnapshotInfo(snapshotId);
} catch (SnapshotException e) {
- logger.warn("[{}] repository is on a pre-5.0 format with an index file that contains snapshot [{}] but " +
- "the corresponding snap-{}.dat file cannot be read. The snapshot will no longer be included in " +
- "the repository but its data directories will remain.", e, getMetadata().name(),
- snapshotId, snapshotId.getUUID());
+ logger.warn(new ParameterizedMessage("[{}] repository is on a pre-5.0 format with an index file that contains snapshot [{}] but " +
+ "the corresponding snap-{}.dat file cannot be read. The snapshot will no longer be included in " +
+ "the repository but its data directories will remain.", getMetadata().name(), snapshotId, snapshotId.getUUID()), e);
continue;
}
for (final String indexName : snapshotInfo.indices()) {
@@ -424,7 +424,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
try {
indexMetaDataFormat(snapshot.version()).delete(indexMetaDataBlobContainer, snapshotId.getUUID());
} catch (IOException ex) {
- logger.warn("[{}] failed to delete metadata for index [{}]", ex, snapshotId, index);
+ logger.warn(new ParameterizedMessage("[{}] failed to delete metadata for index [{}]", snapshotId, index), ex);
}
if (metaData != null) {
IndexMetaData indexMetaData = metaData.index(index);
@@ -433,7 +433,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
try {
delete(snapshotId, snapshot.version(), indexId, new ShardId(indexMetaData.getIndex(), shardId));
} catch (SnapshotException ex) {
- logger.warn("[{}] failed to delete shard data for shard [{}][{}]", ex, snapshotId, index, shardId);
+ logger.warn(new ParameterizedMessage("[{}] failed to delete shard data for shard [{}][{}]", snapshotId, index, shardId), ex);
}
}
}
@@ -452,12 +452,12 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
// we'll ignore that and accept that cleanup didn't fully succeed.
// since we are using UUIDs for path names, this won't be an issue for
// snapshotting indices of the same name
- logger.debug("[{}] index [{}] no longer part of any snapshots in the repository, but failed to clean up " +
- "its index folder due to the directory not being empty.", dnee, metadata.name(), indexId);
+ logger.debug(new ParameterizedMessage("[{}] index [{}] no longer part of any snapshots in the repository, but failed to clean up " +
+ "its index folder due to the directory not being empty.", metadata.name(), indexId), dnee);
} catch (IOException ioe) {
// a different IOException occurred while trying to delete - will just log the issue for now
- logger.debug("[{}] index [{}] no longer part of any snapshots in the repository, but failed to clean up " +
- "its index folder.", ioe, metadata.name(), indexId);
+ logger.debug(new ParameterizedMessage("[{}] index [{}] no longer part of any snapshots in the repository, but failed to clean up " +
+ "its index folder.", metadata.name(), indexId), ioe);
}
}
} catch (IOException ex) {
@@ -471,7 +471,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
try {
snapshotFormat(snapshotInfo.version()).delete(snapshotsBlobContainer, blobId);
} catch (IOException e) {
- logger.warn("[{}] Unable to delete snapshot file [{}]", e, snapshotInfo.snapshotId(), blobId);
+ logger.warn(new ParameterizedMessage("[{}] Unable to delete snapshot file [{}]", snapshotInfo.snapshotId(), blobId), e);
}
} else {
// we don't know the version, first try the current format, then the legacy format
@@ -483,7 +483,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
snapshotLegacyFormat.delete(snapshotsBlobContainer, blobId);
} catch (IOException e2) {
// neither snapshot file could be deleted, log the error
- logger.warn("Unable to delete snapshot file [{}]", e, blobId);
+ logger.warn(new ParameterizedMessage("Unable to delete snapshot file [{}]", blobId), e);
}
}
}
@@ -495,7 +495,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
try {
globalMetaDataFormat(snapshotInfo.version()).delete(snapshotsBlobContainer, blobId);
} catch (IOException e) {
- logger.warn("[{}] Unable to delete global metadata file [{}]", e, snapshotInfo.snapshotId(), blobId);
+ logger.warn(new ParameterizedMessage("[{}] Unable to delete global metadata file [{}]", snapshotInfo.snapshotId(), blobId), e);
}
} else {
// we don't know the version, first try the current format, then the legacy format
@@ -507,7 +507,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
globalMetaDataLegacyFormat.delete(snapshotsBlobContainer, blobId);
} catch (IOException e2) {
// neither global metadata file could be deleted, log the error
- logger.warn("Unable to delete global metadata file [{}]", e, blobId);
+ logger.warn(new ParameterizedMessage("Unable to delete global metadata file [{}]", blobId), e);
}
}
}
@@ -1074,7 +1074,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
blobContainer.deleteBlob(blobName);
} catch (IOException e) {
// TODO: don't catch and let the user handle it?
- logger.debug("[{}] [{}] error deleting blob [{}] during cleanup", e, snapshotId, shardId, blobName);
+ logger.debug(new ParameterizedMessage("[{}] [{}] error deleting blob [{}] during cleanup", snapshotId, shardId, blobName), e);
}
}
}
@@ -1151,7 +1151,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
indexShardSnapshotsFormat.read(blobContainer, Integer.toString(latest));
return new Tuple<>(shardSnapshots, latest);
} catch (IOException e) {
- logger.warn("failed to read index file [{}]", e, SNAPSHOT_INDEX_PREFIX + latest);
+ logger.warn(new ParameterizedMessage("failed to read index file [{}]", SNAPSHOT_INDEX_PREFIX + latest), e);
}
}
@@ -1169,7 +1169,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
snapshots.add(new SnapshotFiles(snapshot.snapshot(), snapshot.indexFiles()));
}
} catch (IOException e) {
- logger.warn("failed to read commit point [{}]", e, name);
+ logger.warn(new ParameterizedMessage("failed to read commit point [{}]", name), e);
}
}
return new Tuple<>(new BlobStoreIndexShardSnapshots(snapshots), -1);
@@ -1252,7 +1252,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
// in a bwc compatible way.
maybeRecalculateMetadataHash(blobContainer, fileInfo, metadata);
} catch (Exception e) {
- logger.warn("{} Can't calculate hash from blob for file [{}] [{}]", e, shardId, fileInfo.physicalName(), fileInfo.metadata());
+ logger.warn(new ParameterizedMessage("{} Can't calculate hash from blob for file [{}] [{}]", shardId, fileInfo.physicalName(), fileInfo.metadata()), e);
}
if (fileInfo.isSame(md) && snapshotFileExistsInBlobs(fileInfo, blobs)) {
// a commit point file with the same name, size and checksum was already copied to repository
@@ -1525,7 +1525,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
logger.trace("[{}] [{}] restoring from to an empty shard", shardId, snapshotId);
recoveryTargetMetadata = Store.MetadataSnapshot.EMPTY;
} catch (IOException e) {
- logger.warn("{} Can't read metadata from store, will not reuse any local file while restoring", e, shardId);
+ logger.warn(new ParameterizedMessage("{} Can't read metadata from store, will not reuse any local file while restoring", shardId), e);
recoveryTargetMetadata = Store.MetadataSnapshot.EMPTY;
}
@@ -1541,7 +1541,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
maybeRecalculateMetadataHash(blobContainer, fileInfo, recoveryTargetMetadata);
} catch (Exception e) {
// if the index is broken we might not be able to read it
- logger.warn("{} Can't calculate hash from blog for file [{}] [{}]", e, shardId, fileInfo.physicalName(), fileInfo.metadata());
+ logger.warn(new ParameterizedMessage("{} Can't calculate hash from blob for file [{}] [{}]", shardId, fileInfo.physicalName(), fileInfo.metadata()), e);
}
snapshotMetaData.put(fileInfo.metadata().name(), fileInfo.metadata());
fileInfos.put(fileInfo.metadata().name(), fileInfo);
diff --git a/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java b/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java
index fcf79962b6..23523600bc 100644
--- a/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java
+++ b/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java
@@ -19,11 +19,12 @@
package org.elasticsearch.rest;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -111,7 +112,7 @@ public class BytesRestResponse extends RestResponse {
return this.status;
}
- private static final ESLogger SUPPRESSED_ERROR_LOGGER = ESLoggerFactory.getLogger("rest.suppressed");
+ private static final Logger SUPPRESSED_ERROR_LOGGER = ESLoggerFactory.getLogger("rest.suppressed");
private static XContentBuilder convert(RestChannel channel, RestStatus status, Exception e) throws IOException {
XContentBuilder builder = channel.newErrorBuilder().startObject();
@@ -123,9 +124,9 @@ public class BytesRestResponse extends RestResponse {
params = new ToXContent.DelegatingMapParams(Collections.singletonMap(ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE, "false"), channel.request());
} else {
if (status.getStatus() < 500) {
- SUPPRESSED_ERROR_LOGGER.debug("path: {}, params: {}", e, channel.request().rawPath(), channel.request().params());
+ SUPPRESSED_ERROR_LOGGER.debug(new ParameterizedMessage("path: {}, params: {}", channel.request().rawPath(), channel.request().params()), e);
} else {
- SUPPRESSED_ERROR_LOGGER.warn("path: {}, params: {}", e, channel.request().rawPath(), channel.request().params());
+ SUPPRESSED_ERROR_LOGGER.warn(new ParameterizedMessage("path: {}, params: {}", channel.request().rawPath(), channel.request().params()), e);
}
params = channel.request();
}
diff --git a/core/src/main/java/org/elasticsearch/rest/RestController.java b/core/src/main/java/org/elasticsearch/rest/RestController.java
index d5ba350ff4..ee44108dbd 100644
--- a/core/src/main/java/org/elasticsearch/rest/RestController.java
+++ b/core/src/main/java/org/elasticsearch/rest/RestController.java
@@ -19,6 +19,7 @@
package org.elasticsearch.rest;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesArray;
@@ -28,17 +29,12 @@ import org.elasticsearch.common.path.PathTrie;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.plugins.ActionPlugin;
import java.io.IOException;
import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
-import static java.util.Collections.unmodifiableSet;
import static org.elasticsearch.rest.RestStatus.BAD_REQUEST;
import static org.elasticsearch.rest.RestStatus.OK;
@@ -213,7 +209,7 @@ public class RestController extends AbstractLifecycleComponent {
channel.sendResponse(new BytesRestResponse(channel, e));
} catch (Exception inner) {
inner.addSuppressed(e);
- logger.error("failed to send failure response for uri [{}]", inner, request.uri());
+ logger.error(new ParameterizedMessage("failed to send failure response for uri [{}]", request.uri()), inner);
}
}
@@ -315,7 +311,7 @@ public class RestController extends AbstractLifecycleComponent {
try {
channel.sendResponse(new BytesRestResponse(channel, e));
} catch (IOException e1) {
- logger.error("Failed to send failure response for uri [{}]", e1, request.uri());
+ logger.error(new ParameterizedMessage("Failed to send failure response for uri [{}]", request.uri()), e1);
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/rest/action/RestActionListener.java b/core/src/main/java/org/elasticsearch/rest/action/RestActionListener.java
index 5074a12079..572da497c1 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/RestActionListener.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/RestActionListener.java
@@ -19,8 +19,8 @@
package org.elasticsearch.rest.action;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.rest.BytesRestResponse;
import org.elasticsearch.rest.RestChannel;
@@ -33,7 +33,7 @@ public abstract class RestActionListener<Response> implements ActionListener<Res
// we use static here so we won't have to pass the actual logger each time for a very rare case of logging
// where the settings don't matter that much
- private static ESLogger logger = Loggers.getLogger(RestResponseListener.class);
+ private static Logger logger = Loggers.getLogger(RestResponseListener.class);
protected final RestChannel channel;
diff --git a/core/src/main/java/org/elasticsearch/script/ScriptService.java b/core/src/main/java/org/elasticsearch/script/ScriptService.java
index 793c87077e..80b4f5fb1f 100644
--- a/core/src/main/java/org/elasticsearch/script/ScriptService.java
+++ b/core/src/main/java/org/elasticsearch/script/ScriptService.java
@@ -19,6 +19,7 @@
package org.elasticsearch.script;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
@@ -527,8 +528,7 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust
"Limit of script size in bytes [{}] has been exceeded for script [{}] with size [{}]",
allowedScriptSizeInBytes,
identifier,
- scriptSizeInBytes
- );
+ scriptSizeInBytes);
throw new IllegalArgumentException(message);
}
}
@@ -605,7 +605,7 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust
logger.warn("skipping compile of script file [{}] as all scripted operations are disabled for file scripts", file.toAbsolutePath());
}
} catch (Exception e) {
- logger.warn("failed to load/compile script [{}]", e, scriptNameExt.v1());
+ logger.warn(new ParameterizedMessage("failed to load/compile script [{}]", scriptNameExt.v1()), e);
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/Completion090PostingsFormat.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/Completion090PostingsFormat.java
index d3008e999e..245f2416b4 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/Completion090PostingsFormat.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/Completion090PostingsFormat.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.search.suggest.completion2x;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.FieldsConsumer;
import org.apache.lucene.codecs.FieldsProducer;
@@ -42,7 +43,6 @@ import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.Version;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.index.mapper.CompletionFieldMapper2x;
import org.elasticsearch.index.mapper.MappedFieldType;
@@ -75,7 +75,7 @@ public class Completion090PostingsFormat extends PostingsFormat {
public static final int SUGGEST_VERSION_CURRENT = SUGGEST_CODEC_VERSION;
public static final String EXTENSION = "cmp";
- private static final ESLogger logger = Loggers.getLogger(Completion090PostingsFormat.class);
+ private static final Logger logger = Loggers.getLogger(Completion090PostingsFormat.class);
private PostingsFormat delegatePostingsFormat;
private static final Map<String, CompletionLookupProvider> providers;
private CompletionLookupProvider writeProvider;
diff --git a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java
index 2ccf24a8dc..960e40893b 100644
--- a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java
+++ b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java
@@ -22,6 +22,7 @@ import com.carrotsearch.hppc.IntHashSet;
import com.carrotsearch.hppc.IntSet;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.IndicesOptions;
@@ -63,7 +64,6 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
-import org.elasticsearch.repositories.IndexId;
import org.elasticsearch.repositories.RepositoriesService;
import org.elasticsearch.repositories.Repository;
import org.elasticsearch.repositories.RepositoryData;
@@ -461,7 +461,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis
@Override
public void onFailure(String source, Exception e) {
- logger.warn("[{}] failed to restore snapshot", e, snapshotId);
+ logger.warn(new ParameterizedMessage("[{}] failed to restore snapshot", snapshotId), e);
listener.onFailure(e);
}
@@ -478,7 +478,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis
} catch (Exception e) {
- logger.warn("[{}] failed to restore snapshot", e, request.repositoryName + ":" + request.snapshotName);
+ logger.warn(new ParameterizedMessage("[{}] failed to restore snapshot", request.repositoryName + ":" + request.snapshotName), e);
listener.onFailure(e);
}
}
@@ -602,7 +602,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis
@Override
public void onFailure(String source, @Nullable Exception e) {
for (UpdateIndexShardRestoreStatusRequest request : drainedRequests) {
- logger.warn("[{}][{}] failed to update snapshot status to [{}]", e, request.snapshot(), request.shardId(), request.status());
+ logger.warn(new ParameterizedMessage("[{}][{}] failed to update snapshot status to [{}]", request.snapshot(), request.shardId(), request.status()), e);
}
}
@@ -670,7 +670,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis
try {
listener.onResponse(new RestoreCompletionResponse(snapshot, restoreInfo));
} catch (Exception e) {
- logger.warn("failed to update snapshot status for [{}]", e, listener);
+ logger.warn(new ParameterizedMessage("failed to update snapshot status for [{}]", listener), e);
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java
index e957d2deb6..3611ec7273 100644
--- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java
+++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java
@@ -20,6 +20,7 @@
package org.elasticsearch.snapshots;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.index.IndexCommit;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.cluster.ClusterChangedEvent;
@@ -312,7 +313,7 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements
@Override
public void onFailure(Exception e) {
- logger.warn("[{}] [{}] failed to create snapshot", e, shardId, entry.getKey());
+ logger.warn(new ParameterizedMessage("[{}] [{}] failed to create snapshot", shardId, entry.getKey()), e);
updateIndexShardSnapshotStatus(entry.getKey(), shardId, new SnapshotsInProgress.ShardSnapshotStatus(localNodeId, SnapshotsInProgress.State.FAILED, ExceptionsHelper.detailedMessage(e)));
}
@@ -494,7 +495,7 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements
UPDATE_SNAPSHOT_ACTION_NAME, request, EmptyTransportResponseHandler.INSTANCE_SAME);
}
} catch (Exception e) {
- logger.warn("[{}] [{}] failed to update snapshot state", e, request.snapshot(), request.status());
+ logger.warn(new ParameterizedMessage("[{}] [{}] failed to update snapshot state", request.snapshot(), request.status()), e);
}
}
@@ -578,7 +579,7 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements
@Override
public void onFailure(String source, Exception e) {
for (UpdateIndexShardSnapshotStatusRequest request : drainedRequests) {
- logger.warn("[{}][{}] failed to update snapshot status to [{}]", e, request.snapshot(), request.shardId(), request.status());
+ logger.warn(new ParameterizedMessage("[{}][{}] failed to update snapshot status to [{}]", request.snapshot(), request.shardId(), request.status()), e);
}
}
});
diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java
index a0c1ddf1ea..8c6bd51ae2 100644
--- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java
+++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java
@@ -21,6 +21,7 @@ package org.elasticsearch.snapshots;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
@@ -180,7 +181,7 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus
snapshotSet.add(repository.getSnapshotInfo(snapshotId));
} catch (Exception ex) {
if (ignoreUnavailable) {
- logger.warn("failed to get snapshot [{}]", ex, snapshotId);
+ logger.warn(new ParameterizedMessage("failed to get snapshot [{}]", snapshotId), ex);
} else {
throw new SnapshotException(repositoryName, snapshotId, "Snapshot could not be read", ex);
}
@@ -254,7 +255,7 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus
@Override
public void onFailure(String source, Exception e) {
- logger.warn("[{}][{}] failed to create snapshot", e, repositoryName, snapshotName);
+ logger.warn(new ParameterizedMessage("[{}][{}] failed to create snapshot", repositoryName, snapshotName), e);
newSnapshot = null;
listener.onFailure(e);
}
@@ -405,7 +406,7 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus
@Override
public void onFailure(String source, Exception e) {
- logger.warn("[{}] failed to create snapshot", e, snapshot.snapshot().getSnapshotId());
+ logger.warn(new ParameterizedMessage("[{}] failed to create snapshot", snapshot.snapshot().getSnapshotId()), e);
removeSnapshotFromClusterState(snapshot.snapshot(), null, e, new CleanupAfterErrorListener(snapshot, true, userCreateSnapshotListener, e));
}
@@ -427,7 +428,7 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus
}
});
} catch (Exception e) {
- logger.warn("failed to create snapshot [{}]", e, snapshot.snapshot().getSnapshotId());
+ logger.warn(new ParameterizedMessage("failed to create snapshot [{}]", snapshot.snapshot().getSnapshotId()), e);
removeSnapshotFromClusterState(snapshot.snapshot(), null, e, new CleanupAfterErrorListener(snapshot, snapshotCreated, userCreateSnapshotListener, e));
}
}
@@ -469,7 +470,7 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus
Collections.emptyList());
} catch (Exception inner) {
inner.addSuppressed(exception);
- logger.warn("[{}] failed to close snapshot in repository", inner, snapshot.snapshot());
+ logger.warn(new ParameterizedMessage("[{}] failed to close snapshot in repository", snapshot.snapshot()), inner);
}
}
userCreateSnapshotListener.onFailure(e);
@@ -722,7 +723,7 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus
@Override
public void onFailure(String source, Exception e) {
- logger.warn("failed to update snapshot state after shards started from [{}] ", e, source);
+ logger.warn(new ParameterizedMessage("failed to update snapshot state after shards started from [{}] ", source), e);
}
});
}
@@ -876,7 +877,7 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus
SnapshotInfo snapshotInfo = repository.finalizeSnapshot(snapshot.getSnapshotId(), entry.indices(), entry.startTime(), failure, entry.shards().size(), Collections.unmodifiableList(shardFailures));
removeSnapshotFromClusterState(snapshot, snapshotInfo, null);
} catch (Exception e) {
- logger.warn("[{}] failed to finalize snapshot", e, snapshot);
+ logger.warn(new ParameterizedMessage("[{}] failed to finalize snapshot", snapshot), e);
removeSnapshotFromClusterState(snapshot, null, e);
}
}
@@ -925,7 +926,7 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus
@Override
public void onFailure(String source, Exception e) {
- logger.warn("[{}] failed to remove snapshot metadata", e, snapshot);
+ logger.warn(new ParameterizedMessage("[{}] failed to remove snapshot metadata", snapshot), e);
if (listener != null) {
listener.onFailure(e);
}
@@ -941,7 +942,7 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus
listener.onSnapshotFailure(snapshot, failure);
}
} catch (Exception t) {
- logger.warn("failed to notify listener [{}]", t, listener);
+ logger.warn(new ParameterizedMessage("failed to notify listener [{}]", listener), t);
}
}
if (listener != null) {
diff --git a/core/src/main/java/org/elasticsearch/tasks/LoggingTaskListener.java b/core/src/main/java/org/elasticsearch/tasks/LoggingTaskListener.java
index 002d0b6a46..2378a356d8 100644
--- a/core/src/main/java/org/elasticsearch/tasks/LoggingTaskListener.java
+++ b/core/src/main/java/org/elasticsearch/tasks/LoggingTaskListener.java
@@ -19,7 +19,8 @@
package org.elasticsearch.tasks;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.common.logging.Loggers;
/**
@@ -27,7 +28,7 @@ import org.elasticsearch.common.logging.Loggers;
* need a listener but aren't returning the result to the user.
*/
public final class LoggingTaskListener<Response> implements TaskListener<Response> {
- private static final ESLogger logger = Loggers.getLogger(LoggingTaskListener.class);
+ private static final Logger logger = Loggers.getLogger(LoggingTaskListener.class);
/**
* Get the instance of NoopActionListener cast appropriately.
@@ -49,6 +50,6 @@ public final class LoggingTaskListener<Response> implements TaskListener<Respons
@Override
public void onFailure(Task task, Throwable e) {
- logger.warn("{} failed with exception", e, task.getId());
+ logger.warn(new ParameterizedMessage("{} failed with exception", task.getId()), e);
}
}
diff --git a/core/src/main/java/org/elasticsearch/tasks/TaskManager.java b/core/src/main/java/org/elasticsearch/tasks/TaskManager.java
index f0fea6aa2a..8c0aa2244d 100644
--- a/core/src/main/java/org/elasticsearch/tasks/TaskManager.java
+++ b/core/src/main/java/org/elasticsearch/tasks/TaskManager.java
@@ -19,6 +19,7 @@
package org.elasticsearch.tasks;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.ExceptionsHelper;
@@ -46,6 +47,7 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;
+import static org.elasticsearch.ExceptionsHelper.detailedMessage;
import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
/**
@@ -166,7 +168,7 @@ public class TaskManager extends AbstractComponent implements ClusterStateListen
try {
taskResult = task.result(localNode, error);
} catch (IOException ex) {
- logger.warn("couldn't store error {}", ex, ExceptionsHelper.detailedMessage(error));
+ logger.warn(new ParameterizedMessage("couldn't store error {}", ExceptionsHelper.detailedMessage(error)), ex);
listener.onFailure(ex);
return;
}
@@ -178,7 +180,7 @@ public class TaskManager extends AbstractComponent implements ClusterStateListen
@Override
public void onFailure(Exception e) {
- logger.warn("couldn't store error {}", e, ExceptionsHelper.detailedMessage(error));
+ logger.warn(new ParameterizedMessage("couldn't store error {}", ExceptionsHelper.detailedMessage(error)), e);
listener.onFailure(e);
}
});
@@ -199,7 +201,7 @@ public class TaskManager extends AbstractComponent implements ClusterStateListen
try {
taskResult = task.result(localNode, response);
} catch (IOException ex) {
- logger.warn("couldn't store response {}", ex, response);
+ logger.warn(new ParameterizedMessage("couldn't store response {}", response), ex);
listener.onFailure(ex);
return;
}
@@ -212,7 +214,7 @@ public class TaskManager extends AbstractComponent implements ClusterStateListen
@Override
public void onFailure(Exception e) {
- logger.warn("couldn't store response {}", e, response);
+ logger.warn(new ParameterizedMessage("couldn't store response {}", response), e);
listener.onFailure(e);
}
});
diff --git a/core/src/main/java/org/elasticsearch/tasks/TaskResultsService.java b/core/src/main/java/org/elasticsearch/tasks/TaskResultsService.java
index 4b68e8af97..a05919acdf 100644
--- a/core/src/main/java/org/elasticsearch/tasks/TaskResultsService.java
+++ b/core/src/main/java/org/elasticsearch/tasks/TaskResultsService.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.tasks;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
@@ -163,7 +164,7 @@ public class TaskResultsService extends AbstractComponent {
Streams.copy(is, out);
return out.toString(IOUtils.UTF_8);
} catch (Exception e) {
- logger.error("failed to create tasks results index template [{}]", e, TASK_RESULT_INDEX_MAPPING_FILE);
+ logger.error(new ParameterizedMessage("failed to create tasks results index template [{}]", TASK_RESULT_INDEX_MAPPING_FILE), e);
throw new IllegalStateException("failed to create tasks results index template [" + TASK_RESULT_INDEX_MAPPING_FILE + "]", e);
}
diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
index 044f19eaee..08617b75e0 100644
--- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
+++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
@@ -19,6 +19,7 @@
package org.elasticsearch.threadpool;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.util.Counter;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.common.Nullable;
@@ -413,7 +414,7 @@ public class ThreadPool extends AbstractComponent implements Closeable {
try {
runnable.run();
} catch (Exception e) {
- logger.warn("failed to run {}", e, runnable.toString());
+ logger.warn(new ParameterizedMessage("failed to run {}", runnable.toString()), e);
throw e;
}
}
@@ -779,14 +780,14 @@ public class ThreadPool extends AbstractComponent implements Closeable {
@Override
public void onFailure(Exception e) {
- threadPool.logger.warn("failed to run scheduled task [{}] on thread pool [{}]", e, runnable.toString(), executor);
+ threadPool.logger.warn(new ParameterizedMessage("failed to run scheduled task [{}] on thread pool [{}]", runnable.toString(), executor), e);
}
@Override
public void onRejection(Exception e) {
run = false;
if (threadPool.logger.isDebugEnabled()) {
- threadPool.logger.debug("scheduled task [{}] was rejected on thread pool [{}]", e, runnable, executor);
+ threadPool.logger.debug(new ParameterizedMessage("scheduled task [{}] was rejected on thread pool [{}]", runnable, executor), e);
}
}
diff --git a/core/src/main/java/org/elasticsearch/transport/TcpTransport.java b/core/src/main/java/org/elasticsearch/transport/TcpTransport.java
index 505890f2ea..ffe3bfcd96 100644
--- a/core/src/main/java/org/elasticsearch/transport/TcpTransport.java
+++ b/core/src/main/java/org/elasticsearch/transport/TcpTransport.java
@@ -20,6 +20,7 @@ package org.elasticsearch.transport;
import com.carrotsearch.hppc.IntHashSet;
import com.carrotsearch.hppc.IntSet;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
@@ -258,10 +259,10 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i
sendMessage(channel, pingHeader, successfulPings::inc, false);
} catch (Exception e) {
if (isOpen(channel)) {
- logger.debug("[{}] failed to send ping transport message", e, node);
+ logger.debug(new ParameterizedMessage("[{}] failed to send ping transport message", node), e);
failedPings.inc();
} else {
- logger.trace("[{}] failed to send ping transport message (channel closed)", e, node);
+ logger.trace(new ParameterizedMessage("[{}] failed to send ping transport message (channel closed)", node), e);
}
}
}
@@ -397,7 +398,7 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i
try {
nodeChannels = connectToChannels(node);
} catch (Exception e) {
- logger.trace("failed to connect to [{}], cleaning dangling connections", e, node);
+ logger.trace(new ParameterizedMessage("failed to connect to [{}], cleaning dangling connections", node), e);
throw e;
}
}
@@ -772,7 +773,7 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i
try {
closeChannels(entry.getValue());
} catch (Exception e) {
- logger.debug("Error closing serverChannel for profile [{}]", e, entry.getKey());
+ logger.debug(new ParameterizedMessage("Error closing serverChannel for profile [{}]", entry.getKey()), e);
}
}
@@ -802,21 +803,27 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i
return;
}
if (isCloseConnectionException(e)) {
- logger.trace("close connection exception caught on transport layer [{}], disconnecting from relevant node", e,
- channel);
+ logger.trace(
+ new ParameterizedMessage(
+ "close connection exception caught on transport layer [{}], disconnecting from relevant node",
+ channel),
+ e);
// close the channel, which will cause a node to be disconnected if relevant
disconnectFromNodeChannel(channel, e);
} else if (isConnectException(e)) {
- logger.trace("connect exception caught on transport layer [{}]", e, channel);
+ logger.trace(new ParameterizedMessage("connect exception caught on transport layer [{}]", channel), e);
// close the channel as safe measure, which will cause a node to be disconnected if relevant
disconnectFromNodeChannel(channel, e);
} else if (e instanceof BindException) {
- logger.trace("bind exception caught on transport layer [{}]", e, channel);
+ logger.trace(new ParameterizedMessage("bind exception caught on transport layer [{}]", channel), e);
// close the channel as safe measure, which will cause a node to be disconnected if relevant
disconnectFromNodeChannel(channel, e);
} else if (e instanceof CancelledKeyException) {
- logger.trace("cancelled key exception caught on transport layer [{}], disconnecting from relevant node", e,
- channel);
+ logger.trace(
+ new ParameterizedMessage(
+ "cancelled key exception caught on transport layer [{}], disconnecting from relevant node",
+ channel),
+ e);
// close the channel as safe measure, which will cause a node to be disconnected if relevant
disconnectFromNodeChannel(channel, e);
} else if (e instanceof TcpTransport.HttpOnTransportException) {
@@ -825,7 +832,7 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i
sendMessage(channel, new BytesArray(e.getMessage().getBytes(StandardCharsets.UTF_8)), () -> {}, true);
}
} else {
- logger.warn("exception caught on transport layer [{}], closing connection", e, channel);
+ logger.warn(new ParameterizedMessage("exception caught on transport layer [{}], closing connection", channel), e);
// close the channel, which will cause a node to be disconnected if relevant
disconnectFromNodeChannel(channel, e);
}
@@ -1260,7 +1267,7 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i
try {
handler.handleException(rtx);
} catch (Exception e) {
- logger.error("failed to handle exception response [{}]", e, handler);
+ logger.error(new ParameterizedMessage("failed to handle exception response [{}]", handler), e);
}
});
}
@@ -1297,7 +1304,7 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i
transportChannel.sendResponse(e);
} catch (IOException inner) {
inner.addSuppressed(e);
- logger.warn("Failed to send error message back to client for action [{}]", inner, action);
+ logger.warn(new ParameterizedMessage("Failed to send error message back to client for action [{}]", action), inner);
}
}
return action;
@@ -1343,7 +1350,8 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i
transportChannel.sendResponse(e);
} catch (Exception inner) {
inner.addSuppressed(e);
- logger.warn("Failed to send error message back to client for action [{}]", inner, reg.getAction());
+ logger.warn(
+ new ParameterizedMessage("Failed to send error message back to client for action [{}]", reg.getAction()), inner);
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/transport/TransportChannelResponseHandler.java b/core/src/main/java/org/elasticsearch/transport/TransportChannelResponseHandler.java
index 5f30296409..4e1a08ca36 100644
--- a/core/src/main/java/org/elasticsearch/transport/TransportChannelResponseHandler.java
+++ b/core/src/main/java/org/elasticsearch/transport/TransportChannelResponseHandler.java
@@ -19,7 +19,8 @@
package org.elasticsearch.transport;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.IOException;
@@ -30,12 +31,12 @@ import java.util.function.Supplier;
*/
public class TransportChannelResponseHandler<T extends TransportResponse> implements TransportResponseHandler<T> {
- private final ESLogger logger;
+ private final Logger logger;
private final TransportChannel channel;
private final String extraInfoOnError;
private final Supplier<T> responseSupplier;
- public TransportChannelResponseHandler(ESLogger logger, TransportChannel channel, String extraInfoOnError,
+ public TransportChannelResponseHandler(Logger logger, TransportChannel channel, String extraInfoOnError,
Supplier<T> responseSupplier) {
this.logger = logger;
this.channel = channel;
@@ -62,7 +63,8 @@ public class TransportChannelResponseHandler<T extends TransportResponse> implem
try {
channel.sendResponse(exp);
} catch (IOException e) {
- logger.debug("failed to send failure {}", e, extraInfoOnError == null ? "" : "(" + extraInfoOnError + ")");
+ logger.debug(
+ new ParameterizedMessage("failed to send failure {}", extraInfoOnError == null ? "" : "(" + extraInfoOnError + ")"), e);
}
}
diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java
index 15164a5d20..5c18d89654 100644
--- a/core/src/main/java/org/elasticsearch/transport/TransportService.java
+++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java
@@ -19,6 +19,8 @@
package org.elasticsearch.transport;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAction;
import org.elasticsearch.cluster.ClusterName;
@@ -29,7 +31,6 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.metrics.MeanMetric;
import org.elasticsearch.common.regex.Regex;
@@ -108,7 +109,7 @@ public class TransportService extends AbstractLifecycleComponent {
listSetting("transport.tracer.exclude", Arrays.asList("internal:discovery/zen/fd*", TransportLivenessAction.NAME),
Function.identity(), Property.Dynamic, Property.NodeScope);
- private final ESLogger tracerLog;
+ private final Logger tracerLog;
volatile String[] tracerLogInclude;
volatile String[] tracelLogExclude;
@@ -205,11 +206,19 @@ public class TransportService extends AbstractLifecycleComponent {
@Override
public void onRejection(Exception e) {
// if we get rejected during node shutdown we don't wanna bubble it up
- logger.debug("failed to notify response handler on rejection, action: {}", e, holderToNotify.action());
+ logger.debug(
+ new ParameterizedMessage(
+ "failed to notify response handler on rejection, action: {}",
+ holderToNotify.action()),
+ e);
}
@Override
public void onFailure(Exception e) {
- logger.warn("failed to notify response handler on exception, action: {}", e, holderToNotify.action());
+ logger.warn(
+ new ParameterizedMessage(
+ "failed to notify response handler on exception, action: {}",
+ holderToNotify.action()),
+ e);
}
@Override
public void doRun() {
@@ -483,11 +492,19 @@ public class TransportService extends AbstractLifecycleComponent {
@Override
public void onRejection(Exception e) {
// if we get rejected during node shutdown we don't wanna bubble it up
- logger.debug("failed to notify response handler on rejection, action: {}", e, holderToNotify.action());
+ logger.debug(
+ new ParameterizedMessage(
+ "failed to notify response handler on rejection, action: {}",
+ holderToNotify.action()),
+ e);
}
@Override
public void onFailure(Exception e) {
- logger.warn("failed to notify response handler on exception, action: {}", e, holderToNotify.action());
+ logger.warn(
+ new ParameterizedMessage(
+ "failed to notify response handler on exception, action: {}",
+ holderToNotify.action()),
+ e);
}
@Override
protected void doRun() throws Exception {
@@ -528,7 +545,8 @@ public class TransportService extends AbstractLifecycleComponent {
channel.sendResponse(e);
} catch (Exception inner) {
inner.addSuppressed(e);
- logger.warn("failed to notify channel of error message for action [{}]", inner, action);
+ logger.warn(
+ new ParameterizedMessage("failed to notify channel of error message for action [{}]", action), inner);
}
}
});
@@ -539,7 +557,7 @@ public class TransportService extends AbstractLifecycleComponent {
channel.sendResponse(e);
} catch (Exception inner) {
inner.addSuppressed(e);
- logger.warn("failed to notify channel of error message for action [{}]", inner, action);
+ logger.warn(new ParameterizedMessage("failed to notify channel of error message for action [{}]", action), inner);
}
}
@@ -661,7 +679,7 @@ public class TransportService extends AbstractLifecycleComponent {
}
protected void traceResponseSent(long requestId, String action, Exception e) {
- tracerLog.trace("[{}][{}] sent error response", e, requestId, action);
+ tracerLog.trace(new ParameterizedMessage("[{}][{}] sent error response", requestId, action), e);
}
@Override
@@ -941,14 +959,14 @@ public class TransportService extends AbstractLifecycleComponent {
}
static class DirectResponseChannel implements TransportChannel {
- final ESLogger logger;
+ final Logger logger;
final DiscoveryNode localNode;
private final String action;
private final long requestId;
final TransportServiceAdapter adapter;
final ThreadPool threadPool;
- public DirectResponseChannel(ESLogger logger, DiscoveryNode localNode, String action, long requestId,
+ public DirectResponseChannel(Logger logger, DiscoveryNode localNode, String action, long requestId,
TransportServiceAdapter adapter, ThreadPool threadPool) {
this.logger = logger;
this.localNode = localNode;
@@ -1034,7 +1052,7 @@ public class TransportService extends AbstractLifecycleComponent {
try {
handler.handleException(rtx);
} catch (Exception e) {
- logger.error("failed to handle exception for action [{}], handler [{}]", e, action, handler);
+ logger.error(new ParameterizedMessage("failed to handle exception for action [{}], handler [{}]", action, handler), e);
}
}
diff --git a/core/src/main/java/org/elasticsearch/transport/local/LocalTransport.java b/core/src/main/java/org/elasticsearch/transport/local/LocalTransport.java
index 61559442ff..39d7502af9 100644
--- a/core/src/main/java/org/elasticsearch/transport/local/LocalTransport.java
+++ b/core/src/main/java/org/elasticsearch/transport/local/LocalTransport.java
@@ -19,6 +19,7 @@
package org.elasticsearch.transport.local;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
@@ -71,9 +72,6 @@ import java.util.concurrent.atomic.AtomicLong;
import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.newConcurrentMap;
-/**
- *
- */
public class LocalTransport extends AbstractLifecycleComponent implements Transport {
public static final String LOCAL_TRANSPORT_THREAD_NAME_PREFIX = "local_transport";
@@ -306,7 +304,7 @@ public class LocalTransport extends AbstractLifecycleComponent implements Transp
});
}
} else {
- logger.warn("Failed to receive message for action [{}]", e, action);
+ logger.warn(new ParameterizedMessage("Failed to receive message for action [{}]", action), e);
}
}
}
@@ -355,7 +353,8 @@ public class LocalTransport extends AbstractLifecycleComponent implements Transp
transportChannel.sendResponse(e);
} catch (Exception inner) {
inner.addSuppressed(e);
- logger.warn("Failed to send error message back to client for action [{}]", inner, action);
+ logger.warn(
+ new ParameterizedMessage("Failed to send error message back to client for action [{}]", action), inner);
}
}
}
@@ -366,7 +365,7 @@ public class LocalTransport extends AbstractLifecycleComponent implements Transp
transportChannel.sendResponse(e);
} catch (Exception inner) {
inner.addSuppressed(e);
- logger.warn("Failed to send error message back to client for action [{}]", inner, action);
+ logger.warn(new ParameterizedMessage("Failed to send error message back to client for action [{}]", action), inner);
}
}
@@ -414,7 +413,7 @@ public class LocalTransport extends AbstractLifecycleComponent implements Transp
try {
handler.handleException(rtx);
} catch (Exception e) {
- logger.error("failed to handle exception response [{}]", e, handler);
+ logger.error(new ParameterizedMessage("failed to handle exception response [{}]", handler), e);
}
}
diff --git a/core/src/main/java/org/elasticsearch/tribe/TribeService.java b/core/src/main/java/org/elasticsearch/tribe/TribeService.java
index 3ca8015527..8ee09ba7ef 100644
--- a/core/src/main/java/org/elasticsearch/tribe/TribeService.java
+++ b/core/src/main/java/org/elasticsearch/tribe/TribeService.java
@@ -19,6 +19,7 @@
package org.elasticsearch.tribe;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
@@ -274,7 +275,7 @@ public class TribeService extends AbstractLifecycleComponent {
otherNode.close();
} catch (Exception inner) {
inner.addSuppressed(e);
- logger.warn("failed to close node {} on failed start", inner, otherNode);
+ logger.warn(new ParameterizedMessage("failed to close node {} on failed start", otherNode), inner);
}
}
if (e instanceof RuntimeException) {
@@ -296,7 +297,7 @@ public class TribeService extends AbstractLifecycleComponent {
try {
node.close();
} catch (Exception e) {
- logger.warn("failed to close node {}", e, node);
+ logger.warn(new ParameterizedMessage("failed to close node {}", node), e);
}
}
}
@@ -320,7 +321,7 @@ public class TribeService extends AbstractLifecycleComponent {
event,
ClusterStateTaskConfig.build(Priority.NORMAL),
executor,
- (source, e) -> logger.warn("failed to process [{}]", e, source));
+ (source, e) -> logger.warn(new ParameterizedMessage("failed to process [{}]", source), e));
}
}
diff --git a/core/src/main/java/org/elasticsearch/watcher/FileWatcher.java b/core/src/main/java/org/elasticsearch/watcher/FileWatcher.java
index a6b0bdd840..8d5c04e770 100644
--- a/core/src/main/java/org/elasticsearch/watcher/FileWatcher.java
+++ b/core/src/main/java/org/elasticsearch/watcher/FileWatcher.java
@@ -18,8 +18,8 @@
*/
package org.elasticsearch.watcher;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.io.FileSystemUtils;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import java.io.IOException;
@@ -38,7 +38,7 @@ public class FileWatcher extends AbstractResourceWatcher<FileChangesListener> {
private FileObserver rootFileObserver;
private Path file;
- private static final ESLogger logger = Loggers.getLogger(FileWatcher.class);
+ private static final Logger logger = Loggers.getLogger(FileWatcher.class);
/**
* Creates new file watcher on the given directory
diff --git a/core/src/test/java/org/apache/log4j/Java9HackTests.java b/core/src/test/java/org/apache/log4j/Java9HackTests.java
deleted file mode 100644
index e917f1d306..0000000000
--- a/core/src/test/java/org/apache/log4j/Java9HackTests.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.log4j;
-
-import org.elasticsearch.test.ESTestCase;
-
-public class Java9HackTests extends ESTestCase {
- public void testJava9Hack() {
- assertNotNull(MDC.mdc.tlm != null);
- }
-}
diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java
index 23cf7be8ce..b5edc1b53c 100644
--- a/core/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java
+++ b/core/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.action.support.replication;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.CorruptIndexException;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
@@ -34,7 +35,6 @@ import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.index.shard.IndexShardNotStartedException;
@@ -491,7 +491,7 @@ public class ReplicationOperationTests extends ESTestCase {
public TestReplicationOperation(Request request, Primary<Request, Request, TestPrimary.Result> primary,
ActionListener<TestPrimary.Result> listener, boolean executeOnReplicas,
- Replicas<Request> replicas, Supplier<ClusterState> clusterStateSupplier, ESLogger logger, String opType) {
+ Replicas<Request> replicas, Supplier<ClusterState> clusterStateSupplier, Logger logger, String opType) {
super(request, primary, listener, executeOnReplicas, replicas, clusterStateSupplier, logger, opType);
}
}
diff --git a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java
index cbbe05f6e7..ab305c65cb 100644
--- a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java
+++ b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java
@@ -19,8 +19,8 @@
package org.elasticsearch.bootstrap;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.Constants;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
@@ -67,13 +67,13 @@ public class BootstrapCheckTests extends ESTestCase {
}
public void testNoLogMessageInNonProductionMode() {
- final ESLogger logger = mock(ESLogger.class);
+ final Logger logger = mock(Logger.class);
BootstrapCheck.check(false, randomBoolean(), Collections.emptyList(), logger);
verifyNoMoreInteractions(logger);
}
public void testLogMessageInProductionMode() {
- final ESLogger logger = mock(ESLogger.class);
+ final Logger logger = mock(Logger.class);
final boolean ignoreSystemChecks = randomBoolean();
BootstrapCheck.check(true, ignoreSystemChecks, Collections.emptyList(), logger);
verify(logger).info("bound or publishing to a non-loopback or non-link-local address, enforcing bootstrap checks");
@@ -550,7 +550,7 @@ public class BootstrapCheckTests extends ESTestCase {
() -> BootstrapCheck.check(true, false, Collections.singletonList(check), "testIgnoringSystemChecks"));
assertThat(notIgnored, hasToString(containsString("error")));
- final ESLogger logger = mock(ESLogger.class);
+ final Logger logger = mock(Logger.class);
// nothing should happen if we ignore system checks
BootstrapCheck.check(true, true, Collections.singletonList(check), logger);
diff --git a/core/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java b/core/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java
index b48fcc78c6..ad60f658a5 100644
--- a/core/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java
+++ b/core/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java
@@ -19,10 +19,11 @@
package org.elasticsearch.bootstrap;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.util.Constants;
import org.elasticsearch.common.SuppressLoggerChecks;
import org.elasticsearch.common.io.PathUtils;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.test.ESTestCase;
import java.io.BufferedReader;
@@ -67,16 +68,16 @@ public class MaxMapCountCheckTests extends ESTestCase {
reset(reader);
final IOException ioException = new IOException("fatal");
when(reader.readLine()).thenThrow(ioException);
- final ESLogger logger = mock(ESLogger.class);
+ final Logger logger = mock(Logger.class);
assertThat(check.getMaxMapCount(logger), equalTo(-1L));
- verify(logger).warn("I/O exception while trying to read [{}]", ioException, procSysVmMaxMapCountPath);
+ verify(logger).warn(new ParameterizedMessage("I/O exception while trying to read [{}]", procSysVmMaxMapCountPath), ioException);
verify(reader).close();
reset(reader);
reset(logger);
when(reader.readLine()).thenReturn("eof");
assertThat(check.getMaxMapCount(logger), equalTo(-1L));
- verify(logger).warn(eq("unable to parse vm.max_map_count [{}]"), any(NumberFormatException.class), eq("eof"));
+ verify(logger).warn(eq(new ParameterizedMessage("unable to parse vm.max_map_count [{}]", "eof")), any(NumberFormatException.class));
verify(reader).close();
}
diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java
index 94863dc4f5..7e914a06a4 100644
--- a/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java
@@ -20,6 +20,7 @@
package org.elasticsearch.cluster.allocation;
import com.carrotsearch.hppc.ObjectIntHashMap;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
@@ -27,7 +28,6 @@ import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.allocation.decider.AwarenessAllocationDecider;
import org.elasticsearch.common.Priority;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.zen.ZenDiscovery;
@@ -46,7 +46,7 @@ import static org.hamcrest.Matchers.equalTo;
@ClusterScope(scope= ESIntegTestCase.Scope.TEST, numDataNodes =0, minNumDataNodes = 2)
public class AwarenessAllocationIT extends ESIntegTestCase {
- private final ESLogger logger = Loggers.getLogger(AwarenessAllocationIT.class);
+ private final Logger logger = Loggers.getLogger(AwarenessAllocationIT.class);
@Override
protected int numberOfReplicas() {
diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java
index 70af580824..f022b2a385 100644
--- a/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.allocation;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse;
@@ -39,7 +40,6 @@ import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDeci
import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.io.FileSystemUtils;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
@@ -69,7 +69,7 @@ import static org.hamcrest.Matchers.hasSize;
*/
@ClusterScope(scope = Scope.TEST, numDataNodes = 0)
public class ClusterRerouteIT extends ESIntegTestCase {
- private final ESLogger logger = Loggers.getLogger(ClusterRerouteIT.class);
+ private final Logger logger = Loggers.getLogger(ClusterRerouteIT.class);
public void testRerouteWithCommands_disableAllocationSettings() throws Exception {
Settings commonSettings = Settings.builder()
diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java
index 4cc0408642..627fc03701 100644
--- a/core/src/test/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java
@@ -19,12 +19,12 @@
package org.elasticsearch.cluster.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilders;
@@ -39,7 +39,7 @@ import static org.hamcrest.Matchers.equalTo;
@ClusterScope(scope= Scope.TEST, numDataNodes =0)
public class FilteringAllocationIT extends ESIntegTestCase {
- private final ESLogger logger = Loggers.getLogger(FilteringAllocationIT.class);
+ private final Logger logger = Loggers.getLogger(FilteringAllocationIT.class);
public void testDecommissionNodeNoReplicas() throws Exception {
logger.info("--> starting 2 nodes");
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java
index 7971b2773b..3803f54ba7 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java
@@ -20,6 +20,7 @@
package org.elasticsearch.cluster.routing.allocation;
import com.carrotsearch.hppc.cursors.ObjectCursor;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -30,7 +31,6 @@ import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.CollectionUtils;
@@ -45,7 +45,7 @@ import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
public class AddIncrementallyTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(AddIncrementallyTests.class);
+ private final Logger logger = Loggers.getLogger(AddIncrementallyTests.class);
public void testAddNodesAndIndices() {
Settings.Builder settings = Settings.builder();
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java
index 8c764ed2ae..24faf9c45a 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -41,7 +42,6 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
@@ -66,7 +66,7 @@ import static org.hamcrest.Matchers.nullValue;
/**
*/
public class AllocationCommandsTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(AllocationCommandsTests.class);
+ private final Logger logger = Loggers.getLogger(AllocationCommandsTests.class);
public void testMoveShardCommand() {
AllocationService allocation = createAllocationService(Settings.builder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build());
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java
index bf71fa766e..f85cc97560 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -31,7 +32,6 @@ import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands;
import org.elasticsearch.cluster.routing.allocation.command.CancelAllocationCommand;
import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand;
import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.cluster.ESAllocationTestCase;
@@ -49,7 +49,7 @@ import static org.hamcrest.Matchers.sameInstance;
*/
public class AwarenessAllocationTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(AwarenessAllocationTests.class);
+ private final Logger logger = Loggers.getLogger(AwarenessAllocationTests.class);
public void testMoveShardOnceNewNodeWithAttributeAdded1() {
AllocationService strategy = createAllocationService(Settings.builder()
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java
index f45567d280..b426220614 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java
@@ -20,6 +20,7 @@
package org.elasticsearch.cluster.routing.allocation;
import com.carrotsearch.hppc.cursors.ObjectCursor;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.ArrayUtil;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
@@ -36,7 +37,6 @@ import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator;
import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator;
import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
@@ -52,7 +52,7 @@ import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
public class BalanceConfigurationTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(BalanceConfigurationTests.class);
+ private final Logger logger = Loggers.getLogger(BalanceConfigurationTests.class);
// TODO maybe we can randomize these numbers somehow
final int numberOfNodes = 25;
final int numberOfIndices = 12;
@@ -198,7 +198,7 @@ public class BalanceConfigurationTests extends ESAllocationTestCase {
}
- private void assertReplicaBalance(ESLogger logger, RoutingNodes nodes, int numberOfNodes, int numberOfIndices, int numberOfReplicas, int numberOfShards, float treshold) {
+ private void assertReplicaBalance(Logger logger, RoutingNodes nodes, int numberOfNodes, int numberOfIndices, int numberOfReplicas, int numberOfShards, float treshold) {
final int numShards = numberOfIndices * numberOfShards * (numberOfReplicas + 1);
final float avgNumShards = (float) (numShards) / (float) (numberOfNodes);
final int minAvgNumberOfShards = Math.round(Math.round(Math.floor(avgNumShards - treshold)));
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java
index 9346bebd17..7d7db4ef21 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -29,7 +30,6 @@ import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.cluster.ESAllocationTestCase;
@@ -45,7 +45,7 @@ import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.equalTo;
public class ClusterRebalanceRoutingTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(ClusterRebalanceRoutingTests.class);
+ private final Logger logger = Loggers.getLogger(ClusterRebalanceRoutingTests.class);
public void testAlways() {
AllocationService strategy = createAllocationService(Settings.builder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(),
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ConcurrentRebalanceRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ConcurrentRebalanceRoutingTests.java
index 36565e1f85..d7b56c6508 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ConcurrentRebalanceRoutingTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ConcurrentRebalanceRoutingTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -26,7 +27,6 @@ import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.cluster.ESAllocationTestCase;
@@ -39,7 +39,7 @@ import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
public class ConcurrentRebalanceRoutingTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(ConcurrentRebalanceRoutingTests.class);
+ private final Logger logger = Loggers.getLogger(ConcurrentRebalanceRoutingTests.class);
public void testClusterConcurrentRebalance() {
AllocationService strategy = createAllocationService(Settings.builder()
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java
index abc69cbf91..c4d8545e97 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -28,7 +29,6 @@ import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands;
import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand;
import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.cluster.ESAllocationTestCase;
@@ -41,7 +41,7 @@ import static org.hamcrest.Matchers.equalTo;
/**
*/
public class DeadNodesAllocationTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(DeadNodesAllocationTests.class);
+ private final Logger logger = Loggers.getLogger(DeadNodesAllocationTests.class);
public void testSimpleDeadNodeOnStartedPrimaryShard() {
AllocationService allocation = createAllocationService(Settings.builder()
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ElectReplicaAsPrimaryDuringRelocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ElectReplicaAsPrimaryDuringRelocationTests.java
index d840e5ef41..5707a229da 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ElectReplicaAsPrimaryDuringRelocationTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ElectReplicaAsPrimaryDuringRelocationTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -27,7 +28,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.cluster.ESAllocationTestCase;
@@ -40,7 +40,7 @@ import static org.hamcrest.Matchers.equalTo;
*
*/
public class ElectReplicaAsPrimaryDuringRelocationTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(ElectReplicaAsPrimaryDuringRelocationTests.class);
+ private final Logger logger = Loggers.getLogger(ElectReplicaAsPrimaryDuringRelocationTests.class);
public void testElectReplicaAsPrimaryDuringRelocation() {
AllocationService strategy = createAllocationService(Settings.builder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build());
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java
index 15f08a9723..d76b666549 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterInfo;
import org.elasticsearch.cluster.ClusterInfoService;
@@ -32,7 +33,6 @@ import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands;
import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.cluster.ESAllocationTestCase;
@@ -43,7 +43,7 @@ import static org.hamcrest.Matchers.equalTo;
/**
*/
public class ExpectedShardSizeAllocationTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(ExpectedShardSizeAllocationTests.class);
+ private final Logger logger = Loggers.getLogger(ExpectedShardSizeAllocationTests.class);
public void testInitializingHasExpectedSize() {
final long byteSize = randomIntBetween(0, Integer.MAX_VALUE);
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java
index 811c57e420..6dd5c3a7b3 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -28,7 +29,6 @@ import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.cluster.ESAllocationTestCase;
@@ -38,7 +38,7 @@ import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
import static org.hamcrest.Matchers.equalTo;
public class FailedNodeRoutingTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(FailedNodeRoutingTests.class);
+ private final Logger logger = Loggers.getLogger(FailedNodeRoutingTests.class);
public void testSimpleFailedNodeTest() {
AllocationService strategy = createAllocationService(Settings.builder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(),
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java
index 8c7043a743..852fbd1944 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -30,7 +31,6 @@ import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands;
import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand;
import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.cluster.ESAllocationTestCase;
@@ -54,7 +54,7 @@ import static org.hamcrest.Matchers.nullValue;
*
*/
public class FailedShardsRoutingTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(FailedShardsRoutingTests.class);
+ private final Logger logger = Loggers.getLogger(FailedShardsRoutingTests.class);
public void testFailedShardPrimaryRelocatingToAndFrom() {
AllocationService allocation = createAllocationService(Settings.builder()
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FilterRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FilterRoutingTests.java
index 16b918373a..6d0542b95c 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FilterRoutingTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FilterRoutingTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -28,7 +29,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.cluster.ESAllocationTestCase;
@@ -44,7 +44,7 @@ import static org.hamcrest.Matchers.equalTo;
/**
*/
public class FilterRoutingTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(FilterRoutingTests.class);
+ private final Logger logger = Loggers.getLogger(FilterRoutingTests.class);
public void testClusterFilters() {
AllocationService strategy = createAllocationService(Settings.builder()
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java
index 311a1825c1..cd35669f04 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -27,7 +28,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.cluster.ESAllocationTestCase;
@@ -42,7 +42,7 @@ import static org.hamcrest.Matchers.nullValue;
*
*/
public class IndexBalanceTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(IndexBalanceTests.class);
+ private final Logger logger = Loggers.getLogger(IndexBalanceTests.class);
public void testBalanceAllNodesStarted() {
AllocationService strategy = createAllocationService(Settings.builder()
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java
index 2e8679ef81..eab7d469da 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
@@ -44,7 +45,6 @@ import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllo
import org.elasticsearch.cluster.routing.allocation.decider.NodeVersionAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ReplicaAfterPrimaryActiveAllocationDecider;
import org.elasticsearch.common.UUIDs;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.LocalTransportAddress;
@@ -76,7 +76,7 @@ import static org.hamcrest.Matchers.nullValue;
*
*/
public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(NodeVersionAllocationDeciderTests.class);
+ private final Logger logger = Loggers.getLogger(NodeVersionAllocationDeciderTests.class);
public void testDoNotAllocateFromPrimary() {
AllocationService strategy = createAllocationService(Settings.builder()
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PreferPrimaryAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PreferPrimaryAllocationTests.java
index 17e4011952..7badada5d5 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PreferPrimaryAllocationTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PreferPrimaryAllocationTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -26,7 +27,6 @@ import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.cluster.ESAllocationTestCase;
@@ -37,7 +37,7 @@ import static org.hamcrest.Matchers.equalTo;
/**
*/
public class PreferPrimaryAllocationTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(PreferPrimaryAllocationTests.class);
+ private final Logger logger = Loggers.getLogger(PreferPrimaryAllocationTests.class);
public void testPreferPrimaryAllocationOverReplicas() {
logger.info("create an allocation with 1 initial recoveries");
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryElectionRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryElectionRoutingTests.java
index 2d07e60f14..ccdc4deb02 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryElectionRoutingTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryElectionRoutingTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -26,7 +27,6 @@ import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.cluster.ESAllocationTestCase;
@@ -40,7 +40,7 @@ import static org.hamcrest.Matchers.nullValue;
*
*/
public class PrimaryElectionRoutingTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(PrimaryElectionRoutingTests.class);
+ private final Logger logger = Loggers.getLogger(PrimaryElectionRoutingTests.class);
public void testBackupElectionToPrimaryWhenPrimaryCanBeAllocatedToAnotherNode() {
AllocationService strategy = createAllocationService(Settings.builder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build());
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryNotRelocatedWhileBeingRecoveredTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryNotRelocatedWhileBeingRecoveredTests.java
index a2dab942e3..c1d4a168e8 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryNotRelocatedWhileBeingRecoveredTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryNotRelocatedWhileBeingRecoveredTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -26,7 +27,6 @@ import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.cluster.ESAllocationTestCase;
@@ -39,7 +39,7 @@ import static org.hamcrest.Matchers.equalTo;
*
*/
public class PrimaryNotRelocatedWhileBeingRecoveredTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(PrimaryNotRelocatedWhileBeingRecoveredTests.class);
+ private final Logger logger = Loggers.getLogger(PrimaryNotRelocatedWhileBeingRecoveredTests.class);
public void testPrimaryNotRelocatedWhileBeingRecoveredFrom() {
AllocationService strategy = createAllocationService(Settings.builder()
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java
index f1c4d99d87..a6929b651e 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterInfo;
import org.elasticsearch.cluster.ClusterInfoService;
@@ -31,7 +32,6 @@ import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.cluster.ESAllocationTestCase;
@@ -47,7 +47,7 @@ import static org.hamcrest.Matchers.nullValue;
*
*/
public class RebalanceAfterActiveTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(RebalanceAfterActiveTests.class);
+ private final Logger logger = Loggers.getLogger(RebalanceAfterActiveTests.class);
public void testRebalanceOnlyAfterAllShardsAreActive() {
final long[] sizes = new long[5];
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ReplicaAllocatedAfterPrimaryTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ReplicaAllocatedAfterPrimaryTests.java
index d7ad0972b1..cf9db4ec54 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ReplicaAllocatedAfterPrimaryTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ReplicaAllocatedAfterPrimaryTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -26,7 +27,6 @@ import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.cluster.ESAllocationTestCase;
@@ -42,7 +42,7 @@ import static org.hamcrest.Matchers.nullValue;
*
*/
public class ReplicaAllocatedAfterPrimaryTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(ReplicaAllocatedAfterPrimaryTests.class);
+ private final Logger logger = Loggers.getLogger(ReplicaAllocatedAfterPrimaryTests.class);
public void testBackupIsAllocatedAfterPrimary() {
AllocationService strategy = createAllocationService(Settings.builder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build());
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesIntegrityTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesIntegrityTests.java
index dbcd035ad7..c5998b0b73 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesIntegrityTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesIntegrityTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -29,7 +30,6 @@ import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.cluster.ESAllocationTestCase;
@@ -42,7 +42,7 @@ import static org.hamcrest.Matchers.equalTo;
*
*/
public class RoutingNodesIntegrityTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(IndexBalanceTests.class);
+    private final Logger logger = Loggers.getLogger(RoutingNodesIntegrityTests.class);
public void testBalanceAllNodesStarted() {
AllocationService strategy = createAllocationService(Settings.builder()
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SameShardRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SameShardRoutingTests.java
index 908ec3a423..69c2efe9b0 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SameShardRoutingTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SameShardRoutingTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
@@ -30,7 +31,6 @@ import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.LocalTransportAddress;
@@ -44,7 +44,7 @@ import static org.hamcrest.Matchers.equalTo;
/**
*/
public class SameShardRoutingTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(SameShardRoutingTests.class);
+ private final Logger logger = Loggers.getLogger(SameShardRoutingTests.class);
public void testSameHost() {
AllocationService strategy = createAllocationService(
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java
index de3189956a..534e2af5a8 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -27,7 +28,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.cluster.ESAllocationTestCase;
@@ -38,7 +38,7 @@ import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED;
import static org.hamcrest.Matchers.equalTo;
public class ShardVersioningTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(ShardVersioningTests.class);
+ private final Logger logger = Loggers.getLogger(ShardVersioningTests.class);
public void testSimple() {
AllocationService strategy = createAllocationService(Settings.builder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(),
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java
index b01fb9bcc6..930c099f95 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -29,7 +30,6 @@ import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.cluster.ESAllocationTestCase;
@@ -43,7 +43,7 @@ import static org.hamcrest.Matchers.equalTo;
/**
*/
public class ShardsLimitAllocationTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(ShardsLimitAllocationTests.class);
+ private final Logger logger = Loggers.getLogger(ShardsLimitAllocationTests.class);
public void testIndexLevelShardsLimitAllocate() {
AllocationService strategy = createAllocationService(Settings.builder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build());
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java
index 0a1bf218b5..c30ceef08b 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -30,7 +31,6 @@ import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.cluster.ESAllocationTestCase;
@@ -56,7 +56,7 @@ import static org.hamcrest.Matchers.nullValue;
*
*/
public class SingleShardNoReplicasRoutingTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(SingleShardNoReplicasRoutingTests.class);
+ private final Logger logger = Loggers.getLogger(SingleShardNoReplicasRoutingTests.class);
public void testSingleIndexStartedShard() {
AllocationService strategy = createAllocationService(Settings.builder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build());
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardOneReplicaRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardOneReplicaRoutingTests.java
index 95d3681529..cad5654eaa 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardOneReplicaRoutingTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardOneReplicaRoutingTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -26,7 +27,6 @@ import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.cluster.ESAllocationTestCase;
@@ -41,7 +41,7 @@ import static org.hamcrest.Matchers.nullValue;
*
*/
public class SingleShardOneReplicaRoutingTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(SingleShardOneReplicaRoutingTests.class);
+ private final Logger logger = Loggers.getLogger(SingleShardOneReplicaRoutingTests.class);
public void testSingleIndexFirstStartPrimaryThenBackups() {
AllocationService strategy = createAllocationService(Settings.builder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build());
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/TenShardsOneReplicaRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/TenShardsOneReplicaRoutingTests.java
index f629e62767..a6a7e6c065 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/TenShardsOneReplicaRoutingTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/TenShardsOneReplicaRoutingTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -27,7 +28,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.cluster.ESAllocationTestCase;
@@ -44,7 +44,7 @@ import static org.hamcrest.Matchers.nullValue;
*
*/
public class TenShardsOneReplicaRoutingTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(TenShardsOneReplicaRoutingTests.class);
+ private final Logger logger = Loggers.getLogger(TenShardsOneReplicaRoutingTests.class);
public void testSingleIndexFirstStartPrimaryThenBackups() {
AllocationService strategy = createAllocationService(Settings.builder()
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java
index 497d2e0e26..9083085b5c 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java
@@ -20,6 +20,7 @@
package org.elasticsearch.cluster.routing.allocation;
import com.carrotsearch.hppc.IntHashSet;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -30,7 +31,6 @@ import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands;
import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand;
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.snapshots.Snapshot;
@@ -47,7 +47,7 @@ import static org.hamcrest.Matchers.equalTo;
*
*/
public class ThrottlingAllocationTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(ThrottlingAllocationTests.class);
+ private final Logger logger = Loggers.getLogger(ThrottlingAllocationTests.class);
public void testPrimaryRecoveryThrottling() {
AllocationService strategy = createAllocationService(Settings.builder()
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/UpdateNumberOfReplicasTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/UpdateNumberOfReplicasTests.java
index 9486c4c3fc..4857086e24 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/UpdateNumberOfReplicasTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/UpdateNumberOfReplicasTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -26,7 +27,6 @@ import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.cluster.ESAllocationTestCase;
@@ -43,7 +43,7 @@ import static org.hamcrest.Matchers.nullValue;
*
*/
public class UpdateNumberOfReplicasTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(UpdateNumberOfReplicasTests.class);
+ private final Logger logger = Loggers.getLogger(UpdateNumberOfReplicasTests.class);
public void testUpdateNumberOfReplicas() {
AllocationService strategy = createAllocationService(Settings.builder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build());
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java
index 0e0ba4cea0..a9c15de61d 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java
@@ -20,6 +20,7 @@
package org.elasticsearch.cluster.routing.allocation.decider;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -31,7 +32,6 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.Allocation;
import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.Rebalance;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
@@ -52,7 +52,7 @@ import static org.hamcrest.Matchers.equalTo;
*/
public class EnableAllocationTests extends ESAllocationTestCase {
- private final ESLogger logger = Loggers.getLogger(EnableAllocationTests.class);
+ private final Logger logger = Loggers.getLogger(EnableAllocationTests.class);
public void testClusterEnableNone() {
AllocationService strategy = createAllocationService(Settings.builder()
diff --git a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java
index b7eb532e10..35f0601e71 100644
--- a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.cluster.service;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse;
import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
@@ -125,7 +126,7 @@ public class ClusterServiceIT extends ESIntegTestCase {
@Override
public void onFailure(String source, Exception e) {
- logger.error("failed to execute callback in test {}", e, source);
+ logger.error(new ParameterizedMessage("failed to execute callback in test {}", source), e);
onFailure.set(true);
latch.countDown();
}
@@ -196,7 +197,7 @@ public class ClusterServiceIT extends ESIntegTestCase {
@Override
public void onFailure(String source, Exception e) {
- logger.error("failed to execute callback in test {}", e, source);
+ logger.error(new ParameterizedMessage("failed to execute callback in test {}", source), e);
onFailure.set(true);
latch.countDown();
}
@@ -270,7 +271,7 @@ public class ClusterServiceIT extends ESIntegTestCase {
@Override
public void onFailure(String source, Exception e) {
- logger.error("failed to execute callback in test {}", e, source);
+ logger.error(new ParameterizedMessage("failed to execute callback in test {}", source), e);
onFailure.set(true);
latch.countDown();
}
@@ -344,7 +345,7 @@ public class ClusterServiceIT extends ESIntegTestCase {
@Override
public void onFailure(String source, Exception e) {
- logger.error("failed to execute callback in test {}", e, source);
+ logger.error(new ParameterizedMessage("failed to execute callback in test {}", source), e);
onFailure.set(true);
latch.countDown();
}
diff --git a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java
index 1002774d2c..83d87c2297 100644
--- a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java
@@ -18,8 +18,10 @@
*/
package org.elasticsearch.cluster.service;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterChangedEvent;
@@ -35,6 +37,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.lease.Releasable;
+import org.elasticsearch.common.logging.TestLoggers;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.LocalTransportAddress;
@@ -69,9 +72,7 @@ import java.util.stream.Collectors;
import static java.util.Collections.emptyMap;
import static java.util.Collections.emptySet;
import static org.elasticsearch.test.ClusterServiceUtils.setState;
-import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.anyOf;
-import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
@@ -331,7 +332,7 @@ public class ClusterServiceTests extends ESTestCase {
ClusterStateTaskListener listener = new ClusterStateTaskListener() {
@Override
public void onFailure(String source, Exception e) {
- logger.error("unexpected failure: [{}]", e, source);
+ logger.error(new ParameterizedMessage("unexpected failure: [{}]", source), e);
failures.add(new Tuple<>(source, e));
updateLatch.countDown();
}
@@ -685,8 +686,8 @@ public class ClusterServiceTests extends ESTestCase {
mockAppender.addExpectation(new MockLogAppender.SeenEventExpectation("test3", "cluster.service", Level.DEBUG,
"*processing [test3]: took [3s] done applying updated cluster_state (version: *, uuid: *)"));
- Logger rootLogger = Logger.getRootLogger();
- rootLogger.addAppender(mockAppender);
+ Logger rootLogger = LogManager.getRootLogger();
+ TestLoggers.addAppender(rootLogger, mockAppender);
try {
final CountDownLatch latch = new CountDownLatch(4);
clusterService.currentTimeOverride = System.nanoTime();
@@ -761,7 +762,7 @@ public class ClusterServiceTests extends ESTestCase {
});
latch.await();
} finally {
- rootLogger.removeAppender(mockAppender);
+ TestLoggers.removeAppender(rootLogger, mockAppender);
}
mockAppender.assertAllExpectationsMatched();
}
@@ -778,8 +779,8 @@ public class ClusterServiceTests extends ESTestCase {
mockAppender.addExpectation(new MockLogAppender.SeenEventExpectation("test4", "cluster.service", Level.WARN,
"*cluster state update task [test4] took [34s] above the warn threshold of *"));
- Logger rootLogger = Logger.getRootLogger();
- rootLogger.addAppender(mockAppender);
+ Logger rootLogger = LogManager.getRootLogger();
+ TestLoggers.addAppender(rootLogger, mockAppender);
try {
final CountDownLatch latch = new CountDownLatch(5);
final CountDownLatch processedFirstTask = new CountDownLatch(1);
@@ -875,7 +876,7 @@ public class ClusterServiceTests extends ESTestCase {
});
latch.await();
} finally {
- rootLogger.removeAppender(mockAppender);
+ TestLoggers.removeAppender(rootLogger, mockAppender);
}
mockAppender.assertAllExpectationsMatched();
}
diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java
index 7f423d1bb9..7e69003a1e 100644
--- a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java
@@ -19,6 +19,7 @@
package org.elasticsearch.cluster.settings;
+import org.apache.logging.log4j.Level;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequestBuilder;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
import org.elasticsearch.cluster.metadata.MetaData;
@@ -331,27 +332,27 @@ public class ClusterSettingsIT extends ESIntegTestCase {
public void testLoggerLevelUpdate() {
assertAcked(prepareCreate("test"));
- final String rootLevel = ESLoggerFactory.getRootLogger().getLevel();
- final String testLevel = ESLoggerFactory.getLogger("test").getLevel();
- try {
- client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().put("logger._root", "BOOM")).execute().actionGet();
- fail("Expected IllegalArgumentException");
- } catch (IllegalArgumentException e) {
- assertEquals("No enum constant org.elasticsearch.common.logging.ESLoggerFactory.LogLevel.BOOM", e.getMessage());
- }
+ final IllegalArgumentException e =
+ expectThrows(
+ IllegalArgumentException.class,
+ () -> client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().put("logger._root", "BOOM")).execute().actionGet());
+ assertEquals("Unknown level constant [BOOM].", e.getMessage());
try {
- client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().put("logger.test", "TRACE").put("logger._root", "trace")).execute().actionGet();
- assertEquals("TRACE", ESLoggerFactory.getLogger("test").getLevel());
- assertEquals("TRACE", ESLoggerFactory.getRootLogger().getLevel());
+ final Settings.Builder testSettings = Settings.builder().put("logger.test", "TRACE").put("logger._root", "trace");
+ client().admin().cluster().prepareUpdateSettings().setTransientSettings(testSettings).execute().actionGet();
+ assertEquals(Level.TRACE, ESLoggerFactory.getLogger("test").getLevel());
+ assertEquals(Level.TRACE, ESLoggerFactory.getRootLogger().getLevel());
} finally {
if (randomBoolean()) {
- client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().putNull("logger.test").putNull("logger._root")).execute().actionGet();
+ final Settings.Builder defaultSettings = Settings.builder().putNull("logger.test").putNull("logger._root");
+ client().admin().cluster().prepareUpdateSettings().setTransientSettings(defaultSettings).execute().actionGet();
} else {
- client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().putNull("logger.*")).execute().actionGet();
+ final Settings.Builder defaultSettings = Settings.builder().putNull("logger.*");
+ client().admin().cluster().prepareUpdateSettings().setTransientSettings(defaultSettings).execute().actionGet();
}
- assertEquals(testLevel, ESLoggerFactory.getLogger("test").getLevel());
- assertEquals(rootLevel, ESLoggerFactory.getRootLogger().getLevel());
+ assertEquals(ESLoggerFactory.LOG_DEFAULT_LEVEL_SETTING.get(Settings.EMPTY), ESLoggerFactory.getLogger("test").getLevel());
+ assertEquals(ESLoggerFactory.LOG_DEFAULT_LEVEL_SETTING.get(Settings.EMPTY), ESLoggerFactory.getRootLogger().getLevel());
}
}
diff --git a/core/src/test/java/org/elasticsearch/common/logging/ESLoggerTests.java b/core/src/test/java/org/elasticsearch/common/logging/ESLoggerTests.java
deleted file mode 100644
index 67a6c0555c..0000000000
--- a/core/src/test/java/org/elasticsearch/common/logging/ESLoggerTests.java
+++ /dev/null
@@ -1,169 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common.logging;
-
-import org.apache.log4j.AppenderSkeleton;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
-import org.apache.log4j.spi.LocationInfo;
-import org.apache.log4j.spi.LoggingEvent;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.env.Environment;
-import org.elasticsearch.test.ESTestCase;
-import org.junit.After;
-
-import java.nio.file.Path;
-import java.util.ArrayList;
-import java.util.List;
-
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.notNullValue;
-
-public class ESLoggerTests extends ESTestCase {
-
- private ESLogger esTestLogger;
- private TestAppender testAppender;
- private String testLevel;
- private DeprecationLogger deprecationLogger;
- private TestAppender deprecationAppender;
-
- @Override
- public void setUp() throws Exception {
- super.setUp();
- this.testLevel = ESLoggerFactory.getLogger("test").getLevel();
- LogConfigurator.reset();
- Path configDir = getDataPath("config");
- // Need to set custom path.conf so we can use a custom logging.yml file for the test
- Settings settings = Settings.builder()
- .put(Environment.PATH_CONF_SETTING.getKey(), configDir.toAbsolutePath())
- .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
- .build();
- LogConfigurator.configure(settings, true);
-
- esTestLogger = ESLoggerFactory.getLogger("test");
- Logger testLogger = esTestLogger.getLogger();
- assertThat(testLogger.getLevel(), equalTo(Level.TRACE));
- testAppender = new TestAppender();
- testLogger.addAppender(testAppender);
-
- // deprecation setup, needs to be set to debug to log
- deprecationLogger = ESLoggerFactory.getDeprecationLogger("test");
- deprecationAppender = new TestAppender();
- ESLogger logger = ESLoggerFactory.getLogger("deprecation.test");
- logger.setLevel("DEBUG");
- logger.getLogger().addAppender(deprecationAppender);
- }
-
- @Override
- @After
- public void tearDown() throws Exception {
- super.tearDown();
- esTestLogger.setLevel(testLevel);
- Logger testLogger = esTestLogger.getLogger();
- testLogger.removeAppender(testAppender);
- Logger deprecationLogger = ESLoggerFactory.getLogger("deprecation.test").getLogger();
- deprecationLogger.removeAppender(deprecationAppender);
- }
-
- public void testLocationInfoTest() {
- esTestLogger.error("This is an error");
- esTestLogger.warn("This is a warning");
- esTestLogger.info("This is an info");
- esTestLogger.debug("This is a debug");
- esTestLogger.trace("This is a trace");
- List<LoggingEvent> events = testAppender.getEvents();
- assertThat(events, notNullValue());
- assertThat(events.size(), equalTo(5));
- LoggingEvent event = events.get(0);
- assertThat(event, notNullValue());
- assertThat(event.getLevel(), equalTo(Level.ERROR));
- assertThat(event.getRenderedMessage(), equalTo("This is an error"));
- LocationInfo locationInfo = event.getLocationInformation();
- assertThat(locationInfo, notNullValue());
- assertThat(locationInfo.getClassName(), equalTo(ESLoggerTests.class.getCanonicalName()));
- assertThat(locationInfo.getMethodName(), equalTo("testLocationInfoTest"));
- event = events.get(1);
- assertThat(event, notNullValue());
- assertThat(event.getLevel(), equalTo(Level.WARN));
- assertThat(event.getRenderedMessage(), equalTo("This is a warning"));
- locationInfo = event.getLocationInformation();
- assertThat(locationInfo, notNullValue());
- assertThat(locationInfo.getClassName(), equalTo(ESLoggerTests.class.getCanonicalName()));
- assertThat(locationInfo.getMethodName(), equalTo("testLocationInfoTest"));
- event = events.get(2);
- assertThat(event, notNullValue());
- assertThat(event.getLevel(), equalTo(Level.INFO));
- assertThat(event.getRenderedMessage(), equalTo("This is an info"));
- locationInfo = event.getLocationInformation();
- assertThat(locationInfo, notNullValue());
- assertThat(locationInfo.getClassName(), equalTo(ESLoggerTests.class.getCanonicalName()));
- assertThat(locationInfo.getMethodName(), equalTo("testLocationInfoTest"));
- event = events.get(3);
- assertThat(event, notNullValue());
- assertThat(event.getLevel(), equalTo(Level.DEBUG));
- assertThat(event.getRenderedMessage(), equalTo("This is a debug"));
- locationInfo = event.getLocationInformation();
- assertThat(locationInfo, notNullValue());
- assertThat(locationInfo.getClassName(), equalTo(ESLoggerTests.class.getCanonicalName()));
- assertThat(locationInfo.getMethodName(), equalTo("testLocationInfoTest"));
- event = events.get(4);
- assertThat(event, notNullValue());
- assertThat(event.getLevel(), equalTo(Level.TRACE));
- assertThat(event.getRenderedMessage(), equalTo("This is a trace"));
- locationInfo = event.getLocationInformation();
- assertThat(locationInfo, notNullValue());
- assertThat(locationInfo.getClassName(), equalTo(ESLoggerTests.class.getCanonicalName()));
- assertThat(locationInfo.getMethodName(), equalTo("testLocationInfoTest"));
- }
-
- public void testDeprecationLogger() {
- deprecationLogger.deprecated("This is a deprecation message");
- List<LoggingEvent> deprecationEvents = deprecationAppender.getEvents();
- LoggingEvent event = deprecationEvents.get(0);
- assertThat(event, notNullValue());
- assertThat(event.getLevel(), equalTo(Level.DEBUG));
- assertThat(event.getRenderedMessage(), equalTo("This is a deprecation message"));
- }
-
- private static class TestAppender extends AppenderSkeleton {
-
- private List<LoggingEvent> events = new ArrayList<>();
-
- @Override
- public void close() {
- }
-
- @Override
- public boolean requiresLayout() {
- return false;
- }
-
- @Override
- protected void append(LoggingEvent event) {
- // Forces it to generate the location information
- event.getLocationInformation();
- events.add(event);
- }
-
- public List<LoggingEvent> getEvents() {
- return events;
- }
- }
-}
diff --git a/core/src/test/java/org/elasticsearch/common/logging/LoggingConfigurationTests.java b/core/src/test/java/org/elasticsearch/common/logging/LoggingConfigurationTests.java
deleted file mode 100644
index 581a959936..0000000000
--- a/core/src/test/java/org/elasticsearch/common/logging/LoggingConfigurationTests.java
+++ /dev/null
@@ -1,196 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common.logging;
-
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.StandardOpenOption;
-import java.util.Arrays;
-
-import org.apache.log4j.Appender;
-import org.apache.log4j.Logger;
-import org.apache.log4j.MDC;
-import org.elasticsearch.cli.MockTerminal;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.env.Environment;
-import org.elasticsearch.node.internal.InternalSettingsPreparer;
-import org.elasticsearch.test.ESTestCase;
-import org.junit.Before;
-
-import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.notNullValue;
-import static org.hamcrest.Matchers.nullValue;
-
-/**
- *
- */
-public class LoggingConfigurationTests extends ESTestCase {
-
- @Before
- public void before() throws Exception {
- LogConfigurator.reset();
- }
-
- public void testResolveMultipleConfigs() throws Exception {
- String level = ESLoggerFactory.getLogger("test").getLevel();
- try {
- Path configDir = getDataPath("config");
- Settings settings = Settings.builder()
- .put(Environment.PATH_CONF_SETTING.getKey(), configDir.toAbsolutePath())
- .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
- .build();
- LogConfigurator.configure(settings, true);
-
- ESLogger esLogger = ESLoggerFactory.getLogger("test");
- Logger logger = esLogger.getLogger();
- Appender appender = logger.getAppender("console");
- assertThat(appender, notNullValue());
-
- esLogger = ESLoggerFactory.getLogger("second");
- logger = esLogger.getLogger();
- appender = logger.getAppender("console2");
- assertThat(appender, notNullValue());
-
- esLogger = ESLoggerFactory.getLogger("third");
- logger = esLogger.getLogger();
- appender = logger.getAppender("console3");
- assertThat(appender, notNullValue());
- } finally {
- ESLoggerFactory.getLogger("test").setLevel(level);
- }
- }
-
- public void testResolveJsonLoggingConfig() throws Exception {
- Path tmpDir = createTempDir();
- Path loggingConf = tmpDir.resolve(loggingConfiguration("json"));
- Files.write(loggingConf, "{\"json\": \"foo\"}".getBytes(StandardCharsets.UTF_8));
- Environment environment = new Environment(
- Settings.builder()
- .put(Environment.PATH_CONF_SETTING.getKey(), tmpDir.toAbsolutePath())
- .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
- .build());
-
- Settings.Builder builder = Settings.builder();
- LogConfigurator.resolveConfig(environment, builder);
-
- Settings logSettings = builder.build();
- assertThat(logSettings.get("json"), is("foo"));
- }
-
- public void testResolveYamlLoggingConfig() throws Exception {
- Path tmpDir = createTempDir();
- Path loggingConf1 = tmpDir.resolve(loggingConfiguration("yml"));
- Path loggingConf2 = tmpDir.resolve(loggingConfiguration("yaml"));
- Files.write(loggingConf1, "yml: bar".getBytes(StandardCharsets.UTF_8));
- Files.write(loggingConf2, "yaml: bar".getBytes(StandardCharsets.UTF_8));
- Environment environment = new Environment(
- Settings.builder()
- .put(Environment.PATH_CONF_SETTING.getKey(), tmpDir.toAbsolutePath())
- .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
- .build());
-
- Settings.Builder builder = Settings.builder();
- LogConfigurator.resolveConfig(environment, builder);
-
- Settings logSettings = builder.build();
- assertThat(logSettings.get("yml"), is("bar"));
- assertThat(logSettings.get("yaml"), is("bar"));
- }
-
- public void testResolveConfigInvalidFilename() throws Exception {
- Path tmpDir = createTempDir();
- Path invalidSuffix = tmpDir.resolve(loggingConfiguration(randomFrom(LogConfigurator.ALLOWED_SUFFIXES)) + randomInvalidSuffix());
- Files.write(invalidSuffix, "yml: bar".getBytes(StandardCharsets.UTF_8));
- Environment environment = new Environment(
- Settings.builder()
- .put(Environment.PATH_CONF_SETTING.getKey(), invalidSuffix.toAbsolutePath())
- .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
- .build());
-
- Settings.Builder builder = Settings.builder();
- LogConfigurator.resolveConfig(environment, builder);
-
- Settings logSettings = builder.build();
- assertThat(logSettings.get("yml"), nullValue());
- }
-
- // tests that custom settings are not overwritten by settings in the config file
- public void testResolveOrder() throws Exception {
- Path tmpDir = createTempDir();
- Path loggingConf = tmpDir.resolve(loggingConfiguration("yaml"));
- Files.write(loggingConf, "logger.test_resolve_order: INFO, file\n".getBytes(StandardCharsets.UTF_8));
- Files.write(loggingConf, "appender.file.type: file\n".getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
- Environment environment = InternalSettingsPreparer.prepareEnvironment(
- Settings.builder()
- .put(Environment.PATH_CONF_SETTING.getKey(), tmpDir.toAbsolutePath())
- .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
- .put("logger.test_resolve_order", "TRACE, console")
- .put("appender.console.type", "console")
- .put("appender.console.layout.type", "consolePattern")
- .put("appender.console.layout.conversionPattern", "[%d{ISO8601}][%-5p][%-25c] %m%n")
- .build(), new MockTerminal());
- LogConfigurator.configure(environment.settings(), true);
- // args should overwrite whatever is in the config
- ESLogger esLogger = ESLoggerFactory.getLogger("test_resolve_order");
- Logger logger = esLogger.getLogger();
- Appender appender = logger.getAppender("console");
- assertThat(appender, notNullValue());
- assertTrue(logger.isTraceEnabled());
- appender = logger.getAppender("file");
- assertThat(appender, nullValue());
- }
-
- // tests that config file is not read when we call LogConfigurator.configure(Settings, false)
- public void testConfigNotRead() throws Exception {
- Path tmpDir = createTempDir();
- Path loggingConf = tmpDir.resolve(loggingConfiguration("yaml"));
- Files.write(loggingConf,
- Arrays.asList(
- "logger.test_config_not_read: INFO, console",
- "appender.console.type: console"),
- StandardCharsets.UTF_8);
- Environment environment = InternalSettingsPreparer.prepareEnvironment(
- Settings.builder()
- .put(Environment.PATH_CONF_SETTING.getKey(), tmpDir.toAbsolutePath())
- .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
- .build(), new MockTerminal());
- LogConfigurator.configure(environment.settings(), false);
- ESLogger esLogger = ESLoggerFactory.getLogger("test_config_not_read");
-
- assertNotNull(esLogger);
- Logger logger = esLogger.getLogger();
- Appender appender = logger.getAppender("console");
- // config was not read
- assertNull(appender);
- }
-
- private static String loggingConfiguration(String suffix) {
- return "logging." + randomAsciiOfLength(randomIntBetween(0, 10)) + "." + suffix;
- }
-
- private static String randomInvalidSuffix() {
- String randomSuffix;
- do {
- randomSuffix = randomAsciiOfLength(randomIntBetween(1, 5));
- } while (LogConfigurator.ALLOWED_SUFFIXES.contains(randomSuffix));
- return randomSuffix;
- }
-}
diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
index dee20d6b32..bd6cb530b2 100644
--- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
@@ -18,11 +18,13 @@
*/
package org.elasticsearch.common.settings;
+import org.apache.logging.log4j.Level;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider;
import org.elasticsearch.common.logging.ESLoggerFactory;
+import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.test.ESTestCase;
@@ -302,45 +304,44 @@ public class ScopedSettingsTests extends ESTestCase {
}
public void testLoggingUpdates() {
- final String level = ESLoggerFactory.getRootLogger().getLevel();
- final String testLevel = ESLoggerFactory.getLogger("test").getLevel();
- String property = randomFrom(ESLoggerFactory.LogLevel.values()).toString();
+ final Level level = ESLoggerFactory.getRootLogger().getLevel();
+ final Level testLevel = ESLoggerFactory.getLogger("test").getLevel();
+ Level property = randomFrom(Level.values());
Settings.Builder builder = Settings.builder().put("logger.level", property);
try {
ClusterSettings settings = new ClusterSettings(builder.build(), ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
- try {
- settings.validate(Settings.builder().put("logger._root", "boom").build());
- fail();
- } catch (IllegalArgumentException ex) {
- assertEquals("No enum constant org.elasticsearch.common.logging.ESLoggerFactory.LogLevel.BOOM", ex.getMessage());
- }
+ IllegalArgumentException ex =
+ expectThrows(
+ IllegalArgumentException.class,
+ () -> settings.validate(Settings.builder().put("logger._root", "boom").build()));
+ assertEquals("Unknown level constant [BOOM].", ex.getMessage());
assertEquals(level, ESLoggerFactory.getRootLogger().getLevel());
settings.applySettings(Settings.builder().put("logger._root", "TRACE").build());
- assertEquals("TRACE", ESLoggerFactory.getRootLogger().getLevel());
+ assertEquals(Level.TRACE, ESLoggerFactory.getRootLogger().getLevel());
settings.applySettings(Settings.builder().build());
assertEquals(property, ESLoggerFactory.getRootLogger().getLevel());
settings.applySettings(Settings.builder().put("logger.test", "TRACE").build());
- assertEquals("TRACE", ESLoggerFactory.getLogger("test").getLevel());
+ assertEquals(Level.TRACE, ESLoggerFactory.getLogger("test").getLevel());
settings.applySettings(Settings.builder().build());
- assertEquals(testLevel, ESLoggerFactory.getLogger("test").getLevel());
+ assertEquals(property, ESLoggerFactory.getLogger("test").getLevel());
} finally {
- ESLoggerFactory.getRootLogger().setLevel(level);
- ESLoggerFactory.getLogger("test").setLevel(testLevel);
+ Loggers.setLevel(ESLoggerFactory.getRootLogger(), level);
+ Loggers.setLevel(ESLoggerFactory.getLogger("test"), testLevel);
}
}
public void testFallbackToLoggerLevel() {
- final String level = ESLoggerFactory.getRootLogger().getLevel();
+ final Level level = ESLoggerFactory.getRootLogger().getLevel();
try {
- ClusterSettings settings = new ClusterSettings(Settings.builder().put("logger.level", "ERROR").build(),
- ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+ ClusterSettings settings =
+ new ClusterSettings(Settings.builder().put("logger.level", "ERROR").build(), ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
assertEquals(level, ESLoggerFactory.getRootLogger().getLevel());
settings.applySettings(Settings.builder().put("logger._root", "TRACE").build());
- assertEquals("TRACE", ESLoggerFactory.getRootLogger().getLevel());
+ assertEquals(Level.TRACE, ESLoggerFactory.getRootLogger().getLevel());
settings.applySettings(Settings.builder().build()); // here we fall back to 'logger.level' which is our default.
- assertEquals("ERROR", ESLoggerFactory.getRootLogger().getLevel());
+ assertEquals(Level.ERROR, ESLoggerFactory.getRootLogger().getLevel());
} finally {
- ESLoggerFactory.getRootLogger().setLevel(level);
+ Loggers.setLevel(ESLoggerFactory.getRootLogger(), level);
}
}
diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java
index 692134916e..dc0545624d 100644
--- a/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java
+++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java
@@ -144,14 +144,9 @@ public class SettingsModuleTests extends ModuleTestCase {
{
Settings settings = Settings.builder().put("logger._root", "BOOM").put("logger.transport", "WOW").build();
- try {
- new SettingsModule(settings);
- fail();
- } catch (IllegalArgumentException ex) {
- assertEquals("No enum constant org.elasticsearch.common.logging.ESLoggerFactory.LogLevel.BOOM", ex.getMessage());
- }
+ IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> new SettingsModule(settings));
+ assertEquals("Unknown level constant [BOOM].", ex.getMessage());
}
-
}
public void testRegisterSettingsFilter() {
diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/AbstractLifecycleRunnableTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/AbstractLifecycleRunnableTests.java
index 02adb78319..3f712c44d3 100644
--- a/core/src/test/java/org/elasticsearch/common/util/concurrent/AbstractLifecycleRunnableTests.java
+++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/AbstractLifecycleRunnableTests.java
@@ -18,11 +18,10 @@
*/
package org.elasticsearch.common.util.concurrent;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.SuppressLoggerChecks;
import org.elasticsearch.common.component.Lifecycle;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.test.ESTestCase;
-
import org.mockito.InOrder;
import java.util.concurrent.Callable;
@@ -38,7 +37,7 @@ import static org.mockito.Mockito.when;
*/
public class AbstractLifecycleRunnableTests extends ESTestCase {
private final Lifecycle lifecycle = mock(Lifecycle.class);
- private final ESLogger logger = mock(ESLogger.class);
+ private final Logger logger = mock(Logger.class);
public void testDoRunOnlyRunsWhenNotStoppedOrClosed() throws Exception {
Callable<?> runCallable = mock(Callable.class);
diff --git a/core/src/test/java/org/elasticsearch/discovery/BlockingClusterStatePublishResponseHandlerTests.java b/core/src/test/java/org/elasticsearch/discovery/BlockingClusterStatePublishResponseHandlerTests.java
index 4ff4c4cd03..481d15020f 100644
--- a/core/src/test/java/org/elasticsearch/discovery/BlockingClusterStatePublishResponseHandlerTests.java
+++ b/core/src/test/java/org/elasticsearch/discovery/BlockingClusterStatePublishResponseHandlerTests.java
@@ -18,9 +18,9 @@
*/
package org.elasticsearch.discovery;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
@@ -45,10 +45,10 @@ public class BlockingClusterStatePublishResponseHandlerTests extends ESTestCase
final boolean fail;
final DiscoveryNode node;
final CyclicBarrier barrier;
- final ESLogger logger;
+ final Logger logger;
final BlockingClusterStatePublishResponseHandler handler;
- public PublishResponder(boolean fail, DiscoveryNode node, CyclicBarrier barrier, ESLogger logger, BlockingClusterStatePublishResponseHandler handler) {
+ public PublishResponder(boolean fail, DiscoveryNode node, CyclicBarrier barrier, Logger logger, BlockingClusterStatePublishResponseHandler handler) {
this.fail = fail;
this.node = node;
diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java
index 517bd2c738..d2e6fd4ea7 100644
--- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java
+++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java
@@ -19,6 +19,7 @@
package org.elasticsearch.discovery;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.index.CorruptIndexException;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.DocWriteResponse;
@@ -507,7 +508,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
logger.trace("[{}] indexed id [{}] through node [{}]", name, id, node);
} catch (ElasticsearchException e) {
exceptedExceptions.add(e);
- logger.trace("[{}] failed id [{}] through node [{}]", e, name, id, node);
+ logger.trace(new ParameterizedMessage("[{}] failed id [{}] through node [{}]", name, id, node), e);
} finally {
countDownLatchRef.get().countDown();
logger.trace("[{}] decreased counter : {}", name, countDownLatchRef.get().getCount());
@@ -715,7 +716,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
@Override
public void onFailure(String source, Exception e) {
- logger.warn("failure [{}]", e, source);
+ logger.warn(new ParameterizedMessage("failure [{}]", source), e);
}
});
diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java
index 4b37b106db..dd5a2c5f49 100644
--- a/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java
+++ b/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.discovery.zen;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
@@ -722,7 +723,7 @@ public class NodeJoinControllerTests extends ESTestCase {
@Override
public void onFailure(Exception e) {
- logger.error("unexpected error for {}", e, future);
+ logger.error(new ParameterizedMessage("unexpected error for {}", future), e);
future.markAsFailed(e);
}
});
diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java
index b1658845af..31c828ec30 100644
--- a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java
+++ b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.discovery.zen.publish;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterChangedEvent;
@@ -36,7 +37,6 @@ import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.Discovery;
@@ -98,9 +98,9 @@ public class PublishClusterStateActionTests extends ESTestCase {
public volatile ClusterState clusterState;
- private final ESLogger logger;
+ private final Logger logger;
- public MockNode(DiscoveryNode discoveryNode, MockTransportService service, @Nullable ClusterStateListener listener, ESLogger logger) {
+ public MockNode(DiscoveryNode discoveryNode, MockTransportService service, @Nullable ClusterStateListener listener, Logger logger) {
this.discoveryNode = discoveryNode;
this.service = service;
this.listener = listener;
diff --git a/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java b/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java
index 7ea916f4a5..9e10de56d0 100644
--- a/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java
+++ b/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java
@@ -19,6 +19,7 @@
package org.elasticsearch.gateway;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
@@ -33,7 +34,6 @@ import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.common.Priority;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
@@ -56,7 +56,6 @@ import java.util.List;
import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
-import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.equalTo;
@@ -65,7 +64,7 @@ import static org.hamcrest.Matchers.nullValue;
@ClusterScope(scope = Scope.TEST, numDataNodes = 0)
public class GatewayIndexStateIT extends ESIntegTestCase {
- private final ESLogger logger = Loggers.getLogger(GatewayIndexStateIT.class);
+ private final Logger logger = Loggers.getLogger(GatewayIndexStateIT.class);
public void testMappingMetaDataParsed() throws Exception {
logger.info("--> starting 1 nodes");
diff --git a/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java b/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java
index 4cf505d839..0f0e69b264 100644
--- a/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java
+++ b/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.gateway;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.Directory;
@@ -34,7 +35,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.collect.ImmutableOpenMap;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
@@ -193,7 +193,7 @@ public class MetaDataStateFormatTests extends ESTestCase {
}
}
- public static void corruptFile(Path file, ESLogger logger) throws IOException {
+ public static void corruptFile(Path file, Logger logger) throws IOException {
Path fileToCorrupt = file;
try (final SimpleFSDirectory dir = new SimpleFSDirectory(fileToCorrupt.getParent())) {
long checksumBeforeCorruption;
diff --git a/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java b/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java
index a6f4e6db23..6454f8a220 100644
--- a/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java
+++ b/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java
@@ -19,11 +19,11 @@
package org.elasticsearch.gateway;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse;
import org.elasticsearch.action.admin.indices.stats.IndexStats;
import org.elasticsearch.action.admin.indices.stats.ShardStats;
import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.indices.recovery.RecoveryState;
@@ -56,7 +56,7 @@ public class ReusePeerRecoverySharedTest {
* should this use synced flush? can't use synced from in the bwc
* tests
*/
- public static void testCase(Settings indexSettings, Runnable restartCluster, ESLogger logger, boolean useSyncIds) {
+ public static void testCase(Settings indexSettings, Runnable restartCluster, Logger logger, boolean useSyncIds) {
/*
* prevent any rebalance actions during the peer recovery if we run into
* a relocation the reuse count will be 0 and this fails the test. We
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java
index 9e4d5b27ad..de271d720c 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java
@@ -19,12 +19,12 @@
package org.elasticsearch.index.analysis.synonyms;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.lucene.all.AllTokenStream;
import org.elasticsearch.common.settings.Settings;
@@ -45,7 +45,7 @@ import static org.hamcrest.Matchers.equalTo;
/**
*/
public class SynonymsAnalysisTests extends ESTestCase {
- protected final ESLogger logger = Loggers.getLogger(getClass());
+ protected final Logger logger = Loggers.getLogger(getClass());
private AnalysisService analysisService;
public void testSynonymsAnalysis() throws IOException {
diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
index 4715264d98..2c47e61708 100644
--- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
+++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
@@ -20,11 +20,12 @@
package org.elasticsearch.index.engine;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
-import org.apache.log4j.AppenderSkeleton;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.Logger;
-import org.apache.log4j.spi.LoggingEvent;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.appender.AbstractAppender;
+import org.apache.logging.log4j.core.filter.RegexFilter;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
@@ -58,8 +59,9 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.FileSystemUtils;
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
-import org.elasticsearch.common.logging.ESLogger;
+import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.logging.PrefixMessageFactory;
+import org.elasticsearch.common.logging.TestLoggers;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.settings.Settings;
@@ -1504,16 +1506,21 @@ public class InternalEngineTests extends ESTestCase {
assertTrue(index.isCreated());
}
- private static class MockAppender extends AppenderSkeleton {
+ private static class MockAppender extends AbstractAppender {
public boolean sawIndexWriterMessage;
public boolean sawIndexWriterIFDMessage;
+ public MockAppender(final String name) throws IllegalAccessException {
+ super(name, RegexFilter.createFilter(".*(\n.*)*", new String[0], true, null, null), null);
+ }
+
@Override
- protected void append(LoggingEvent event) {
- if (event.getLevel() == Level.TRACE && event.getMessage().toString().contains("[index][1] ")) {
+ public void append(LogEvent event) {
+ final String formattedMessage = event.getMessage().getFormattedMessage();
+ if (event.getLevel() == Level.TRACE && formattedMessage.contains("[index][1] ")) {
if (event.getLoggerName().endsWith("lucene.iw") &&
- event.getMessage().toString().contains("IW: apply all deletes during flush")) {
+ formattedMessage.contains("IW: apply all deletes during flush")) {
sawIndexWriterMessage = true;
}
if (event.getLoggerName().endsWith("lucene.iw.ifd")) {
@@ -1521,28 +1528,20 @@ public class InternalEngineTests extends ESTestCase {
}
}
}
-
- @Override
- public boolean requiresLayout() {
- return false;
- }
-
- @Override
- public void close() {
- }
}
// #5891: make sure IndexWriter's infoStream output is
// sent to lucene.iw with log level TRACE:
- public void testIndexWriterInfoStream() {
+ public void testIndexWriterInfoStream() throws IllegalAccessException {
assumeFalse("who tests the tester?", VERBOSE);
- MockAppender mockAppender = new MockAppender();
+ MockAppender mockAppender = new MockAppender("testIndexWriterInfoStream");
- Logger rootLogger = Logger.getRootLogger();
+ Logger rootLogger = LogManager.getRootLogger();
Level savedLevel = rootLogger.getLevel();
- rootLogger.addAppender(mockAppender);
- rootLogger.setLevel(Level.DEBUG);
+ TestLoggers.addAppender(rootLogger, mockAppender);
+ Loggers.setLevel(rootLogger, Level.DEBUG);
+ rootLogger = LogManager.getRootLogger();
try {
// First, with DEBUG, which should NOT log IndexWriter output:
@@ -1552,32 +1551,35 @@ public class InternalEngineTests extends ESTestCase {
assertFalse(mockAppender.sawIndexWriterMessage);
// Again, with TRACE, which should log IndexWriter output:
- rootLogger.setLevel(Level.TRACE);
+ Loggers.setLevel(rootLogger, Level.TRACE);
engine.index(new Engine.Index(newUid("2"), doc));
engine.flush();
assertTrue(mockAppender.sawIndexWriterMessage);
} finally {
- rootLogger.removeAppender(mockAppender);
- rootLogger.setLevel(savedLevel);
+ TestLoggers.removeAppender(rootLogger, mockAppender);
+ Loggers.setLevel(rootLogger, savedLevel.toString());
}
}
// #8603: make sure we can separately log IFD's messages
- public void testIndexWriterIFDInfoStream() {
+ public void testIndexWriterIFDInfoStream() throws IllegalAccessException {
assumeFalse("who tests the tester?", VERBOSE);
- MockAppender mockAppender = new MockAppender();
+ MockAppender mockAppender = new MockAppender("testIndexWriterIFDInfoStream");
- // Works when running this test inside Intellij:
- Logger iwIFDLogger = LogManager.exists("org.elasticsearch.index.engine.lucene.iw.ifd");
- if (iwIFDLogger == null) {
- // Works when running this test from command line:
- iwIFDLogger = LogManager.exists("index.engine.lucene.iw.ifd");
+ final Logger iwIFDLogger;
+ if (LogManager.getContext(false).hasLogger("org.elasticsearch.index.engine.lucene.iw.ifd", new PrefixMessageFactory())) {
+ // Works when running this test inside Intellij:
+ iwIFDLogger = LogManager.getLogger("org.elasticsearch.index.engine.lucene.iw.ifd");
assertNotNull(iwIFDLogger);
+ } else {
+ // Works when running this test from command line:
+ assertTrue(LogManager.getContext(false).hasLogger("index.engine.lucene.iw.ifd", new PrefixMessageFactory()));
+ iwIFDLogger = LogManager.getLogger("index.engine.lucene.iw.ifd");
}
- iwIFDLogger.addAppender(mockAppender);
- iwIFDLogger.setLevel(Level.DEBUG);
+ TestLoggers.addAppender(iwIFDLogger, mockAppender);
+ Loggers.setLevel(iwIFDLogger, Level.DEBUG);
try {
// First, with DEBUG, which should NOT log IndexWriter output:
@@ -1588,15 +1590,15 @@ public class InternalEngineTests extends ESTestCase {
assertFalse(mockAppender.sawIndexWriterIFDMessage);
// Again, with TRACE, which should only log IndexWriter IFD output:
- iwIFDLogger.setLevel(Level.TRACE);
+ Loggers.setLevel(iwIFDLogger, Level.TRACE);
engine.index(new Engine.Index(newUid("2"), doc));
engine.flush();
assertFalse(mockAppender.sawIndexWriterMessage);
assertTrue(mockAppender.sawIndexWriterIFDMessage);
} finally {
- iwIFDLogger.removeAppender(mockAppender);
- iwIFDLogger.setLevel(null);
+ TestLoggers.removeAppender(iwIFDLogger, mockAppender);
+ Loggers.setLevel(iwIFDLogger, (Level) null);
}
}
@@ -2003,7 +2005,7 @@ public class InternalEngineTests extends ESTestCase {
public final AtomicInteger recoveredOps = new AtomicInteger(0);
- public TranslogHandler(String indexName, ESLogger logger) {
+ public TranslogHandler(String indexName, Logger logger) {
super(new ShardId("test", "_na_", 0), null, logger);
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
RootObjectMapper.Builder rootBuilder = new RootObjectMapper.Builder("test");
diff --git a/core/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java b/core/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java
index a690d2ae7a..371764acc9 100644
--- a/core/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java
+++ b/core/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java
@@ -18,14 +18,14 @@
*/
package org.elasticsearch.index.replication;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.translog.Translog;
+import org.elasticsearch.indices.recovery.PeerRecoveryTargetService;
import org.elasticsearch.indices.recovery.RecoveryState;
import org.elasticsearch.indices.recovery.RecoveryTarget;
-import org.elasticsearch.indices.recovery.PeerRecoveryTargetService;
import java.io.IOException;
import java.util.EnumSet;
@@ -62,10 +62,10 @@ public class RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestC
private final RecoveryState.Stage stageToBlock;
public static final EnumSet<RecoveryState.Stage> SUPPORTED_STAGES =
EnumSet.of(RecoveryState.Stage.INDEX, RecoveryState.Stage.TRANSLOG, RecoveryState.Stage.FINALIZE);
- private final ESLogger logger;
+ private final Logger logger;
BlockingTarget(RecoveryState.Stage stageToBlock, CountDownLatch recoveryBlocked, CountDownLatch releaseRecovery, IndexShard shard,
- DiscoveryNode sourceNode, PeerRecoveryTargetService.RecoveryListener listener, ESLogger logger) {
+ DiscoveryNode sourceNode, PeerRecoveryTargetService.RecoveryListener listener, Logger logger) {
super(shard, sourceNode, listener, version -> {});
this.recoveryBlocked = recoveryBlocked;
this.releaseRecovery = releaseRecovery;
diff --git a/core/src/test/java/org/elasticsearch/index/shard/CommitPointsTests.java b/core/src/test/java/org/elasticsearch/index/shard/CommitPointsTests.java
index 8821f0b9e7..c723538c83 100644
--- a/core/src/test/java/org/elasticsearch/index/shard/CommitPointsTests.java
+++ b/core/src/test/java/org/elasticsearch/index/shard/CommitPointsTests.java
@@ -19,7 +19,7 @@
package org.elasticsearch.index.shard;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.test.ESTestCase;
@@ -33,7 +33,7 @@ import static org.hamcrest.Matchers.nullValue;
*
*/
public class CommitPointsTests extends ESTestCase {
- private final ESLogger logger = Loggers.getLogger(CommitPointsTests.class);
+ private final Logger logger = Loggers.getLogger(CommitPointsTests.class);
public void testCommitPointXContent() throws Exception {
ArrayList<CommitPoint.FileInfo> indexFiles = new ArrayList<>();
diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
index 652cb6e778..4ce9f281fb 100644
--- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
+++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.index.shard;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.CorruptIndexException;
@@ -73,7 +74,6 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.common.unit.ByteSizeUnit;
@@ -473,7 +473,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
assertNotNull(indexStats.getShards()[0].getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID));
}
- public static ShardStateMetaData load(ESLogger logger, Path... shardPaths) throws IOException {
+ public static ShardStateMetaData load(Logger logger, Path... shardPaths) throws IOException {
return ShardStateMetaData.FORMAT.loadLatestState(logger, shardPaths);
}
diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java
index 08b4d8ac71..7bdeeef5ca 100644
--- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java
+++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java
@@ -20,6 +20,7 @@
package org.elasticsearch.index.translog;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.index.Term;
import org.apache.lucene.mockfile.FilterFileChannel;
@@ -667,7 +668,7 @@ public class TranslogTests extends ESTestCase {
@Override
public void onFailure(Exception e) {
- logger.error("--> writer [{}] had an error", e, threadName);
+ logger.error(new ParameterizedMessage("--> writer [{}] had an error", threadName), e);
errors.add(e);
}
}, threadName);
@@ -682,7 +683,7 @@ public class TranslogTests extends ESTestCase {
@Override
public void onFailure(Exception e) {
- logger.error("--> reader [{}] had an error", e, threadId);
+ logger.error(new ParameterizedMessage("--> reader [{}] had an error", threadId), e);
errors.add(e);
try {
closeView();
diff --git a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java
index 4d82acf87f..81d1e05e9e 100644
--- a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java
@@ -19,16 +19,20 @@
package org.elasticsearch.indices.settings;
-import org.apache.log4j.AppenderSkeleton;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
-import org.apache.log4j.spi.LoggingEvent;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.appender.AbstractAppender;
+import org.apache.logging.log4j.core.filter.RegexFilter;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Priority;
+import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.logging.TestLoggers;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexModule;
@@ -348,13 +352,17 @@ public class UpdateSettingsIT extends ESIntegTestCase {
logger.info("test: test done");
}
- private static class MockAppender extends AppenderSkeleton {
+ private static class MockAppender extends AbstractAppender {
public boolean sawUpdateMaxThreadCount;
public boolean sawUpdateAutoThrottle;
+ public MockAppender(final String name) throws IllegalAccessException {
+ super(name, RegexFilter.createFilter(".*(\n.*)*", new String[0], true, null, null), null);
+ }
+
@Override
- protected void append(LoggingEvent event) {
- String message = event.getMessage().toString();
+ public void append(LogEvent event) {
+ String message = event.getMessage().getFormattedMessage();
if (event.getLevel() == Level.TRACE &&
event.getLoggerName().endsWith("lucene.iw")) {
}
@@ -366,22 +374,14 @@ public class UpdateSettingsIT extends ESIntegTestCase {
}
}
- @Override
- public boolean requiresLayout() {
- return false;
- }
-
- @Override
- public void close() {
- }
}
- public void testUpdateAutoThrottleSettings() {
- MockAppender mockAppender = new MockAppender();
- Logger rootLogger = Logger.getRootLogger();
+ public void testUpdateAutoThrottleSettings() throws IllegalAccessException {
+ MockAppender mockAppender = new MockAppender("testUpdateAutoThrottleSettings");
+ Logger rootLogger = LogManager.getRootLogger();
Level savedLevel = rootLogger.getLevel();
- rootLogger.addAppender(mockAppender);
- rootLogger.setLevel(Level.TRACE);
+ TestLoggers.addAppender(rootLogger, mockAppender);
+ Loggers.setLevel(rootLogger, Level.TRACE);
try {
// No throttling at first, only 1 non-replicated shard, force lots of merging:
@@ -412,18 +412,18 @@ public class UpdateSettingsIT extends ESIntegTestCase {
GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings("test").get();
assertThat(getSettingsResponse.getSetting("test", MergeSchedulerConfig.AUTO_THROTTLE_SETTING.getKey()), equalTo("false"));
} finally {
- rootLogger.removeAppender(mockAppender);
- rootLogger.setLevel(savedLevel);
+ TestLoggers.removeAppender(rootLogger, mockAppender);
+ Loggers.setLevel(rootLogger, savedLevel);
}
}
// #6882: make sure we can change index.merge.scheduler.max_thread_count live
- public void testUpdateMergeMaxThreadCount() {
- MockAppender mockAppender = new MockAppender();
- Logger rootLogger = Logger.getRootLogger();
+ public void testUpdateMergeMaxThreadCount() throws IllegalAccessException {
+ MockAppender mockAppender = new MockAppender("testUpdateMergeMaxThreadCount");
+ Logger rootLogger = LogManager.getRootLogger();
Level savedLevel = rootLogger.getLevel();
- rootLogger.addAppender(mockAppender);
- rootLogger.setLevel(Level.TRACE);
+ TestLoggers.addAppender(rootLogger, mockAppender);
+ Loggers.setLevel(rootLogger, Level.TRACE);
try {
@@ -456,8 +456,8 @@ public class UpdateSettingsIT extends ESIntegTestCase {
assertThat(getSettingsResponse.getSetting("test", MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey()), equalTo("1"));
} finally {
- rootLogger.removeAppender(mockAppender);
- rootLogger.setLevel(savedLevel);
+ TestLoggers.removeAppender(rootLogger, mockAppender);
+ Loggers.setLevel(rootLogger, savedLevel);
}
}
diff --git a/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java b/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java
index 66687ea74f..0515887a55 100644
--- a/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java
@@ -19,6 +19,7 @@
package org.elasticsearch.indices.state;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.action.admin.indices.close.CloseIndexResponse;
@@ -28,7 +29,6 @@ import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.routing.ShardRoutingState;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexNotFoundException;
@@ -43,7 +43,7 @@ import static org.hamcrest.Matchers.nullValue;
*/
@ESIntegTestCase.ClusterScope(minNumDataNodes = 2)
public class SimpleIndexStateIT extends ESIntegTestCase {
- private final ESLogger logger = Loggers.getLogger(SimpleIndexStateIT.class);
+ private final Logger logger = Loggers.getLogger(SimpleIndexStateIT.class);
public void testSimpleOpenClose() {
logger.info("--> creating test index");
diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java
index 90aae81dc3..873e59a336 100644
--- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java
@@ -19,6 +19,7 @@
package org.elasticsearch.indices.store;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.cluster.ClusterState;
@@ -37,7 +38,6 @@ import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationComman
import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Priority;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.env.Environment;
@@ -474,11 +474,11 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase {
* the ShardActiveRequest.
*/
public static class ReclocationStartEndTracer extends MockTransportService.Tracer {
- private final ESLogger logger;
+ private final Logger logger;
private final CountDownLatch beginRelocationLatch;
private final CountDownLatch receivedShardExistsRequestLatch;
- public ReclocationStartEndTracer(ESLogger logger, CountDownLatch beginRelocationLatch, CountDownLatch receivedShardExistsRequestLatch) {
+ public ReclocationStartEndTracer(Logger logger, CountDownLatch beginRelocationLatch, CountDownLatch receivedShardExistsRequestLatch) {
this.logger = logger;
this.beginRelocationLatch = beginRelocationLatch;
this.receivedShardExistsRequestLatch = receivedShardExistsRequestLatch;
diff --git a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceTests.java b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceTests.java
index ab5b1ac475..972072c0eb 100644
--- a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceTests.java
@@ -19,7 +19,7 @@
package org.elasticsearch.monitor.jvm;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.ESTestCase;
@@ -32,7 +32,7 @@ import static org.mockito.Mockito.when;
public class JvmGcMonitorServiceTests extends ESTestCase {
public void testSlowGcLogging() {
- final ESLogger logger = mock(ESLogger.class);
+ final Logger logger = mock(Logger.class);
when(logger.isWarnEnabled()).thenReturn(true);
when(logger.isInfoEnabled()).thenReturn(true);
when(logger.isDebugEnabled()).thenReturn(true);
@@ -138,7 +138,7 @@ public class JvmGcMonitorServiceTests extends ESTestCase {
final int current = randomIntBetween(1, Integer.MAX_VALUE);
final long elapsed = randomIntBetween(current, Integer.MAX_VALUE);
final long seq = randomIntBetween(1, Integer.MAX_VALUE);
- final ESLogger logger = mock(ESLogger.class);
+ final Logger logger = mock(Logger.class);
when(logger.isWarnEnabled()).thenReturn(true);
when(logger.isInfoEnabled()).thenReturn(true);
when(logger.isDebugEnabled()).thenReturn(true);
diff --git a/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java
index 63b572f4d5..2fad27bddf 100644
--- a/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java
+++ b/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java
@@ -19,16 +19,16 @@
package org.elasticsearch.recovery;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.admin.indices.stats.ShardStats;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Priority;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
@@ -54,7 +54,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTi
@TestLogging("_root:DEBUG,index.shard:TRACE")
public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
- private final ESLogger logger = Loggers.getLogger(RecoveryWhileUnderLoadIT.class);
+ private final Logger logger = Loggers.getLogger(RecoveryWhileUnderLoadIT.class);
public void testRecoverWhileUnderLoadAllocateReplicasTest() throws Exception {
logger.info("--> creating test index ...");
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java
index 4e46a0b6a6..621257fccf 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.search.aggregations.metrics;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.plugins.Plugin;
@@ -477,7 +478,7 @@ public class StatsIT extends AbstractNumericTestCase {
ShardSearchFailure[] failures = response.getShardFailures();
if (failures.length != expectedFailures) {
for (ShardSearchFailure failure : failures) {
- logger.error("Shard Failure: {}", failure.getCause(), failure);
+ logger.error(new ParameterizedMessage("Shard Failure: {}", failure), failure.getCause());
}
fail("Unexpected shard failures!");
}
diff --git a/core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java b/core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java
index df9af970f9..eeb14baf37 100644
--- a/core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java
+++ b/core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java
@@ -26,7 +26,6 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
-import org.elasticsearch.common.Priority;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java
index 14f3e8e0bb..3a6e84fd1c 100644
--- a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java
+++ b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java
@@ -19,12 +19,7 @@
package org.elasticsearch.search.geo;
-import org.elasticsearch.common.logging.ESLoggerFactory;
-import org.locationtech.spatial4j.context.SpatialContext;
-import org.locationtech.spatial4j.distance.DistanceUtils;
-import org.locationtech.spatial4j.exception.InvalidShapeException;
-import org.locationtech.spatial4j.shape.Shape;
-
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.query.SpatialArgs;
@@ -48,6 +43,7 @@ import org.elasticsearch.common.geo.builders.MultiPolygonBuilder;
import org.elasticsearch.common.geo.builders.PolygonBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilders;
import org.elasticsearch.common.io.Streams;
+import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
@@ -59,6 +55,10 @@ import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.VersionUtils;
import org.junit.BeforeClass;
+import org.locationtech.spatial4j.context.SpatialContext;
+import org.locationtech.spatial4j.distance.DistanceUtils;
+import org.locationtech.spatial4j.exception.InvalidShapeException;
+import org.locationtech.spatial4j.shape.Shape;
import java.io.ByteArrayOutputStream;
import java.io.FileNotFoundException;
@@ -562,7 +562,7 @@ public class GeoFilterIT extends ESIntegTestCase {
strategy.makeQuery(args);
return true;
} catch (UnsupportedSpatialOperation e) {
- ESLoggerFactory.getLogger(GeoFilterIT.class.getName()).info("Unsupported spatial operation {}", e, relation);
+ ESLoggerFactory.getLogger(GeoFilterIT.class.getName()).info(new ParameterizedMessage("Unsupported spatial operation {}", relation), e);
return false;
}
}
diff --git a/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java
index c71bfa0c1e..f0ea27e355 100644
--- a/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java
+++ b/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.snapshots;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse;
import org.elasticsearch.cluster.ClusterChangedEvent;
@@ -281,7 +282,7 @@ public abstract class AbstractSnapshotIntegTestCase extends ESIntegTestCase {
@Override
public void onFailure(String source, Exception e) {
- logger.warn("failed to execute [{}]", e, source);
+ logger.warn(new ParameterizedMessage("failed to execute [{}]", source), e);
}
});
diff --git a/core/src/test/java/org/elasticsearch/test/MockLogAppender.java b/core/src/test/java/org/elasticsearch/test/MockLogAppender.java
index 8f10ccd653..786319e2ae 100644
--- a/core/src/test/java/org/elasticsearch/test/MockLogAppender.java
+++ b/core/src/test/java/org/elasticsearch/test/MockLogAppender.java
@@ -18,9 +18,10 @@
*/
package org.elasticsearch.test;
-import org.apache.log4j.AppenderSkeleton;
-import org.apache.log4j.Level;
-import org.apache.log4j.spi.LoggingEvent;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.appender.AbstractAppender;
+import org.apache.logging.log4j.core.filter.RegexFilter;
import org.elasticsearch.common.regex.Regex;
import java.util.ArrayList;
@@ -32,13 +33,14 @@ import static org.hamcrest.MatcherAssert.assertThat;
/**
* Test appender that can be used to verify that certain events were logged correctly
*/
-public class MockLogAppender extends AppenderSkeleton {
+public class MockLogAppender extends AbstractAppender {
private static final String COMMON_PREFIX = System.getProperty("es.logger.prefix", "org.elasticsearch.");
private List<LoggingExpectation> expectations;
- public MockLogAppender() {
+ public MockLogAppender() throws IllegalAccessException {
+ super("mock", RegexFilter.createFilter(".*(\n.*)*", new String[0], true, null, null), null);
expectations = new ArrayList<>();
}
@@ -47,22 +49,12 @@ public class MockLogAppender extends AppenderSkeleton {
}
@Override
- protected void append(LoggingEvent loggingEvent) {
+ public void append(LogEvent event) {
for (LoggingExpectation expectation : expectations) {
- expectation.match(loggingEvent);
+ expectation.match(event);
}
}
- @Override
- public void close() {
-
- }
-
- @Override
- public boolean requiresLayout() {
- return false;
- }
-
public void assertAllExpectationsMatched() {
for (LoggingExpectation expectation : expectations) {
expectation.assertMatched();
@@ -70,7 +62,7 @@ public class MockLogAppender extends AppenderSkeleton {
}
public interface LoggingExpectation {
- void match(LoggingEvent loggingEvent);
+ void match(LogEvent event);
void assertMatched();
}
@@ -91,10 +83,10 @@ public class MockLogAppender extends AppenderSkeleton {
}
@Override
- public void match(LoggingEvent event) {
- if (event.getLevel() == level && event.getLoggerName().equals(logger)) {
+ public void match(LogEvent event) {
+ if (event.getLevel().equals(level) && event.getLoggerName().equals(logger)) {
if (Regex.isSimpleMatchPattern(message)) {
- if (Regex.simpleMatch(message, event.getMessage().toString())) {
+ if (Regex.simpleMatch(message, event.getMessage().getFormattedMessage())) {
saw = true;
}
} else {
diff --git a/core/src/test/resources/org/elasticsearch/common/logging/config/logging.yml b/core/src/test/resources/org/elasticsearch/common/logging/config/logging.yml
deleted file mode 100644
index 548b186e46..0000000000
--- a/core/src/test/resources/org/elasticsearch/common/logging/config/logging.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-# you can override using a command-line parameter
-# -E logger.level=(ERROR|WARN|INFO|DEBUG|TRACE)
-logger.level: INFO
-rootLogger: ${logger.level}, console
-logger:
- test: TRACE, console
-
-appender:
- console:
- type: console
- layout:
- type: consolePattern
- conversionPattern: "[%d{ISO8601}][%-5p][%-25c] %m%n"
diff --git a/core/src/test/resources/org/elasticsearch/common/logging/config/test2/logging.yml b/core/src/test/resources/org/elasticsearch/common/logging/config/test2/logging.yml
deleted file mode 100644
index 71fbce639a..0000000000
--- a/core/src/test/resources/org/elasticsearch/common/logging/config/test2/logging.yml
+++ /dev/null
@@ -1,10 +0,0 @@
-logger:
- # log action execution errors for easier debugging
- second: DEBUG, console2
-
-appender:
- console2:
- type: console
- layout:
- type: consolePattern
- conversionPattern: "[%d{ISO8601}][%-5p][%-25c] %m%n" \ No newline at end of file
diff --git a/core/src/test/resources/org/elasticsearch/common/logging/config/test2/test3/logging.yml b/core/src/test/resources/org/elasticsearch/common/logging/config/test2/test3/logging.yml
deleted file mode 100644
index edfe0c9ed4..0000000000
--- a/core/src/test/resources/org/elasticsearch/common/logging/config/test2/test3/logging.yml
+++ /dev/null
@@ -1,10 +0,0 @@
-logger:
- # log action execution errors for easier debugging
- third: DEBUG, console3
-
-appender:
- console3:
- type: console
- layout:
- type: consolePattern
- conversionPattern: "[%d{ISO8601}][%-5p][%-25c] %m%n" \ No newline at end of file
diff --git a/distribution/build.gradle b/distribution/build.gradle
index 5409cfd3bc..bcd18e7f80 100644
--- a/distribution/build.gradle
+++ b/distribution/build.gradle
@@ -295,7 +295,7 @@ configure(subprojects.findAll { ['deb', 'rpm'].contains(it.name) }) {
that'll happen when createEtc runs. */
outputs.file "${packagingFiles}/etc/elasticsearch/elasticsearch.yml"
outputs.file "${packagingFiles}/etc/elasticsearch/jvm.options"
- outputs.file "${packagingFiles}/etc/elasticsearch/logging.yml"
+ outputs.file "${packagingFiles}/etc/elasticsearch/log4j2.properties"
}
task createPidDir(type: EmptyDirTask) {
@@ -375,7 +375,7 @@ configure(subprojects.findAll { ['deb', 'rpm'].contains(it.name) }) {
configurationFile '/etc/elasticsearch/elasticsearch.yml'
configurationFile '/etc/elasticsearch/jvm.options'
- configurationFile '/etc/elasticsearch/logging.yml'
+ configurationFile '/etc/elasticsearch/log4j2.properties'
into('/etc/elasticsearch') {
fileMode 0750
permissionGroup 'elasticsearch'
diff --git a/distribution/licenses/apache-log4j-extras-1.2.17.jar.sha1 b/distribution/licenses/apache-log4j-extras-1.2.17.jar.sha1
deleted file mode 100644
index 147721b891..0000000000
--- a/distribution/licenses/apache-log4j-extras-1.2.17.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-85863614d82185d7e51fe21c00aa9117a523a8b6
diff --git a/distribution/licenses/apache-log4j-extras-NOTICE b/distribution/licenses/apache-log4j-extras-NOTICE
deleted file mode 100644
index e02b7500ec..0000000000
--- a/distribution/licenses/apache-log4j-extras-NOTICE
+++ /dev/null
@@ -1,5 +0,0 @@
-Apache Extras Companion for log4j 1.2.
-Copyright 2007 The Apache Software Foundation
-
-This product includes software developed at
-The Apache Software Foundation (http://www.apache.org/). \ No newline at end of file
diff --git a/distribution/licenses/log4j-1.2-api-2.6.2.jar.sha1 b/distribution/licenses/log4j-1.2-api-2.6.2.jar.sha1
new file mode 100644
index 0000000000..5bf4bcab46
--- /dev/null
+++ b/distribution/licenses/log4j-1.2-api-2.6.2.jar.sha1
@@ -0,0 +1 @@
+3b4c5a8b734b6a29b2f03380535a48da6284b210 \ No newline at end of file
diff --git a/distribution/licenses/log4j-1.2.17.jar.sha1 b/distribution/licenses/log4j-1.2.17.jar.sha1
deleted file mode 100644
index 383110e29f..0000000000
--- a/distribution/licenses/log4j-1.2.17.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5af35056b4d257e4b64b9e8069c0746e8b08629f
diff --git a/distribution/licenses/apache-log4j-extras-LICENSE b/distribution/licenses/log4j-LICENSE.txt
index 6279e5206d..6279e5206d 100644
--- a/distribution/licenses/apache-log4j-extras-LICENSE
+++ b/distribution/licenses/log4j-LICENSE.txt
diff --git a/distribution/licenses/log4j-NOTICE b/distribution/licenses/log4j-NOTICE.txt
index 0375732360..0375732360 100644
--- a/distribution/licenses/log4j-NOTICE
+++ b/distribution/licenses/log4j-NOTICE.txt
diff --git a/distribution/licenses/log4j-api-2.6.2.jar.sha1 b/distribution/licenses/log4j-api-2.6.2.jar.sha1
new file mode 100644
index 0000000000..e4f9af7497
--- /dev/null
+++ b/distribution/licenses/log4j-api-2.6.2.jar.sha1
@@ -0,0 +1 @@
+bd1b74a5d170686362091c7cf596bbc3adf5c09b \ No newline at end of file
diff --git a/distribution/licenses/log4j-LICENSE b/distribution/licenses/log4j-api-LICENSE.txt
index 6279e5206d..6279e5206d 100644
--- a/distribution/licenses/log4j-LICENSE
+++ b/distribution/licenses/log4j-api-LICENSE.txt
diff --git a/distribution/licenses/log4j-api-NOTICE.txt b/distribution/licenses/log4j-api-NOTICE.txt
new file mode 100644
index 0000000000..0375732360
--- /dev/null
+++ b/distribution/licenses/log4j-api-NOTICE.txt
@@ -0,0 +1,5 @@
+Apache log4j
+Copyright 2007 The Apache Software Foundation
+
+This product includes software developed at
+The Apache Software Foundation (http://www.apache.org/). \ No newline at end of file
diff --git a/distribution/licenses/log4j-core-2.6.2.jar.sha1 b/distribution/licenses/log4j-core-2.6.2.jar.sha1
new file mode 100644
index 0000000000..0ac4323411
--- /dev/null
+++ b/distribution/licenses/log4j-core-2.6.2.jar.sha1
@@ -0,0 +1 @@
+00a91369f655eb1639c6aece5c5eb5108db18306 \ No newline at end of file
diff --git a/distribution/licenses/log4j-core-LICENSE.txt b/distribution/licenses/log4j-core-LICENSE.txt
new file mode 100644
index 0000000000..6279e5206d
--- /dev/null
+++ b/distribution/licenses/log4j-core-LICENSE.txt
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 1999-2005 The Apache Software Foundation
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/distribution/licenses/log4j-core-NOTICE.txt b/distribution/licenses/log4j-core-NOTICE.txt
new file mode 100644
index 0000000000..0375732360
--- /dev/null
+++ b/distribution/licenses/log4j-core-NOTICE.txt
@@ -0,0 +1,5 @@
+Apache log4j
+Copyright 2007 The Apache Software Foundation
+
+This product includes software developed at
+The Apache Software Foundation (http://www.apache.org/). \ No newline at end of file
diff --git a/distribution/src/main/resources/config/jvm.options b/distribution/src/main/resources/config/jvm.options
index 2feba02550..2216288ce4 100644
--- a/distribution/src/main/resources/config/jvm.options
+++ b/distribution/src/main/resources/config/jvm.options
@@ -62,6 +62,10 @@
# flag to explicitly tell Netty to not use unsafe
-Dio.netty.noUnsafe=true
+# log4j 2
+-Dlog4j.shutdownHookEnabled=false
+-Dlog4j2.disable.jmx=true
+
## heap dumps
# generate a heap dump when an allocation from the Java heap fails
diff --git a/distribution/src/main/resources/config/log4j2.properties b/distribution/src/main/resources/config/log4j2.properties
new file mode 100644
index 0000000000..06d8200b52
--- /dev/null
+++ b/distribution/src/main/resources/config/log4j2.properties
@@ -0,0 +1,73 @@
+status = error
+
+# log action execution errors for easier debugging
+logger.action.name = action
+logger.action.level = debug
+
+appender.console.type = Console
+appender.console.name = console
+appender.console.layout.type = PatternLayout
+appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %m%n
+
+appender.rolling.type = RollingFile
+appender.rolling.name = rolling
+appender.rolling.fileName = ${sys:es.logs}.log
+appender.rolling.layout.type = PatternLayout
+appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %.10000m%n
+appender.rolling.filePattern = ${sys:es.logs}-%d{yyyy-MM-dd}.log
+appender.rolling.policies.type = Policies
+appender.rolling.policies.time.type = TimeBasedTriggeringPolicy
+appender.rolling.policies.time.interval = 1
+appender.rolling.policies.time.modulate = true
+
+rootLogger.level = info
+rootLogger.appenderRef.console.ref = console
+rootLogger.appenderRef.rolling.ref = rolling
+
+appender.deprecation_rolling.type = RollingFile
+appender.deprecation_rolling.name = deprecation_rolling
+appender.deprecation_rolling.fileName = ${sys:es.logs}_deprecation.log
+appender.deprecation_rolling.layout.type = PatternLayout
+appender.deprecation_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %.10000m%n
+appender.deprecation_rolling.filePattern = ${sys:es.logs}_deprecation-%d{yyyy-MM-dd}.log
+appender.deprecation_rolling.policies.type = Policies
+appender.deprecation_rolling.policies.time.type = TimeBasedTriggeringPolicy
+appender.deprecation_rolling.policies.time.interval = 1
+appender.deprecation_rolling.policies.time.modulate = true
+
+logger.deprecation.name = deprecation
+logger.deprecation.level = warn
+logger.deprecation.appenderRef.deprecation_rolling.ref = deprecation_rolling
+logger.deprecation.additivity = false
+
+appender.index_search_slowlog_rolling.type = RollingFile
+appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling
+appender.index_search_slowlog_rolling.fileName = ${sys:es.logs}_index_search_slowlog.log
+appender.index_search_slowlog_rolling.layout.type = PatternLayout
+appender.index_search_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %.10000m%n
+appender.index_search_slowlog_rolling.filePattern = ${sys:es.logs}_index_search_slowlog-%d{yyyy-MM-dd}.log
+appender.index_search_slowlog_rolling.policies.type = Policies
+appender.index_search_slowlog_rolling.policies.time.type = TimeBasedTriggeringPolicy
+appender.index_search_slowlog_rolling.policies.time.interval = 1
+appender.index_search_slowlog_rolling.policies.time.modulate = true
+
+logger.index_search_slowlog_rolling.name = index.search.slowlog
+logger.index_search_slowlog_rolling.level = trace
+logger.index_search_slowlog_rolling.appenderRef.index_search_slowlog_rolling.ref = index_search_slowlog_rolling
+logger.index_search_slowlog_rolling.additivity = false
+
+appender.index_indexing_slowlog_rolling.type = RollingFile
+appender.index_indexing_slowlog_rolling.name = index_indexing_slowlog_rolling
+appender.index_indexing_slowlog_rolling.fileName = ${sys:es.logs}_index_indexing_slowlog.log
+appender.index_indexing_slowlog_rolling.layout.type = PatternLayout
+appender.index_indexing_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %.10000m%n
+appender.index_indexing_slowlog_rolling.filePattern = ${sys:es.logs}_index_indexing_slowlog-%d{yyyy-MM-dd}.log
+appender.index_indexing_slowlog_rolling.policies.type = Policies
+appender.index_indexing_slowlog_rolling.policies.time.type = TimeBasedTriggeringPolicy
+appender.index_indexing_slowlog_rolling.policies.time.interval = 1
+appender.index_indexing_slowlog_rolling.policies.time.modulate = true
+
+logger.index_indexing_slowlog.name = index.indexing.slowlog.index
+logger.index_indexing_slowlog.level = trace
+logger.index_indexing_slowlog.appenderRef.index_indexing_slowlog_rolling.ref = index_indexing_slowlog_rolling
+logger.index_indexing_slowlog.additivity = false
diff --git a/distribution/src/main/resources/config/logging.yml b/distribution/src/main/resources/config/logging.yml
deleted file mode 100644
index 11cd181ebd..0000000000
--- a/distribution/src/main/resources/config/logging.yml
+++ /dev/null
@@ -1,86 +0,0 @@
-# you can override using a command-line parameter
-# -E logger.level=(ERROR|WARN|INFO|DEBUG|TRACE)
-logger.level: INFO
-rootLogger: ${logger.level}, console, file
-logger:
- # log action execution errors for easier debugging
- action: DEBUG
-
- # deprecation logging, turn to DEBUG to see them
- deprecation: INFO, deprecation_log_file
-
- # reduce the logging for aws, too much is logged under the default INFO
- com.amazonaws: WARN
- # aws will try to do some sketchy JMX stuff, but its not needed.
- com.amazonaws.jmx.SdkMBeanRegistrySupport: ERROR
- com.amazonaws.metrics.AwsSdkMetrics: ERROR
-
- org.apache.http: INFO
-
- # gateway
- #gateway: DEBUG
- #index.gateway: DEBUG
-
- # peer shard recovery
- #indices.recovery: DEBUG
-
- # discovery
- #discovery: TRACE
-
- index.search.slowlog: TRACE, index_search_slow_log_file
- index.indexing.slowlog: TRACE, index_indexing_slow_log_file
-
-additivity:
- index.search.slowlog: false
- index.indexing.slowlog: false
- deprecation: false
-
-appender:
- console:
- type: console
- layout:
- type: consolePattern
- conversionPattern: "[%d{ISO8601}][%-5p][%-25c] %m%n"
-
- file:
- type: dailyRollingFile
- file: ${path.logs}/${cluster.name}.log
- datePattern: "'.'yyyy-MM-dd"
- layout:
- type: pattern
- conversionPattern: "[%d{ISO8601}][%-5p][%-25c] %.10000m%n"
-
- # Use the following log4j-extras RollingFileAppender to enable gzip compression of log files.
- # For more information see https://logging.apache.org/log4j/extras/apidocs/org/apache/log4j/rolling/RollingFileAppender.html
- #file:
- #type: extrasRollingFile
- #file: ${path.logs}/${cluster.name}.log
- #rollingPolicy: timeBased
- #rollingPolicy.FileNamePattern: ${path.logs}/${cluster.name}.log.%d{yyyy-MM-dd}.gz
- #layout:
- #type: pattern
- #conversionPattern: "[%d{ISO8601}][%-5p][%-25c] %m%n"
-
- deprecation_log_file:
- type: dailyRollingFile
- file: ${path.logs}/${cluster.name}_deprecation.log
- datePattern: "'.'yyyy-MM-dd"
- layout:
- type: pattern
- conversionPattern: "[%d{ISO8601}][%-5p][%-25c] %m%n"
-
- index_search_slow_log_file:
- type: dailyRollingFile
- file: ${path.logs}/${cluster.name}_index_search_slowlog.log
- datePattern: "'.'yyyy-MM-dd"
- layout:
- type: pattern
- conversionPattern: "[%d{ISO8601}][%-5p][%-25c] %m%n"
-
- index_indexing_slow_log_file:
- type: dailyRollingFile
- file: ${path.logs}/${cluster.name}_index_indexing_slowlog.log
- datePattern: "'.'yyyy-MM-dd"
- layout:
- type: pattern
- conversionPattern: "[%d{ISO8601}][%-5p][%-25c] %m%n"
diff --git a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java
index 6216ec2354..43f39b477e 100644
--- a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java
+++ b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java
@@ -23,10 +23,14 @@ import groovy.lang.Binding;
import groovy.lang.GroovyClassLoader;
import groovy.lang.GroovyCodeSource;
import groovy.lang.Script;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorer;
import org.codehaus.groovy.ast.ClassCodeExpressionTransformer;
import org.codehaus.groovy.ast.ClassNode;
+import org.codehaus.groovy.ast.Parameter;
import org.codehaus.groovy.ast.expr.ConstantExpression;
import org.codehaus.groovy.ast.expr.Expression;
import org.codehaus.groovy.classgen.GeneratorContext;
@@ -43,7 +47,6 @@ import org.elasticsearch.bootstrap.BootstrapInfo;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.hash.MessageDigests;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.ClassPermission;
import org.elasticsearch.script.CompiledScript;
@@ -248,14 +251,14 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri
private final Script script;
private final LeafSearchLookup lookup;
private final Map<String, Object> variables;
- private final ESLogger logger;
+ private final Logger logger;
- public GroovyScript(CompiledScript compiledScript, Script script, ESLogger logger) {
+ public GroovyScript(CompiledScript compiledScript, Script script, Logger logger) {
this(compiledScript, script, null, logger);
}
@SuppressWarnings("unchecked")
- public GroovyScript(CompiledScript compiledScript, Script script, @Nullable LeafSearchLookup lookup, ESLogger logger) {
+ public GroovyScript(CompiledScript compiledScript, Script script, @Nullable LeafSearchLookup lookup, Logger logger) {
this.compiledScript = compiledScript;
this.script = script;
this.lookup = lookup;
@@ -299,7 +302,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri
// resulting in the uncaughtExceptionHandler handling it.
final StackTraceElement[] elements = ae.getStackTrace();
if (elements.length > 0 && "org.codehaus.groovy.runtime.InvokerHelper".equals(elements[0].getClassName())) {
- logger.trace("failed to run {}", ae, compiledScript);
+ logger.trace((Supplier<?>) () -> new ParameterizedMessage("failed to run {}", compiledScript), ae);
throw new ScriptException("Error evaluating " + compiledScript.name(),
ae, emptyList(), "", compiledScript.lang());
}
diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java
index 66ecf23fa0..c308496958 100644
--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java
+++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java
@@ -20,6 +20,7 @@ package org.elasticsearch.script.mustache;
import com.github.mustachejava.Mustache;
import com.github.mustachejava.MustacheFactory;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractComponent;
@@ -165,7 +166,7 @@ public final class MustacheScriptEngineService extends AbstractComponent impleme
return null;
});
} catch (Exception e) {
- logger.error("Error running {}", e, template);
+ logger.error(new ParameterizedMessage("Error running {}", template), e);
throw new GeneralScriptException("Error running " + template, e);
}
return result.bytes();
diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java
index 8154c63290..c30a988a4f 100644
--- a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java
+++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java
@@ -48,6 +48,7 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java
index 0178d2e1fb..32824e969d 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.index.reindex;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
@@ -31,7 +32,6 @@ import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.bulk.Retry;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.client.ParentTaskAssigningClient;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
@@ -64,7 +64,7 @@ import static org.elasticsearch.search.sort.SortBuilders.fieldSort;
* their tests can use them. Most methods run in the listener thread pool because the are meant to be fast and don't expect to block.
*/
public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBulkByScrollRequest<Request>> {
- protected final ESLogger logger;
+ protected final Logger logger;
protected final BulkByScrollTask task;
protected final ThreadPool threadPool;
/**
@@ -81,7 +81,7 @@ public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBu
private final Retry bulkRetry;
private final ScrollableHitSource scrollSource;
- public AbstractAsyncBulkByScrollAction(BulkByScrollTask task, ESLogger logger, ParentTaskAssigningClient client,
+ public AbstractAsyncBulkByScrollAction(BulkByScrollTask task, Logger logger, ParentTaskAssigningClient client,
ThreadPool threadPool, Request mainRequest, ActionListener<BulkIndexByScrollResponse> listener) {
this.task = task;
this.logger = logger;
diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java
index ed5211da14..f441e52722 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.index.reindex;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.bulk.BulkRequest;
@@ -26,7 +27,6 @@ import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.ParentTaskAssigningClient;
import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.mapper.IdFieldMapper;
@@ -71,7 +71,7 @@ public abstract class AbstractAsyncBulkIndexByScrollAction<Request extends Abstr
*/
private final BiFunction<RequestWrapper<?>, ScrollableHitSource.Hit, RequestWrapper<?>> scriptApplier;
- public AbstractAsyncBulkIndexByScrollAction(BulkByScrollTask task, ESLogger logger, ParentTaskAssigningClient client,
+ public AbstractAsyncBulkIndexByScrollAction(BulkByScrollTask task, Logger logger, ParentTaskAssigningClient client,
ThreadPool threadPool, Request mainRequest,
ActionListener<BulkIndexByScrollResponse> listener,
ScriptService scriptService, ClusterState clusterState) {
diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java
index a547c5303b..ade1f8c2f8 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java
@@ -19,10 +19,10 @@
package org.elasticsearch.index.reindex;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
@@ -49,7 +49,7 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueNanos;
* Task storing information about a currently running BulkByScroll request.
*/
public class BulkByScrollTask extends CancellableTask {
- private static final ESLogger logger = ESLoggerFactory.getLogger(BulkByScrollTask.class.getPackage().getName());
+ private static final Logger logger = ESLoggerFactory.getLogger(BulkByScrollTask.class.getPackage().getName());
/**
* The total number of documents this request will process. 0 means we don't yet know or, possibly, there are actually 0 documents
diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java
index 030753e941..a5a0f867d9 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java
@@ -19,6 +19,8 @@
package org.elasticsearch.index.reindex;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.bulk.BackoffPolicy;
@@ -31,7 +33,6 @@ import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.ParentTaskAssigningClient;
import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
@@ -60,7 +61,7 @@ public class ClientScrollableHitSource extends ScrollableHitSource {
private final ParentTaskAssigningClient client;
private final SearchRequest firstSearchRequest;
- public ClientScrollableHitSource(ESLogger logger, BackoffPolicy backoffPolicy, ThreadPool threadPool, Runnable countSearchRetry,
+ public ClientScrollableHitSource(Logger logger, BackoffPolicy backoffPolicy, ThreadPool threadPool, Runnable countSearchRetry,
Consumer<Exception> fail, ParentTaskAssigningClient client, SearchRequest firstSearchRequest) {
super(logger, backoffPolicy, threadPool, countSearchRetry, fail);
this.client = client;
@@ -105,7 +106,7 @@ public class ClientScrollableHitSource extends ScrollableHitSource {
@Override
public void onFailure(Exception e) {
- logger.warn("Failed to clear scroll [{}]", e, scrollId);
+ logger.warn(new ParameterizedMessage("Failed to clear scroll [{}]", scrollId), e);
}
});
}
@@ -144,11 +145,12 @@ public class ClientScrollableHitSource extends ScrollableHitSource {
if (retries.hasNext()) {
retryCount += 1;
TimeValue delay = retries.next();
- logger.trace("retrying rejected search after [{}]", e, delay);
+ logger.trace(new ParameterizedMessage("retrying rejected search after [{}]", delay), e);
countSearchRetry.run();
threadPool.schedule(delay, ThreadPool.Names.SAME, this);
} else {
- logger.warn("giving up on search because we retried [{}] times without success", e, retryCount);
+ logger.warn(
+ new ParameterizedMessage("giving up on search because we retried [{}] times without success", retryCount), e);
fail.accept(e);
}
} else {
@@ -242,7 +244,7 @@ public class ClientScrollableHitSource extends ScrollableHitSource {
public Long getTTL() {
return fieldValue(TTLFieldMapper.NAME);
}
-
+
private <T> T fieldValue(String fieldName) {
SearchHitField field = delegate.field(fieldName);
return field == null ? null : field.value();
diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java
index da601fca08..0b4b66222b 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java
@@ -19,6 +19,7 @@
package org.elasticsearch.index.reindex;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.bulk.BackoffPolicy;
import org.elasticsearch.action.search.ShardSearchFailure;
@@ -28,7 +29,6 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -50,13 +50,13 @@ import static java.util.Objects.requireNonNull;
public abstract class ScrollableHitSource implements Closeable {
private final AtomicReference<String> scrollId = new AtomicReference<>();
- protected final ESLogger logger;
+ protected final Logger logger;
protected final BackoffPolicy backoffPolicy;
protected final ThreadPool threadPool;
protected final Runnable countSearchRetry;
protected final Consumer<Exception> fail;
- public ScrollableHitSource(ESLogger logger, BackoffPolicy backoffPolicy, ThreadPool threadPool, Runnable countSearchRetry,
+ public ScrollableHitSource(Logger logger, BackoffPolicy backoffPolicy, ThreadPool threadPool, Runnable countSearchRetry,
Consumer<Exception> fail) {
this.logger = logger;
this.backoffPolicy = backoffPolicy;
diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java
index df07bd3485..99362e75f9 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.index.reindex;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.support.ActionFilters;
@@ -29,7 +30,6 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.tasks.Task;
@@ -68,7 +68,7 @@ public class TransportDeleteByQueryAction extends HandledTransportAction<DeleteB
*/
static class AsyncDeleteBySearchAction extends AbstractAsyncBulkIndexByScrollAction<DeleteByQueryRequest> {
- public AsyncDeleteBySearchAction(BulkByScrollTask task, ESLogger logger, ParentTaskAssigningClient client, ThreadPool threadPool,
+ public AsyncDeleteBySearchAction(BulkByScrollTask task, Logger logger, ParentTaskAssigningClient client, ThreadPool threadPool,
DeleteByQueryRequest request, ActionListener<BulkIndexByScrollResponse> listener,
ScriptService scriptService, ClusterState clusterState) {
super(task, logger, client, threadPool, request, listener, scriptService, clusterState);
diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java
index 57d29283bb..33aca02835 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java
@@ -27,6 +27,7 @@ import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.nio.reactor.IOReactorConfig;
import org.apache.http.message.BasicHeader;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.bulk.BackoffPolicy;
@@ -44,7 +45,6 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
@@ -226,7 +226,7 @@ public class TransportReindexAction extends HandledTransportAction<ReindexReques
*/
private List<Thread> createdThreads = emptyList();
- public AsyncIndexBySearchAction(BulkByScrollTask task, ESLogger logger, ParentTaskAssigningClient client, ThreadPool threadPool,
+ public AsyncIndexBySearchAction(BulkByScrollTask task, Logger logger, ParentTaskAssigningClient client, ThreadPool threadPool,
ReindexRequest request, ActionListener<BulkIndexByScrollResponse> listener,
ScriptService scriptService, ClusterState clusterState) {
super(task, logger, client, threadPool, request, listener, scriptService, clusterState);
diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java
index 79c013482e..0f4bf5695d 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java
@@ -19,6 +19,7 @@
package org.elasticsearch.index.reindex;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.ActionFilters;
@@ -29,7 +30,6 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.mapper.IdFieldMapper;
@@ -81,7 +81,7 @@ public class TransportUpdateByQueryAction extends HandledTransportAction<UpdateB
*/
static class AsyncIndexBySearchAction extends AbstractAsyncBulkIndexByScrollAction<UpdateByQueryRequest> {
- public AsyncIndexBySearchAction(BulkByScrollTask task, ESLogger logger, ParentTaskAssigningClient client, ThreadPool threadPool,
+ public AsyncIndexBySearchAction(BulkByScrollTask task, Logger logger, ParentTaskAssigningClient client, ThreadPool threadPool,
UpdateByQueryRequest request, ActionListener<BulkIndexByScrollResponse> listener,
ScriptService scriptService, ClusterState clusterState) {
super(task, logger, client, threadPool, request, listener, scriptService, clusterState);
diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java
index 572913493c..96ecd38cc1 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java
@@ -21,6 +21,8 @@ package org.elasticsearch.index.reindex.remote;
import org.apache.http.HttpEntity;
import org.apache.http.util.EntityUtils;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.Version;
@@ -34,7 +36,6 @@ import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParseFieldMatcherSupplier;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.ThreadContext;
@@ -70,7 +71,7 @@ public class RemoteScrollableHitSource extends ScrollableHitSource {
private final SearchRequest searchRequest;
Version remoteVersion;
- public RemoteScrollableHitSource(ESLogger logger, BackoffPolicy backoffPolicy, ThreadPool threadPool, Runnable countSearchRetry,
+ public RemoteScrollableHitSource(Logger logger, BackoffPolicy backoffPolicy, ThreadPool threadPool, Runnable countSearchRetry,
Consumer<Exception> fail, RestClient client, BytesReference query, SearchRequest searchRequest) {
super(logger, backoffPolicy, threadPool, countSearchRetry, fail);
this.query = query;
@@ -126,7 +127,7 @@ public class RemoteScrollableHitSource extends ScrollableHitSource {
@Override
public void onFailure(Exception t) {
- logger.warn("Failed to clear scroll [{}]", t, scrollId);
+ logger.warn(new ParameterizedMessage("Failed to clear scroll [{}]", scrollId), t);
}
});
}
@@ -173,7 +174,7 @@ public class RemoteScrollableHitSource extends ScrollableHitSource {
if (RestStatus.TOO_MANY_REQUESTS.getStatus() == re.getResponse().getStatusLine().getStatusCode()) {
if (retries.hasNext()) {
TimeValue delay = retries.next();
- logger.trace("retrying rejected search after [{}]", e, delay);
+ logger.trace(new ParameterizedMessage("retrying rejected search after [{}]", delay), e);
countSearchRetry.run();
threadPool.schedule(delay, ThreadPool.Names.SAME, RetryHelper.this);
return;
diff --git a/modules/transport-netty3/build.gradle b/modules/transport-netty3/build.gradle
index e13170e5c2..eae0608f92 100644
--- a/modules/transport-netty3/build.gradle
+++ b/modules/transport-netty3/build.gradle
@@ -123,5 +123,5 @@ thirdPartyAudit.excludes = [
// from org.jboss.netty.util.internal.logging.InternalLoggerFactory (netty) - it's optional
'org.slf4j.Logger',
- 'org.slf4j.LoggerFactory',
+ 'org.slf4j.LoggerFactory'
]
diff --git a/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpServerTransport.java b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpServerTransport.java
index d12636de5c..298751f97d 100644
--- a/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpServerTransport.java
+++ b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpServerTransport.java
@@ -21,6 +21,7 @@ package org.elasticsearch.http.netty3;
import com.carrotsearch.hppc.IntHashSet;
import com.carrotsearch.hppc.IntSet;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
@@ -495,10 +496,18 @@ public class Netty3HttpServerTransport extends AbstractLifecycleComponent implem
return;
}
if (!NetworkExceptionHelper.isCloseConnectionException(e.getCause())) {
- logger.warn("Caught exception while handling client http traffic, closing connection {}", e.getCause(), ctx.getChannel());
+ logger.warn(
+ new ParameterizedMessage(
+ "Caught exception while handling client http traffic, closing connection {}",
+ ctx.getChannel()),
+ e.getCause());
ctx.getChannel().close();
} else {
- logger.debug("Caught exception while handling client http traffic, closing connection {}", e.getCause(), ctx.getChannel());
+ logger.debug(
+ new ParameterizedMessage(
+ "Caught exception while handling client http traffic, closing connection {}",
+ ctx.getChannel()),
+ e.getCause());
ctx.getChannel().close();
}
}
diff --git a/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3InternalESLogger.java b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3InternalESLogger.java
index 6ff941c48e..03c9671ad7 100644
--- a/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3InternalESLogger.java
+++ b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3InternalESLogger.java
@@ -19,8 +19,8 @@
package org.elasticsearch.transport.netty3;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.SuppressLoggerChecks;
-import org.elasticsearch.common.logging.ESLogger;
import org.jboss.netty.logging.AbstractInternalLogger;
/**
@@ -29,9 +29,9 @@ import org.jboss.netty.logging.AbstractInternalLogger;
@SuppressLoggerChecks(reason = "safely delegates to logger")
final class Netty3InternalESLogger extends AbstractInternalLogger {
- private final ESLogger logger;
+ private final Logger logger;
- Netty3InternalESLogger(ESLogger logger) {
+ Netty3InternalESLogger(Logger logger) {
this.logger = logger;
}
diff --git a/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3OpenChannelsHandler.java b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3OpenChannelsHandler.java
index 6a7732723c..bbfb775d0e 100644
--- a/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3OpenChannelsHandler.java
+++ b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3OpenChannelsHandler.java
@@ -19,8 +19,8 @@
package org.elasticsearch.transport.netty3;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.lease.Releasable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.metrics.CounterMetric;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.jboss.netty.channel.Channel;
@@ -42,9 +42,9 @@ public class Netty3OpenChannelsHandler implements ChannelUpstreamHandler, Releas
final CounterMetric openChannelsMetric = new CounterMetric();
final CounterMetric totalChannelsMetric = new CounterMetric();
- final ESLogger logger;
+ final Logger logger;
- public Netty3OpenChannelsHandler(ESLogger logger) {
+ public Netty3OpenChannelsHandler(Logger logger) {
this.logger = logger;
}
diff --git a/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3Transport.java b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3Transport.java
index 71a9ca25d4..a967f83afb 100644
--- a/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3Transport.java
+++ b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3Transport.java
@@ -19,13 +19,13 @@
package org.elasticsearch.transport.netty3;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
-import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.network.NetworkService.TcpSettings;
@@ -46,7 +46,6 @@ import org.elasticsearch.transport.TransportServiceAdapter;
import org.elasticsearch.transport.TransportSettings;
import org.jboss.netty.bootstrap.ClientBootstrap;
import org.jboss.netty.bootstrap.ServerBootstrap;
-import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.channel.AdaptiveReceiveBufferSizePredictorFactory;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFuture;
@@ -554,7 +553,7 @@ public class Netty3Transport extends TcpTransport<Channel> {
try {
serverBootstrap.releaseExternalResources();
} catch (Exception e) {
- logger.debug("Error closing serverBootstrap for profile [{}]", e, name);
+ logger.debug(new ParameterizedMessage("Error closing serverBootstrap for profile [{}]", name), e);
}
}
serverBootstraps.clear();
diff --git a/modules/transport-netty4/build.gradle b/modules/transport-netty4/build.gradle
index 5012b3a163..4fc38bc394 100644
--- a/modules/transport-netty4/build.gradle
+++ b/modules/transport-netty4/build.gradle
@@ -125,8 +125,6 @@ thirdPartyAudit.excludes = [
'net.jpountz.lz4.LZ4FastDecompressor',
'net.jpountz.xxhash.StreamingXXHash32',
'net.jpountz.xxhash.XXHashFactory',
- 'org.apache.logging.log4j.LogManager',
- 'org.apache.logging.log4j.Logger',
'org.apache.tomcat.Apr',
'org.apache.tomcat.jni.CertificateRequestedCallback',
'org.apache.tomcat.jni.CertificateRequestedCallback$KeyMaterial',
diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java
index 1a6e8ae60b..b76abbd1db 100644
--- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java
+++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java
@@ -43,6 +43,7 @@ import io.netty.handler.codec.http.HttpObjectAggregator;
import io.netty.handler.codec.http.HttpRequestDecoder;
import io.netty.handler.codec.http.HttpResponseEncoder;
import io.netty.handler.timeout.ReadTimeoutException;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
@@ -512,10 +513,14 @@ public class Netty4HttpServerTransport extends AbstractLifecycleComponent implem
return;
}
if (!NetworkExceptionHelper.isCloseConnectionException(cause)) {
- logger.warn("caught exception while handling client http traffic, closing connection {}", cause, ctx.channel());
+ logger.warn(
+ new ParameterizedMessage("caught exception while handling client http traffic, closing connection {}", ctx.channel()),
+ cause);
ctx.channel().close();
} else {
- logger.debug("caught exception while handling client http traffic, closing connection {}", cause, ctx.channel());
+ logger.debug(
+ new ParameterizedMessage("caught exception while handling client http traffic, closing connection {}", ctx.channel()),
+ cause);
ctx.channel().close();
}
}
diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4InternalESLogger.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4InternalESLogger.java
index 6155529401..aaa277e34b 100644
--- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4InternalESLogger.java
+++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4InternalESLogger.java
@@ -20,14 +20,14 @@
package org.elasticsearch.transport.netty4;
import io.netty.util.internal.logging.AbstractInternalLogger;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.SuppressLoggerChecks;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
@SuppressLoggerChecks(reason = "safely delegates to logger")
class Netty4InternalESLogger extends AbstractInternalLogger {
- private final ESLogger logger;
+ private final Logger logger;
Netty4InternalESLogger(final String name) {
super(name);
diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4OpenChannelsHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4OpenChannelsHandler.java
index 0562a0d466..2270c90967 100644
--- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4OpenChannelsHandler.java
+++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4OpenChannelsHandler.java
@@ -25,16 +25,14 @@ import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.lease.Releasable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.metrics.CounterMetric;
import java.io.IOException;
import java.util.Collections;
-import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
-import java.util.stream.Collectors;
@ChannelHandler.Sharable
public class Netty4OpenChannelsHandler extends ChannelInboundHandlerAdapter implements Releasable {
@@ -43,9 +41,9 @@ public class Netty4OpenChannelsHandler extends ChannelInboundHandlerAdapter impl
final CounterMetric openChannelsMetric = new CounterMetric();
final CounterMetric totalChannelsMetric = new CounterMetric();
- final ESLogger logger;
+ final Logger logger;
- public Netty4OpenChannelsHandler(ESLogger logger) {
+ public Netty4OpenChannelsHandler(Logger logger) {
this.logger = logger;
}
diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java
index 9de5a31dbe..05f1efbbe0 100644
--- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java
+++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java
@@ -33,12 +33,12 @@ import io.netty.channel.FixedRecvByteBufAllocator;
import io.netty.channel.RecvByteBufAllocator;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.oio.OioEventLoopGroup;
-import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.channel.socket.oio.OioServerSocketChannel;
import io.netty.channel.socket.oio.OioSocketChannel;
import io.netty.util.concurrent.Future;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.cluster.node.DiscoveryNode;
@@ -495,7 +495,8 @@ public class Netty4Transport extends TcpTransport<Channel> {
for (final Tuple<String, Future<?>> future : serverBootstrapCloseFutures) {
future.v2().awaitUninterruptibly();
if (!future.v2().isSuccess()) {
- logger.debug("Error closing server bootstrap for profile [{}]", future.v2().cause(), future.v1());
+ logger.debug(
+ new ParameterizedMessage("Error closing server bootstrap for profile [{}]", future.v1()), future.v2().cause());
}
}
serverBootstraps.clear();
diff --git a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/AzureDiscoveryModule.java b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/AzureDiscoveryModule.java
index da684fd824..7ee62dd877 100644
--- a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/AzureDiscoveryModule.java
+++ b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/AzureDiscoveryModule.java
@@ -19,13 +19,13 @@
package org.elasticsearch.cloud.azure.classic;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cloud.azure.classic.management.AzureComputeService;
import org.elasticsearch.cloud.azure.classic.management.AzureComputeServiceImpl;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
@@ -43,7 +43,7 @@ import org.elasticsearch.plugin.discovery.azure.classic.AzureDiscoveryPlugin;
* @see AzureComputeServiceImpl
*/
public class AzureDiscoveryModule extends AbstractModule {
- protected final ESLogger logger;
+ protected final Logger logger;
private Settings settings;
// pkg private so it is settable by tests
@@ -69,7 +69,7 @@ public class AzureDiscoveryModule extends AbstractModule {
* Check if discovery is meant to start
* @return true if we can start discovery features
*/
- public static boolean isDiscoveryReady(Settings settings, ESLogger logger) {
+ public static boolean isDiscoveryReady(Settings settings, Logger logger) {
// User set discovery.type: azure
if (!AzureDiscoveryPlugin.AZURE.equalsIgnoreCase(DiscoveryModule.DISCOVERY_TYPE_SETTING.get(settings))) {
logger.trace("discovery.type not set to {}", AzureDiscoveryPlugin.AZURE);
diff --git a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/plugin/discovery/azure/classic/AzureDiscoveryPlugin.java b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/plugin/discovery/azure/classic/AzureDiscoveryPlugin.java
index 4c0ac17331..db5c1cc5c4 100644
--- a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/plugin/discovery/azure/classic/AzureDiscoveryPlugin.java
+++ b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/plugin/discovery/azure/classic/AzureDiscoveryPlugin.java
@@ -19,11 +19,11 @@
package org.elasticsearch.plugin.discovery.azure.classic;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.cloud.azure.classic.AzureDiscoveryModule;
import org.elasticsearch.cloud.azure.classic.management.AzureComputeService;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
@@ -41,7 +41,7 @@ public class AzureDiscoveryPlugin extends Plugin {
public static final String AZURE = "azure";
private final Settings settings;
- protected final ESLogger logger = Loggers.getLogger(AzureDiscoveryPlugin.class);
+ protected final Logger logger = Loggers.getLogger(AzureDiscoveryPlugin.class);
public AzureDiscoveryPlugin(Settings settings) {
this.settings = settings;
diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java
index e35b082899..1a4bf278f3 100644
--- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java
+++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java
@@ -30,14 +30,12 @@ import com.amazonaws.internal.StaticCredentialsProvider;
import com.amazonaws.retry.RetryPolicy;
import com.amazonaws.services.ec2.AmazonEC2;
import com.amazonaws.services.ec2.AmazonEC2Client;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.cloud.aws.network.Ec2NameResolver;
import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.logging.ESLogger;
-import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Settings;
import java.util.Random;
@@ -71,7 +69,7 @@ public class AwsEc2ServiceImpl extends AbstractLifecycleComponent implements Aws
return this.client;
}
- protected static AWSCredentialsProvider buildCredentials(ESLogger logger, Settings settings) {
+ protected static AWSCredentialsProvider buildCredentials(Logger logger, Settings settings) {
AWSCredentialsProvider credentials;
String key = CLOUD_EC2.KEY_SETTING.get(settings);
@@ -87,7 +85,7 @@ public class AwsEc2ServiceImpl extends AbstractLifecycleComponent implements Aws
return credentials;
}
- protected static ClientConfiguration buildConfiguration(ESLogger logger, Settings settings) {
+ protected static ClientConfiguration buildConfiguration(Logger logger, Settings settings) {
ClientConfiguration clientConfiguration = new ClientConfiguration();
// the response metadata cache is only there for diagnostics purposes,
// but can force objects from every response to the old generation.
@@ -135,7 +133,7 @@ public class AwsEc2ServiceImpl extends AbstractLifecycleComponent implements Aws
return clientConfiguration;
}
- protected static String findEndpoint(ESLogger logger, Settings settings) {
+ protected static String findEndpoint(Logger logger, Settings settings) {
String endpoint = null;
if (CLOUD_EC2.ENDPOINT_SETTING.exists(settings)) {
endpoint = CLOUD_EC2.ENDPOINT_SETTING.get(settings);
diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsSigner.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsSigner.java
index a76a2b04a9..11732725e9 100644
--- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsSigner.java
+++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsSigner.java
@@ -21,12 +21,12 @@ package org.elasticsearch.cloud.aws;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.auth.SignerFactory;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
public class AwsSigner {
- private static final ESLogger logger = Loggers.getLogger(AwsSigner.class);
+ private static final Logger logger = Loggers.getLogger(AwsSigner.class);
private AwsSigner() {
diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java
index 1f3043fe6d..513a47f0b0 100644
--- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java
+++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java
@@ -27,6 +27,7 @@ import com.amazonaws.services.ec2.model.Filter;
import com.amazonaws.services.ec2.model.GroupIdentifier;
import com.amazonaws.services.ec2.model.Instance;
import com.amazonaws.services.ec2.model.Reservation;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.Version;
import org.elasticsearch.cloud.aws.AwsEc2Service;
import org.elasticsearch.cloud.aws.AwsEc2Service.DISCOVERY_EC2;
@@ -175,7 +176,7 @@ public class AwsEc2UnicastHostsProvider extends AbstractComponent implements Uni
emptyMap(), emptySet(), Version.CURRENT.minimumCompatibilityVersion()));
}
} catch (Exception e) {
- logger.warn("failed ot add {}, address {}", e, instance.getInstanceId(), address);
+ logger.warn(new ParameterizedMessage("failed to add {}, address {}", instance.getInstanceId(), address), e);
}
} else {
logger.trace("not adding {}, address is null, host_type {}", instance.getInstanceId(), hostType);
diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java
index 346372f554..7f8e983e52 100644
--- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java
+++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java
@@ -19,21 +19,7 @@
package org.elasticsearch.plugin.discovery.ec2;
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.UncheckedIOException;
-import java.net.URL;
-import java.net.URLConnection;
-import java.nio.charset.StandardCharsets;
-import java.security.AccessController;
-import java.security.PrivilegedAction;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
-
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.cloud.aws.AwsEc2Service;
import org.elasticsearch.cloud.aws.AwsEc2ServiceImpl;
@@ -41,9 +27,7 @@ import org.elasticsearch.cloud.aws.Ec2Module;
import org.elasticsearch.cloud.aws.network.Ec2NameResolver;
import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.inject.Module;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
-import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
@@ -54,12 +38,27 @@ import org.elasticsearch.node.Node;
import org.elasticsearch.plugins.DiscoveryPlugin;
import org.elasticsearch.plugins.Plugin;
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.UncheckedIOException;
+import java.net.URL;
+import java.net.URLConnection;
+import java.nio.charset.StandardCharsets;
+import java.security.AccessController;
+import java.security.PrivilegedAction;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+
/**
*
*/
public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin {
- private static ESLogger logger = Loggers.getLogger(Ec2DiscoveryPlugin.class);
+ private static Logger logger = Loggers.getLogger(Ec2DiscoveryPlugin.class);
public static final String EC2 = "ec2";
diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java
index 49f4f88541..050a25bb18 100644
--- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java
+++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java
@@ -373,7 +373,7 @@ import com.amazonaws.services.ec2.model.TerminateInstancesResult;
import com.amazonaws.services.ec2.model.UnassignPrivateIpAddressesRequest;
import com.amazonaws.services.ec2.model.UnmonitorInstancesRequest;
import com.amazonaws.services.ec2.model.UnmonitorInstancesResult;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import java.util.ArrayList;
@@ -386,7 +386,7 @@ import java.util.regex.Pattern;
public class AmazonEC2Mock implements AmazonEC2 {
- private static final ESLogger logger = ESLoggerFactory.getLogger(AmazonEC2Mock.class.getName());
+ private static final Logger logger = ESLoggerFactory.getLogger(AmazonEC2Mock.class.getName());
public static final String PREFIX_PRIVATE_IP = "10.0.0.";
public static final String PREFIX_PUBLIC_IP = "8.8.8.";
diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java
index 5ec4b18e91..8d05871c44 100644
--- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java
+++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java
@@ -28,6 +28,7 @@ import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.services.compute.Compute;
import com.google.api.services.compute.model.Instance;
import com.google.api.services.compute.model.InstanceList;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
@@ -82,7 +83,7 @@ public class GceInstancesServiceImpl extends AbstractLifecycleComponent implemen
return instanceList.isEmpty() || instanceList.getItems() == null ?
Collections.<Instance>emptyList() : instanceList.getItems();
} catch (PrivilegedActionException e) {
- logger.warn("Problem fetching instance list for zone {}", e, zoneId);
+ logger.warn(new ParameterizedMessage("Problem fetching instance list for zone {}", zoneId), e);
logger.debug("Full exception:", e);
// assist type inference
return Collections.<Instance>emptyList();
diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceModule.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceModule.java
index 81d10c756e..71e9fbc780 100644
--- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceModule.java
+++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceModule.java
@@ -19,8 +19,8 @@
package org.elasticsearch.cloud.gce;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.inject.AbstractModule;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
@@ -29,7 +29,7 @@ public class GceModule extends AbstractModule {
static Class<? extends GceInstancesService> computeServiceImpl = GceInstancesServiceImpl.class;
protected final Settings settings;
- protected final ESLogger logger = Loggers.getLogger(GceModule.class);
+ protected final Logger logger = Loggers.getLogger(GceModule.class);
public GceModule(Settings settings) {
this.settings = settings;
diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java
index c73df8f839..1ac0e85ff3 100644
--- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java
+++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java
@@ -22,6 +22,7 @@ package org.elasticsearch.discovery.gce;
import com.google.api.services.compute.model.AccessConfig;
import com.google.api.services.compute.model.Instance;
import com.google.api.services.compute.model.NetworkInterface;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.Version;
import org.elasticsearch.cloud.gce.GceInstancesService;
import org.elasticsearch.cluster.node.DiscoveryNode;
@@ -245,7 +246,7 @@ public class GceUnicastHostsProvider extends AbstractComponent implements Unicas
}
}
} catch (Exception e) {
- logger.warn("failed to add {}, address {}", e, name, ip_private);
+ logger.warn(new ParameterizedMessage("failed to add {}, address {}", name, ip_private), e);
}
}
diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java
index b21d397d78..c005aa05a7 100644
--- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java
+++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java
@@ -29,8 +29,8 @@ import com.google.api.client.http.HttpResponse;
import com.google.api.client.http.HttpUnsuccessfulResponseHandler;
import com.google.api.client.util.ExponentialBackOff;
import com.google.api.client.util.Sleeper;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.SpecialPermission;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.unit.TimeValue;
@@ -43,8 +43,7 @@ public class RetryHttpInitializerWrapper implements HttpRequestInitializer {
private TimeValue maxWait;
- private static final ESLogger logger =
- ESLoggerFactory.getLogger(RetryHttpInitializerWrapper.class.getName());
+ private static final Logger logger = ESLoggerFactory.getLogger(RetryHttpInitializerWrapper.class.getName());
// Intercepts the request for filling in the "Authorization"
// header field, as well as recovering from certain unsuccessful
diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java
index 031f7eaf10..aeec991182 100644
--- a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java
+++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java
@@ -21,6 +21,7 @@ package org.elasticsearch.plugin.discovery.gce;
import com.google.api.client.http.HttpHeaders;
import com.google.api.client.util.ClassInfo;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.cloud.gce.GceInstancesService;
import org.elasticsearch.cloud.gce.GceMetadataService;
@@ -28,7 +29,6 @@ import org.elasticsearch.cloud.gce.GceModule;
import org.elasticsearch.cloud.gce.network.GceNameResolver;
import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.inject.Module;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Setting;
@@ -51,7 +51,7 @@ public class GceDiscoveryPlugin extends Plugin implements DiscoveryPlugin {
public static final String GCE = "gce";
private final Settings settings;
- protected final ESLogger logger = Loggers.getLogger(GceDiscoveryPlugin.class);
+ protected final Logger logger = Loggers.getLogger(GceDiscoveryPlugin.class);
static {
/*
diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoverTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoverTests.java
index adb06f2595..98f6fd0dc1 100644
--- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoverTests.java
+++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoverTests.java
@@ -23,11 +23,11 @@ import com.sun.net.httpserver.Headers;
import com.sun.net.httpserver.HttpServer;
import com.sun.net.httpserver.HttpsConfigurator;
import com.sun.net.httpserver.HttpsServer;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.cloud.gce.GceInstancesServiceImpl;
import org.elasticsearch.cloud.gce.GceMetadataService;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.FileSystemUtils;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
@@ -40,6 +40,7 @@ import org.junit.BeforeClass;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManagerFactory;
+
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
@@ -125,7 +126,7 @@ public class GceDiscoverTests extends ESIntegTestCase {
httpsServer.createContext("/compute/v1/projects/testproject/zones/primaryzone/instances", (s) -> {
Headers headers = s.getResponseHeaders();
headers.add("Content-Type", "application/json; charset=UTF-8");
- ESLogger logger = Loggers.getLogger(GceDiscoverTests.class);
+ Logger logger = Loggers.getLogger(GceDiscoverTests.class);
try {
Path[] files = FileSystemUtils.files(logDir);
StringBuilder builder = new StringBuilder("{\"id\": \"dummy\",\"items\":[");
diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMockUtils.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMockUtils.java
index 88a6fbd9e9..0100955453 100644
--- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMockUtils.java
+++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMockUtils.java
@@ -26,9 +26,9 @@ import com.google.api.client.json.Json;
import com.google.api.client.testing.http.MockHttpTransport;
import com.google.api.client.testing.http.MockLowLevelHttpRequest;
import com.google.api.client.testing.http.MockLowLevelHttpResponse;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.Streams;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.util.Callback;
@@ -37,7 +37,7 @@ import java.io.InputStream;
import java.net.URL;
public class GceMockUtils {
- protected static final ESLogger logger = Loggers.getLogger(GceMockUtils.class);
+ protected static final Logger logger = Loggers.getLogger(GceMockUtils.class);
public static final String GCE_METADATA_URL = "http://metadata.google.internal/computeMetadata/v1/instance";
diff --git a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java
index a7d8228397..1031f0d103 100644
--- a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java
+++ b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java
@@ -19,6 +19,7 @@
package org.elasticsearch.mapper.attachments;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.Query;
@@ -26,7 +27,6 @@ import org.apache.tika.language.LanguageIdentifier;
import org.apache.tika.metadata.Metadata;
import org.elasticsearch.Version;
import org.elasticsearch.common.collect.Iterators;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
@@ -75,7 +75,7 @@ import static org.elasticsearch.index.mapper.TypeParsers.parseMultiField;
*/
public class AttachmentMapper extends FieldMapper {
- private static ESLogger logger = ESLoggerFactory.getLogger("mapper.attachment");
+ private static Logger logger = ESLoggerFactory.getLogger("mapper.attachment");
public static final Setting<Boolean> INDEX_ATTACHMENT_IGNORE_ERRORS_SETTING =
Setting.boolSetting("index.mapping.attachment.ignore_errors", true, Property.IndexScope);
public static final Setting<Boolean> INDEX_ATTACHMENT_DETECT_LANGUAGE_SETTING =
@@ -653,4 +653,5 @@ public class AttachmentMapper extends FieldMapper {
protected String contentType() {
return CONTENT_TYPE;
}
+
}
diff --git a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/MapperAttachmentsPlugin.java b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/MapperAttachmentsPlugin.java
index 6cf957f05c..36b12ec0f4 100644
--- a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/MapperAttachmentsPlugin.java
+++ b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/MapperAttachmentsPlugin.java
@@ -19,22 +19,22 @@
package org.elasticsearch.mapper.attachments;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
public class MapperAttachmentsPlugin extends Plugin implements MapperPlugin {
- private static ESLogger logger = ESLoggerFactory.getLogger("mapper.attachment");
+ private static Logger logger = ESLoggerFactory.getLogger("mapper.attachment");
private static DeprecationLogger deprecationLogger = new DeprecationLogger(logger);
@Override
@@ -50,4 +50,5 @@ public class MapperAttachmentsPlugin extends Plugin implements MapperPlugin {
public Map<String, Mapper.TypeParser> getMappers() {
return Collections.singletonMap("attachment", new AttachmentMapper.TypeParser());
}
+
}
diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobContainer.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobContainer.java
index 8cb7b9085e..05bb911476 100644
--- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobContainer.java
+++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobContainer.java
@@ -20,12 +20,12 @@
package org.elasticsearch.cloud.azure.blobstore;
import com.microsoft.azure.storage.StorageException;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.support.AbstractBlobContainer;
import org.elasticsearch.common.io.Streams;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.repositories.RepositoryException;
@@ -43,7 +43,7 @@ import java.util.Map;
*/
public class AzureBlobContainer extends AbstractBlobContainer {
- protected final ESLogger logger = Loggers.getLogger(AzureBlobContainer.class);
+ protected final Logger logger = Loggers.getLogger(AzureBlobContainer.class);
protected final AzureBlobStore blobStore;
protected final String keyPath;
diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java
index 4e5dfb3efd..b7206d3659 100644
--- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java
+++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java
@@ -27,6 +27,7 @@ import com.microsoft.azure.storage.blob.CloudBlobClient;
import com.microsoft.azure.storage.blob.CloudBlobContainer;
import com.microsoft.azure.storage.blob.CloudBlockBlob;
import com.microsoft.azure.storage.blob.ListBlobItem;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.support.PlainBlobMetaData;
@@ -172,7 +173,7 @@ public class AzureStorageServiceImpl extends AbstractComponent implements AzureS
logger.trace("creating container [{}]", container);
blobContainer.createIfNotExists();
} catch (IllegalArgumentException e) {
- logger.trace("fails creating container [{}]", e, container);
+ logger.trace(() -> new ParameterizedMessage("fails creating container [{}]", container), e);
throw new RepositoryException(container, e.getMessage());
}
}
diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java b/plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java
index fcd7bf96b2..5b938fce18 100644
--- a/plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java
+++ b/plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java
@@ -19,15 +19,8 @@
package org.elasticsearch.plugin.repository.azure;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-
import org.elasticsearch.cloud.azure.storage.AzureStorageService;
import org.elasticsearch.cloud.azure.storage.AzureStorageServiceImpl;
-import org.elasticsearch.common.logging.ESLogger;
-import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
@@ -36,6 +29,11 @@ import org.elasticsearch.plugins.RepositoryPlugin;
import org.elasticsearch.repositories.Repository;
import org.elasticsearch.repositories.azure.AzureRepository;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
/**
* A plugin to add a repository type that writes to and from the Azure cloud storage service.
*/
diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle
index 544e040773..8cfb5043b6 100644
--- a/plugins/repository-hdfs/build.gradle
+++ b/plugins/repository-hdfs/build.gradle
@@ -362,4 +362,9 @@ thirdPartyAudit.excludes = [
// optional dependencies of slf4j-api
'org.slf4j.impl.StaticMDCBinder',
'org.slf4j.impl.StaticMarkerBinder',
+
+ 'org.apache.log4j.AppenderSkeleton',
+ 'org.apache.log4j.AsyncAppender',
+ 'org.apache.log4j.helpers.ISO8601DateFormat',
+ 'org.apache.log4j.spi.ThrowableInformation'
]
diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsSigner.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsSigner.java
index 5c02671e5e..c1c36031b5 100644
--- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsSigner.java
+++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsSigner.java
@@ -21,12 +21,12 @@ package org.elasticsearch.cloud.aws;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.auth.SignerFactory;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
public class AwsSigner {
- private static final ESLogger logger = Loggers.getLogger(AwsSigner.class);
+ private static final Logger logger = Loggers.getLogger(AwsSigner.class);
private AwsSigner() {
diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java
index c4d8a63adc..a9091788f2 100644
--- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java
+++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java
@@ -29,11 +29,11 @@ import com.amazonaws.internal.StaticCredentialsProvider;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.S3ClientOptions;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.repositories.s3.S3Repository;
@@ -85,7 +85,7 @@ public class InternalAwsS3Service extends AbstractLifecycleComponent implements
return client;
}
- public static ClientConfiguration buildConfiguration(ESLogger logger, Settings settings, Protocol protocol, Integer maxRetries,
+ public static ClientConfiguration buildConfiguration(Logger logger, Settings settings, Protocol protocol, Integer maxRetries,
String endpoint, boolean useThrottleRetries) {
ClientConfiguration clientConfiguration = new ClientConfiguration();
// the response metadata cache is only there for diagnostics purposes,
@@ -122,7 +122,7 @@ public class InternalAwsS3Service extends AbstractLifecycleComponent implements
return clientConfiguration;
}
- public static AWSCredentialsProvider buildCredentials(ESLogger logger, Settings settings, Settings repositorySettings) {
+ public static AWSCredentialsProvider buildCredentials(Logger logger, Settings settings, Settings repositorySettings) {
AWSCredentialsProvider credentials;
String key = getValue(repositorySettings, settings,
S3Repository.Repository.KEY_SETTING, S3Repository.Repositories.KEY_SETTING);
@@ -140,7 +140,7 @@ public class InternalAwsS3Service extends AbstractLifecycleComponent implements
return credentials;
}
- protected static String findEndpoint(ESLogger logger, Settings settings, String endpoint, String region) {
+ protected static String findEndpoint(Logger logger, Settings settings, String endpoint, String region) {
if (Strings.isNullOrEmpty(endpoint)) {
logger.debug("no repository level endpoint has been defined. Trying to guess from repository region [{}]", region);
if (!region.isEmpty()) {
diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/DefaultS3OutputStream.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/DefaultS3OutputStream.java
index dd278a9231..ef9b25b2d1 100644
--- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/DefaultS3OutputStream.java
+++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/DefaultS3OutputStream.java
@@ -31,7 +31,7 @@ import com.amazonaws.services.s3.model.PutObjectResult;
import com.amazonaws.services.s3.model.UploadPartRequest;
import com.amazonaws.services.s3.model.UploadPartResult;
import com.amazonaws.util.Base64;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
@@ -64,7 +64,7 @@ import java.util.List;
public class DefaultS3OutputStream extends S3OutputStream {
private static final ByteSizeValue MULTIPART_MAX_SIZE = new ByteSizeValue(5, ByteSizeUnit.GB);
- private static final ESLogger logger = Loggers.getLogger("cloud.aws");
+ private static final Logger logger = Loggers.getLogger("cloud.aws");
/**
* Multipart Upload API data
*/
diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/TestAmazonS3.java b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/TestAmazonS3.java
index 31682ee4de..37087db386 100644
--- a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/TestAmazonS3.java
+++ b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/TestAmazonS3.java
@@ -28,8 +28,8 @@ import com.amazonaws.services.s3.model.PutObjectResult;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.s3.model.UploadPartRequest;
import com.amazonaws.services.s3.model.UploadPartResult;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
@@ -49,7 +49,7 @@ import static com.carrotsearch.randomizedtesting.RandomizedTest.randomDouble;
*/
public class TestAmazonS3 extends AmazonS3Wrapper {
- protected final ESLogger logger = Loggers.getLogger(getClass());
+ protected final Logger logger = Loggers.getLogger(getClass());
private double writeFailureRate = 0.0;
private double readFailureRate = 0.0;
diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java
index d9d15ce0b3..f3289beb2c 100644
--- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java
+++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java
@@ -24,6 +24,7 @@ import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.DeleteObjectsRequest;
import com.amazonaws.services.s3.model.ObjectListing;
import com.amazonaws.services.s3.model.S3ObjectSummary;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse;
@@ -511,7 +512,7 @@ public abstract class AbstractS3SnapshotRestoreTest extends AbstractAwsTestCase
client.deleteObjects(multiObjectDeleteRequest);
}
} catch (Exception ex) {
- logger.warn("Failed to delete S3 repository [{}] in [{}]", ex, bucketName, region);
+ logger.warn(new ParameterizedMessage("Failed to delete S3 repository [{}] in [{}]", bucketName, region), ex);
}
}
}
diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerConfigurationTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerConfigurationTests.java
new file mode 100644
index 0000000000..4f32d83e7b
--- /dev/null
+++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerConfigurationTests.java
@@ -0,0 +1,126 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.logging;
+
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.Appender;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.config.Configuration;
+import org.apache.logging.log4j.core.config.Configurator;
+import org.apache.logging.log4j.core.config.LoggerConfig;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.IOException;
+import java.nio.file.Path;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.Matchers.notNullValue;
+
+public class EvilLoggerConfigurationTests extends ESTestCase {
+
+ public void testResolveMultipleConfigs() throws Exception {
+ final Level level = ESLoggerFactory.getLogger("test").getLevel();
+ try {
+ final Path configDir = getDataPath("config");
+ final Settings settings = Settings.builder()
+ .put(Environment.PATH_CONF_SETTING.getKey(), configDir.toAbsolutePath())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
+ .build();
+ final Environment environment = new Environment(settings);
+ LogConfigurator.configure(environment);
+
+ {
+ final LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
+ final Configuration config = ctx.getConfiguration();
+ final LoggerConfig loggerConfig = config.getLoggerConfig("test");
+ final Appender appender = loggerConfig.getAppenders().get("console");
+ assertThat(appender, notNullValue());
+ }
+
+ {
+ final LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
+ final Configuration config = ctx.getConfiguration();
+ final LoggerConfig loggerConfig = config.getLoggerConfig("second");
+ final Appender appender = loggerConfig.getAppenders().get("console2");
+ assertThat(appender, notNullValue());
+ }
+
+ {
+ final LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
+ final Configuration config = ctx.getConfiguration();
+ final LoggerConfig loggerConfig = config.getLoggerConfig("third");
+ final Appender appender = loggerConfig.getAppenders().get("console3");
+ assertThat(appender, notNullValue());
+ }
+ } finally {
+ Configurator.setLevel("test", level);
+ }
+ }
+
+ public void testDefaults() throws IOException {
+ final Path configDir = getDataPath("config");
+ final String level = randomFrom(Level.values()).toString();
+ final Settings settings = Settings.builder()
+ .put(Environment.PATH_CONF_SETTING.getKey(), configDir.toAbsolutePath())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
+ .put("logger.level", level)
+ .build();
+ final Environment environment = new Environment(settings);
+ LogConfigurator.configure(environment);
+
+ final String loggerName;
+ if (LogManager.getContext(false).hasLogger("org.elasticsearch.test", new PrefixMessageFactory())) {
+ loggerName = "org.elasticsearch.test";
+ } else {
+ assertTrue(LogManager.getContext(false).hasLogger("test", new PrefixMessageFactory()));
+ loggerName = "test";
+ }
+ final Logger logger = ESLoggerFactory.getLogger(loggerName);
+ assertThat(logger.getLevel().toString(), equalTo(level));
+ }
+
+ // tests that custom settings are not overwritten by settings in the config file
+ public void testResolveOrder() throws Exception {
+ final Path configDir = getDataPath("config");
+ final Settings settings = Settings.builder()
+ .put(Environment.PATH_CONF_SETTING.getKey(), configDir.toAbsolutePath())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
+ .put("logger.test_resolve_order", "TRACE")
+ .build();
+ final Environment environment = new Environment(settings);
+ LogConfigurator.configure(environment);
+
+ // args should overwrite whatever is in the config
+ final String loggerName;
+ if (LogManager.getContext(false).hasLogger("org.elasticsearch.test_resolve_order", new PrefixMessageFactory())) {
+ loggerName = "org.elasticsearch.test_resolve_order";
+ } else {
+ assertTrue(LogManager.getContext(false).hasLogger("test_resolve_order", new PrefixMessageFactory()));
+ loggerName = "test_resolve_order";
+ }
+ final Logger logger = ESLoggerFactory.getLogger(loggerName);
+ assertTrue(logger.isTraceEnabled());
+ }
+
+}
diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java
new file mode 100644
index 0000000000..ab4796bddc
--- /dev/null
+++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.logging;
+
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.common.io.PathUtils;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class EvilLoggerTests extends ESTestCase {
+
+ private Logger testLogger;
+ private DeprecationLogger deprecationLogger;
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+
+ final Path configDir = getDataPath("config");
+ // need to set custom path.conf so we can use a custom log4j2.properties file for the test
+ final Settings settings = Settings.builder()
+ .put(Environment.PATH_CONF_SETTING.getKey(), configDir.toAbsolutePath())
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
+ .build();
+ final Environment environment = new Environment(settings);
+ LogConfigurator.configure(environment);
+
+ testLogger = ESLoggerFactory.getLogger("test");
+ deprecationLogger = ESLoggerFactory.getDeprecationLogger("test");
+ }
+
+ public void testLocationInfoTest() throws IOException {
+ testLogger.error("This is an error message");
+ testLogger.warn("This is a warning message");
+ testLogger.info("This is an info message");
+ testLogger.debug("This is a debug message");
+ testLogger.trace("This is a trace message");
+ final String path = System.getProperty("es.logs") + ".log";
+ final List<String> events = Files.readAllLines(PathUtils.get(path));
+ assertThat(events.size(), equalTo(5));
+ final String location = "org.elasticsearch.common.logging.EvilLoggerTests.testLocationInfoTest";
+ assertLogLine(events.get(0), Level.ERROR, location, "This is an error message");
+ assertLogLine(events.get(1), Level.WARN, location, "This is a warning message");
+ assertLogLine(events.get(2), Level.INFO, location, "This is an info message");
+ assertLogLine(events.get(3), Level.DEBUG, location, "This is a debug message");
+ assertLogLine(events.get(4), Level.TRACE, location, "This is a trace message");
+ }
+
+ private void assertLogLine(final String logLine, final Level level, final String location, final String message) {
+ final Matcher matcher = Pattern.compile("\\[(.*)\\]\\[(.*)\\(.*\\)\\] \\[\\] (.*)").matcher(logLine);
+ assertTrue(logLine, matcher.matches());
+ assertThat(matcher.group(1), equalTo(level.toString()));
+ assertThat(matcher.group(2), equalTo(location));
+ assertThat(matcher.group(3), equalTo(message));
+ }
+
+ public void testDeprecationLogger() throws IOException {
+ deprecationLogger.deprecated("This is a deprecation message");
+ final String deprecationPath = System.getProperty("es.logs") + "_deprecation.log";
+ final List<String> deprecationEvents = Files.readAllLines(PathUtils.get(deprecationPath));
+ assertThat(deprecationEvents.size(), equalTo(1));
+ assertLogLine(
+ deprecationEvents.get(0),
+ Level.WARN,
+ "org.elasticsearch.common.logging.DeprecationLogger.deprecated",
+ "This is a deprecation message");
+ }
+
+}
diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/log4j2.properties
new file mode 100644
index 0000000000..be2e1339ad
--- /dev/null
+++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/log4j2.properties
@@ -0,0 +1,32 @@
+status = error
+
+appender.console.type = Console
+appender.console.name = console
+appender.console.layout.type = PatternLayout
+appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %m%n
+
+appender.file.type = File
+appender.file.name = file
+appender.file.fileName = ${sys:es.logs}.log
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = [%p][%l] %m%n
+
+rootLogger.level = info
+rootLogger.appenderRef.console.ref = console
+rootLogger.appenderRef.file.ref = file
+
+logger.test.name = test
+logger.test.level = trace
+logger.test.appenderRef.console.ref = console
+logger.test.appenderRef.file.ref = file
+
+appender.deprecation_file.type = File
+appender.deprecation_file.name = deprecation_file
+appender.deprecation_file.fileName = ${sys:es.logs}_deprecation.log
+appender.deprecation_file.layout.type = PatternLayout
+appender.deprecation_file.layout.pattern = [%p][%l] %m%n
+
+logger.deprecation.name = deprecation
+logger.deprecation.level = warn
+logger.deprecation.appenderRef.deprecation_file.ref = deprecation_file
+logger.deprecation.additivity = false
diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/second/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/second/log4j2.properties
new file mode 100644
index 0000000000..2ade4c896c
--- /dev/null
+++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/second/log4j2.properties
@@ -0,0 +1,8 @@
+appender.console2.type = Console
+appender.console2.name = console2
+appender.console2.layout.type = PatternLayout
+appender.console2.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %m%n
+
+logger.second.name = second
+logger.second.level = debug
+logger.second.appenderRef.console2.ref = console2
diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/third/log4j2.properties b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/third/log4j2.properties
new file mode 100644
index 0000000000..8699f574d5
--- /dev/null
+++ b/qa/evil-tests/src/test/resources/org/elasticsearch/common/logging/config/third/log4j2.properties
@@ -0,0 +1,8 @@
+appender.console3.type = Console
+appender.console3.name = console3
+appender.console3.layout.type = PatternLayout
+appender.console3.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %m%n
+
+logger.third.name = third
+logger.third.level = debug
+logger.third.appenderRef.console3.ref = console3
diff --git a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java
index 645cc8382d..d884b029b5 100644
--- a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java
+++ b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java
@@ -19,11 +19,11 @@
package org.elasticsearch.smoketest;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
@@ -71,7 +71,7 @@ public abstract class ESSmokeClientTestCase extends LuceneTestCase {
*/
public static final String TESTS_CLUSTER = "tests.cluster";
- protected static final ESLogger logger = ESLoggerFactory.getLogger(ESSmokeClientTestCase.class.getName());
+ protected static final Logger logger = ESLoggerFactory.getLogger(ESSmokeClientTestCase.class.getName());
private static final AtomicInteger counter = new AtomicInteger();
private static Client client;
diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java
index fe624297e7..4315c9ea2d 100644
--- a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java
+++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java
@@ -20,13 +20,12 @@
package org.elasticsearch.bootstrap;
import com.carrotsearch.randomizedtesting.RandomizedRunner;
-import org.apache.log4j.Java9Hack;
-import org.apache.lucene.util.Constants;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.SecureSM;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
+import org.elasticsearch.common.logging.LogConfigurator;
import org.elasticsearch.plugins.PluginInfo;
import org.junit.Assert;
@@ -66,6 +65,8 @@ public class BootstrapForTesting {
// without making things complex???
static {
+ LogConfigurator.init();
+
// make sure java.io.tmpdir exists always (in case code uses it in a static initializer)
Path javaTmpDir = PathUtils.get(Objects.requireNonNull(System.getProperty("java.io.tmpdir"),
"please set ${java.io.tmpdir} in pom.xml"));
@@ -91,10 +92,6 @@ public class BootstrapForTesting {
throw new RuntimeException("found jar hell in test classpath", e);
}
- if (Constants.JRE_IS_MINIMUM_JAVA9) {
- Java9Hack.fixLog4j();
- }
-
// install security manager if requested
if (systemPropertyAsBoolean("tests.security.manager", true)) {
try {
diff --git a/test/framework/src/main/java/org/elasticsearch/common/logging/TestLoggers.java b/test/framework/src/main/java/org/elasticsearch/common/logging/TestLoggers.java
new file mode 100644
index 0000000000..3b6cf7e2c9
--- /dev/null
+++ b/test/framework/src/main/java/org/elasticsearch/common/logging/TestLoggers.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.logging;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.Appender;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.config.AppenderRef;
+import org.apache.logging.log4j.core.config.Configuration;
+import org.apache.logging.log4j.core.config.LoggerConfig;
+
+public class TestLoggers {
+
+ public static void addAppender(final Logger logger, final Appender appender) {
+ final LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
+ final Configuration config = ctx.getConfiguration();
+ config.addAppender(appender);
+ LoggerConfig loggerConfig = config.getLoggerConfig(logger.getName());
+ if (!logger.getName().equals(loggerConfig.getName())) {
+ loggerConfig = new LoggerConfig(logger.getName(), logger.getLevel(), true);
+ config.addLogger(logger.getName(), loggerConfig);
+ }
+ loggerConfig.addAppender(appender, null, null);
+ ctx.updateLoggers();
+ }
+
+ public static void removeAppender(final Logger logger, final Appender appender) {
+ final LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
+ final Configuration config = ctx.getConfiguration();
+ LoggerConfig loggerConfig = config.getLoggerConfig(logger.getName());
+ if (!logger.getName().equals(loggerConfig.getName())) {
+ loggerConfig = new LoggerConfig(logger.getName(), logger.getLevel(), true);
+ config.addLogger(logger.getName(), loggerConfig);
+ }
+ loggerConfig.removeAppender(appender.getName());
+ ctx.updateLoggers();
+ }
+
+}
diff --git a/test/framework/src/main/java/org/elasticsearch/index/store/EsBaseDirectoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/store/EsBaseDirectoryTestCase.java
index 638d24e7f9..d6a21f3fa5 100644
--- a/test/framework/src/main/java/org/elasticsearch/index/store/EsBaseDirectoryTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/index/store/EsBaseDirectoryTestCase.java
@@ -21,11 +21,11 @@ package org.elasticsearch.index.store;
import com.carrotsearch.randomizedtesting.annotations.Listeners;
import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.store.BaseDirectoryTestCase;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TimeUnits;
import org.elasticsearch.bootstrap.BootstrapForTesting;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.test.junit.listeners.ReproduceInfoPrinter;
@@ -43,6 +43,6 @@ public abstract class EsBaseDirectoryTestCase extends BaseDirectoryTestCase {
BootstrapForTesting.ensureInitialized();
}
- protected final ESLogger logger = Loggers.getLogger(getClass());
+ protected final Logger logger = Loggers.getLogger(getClass());
}
diff --git a/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java b/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java
index 4440fbe117..8ced66060a 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java
@@ -20,12 +20,13 @@ package org.elasticsearch.test;/*
import com.carrotsearch.randomizedtesting.RandomizedTest;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.client.Client;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
@@ -45,7 +46,7 @@ import static org.hamcrest.Matchers.equalTo;
public class BackgroundIndexer implements AutoCloseable {
- private final ESLogger logger = Loggers.getLogger(getClass());
+ private final Logger logger = Loggers.getLogger(getClass());
final Thread[] writers;
final CountDownLatch stopLatch;
@@ -171,7 +172,7 @@ public class BackgroundIndexer implements AutoCloseable {
logger.info("**** done indexing thread {} stop: {} numDocsIndexed: {}", indexerId, stop.get(), indexCounter.get());
} catch (Exception e) {
failures.add(e);
- logger.warn("**** failed indexing thread {} on doc id {}", e, indexerId, id);
+ logger.warn(new ParameterizedMessage("**** failed indexing thread {} on doc id {}", indexerId, id), e);
} finally {
stopLatch.countDown();
}
diff --git a/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java
index 916adc142c..df306dfc9e 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java
@@ -19,13 +19,13 @@
package org.elasticsearch.test;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import java.io.IOException;
@@ -44,7 +44,7 @@ import static org.junit.Assert.assertTrue;
public final class CorruptionUtils {
- private static ESLogger logger = ESLoggerFactory.getLogger("test");
+ private static Logger logger = ESLoggerFactory.getLogger("test");
private CorruptionUtils() {}
/**
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
index 784a0a4bed..b0575d7481 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
@@ -29,7 +29,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import com.carrotsearch.randomizedtesting.rules.TestRuleAdapter;
-
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.uninverting.UninvertingReader;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
@@ -43,8 +43,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.io.PathUtilsForTesting;
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.MockBigArrays;
@@ -63,8 +61,8 @@ import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.analysis.AnalysisModule;
-import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.AnalysisPlugin;
+import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
@@ -130,10 +128,16 @@ import static org.hamcrest.Matchers.equalTo;
public abstract class ESTestCase extends LuceneTestCase {
static {
+ System.setProperty("log4j.shutdownHookEnabled", "false");
+ // we can not shutdown logging when tests are running or the next test that runs within the
+ // same JVM will try to initialize logging after a security manager has been installed and
+ // this will fail
+ System.setProperty("es.log4j.shutdownEnabled", "false");
+ System.setProperty("log4j2.disable.jmx", "true");
BootstrapForTesting.ensureInitialized();
}
- protected final ESLogger logger = Loggers.getLogger(getClass());
+ protected final Logger logger = Loggers.getLogger(getClass());
// -----------------------------------------------------------------
// Suite and test case setup/cleanup.
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java
index cde4e5f6ac..8725ed815a 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.test;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.Constants;
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
@@ -27,7 +28,6 @@ import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
@@ -62,7 +62,7 @@ final class ExternalNode implements Closeable {
private final String clusterName;
private TransportClient client;
- private final ESLogger logger = Loggers.getLogger(getClass());
+ private final Logger logger = Loggers.getLogger(getClass());
private Settings externalNodeSettings;
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java
index 6f6ac8488d..adab3b7045 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java
@@ -19,6 +19,7 @@
package org.elasticsearch.test;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
@@ -27,7 +28,6 @@ import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.breaker.CircuitBreaker;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
@@ -56,7 +56,7 @@ import static org.junit.Assert.assertThat;
*/
public final class ExternalTestCluster extends TestCluster {
- private static final ESLogger logger = Loggers.getLogger(ExternalTestCluster.class);
+ private static final Logger logger = Loggers.getLogger(ExternalTestCluster.class);
private static final AtomicInteger counter = new AtomicInteger();
public static final String EXTERNAL_CLUSTER_PREFIX = "external_";
diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java
index 1dd1c5d9b6..8a3274ec72 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java
@@ -24,6 +24,7 @@ import com.carrotsearch.randomizedtesting.SysGlobals;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.store.StoreRateLimiting;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException;
@@ -51,7 +52,6 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.lease.Releasables;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
@@ -147,7 +147,7 @@ import static org.junit.Assert.fail;
*/
public final class InternalTestCluster extends TestCluster {
- private final ESLogger logger = Loggers.getLogger(getClass());
+ private final Logger logger = Loggers.getLogger(getClass());
/**
* The number of ports in the range used for this JVM
diff --git a/test/framework/src/main/java/org/elasticsearch/test/OldIndexUtils.java b/test/framework/src/main/java/org/elasticsearch/test/OldIndexUtils.java
index e1967256dd..e93f355d64 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/OldIndexUtils.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/OldIndexUtils.java
@@ -19,6 +19,7 @@
package org.elasticsearch.test;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.util.TestUtil;
import org.elasticsearch.Version;
@@ -31,7 +32,6 @@ import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
import org.elasticsearch.common.io.FileSystemUtils;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.IndexFolderUpgrader;
@@ -86,7 +86,7 @@ public class OldIndexUtils {
IndexFolderUpgrader.upgradeIndicesIfNeeded(Settings.EMPTY, nodeEnvironment);
}
- public static void loadIndex(String indexName, String indexFile, Path unzipDir, Path bwcPath, ESLogger logger, Path... paths) throws
+ public static void loadIndex(String indexName, String indexFile, Path unzipDir, Path bwcPath, Logger logger, Path... paths) throws
Exception {
Path unzipDataDir = unzipDir.resolve("data");
@@ -128,7 +128,7 @@ public class OldIndexUtils {
}
// randomly distribute the files from src over dests paths
- public static void copyIndex(final ESLogger logger, final Path src, final String indexName, final Path... dests) throws IOException {
+ public static void copyIndex(final Logger logger, final Path src, final String indexName, final Path... dests) throws IOException {
Path destinationDataPath = dests[randomInt(dests.length - 1)];
for (Path dest : dests) {
Path indexDir = dest.resolve(indexName);
@@ -194,7 +194,7 @@ public class OldIndexUtils {
}
public static boolean isUpgraded(Client client, String index) throws Exception {
- ESLogger logger = Loggers.getLogger(OldIndexUtils.class);
+ Logger logger = Loggers.getLogger(OldIndexUtils.class);
int toUpgrade = 0;
for (IndexUpgradeStatus status : getUpgradeStatus(client, index)) {
logger.info("Index: {}, total: {}, toUpgrade: {}", status.getIndex(), status.getTotalBytes(), status.getToUpgradeBytes());
diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java
index 2629f655c9..124960fe92 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java
@@ -20,12 +20,12 @@
package org.elasticsearch.test;
import com.carrotsearch.hppc.ObjectArrayList;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexTemplateMetaData;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.indices.IndexTemplateMissingException;
@@ -45,7 +45,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcke
*/
public abstract class TestCluster implements Closeable {
- protected final ESLogger logger = Loggers.getLogger(getClass());
+ protected final Logger logger = Loggers.getLogger(getClass());
private final long seed;
protected Random random;
diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruption.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruption.java
index 40839f428e..cba70a4153 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruption.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruption.java
@@ -20,7 +20,7 @@
package org.elasticsearch.test.disruption;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.set.Sets;
@@ -45,7 +45,7 @@ import static org.junit.Assert.assertFalse;
*/
public class NetworkDisruption implements ServiceDisruptionScheme {
- private final ESLogger logger = Loggers.getLogger(NetworkDisruption.class);
+ private final Logger logger = Loggers.getLogger(NetworkDisruption.class);
private final DisruptedLinks disruptedLinks;
private final NetworkLinkDisruptionType networkLinkDisruptionType;
diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkPartition.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkPartition.java
new file mode 100644
index 0000000000..b6c379a0b6
--- /dev/null
+++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkPartition.java
@@ -0,0 +1,204 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.test.disruption;
+
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.test.InternalTestCluster;
+import org.elasticsearch.test.transport.MockTransportService;
+import org.elasticsearch.transport.TransportService;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Random;
+import java.util.Set;
+
+import static org.junit.Assert.assertFalse;
+
+public abstract class NetworkPartition implements ServiceDisruptionScheme {
+
+ protected final Logger logger = Loggers.getLogger(getClass());
+
+ final Set<String> nodesSideOne;
+ final Set<String> nodesSideTwo;
+ volatile boolean autoExpand;
+ protected final Random random;
+ protected volatile InternalTestCluster cluster;
+ protected volatile boolean activeDisruption = false;
+
+
+ public NetworkPartition(Random random) {
+ this.random = new Random(random.nextLong());
+ nodesSideOne = new HashSet<>();
+ nodesSideTwo = new HashSet<>();
+ autoExpand = true;
+ }
+
+ public NetworkPartition(String node1, String node2, Random random) {
+ this(random);
+ nodesSideOne.add(node1);
+ nodesSideTwo.add(node2);
+ autoExpand = false;
+ }
+
+ public NetworkPartition(Set<String> nodesSideOne, Set<String> nodesSideTwo, Random random) {
+ this(random);
+ this.nodesSideOne.addAll(nodesSideOne);
+ this.nodesSideTwo.addAll(nodesSideTwo);
+ autoExpand = false;
+ }
+
+
+ public Collection<String> getNodesSideOne() {
+ return Collections.unmodifiableCollection(nodesSideOne);
+ }
+
+ public Collection<String> getNodesSideTwo() {
+ return Collections.unmodifiableCollection(nodesSideTwo);
+ }
+
+ public Collection<String> getMajoritySide() {
+ if (nodesSideOne.size() >= nodesSideTwo.size()) {
+ return getNodesSideOne();
+ } else {
+ return getNodesSideTwo();
+ }
+ }
+
+ public Collection<String> getMinoritySide() {
+ if (nodesSideOne.size() >= nodesSideTwo.size()) {
+ return getNodesSideTwo();
+ } else {
+ return getNodesSideOne();
+ }
+ }
+
+ @Override
+ public void applyToCluster(InternalTestCluster cluster) {
+ this.cluster = cluster;
+ if (autoExpand) {
+ for (String node : cluster.getNodeNames()) {
+ applyToNode(node, cluster);
+ }
+ }
+ }
+
+ @Override
+ public void removeFromCluster(InternalTestCluster cluster) {
+ stopDisrupting();
+ }
+
+ @Override
+ public void removeAndEnsureHealthy(InternalTestCluster cluster) {
+ removeFromCluster(cluster);
+ ensureNodeCount(cluster);
+ }
+
+ protected void ensureNodeCount(InternalTestCluster cluster) {
+ assertFalse("cluster failed to form after disruption was healed", cluster.client().admin().cluster().prepareHealth()
+ .setWaitForNodes("" + cluster.size())
+ .setWaitForRelocatingShards(0)
+ .get().isTimedOut());
+ }
+
+ @Override
+ public synchronized void applyToNode(String node, InternalTestCluster cluster) {
+ if (!autoExpand || nodesSideOne.contains(node) || nodesSideTwo.contains(node)) {
+ return;
+ }
+ if (nodesSideOne.isEmpty()) {
+ nodesSideOne.add(node);
+ } else if (nodesSideTwo.isEmpty()) {
+ nodesSideTwo.add(node);
+ } else if (random.nextBoolean()) {
+ nodesSideOne.add(node);
+ } else {
+ nodesSideTwo.add(node);
+ }
+ }
+
+ @Override
+ public synchronized void removeFromNode(String node, InternalTestCluster cluster) {
+ MockTransportService transportService = (MockTransportService) cluster.getInstance(TransportService.class, node);
+ Set<String> otherSideNodes;
+ if (nodesSideOne.contains(node)) {
+ otherSideNodes = nodesSideTwo;
+ nodesSideOne.remove(node);
+ } else if (nodesSideTwo.contains(node)) {
+ otherSideNodes = nodesSideOne;
+ nodesSideTwo.remove(node);
+ } else {
+ return;
+ }
+ for (String node2 : otherSideNodes) {
+ MockTransportService transportService2 = (MockTransportService) cluster.getInstance(TransportService.class, node2);
+ removeDisruption(transportService, transportService2);
+ }
+ }
+
+ @Override
+ public synchronized void testClusterClosed() {
+
+ }
+
+ protected abstract String getPartitionDescription();
+
+ @Override
+ public synchronized void startDisrupting() {
+ if (nodesSideOne.size() == 0 || nodesSideTwo.size() == 0) {
+ return;
+ }
+ logger.info("nodes {} will be partitioned from {}. partition type [{}]", nodesSideOne, nodesSideTwo, getPartitionDescription());
+ activeDisruption = true;
+ for (String node1 : nodesSideOne) {
+ MockTransportService transportService1 = (MockTransportService) cluster.getInstance(TransportService.class, node1);
+ for (String node2 : nodesSideTwo) {
+ MockTransportService transportService2 = (MockTransportService) cluster.getInstance(TransportService.class, node2);
+ applyDisruption(transportService1, transportService2);
+ }
+ }
+ }
+
+
+ @Override
+ public synchronized void stopDisrupting() {
+ if (nodesSideOne.size() == 0 || nodesSideTwo.size() == 0 || !activeDisruption) {
+ return;
+ }
+ logger.info("restoring partition between nodes {} & nodes {}", nodesSideOne, nodesSideTwo);
+ for (String node1 : nodesSideOne) {
+ MockTransportService transportService1 = (MockTransportService) cluster.getInstance(TransportService.class, node1);
+ for (String node2 : nodesSideTwo) {
+ MockTransportService transportService2 = (MockTransportService) cluster.getInstance(TransportService.class, node2);
+ removeDisruption(transportService1, transportService2);
+ }
+ }
+ activeDisruption = false;
+ }
+
+ abstract void applyDisruption(MockTransportService transportService1, MockTransportService transportService2);
+
+
+ protected void removeDisruption(MockTransportService transportService1, MockTransportService transportService2) {
+ transportService1.clearRule(transportService2);
+ transportService2.clearRule(transportService1);
+ }
+
+}
diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/SingleNodeDisruption.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/SingleNodeDisruption.java
index f74280c014..0a9de5a913 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/disruption/SingleNodeDisruption.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/SingleNodeDisruption.java
@@ -18,7 +18,7 @@
*/
package org.elasticsearch.test.disruption;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.test.InternalTestCluster;
@@ -28,7 +28,7 @@ import static org.junit.Assert.assertFalse;
public abstract class SingleNodeDisruption implements ServiceDisruptionScheme {
- protected final ESLogger logger = Loggers.getLogger(getClass());
+ protected final Logger logger = Loggers.getLogger(getClass());
protected volatile String disruptedNode;
protected volatile InternalTestCluster cluster;
diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java b/test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java
index a0f027bcbd..d977a21543 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java
@@ -19,8 +19,8 @@
package org.elasticsearch.test.engine;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.search.IndexSearcher;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.shard.ShardId;
@@ -35,12 +35,12 @@ class AssertingSearcher extends Engine.Searcher {
private RuntimeException firstReleaseStack;
private final Object lock = new Object();
private final int initialRefCount;
- private final ESLogger logger;
+ private final Logger logger;
private final AtomicBoolean closed = new AtomicBoolean(false);
AssertingSearcher(IndexSearcher indexSearcher, final Engine.Searcher wrappedSearcher,
ShardId shardId,
- ESLogger logger) {
+ Logger logger) {
super(wrappedSearcher.source(), indexSearcher);
// we only use the given index searcher here instead of the IS of the wrapped searcher. the IS might be a wrapped searcher
// with a wrapped reader.
diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java
index 304e304749..fbc4352b1e 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.test.engine;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.index.IndexReader;
@@ -28,7 +29,6 @@ import org.apache.lucene.search.QueryCachingPolicy;
import org.apache.lucene.search.SearcherManager;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
@@ -66,7 +66,7 @@ public final class MockEngineSupport {
private final AtomicBoolean closing = new AtomicBoolean(false);
- private final ESLogger logger = Loggers.getLogger(Engine.class);
+ private final Logger logger = Loggers.getLogger(Engine.class);
private final ShardId shardId;
private final QueryCache filterCache;
private final QueryCachingPolicy filterCachingPolicy;
diff --git a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/LoggingListener.java b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/LoggingListener.java
index 8237095b49..e94ae99063 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/LoggingListener.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/LoggingListener.java
@@ -18,7 +18,7 @@
*/
package org.elasticsearch.test.junit.listeners;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.test.junit.annotations.TestLogging;
@@ -68,7 +68,7 @@ public class LoggingListener extends RunListener {
previousLoggingMap = reset(previousLoggingMap);
}
- private static ESLogger resolveLogger(String loggerName) {
+ private static Logger resolveLogger(String loggerName) {
if (loggerName.equalsIgnoreCase("_root")) {
return ESLoggerFactory.getRootLogger();
}
@@ -82,9 +82,9 @@ public class LoggingListener extends RunListener {
}
Map<String, String> previousValues = new HashMap<>();
for (Map.Entry<String, String> entry : map.entrySet()) {
- ESLogger esLogger = resolveLogger(entry.getKey());
- previousValues.put(entry.getKey(), esLogger.getLevel());
- esLogger.setLevel(entry.getValue());
+ Logger logger = resolveLogger(entry.getKey());
+ previousValues.put(entry.getKey(), logger.getLevel().toString());
+ Loggers.setLevel(logger, entry.getValue());
}
return previousValues;
}
@@ -109,8 +109,8 @@ public class LoggingListener extends RunListener {
private Map<String, String> reset(Map<String, String> map) {
if (map != null) {
for (Map.Entry<String, String> previousLogger : map.entrySet()) {
- ESLogger esLogger = resolveLogger(previousLogger.getKey());
- esLogger.setLevel(previousLogger.getValue());
+ Logger logger = resolveLogger(previousLogger.getKey());
+ Loggers.setLevel(logger, previousLogger.getValue());
}
}
return null;
diff --git a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java
index 9243364955..93ad8bb1e9 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java
@@ -19,8 +19,8 @@
package org.elasticsearch.test.junit.listeners;
import com.carrotsearch.randomizedtesting.ReproduceErrorMessageBuilder;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESTestCase;
@@ -47,7 +47,7 @@ import static org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase.REST_TE
*/
public class ReproduceInfoPrinter extends RunListener {
- protected final ESLogger logger = Loggers.getLogger(ESTestCase.class);
+ protected final Logger logger = Loggers.getLogger(ESTestCase.class);
@Override
public void testStarted(Description description) throws Exception {
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java
index 41ae7d8c04..8040c421dc 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java
@@ -19,19 +19,18 @@
package org.elasticsearch.test.rest.yaml;
import com.carrotsearch.randomizedtesting.RandomizedTest;
-
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.message.BasicHeader;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi;
@@ -55,7 +54,7 @@ import java.util.Set;
* REST calls.
*/
public class ClientYamlTestClient {
- private static final ESLogger logger = Loggers.getLogger(ClientYamlTestClient.class);
+ private static final Logger logger = Loggers.getLogger(ClientYamlTestClient.class);
//query_string params that don't need to be declared in the spec, they are supported by default
private static final Set<String> ALWAYS_ACCEPTED_QUERY_STRING_PARAMS = Sets.newHashSet("pretty", "source", "filter_path");
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java
index 43feb238cc..e215114403 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java
@@ -19,9 +19,9 @@
package org.elasticsearch.test.rest.yaml;
import org.apache.http.HttpHost;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.client.RestClient;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec;
@@ -39,7 +39,7 @@ import java.util.Map;
*/
public class ClientYamlTestExecutionContext {
- private static final ESLogger logger = Loggers.getLogger(ClientYamlTestExecutionContext.class);
+ private static final Logger logger = Loggers.getLogger(ClientYamlTestExecutionContext.class);
private final Stash stash = new Stash();
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Stash.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Stash.java
index dff1e59762..d9a4d957a2 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Stash.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Stash.java
@@ -19,8 +19,8 @@
package org.elasticsearch.test.rest.yaml;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -39,7 +39,7 @@ import java.util.regex.Pattern;
public class Stash implements ToXContent {
private static final Pattern EXTENDED_KEY = Pattern.compile("\\$\\{([^}]+)\\}");
- private static final ESLogger logger = Loggers.getLogger(Stash.class);
+ private static final Logger logger = Loggers.getLogger(Stash.class);
public static final Stash EMPTY = new Stash();
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java
index af4a8e4f51..e233e9fab8 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java
@@ -18,9 +18,9 @@
*/
package org.elasticsearch.test.rest.yaml.section;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.XContentLocation;
import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext;
@@ -66,7 +66,7 @@ import static org.junit.Assert.fail;
*/
public class DoSection implements ExecutableSection {
- private static final ESLogger logger = Loggers.getLogger(DoSection.class);
+ private static final Logger logger = Loggers.getLogger(DoSection.class);
private final XContentLocation location;
private String catchParam;
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanAssertion.java
index 1e8f38e7a4..b531f180fd 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanAssertion.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanAssertion.java
@@ -18,7 +18,7 @@
*/
package org.elasticsearch.test.rest.yaml.section;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.XContentLocation;
@@ -34,7 +34,7 @@ import static org.junit.Assert.fail;
*/
public class GreaterThanAssertion extends Assertion {
- private static final ESLogger logger = Loggers.getLogger(GreaterThanAssertion.class);
+ private static final Logger logger = Loggers.getLogger(GreaterThanAssertion.class);
public GreaterThanAssertion(XContentLocation location, String field, Object expectedValue) {
super(location, field, expectedValue);
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanEqualToAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanEqualToAssertion.java
index 9c9936592c..14b1a08a87 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanEqualToAssertion.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanEqualToAssertion.java
@@ -19,7 +19,7 @@
package org.elasticsearch.test.rest.yaml.section;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.XContentLocation;
@@ -35,7 +35,7 @@ import static org.junit.Assert.fail;
*/
public class GreaterThanEqualToAssertion extends Assertion {
- private static final ESLogger logger = Loggers.getLogger(GreaterThanEqualToAssertion.class);
+ private static final Logger logger = Loggers.getLogger(GreaterThanEqualToAssertion.class);
public GreaterThanEqualToAssertion(XContentLocation location, String field, Object expectedValue) {
super(location, field, expectedValue);
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/IsFalseAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/IsFalseAssertion.java
index f679691c9c..a356182ab4 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/IsFalseAssertion.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/IsFalseAssertion.java
@@ -18,7 +18,7 @@
*/
package org.elasticsearch.test.rest.yaml.section;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.XContentLocation;
@@ -35,7 +35,7 @@ import static org.junit.Assert.assertThat;
*/
public class IsFalseAssertion extends Assertion {
- private static final ESLogger logger = Loggers.getLogger(IsFalseAssertion.class);
+ private static final Logger logger = Loggers.getLogger(IsFalseAssertion.class);
public IsFalseAssertion(XContentLocation location, String field) {
super(location, field, false);
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/IsTrueAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/IsTrueAssertion.java
index 12bd8a34ed..76ca0de70d 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/IsTrueAssertion.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/IsTrueAssertion.java
@@ -18,7 +18,7 @@
*/
package org.elasticsearch.test.rest.yaml.section;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.XContentLocation;
@@ -36,7 +36,7 @@ import static org.junit.Assert.assertThat;
*/
public class IsTrueAssertion extends Assertion {
- private static final ESLogger logger = Loggers.getLogger(IsTrueAssertion.class);
+ private static final Logger logger = Loggers.getLogger(IsTrueAssertion.class);
public IsTrueAssertion(XContentLocation location, String field) {
super(location, field, true);
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LengthAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LengthAssertion.java
index 5fd111733d..062b9ecd87 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LengthAssertion.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LengthAssertion.java
@@ -18,7 +18,7 @@
*/
package org.elasticsearch.test.rest.yaml.section;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.XContentLocation;
@@ -36,7 +36,7 @@ import static org.junit.Assert.assertThat;
*/
public class LengthAssertion extends Assertion {
- private static final ESLogger logger = Loggers.getLogger(LengthAssertion.class);
+ private static final Logger logger = Loggers.getLogger(LengthAssertion.class);
public LengthAssertion(XContentLocation location, String field, Object expectedValue) {
super(location, field, expectedValue);
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LessThanAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LessThanAssertion.java
index 2e9ab74448..591bd83fa6 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LessThanAssertion.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LessThanAssertion.java
@@ -18,7 +18,7 @@
*/
package org.elasticsearch.test.rest.yaml.section;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.XContentLocation;
@@ -35,7 +35,7 @@ import static org.junit.Assert.fail;
*/
public class LessThanAssertion extends Assertion {
- private static final ESLogger logger = Loggers.getLogger(LessThanAssertion.class);
+ private static final Logger logger = Loggers.getLogger(LessThanAssertion.class);
public LessThanAssertion(XContentLocation location, String field, Object expectedValue) {
super(location, field, expectedValue);
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LessThanOrEqualToAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LessThanOrEqualToAssertion.java
index 46e25332b6..7c5710f689 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LessThanOrEqualToAssertion.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LessThanOrEqualToAssertion.java
@@ -19,7 +19,7 @@
package org.elasticsearch.test.rest.yaml.section;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.XContentLocation;
@@ -35,7 +35,7 @@ import static org.junit.Assert.fail;
*/
public class LessThanOrEqualToAssertion extends Assertion {
- private static final ESLogger logger = Loggers.getLogger(LessThanOrEqualToAssertion.class);
+ private static final Logger logger = Loggers.getLogger(LessThanOrEqualToAssertion.class);
public LessThanOrEqualToAssertion(XContentLocation location, String field, Object expectedValue) {
super(location, field, expectedValue);
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java
index c2a52bf735..2bfb94e658 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java
@@ -18,8 +18,8 @@
*/
package org.elasticsearch.test.rest.yaml.section;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.XContentLocation;
@@ -44,7 +44,7 @@ import static org.junit.Assert.assertThat;
*/
public class MatchAssertion extends Assertion {
- private static final ESLogger logger = Loggers.getLogger(MatchAssertion.class);
+ private static final Logger logger = Loggers.getLogger(MatchAssertion.class);
public MatchAssertion(XContentLocation location, String field, Object expectedValue) {
super(location, field, expectedValue);
diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java
index 1e84ca0fe7..057e7c4845 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java
@@ -21,7 +21,7 @@ package org.elasticsearch.test.store;
import com.carrotsearch.randomizedtesting.SeedUtils;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
-
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.CheckIndex;
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.Directory;
@@ -34,7 +34,6 @@ import org.apache.lucene.util.TestRuleMarkFailure;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
@@ -113,7 +112,7 @@ public class MockFSDirectoryService extends FsDirectoryService {
throw new UnsupportedOperationException();
}
- public static void checkIndex(ESLogger logger, Store store, ShardId shardId) {
+ public static void checkIndex(Logger logger, Store store, ShardId shardId) {
if (store.tryIncRef()) {
logger.info("start check index");
try {
diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java
index 57d9fe3ff0..70c8d2be11 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java
@@ -19,13 +19,12 @@
package org.elasticsearch.test.store;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.shard.IndexEventListener;
@@ -98,7 +97,7 @@ public class MockFSIndexStore extends IndexStore {
if (indexShard != null) {
Boolean remove = shardSet.remove(indexShard);
if (remove == Boolean.TRUE) {
- ESLogger logger = Loggers.getLogger(getClass(), indexShard.indexSettings().getSettings(), indexShard.shardId());
+ Logger logger = Loggers.getLogger(getClass(), indexShard.indexSettings().getSettings(), indexShard.shardId());
MockFSDirectoryService.checkIndex(logger, indexShard.store(), indexShard.shardId());
}
}
diff --git a/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java b/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java
index ec695e8bd4..815429b3ac 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java
@@ -19,6 +19,7 @@
package org.elasticsearch.test.tasks;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
@@ -51,7 +52,11 @@ public class MockTaskManager extends TaskManager {
try {
listener.onTaskRegistered(task);
} catch (Exception e) {
- logger.warn("failed to notify task manager listener about unregistering the task with id {}", e, task.getId());
+ logger.warn(
+ new ParameterizedMessage(
+ "failed to notify task manager listener about unregistering the task with id {}",
+ task.getId()),
+ e);
}
}
}
@@ -66,7 +71,10 @@ public class MockTaskManager extends TaskManager {
try {
listener.onTaskUnregistered(task);
} catch (Exception e) {
- logger.warn("failed to notify task manager listener about unregistering the task with id {}", e, task.getId());
+ logger.warn(
+ new ParameterizedMessage("failed to notify task manager listener about unregistering the task with id {}",
+ task.getId()),
+ e);
}
}
} else {
@@ -81,7 +89,11 @@ public class MockTaskManager extends TaskManager {
try {
listener.waitForTaskCompletion(task);
} catch (Exception e) {
- logger.warn("failed to notify task manager listener about waitForTaskCompletion the task with id {}", e, task.getId());
+ logger.warn(
+ new ParameterizedMessage(
+ "failed to notify task manager listener about waitForTaskCompletion the task with id {}",
+ task.getId()),
+ e);
}
}
super.waitForTaskCompletion(task, untilInNanos);
diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
index 33c5fcccad..d1ad12d0cc 100644
--- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
@@ -19,6 +19,7 @@
package org.elasticsearch.transport;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListenerResponseHandler;
@@ -535,7 +536,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
listener.actionGet();
} catch (Exception e) {
- logger.trace("caught exception while sending to node {}", e, nodeA);
+ logger.trace(new ParameterizedMessage("caught exception while sending to node {}", nodeA), e);
}
}
}
@@ -570,7 +571,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
} catch (ConnectTransportException e) {
// ok!
} catch (Exception e) {
- logger.error("caught exception while sending to node {}", e, node);
+ logger.error(new ParameterizedMessage("caught exception while sending to node {}", node), e);
sendingErrors.add(e);
}
}
@@ -1684,7 +1685,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
@Override
public void handleException(TransportException exp) {
- logger.debug("---> received exception for id {}", exp, id);
+ logger.debug(new ParameterizedMessage("---> received exception for id {}", id), exp);
allRequestsDone.countDown();
Throwable unwrap = ExceptionsHelper.unwrap(exp, IOException.class);
assertNotNull(unwrap);
diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/LoggingListenerTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/LoggingListenerTests.java
index bb0722365c..9077d1963c 100644
--- a/test/framework/src/test/java/org/elasticsearch/test/test/LoggingListenerTests.java
+++ b/test/framework/src/test/java/org/elasticsearch/test/test/LoggingListenerTests.java
@@ -19,7 +19,8 @@
package org.elasticsearch.test.test;
-import org.elasticsearch.common.logging.ESLogger;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.junit.annotations.TestLogging;
@@ -30,37 +31,38 @@ import org.junit.runner.Result;
import java.lang.reflect.Method;
import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.CoreMatchers.nullValue;
public class LoggingListenerTests extends ESTestCase {
+
public void testCustomLevelPerMethod() throws Exception {
LoggingListener loggingListener = new LoggingListener();
Description suiteDescription = Description.createSuiteDescription(TestClass.class);
- ESLogger abcLogger = Loggers.getLogger("abc");
- ESLogger xyzLogger = Loggers.getLogger("xyz");
+ Logger xyzLogger = Loggers.getLogger("xyz");
+ Logger abcLogger = Loggers.getLogger("abc");
- assertThat(abcLogger.getLevel(), nullValue());
- assertThat(xyzLogger.getLevel(), nullValue());
+ assertEquals(Level.ERROR, abcLogger.getLevel());
+ assertThat(xyzLogger.getLevel(), equalTo(Level.ERROR));
+ assertThat(abcLogger.getLevel(), equalTo(Level.ERROR));
loggingListener.testRunStarted(suiteDescription);
- assertThat(xyzLogger.getLevel(), nullValue());
- assertThat(abcLogger.getLevel(), nullValue());
+ assertThat(xyzLogger.getLevel(), equalTo(Level.ERROR));
+ assertThat(abcLogger.getLevel(), equalTo(Level.ERROR));
Method method = TestClass.class.getMethod("annotatedTestMethod");
TestLogging annotation = method.getAnnotation(TestLogging.class);
Description testDescription = Description.createTestDescription(LoggingListenerTests.class, "annotatedTestMethod", annotation);
loggingListener.testStarted(testDescription);
- assertThat(xyzLogger.getLevel(), equalTo("TRACE"));
- assertThat(abcLogger.getLevel(), nullValue());
+ assertThat(xyzLogger.getLevel(), equalTo(Level.TRACE));
+ assertThat(abcLogger.getLevel(), equalTo(Level.ERROR));
loggingListener.testFinished(testDescription);
- assertThat(xyzLogger.getLevel(), nullValue());
- assertThat(abcLogger.getLevel(), nullValue());
+ assertThat(xyzLogger.getLevel(), equalTo(Level.ERROR));
+ assertThat(abcLogger.getLevel(), equalTo(Level.ERROR));
loggingListener.testRunFinished(new Result());
- assertThat(xyzLogger.getLevel(), nullValue());
- assertThat(abcLogger.getLevel(), nullValue());
+ assertThat(xyzLogger.getLevel(), equalTo(Level.ERROR));
+ assertThat(abcLogger.getLevel(), equalTo(Level.ERROR));
}
public void testCustomLevelPerClass() throws Exception {
@@ -68,27 +70,27 @@ public class LoggingListenerTests extends ESTestCase {
Description suiteDescription = Description.createSuiteDescription(AnnotatedTestClass.class);
- ESLogger abcLogger = Loggers.getLogger("abc");
- ESLogger xyzLogger = Loggers.getLogger("xyz");
+ Logger abcLogger = Loggers.getLogger("abc");
+ Logger xyzLogger = Loggers.getLogger("xyz");
- assertThat(xyzLogger.getLevel(), nullValue());
- assertThat(abcLogger.getLevel(), nullValue());
+ assertThat(xyzLogger.getLevel(), equalTo(Level.ERROR));
+ assertThat(abcLogger.getLevel(), equalTo(Level.ERROR));
loggingListener.testRunStarted(suiteDescription);
- assertThat(abcLogger.getLevel(), equalTo("ERROR"));
- assertThat(xyzLogger.getLevel(), nullValue());
+ assertThat(xyzLogger.getLevel(), equalTo(Level.ERROR));
+ assertThat(abcLogger.getLevel(), equalTo(Level.WARN));
Description testDescription = Description.createTestDescription(LoggingListenerTests.class, "test");
loggingListener.testStarted(testDescription);
- assertThat(abcLogger.getLevel(), equalTo("ERROR"));
- assertThat(xyzLogger.getLevel(), nullValue());
+ assertThat(xyzLogger.getLevel(), equalTo(Level.ERROR));
+ assertThat(abcLogger.getLevel(), equalTo(Level.WARN));
loggingListener.testFinished(testDescription);
- assertThat(abcLogger.getLevel(), equalTo("ERROR"));
- assertThat(xyzLogger.getLevel(), nullValue());
+ assertThat(xyzLogger.getLevel(), equalTo(Level.ERROR));
+ assertThat(abcLogger.getLevel(), equalTo(Level.WARN));
loggingListener.testRunFinished(new Result());
- assertThat(abcLogger.getLevel(), nullValue());
- assertThat(xyzLogger.getLevel(), nullValue());
+ assertThat(xyzLogger.getLevel(), equalTo(Level.ERROR));
+ assertThat(abcLogger.getLevel(), equalTo(Level.ERROR));
}
public void testCustomLevelPerClassAndPerMethod() throws Exception {
@@ -96,49 +98,54 @@ public class LoggingListenerTests extends ESTestCase {
Description suiteDescription = Description.createSuiteDescription(AnnotatedTestClass.class);
- ESLogger abcLogger = Loggers.getLogger("abc");
- ESLogger xyzLogger = Loggers.getLogger("xyz");
+ Logger abcLogger = Loggers.getLogger("abc");
+ Logger xyzLogger = Loggers.getLogger("xyz");
- assertThat(xyzLogger.getLevel(), nullValue());
- assertThat(abcLogger.getLevel(), nullValue());
+ assertThat(xyzLogger.getLevel(), equalTo(Level.ERROR));
+ assertThat(abcLogger.getLevel(), equalTo(Level.ERROR));
loggingListener.testRunStarted(suiteDescription);
- assertThat(abcLogger.getLevel(), equalTo("ERROR"));
- assertThat(xyzLogger.getLevel(), nullValue());
+ assertThat(xyzLogger.getLevel(), equalTo(Level.ERROR));
+ assertThat(abcLogger.getLevel(), equalTo(Level.WARN));
Method method = TestClass.class.getMethod("annotatedTestMethod");
TestLogging annotation = method.getAnnotation(TestLogging.class);
Description testDescription = Description.createTestDescription(LoggingListenerTests.class, "annotatedTestMethod", annotation);
loggingListener.testStarted(testDescription);
- assertThat(abcLogger.getLevel(), equalTo("ERROR"));
- assertThat(xyzLogger.getLevel(), equalTo("TRACE"));
+ assertThat(xyzLogger.getLevel(), equalTo(Level.TRACE));
+ assertThat(abcLogger.getLevel(), equalTo(Level.WARN));
loggingListener.testFinished(testDescription);
- assertThat(abcLogger.getLevel(), equalTo("ERROR"));
- assertThat(xyzLogger.getLevel(), nullValue());
+ assertThat(xyzLogger.getLevel(), equalTo(Level.ERROR));
+ assertThat(abcLogger.getLevel(), equalTo(Level.WARN));
Method method2 = TestClass.class.getMethod("annotatedTestMethod2");
TestLogging annotation2 = method2.getAnnotation(TestLogging.class);
Description testDescription2 = Description.createTestDescription(LoggingListenerTests.class, "annotatedTestMethod2", annotation2);
loggingListener.testStarted(testDescription2);
- assertThat(abcLogger.getLevel(), equalTo("TRACE"));
- assertThat(xyzLogger.getLevel(), equalTo("DEBUG"));
+ assertThat(xyzLogger.getLevel(), equalTo(Level.DEBUG));
+ assertThat(abcLogger.getLevel(), equalTo(Level.TRACE));
loggingListener.testFinished(testDescription2);
- assertThat(abcLogger.getLevel(), equalTo("ERROR"));
- assertThat(xyzLogger.getLevel(), nullValue());
+ assertThat(xyzLogger.getLevel(), equalTo(Level.ERROR));
+ assertThat(abcLogger.getLevel(), equalTo(Level.WARN));
loggingListener.testRunFinished(new Result());
- assertThat(abcLogger.getLevel(), nullValue());
- assertThat(xyzLogger.getLevel(), nullValue());
+ assertThat(xyzLogger.getLevel(), equalTo(Level.ERROR));
+ assertThat(abcLogger.getLevel(), equalTo(Level.ERROR));
}
- @TestLogging("abc:ERROR")
+ /**
+ * dummy class used to create a junit suite description that has the @TestLogging annotation
+ */
+ @TestLogging("abc:WARN")
public static class AnnotatedTestClass {
- //dummy class used to create a junit suite description that has the @TestLogging annotation
+
}
+ /**
+ * dummy class used to create a junit suite description that doesn't have the @TestLogging annotation, but its test methods have it
+ */
public static class TestClass {
- //dummy class used to create a junit suite description that doesn't have the @TestLogging annotation, but its test methods have it
@SuppressWarnings("unused")
@TestLogging("xyz:TRACE")
@@ -147,5 +154,7 @@ public class LoggingListenerTests extends ESTestCase {
@SuppressWarnings("unused")
@TestLogging("abc:TRACE,xyz:DEBUG")
public void annotatedTestMethod2() {}
+
}
+
}
diff --git a/test/logger-usage/src/test/java/org/elasticsearch/test/loggerusage/ESLoggerUsageTests.java b/test/logger-usage/src/test/java/org/elasticsearch/test/loggerusage/ESLoggerUsageTests.java
index 73449f4351..fed7d58a7d 100644
--- a/test/logger-usage/src/test/java/org/elasticsearch/test/loggerusage/ESLoggerUsageTests.java
+++ b/test/logger-usage/src/test/java/org/elasticsearch/test/loggerusage/ESLoggerUsageTests.java
@@ -19,10 +19,11 @@
package org.elasticsearch.test.loggerusage;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.common.SuppressLoggerChecks;
-import org.elasticsearch.common.logging.ESLogger;
-import org.elasticsearch.test.loggerusage.ESLoggerUsageChecker.WrongLoggerUsage;
import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.loggerusage.ESLoggerUsageChecker.WrongLoggerUsage;
import java.io.IOException;
import java.io.InputStream;
@@ -46,9 +47,9 @@ public class ESLoggerUsageTests extends ESTestCase {
List<WrongLoggerUsage> errors = new ArrayList<>();
ESLoggerUsageChecker.check(errors::add, classInputStream, Predicate.isEqual(method.getName()));
if (method.getName().startsWith("checkFail")) {
- assertFalse("Expected " + method.getName() + " to have wrong ESLogger usage", errors.isEmpty());
+ assertFalse("Expected " + method.getName() + " to have wrong Logger usage", errors.isEmpty());
} else {
- assertTrue("Method " + method.getName() + " has unexpected ESLogger usage errors: " + errors, errors.isEmpty());
+ assertTrue("Method " + method.getName() + " has unexpected Logger usage errors: " + errors, errors.isEmpty());
}
} else {
assertTrue("only allow methods starting with test or check in this class", method.getName().startsWith("test"));
@@ -58,10 +59,10 @@ public class ESLoggerUsageTests extends ESTestCase {
}
public void testLoggerUsageCheckerCompatibilityWithESLogger() throws NoSuchMethodException {
- assertThat(ESLoggerUsageChecker.LOGGER_CLASS, equalTo(ESLogger.class.getName()));
+ assertThat(ESLoggerUsageChecker.LOGGER_CLASS, equalTo(Logger.class.getName()));
assertThat(ESLoggerUsageChecker.THROWABLE_CLASS, equalTo(Throwable.class.getName()));
int varargsMethodCount = 0;
- for (Method method : ESLogger.class.getMethods()) {
+ for (Method method : Logger.class.getMethods()) {
if (method.isVarArgs()) {
// check that logger usage checks all varargs methods
assertThat(ESLoggerUsageChecker.LOGGER_METHODS, hasItem(method.getName()));
@@ -74,8 +75,8 @@ public class ESLoggerUsageTests extends ESTestCase {
// check that signature is same as we expect in the usage checker
for (String methodName : ESLoggerUsageChecker.LOGGER_METHODS) {
- assertThat(ESLogger.class.getMethod(methodName, String.class, Object[].class), notNullValue());
- assertThat(ESLogger.class.getMethod(methodName, String.class, Throwable.class, Object[].class), notNullValue());
+ assertThat(Logger.class.getMethod(methodName, String.class, Object[].class), notNullValue());
+ assertThat(Logger.class.getMethod(methodName, String.class, Throwable.class, Object[].class), notNullValue());
}
}
@@ -114,7 +115,7 @@ public class ESLoggerUsageTests extends ESTestCase {
}
public void checkOrderOfExceptionArgument1() {
- logger.info("Hello {}", new Exception(), "world");
+ logger.info(new ParameterizedMessage("Hello {}", "world"), new Exception());
}
public void checkFailOrderOfExceptionArgument1() {
@@ -122,7 +123,7 @@ public class ESLoggerUsageTests extends ESTestCase {
}
public void checkOrderOfExceptionArgument2() {
- logger.info("Hello {}, {}", new Exception(), "world", 42);
+ logger.info(new ParameterizedMessage("Hello {}, {}", "world", 42), new Exception());
}
public void checkFailOrderOfExceptionArgument2() {
@@ -134,7 +135,7 @@ public class ESLoggerUsageTests extends ESTestCase {
}
public void checkFailNonConstantMessageWithArguments(boolean b) {
- logger.info(Boolean.toString(b), new Exception(), 42);
+ logger.info(new ParameterizedMessage(Boolean.toString(b), 42), new Exception());
}
public void checkComplexUsage(boolean b) {
@@ -166,4 +167,5 @@ public class ESLoggerUsageTests extends ESTestCase {
}
logger.info(message, args);
}
+
}