author     Simon Willnauer <simon.willnauer@elasticsearch.com>   2016-09-23 08:53:50 +0200
committer  GitHub <noreply@github.com>                           2016-09-23 08:53:50 +0200
commit     fe1803c957b28f6a9099921fc23fa91eb92dfe6d (patch)
tree       c11e44e35482c08d306fe6a79cdefa93d16c7fda /core
parent     e3b7b4f032b5865e4389ae52a8e2ff54721fb2dc (diff)
Remove AnalysisService and reduce it to a simple name to analyzer mapping (#20627)
Today we hold on to all possible tokenizers, tokenfilters etc. when we create an index service on a node. This was mainly done to allow the `_analyze` API to directly access all these primitives. We fixed this in #19827 and can now get rid of the AnalysisService entirely and replace it with a simple map-like class. This ensures we don't create a gazillion long-living objects that are entirely useless, since they are never used in most of the indices. Also, those objects might consume a considerable amount of memory since they might load stopwords or synonyms etc.

Closes #19828
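The shape of the change, for readers skimming the diff: callers no longer go through the AnalysisService component but do a plain name lookup on the new IndexAnalyzers class. A minimal sketch of the new call pattern, assuming an IndexService in scope (the helper class below is illustrative, not part of the commit; it mirrors the lookup and error handling in TransportAnalyzeAction further down):

    import org.elasticsearch.index.IndexService;
    import org.elasticsearch.index.analysis.IndexAnalyzers;
    import org.elasticsearch.index.analysis.NamedAnalyzer;

    class AnalyzerLookupSketch {
        // before this commit: indexService.analysisService().analyzer(name)
        static NamedAnalyzer resolve(IndexService indexService, String name) {
            IndexAnalyzers indexAnalyzers = indexService.getIndexAnalyzers();
            // a simple map lookup; returns null if the analyzer is not configured
            NamedAnalyzer analyzer = indexAnalyzers.get(name);
            if (analyzer == null) {
                throw new IllegalArgumentException("failed to find analyzer [" + name + "]");
            }
            return analyzer;
        }
    }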
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java | 51
-rw-r--r--  core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java | 58
-rw-r--r--  core/src/main/java/org/elasticsearch/index/IndexService.java | 14
-rw-r--r--  core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java | 176
-rw-r--r--  core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java | 218
-rw-r--r--  core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java | 22
-rw-r--r--  core/src/main/java/org/elasticsearch/index/analysis/IndexAnalyzers.java | 96
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/AllFieldMapper.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper2x.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java | 10
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java | 11
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/LegacyTokenCountFieldMapper.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/Mapper.java | 14
-rwxr-xr-x  core/src/main/java/org/elasticsearch/index/mapper/MapperService.java | 20
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java | 16
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/StringFieldMapper.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java | 8
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/TokenCountFieldMapper.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilder.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/search/MatchQuery.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/search/DefaultSearchContext.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java | 6
-rw-r--r--  core/src/main/java/org/elasticsearch/search/internal/SearchContext.java | 3
-rw-r--r--  core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java | 4
-rw-r--r--  core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java | 37
-rw-r--r--  core/src/test/java/org/elasticsearch/index/analysis/ASCIIFoldingTokenFilterFactoryTests.java | 9
-rw-r--r--  core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java (renamed from core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java) | 89
-rw-r--r--  core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java | 15
-rw-r--r--  core/src/test/java/org/elasticsearch/index/analysis/CJKFilterFactoryTests.java | 17
-rw-r--r--  core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java | 14
-rw-r--r--  core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java | 9
-rw-r--r--  core/src/test/java/org/elasticsearch/index/analysis/HunspellTokenFilterFactoryTests.java | 8
-rw-r--r--  core/src/test/java/org/elasticsearch/index/analysis/KeepFilterFactoryTests.java | 19
-rw-r--r--  core/src/test/java/org/elasticsearch/index/analysis/KeepTypesFilterFactoryTests.java | 5
-rw-r--r--  core/src/test/java/org/elasticsearch/index/analysis/LimitTokenCountFilterFactoryTests.java | 19
-rw-r--r--  core/src/test/java/org/elasticsearch/index/analysis/MinHashFilterFactoryTests.java | 9
-rw-r--r--  core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java | 11
-rw-r--r--  core/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java | 17
-rw-r--r--  core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java | 15
-rw-r--r--  core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java | 9
-rw-r--r--  core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java | 11
-rw-r--r--  core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java | 33
-rw-r--r--  core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java | 61
-rw-r--r--  core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java | 9
-rw-r--r--  core/src/test/java/org/elasticsearch/index/codec/CodecTests.java | 6
-rw-r--r--  core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java | 12
-rw-r--r--  core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java | 7
-rw-r--r--  core/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java | 17
-rw-r--r--  core/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java | 21
-rw-r--r--  core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java | 67
62 files changed, 682 insertions, 661 deletions
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java
index f035bc0f4b..7d7e9d2dd2 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java
@@ -45,9 +45,9 @@ import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisRegistry;
-import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.analysis.CharFilterFactory;
import org.elasticsearch.index.analysis.CustomAnalyzer;
+import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.index.analysis.TokenizerFactory;
@@ -145,45 +145,46 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
}
}
final AnalysisRegistry analysisRegistry = indicesService.getAnalysis();
- return analyze(request, field, analyzer, indexService != null ? indexService.analysisService() : null, analysisRegistry, environment);
+ return analyze(request, field, analyzer, indexService != null ? indexService.getIndexAnalyzers() : null, analysisRegistry, environment);
} catch (IOException e) {
throw new ElasticsearchException("analysis failed", e);
}
}
- public static AnalyzeResponse analyze(AnalyzeRequest request, String field, Analyzer analyzer, AnalysisService analysisService, AnalysisRegistry analysisRegistry, Environment environment) throws IOException {
+ public static AnalyzeResponse analyze(AnalyzeRequest request, String field, Analyzer analyzer, IndexAnalyzers indexAnalyzers, AnalysisRegistry analysisRegistry, Environment environment) throws IOException {
boolean closeAnalyzer = false;
if (analyzer == null && request.analyzer() != null) {
- if (analysisService == null) {
+ if (indexAnalyzers == null) {
analyzer = analysisRegistry.getAnalyzer(request.analyzer());
if (analyzer == null) {
throw new IllegalArgumentException("failed to find global analyzer [" + request.analyzer() + "]");
}
} else {
- analyzer = analysisService.analyzer(request.analyzer());
+ analyzer = indexAnalyzers.get(request.analyzer());
if (analyzer == null) {
throw new IllegalArgumentException("failed to find analyzer [" + request.analyzer() + "]");
}
}
} else if (request.tokenizer() != null) {
- TokenizerFactory tokenizerFactory = parseTokenizerFactory(request, analysisService, analysisRegistry, environment);
+ final IndexSettings indexSettings = indexAnalyzers == null ? null : indexAnalyzers.getIndexSettings();
+ TokenizerFactory tokenizerFactory = parseTokenizerFactory(request, indexAnalyzers, analysisRegistry, environment);
TokenFilterFactory[] tokenFilterFactories = new TokenFilterFactory[0];
- tokenFilterFactories = getTokenFilterFactories(request, analysisService, analysisRegistry, environment, tokenFilterFactories);
+ tokenFilterFactories = getTokenFilterFactories(request, indexSettings, analysisRegistry, environment, tokenFilterFactories);
CharFilterFactory[] charFilterFactories = new CharFilterFactory[0];
- charFilterFactories = getCharFilterFactories(request, analysisService, analysisRegistry, environment, charFilterFactories);
+ charFilterFactories = getCharFilterFactories(request, indexSettings, analysisRegistry, environment, charFilterFactories);
analyzer = new CustomAnalyzer(tokenizerFactory, charFilterFactories, tokenFilterFactories);
closeAnalyzer = true;
} else if (analyzer == null) {
- if (analysisService == null) {
+ if (indexAnalyzers == null) {
analyzer = analysisRegistry.getAnalyzer("standard");
} else {
- analyzer = analysisService.defaultIndexAnalyzer();
+ analyzer = indexAnalyzers.getDefaultIndexAnalyzer();
}
}
if (analyzer == null) {
@@ -446,7 +447,7 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
return extendedAttributes;
}
- private static CharFilterFactory[] getCharFilterFactories(AnalyzeRequest request, AnalysisService analysisService, AnalysisRegistry analysisRegistry,
+ private static CharFilterFactory[] getCharFilterFactories(AnalyzeRequest request, IndexSettings indexSettings, AnalysisRegistry analysisRegistry,
Environment environment, CharFilterFactory[] charFilterFactories) throws IOException {
if (request.charFilters() != null && request.charFilters().size() > 0) {
charFilterFactories = new CharFilterFactory[request.charFilters().size()];
@@ -468,19 +469,19 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
charFilterFactories[i] = charFilterFactoryFactory.get(getNaIndexSettings(settings), environment, "_anonymous_charfilter_[" + i + "]", settings);
} else {
AnalysisModule.AnalysisProvider<CharFilterFactory> charFilterFactoryFactory;
- if (analysisService == null) {
+ if (indexSettings == null) {
charFilterFactoryFactory = analysisRegistry.getCharFilterProvider(charFilter.name);
if (charFilterFactoryFactory == null) {
throw new IllegalArgumentException("failed to find global char filter under [" + charFilter.name + "]");
}
charFilterFactories[i] = charFilterFactoryFactory.get(environment, charFilter.name);
} else {
- charFilterFactoryFactory = analysisRegistry.getCharFilterProvider(charFilter.name, analysisService.getIndexSettings());
+ charFilterFactoryFactory = analysisRegistry.getCharFilterProvider(charFilter.name, indexSettings);
if (charFilterFactoryFactory == null) {
throw new IllegalArgumentException("failed to find char filter under [" + charFilter.name + "]");
}
- charFilterFactories[i] = charFilterFactoryFactory.get(analysisService.getIndexSettings(), environment, charFilter.name,
- AnalysisRegistry.getSettingsFromIndexSettings(analysisService.getIndexSettings(),
+ charFilterFactories[i] = charFilterFactoryFactory.get(indexSettings, environment, charFilter.name,
+ AnalysisRegistry.getSettingsFromIndexSettings(indexSettings,
AnalysisRegistry.INDEX_ANALYSIS_CHAR_FILTER + "." + charFilter.name));
}
}
@@ -492,7 +493,7 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
return charFilterFactories;
}
- private static TokenFilterFactory[] getTokenFilterFactories(AnalyzeRequest request, AnalysisService analysisService, AnalysisRegistry analysisRegistry,
+ private static TokenFilterFactory[] getTokenFilterFactories(AnalyzeRequest request, IndexSettings indexSettings, AnalysisRegistry analysisRegistry,
Environment environment, TokenFilterFactory[] tokenFilterFactories) throws IOException {
if (request.tokenFilters() != null && request.tokenFilters().size() > 0) {
tokenFilterFactories = new TokenFilterFactory[request.tokenFilters().size()];
@@ -514,19 +515,19 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
tokenFilterFactories[i] = tokenFilterFactoryFactory.get(getNaIndexSettings(settings), environment, "_anonymous_tokenfilter_[" + i + "]", settings);
} else {
AnalysisModule.AnalysisProvider<TokenFilterFactory> tokenFilterFactoryFactory;
- if (analysisService == null) {
+ if (indexSettings == null) {
tokenFilterFactoryFactory = analysisRegistry.getTokenFilterProvider(tokenFilter.name);
if (tokenFilterFactoryFactory == null) {
throw new IllegalArgumentException("failed to find global token filter under [" + tokenFilter.name + "]");
}
tokenFilterFactories[i] = tokenFilterFactoryFactory.get(environment, tokenFilter.name);
} else {
- tokenFilterFactoryFactory = analysisRegistry.getTokenFilterProvider(tokenFilter.name, analysisService.getIndexSettings());
+ tokenFilterFactoryFactory = analysisRegistry.getTokenFilterProvider(tokenFilter.name, indexSettings);
if (tokenFilterFactoryFactory == null) {
throw new IllegalArgumentException("failed to find token filter under [" + tokenFilter.name + "]");
}
- tokenFilterFactories[i] = tokenFilterFactoryFactory.get(analysisService.getIndexSettings(), environment, tokenFilter.name,
- AnalysisRegistry.getSettingsFromIndexSettings(analysisService.getIndexSettings(),
+ tokenFilterFactories[i] = tokenFilterFactoryFactory.get(indexSettings, environment, tokenFilter.name,
+ AnalysisRegistry.getSettingsFromIndexSettings(indexSettings,
AnalysisRegistry.INDEX_ANALYSIS_FILTER + "." + tokenFilter.name));
}
}
@@ -538,7 +539,7 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
return tokenFilterFactories;
}
- private static TokenizerFactory parseTokenizerFactory(AnalyzeRequest request, AnalysisService analysisService,
+ private static TokenizerFactory parseTokenizerFactory(AnalyzeRequest request, IndexAnalyzers indexAnalzyers,
AnalysisRegistry analysisRegistry, Environment environment) throws IOException {
TokenizerFactory tokenizerFactory;
final AnalyzeRequest.NameOrDefinition tokenizer = request.tokenizer();
@@ -558,19 +559,19 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
tokenizerFactory = tokenizerFactoryFactory.get(getNaIndexSettings(settings), environment, "_anonymous_tokenizer", settings);
} else {
AnalysisModule.AnalysisProvider<TokenizerFactory> tokenizerFactoryFactory;
- if (analysisService == null) {
+ if (indexAnalzyers == null) {
tokenizerFactoryFactory = analysisRegistry.getTokenizerProvider(tokenizer.name);
if (tokenizerFactoryFactory == null) {
throw new IllegalArgumentException("failed to find global tokenizer under [" + tokenizer.name + "]");
}
tokenizerFactory = tokenizerFactoryFactory.get(environment, tokenizer.name);
} else {
- tokenizerFactoryFactory = analysisRegistry.getTokenizerProvider(tokenizer.name, analysisService.getIndexSettings());
+ tokenizerFactoryFactory = analysisRegistry.getTokenizerProvider(tokenizer.name, indexAnalzyers.getIndexSettings());
if (tokenizerFactoryFactory == null) {
throw new IllegalArgumentException("failed to find tokenizer under [" + tokenizer.name + "]");
}
- tokenizerFactory = tokenizerFactoryFactory.get(analysisService.getIndexSettings(), environment, tokenizer.name,
- AnalysisRegistry.getSettingsFromIndexSettings(analysisService.getIndexSettings(),
+ tokenizerFactory = tokenizerFactoryFactory.get(indexAnalzyers.getIndexSettings(), environment, tokenizer.name,
+ AnalysisRegistry.getSettingsFromIndexSettings(indexAnalzyers.getIndexSettings(),
AnalysisRegistry.INDEX_ANALYSIS_TOKENIZER + "." + tokenizer.name));
}
}
diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java
index fa55043f61..d9faa52068 100644
--- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java
+++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java
@@ -26,13 +26,16 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.IndexScopedSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.analysis.AnalysisService;
+import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.mapper.MapperRegistry;
+import java.util.AbstractMap;
import java.util.Collections;
+import java.util.Map;
+import java.util.Set;
/**
* This service is responsible for upgrading legacy index metadata to the current version
@@ -112,9 +115,30 @@ public class MetaDataIndexUpgradeService extends AbstractComponent {
// been started yet. However, we don't really need real analyzers at this stage - so we can fake it
IndexSettings indexSettings = new IndexSettings(indexMetaData, this.settings);
SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap());
+ final NamedAnalyzer fakeDefault = new NamedAnalyzer("fake_default", new Analyzer() {
+ @Override
+ protected TokenStreamComponents createComponents(String fieldName) {
+ throw new UnsupportedOperationException("shouldn't be here");
+ }
+ });
+ // this is just a fake map that always returns the same value for any possible string key
+ // also the entrySet impl isn't fully correct but we implement it since internally
+ // IndexAnalyzers will iterate over all analyzers to close them.
+ final Map<String, NamedAnalyzer> analyzerMap = new AbstractMap<String, NamedAnalyzer>() {
+ @Override
+ public NamedAnalyzer get(Object key) {
+ assert key instanceof String : "key must be a string but was: " + key.getClass();
+ return new NamedAnalyzer((String)key, fakeDefault.analyzer());
+ }
- try (AnalysisService analysisService = new FakeAnalysisService(indexSettings)) {
- MapperService mapperService = new MapperService(indexSettings, analysisService, similarityService, mapperRegistry, () -> null);
+ @Override
+ public Set<Entry<String, NamedAnalyzer>> entrySet() {
+ // just to ensure we can iterate over this single analzyer
+ return Collections.singletonMap(fakeDefault.name(), fakeDefault).entrySet();
+ }
+ };
+ try (IndexAnalyzers fakeIndexAnalzyers = new IndexAnalyzers(indexSettings, fakeDefault, fakeDefault, fakeDefault, analyzerMap)) {
+ MapperService mapperService = new MapperService(indexSettings, fakeIndexAnalzyers, similarityService, mapperRegistry, () -> null);
for (ObjectCursor<MappingMetaData> cursor : indexMetaData.getMappings().values()) {
MappingMetaData mappingMetaData = cursor.value;
mapperService.merge(mappingMetaData.type(), mappingMetaData.source(), MapperService.MergeReason.MAPPING_RECOVERY, false);
@@ -134,34 +158,6 @@ public class MetaDataIndexUpgradeService extends AbstractComponent {
return IndexMetaData.builder(indexMetaData).settings(settings).build();
}
- /**
- * A fake analysis server that returns the same keyword analyzer for all requests
- */
- private static class FakeAnalysisService extends AnalysisService {
-
- private Analyzer fakeAnalyzer = new Analyzer() {
- @Override
- protected TokenStreamComponents createComponents(String fieldName) {
- throw new UnsupportedOperationException("shouldn't be here");
- }
- };
-
- public FakeAnalysisService(IndexSettings indexSettings) {
- super(indexSettings, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap());
- }
-
- @Override
- public NamedAnalyzer analyzer(String name) {
- return new NamedAnalyzer(name, fakeAnalyzer);
- }
-
- @Override
- public void close() {
- fakeAnalyzer.close();
- super.close();
- }
- }
-
IndexMetaData archiveBrokenIndexSettings(IndexMetaData indexMetaData) {
final Settings settings = indexMetaData.getSettings();
final Settings upgrade = indexScopedSettings.archiveUnknownOrBrokenSettings(settings);
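The removed FakeAnalysisService above is replaced by a Map view that fabricates a NamedAnalyzer for any requested key, while entrySet() exposes only the single fake default so that IndexAnalyzers can iterate over and close it. A standalone sketch of that pattern (the named class below is hypothetical; the commit itself uses an anonymous AbstractMap inline):

    import java.util.AbstractMap;
    import java.util.Collections;
    import java.util.Set;
    import org.elasticsearch.index.analysis.NamedAnalyzer;

    final class AnyNameAnalyzerMap extends AbstractMap<String, NamedAnalyzer> {
        private final NamedAnalyzer fakeDefault;

        AnyNameAnalyzerMap(NamedAnalyzer fakeDefault) {
            this.fakeDefault = fakeDefault;
        }

        @Override
        public NamedAnalyzer get(Object key) {
            // every lookup succeeds: wrap the fake analyzer under the requested name
            return new NamedAnalyzer((String) key, fakeDefault.analyzer());
        }

        @Override
        public Set<Entry<String, NamedAnalyzer>> entrySet() {
            // expose just the fake default so IndexAnalyzers.close() has something to iterate
            return Collections.singletonMap(fakeDefault.name(), fakeDefault).entrySet();
        }
    }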
diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java
index e662e46c79..58457417fa 100644
--- a/core/src/main/java/org/elasticsearch/index/IndexService.java
+++ b/core/src/main/java/org/elasticsearch/index/IndexService.java
@@ -43,7 +43,7 @@ import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.env.ShardLock;
import org.elasticsearch.env.ShardLockObtainFailedException;
import org.elasticsearch.index.analysis.AnalysisRegistry;
-import org.elasticsearch.index.analysis.AnalysisService;
+import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.cache.query.QueryCache;
@@ -97,7 +97,7 @@ import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
public class IndexService extends AbstractIndexComponent implements IndicesClusterStateService.AllocatedIndex<IndexShard> {
private final IndexEventListener eventListener;
- private final AnalysisService analysisService;
+ private final IndexAnalyzers indexAnalyzers;
private final IndexFieldDataService indexFieldData;
private final BitsetFilterCache bitsetFilterCache;
private final NodeEnvironment nodeEnv;
@@ -137,9 +137,9 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
List<IndexingOperationListener> indexingOperationListeners) throws IOException {
super(indexSettings);
this.indexSettings = indexSettings;
- this.analysisService = registry.build(indexSettings);
+ this.indexAnalyzers = registry.build(indexSettings);
this.similarityService = similarityService;
- this.mapperService = new MapperService(indexSettings, analysisService, similarityService, mapperRegistry,
+ this.mapperService = new MapperService(indexSettings, indexAnalyzers, similarityService, mapperRegistry,
IndexService.this::newQueryShardContext);
this.indexFieldData = new IndexFieldDataService(indexSettings, indicesFieldDataCache,
nodeServicesProvider.getCircuitBreakerService(), mapperService);
@@ -214,8 +214,8 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
return indexFieldData;
}
- public AnalysisService analysisService() {
- return this.analysisService;
+ public IndexAnalyzers getIndexAnalyzers() {
+ return this.indexAnalyzers;
}
public MapperService mapperService() {
@@ -239,7 +239,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
}
}
} finally {
- IOUtils.close(bitsetFilterCache, indexCache, indexFieldData, analysisService, refreshTask, fsyncTask);
+ IOUtils.close(bitsetFilterCache, indexCache, indexFieldData, indexAnalyzers, refreshTask, fsyncTask);
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java
index 5ba11cd354..1d2098d439 100644
--- a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java
+++ b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java
@@ -18,14 +18,21 @@
*/
package org.elasticsearch.index.analysis;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.logging.DeprecationLogger;
+import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.env.Environment;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.indices.analysis.AnalysisModule;
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
import org.elasticsearch.indices.analysis.PreBuiltAnalyzers;
@@ -39,6 +46,7 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
+import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
@@ -46,7 +54,7 @@ import static java.util.Collections.unmodifiableMap;
/**
* An internal registry for tokenizer, token filter, char filter and analyzer.
- * This class exists per node and allows to create per-index {@link AnalysisService} via {@link #build(IndexSettings)}
+ * This class exists per node and allows to create per-index {@link IndexAnalyzers} via {@link #build(IndexSettings)}
*/
public final class AnalysisRegistry implements Closeable {
public static final String INDEX_ANALYSIS_CHAR_FILTER = "index.analysis.char_filter";
@@ -136,17 +144,19 @@ public final class AnalysisRegistry implements Closeable {
}
/**
- * Creates an index-level {@link AnalysisService} from this registry using the given index settings
+ * Creates an index-level {@link IndexAnalyzers} from this registry using the given index settings
*/
- public AnalysisService build(IndexSettings indexSettings) throws IOException {
- final Map<String, Settings> charFiltersSettings = indexSettings.getSettings().getGroups(INDEX_ANALYSIS_CHAR_FILTER);
- final Map<String, Settings> tokenFiltersSettings = indexSettings.getSettings().getGroups(INDEX_ANALYSIS_FILTER);
- final Map<String, Settings> tokenizersSettings = indexSettings.getSettings().getGroups(INDEX_ANALYSIS_TOKENIZER);
- final Map<String, Settings> analyzersSettings = indexSettings.getSettings().getGroups("index.analysis.analyzer");
+ public IndexAnalyzers build(IndexSettings indexSettings) throws IOException {
- final Map<String, CharFilterFactory> charFilterFactories = buildMapping(false, "charfilter", indexSettings, charFiltersSettings, charFilters, prebuiltAnalysis.charFilterFactories);
- final Map<String, TokenizerFactory> tokenizerFactories = buildMapping(false, "tokenizer", indexSettings, tokenizersSettings, tokenizers, prebuiltAnalysis.tokenizerFactories);
+ final Map<String, CharFilterFactory> charFilterFactories = buildCharFilterFactories(indexSettings);
+ final Map<String, TokenizerFactory> tokenizerFactories = buildTokenizerFactories(indexSettings);
+ final Map<String, TokenFilterFactory> tokenFilterFactories = buildTokenFilterFactories(indexSettings);
+ final Map<String, AnalyzerProvider<?>> analyzierFactories = buildAnalyzerFactories(indexSettings);
+ return build(indexSettings, analyzierFactories, tokenizerFactories, charFilterFactories, tokenFilterFactories);
+ }
+ public Map<String, TokenFilterFactory> buildTokenFilterFactories(IndexSettings indexSettings) throws IOException {
+ final Map<String, Settings> tokenFiltersSettings = indexSettings.getSettings().getGroups(INDEX_ANALYSIS_FILTER);
Map<String, AnalysisModule.AnalysisProvider<TokenFilterFactory>> tokenFilters = new HashMap<>(this.tokenFilters);
/*
* synonym is different than everything else since it needs access to the tokenizer factories for this index.
@@ -154,10 +164,22 @@ public final class AnalysisRegistry implements Closeable {
* hide internal data-structures as much as possible.
*/
tokenFilters.put("synonym", requriesAnalysisSettings((is, env, name, settings) -> new SynonymTokenFilterFactory(is, env, this, name, settings)));
- final Map<String, TokenFilterFactory> tokenFilterFactories = buildMapping(false, "tokenfilter", indexSettings, tokenFiltersSettings, Collections.unmodifiableMap(tokenFilters), prebuiltAnalysis.tokenFilterFactories);
- final Map<String, AnalyzerProvider<?>> analyzierFactories = buildMapping(true, "analyzer", indexSettings, analyzersSettings,
- analyzers, prebuiltAnalysis.analyzerProviderFactories);
- return new AnalysisService(indexSettings, analyzierFactories, tokenizerFactories, charFilterFactories, tokenFilterFactories);
+ return buildMapping(false, "tokenfilter", indexSettings, tokenFiltersSettings, Collections.unmodifiableMap(tokenFilters), prebuiltAnalysis.tokenFilterFactories);
+ }
+
+ public Map<String, TokenizerFactory> buildTokenizerFactories(IndexSettings indexSettings) throws IOException {
+ final Map<String, Settings> tokenizersSettings = indexSettings.getSettings().getGroups(INDEX_ANALYSIS_TOKENIZER);
+ return buildMapping(false, "tokenizer", indexSettings, tokenizersSettings, tokenizers, prebuiltAnalysis.tokenizerFactories);
+ }
+
+ public Map<String, CharFilterFactory> buildCharFilterFactories(IndexSettings indexSettings) throws IOException {
+ final Map<String, Settings> charFiltersSettings = indexSettings.getSettings().getGroups(INDEX_ANALYSIS_CHAR_FILTER);
+ return buildMapping(false, "charfilter", indexSettings, charFiltersSettings, charFilters, prebuiltAnalysis.charFilterFactories);
+ }
+
+ public Map<String, AnalyzerProvider<?>> buildAnalyzerFactories(IndexSettings indexSettings) throws IOException {
+ final Map<String, Settings> analyzersSettings = indexSettings.getSettings().getGroups("index.analysis.analyzer");
+ return buildMapping(true, "analyzer", indexSettings, analyzersSettings, analyzers, prebuiltAnalysis.analyzerProviderFactories);
}
/**
@@ -399,4 +421,132 @@ public final class AnalysisRegistry implements Closeable {
IOUtils.close(analyzerProviderFactories.values().stream().map((a) -> ((PreBuiltAnalyzerProviderFactory)a).analyzer()).collect(Collectors.toList()));
}
}
+
+ public IndexAnalyzers build(IndexSettings indexSettings,
+ Map<String, AnalyzerProvider<?>> analyzerProviders,
+ Map<String, TokenizerFactory> tokenizerFactoryFactories,
+ Map<String, CharFilterFactory> charFilterFactoryFactories,
+ Map<String, TokenFilterFactory> tokenFilterFactoryFactories) {
+
+ Index index = indexSettings.getIndex();
+ analyzerProviders = new HashMap<>(analyzerProviders);
+ Logger logger = Loggers.getLogger(getClass(), indexSettings.getSettings());
+ DeprecationLogger deprecationLogger = new DeprecationLogger(logger);
+ Map<String, NamedAnalyzer> analyzerAliases = new HashMap<>();
+ Map<String, NamedAnalyzer> analyzers = new HashMap<>();
+ for (Map.Entry<String, AnalyzerProvider<?>> entry : analyzerProviders.entrySet()) {
+ processAnalyzerFactory(deprecationLogger, indexSettings, entry.getKey(), entry.getValue(), analyzerAliases, analyzers,
+ tokenFilterFactoryFactories, charFilterFactoryFactories, tokenizerFactoryFactories);
+ }
+ for (Map.Entry<String, NamedAnalyzer> entry : analyzerAliases.entrySet()) {
+ String key = entry.getKey();
+ if (analyzers.containsKey(key) &&
+ ("default".equals(key) || "default_search".equals(key) || "default_search_quoted".equals(key)) == false) {
+ throw new IllegalStateException("already registered analyzer with name: " + key);
+ } else {
+ NamedAnalyzer configured = entry.getValue();
+ analyzers.put(key, configured);
+ }
+ }
+
+ if (!analyzers.containsKey("default")) {
+ processAnalyzerFactory(deprecationLogger, indexSettings, "default", new StandardAnalyzerProvider(indexSettings, null, "default", Settings.Builder.EMPTY_SETTINGS),
+ analyzerAliases, analyzers, tokenFilterFactoryFactories, charFilterFactoryFactories, tokenizerFactoryFactories);
+ }
+ if (!analyzers.containsKey("default_search")) {
+ analyzers.put("default_search", analyzers.get("default"));
+ }
+ if (!analyzers.containsKey("default_search_quoted")) {
+ analyzers.put("default_search_quoted", analyzers.get("default_search"));
+ }
+
+
+ NamedAnalyzer defaultAnalyzer = analyzers.get("default");
+ if (defaultAnalyzer == null) {
+ throw new IllegalArgumentException("no default analyzer configured");
+ }
+ if (analyzers.containsKey("default_index")) {
+ final Version createdVersion = indexSettings.getIndexVersionCreated();
+ if (createdVersion.onOrAfter(Version.V_5_0_0_alpha1)) {
+ throw new IllegalArgumentException("setting [index.analysis.analyzer.default_index] is not supported anymore, use [index.analysis.analyzer.default] instead for index [" + index.getName() + "]");
+ } else {
+ deprecationLogger.deprecated("setting [index.analysis.analyzer.default_index] is deprecated, use [index.analysis.analyzer.default] instead for index [{}]", index.getName());
+ }
+ }
+ NamedAnalyzer defaultIndexAnalyzer = analyzers.containsKey("default_index") ? analyzers.get("default_index") : defaultAnalyzer;
+ NamedAnalyzer defaultSearchAnalyzer = analyzers.containsKey("default_search") ? analyzers.get("default_search") : defaultAnalyzer;
+ NamedAnalyzer defaultSearchQuoteAnalyzer = analyzers.containsKey("default_search_quote") ? analyzers.get("default_search_quote") : defaultSearchAnalyzer;
+
+ for (Map.Entry<String, NamedAnalyzer> analyzer : analyzers.entrySet()) {
+ if (analyzer.getKey().startsWith("_")) {
+ throw new IllegalArgumentException("analyzer name must not start with '_'. got \"" + analyzer.getKey() + "\"");
+ }
+ }
+ return new IndexAnalyzers(indexSettings, defaultIndexAnalyzer, defaultSearchAnalyzer, defaultSearchQuoteAnalyzer,
+ unmodifiableMap(analyzers));
+ }
+
+ private void processAnalyzerFactory(DeprecationLogger deprecationLogger,
+ IndexSettings indexSettings,
+ String name,
+ AnalyzerProvider<?> analyzerFactory,
+ Map<String, NamedAnalyzer> analyzerAliases,
+ Map<String, NamedAnalyzer> analyzers, Map<String, TokenFilterFactory> tokenFilters,
+ Map<String, CharFilterFactory> charFilters, Map<String, TokenizerFactory> tokenizers) {
+ /*
+ * Lucene defaults positionIncrementGap to 0 in all analyzers but
+ * Elasticsearch defaults them to 0 only before version 2.0
+ * and 100 afterwards so we override the positionIncrementGap if it
+ * doesn't match here.
+ */
+ int overridePositionIncrementGap = TextFieldMapper.Defaults.POSITION_INCREMENT_GAP;
+ if (analyzerFactory instanceof CustomAnalyzerProvider) {
+ ((CustomAnalyzerProvider) analyzerFactory).build(tokenizers, charFilters, tokenFilters);
+ /*
+ * Custom analyzers already default to the correct, version
+ * dependent positionIncrementGap and the user is be able to
+ * configure the positionIncrementGap directly on the analyzer so
+ * we disable overriding the positionIncrementGap to preserve the
+ * user's setting.
+ */
+ overridePositionIncrementGap = Integer.MIN_VALUE;
+ }
+ Analyzer analyzerF = analyzerFactory.get();
+ if (analyzerF == null) {
+ throw new IllegalArgumentException("analyzer [" + analyzerFactory.name() + "] created null analyzer");
+ }
+ NamedAnalyzer analyzer;
+ if (analyzerF instanceof NamedAnalyzer) {
+ // if we got a named analyzer back, use it...
+ analyzer = (NamedAnalyzer) analyzerF;
+ if (overridePositionIncrementGap >= 0 && analyzer.getPositionIncrementGap(analyzer.name()) != overridePositionIncrementGap) {
+ // unless the positionIncrementGap needs to be overridden
+ analyzer = new NamedAnalyzer(analyzer, overridePositionIncrementGap);
+ }
+ } else {
+ analyzer = new NamedAnalyzer(name, analyzerFactory.scope(), analyzerF, overridePositionIncrementGap);
+ }
+ if (analyzers.containsKey(name)) {
+ throw new IllegalStateException("already registered analyzer with name: " + name);
+ }
+ analyzers.put(name, analyzer);
+ // TODO: remove alias support completely when we no longer support pre 5.0 indices
+ final String analyzerAliasKey = "index.analysis.analyzer." + analyzerFactory.name() + ".alias";
+ if (indexSettings.getSettings().get(analyzerAliasKey) != null) {
+ if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_5_0_0_alpha6)) {
+ // do not allow alias creation if the index was created on or after v5.0 alpha6
+ throw new IllegalArgumentException("setting [" + analyzerAliasKey + "] is not supported");
+ }
+
+ // the setting is now removed but we only support it for loading indices created before v5.0
+ deprecationLogger.deprecated("setting [{}] is only allowed on index [{}] because it was created before 5.x; " +
+ "analyzer aliases can no longer be created on new indices.", analyzerAliasKey, indexSettings.getIndex().getName());
+ Set<String> aliases = Sets.newHashSet(indexSettings.getSettings().getAsArray(analyzerAliasKey));
+ for (String alias : aliases) {
+ if (analyzerAliases.putIfAbsent(alias, analyzer) != null) {
+ throw new IllegalStateException("alias [" + alias + "] is already used by [" + analyzerAliases.get(alias).name() + "]");
+ }
+ }
+ }
+ }
}
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java
deleted file mode 100644
index cb84e6c6d0..0000000000
--- a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java
+++ /dev/null
@@ -1,218 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.analysis;
-
-import org.apache.lucene.analysis.Analyzer;
-import org.elasticsearch.Version;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.util.set.Sets;
-import org.elasticsearch.index.AbstractIndexComponent;
-import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.mapper.TextFieldMapper;
-
-import java.io.Closeable;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Set;
-
-import static java.util.Collections.unmodifiableMap;
-
-/**
- *
- */
-public class AnalysisService extends AbstractIndexComponent implements Closeable {
-
- private final Map<String, NamedAnalyzer> analyzers;
- private final Map<String, TokenizerFactory> tokenizers;
- private final Map<String, CharFilterFactory> charFilters;
- private final Map<String, TokenFilterFactory> tokenFilters;
-
- private final NamedAnalyzer defaultIndexAnalyzer;
- private final NamedAnalyzer defaultSearchAnalyzer;
- private final NamedAnalyzer defaultSearchQuoteAnalyzer;
-
- public AnalysisService(IndexSettings indexSettings,
- Map<String, AnalyzerProvider<?>> analyzerProviders,
- Map<String, TokenizerFactory> tokenizerFactoryFactories,
- Map<String, CharFilterFactory> charFilterFactoryFactories,
- Map<String, TokenFilterFactory> tokenFilterFactoryFactories) {
- super(indexSettings);
- this.tokenizers = unmodifiableMap(tokenizerFactoryFactories);
- this.charFilters = unmodifiableMap(charFilterFactoryFactories);
- this.tokenFilters = unmodifiableMap(tokenFilterFactoryFactories);
- analyzerProviders = new HashMap<>(analyzerProviders);
-
- Map<String, NamedAnalyzer> analyzerAliases = new HashMap<>();
- Map<String, NamedAnalyzer> analyzers = new HashMap<>();
- for (Map.Entry<String, AnalyzerProvider<?>> entry : analyzerProviders.entrySet()) {
- processAnalyzerFactory(entry.getKey(), entry.getValue(), analyzerAliases, analyzers);
- }
- for (Map.Entry<String, NamedAnalyzer> entry : analyzerAliases.entrySet()) {
- String key = entry.getKey();
- if (analyzers.containsKey(key) &&
- ("default".equals(key) || "default_search".equals(key) || "default_search_quoted".equals(key)) == false) {
- throw new IllegalStateException("already registered analyzer with name: " + key);
- } else {
- NamedAnalyzer configured = entry.getValue();
- analyzers.put(key, configured);
- }
- }
-
- if (!analyzers.containsKey("default")) {
- processAnalyzerFactory("default", new StandardAnalyzerProvider(indexSettings, null, "default", Settings.Builder.EMPTY_SETTINGS),
- analyzerAliases, analyzers);
- }
- if (!analyzers.containsKey("default_search")) {
- analyzers.put("default_search", analyzers.get("default"));
- }
- if (!analyzers.containsKey("default_search_quoted")) {
- analyzers.put("default_search_quoted", analyzers.get("default_search"));
- }
-
-
- NamedAnalyzer defaultAnalyzer = analyzers.get("default");
- if (defaultAnalyzer == null) {
- throw new IllegalArgumentException("no default analyzer configured");
- }
- if (analyzers.containsKey("default_index")) {
- final Version createdVersion = indexSettings.getIndexVersionCreated();
- if (createdVersion.onOrAfter(Version.V_5_0_0_alpha1)) {
- throw new IllegalArgumentException("setting [index.analysis.analyzer.default_index] is not supported anymore, use [index.analysis.analyzer.default] instead for index [" + index().getName() + "]");
- } else {
- deprecationLogger.deprecated("setting [index.analysis.analyzer.default_index] is deprecated, use [index.analysis.analyzer.default] instead for index [{}]", index().getName());
- }
- }
- defaultIndexAnalyzer = analyzers.containsKey("default_index") ? analyzers.get("default_index") : defaultAnalyzer;
- defaultSearchAnalyzer = analyzers.containsKey("default_search") ? analyzers.get("default_search") : defaultAnalyzer;
- defaultSearchQuoteAnalyzer = analyzers.containsKey("default_search_quote") ? analyzers.get("default_search_quote") : defaultSearchAnalyzer;
-
- for (Map.Entry<String, NamedAnalyzer> analyzer : analyzers.entrySet()) {
- if (analyzer.getKey().startsWith("_")) {
- throw new IllegalArgumentException("analyzer name must not start with '_'. got \"" + analyzer.getKey() + "\"");
- }
- }
- this.analyzers = unmodifiableMap(analyzers);
- }
-
- private void processAnalyzerFactory(String name, AnalyzerProvider<?> analyzerFactory, Map<String, NamedAnalyzer> analyzerAliases, Map<String, NamedAnalyzer> analyzers) {
- /*
- * Lucene defaults positionIncrementGap to 0 in all analyzers but
- * Elasticsearch defaults them to 0 only before version 2.0
- * and 100 afterwards so we override the positionIncrementGap if it
- * doesn't match here.
- */
- int overridePositionIncrementGap = TextFieldMapper.Defaults.POSITION_INCREMENT_GAP;
- if (analyzerFactory instanceof CustomAnalyzerProvider) {
- ((CustomAnalyzerProvider) analyzerFactory).build(this);
- /*
- * Custom analyzers already default to the correct, version
- * dependent positionIncrementGap and the user is be able to
- * configure the positionIncrementGap directly on the analyzer so
- * we disable overriding the positionIncrementGap to preserve the
- * user's setting.
- */
- overridePositionIncrementGap = Integer.MIN_VALUE;
- }
- Analyzer analyzerF = analyzerFactory.get();
- if (analyzerF == null) {
- throw new IllegalArgumentException("analyzer [" + analyzerFactory.name() + "] created null analyzer");
- }
- NamedAnalyzer analyzer;
- if (analyzerF instanceof NamedAnalyzer) {
- // if we got a named analyzer back, use it...
- analyzer = (NamedAnalyzer) analyzerF;
- if (overridePositionIncrementGap >= 0 && analyzer.getPositionIncrementGap(analyzer.name()) != overridePositionIncrementGap) {
- // unless the positionIncrementGap needs to be overridden
- analyzer = new NamedAnalyzer(analyzer, overridePositionIncrementGap);
- }
- } else {
- analyzer = new NamedAnalyzer(name, analyzerFactory.scope(), analyzerF, overridePositionIncrementGap);
- }
- if (analyzers.containsKey(name)) {
- throw new IllegalStateException("already registered analyzer with name: " + name);
- }
- analyzers.put(name, analyzer);
- // TODO: remove alias support completely when we no longer support pre 5.0 indices
- final String analyzerAliasKey = "index.analysis.analyzer." + analyzerFactory.name() + ".alias";
- if (indexSettings.getSettings().get(analyzerAliasKey) != null) {
- if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_5_0_0_alpha6)) {
- // do not allow alias creation if the index was created on or after v5.0 alpha6
- throw new IllegalArgumentException("setting [" + analyzerAliasKey + "] is not supported");
- }
-
- // the setting is now removed but we only support it for loading indices created before v5.0
- deprecationLogger.deprecated("setting [{}] is only allowed on index [{}] because it was created before 5.x; " +
- "analyzer aliases can no longer be created on new indices.", analyzerAliasKey, index().getName());
- Set<String> aliases = Sets.newHashSet(indexSettings.getSettings().getAsArray(analyzerAliasKey));
- for (String alias : aliases) {
- if (analyzerAliases.putIfAbsent(alias, analyzer) != null) {
- throw new IllegalStateException("alias [" + alias + "] is already used by [" + analyzerAliases.get(alias).name() + "]");
- }
- }
- }
- }
-
- @Override
- public void close() {
- for (NamedAnalyzer analyzer : analyzers.values()) {
- if (analyzer.scope() == AnalyzerScope.INDEX) {
- try {
- analyzer.close();
- } catch (NullPointerException e) {
- // because analyzers are aliased, they might be closed several times
- // an NPE is thrown in this case, so ignore....
- // TODO: Analyzer's can no longer have aliases in indices created in 5.x and beyond,
- // so we only allow the aliases for analyzers on indices created pre 5.x for backwards
- // compatibility. Once pre 5.0 indices are no longer supported, this check should be removed.
- } catch (Exception e) {
- logger.debug("failed to close analyzer {}", analyzer);
- }
- }
- }
- }
-
- public NamedAnalyzer analyzer(String name) {
- return analyzers.get(name);
- }
-
- public NamedAnalyzer defaultIndexAnalyzer() {
- return defaultIndexAnalyzer;
- }
-
- public NamedAnalyzer defaultSearchAnalyzer() {
- return defaultSearchAnalyzer;
- }
-
- public NamedAnalyzer defaultSearchQuoteAnalyzer() {
- return defaultSearchQuoteAnalyzer;
- }
-
- public TokenizerFactory tokenizer(String name) {
- return tokenizers.get(name);
- }
-
- public CharFilterFactory charFilter(String name) {
- return charFilters.get(name);
- }
-
- public TokenFilterFactory tokenFilter(String name) {
- return tokenFilters.get(name);
- }
-}
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java b/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java
index 144cbe8174..63861e8084 100644
--- a/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java
+++ b/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java
@@ -26,6 +26,7 @@ import org.elasticsearch.index.mapper.TextFieldMapper;
import java.util.ArrayList;
import java.util.List;
+import java.util.Map;
/**
* A custom analyzer that is built out of a single {@link org.apache.lucene.analysis.Tokenizer} and a list
@@ -43,35 +44,36 @@ public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider<Custom
this.analyzerSettings = settings;
}
- public void build(AnalysisService analysisService) {
+ public void build(final Map<String, TokenizerFactory> tokenizers, final Map<String, CharFilterFactory> charFilters,
+ final Map<String, TokenFilterFactory> tokenFilters) {
String tokenizerName = analyzerSettings.get("tokenizer");
if (tokenizerName == null) {
throw new IllegalArgumentException("Custom Analyzer [" + name() + "] must be configured with a tokenizer");
}
- TokenizerFactory tokenizer = analysisService.tokenizer(tokenizerName);
+ TokenizerFactory tokenizer = tokenizers.get(tokenizerName);
if (tokenizer == null) {
throw new IllegalArgumentException("Custom Analyzer [" + name() + "] failed to find tokenizer under name [" + tokenizerName + "]");
}
- List<CharFilterFactory> charFilters = new ArrayList<>();
+ List<CharFilterFactory> charFiltersList = new ArrayList<>();
String[] charFilterNames = analyzerSettings.getAsArray("char_filter");
for (String charFilterName : charFilterNames) {
- CharFilterFactory charFilter = analysisService.charFilter(charFilterName);
+ CharFilterFactory charFilter = charFilters.get(charFilterName);
if (charFilter == null) {
throw new IllegalArgumentException("Custom Analyzer [" + name() + "] failed to find char_filter under name [" + charFilterName + "]");
}
- charFilters.add(charFilter);
+ charFiltersList.add(charFilter);
}
- List<TokenFilterFactory> tokenFilters = new ArrayList<>();
+ List<TokenFilterFactory> tokenFilterList = new ArrayList<>();
String[] tokenFilterNames = analyzerSettings.getAsArray("filter");
for (String tokenFilterName : tokenFilterNames) {
- TokenFilterFactory tokenFilter = analysisService.tokenFilter(tokenFilterName);
+ TokenFilterFactory tokenFilter = tokenFilters.get(tokenFilterName);
if (tokenFilter == null) {
throw new IllegalArgumentException("Custom Analyzer [" + name() + "] failed to find filter under name [" + tokenFilterName + "]");
}
- tokenFilters.add(tokenFilter);
+ tokenFilterList.add(tokenFilter);
}
int positionIncrementGap = TextFieldMapper.Defaults.POSITION_INCREMENT_GAP;
@@ -93,8 +95,8 @@ public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider<Custom
int offsetGap = analyzerSettings.getAsInt("offset_gap", -1);;
this.customAnalyzer = new CustomAnalyzer(tokenizer,
- charFilters.toArray(new CharFilterFactory[charFilters.size()]),
- tokenFilters.toArray(new TokenFilterFactory[tokenFilters.size()]),
+ charFiltersList.toArray(new CharFilterFactory[charFiltersList.size()]),
+ tokenFilterList.toArray(new TokenFilterFactory[tokenFilterList.size()]),
positionIncrementGap,
offsetGap
);
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/IndexAnalyzers.java b/core/src/main/java/org/elasticsearch/index/analysis/IndexAnalyzers.java
new file mode 100644
index 0000000000..127714178b
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/index/analysis/IndexAnalyzers.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.index.analysis;
+
+import org.apache.lucene.util.IOUtils;
+import org.elasticsearch.index.AbstractIndexComponent;
+import org.elasticsearch.index.IndexSettings;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.Map;
+
+/**
+ * IndexAnalyzers contains a name to analyzer mapping for a specific index.
+ * This class only holds analyzers that are explicitly configured for an index and doesn't allow
+ * access to individual tokenizers, char or token filter.
+ *
+ * @see AnalysisRegistry
+ */
+public final class IndexAnalyzers extends AbstractIndexComponent implements Closeable {
+ private final NamedAnalyzer defaultIndexAnalyzer;
+ private final NamedAnalyzer defaultSearchAnalyzer;
+ private final NamedAnalyzer defaultSearchQuoteAnalyzer;
+ private final Map<String, NamedAnalyzer> analyzers;
+ private final IndexSettings indexSettings;
+
+ public IndexAnalyzers(IndexSettings indexSettings, NamedAnalyzer defaultIndexAnalyzer, NamedAnalyzer defaultSearchAnalyzer,
+ NamedAnalyzer defaultSearchQuoteAnalyzer, Map<String, NamedAnalyzer> analyzers) {
+ super(indexSettings);
+ this.defaultIndexAnalyzer = defaultIndexAnalyzer;
+ this.defaultSearchAnalyzer = defaultSearchAnalyzer;
+ this.defaultSearchQuoteAnalyzer = defaultSearchQuoteAnalyzer;
+ this.analyzers = analyzers;
+ this.indexSettings = indexSettings;
+ }
+
+ /**
+ * Returns an analyzer mapped to the given name or <code>null</code> if not present
+ */
+ public NamedAnalyzer get(String name) {
+ return analyzers.get(name);
+ }
+
+
+ /**
+ * Returns the default index analyzer for this index
+ */
+ public NamedAnalyzer getDefaultIndexAnalyzer() {
+ return defaultIndexAnalyzer;
+ }
+
+ /**
+ * Returns the default search analyzer for this index
+ */
+ public NamedAnalyzer getDefaultSearchAnalyzer() {
+ return defaultSearchAnalyzer;
+ }
+
+ /**
+ * Returns the default search quote analyzer for this index
+ */
+ public NamedAnalyzer getDefaultSearchQuoteAnalyzer() {
+ return defaultSearchQuoteAnalyzer;
+ }
+
+ @Override
+ public void close() throws IOException {
+ IOUtils.close(() -> analyzers.values().stream()
+ .filter(a -> a.scope() == AnalyzerScope.INDEX)
+ .iterator());
+ }
+
+ /**
+ * Returns the indices settings
+ */
+ public IndexSettings getIndexSettings() {
+ return indexSettings;
+ }
+
+}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/AllFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/AllFieldMapper.java
index c418dd5e6e..6c1477d8d0 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/AllFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/AllFieldMapper.java
@@ -106,9 +106,9 @@ public class AllFieldMapper extends MetadataFieldMapper {
public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node,
ParserContext parserContext) throws MapperParsingException {
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
- builder.fieldType().setIndexAnalyzer(parserContext.analysisService().defaultIndexAnalyzer());
- builder.fieldType().setSearchAnalyzer(parserContext.analysisService().defaultSearchAnalyzer());
- builder.fieldType().setSearchQuoteAnalyzer(parserContext.analysisService().defaultSearchQuoteAnalyzer());
+ builder.fieldType().setIndexAnalyzer(parserContext.getIndexAnalyzers().getDefaultIndexAnalyzer());
+ builder.fieldType().setSearchAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchAnalyzer());
+ builder.fieldType().setSearchQuoteAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchQuoteAnalyzer());
// parseField below will happily parse the doc_values setting, but it is then never passed to
// the AllFieldMapper ctor in the builder since it is not valid. Here we validate
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java
index 13bb7d255a..09035bfa3c 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java
@@ -153,7 +153,7 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
if (searchAnalyzer != null) {
throw new MapperParsingException("analyzer on completion field [" + name + "] must be set when search_analyzer is set");
}
- indexAnalyzer = searchAnalyzer = parserContext.analysisService().analyzer("simple");
+ indexAnalyzer = searchAnalyzer = parserContext.getIndexAnalyzers().get("simple");
} else if (searchAnalyzer == null) {
searchAnalyzer = indexAnalyzer;
}
@@ -164,7 +164,7 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
}
private NamedAnalyzer getNamedAnalyzer(ParserContext parserContext, String name) {
- NamedAnalyzer analyzer = parserContext.analysisService().analyzer(name);
+ NamedAnalyzer analyzer = parserContext.getIndexAnalyzers().get(name);
if (analyzer == null) {
throw new IllegalArgumentException("Can't find default or mapped analyzer with name [" + name + "]");
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper2x.java b/core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper2x.java
index 655af43710..590ca0f861 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper2x.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper2x.java
@@ -206,7 +206,7 @@ public class CompletionFieldMapper2x extends FieldMapper {
throw new MapperParsingException(
"analyzer on completion field [" + name + "] must be set when search_analyzer is set");
}
- indexAnalyzer = searchAnalyzer = parserContext.analysisService().analyzer("simple");
+ indexAnalyzer = searchAnalyzer = parserContext.getIndexAnalyzers().get("simple");
} else if (searchAnalyzer == null) {
searchAnalyzer = indexAnalyzer;
}
@@ -217,7 +217,7 @@ public class CompletionFieldMapper2x extends FieldMapper {
}
private NamedAnalyzer getNamedAnalyzer(ParserContext parserContext, String name) {
- NamedAnalyzer analyzer = parserContext.analysisService().analyzer(name);
+ NamedAnalyzer analyzer = parserContext.getIndexAnalyzers().get(name);
if (analyzer == null) {
throw new IllegalArgumentException("Can't find default or mapped analyzer with name [" + name + "]");
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java
index a4d1a0c5e4..eb6d6a9a3e 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java
@@ -32,7 +32,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.analysis.AnalysisService;
+import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.mapper.MetadataFieldMapper.TypeParser;
import org.elasticsearch.search.internal.SearchContext;
@@ -147,11 +147,11 @@ public class DocumentMapper implements ToXContent {
}
MapperUtils.collect(this.mapping.root, newObjectMappers, newFieldMappers);
- final AnalysisService analysisService = mapperService.analysisService();
+ final IndexAnalyzers indexAnalyzers = mapperService.getIndexAnalyzers();
this.fieldMappers = new DocumentFieldMappers(newFieldMappers,
- analysisService.defaultIndexAnalyzer(),
- analysisService.defaultSearchAnalyzer(),
- analysisService.defaultSearchQuoteAnalyzer());
+ indexAnalyzers.getDefaultIndexAnalyzer(),
+ indexAnalyzers.getDefaultSearchAnalyzer(),
+ indexAnalyzers.getDefaultSearchQuoteAnalyzer());
Map<String, ObjectMapper> builder = new HashMap<>();
for (ObjectMapper objectMapper : newObjectMappers) {
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java
index f336fbb01a..2cdeed9f04 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapperParser.java
@@ -22,14 +22,13 @@ package org.elasticsearch.index.mapper;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.analysis.AnalysisService;
+import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.mapper.MapperRegistry;
@@ -44,7 +43,7 @@ import static java.util.Collections.unmodifiableMap;
public class DocumentMapperParser {
final MapperService mapperService;
- final AnalysisService analysisService;
+ final IndexAnalyzers indexAnalyzers;
private final SimilarityService similarityService;
private final Supplier<QueryShardContext> queryShardContextSupplier;
@@ -56,12 +55,12 @@ public class DocumentMapperParser {
private final Map<String, Mapper.TypeParser> typeParsers;
private final Map<String, MetadataFieldMapper.TypeParser> rootTypeParsers;
- public DocumentMapperParser(IndexSettings indexSettings, MapperService mapperService, AnalysisService analysisService,
+ public DocumentMapperParser(IndexSettings indexSettings, MapperService mapperService, IndexAnalyzers indexAnalyzers,
SimilarityService similarityService, MapperRegistry mapperRegistry,
Supplier<QueryShardContext> queryShardContextSupplier) {
this.parseFieldMatcher = new ParseFieldMatcher(indexSettings.getSettings());
this.mapperService = mapperService;
- this.analysisService = analysisService;
+ this.indexAnalyzers = indexAnalyzers;
this.similarityService = similarityService;
this.queryShardContextSupplier = queryShardContextSupplier;
this.typeParsers = mapperRegistry.getMapperParsers();
@@ -70,7 +69,7 @@ public class DocumentMapperParser {
}
public Mapper.TypeParser.ParserContext parserContext(String type) {
- return new Mapper.TypeParser.ParserContext(type, analysisService, similarityService::getSimilarity, mapperService, typeParsers::get, indexVersionCreated, parseFieldMatcher, queryShardContextSupplier.get());
+ return new Mapper.TypeParser.ParserContext(type, indexAnalyzers, similarityService::getSimilarity, mapperService, typeParsers::get, indexVersionCreated, parseFieldMatcher, queryShardContextSupplier.get());
}
public DocumentMapper parse(@Nullable String type, CompressedXContent source) throws MapperParsingException {
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyTokenCountFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyTokenCountFieldMapper.java
index 2ed1b544a0..7981b40021 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyTokenCountFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/LegacyTokenCountFieldMapper.java
@@ -97,7 +97,7 @@ public class LegacyTokenCountFieldMapper extends LegacyIntegerFieldMapper {
builder.nullValue(nodeIntegerValue(propNode));
iterator.remove();
} else if (propName.equals("analyzer")) {
- NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
+ NamedAnalyzer analyzer = parserContext.getIndexAnalyzers().get(propNode.toString());
if (analyzer == null) {
throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java b/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java
index d1341c8f7d..0692856642 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java
@@ -24,7 +24,7 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.index.analysis.AnalysisService;
+import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
@@ -85,7 +85,7 @@ public abstract class Mapper implements ToXContent, Iterable<Mapper> {
private final String type;
- private final AnalysisService analysisService;
+ private final IndexAnalyzers indexAnalyzers;
private final Function<String, SimilarityProvider> similarityLookupService;
@@ -99,11 +99,11 @@ public abstract class Mapper implements ToXContent, Iterable<Mapper> {
private final QueryShardContext queryShardContext;
- public ParserContext(String type, AnalysisService analysisService, Function<String, SimilarityProvider> similarityLookupService,
+ public ParserContext(String type, IndexAnalyzers indexAnalyzers, Function<String, SimilarityProvider> similarityLookupService,
MapperService mapperService, Function<String, TypeParser> typeParsers,
Version indexVersionCreated, ParseFieldMatcher parseFieldMatcher, QueryShardContext queryShardContext) {
this.type = type;
- this.analysisService = analysisService;
+ this.indexAnalyzers = indexAnalyzers;
this.similarityLookupService = similarityLookupService;
this.mapperService = mapperService;
this.typeParsers = typeParsers;
@@ -116,8 +116,8 @@ public abstract class Mapper implements ToXContent, Iterable<Mapper> {
return type;
}
- public AnalysisService analysisService() {
- return analysisService;
+ public IndexAnalyzers getIndexAnalyzers() {
+ return indexAnalyzers;
}
public SimilarityProvider getSimilarity(String name) {
@@ -159,7 +159,7 @@ public abstract class Mapper implements ToXContent, Iterable<Mapper> {
static class MultiFieldParserContext extends ParserContext {
MultiFieldParserContext(ParserContext in) {
- super(in.type(), in.analysisService, in.similarityLookupService(), in.mapperService(), in.typeParsers(), in.indexVersionCreated(), in.parseFieldMatcher(), in.queryShardContext());
+ super(in.type(), in.indexAnalyzers, in.similarityLookupService(), in.mapperService(), in.typeParsers(), in.indexVersionCreated(), in.parseFieldMatcher(), in.queryShardContext());
}
}
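The mapper-side changes above all follow the same lookup-or-fail pattern; condensed into one sketch (analyzerName and fieldName are illustrative placeholders):

    NamedAnalyzer analyzer = parserContext.getIndexAnalyzers().get(analyzerName);
    if (analyzer == null) {
        // same failure mode the individual mappers use when a name cannot be resolved
        throw new MapperParsingException("Analyzer [" + analyzerName + "] not found for field [" + fieldName + "]");
    }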
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java
index 148fde7b64..2da082cba7 100755
--- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java
@@ -36,7 +36,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.analysis.AnalysisService;
+import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.mapper.Mapper.BuilderContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.similarity.SimilarityService;
@@ -100,7 +100,7 @@ public class MapperService extends AbstractIndexComponent {
@Deprecated
public static final String PERCOLATOR_LEGACY_TYPE_NAME = ".percolator";
- private final AnalysisService analysisService;
+ private final IndexAnalyzers indexAnalyzers;
/**
* Will create types automatically if they do not exists in the mapping definition yet
@@ -127,16 +127,16 @@ public class MapperService extends AbstractIndexComponent {
final MapperRegistry mapperRegistry;
- public MapperService(IndexSettings indexSettings, AnalysisService analysisService,
+ public MapperService(IndexSettings indexSettings, IndexAnalyzers indexAnalyzers,
SimilarityService similarityService, MapperRegistry mapperRegistry,
Supplier<QueryShardContext> queryShardContextSupplier) {
super(indexSettings);
- this.analysisService = analysisService;
+ this.indexAnalyzers = indexAnalyzers;
this.fieldTypes = new FieldTypeLookup();
- this.documentParser = new DocumentMapperParser(indexSettings, this, analysisService, similarityService, mapperRegistry, queryShardContextSupplier);
- this.indexAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultIndexAnalyzer(), p -> p.indexAnalyzer());
- this.searchAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultSearchAnalyzer(), p -> p.searchAnalyzer());
- this.searchQuoteAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultSearchQuoteAnalyzer(), p -> p.searchQuoteAnalyzer());
+ this.documentParser = new DocumentMapperParser(indexSettings, this, indexAnalyzers, similarityService, mapperRegistry, queryShardContextSupplier);
+ this.indexAnalyzer = new MapperAnalyzerWrapper(indexAnalyzers.getDefaultIndexAnalyzer(), p -> p.indexAnalyzer());
+ this.searchAnalyzer = new MapperAnalyzerWrapper(indexAnalyzers.getDefaultSearchAnalyzer(), p -> p.searchAnalyzer());
+ this.searchQuoteAnalyzer = new MapperAnalyzerWrapper(indexAnalyzers.getDefaultSearchQuoteAnalyzer(), p -> p.searchQuoteAnalyzer());
this.mapperRegistry = mapperRegistry;
this.dynamic = this.indexSettings.getValue(INDEX_MAPPER_DYNAMIC_SETTING);
@@ -171,8 +171,8 @@ public class MapperService extends AbstractIndexComponent {
};
}
- public AnalysisService analysisService() {
- return this.analysisService;
+ public IndexAnalyzers getIndexAnalyzers() {
+ return this.indexAnalyzers;
}
public DocumentMapperParser documentMapperParser() {
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java b/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java
index 8a2aca97e6..7ff5c4a37f 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/ParseContext.java
@@ -29,15 +29,11 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lucene.all.AllEntries;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.analysis.AnalysisService;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
-/**
- *
- */
public abstract class ParseContext {
/** Fork of {@link org.apache.lucene.document.Document} with additional functionality. */
@@ -243,11 +239,6 @@ public abstract class ParseContext {
}
@Override
- public AnalysisService analysisService() {
- return in.analysisService();
- }
-
- @Override
public MapperService mapperService() {
return in.mapperService();
}
@@ -386,11 +377,6 @@ public abstract class ParseContext {
}
@Override
- public AnalysisService analysisService() {
- return docMapperParser.analysisService;
- }
-
- @Override
public MapperService mapperService() {
return docMapperParser.mapperService;
}
@@ -525,8 +511,6 @@ public abstract class ParseContext {
public abstract DocumentMapper docMapper();
- public abstract AnalysisService analysisService();
-
public abstract MapperService mapperService();
public abstract Field version();
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/StringFieldMapper.java
index ec7a90148a..d290ef0fb5 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/StringFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/StringFieldMapper.java
@@ -318,13 +318,13 @@ public class StringFieldMapper extends FieldMapper {
// we need to update to actual analyzers if they are not set in this case...
// so we can inject the position increment gap...
if (builder.fieldType().indexAnalyzer() == null) {
- builder.fieldType().setIndexAnalyzer(parserContext.analysisService().defaultIndexAnalyzer());
+ builder.fieldType().setIndexAnalyzer(parserContext.getIndexAnalyzers().getDefaultIndexAnalyzer());
}
if (builder.fieldType().searchAnalyzer() == null) {
- builder.fieldType().setSearchAnalyzer(parserContext.analysisService().defaultSearchAnalyzer());
+ builder.fieldType().setSearchAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchAnalyzer());
}
if (builder.fieldType().searchQuoteAnalyzer() == null) {
- builder.fieldType().setSearchQuoteAnalyzer(parserContext.analysisService().defaultSearchQuoteAnalyzer());
+ builder.fieldType().setSearchQuoteAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchQuoteAnalyzer());
}
iterator.remove();
} else if (propName.equals("ignore_above")) {
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java
index 53b02717cc..63febfcaf0 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java
@@ -174,13 +174,13 @@ public class TextFieldMapper extends FieldMapper {
}
node.put("fielddata", fielddata);
}
-
+
return new StringFieldMapper.TypeParser().parse(fieldName, node, parserContext);
}
TextFieldMapper.Builder builder = new TextFieldMapper.Builder(fieldName);
- builder.fieldType().setIndexAnalyzer(parserContext.analysisService().defaultIndexAnalyzer());
- builder.fieldType().setSearchAnalyzer(parserContext.analysisService().defaultSearchAnalyzer());
- builder.fieldType().setSearchQuoteAnalyzer(parserContext.analysisService().defaultSearchQuoteAnalyzer());
+ builder.fieldType().setIndexAnalyzer(parserContext.getIndexAnalyzers().getDefaultIndexAnalyzer());
+ builder.fieldType().setSearchAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchAnalyzer());
+ builder.fieldType().setSearchQuoteAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchQuoteAnalyzer());
parseTextField(builder, fieldName, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/TokenCountFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/TokenCountFieldMapper.java
index 9eeaf4012f..3b6026d1b2 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/TokenCountFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/TokenCountFieldMapper.java
@@ -89,7 +89,7 @@ public class TokenCountFieldMapper extends FieldMapper {
builder.nullValue(nodeIntegerValue(propNode));
iterator.remove();
} else if (propName.equals("analyzer")) {
- NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
+ NamedAnalyzer analyzer = parserContext.getIndexAnalyzers().get(propNode.toString());
if (analyzer == null) {
throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
}
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java b/core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java
index eaa97ac510..f192efc24a 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java
@@ -125,21 +125,21 @@ public class TypeParsers {
builder.storeTermVectorPayloads(nodeBooleanValue("store_term_vector_payloads", propNode, parserContext));
iterator.remove();
} else if (propName.equals("analyzer")) {
- NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
+ NamedAnalyzer analyzer = parserContext.getIndexAnalyzers().get(propNode.toString());
if (analyzer == null) {
throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
}
indexAnalyzer = analyzer;
iterator.remove();
} else if (propName.equals("search_analyzer")) {
- NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
+ NamedAnalyzer analyzer = parserContext.getIndexAnalyzers().get(propNode.toString());
if (analyzer == null) {
throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
}
searchAnalyzer = analyzer;
iterator.remove();
} else if (propName.equals("search_quote_analyzer")) {
- NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
+ NamedAnalyzer analyzer = parserContext.getIndexAnalyzers().get(propNode.toString());
if (analyzer == null) {
throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
}
diff --git a/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java
index 21328ff8fc..3bd7a8abc1 100644
--- a/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java
@@ -383,7 +383,7 @@ public class CommonTermsQueryBuilder extends AbstractQueryBuilder<CommonTermsQue
analyzerObj = context.getMapperService().searchAnalyzer();
}
} else {
- analyzerObj = context.getMapperService().analysisService().analyzer(analyzer);
+ analyzerObj = context.getMapperService().getIndexAnalyzers().get(analyzer);
if (analyzerObj == null) {
throw new QueryShardException(context, "[common] analyzer [" + analyzer + "] not found");
}
diff --git a/core/src/main/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilder.java
index bff28d0f5b..d5e4d1d2a8 100644
--- a/core/src/main/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilder.java
@@ -164,7 +164,7 @@ public class MatchPhrasePrefixQueryBuilder extends AbstractQueryBuilder<MatchPhr
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
// validate context specific fields
- if (analyzer != null && context.getAnalysisService().analyzer(analyzer) == null) {
+ if (analyzer != null && context.getIndexAnalyzers().get(analyzer) == null) {
throw new QueryShardException(context, "[" + NAME + "] analyzer [" + analyzer + "] not found");
}
diff --git a/core/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java
index 6fd6922d9c..4715a5cfa8 100644
--- a/core/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java
@@ -140,7 +140,7 @@ public class MatchPhraseQueryBuilder extends AbstractQueryBuilder<MatchPhraseQue
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
// validate context specific fields
- if (analyzer != null && context.getAnalysisService().analyzer(analyzer) == null) {
+ if (analyzer != null && context.getIndexAnalyzers().get(analyzer) == null) {
throw new QueryShardException(context, "[" + NAME + "] analyzer [" + analyzer + "] not found");
}
diff --git a/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java
index dc6ac99408..25397d2a3e 100644
--- a/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java
@@ -444,7 +444,7 @@ public class MatchQueryBuilder extends AbstractQueryBuilder<MatchQueryBuilder> {
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
// validate context specific fields
- if (analyzer != null && context.getAnalysisService().analyzer(analyzer) == null) {
+ if (analyzer != null && context.getIndexAnalyzers().get(analyzer) == null) {
throw new QueryShardException(context, "[" + NAME + "] analyzer [" + analyzer + "] not found");
}
diff --git a/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java
index 666422f9dc..7ba39d7b34 100644
--- a/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java
@@ -147,7 +147,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
*/
public static final class Item implements ToXContent, Writeable {
public static final Item[] EMPTY_ARRAY = new Item[0];
-
+
public interface Field {
ParseField INDEX = new ParseField("_index");
ParseField TYPE = new ParseField("_type");
@@ -1021,7 +1021,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
}
// set analyzer
- Analyzer analyzerObj = context.getAnalysisService().analyzer(analyzer);
+ Analyzer analyzerObj = context.getIndexAnalyzers().get(analyzer);
if (analyzerObj == null) {
analyzerObj = context.getMapperService().searchAnalyzer();
}
diff --git a/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java
index 6ffcb19ea3..f45009f746 100644
--- a/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java
@@ -708,7 +708,7 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
protected Query doToQuery(QueryShardContext context) throws IOException {
MultiMatchQuery multiMatchQuery = new MultiMatchQuery(context);
if (analyzer != null) {
- if (context.getAnalysisService().analyzer(analyzer) == null) {
+ if (context.getIndexAnalyzers().get(analyzer) == null) {
throw new QueryShardException(context, "[" + NAME + "] analyzer [" + analyzer + "] not found");
}
multiMatchQuery.setAnalyzer(analyzer);
diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java
index 78869f5374..377afca2f6 100644
--- a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java
+++ b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java
@@ -41,7 +41,7 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.analysis.AnalysisService;
+import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
@@ -116,8 +116,8 @@ public class QueryShardContext extends QueryRewriteContext {
this.isFilter = false;
}
- public AnalysisService getAnalysisService() {
- return mapperService.analysisService();
+ public IndexAnalyzers getIndexAnalyzers() {
+ return mapperService.getIndexAnalyzers();
}
public Similarity getSearchSimilarity() {
diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java
index be6a170cc2..807343237d 100644
--- a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java
@@ -868,14 +868,14 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
if (analyzer == null) {
qpSettings.defaultAnalyzer(context.getMapperService().searchAnalyzer());
} else {
- NamedAnalyzer namedAnalyzer = context.getAnalysisService().analyzer(analyzer);
+ NamedAnalyzer namedAnalyzer = context.getIndexAnalyzers().get(analyzer);
if (namedAnalyzer == null) {
throw new QueryShardException(context, "[query_string] analyzer [" + analyzer + "] not found");
}
qpSettings.forceAnalyzer(namedAnalyzer);
}
if (quoteAnalyzer != null) {
- NamedAnalyzer namedAnalyzer = context.getAnalysisService().analyzer(quoteAnalyzer);
+ NamedAnalyzer namedAnalyzer = context.getIndexAnalyzers().get(quoteAnalyzer);
if (namedAnalyzer == null) {
throw new QueryShardException(context, "[query_string] quote_analyzer [" + quoteAnalyzer + "] not found");
}
diff --git a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java
index f408c0f147..fbe5964f2a 100644
--- a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java
+++ b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java
@@ -355,7 +355,7 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
if (analyzer == null) {
luceneAnalyzer = context.getMapperService().searchAnalyzer();
} else {
- luceneAnalyzer = context.getAnalysisService().analyzer(analyzer);
+ luceneAnalyzer = context.getIndexAnalyzers().get(analyzer);
if (luceneAnalyzer == null) {
throw new QueryShardException(context, "[" + SimpleQueryStringBuilder.NAME + "] analyzer [" + analyzer
+ "] not found");
diff --git a/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java b/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java
index 835ec8e143..f2161013de 100644
--- a/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java
+++ b/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java
@@ -204,7 +204,7 @@ public class MatchQuery {
}
return context.getMapperService().searchAnalyzer();
} else {
- Analyzer analyzer = context.getMapperService().analysisService().analyzer(this.analyzer);
+ Analyzer analyzer = context.getMapperService().getIndexAnalyzers().get(this.analyzer);
if (analyzer == null) {
throw new IllegalArgumentException("No analyzer found for [" + this.analyzer + "]");
}
diff --git a/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java b/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java
index 2a883f1e08..671178dfcc 100644
--- a/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java
+++ b/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java
@@ -214,12 +214,12 @@ public class TermVectorsService {
MapperService mapperService = indexShard.mapperService();
Analyzer analyzer;
if (perFieldAnalyzer != null && perFieldAnalyzer.containsKey(field)) {
- analyzer = mapperService.analysisService().analyzer(perFieldAnalyzer.get(field).toString());
+ analyzer = mapperService.getIndexAnalyzers().get(perFieldAnalyzer.get(field).toString());
} else {
analyzer = mapperService.fullName(field).indexAnalyzer();
}
if (analyzer == null) {
- analyzer = mapperService.analysisService().defaultIndexAnalyzer();
+ analyzer = mapperService.getIndexAnalyzers().getDefaultIndexAnalyzer();
}
return analyzer;
}
diff --git a/core/src/main/java/org/elasticsearch/search/DefaultSearchContext.java b/core/src/main/java/org/elasticsearch/search/DefaultSearchContext.java
index 20789e015a..9aab786aa3 100644
--- a/core/src/main/java/org/elasticsearch/search/DefaultSearchContext.java
+++ b/core/src/main/java/org/elasticsearch/search/DefaultSearchContext.java
@@ -39,7 +39,6 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
@@ -498,11 +497,6 @@ final class DefaultSearchContext extends SearchContext {
}
@Override
- public AnalysisService analysisService() {
- return indexService.analysisService();
- }
-
- @Override
public SimilarityService similarityService() {
return indexService.similarityService();
}
diff --git a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java
index 04156bfac2..5084f9ecad 100644
--- a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java
+++ b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java
@@ -27,7 +27,6 @@ import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
-import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MappedFieldType;
@@ -260,11 +259,6 @@ public abstract class FilteredSearchContext extends SearchContext {
}
@Override
- public AnalysisService analysisService() {
- return in.analysisService();
- }
-
- @Override
public SimilarityService similarityService() {
return in.similarityService();
}
diff --git a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java
index 459df14a35..63a1995b08 100644
--- a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java
+++ b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java
@@ -33,7 +33,6 @@ import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.AbstractRefCounted;
import org.elasticsearch.common.util.concurrent.RefCounted;
import org.elasticsearch.common.util.iterable.Iterables;
-import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MappedFieldType;
@@ -237,8 +236,6 @@ public abstract class SearchContext extends AbstractRefCounted implements Releas
public abstract MapperService mapperService();
- public abstract AnalysisService analysisService();
-
public abstract SimilarityService similarityService();
public abstract ScriptService scriptService();
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java
index 59555e049c..ca1d7a2306 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestionBuilder.java
@@ -319,7 +319,7 @@ public abstract class SuggestionBuilder<T extends SuggestionBuilder<T>> extends
suggestionContext.setAnalyzer(fieldType.searchAnalyzer());
}
} else {
- Analyzer luceneAnalyzer = mapperService.analysisService().analyzer(analyzer);
+ Analyzer luceneAnalyzer = mapperService.getIndexAnalyzers().get(analyzer);
if (luceneAnalyzer == null) {
throw new IllegalArgumentException("analyzer [" + analyzer + "] doesn't exists");
}
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java
index 9e3beb2ccf..bf9158f9b8 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java
@@ -395,13 +395,13 @@ public final class DirectCandidateGeneratorBuilder implements CandidateGenerator
generator.setField(this.field);
transferIfNotNull(this.size, generator::size);
if (this.preFilter != null) {
- generator.preFilter(mapperService.analysisService().analyzer(this.preFilter));
+ generator.preFilter(mapperService.getIndexAnalyzers().get(this.preFilter));
if (generator.preFilter() == null) {
throw new IllegalArgumentException("Analyzer [" + this.preFilter + "] doesn't exists");
}
}
if (this.postFilter != null) {
- generator.postFilter(mapperService.analysisService().analyzer(this.postFilter));
+ generator.postFilter(mapperService.getIndexAnalyzers().get(this.postFilter));
if (generator.postFilter() == null) {
throw new IllegalArgumentException("Analyzer [" + this.postFilter + "] doesn't exists");
}
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java
index 9933d51405..bcd7bba8d3 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java
@@ -18,7 +18,6 @@
*/
package org.elasticsearch.action.admin.indices;
-import org.apache.lucene.analysis.minhash.MinHashFilter;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
@@ -29,7 +28,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisRegistry;
-import org.elasticsearch.index.analysis.AnalysisService;
+import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.mapper.AllFieldMapper;
import org.elasticsearch.indices.analysis.AnalysisModule;
import org.elasticsearch.test.ESTestCase;
@@ -42,7 +41,7 @@ import static java.util.Collections.emptyList;
public class TransportAnalyzeActionTests extends ESTestCase {
- private AnalysisService analysisService;
+ private IndexAnalyzers indexAnalyzers;
private AnalysisRegistry registry;
private Environment environment;
@@ -71,10 +70,10 @@ public class TransportAnalyzeActionTests extends ESTestCase {
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
environment = new Environment(settings);
registry = new AnalysisModule(environment, emptyList()).getAnalysisRegistry();
- analysisService = registry.build(idxSettings);
+ indexAnalyzers = registry.build(idxSettings);
}
- public void testNoAnalysisService() throws IOException {
+ public void testNoIndexAnalyzers() throws IOException {
AnalyzeRequest request = new AnalyzeRequest();
request.analyzer("standard");
request.text("the quick brown fox");
@@ -87,7 +86,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
request.addTokenFilter("lowercase");
request.addTokenFilter("word_delimiter");
request.text("the qu1ck brown fox");
- analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, randomBoolean() ? analysisService : null, registry, environment);
+ analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, randomBoolean() ? indexAnalyzers : null, registry, environment);
tokens = analyze.getTokens();
assertEquals(6, tokens.size());
assertEquals("qu", tokens.get(1).getTerm());
@@ -100,7 +99,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
request.addTokenFilter("lowercase");
request.addTokenFilter("word_delimiter");
request.text("<p>the qu1ck brown fox</p>");
- analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, randomBoolean() ? analysisService : null, registry, environment);
+ analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, randomBoolean() ? indexAnalyzers : null, registry, environment);
tokens = analyze.getTokens();
assertEquals(6, tokens.size());
assertEquals("the", tokens.get(0).getTerm());
@@ -143,26 +142,26 @@ public class TransportAnalyzeActionTests extends ESTestCase {
assertEquals("<ALPHANUM>", tokens.get(3).getType());
}
- public void testWithAnalysisService() throws IOException {
+ public void testWithIndexAnalyzers() throws IOException {
AnalyzeRequest request = new AnalyzeRequest();
request.analyzer("standard");
request.text("the quick brown fox");
request.analyzer("custom_analyzer");
request.text("the qu1ck brown fox");
- AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, analysisService, registry, environment);
+ AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, indexAnalyzers, registry, environment);
List<AnalyzeResponse.AnalyzeToken> tokens = analyze.getTokens();
assertEquals(4, tokens.size());
request.analyzer("whitespace");
request.text("the qu1ck brown fox-dog");
- analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, analysisService, registry, environment);
+ analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, indexAnalyzers, registry, environment);
tokens = analyze.getTokens();
assertEquals(4, tokens.size());
request.analyzer("custom_analyzer");
request.text("the qu1ck brown fox-dog");
- analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, analysisService, registry, environment);
+ analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, indexAnalyzers, registry, environment);
tokens = analyze.getTokens();
assertEquals(5, tokens.size());
@@ -171,7 +170,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
request.addTokenFilter("lowercase");
request.addTokenFilter("wordDelimiter");
request.text("the qu1ck brown fox-dog");
- analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, analysisService, registry, environment);
+ analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, indexAnalyzers, registry, environment);
tokens = analyze.getTokens();
assertEquals(5, tokens.size());
assertEquals("the", tokens.get(0).getTerm());
@@ -184,14 +183,14 @@ public class TransportAnalyzeActionTests extends ESTestCase {
request.tokenizer("trigram");
request.addTokenFilter("synonym");
request.text("kimchy");
- analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, analysisService, registry, environment);
+ analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, indexAnalyzers, registry, environment);
tokens = analyze.getTokens();
assertEquals(2, tokens.size());
assertEquals("sha", tokens.get(0).getTerm());
assertEquals("hay", tokens.get(1).getTerm());
}
- public void testGetIndexAnalyserWithoutAnalysisService() throws IOException {
+ public void testGetIndexAnalyserWithoutIndexAnalyzers() throws IOException {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> TransportAnalyzeAction.analyze(
new AnalyzeRequest()
@@ -208,7 +207,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
new AnalyzeRequest()
.analyzer("foobar")
.text("the qu1ck brown fox"),
- AllFieldMapper.NAME, null, notGlobal ? analysisService : null, registry, environment));
+ AllFieldMapper.NAME, null, notGlobal ? indexAnalyzers : null, registry, environment));
if (notGlobal) {
assertEquals(e.getMessage(), "failed to find analyzer [foobar]");
} else {
@@ -220,7 +219,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
new AnalyzeRequest()
.tokenizer("foobar")
.text("the qu1ck brown fox"),
- AllFieldMapper.NAME, null, notGlobal ? analysisService : null, registry, environment));
+ AllFieldMapper.NAME, null, notGlobal ? indexAnalyzers : null, registry, environment));
if (notGlobal) {
assertEquals(e.getMessage(), "failed to find tokenizer under [foobar]");
} else {
@@ -233,7 +232,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
.tokenizer("whitespace")
.addTokenFilter("foobar")
.text("the qu1ck brown fox"),
- AllFieldMapper.NAME, null, notGlobal ? analysisService : null, registry, environment));
+ AllFieldMapper.NAME, null, notGlobal ? indexAnalyzers : null, registry, environment));
if (notGlobal) {
assertEquals(e.getMessage(), "failed to find token filter under [foobar]");
} else {
@@ -247,7 +246,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
.addTokenFilter("lowercase")
.addCharFilter("foobar")
.text("the qu1ck brown fox"),
- AllFieldMapper.NAME, null, notGlobal ? analysisService : null, registry, environment));
+ AllFieldMapper.NAME, null, notGlobal ? indexAnalyzers : null, registry, environment));
if (notGlobal) {
assertEquals(e.getMessage(), "failed to find char filter under [foobar]");
} else {
@@ -260,7 +259,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
request.tokenizer("whitespace");
request.addTokenFilter("min_hash");
request.text("the quick brown fox");
- AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, analysisService, registry, environment);
+ AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, indexAnalyzers, registry, environment);
List<AnalyzeResponse.AnalyzeToken> tokens = analyze.getTokens();
int default_hash_count = 1;
int default_bucket_size = 512;
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/ASCIIFoldingTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/ASCIIFoldingTokenFilterFactoryTests.java
index 7b6d74a418..d68cbaa9d3 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/ASCIIFoldingTokenFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/ASCIIFoldingTokenFilterFactoryTests.java
@@ -23,6 +23,7 @@ import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
+import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.ESTokenStreamTestCase;
import java.io.IOException;
@@ -30,11 +31,11 @@ import java.io.StringReader;
public class ASCIIFoldingTokenFilterFactoryTests extends ESTokenStreamTestCase {
public void testDefault() throws IOException {
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(Settings.builder()
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_ascii_folding.type", "asciifolding")
.build());
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_ascii_folding");
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_ascii_folding");
String source = "Ansprüche";
String[] expected = new String[]{"Anspruche"};
Tokenizer tokenizer = new WhitespaceTokenizer();
@@ -43,12 +44,12 @@ public class ASCIIFoldingTokenFilterFactoryTests extends ESTokenStreamTestCase {
}
public void testPreserveOriginal() throws IOException {
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(Settings.builder()
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_ascii_folding.type", "asciifolding")
.put("index.analysis.filter.my_ascii_folding.preserve_original", true)
.build());
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_ascii_folding");
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_ascii_folding");
String source = "Ansprüche";
String[] expected = new String[]{"Anspruche", "Ansprüche"};
Tokenizer tokenizer = new WhitespaceTokenizer();
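Tests that previously built a full AnalysisService now go through ESTestCase.TestAnalysis, which exposes the analysis factories as plain maps; a sketch of the resulting test flow (the filter name and settings key are illustrative):

    ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(
        Settings.builder()
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
            .put("index.analysis.filter.my_filter.type", "asciifolding")
            .build());
    TokenFilterFactory filter = analysis.tokenFilter.get("my_filter"); // plain map lookup, null if absent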
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java
index 52fcdd4bb2..4a5a0b9567 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java
@@ -49,12 +49,25 @@ import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
-public class AnalysisServiceTests extends ESTestCase {
+public class AnalysisRegistryTests extends ESTestCase {
+
+ private AnalysisRegistry registry;
private static AnalyzerProvider<?> analyzerProvider(final String name) {
return new PreBuiltAnalyzerProvider(name, AnalyzerScope.INDEX, new EnglishAnalyzer());
}
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ Settings settings = Settings
+ .builder()
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
+ .build();
+ registry = new AnalysisRegistry(new Environment(settings),
+ emptyMap(), emptyMap(), emptyMap(), emptyMap());
+ }
+
public void testDefaultAnalyzers() throws IOException {
Version version = VersionUtils.randomVersion(random());
Settings settings = Settings
@@ -63,29 +76,30 @@ public class AnalysisServiceTests extends ESTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
- AnalysisService analysisService = new AnalysisRegistry(new Environment(settings), emptyMap(), emptyMap(), emptyMap(), emptyMap())
- .build(idxSettings);
- assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
- assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
- assertThat(analysisService.defaultSearchQuoteAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
+ IndexAnalyzers indexAnalyzers = new AnalysisRegistry(new Environment(settings), emptyMap(), emptyMap(), emptyMap(), emptyMap())
+ .build(idxSettings);
+ assertThat(indexAnalyzers.getDefaultIndexAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
+ assertThat(indexAnalyzers.getDefaultSearchAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
+ assertThat(indexAnalyzers.getDefaultSearchQuoteAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
}
public void testOverrideDefaultAnalyzer() throws IOException {
Version version = VersionUtils.randomVersion(random());
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
- AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),
- singletonMap("default", analyzerProvider("default")), emptyMap(), emptyMap(), emptyMap());
- assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
- assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
- assertThat(analysisService.defaultSearchQuoteAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
+ IndexAnalyzers indexAnalyzers = registry.build(IndexSettingsModule.newIndexSettings("index", settings),
+ singletonMap("default", analyzerProvider("default")), emptyMap(), emptyMap(), emptyMap());
+ assertThat(indexAnalyzers.getDefaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
+ assertThat(indexAnalyzers.getDefaultSearchAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
+ assertThat(indexAnalyzers.getDefaultSearchQuoteAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
}
public void testOverrideDefaultIndexAnalyzerIsUnsupported() {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0_alpha1, Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
- AnalyzerProvider<?> defaultIndex = new PreBuiltAnalyzerProvider("default_index", AnalyzerScope.INDEX, new EnglishAnalyzer());
+ AnalyzerProvider<?> defaultIndex = new PreBuiltAnalyzerProvider("default_index", AnalyzerScope.INDEX, new EnglishAnalyzer());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
- () -> new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),
+ () -> registry.build(IndexSettingsModule.newIndexSettings("index", settings),
singletonMap("default_index", defaultIndex), emptyMap(), emptyMap(), emptyMap()));
assertTrue(e.getMessage().contains("[index.analysis.analyzer.default_index] is not supported"));
}
@@ -94,21 +108,21 @@ public class AnalysisServiceTests extends ESTestCase {
Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(),
VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1));
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
- AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),
+ IndexAnalyzers indexAnalyzers = registry.build(IndexSettingsModule.newIndexSettings("index", settings),
singletonMap("default_index", analyzerProvider("default_index")), emptyMap(), emptyMap(), emptyMap());
- assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
- assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
- assertThat(analysisService.defaultSearchQuoteAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
+ assertThat(indexAnalyzers.getDefaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
+ assertThat(indexAnalyzers.getDefaultSearchAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
+ assertThat(indexAnalyzers.getDefaultSearchQuoteAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
}
public void testOverrideDefaultSearchAnalyzer() {
Version version = VersionUtils.randomVersion(random());
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
- AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),
+ IndexAnalyzers indexAnalyzers = registry.build(IndexSettingsModule.newIndexSettings("index", settings),
singletonMap("default_search", analyzerProvider("default_search")), emptyMap(), emptyMap(), emptyMap());
- assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
- assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
- assertThat(analysisService.defaultSearchQuoteAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
+ assertThat(indexAnalyzers.getDefaultIndexAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
+ assertThat(indexAnalyzers.getDefaultSearchAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
+ assertThat(indexAnalyzers.getDefaultSearchQuoteAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
}
public void testBackCompatOverrideDefaultIndexAndSearchAnalyzer() {
@@ -118,11 +132,11 @@ public class AnalysisServiceTests extends ESTestCase {
Map<String, AnalyzerProvider<?>> analyzers = new HashMap<>();
analyzers.put("default_index", analyzerProvider("default_index"));
analyzers.put("default_search", analyzerProvider("default_search"));
- AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),
+ IndexAnalyzers indexAnalyzers = registry.build(IndexSettingsModule.newIndexSettings("index", settings),
analyzers, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap());
- assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
- assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
- assertThat(analysisService.defaultSearchQuoteAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
+ assertThat(indexAnalyzers.getDefaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
+ assertThat(indexAnalyzers.getDefaultSearchAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
+ assertThat(indexAnalyzers.getDefaultSearchQuoteAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
}
public void testConfigureCamelCaseTokenFilter() throws IOException {
@@ -137,10 +151,10 @@ public class AnalysisServiceTests extends ESTestCase {
.putArray("index.analysis.analyzer.custom_analyzer_1.filter", "lowercase", "word_delimiter").build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
-
- AnalysisService analysisService = new AnalysisModule(new Environment(settings), emptyList()).getAnalysisRegistry()
+
+ IndexAnalyzers indexAnalyzers = new AnalysisModule(new Environment(settings), emptyList()).getAnalysisRegistry()
.build(idxSettings);
- try (NamedAnalyzer custom_analyser = analysisService.analyzer("custom_analyzer")) {
+ try (NamedAnalyzer custom_analyser = indexAnalyzers.get("custom_analyzer")) {
assertNotNull(custom_analyser);
TokenStream tokenStream = custom_analyser.tokenStream("foo", "J2SE j2ee");
tokenStream.reset();
@@ -154,7 +168,7 @@ public class AnalysisServiceTests extends ESTestCase {
assertEquals("j2ee", token.get(1));
}
- try (NamedAnalyzer custom_analyser = analysisService.analyzer("custom_analyzer_1")) {
+ try (NamedAnalyzer custom_analyser = indexAnalyzers.get("custom_analyzer_1")) {
assertNotNull(custom_analyser);
TokenStream tokenStream = custom_analyser.tokenStream("foo", "J2SE j2ee");
tokenStream.reset();
@@ -178,14 +192,14 @@ public class AnalysisServiceTests extends ESTestCase {
Settings indexSettings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
- AnalysisService analysisService = new AnalysisRegistry(new Environment(settings), emptyMap(), emptyMap(), emptyMap(), emptyMap())
+ IndexAnalyzers indexAnalyzers = new AnalysisRegistry(new Environment(settings), emptyMap(), emptyMap(), emptyMap(), emptyMap())
.build(idxSettings);
- AnalysisService otherAnalysisSergice = new AnalysisRegistry(new Environment(settings), emptyMap(), emptyMap(), emptyMap(),
+ IndexAnalyzers otherIndexAnalyzers = new AnalysisRegistry(new Environment(settings), emptyMap(), emptyMap(), emptyMap(),
emptyMap()).build(idxSettings);
final int numIters = randomIntBetween(5, 20);
for (int i = 0; i < numIters; i++) {
PreBuiltAnalyzers preBuiltAnalyzers = RandomPicks.randomFrom(random(), PreBuiltAnalyzers.values());
- assertSame(analysisService.analyzer(preBuiltAnalyzers.name()), otherAnalysisSergice.analyzer(preBuiltAnalyzers.name()));
+ assertSame(indexAnalyzers.get(preBuiltAnalyzers.name()), otherIndexAnalyzers.get(preBuiltAnalyzers.name()));
}
}
@@ -204,4 +218,15 @@ public class AnalysisServiceTests extends ESTestCase {
() -> new AnalysisRegistry(new Environment(settings), emptyMap(), emptyMap(), emptyMap(), emptyMap()).build(idxSettings));
assertThat(e.getMessage(), equalTo("analyzer [test_analyzer] must specify either an analyzer type, or a tokenizer"));
}
+
+ public void testCloseIndexAnalyzersMultipleTimes() throws IOException {
+ Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
+ Settings indexSettings = Settings.builder()
+ .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
+ IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
+ IndexAnalyzers indexAnalyzers = new AnalysisRegistry(new Environment(settings), emptyMap(), emptyMap(), emptyMap(), emptyMap())
+ .build(idxSettings);
+ indexAnalyzers.close();
+ indexAnalyzers.close();
+ }
}
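The AnalysisServiceTests changes above reduce to one mechanical migration: named lookups move from analysisService.analyzer(name) to indexAnalyzers.get(name), the default analyzers move behind getDefault*Analyzer() getters, and close() becomes safe to call more than once. A minimal sketch of the new surface, reusing the same fixtures the tests build (illustrative only, not part of this commit):

    // Sketch -- fixture setup copied from the tests above.
    Settings settings = Settings.builder()
        .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
    Settings indexSettings = Settings.builder()
        .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
    IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
    IndexAnalyzers indexAnalyzers = new AnalysisRegistry(new Environment(settings),
        emptyMap(), emptyMap(), emptyMap(), emptyMap()).build(idxSettings);

    NamedAnalyzer named = indexAnalyzers.get("standard");          // was analysisService.analyzer("standard")
    NamedAnalyzer dflt = indexAnalyzers.getDefaultIndexAnalyzer(); // was defaultIndexAnalyzer()
    indexAnalyzers.close(); // closing twice is fine, per testCloseIndexAnalyzersMultipleTimes
    indexAnalyzers.close();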
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java
index 40ec2b412f..a60c21c1a7 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java
@@ -25,6 +25,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.indices.analysis.AnalysisModule;
+import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import java.io.IOException;
@@ -34,21 +35,25 @@ import static java.util.Collections.emptyList;
public class AnalysisTestsHelper {
- public static AnalysisService createAnalysisServiceFromClassPath(Path baseDir, String resource) throws IOException {
+ public static ESTestCase.TestAnalysis createTestAnalysisFromClassPath(Path baseDir, String resource) throws IOException {
Settings settings = Settings.builder()
.loadFromStream(resource, AnalysisTestsHelper.class.getResourceAsStream(resource))
.put(Environment.PATH_HOME_SETTING.getKey(), baseDir.toString())
.build();
- return createAnalysisServiceFromSettings(settings);
+ return createTestAnalysisFromSettings(settings);
}
- public static AnalysisService createAnalysisServiceFromSettings(
+ public static ESTestCase.TestAnalysis createTestAnalysisFromSettings(
Settings settings) throws IOException {
if (settings.get(IndexMetaData.SETTING_VERSION_CREATED) == null) {
settings = Settings.builder().put(settings).put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
}
- IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
- return new AnalysisModule(new Environment(settings), emptyList()).getAnalysisRegistry().build(idxSettings);
+ IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", settings);
+ AnalysisRegistry analysisRegistry = new AnalysisModule(new Environment(settings), emptyList()).getAnalysisRegistry();
+ return new ESTestCase.TestAnalysis(analysisRegistry.build(indexSettings),
+ analysisRegistry.buildTokenFilterFactories(indexSettings),
+ analysisRegistry.buildTokenizerFactories(indexSettings),
+ analysisRegistry.buildCharFilterFactories(indexSettings));
}
}
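With the helper reworked, a single TestAnalysis value now carries the built IndexAnalyzers plus the eagerly built tokenizer, token filter, and char filter factory maps. A hypothetical caller (the names "lowercase" and "standard" are assumed registered defaults, not taken from this diff):

    // Hypothetical usage of the helper above; names are assumptions.
    ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
    TokenFilterFactory filter = analysis.tokenFilter.get("lowercase"); // factory maps are built for tests only
    NamedAnalyzer analyzer = analysis.indexAnalyzers.get("standard");  // simple name-to-analyzer lookup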
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CJKFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CJKFilterFactoryTests.java
index d2e2d4cc6e..5ae2fbbb1c 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/CJKFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/CJKFilterFactoryTests.java
@@ -21,6 +21,7 @@ package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.standard.StandardTokenizer;
+import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.ESTokenStreamTestCase;
import java.io.IOException;
@@ -30,8 +31,8 @@ public class CJKFilterFactoryTests extends ESTokenStreamTestCase {
private static final String RESOURCE = "/org/elasticsearch/index/analysis/cjk_analysis.json";
public void testDefault() throws IOException {
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(createTempDir(), RESOURCE);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("cjk_bigram");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromClassPath(createTempDir(), RESOURCE);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("cjk_bigram");
String source = "多くの学生が試験に落ちた。";
String[] expected = new String[]{"多く", "くの", "の学", "学生", "生が", "が試", "試験", "験に", "に落", "落ち", "ちた" };
Tokenizer tokenizer = new StandardTokenizer();
@@ -40,8 +41,8 @@ public class CJKFilterFactoryTests extends ESTokenStreamTestCase {
}
public void testNoFlags() throws IOException {
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(createTempDir(), RESOURCE);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("cjk_no_flags");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromClassPath(createTempDir(), RESOURCE);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("cjk_no_flags");
String source = "多くの学生が試験に落ちた。";
String[] expected = new String[]{"多く", "くの", "の学", "学生", "生が", "が試", "試験", "験に", "に落", "落ち", "ちた" };
Tokenizer tokenizer = new StandardTokenizer();
@@ -50,8 +51,8 @@ public class CJKFilterFactoryTests extends ESTokenStreamTestCase {
}
public void testHanOnly() throws IOException {
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(createTempDir(), RESOURCE);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("cjk_han_only");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromClassPath(createTempDir(), RESOURCE);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("cjk_han_only");
String source = "多くの学生が試験に落ちた。";
String[] expected = new String[]{"多", "く", "の", "学生", "が", "試験", "に", "落", "ち", "た" };
Tokenizer tokenizer = new StandardTokenizer();
@@ -60,8 +61,8 @@ public class CJKFilterFactoryTests extends ESTokenStreamTestCase {
}
public void testHanUnigramOnly() throws IOException {
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(createTempDir(), RESOURCE);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("cjk_han_unigram_only");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromClassPath(createTempDir(), RESOURCE);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("cjk_han_unigram_only");
String source = "多くの学生が試験に落ちた。";
String[] expected = new String[]{"多", "く", "の", "学", "学生", "生", "が", "試", "試験", "験", "に", "落", "ち", "た" };
Tokenizer tokenizer = new StandardTokenizer();
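Every CJK test above now follows the same shape: pull the factory from the TestAnalysis map and wrap a standalone Lucene tokenizer with it, rather than asking an AnalysisService. A condensed sketch of that shape (RESOURCE and the expected bigrams as defined in this test class):

    // Sketch of the pattern repeated in the tests above.
    ESTestCase.TestAnalysis analysis =
        AnalysisTestsHelper.createTestAnalysisFromClassPath(createTempDir(), RESOURCE);
    Tokenizer tokenizer = new StandardTokenizer();
    tokenizer.setReader(new StringReader("多くの学生が試験に落ちた。"));
    assertTokenStreamContents(analysis.tokenFilter.get("cjk_bigram").create(tokenizer),
        new String[]{"多く", "くの", "の学", "学生", "生が", "が試", "試験", "験に", "に落", "落ち", "ちた"});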
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java
index 3f2b1461ef..206dffd0fb 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java
@@ -26,7 +26,7 @@ import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.test.ESTokenStreamTestCase;
import org.elasticsearch.test.IndexSettingsModule;
-import static org.elasticsearch.test.ESTestCase.createAnalysisService;
+import static org.elasticsearch.test.ESTestCase.createTestAnalysis;
/**
*/
@@ -41,8 +41,8 @@ public class CharFilterTests extends ESTokenStreamTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
- AnalysisService analysisService = createAnalysisService(idxSettings, settings);
- NamedAnalyzer analyzer1 = analysisService.analyzer("custom_with_char_filter");
+ IndexAnalyzers indexAnalyzers = createTestAnalysis(idxSettings, settings).indexAnalyzers;
+ NamedAnalyzer analyzer1 = indexAnalyzers.get("custom_with_char_filter");
assertTokenStreamContents(analyzer1.tokenStream("test", "jeff quit phish"), new String[]{"jeff", "qit", "fish"});
@@ -58,9 +58,9 @@ public class CharFilterTests extends ESTokenStreamTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
- AnalysisService analysisService = createAnalysisService(idxSettings, settings);
- NamedAnalyzer analyzer1 = analysisService.analyzer("custom_with_char_filter");
+ IndexAnalyzers indexAnalyzers = createTestAnalysis(idxSettings, settings).indexAnalyzers;
+ NamedAnalyzer analyzer1 = indexAnalyzers.get("custom_with_char_filter");
assertTokenStreamContents(analyzer1.tokenStream("test", "<b>hello</b>!"), new String[]{"hello"});
@@ -80,8 +80,8 @@ public class CharFilterTests extends ESTokenStreamTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
- AnalysisService analysisService = createAnalysisService(idxSettings, settings);
- NamedAnalyzer analyzer1 = analysisService.analyzer("custom_with_char_filter");
+ IndexAnalyzers indexAnalyzers = createTestAnalysis(idxSettings, settings).indexAnalyzers;
+ NamedAnalyzer analyzer1 = indexAnalyzers.get("custom_with_char_filter");
assertTokenStreamContents(analyzer1.tokenStream("test", "faBBbBB aBbbbBf"), new String[]{"foo", "oof"});
}
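CharFilterTests switches from the removed createAnalysisService helper to the static ESTestCase.createTestAnalysis and keeps only the indexAnalyzers half of the result. A sketch of that narrower use, assuming settings and idxSettings built as in the tests above:

    // Sketch: analyzer-only consumption of createTestAnalysis.
    IndexAnalyzers indexAnalyzers = createTestAnalysis(idxSettings, settings).indexAnalyzers;
    NamedAnalyzer analyzer = indexAnalyzers.get("custom_with_char_filter");
    assertTokenStreamContents(analyzer.tokenStream("test", "<b>hello</b>!"), new String[]{"hello"});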
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java
index 0c9010b2c9..ede4240455 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java
@@ -61,9 +61,7 @@ public class CompoundAnalysisTests extends ESTestCase {
return singletonMap("myfilter", MyFilterTokenFilterFactory::new);
}
}));
- AnalysisService analysisService = analysisModule.getAnalysisRegistry().build(idxSettings);
-
- TokenFilterFactory filterFactory = analysisService.tokenFilter("dict_dec");
+ TokenFilterFactory filterFactory = analysisModule.getAnalysisRegistry().buildTokenFilterFactories(idxSettings).get("dict_dec");
MatcherAssert.assertThat(filterFactory, instanceOf(DictionaryCompoundWordTokenFilterFactory.class));
}
@@ -85,9 +83,8 @@ public class CompoundAnalysisTests extends ESTestCase {
return singletonMap("myfilter", MyFilterTokenFilterFactory::new);
}
}));
- AnalysisService analysisService = analysisModule.getAnalysisRegistry().build(idxSettings);
-
- Analyzer analyzer = analysisService.analyzer(analyzerName).analyzer();
+ IndexAnalyzers indexAnalyzers = analysisModule.getAnalysisRegistry().build(idxSettings);
+ Analyzer analyzer = indexAnalyzers.get(analyzerName).analyzer();
AllEntries allEntries = new AllEntries();
allEntries.addText("field1", text, 1.0f);
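CompoundAnalysisTests shows the other half of the split: a test that needs exactly one token filter no longer builds analyzers at all, it asks the registry for just the factory map. Matching builders exist for the other component types (sketch, assuming the analysisModule and idxSettings constructed above):

    // Sketch: building one component map directly from the registry.
    AnalysisRegistry registry = analysisModule.getAnalysisRegistry();
    Map<String, TokenFilterFactory> filters = registry.buildTokenFilterFactories(idxSettings);
    TokenFilterFactory dictDec = filters.get("dict_dec");
    // likewise: registry.buildTokenizerFactories(idxSettings), registry.buildCharFilterFactories(idxSettings)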
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/HunspellTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/HunspellTokenFilterFactoryTests.java
index 45e4dd24f9..2708387da1 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/HunspellTokenFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/HunspellTokenFilterFactoryTests.java
@@ -36,8 +36,8 @@ public class HunspellTokenFilterFactoryTests extends ESTestCase {
.put("index.analysis.filter.en_US.locale", "en_US")
.build();
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("en_US");
+ TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("en_US");
assertThat(tokenFilter, instanceOf(HunspellTokenFilterFactory.class));
HunspellTokenFilterFactory hunspellTokenFilter = (HunspellTokenFilterFactory) tokenFilter;
assertThat(hunspellTokenFilter.dedup(), is(true));
@@ -50,8 +50,8 @@ public class HunspellTokenFilterFactoryTests extends ESTestCase {
.put("index.analysis.filter.en_US.locale", "en_US")
.build();
- analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
- tokenFilter = analysisService.tokenFilter("en_US");
+ analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
+ tokenFilter = analysis.tokenFilter.get("en_US");
assertThat(tokenFilter, instanceOf(HunspellTokenFilterFactory.class));
hunspellTokenFilter = (HunspellTokenFilterFactory) tokenFilter;
assertThat(hunspellTokenFilter.dedup(), is(false));
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/KeepFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/KeepFilterFactoryTests.java
index 35148874e1..d5a6a590e7 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/KeepFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/KeepFilterFactoryTests.java
@@ -23,6 +23,7 @@ import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
+import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.ESTokenStreamTestCase;
import org.junit.Assert;
@@ -35,8 +36,8 @@ public class KeepFilterFactoryTests extends ESTokenStreamTestCase {
private static final String RESOURCE = "/org/elasticsearch/index/analysis/keep_analysis.json";
public void testLoadWithoutSettings() throws IOException {
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(createTempDir(), RESOURCE);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("keep");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromClassPath(createTempDir(), RESOURCE);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("keep");
Assert.assertNull(tokenFilter);
}
@@ -48,7 +49,7 @@ public class KeepFilterFactoryTests extends ESTokenStreamTestCase {
.put("index.analysis.filter.broken_keep_filter.keep_words", "[\"Hello\", \"worlD\"]")
.build();
try {
- AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
+ AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
Assert.fail("path and array are configured");
} catch (IllegalArgumentException e) {
} catch (IOException e) {
@@ -64,7 +65,7 @@ public class KeepFilterFactoryTests extends ESTokenStreamTestCase {
.build();
try {
            // test that our non-existent setup is picked up
- AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
+ AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
fail("expected an exception due to non existent keep_words_path");
} catch (IllegalArgumentException e) {
} catch (IOException e) {
@@ -76,7 +77,7 @@ public class KeepFilterFactoryTests extends ESTokenStreamTestCase {
.build();
try {
            // test that the conflicting [keep_words] / [keep_words_path] setup is picked up
- AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
+ AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
fail("expected an exception indicating that you can't use [keep_words_path] with [keep_words] ");
} catch (IllegalArgumentException e) {
} catch (IOException e) {
@@ -86,8 +87,8 @@ public class KeepFilterFactoryTests extends ESTokenStreamTestCase {
}
public void testCaseInsensitiveMapping() throws IOException {
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(createTempDir(), RESOURCE);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_keep_filter");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromClassPath(createTempDir(), RESOURCE);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_keep_filter");
assertThat(tokenFilter, instanceOf(KeepWordFilterFactory.class));
String source = "hello small world";
String[] expected = new String[]{"hello", "world"};
@@ -97,8 +98,8 @@ public class KeepFilterFactoryTests extends ESTokenStreamTestCase {
}
public void testCaseSensitiveMapping() throws IOException {
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(createTempDir(), RESOURCE);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_case_sensitive_keep_filter");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromClassPath(createTempDir(), RESOURCE);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_case_sensitive_keep_filter");
assertThat(tokenFilter, instanceOf(KeepWordFilterFactory.class));
String source = "Hello small world";
String[] expected = new String[]{"Hello"};
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/KeepTypesFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/KeepTypesFilterFactoryTests.java
index 986b79fad2..48ce1139d8 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/KeepTypesFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/KeepTypesFilterFactoryTests.java
@@ -23,6 +23,7 @@ import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.standard.StandardTokenizer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
+import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.ESTokenStreamTestCase;
import java.io.IOException;
@@ -37,8 +38,8 @@ public class KeepTypesFilterFactoryTests extends ESTokenStreamTestCase {
.put("index.analysis.filter.keep_numbers.type", "keep_types")
.putArray("index.analysis.filter.keep_numbers.types", new String[] {"<NUM>", "<SOMETHINGELSE>"})
.build();
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("keep_numbers");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("keep_numbers");
assertThat(tokenFilter, instanceOf(KeepTypesFilterFactory.class));
String source = "Hello 123 world";
String[] expected = new String[]{"123"};
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/LimitTokenCountFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/LimitTokenCountFilterFactoryTests.java
index bf17e5c7bf..f1d810505b 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/LimitTokenCountFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/LimitTokenCountFilterFactoryTests.java
@@ -23,6 +23,7 @@ import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
+import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.ESTokenStreamTestCase;
import java.io.IOException;
@@ -34,9 +35,9 @@ public class LimitTokenCountFilterFactoryTests extends ESTokenStreamTestCase {
.put("index.analysis.filter.limit_default.type", "limit")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
{
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("limit_default");
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("limit_default");
String source = "the quick brown fox";
String[] expected = new String[] { "the" };
Tokenizer tokenizer = new WhitespaceTokenizer();
@@ -44,7 +45,7 @@ public class LimitTokenCountFilterFactoryTests extends ESTokenStreamTestCase {
assertTokenStreamContents(tokenFilter.create(tokenizer), expected);
}
{
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("limit");
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("limit");
String source = "the quick brown fox";
String[] expected = new String[] { "the" };
Tokenizer tokenizer = new WhitespaceTokenizer();
@@ -61,8 +62,8 @@ public class LimitTokenCountFilterFactoryTests extends ESTokenStreamTestCase {
.put("index.analysis.filter.limit_1.consume_all_tokens", true)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("limit_1");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("limit_1");
String source = "the quick brown fox";
String[] expected = new String[] { "the", "quick", "brown" };
Tokenizer tokenizer = new WhitespaceTokenizer();
@@ -76,8 +77,8 @@ public class LimitTokenCountFilterFactoryTests extends ESTokenStreamTestCase {
.put("index.analysis.filter.limit_1.consume_all_tokens", false)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("limit_1");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("limit_1");
String source = "the quick brown fox";
String[] expected = new String[] { "the", "quick", "brown" };
Tokenizer tokenizer = new WhitespaceTokenizer();
@@ -92,8 +93,8 @@ public class LimitTokenCountFilterFactoryTests extends ESTokenStreamTestCase {
.put("index.analysis.filter.limit_1.consume_all_tokens", true)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("limit_1");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("limit_1");
String source = "the quick brown fox";
String[] expected = new String[] { "the", "quick", "brown", "fox" };
Tokenizer tokenizer = new WhitespaceTokenizer();
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/MinHashFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/MinHashFilterFactoryTests.java
index 60f01cac70..fc78afa7ab 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/MinHashFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/MinHashFilterFactoryTests.java
@@ -23,6 +23,7 @@ import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
+import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.ESTokenStreamTestCase;
import java.io.IOException;
@@ -36,8 +37,8 @@ public class MinHashFilterFactoryTests extends ESTokenStreamTestCase {
Settings settings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("min_hash");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("min_hash");
String source = "the quick brown fox";
Tokenizer tokenizer = new WhitespaceTokenizer();
tokenizer.setReader(new StringReader(source));
@@ -57,8 +58,8 @@ public class MinHashFilterFactoryTests extends ESTokenStreamTestCase {
.put("index.analysis.filter.test_min_hash.with_rotation", false)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("test_min_hash");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("test_min_hash");
String source = "sushi";
Tokenizer tokenizer = new WhitespaceTokenizer();
tokenizer.setReader(new StringReader(source));
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java
index caefb1039c..126bbe2ab9 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java
@@ -27,7 +27,7 @@ import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.test.ESTokenStreamTestCase;
import org.elasticsearch.test.IndexSettingsModule;
-import static org.elasticsearch.test.ESTestCase.createAnalysisService;
+import static org.elasticsearch.test.ESTestCase.createTestAnalysis;
import static org.hamcrest.Matchers.containsString;
public class PatternCaptureTokenFilterTests extends ESTokenStreamTestCase {
@@ -40,17 +40,16 @@ public class PatternCaptureTokenFilterTests extends ESTokenStreamTestCase {
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
- AnalysisService analysisService = createAnalysisService(idxSettings, settings);
-
- NamedAnalyzer analyzer1 = analysisService.analyzer("single");
+ IndexAnalyzers indexAnalyzers = createTestAnalysis(idxSettings, settings).indexAnalyzers;
+ NamedAnalyzer analyzer1 = indexAnalyzers.get("single");
assertTokenStreamContents(analyzer1.tokenStream("test", "foobarbaz"), new String[]{"foobarbaz","foobar","foo"});
- NamedAnalyzer analyzer2 = analysisService.analyzer("multi");
+ NamedAnalyzer analyzer2 = indexAnalyzers.get("multi");
assertTokenStreamContents(analyzer2.tokenStream("test", "abc123def"), new String[]{"abc123def","abc","123","def"});
- NamedAnalyzer analyzer3 = analysisService.analyzer("preserve");
+ NamedAnalyzer analyzer3 = indexAnalyzers.get("preserve");
assertTokenStreamContents(analyzer3.tokenStream("test", "foobarbaz"), new String[]{"foobar","foo"});
}
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java
index f33ddc88cf..64663c3682 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java
@@ -26,6 +26,7 @@ import org.apache.lucene.analysis.StopFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
+import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.ESTokenStreamTestCase;
import java.io.IOException;
@@ -38,8 +39,8 @@ public class ShingleTokenFilterFactoryTests extends ESTokenStreamTestCase {
private static final String RESOURCE = "/org/elasticsearch/index/analysis/shingle_analysis.json";
public void testDefault() throws IOException {
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(createTempDir(), RESOURCE);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("shingle");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromClassPath(createTempDir(), RESOURCE);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("shingle");
String source = "the quick brown fox";
String[] expected = new String[]{"the", "the quick", "quick", "quick brown", "brown", "brown fox", "fox"};
Tokenizer tokenizer = new WhitespaceTokenizer();
@@ -48,8 +49,8 @@ public class ShingleTokenFilterFactoryTests extends ESTokenStreamTestCase {
}
public void testInverseMapping() throws IOException {
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(createTempDir(), RESOURCE);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("shingle_inverse");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromClassPath(createTempDir(), RESOURCE);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("shingle_inverse");
assertThat(tokenFilter, instanceOf(ShingleTokenFilterFactory.class));
String source = "the quick brown fox";
String[] expected = new String[]{"the_quick_brown", "quick_brown_fox"};
@@ -59,8 +60,8 @@ public class ShingleTokenFilterFactoryTests extends ESTokenStreamTestCase {
}
public void testInverseMappingNoShingles() throws IOException {
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(createTempDir(), RESOURCE);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("shingle_inverse");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromClassPath(createTempDir(), RESOURCE);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("shingle_inverse");
assertThat(tokenFilter, instanceOf(ShingleTokenFilterFactory.class));
String source = "the quick";
String[] expected = new String[]{"the", "quick"};
@@ -70,8 +71,8 @@ public class ShingleTokenFilterFactoryTests extends ESTokenStreamTestCase {
}
public void testFillerToken() throws IOException {
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(createTempDir(), RESOURCE);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("shingle_filler");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromClassPath(createTempDir(), RESOURCE);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("shingle_filler");
String source = "simon the sorcerer";
String[] expected = new String[]{"simon FILLER", "simon FILLER sorcerer", "FILLER sorcerer"};
Tokenizer tokenizer = new WhitespaceTokenizer();
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java
index a414f41276..7c4818c63b 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java
@@ -26,6 +26,7 @@ import org.apache.lucene.analysis.snowball.SnowballFilter;
import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
+import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.ESTokenStreamTestCase;
import org.elasticsearch.test.VersionUtils;
@@ -53,13 +54,14 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_english");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_english");
assertThat(tokenFilter, instanceOf(StemmerTokenFilterFactory.class));
Tokenizer tokenizer = new WhitespaceTokenizer();
tokenizer.setReader(new StringReader("foo bar"));
TokenStream create = tokenFilter.create(tokenizer);
- NamedAnalyzer analyzer = analysisService.analyzer("my_english");
+ IndexAnalyzers indexAnalyzers = analysis.indexAnalyzers;
+ NamedAnalyzer analyzer = indexAnalyzers.get("my_english");
assertThat(create, instanceOf(PorterStemFilter.class));
assertAnalyzesTo(analyzer, "consolingly", new String[]{"consolingli"});
}
@@ -80,13 +82,14 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_porter2");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_porter2");
assertThat(tokenFilter, instanceOf(StemmerTokenFilterFactory.class));
Tokenizer tokenizer = new WhitespaceTokenizer();
tokenizer.setReader(new StringReader("foo bar"));
TokenStream create = tokenFilter.create(tokenizer);
- NamedAnalyzer analyzer = analysisService.analyzer("my_porter2");
+ IndexAnalyzers indexAnalyzers = analysis.indexAnalyzers;
+ NamedAnalyzer analyzer = indexAnalyzers.get("my_porter2");
assertThat(create, instanceOf(SnowballFilter.class));
assertAnalyzesTo(analyzer, "possibly", new String[]{"possibl"});
}
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java
index 88c5fe692d..e166f4b7b9 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java
@@ -27,7 +27,7 @@ import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.test.ESTokenStreamTestCase;
import org.elasticsearch.test.IndexSettingsModule;
-import static org.elasticsearch.test.ESTestCase.createAnalysisService;
+import static org.elasticsearch.test.ESTestCase.createTestAnalysis;
public class StopAnalyzerTests extends ESTokenStreamTestCase {
public void testDefaultsCompoundAnalysis() throws Exception {
@@ -38,13 +38,12 @@ public class StopAnalyzerTests extends ESTokenStreamTestCase {
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
- AnalysisService analysisService = createAnalysisService(idxSettings, settings);
-
- NamedAnalyzer analyzer1 = analysisService.analyzer("analyzer1");
+ IndexAnalyzers indexAnalyzers = createTestAnalysis(idxSettings, settings).indexAnalyzers;
+ NamedAnalyzer analyzer1 = indexAnalyzers.get("analyzer1");
assertTokenStreamContents(analyzer1.tokenStream("test", "to be or not to be"), new String[0]);
- NamedAnalyzer analyzer2 = analysisService.analyzer("analyzer2");
+ NamedAnalyzer analyzer2 = indexAnalyzers.get("analyzer2");
assertTokenStreamContents(analyzer2.tokenStream("test", "to be or not to be"), new String[0]);
}
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java
index 07b00f30e2..7d91619075 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java
@@ -28,6 +28,7 @@ import org.apache.lucene.util.Version;
import org.elasticsearch.common.settings.Settings.Builder;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
+import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.ESTokenStreamTestCase;
import java.io.IOException;
@@ -47,7 +48,7 @@ public class StopTokenFilterTests extends ESTokenStreamTestCase {
builder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString());
Settings settings = builder.build();
try {
- AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
+ AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("enable_position_increments is not supported anymore"));
@@ -62,8 +63,8 @@ public class StopTokenFilterTests extends ESTokenStreamTestCase {
// don't specify
}
builder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString());
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(builder.build());
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_stop");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(builder.build());
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_stop");
assertThat(tokenFilter, instanceOf(StopTokenFilterFactory.class));
Tokenizer tokenizer = new WhitespaceTokenizer();
tokenizer.setReader(new StringReader("foo bar"));
@@ -77,8 +78,8 @@ public class StopTokenFilterTests extends ESTokenStreamTestCase {
.put("index.analysis.filter.my_stop.remove_trailing", false)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_stop");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_stop");
assertThat(tokenFilter, instanceOf(StopTokenFilterFactory.class));
Tokenizer tokenizer = new WhitespaceTokenizer();
tokenizer.setReader(new StringReader("foo an"));
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java
index f94252caba..1a7903bcfa 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java
@@ -23,6 +23,7 @@ import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
+import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.ESTokenStreamTestCase;
import java.io.IOException;
@@ -30,11 +31,11 @@ import java.io.StringReader;
public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase {
public void testDefault() throws IOException {
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(Settings.builder()
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.build());
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_word_delimiter");
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's";
String[] expected = new String[]{"Power", "Shot", "500", "42", "wi", "fi", "wi", "fi", "4000", "j", "2", "se", "O", "Neil"};
Tokenizer tokenizer = new WhitespaceTokenizer();
@@ -43,13 +44,13 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase
}
public void testCatenateWords() throws IOException {
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(Settings.builder()
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.put("index.analysis.filter.my_word_delimiter.catenate_words", "true")
.put("index.analysis.filter.my_word_delimiter.generate_word_parts", "false")
.build());
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_word_delimiter");
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's";
String[] expected = new String[]{"PowerShot", "500", "42", "wifi", "wifi", "4000", "j", "2", "se", "ONeil"};
Tokenizer tokenizer = new WhitespaceTokenizer();
@@ -58,13 +59,13 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase
}
public void testCatenateNumbers() throws IOException {
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(Settings.builder()
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.put("index.analysis.filter.my_word_delimiter.generate_number_parts", "false")
.put("index.analysis.filter.my_word_delimiter.catenate_numbers", "true")
.build());
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_word_delimiter");
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's";
String[] expected = new String[]{"Power", "Shot", "50042", "wi", "fi", "wi", "fi", "4000", "j", "2", "se", "O", "Neil"};
Tokenizer tokenizer = new WhitespaceTokenizer();
@@ -73,14 +74,14 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase
}
public void testCatenateAll() throws IOException {
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(Settings.builder()
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.put("index.analysis.filter.my_word_delimiter.generate_word_parts", "false")
.put("index.analysis.filter.my_word_delimiter.generate_number_parts", "false")
.put("index.analysis.filter.my_word_delimiter.catenate_all", "true")
.build());
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_word_delimiter");
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's";
String[] expected = new String[]{"PowerShot", "50042", "wifi", "wifi4000", "j2se", "ONeil"};
Tokenizer tokenizer = new WhitespaceTokenizer();
@@ -89,12 +90,12 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase
}
public void testSplitOnCaseChange() throws IOException {
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(Settings.builder()
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.put("index.analysis.filter.my_word_delimiter.split_on_case_change", "false")
.build());
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_word_delimiter");
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot";
String[] expected = new String[]{"PowerShot"};
Tokenizer tokenizer = new WhitespaceTokenizer();
@@ -103,12 +104,12 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase
}
public void testPreserveOriginal() throws IOException {
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(Settings.builder()
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.put("index.analysis.filter.my_word_delimiter.preserve_original", "true")
.build());
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_word_delimiter");
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's";
String[] expected = new String[]{"PowerShot", "Power", "Shot", "500-42", "500", "42", "wi-fi", "wi", "fi", "wi-fi-4000", "wi", "fi", "4000", "j2se", "j", "2", "se", "O'Neil's", "O", "Neil"};
Tokenizer tokenizer = new WhitespaceTokenizer();
@@ -117,12 +118,12 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase
}
public void testStemEnglishPossessive() throws IOException {
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(Settings.builder()
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.put("index.analysis.filter.my_word_delimiter.stem_english_possessive", "false")
.build());
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_word_delimiter");
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's";
String[] expected = new String[]{"Power", "Shot", "500", "42", "wi", "fi", "wi", "fi", "4000", "j", "2", "se", "O", "Neil", "s"};
Tokenizer tokenizer = new WhitespaceTokenizer();
@@ -132,13 +133,13 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase
/** Correct offset order when doing both parts and concatenation: PowerShot is a synonym of Power */
public void testPartsAndCatenate() throws IOException {
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(Settings.builder()
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", "word_delimiter")
.put("index.analysis.filter.my_word_delimiter.catenate_words", "true")
.put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true")
.build());
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_word_delimiter");
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot";
String[] expected = new String[]{"Power", "PowerShot", "Shot" };
Tokenizer tokenizer = new WhitespaceTokenizer();
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java
index 3c192052db..ed6866e6a8 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java
@@ -24,9 +24,10 @@ import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
-import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.analysis.AnalysisTestsHelper;
+import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.TokenFilterFactory;
+import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.ESTokenStreamTestCase;
import org.junit.Assert;
@@ -43,7 +44,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
.build();
try {
- AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
+ AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
Assert.fail("[common_words] or [common_words_path] is set");
} catch (IllegalArgumentException e) {
} catch (IOException e) {
@@ -58,9 +59,9 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
{
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_default");
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("common_grams_default");
String source = "the quick brown is a fox Or noT";
String[] expected = new String[] { "the", "quick", "brown", "is", "a", "fox", "Or", "noT" };
Tokenizer tokenizer = new WhitespaceTokenizer();
@@ -75,9 +76,9 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.putArray("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein")
.build();
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
{
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_default");
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("common_grams_default");
String source = "the quick brown is a fox Or noT";
String[] expected = new String[] { "the", "quick", "brown", "is", "a", "fox", "Or", "noT" };
Tokenizer tokenizer = new WhitespaceTokenizer();
@@ -94,8 +95,8 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.putArray("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are")
.build();
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_1");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("common_grams_1");
String source = "the quick brown is a fox or noT";
String[] expected = new String[] { "the", "the_quick", "quick", "brown", "brown_is", "is", "is_a", "a", "a_fox", "fox", "fox_or", "or", "or_noT", "noT" };
Tokenizer tokenizer = new WhitespaceTokenizer();
@@ -108,8 +109,8 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.putArray("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
.build();
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_2");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("common_grams_2");
String source = "the quick brown is a fox or why noT";
String[] expected = new String[] { "the", "the_quick", "quick", "brown", "brown_is", "is", "is_a", "a", "a_fox", "fox", "or", "why", "why_noT", "noT" };
Tokenizer tokenizer = new WhitespaceTokenizer();
@@ -121,8 +122,8 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
.putArray("index.analysis.filter.common_grams_3.common_words", "the", "or", "not", "a", "is", "an", "they", "are")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_3");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("common_grams_3");
String source = "the quick brown is a fox Or noT";
String[] expected = new String[] { "the", "the_quick", "quick", "brown", "brown_is", "is", "is_a", "a", "a_fox", "fox", "Or", "noT" };
Tokenizer tokenizer = new WhitespaceTokenizer();
@@ -138,15 +139,17 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createHome())
.build();
{
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
- Analyzer analyzer = analysisService.analyzer("commongramsAnalyzer").analyzer();
+ IndexAnalyzers indexAnalyzers = AnalysisTestsHelper.createTestAnalysisFromSettings(settings)
+ .indexAnalyzers;
+ Analyzer analyzer = indexAnalyzers.get("commongramsAnalyzer").analyzer();
String source = "the quick brown is a fox or not";
String[] expected = new String[] { "the", "quick", "quick_brown", "brown", "brown_is", "is", "a", "a_fox", "fox", "fox_or", "or", "not" };
assertTokenStreamContents(analyzer.tokenStream("test", source), expected);
}
{
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
- Analyzer analyzer = analysisService.analyzer("commongramsAnalyzer_file").analyzer();
+ IndexAnalyzers indexAnalyzers = AnalysisTestsHelper.createTestAnalysisFromSettings(settings)
+ .indexAnalyzers;
+ Analyzer analyzer = indexAnalyzers.get("commongramsAnalyzer_file").analyzer();
String source = "the quick brown is a fox or not";
String[] expected = new String[] { "the", "quick", "quick_brown", "brown", "brown_is", "is", "a", "a_fox", "fox", "fox_or", "or", "not" };
assertTokenStreamContents(analyzer.tokenStream("test", source), expected);
@@ -161,8 +164,8 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
.put("index.analysis.filter.common_grams_1.ignore_case", true)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_1");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("common_grams_1");
String source = "the quick brown is a fox or noT";
String[] expected = new String[] { "the_quick", "quick", "brown_is", "is_a", "a_fox", "fox_or", "or_noT" };
Tokenizer tokenizer = new WhitespaceTokenizer();
@@ -176,8 +179,8 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
.put("index.analysis.filter.common_grams_2.ignore_case", false)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_2");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("common_grams_2");
String source = "the quick brown is a fox or why noT";
String[] expected = new String[] { "the_quick", "quick", "brown_is", "is_a", "a_fox", "fox", "or", "why_noT" };
Tokenizer tokenizer = new WhitespaceTokenizer();
@@ -190,8 +193,8 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
.putArray("index.analysis.filter.common_grams_3.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_3");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("common_grams_3");
String source = "the quick brown is a fox or why noT";
String[] expected = new String[] { "the_quick", "quick", "brown_is", "is_a", "a_fox", "fox", "or", "why_noT" };
Tokenizer tokenizer = new WhitespaceTokenizer();
@@ -204,8 +207,8 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
.putArray("index.analysis.filter.common_grams_4.common_words", "the", "or", "not", "a", "is", "an", "they", "are")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_4");
+ ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("common_grams_4");
String source = "the quick brown is a fox Or noT";
String[] expected = new String[] { "the_quick", "quick", "brown_is", "is_a", "a_fox", "fox", "Or", "noT" };
Tokenizer tokenizer = new WhitespaceTokenizer();
@@ -221,15 +224,17 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createHome())
.build();
{
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
- Analyzer analyzer = analysisService.analyzer("commongramsAnalyzer").analyzer();
+ IndexAnalyzers indexAnalyzers = AnalysisTestsHelper.createTestAnalysisFromSettings(settings)
+ .indexAnalyzers;
+ Analyzer analyzer = indexAnalyzers.get("commongramsAnalyzer").analyzer();
String source = "the quick brown is a fox or not";
String[] expected = new String[] { "the", "quick_brown", "brown_is", "is", "a_fox", "fox_or", "or", "not" };
assertTokenStreamContents(analyzer.tokenStream("test", source), expected);
}
{
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
- Analyzer analyzer = analysisService.analyzer("commongramsAnalyzer_file").analyzer();
+ IndexAnalyzers indexAnalyzers = AnalysisTestsHelper.createTestAnalysisFromSettings(settings)
+ .indexAnalyzers;
+ Analyzer analyzer = indexAnalyzers.get("commongramsAnalyzer_file").analyzer();
String source = "the quick brown is a fox or not";
String[] expected = new String[] { "the", "quick_brown", "brown_is", "is", "a_fox", "fox_or", "or", "not" };
assertTokenStreamContents(analyzer.tokenStream("test", source), expected);
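The CommonGrams hunks above repeat the migration's core test pattern: `AnalysisTestsHelper.createAnalysisServiceFromSettings` is replaced by `createTestAnalysisFromSettings`, which returns an `ESTestCase.TestAnalysis` bundling the `IndexAnalyzers` name-to-analyzer mapping together with a plain map of token filter factories. A minimal sketch of the before/after lookups, assuming only the members visible in these hunks (`indexAnalyzers`, `tokenFilter`); the full `TestAnalysis` layout is not shown in this diff:

    import java.io.IOException;
    import org.apache.lucene.analysis.Analyzer;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.index.analysis.AnalysisTestsHelper;
    import org.elasticsearch.index.analysis.IndexAnalyzers;
    import org.elasticsearch.index.analysis.TokenFilterFactory;
    import org.elasticsearch.test.ESTestCase;

    class TestAnalysisLookupSketch {
        static void lookups(Settings settings) throws IOException {
            // One helper call replaces the old per-test AnalysisService construction.
            ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);

            // analysisService.analyzer(name) becomes a lookup on IndexAnalyzers.
            IndexAnalyzers indexAnalyzers = analysis.indexAnalyzers;
            Analyzer analyzer = indexAnalyzers.get("commongramsAnalyzer").analyzer();

            // analysisService.tokenFilter(name) becomes a plain Map.get on the factories.
            TokenFilterFactory tokenFilter = analysis.tokenFilter.get("common_grams_1");
        }
    }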
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java
index de271d720c..c6dfdc1a41 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java
@@ -30,7 +30,7 @@ import org.elasticsearch.common.lucene.all.AllTokenStream;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.analysis.AnalysisService;
+import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.hamcrest.MatcherAssert;
@@ -46,7 +46,7 @@ import static org.hamcrest.Matchers.equalTo;
*/
public class SynonymsAnalysisTests extends ESTestCase {
protected final Logger logger = Loggers.getLogger(getClass());
- private AnalysisService analysisService;
+ private IndexAnalyzers indexAnalyzers;
public void testSynonymsAnalysis() throws IOException {
InputStream synonyms = getClass().getResourceAsStream("synonyms.txt");
@@ -64,7 +64,7 @@ public class SynonymsAnalysisTests extends ESTestCase {
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
- analysisService = createAnalysisService(idxSettings, settings);
+ indexAnalyzers = createTestAnalysis(idxSettings, settings).indexAnalyzers;
match("synonymAnalyzer", "kimchy is the dude abides", "shay is the elasticsearch man!");
match("synonymAnalyzer_file", "kimchy is the dude abides", "shay is the elasticsearch man!");
@@ -74,8 +74,7 @@ public class SynonymsAnalysisTests extends ESTestCase {
}
private void match(String analyzerName, String source, String target) throws IOException {
-
- Analyzer analyzer = analysisService.analyzer(analyzerName).analyzer();
+ Analyzer analyzer = indexAnalyzers.get(analyzerName).analyzer();
TokenStream stream = AllTokenStream.allTokenStream("_all", source, 1.0f, analyzer);
stream.reset();
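SynonymsAnalysisTests keeps its match helper; only the analyzer lookup changes from `analysisService.analyzer(name)` to `indexAnalyzers.get(name)`. Underneath the ES-internal AllTokenStream wrapper used above, comparing emitted terms is an ordinary Lucene token-stream loop; a sketch of that consumption pattern (plain Lucene, not the test's exact code):

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;
    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

    class TokenStreamSketch {
        static List<String> terms(Analyzer analyzer, String text) throws IOException {
            List<String> terms = new ArrayList<>();
            try (TokenStream stream = analyzer.tokenStream("_all", text)) {
                CharTermAttribute term = stream.addAttribute(CharTermAttribute.class);
                stream.reset();                  // required before the first incrementToken()
                while (stream.incrementToken()) {
                    terms.add(term.toString()); // synonym expansions surface as extra terms
                }
                stream.end();
            }
            return terms;
        }
    }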
diff --git a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java
index cf706f33cc..ea89acbd8f 100644
--- a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java
+++ b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java
@@ -37,7 +37,7 @@ import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.analysis.AnalysisService;
+import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.mapper.MapperRegistry;
@@ -95,9 +95,9 @@ public class CodecTests extends ESTestCase {
.build();
IndexSettings settings = IndexSettingsModule.newIndexSettings("_na", nodeSettings);
SimilarityService similarityService = new SimilarityService(settings, Collections.emptyMap());
- AnalysisService analysisService = createAnalysisService(settings, nodeSettings);
+ IndexAnalyzers indexAnalyzers = createTestAnalysis(settings, nodeSettings).indexAnalyzers;
MapperRegistry mapperRegistry = new MapperRegistry(Collections.emptyMap(), Collections.emptyMap());
- MapperService service = new MapperService(settings, analysisService, similarityService, mapperRegistry, () -> null);
+ MapperService service = new MapperService(settings, indexAnalyzers, similarityService, mapperRegistry, () -> null);
return new CodecService(service, ESLoggerFactory.getLogger("test"));
}
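CodecTests shows the smallest form of a change repeated in the mapper tests below: MapperService (and DocumentMapperParser) now take an `IndexAnalyzers` where they previously took an `AnalysisService`, in the same argument position, and IndexService exposes it as `getIndexAnalyzers()` instead of `analysisService()`. A hedged before/after fragment of the call site, using the base-class `createTestAnalysis` helper exactly as the hunk above does:

    // Before: the second constructor argument was the AnalysisService.
    //   AnalysisService analysisService = createAnalysisService(settings, nodeSettings);
    //   MapperService service = new MapperService(settings, analysisService, similarityService, mapperRegistry, () -> null);

    // After: only that argument changes; the surrounding wiring is untouched.
    IndexAnalyzers indexAnalyzers = createTestAnalysis(settings, nodeSettings).indexAnalyzers;
    MapperService service = new MapperService(settings, indexAnalyzers, similarityService, mapperRegistry, () -> null);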
diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
index e92b620f9d..2ea45f7a40 100644
--- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
+++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
@@ -26,6 +26,7 @@ import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.appender.AbstractAppender;
import org.apache.logging.log4j.core.filter.RegexFilter;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
@@ -67,10 +68,13 @@ import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.env.Environment;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.VersionType;
-import org.elasticsearch.index.analysis.AnalysisService;
+import org.elasticsearch.index.analysis.AnalysisRegistry;
+import org.elasticsearch.index.analysis.IndexAnalyzers;
+import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.codec.CodecService;
import org.elasticsearch.index.engine.Engine.Searcher;
import org.elasticsearch.index.mapper.ContentPath;
@@ -2006,10 +2010,12 @@ public class InternalEngineTests extends ESTestCase {
RootObjectMapper.Builder rootBuilder = new RootObjectMapper.Builder("test");
Index index = new Index(indexName, "_na_");
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings);
- AnalysisService analysisService = new AnalysisService(indexSettings, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap());
+ NamedAnalyzer defaultAnalyzer = new NamedAnalyzer("default", new StandardAnalyzer());
+ IndexAnalyzers indexAnalyzers = new IndexAnalyzers(indexSettings, defaultAnalyzer, defaultAnalyzer, defaultAnalyzer,
+ Collections.emptyMap());
SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap());
MapperRegistry mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry();
- MapperService mapperService = new MapperService(indexSettings, analysisService, similarityService, mapperRegistry, () -> null);
+ MapperService mapperService = new MapperService(indexSettings, indexAnalyzers, similarityService, mapperRegistry, () -> null);
DocumentMapper.Builder b = new DocumentMapper.Builder(rootBuilder, mapperService);
this.docMapper = b.build(mapperService);
}
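InternalEngineTests shows the hand-rolled alternative for tests that never exercise analysis: instead of going through a registry, it builds an `IndexAnalyzers` directly, passing one `NamedAnalyzer` over Lucene's StandardAnalyzer for all three analyzer arguments and an empty per-name map. A self-contained sketch of that pattern; that the three positional arguments are the default index, search, and search-quote analyzers is an assumption read off the call sites in this diff:

    import java.util.Collections;
    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.elasticsearch.index.IndexSettings;
    import org.elasticsearch.index.analysis.IndexAnalyzers;
    import org.elasticsearch.index.analysis.NamedAnalyzer;

    class MinimalAnalyzersSketch {
        static IndexAnalyzers minimalIndexAnalyzers(IndexSettings indexSettings) {
            // One analyzer is reused for every slot; no per-field analyzers are registered.
            NamedAnalyzer defaultAnalyzer = new NamedAnalyzer("default", new StandardAnalyzer());
            return new IndexAnalyzers(indexSettings, defaultAnalyzer, defaultAnalyzer, defaultAnalyzer,
                    Collections.emptyMap());
        }
    }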
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java
index 7fa0a3c116..6be11ced1e 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java
@@ -25,7 +25,6 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.compress.CompressedXContent;
-import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
@@ -63,7 +62,7 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase {
Collections.singletonMap(ExternalMetadataMapper.CONTENT_TYPE, new ExternalMetadataMapper.TypeParser()));
DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(),
- indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
+ indexService.getIndexAnalyzers(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(
XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject(ExternalMetadataMapper.CONTENT_TYPE)
@@ -112,7 +111,7 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase {
MapperRegistry mapperRegistry = new MapperRegistry(mapperParsers, Collections.emptyMap());
DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(),
- indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
+ indexService.getIndexAnalyzers(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(
XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
@@ -182,7 +181,7 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase {
MapperRegistry mapperRegistry = new MapperRegistry(mapperParsers, Collections.emptyMap());
DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), indexService.mapperService(),
- indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
+ indexService.getIndexAnalyzers(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(
XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java
index baa9f72801..544764a9b5 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java
@@ -24,24 +24,11 @@ import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.CompressedXContent;
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
-import org.elasticsearch.index.mapper.DocumentMapper;
-import org.elasticsearch.index.mapper.DocumentMapperParser;
-import org.elasticsearch.index.mapper.FieldNamesFieldMapper;
-import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.Mapper;
-import org.elasticsearch.index.mapper.MapperParsingException;
-import org.elasticsearch.index.mapper.MapperService;
-import org.elasticsearch.index.mapper.MetadataFieldMapper;
-import org.elasticsearch.index.mapper.ParseContext;
-import org.elasticsearch.index.mapper.ParsedDocument;
-import org.elasticsearch.index.mapper.TermBasedFieldType;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.mapper.MapperRegistry;
-import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.io.IOException;
@@ -244,9 +231,9 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
Collections.singletonMap("_dummy", new DummyMetadataFieldMapper.TypeParser())
);
final MapperRegistry mapperRegistry = indicesModule.getMapperRegistry();
- MapperService mapperService = new MapperService(indexService.getIndexSettings(), indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
+ MapperService mapperService = new MapperService(indexService.getIndexSettings(), indexService.getIndexAnalyzers(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), mapperService,
- indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
+ indexService.getIndexAnalyzers(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext);
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
ParsedDocument parsedDocument = mapper.parse("index", "type", "id", new BytesArray("{}"));
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java
index 15738ada3b..078c60ce19 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java
@@ -18,30 +18,22 @@
*/
package org.elasticsearch.index.mapper;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.CompressedXContent;
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.analysis.AnalysisService;
-import org.elasticsearch.index.mapper.ContentPath;
-import org.elasticsearch.index.mapper.DocumentMapper;
-import org.elasticsearch.index.mapper.Mapper;
-import org.elasticsearch.index.mapper.MapperParsingException;
-import org.elasticsearch.index.mapper.MapperService;
-import org.elasticsearch.index.mapper.ParentFieldMapper;
+import org.elasticsearch.index.analysis.IndexAnalyzers;
+import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
-import org.elasticsearch.index.mapper.ParseContext;
-import org.elasticsearch.index.mapper.ParsedDocument;
-import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -111,10 +103,11 @@ public class ParentFieldMapperTests extends ESSingleNodeTestCase {
public void testNoParentNullFieldCreatedIfNoParentSpecified() throws Exception {
Index index = new Index("_index", "testUUID");
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, Settings.EMPTY);
- AnalysisService analysisService = new AnalysisService(indexSettings, Collections.emptyMap(), Collections.emptyMap(),
- Collections.emptyMap(), Collections.emptyMap());
+ NamedAnalyzer namedAnalyzer = new NamedAnalyzer("default", new StandardAnalyzer());
+ IndexAnalyzers indexAnalyzers = new IndexAnalyzers(indexSettings, namedAnalyzer, namedAnalyzer, namedAnalyzer,
+ Collections.emptyMap());
SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap());
- MapperService mapperService = new MapperService(indexSettings, analysisService, similarityService,
+ MapperService mapperService = new MapperService(indexSettings, indexAnalyzers, similarityService,
new IndicesModule(emptyList()).getMapperRegistry(), () -> null);
XContentBuilder mappingSource = jsonBuilder().startObject().startObject("some_type")
.startObject("properties")
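The AnalysisModuleTests changes that follow make the new entry point explicit: `AnalysisRegistry.build(IndexSettings)` returns the `IndexAnalyzers` for an index directly, with no intermediate service. A sketch of the renamed helper as it reads after the change (registry construction elided, as in the hunks below):

    import java.io.IOException;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.index.IndexSettings;
    import org.elasticsearch.index.analysis.AnalysisRegistry;
    import org.elasticsearch.index.analysis.IndexAnalyzers;
    import org.elasticsearch.test.IndexSettingsModule;

    class RegistryBuildSketch {
        static IndexAnalyzers getIndexAnalyzers(AnalysisRegistry registry, Settings settings) throws IOException {
            IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
            // build() yields the per-index name-to-analyzer mapping that replaces AnalysisService.
            return registry.build(idxSettings);
        }
    }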
diff --git a/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java b/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java
index 33763571da..02cc8ff30a 100644
--- a/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java
+++ b/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java
@@ -41,9 +41,9 @@ import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.Analysis;
import org.elasticsearch.index.analysis.AnalysisRegistry;
-import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.analysis.AnalysisTestsHelper;
import org.elasticsearch.index.analysis.CustomAnalyzer;
+import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.MappingCharFilterFactory;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.PatternReplaceCharFilterFactory;
@@ -79,11 +79,11 @@ import static org.hamcrest.Matchers.is;
*/
public class AnalysisModuleTests extends ModuleTestCase {
- public AnalysisService getAnalysisService(Settings settings) throws IOException {
- return getAnalysisService(getNewRegistry(settings), settings);
+ public IndexAnalyzers getIndexAnalyzers(Settings settings) throws IOException {
+ return getIndexAnalyzers(getNewRegistry(settings), settings);
}
- public AnalysisService getAnalysisService(AnalysisRegistry registry, Settings settings) throws IOException {
+ public IndexAnalyzers getIndexAnalyzers(AnalysisRegistry registry, Settings settings) throws IOException {
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
return registry.build(idxSettings);
}
@@ -136,9 +136,9 @@ public class AnalysisModuleTests extends ModuleTestCase {
.put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_3_5))
.build();
AnalysisRegistry newRegistry = getNewRegistry(settings);
- AnalysisService as = getAnalysisService(newRegistry, settings);
- assertThat(as.analyzer("default").analyzer(), is(instanceOf(KeywordAnalyzer.class)));
- assertThat(as.analyzer("default_search").analyzer(), is(instanceOf(EnglishAnalyzer.class)));
+ IndexAnalyzers indexAnalyzers = getIndexAnalyzers(newRegistry, settings);
+ assertThat(indexAnalyzers.get("default").analyzer(), is(instanceOf(KeywordAnalyzer.class)));
+ assertThat(indexAnalyzers.get("default_search").analyzer(), is(instanceOf(EnglishAnalyzer.class)));
}
public void testAnalyzerAliasReferencesAlias() throws IOException {
@@ -152,10 +152,11 @@ public class AnalysisModuleTests extends ModuleTestCase {
.put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_3_5))
.build();
AnalysisRegistry newRegistry = getNewRegistry(settings);
- AnalysisService as = getAnalysisService(newRegistry, settings);
- assertThat(as.analyzer("default").analyzer(), is(instanceOf(GermanAnalyzer.class)));
+ IndexAnalyzers indexAnalyzers = getIndexAnalyzers(newRegistry, settings);
+
+ assertThat(indexAnalyzers.get("default").analyzer(), is(instanceOf(GermanAnalyzer.class)));
// analyzer types are bound early before we resolve aliases
- assertThat(as.analyzer("default_search").analyzer(), is(instanceOf(StandardAnalyzer.class)));
+ assertThat(indexAnalyzers.get("default_search").analyzer(), is(instanceOf(StandardAnalyzer.class)));
}
public void testAnalyzerAliasDefault() throws IOException {
@@ -167,9 +168,9 @@ public class AnalysisModuleTests extends ModuleTestCase {
.put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_3_5))
.build();
AnalysisRegistry newRegistry = getNewRegistry(settings);
- AnalysisService as = getAnalysisService(newRegistry, settings);
- assertThat(as.analyzer("default").analyzer(), is(instanceOf(KeywordAnalyzer.class)));
- assertThat(as.analyzer("default_search").analyzer(), is(instanceOf(KeywordAnalyzer.class)));
+ IndexAnalyzers indexAnalyzers = getIndexAnalyzers(newRegistry, settings);
+ assertThat(indexAnalyzers.get("default").analyzer(), is(instanceOf(KeywordAnalyzer.class)));
+ assertThat(indexAnalyzers.get("default_search").analyzer(), is(instanceOf(KeywordAnalyzer.class)));
}
public void testAnalyzerAliasMoreThanOnce() throws IOException {
@@ -183,7 +184,7 @@ public class AnalysisModuleTests extends ModuleTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
AnalysisRegistry newRegistry = getNewRegistry(settings);
- IllegalStateException ise = expectThrows(IllegalStateException.class, () -> getAnalysisService(newRegistry, settings));
+ IllegalStateException ise = expectThrows(IllegalStateException.class, () -> getIndexAnalyzers(newRegistry, settings));
assertEquals("alias [default] is already used by [foobar]", ise.getMessage());
}
@@ -196,7 +197,7 @@ public class AnalysisModuleTests extends ModuleTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
AnalysisRegistry registry = getNewRegistry(settings);
- IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> getAnalysisService(registry, settings));
+ IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> getIndexAnalyzers(registry, settings));
assertEquals("setting [index.analysis.analyzer.foobar.alias] is not supported", e.getMessage());
}
@@ -208,7 +209,7 @@ public class AnalysisModuleTests extends ModuleTestCase {
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0)
.build();
AnalysisRegistry newRegistry = getNewRegistry(settings2);
- AnalysisService analysisService2 = getAnalysisService(newRegistry, settings2);
+ IndexAnalyzers indexAnalyzers = getIndexAnalyzers(newRegistry, settings2);
// registry always has the current version
assertThat(newRegistry.getAnalyzer("default"), is(instanceOf(NamedAnalyzer.class)));
@@ -217,20 +218,20 @@ public class AnalysisModuleTests extends ModuleTestCase {
assertEquals(Version.CURRENT.luceneVersion, defaultNamedAnalyzer.analyzer().getVersion());
// analysis service has the expected version
- assertThat(analysisService2.analyzer("standard").analyzer(), is(instanceOf(StandardAnalyzer.class)));
- assertEquals(Version.V_2_0_0.luceneVersion, analysisService2.analyzer("standard").analyzer().getVersion());
- assertEquals(Version.V_2_0_0.luceneVersion, analysisService2.analyzer("thai").analyzer().getVersion());
+ assertThat(indexAnalyzers.get("standard").analyzer(), is(instanceOf(StandardAnalyzer.class)));
+ assertEquals(Version.V_2_0_0.luceneVersion, indexAnalyzers.get("standard").analyzer().getVersion());
+ assertEquals(Version.V_2_0_0.luceneVersion, indexAnalyzers.get("thai").analyzer().getVersion());
- assertThat(analysisService2.analyzer("custom7").analyzer(), is(instanceOf(StandardAnalyzer.class)));
- assertEquals(org.apache.lucene.util.Version.fromBits(3,6,0), analysisService2.analyzer("custom7").analyzer().getVersion());
+ assertThat(indexAnalyzers.get("custom7").analyzer(), is(instanceOf(StandardAnalyzer.class)));
+ assertEquals(org.apache.lucene.util.Version.fromBits(3,6,0), indexAnalyzers.get("custom7").analyzer().getVersion());
}
private void assertTokenFilter(String name, Class<?> clazz) throws IOException {
Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
- AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
- TokenFilterFactory tokenFilter = analysisService.tokenFilter(name);
+ TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get(name);
Tokenizer tokenizer = new WhitespaceTokenizer();
tokenizer.setReader(new StringReader("foo bar"));
TokenStream stream = tokenFilter.create(tokenizer);
@@ -238,8 +239,8 @@ public class AnalysisModuleTests extends ModuleTestCase {
}
private void testSimpleConfiguration(Settings settings) throws IOException {
- AnalysisService analysisService = getAnalysisService(settings);
- Analyzer analyzer = analysisService.analyzer("custom1").analyzer();
+ IndexAnalyzers indexAnalyzers = getIndexAnalyzers(settings);
+ Analyzer analyzer = indexAnalyzers.get("custom1").analyzer();
assertThat(analyzer, instanceOf(CustomAnalyzer.class));
CustomAnalyzer custom1 = (CustomAnalyzer) analyzer;
@@ -249,23 +250,23 @@ public class AnalysisModuleTests extends ModuleTestCase {
StopTokenFilterFactory stop1 = (StopTokenFilterFactory) custom1.tokenFilters()[0];
assertThat(stop1.stopWords().size(), equalTo(1));
- analyzer = analysisService.analyzer("custom2").analyzer();
+ analyzer = indexAnalyzers.get("custom2").analyzer();
assertThat(analyzer, instanceOf(CustomAnalyzer.class));
// verify position increment gap
- analyzer = analysisService.analyzer("custom6").analyzer();
+ analyzer = indexAnalyzers.get("custom6").analyzer();
assertThat(analyzer, instanceOf(CustomAnalyzer.class));
CustomAnalyzer custom6 = (CustomAnalyzer) analyzer;
assertThat(custom6.getPositionIncrementGap("any_string"), equalTo(256));
// verify characters mapping
- analyzer = analysisService.analyzer("custom5").analyzer();
+ analyzer = indexAnalyzers.get("custom5").analyzer();
assertThat(analyzer, instanceOf(CustomAnalyzer.class));
CustomAnalyzer custom5 = (CustomAnalyzer) analyzer;
assertThat(custom5.charFilters()[0], instanceOf(MappingCharFilterFactory.class));
// check custom pattern replace filter
- analyzer = analysisService.analyzer("custom3").analyzer();
+ analyzer = indexAnalyzers.get("custom3").analyzer();
assertThat(analyzer, instanceOf(CustomAnalyzer.class));
CustomAnalyzer custom3 = (CustomAnalyzer) analyzer;
PatternReplaceCharFilterFactory patternReplaceCharFilterFactory = (PatternReplaceCharFilterFactory) custom3.charFilters()[0];
@@ -273,7 +274,7 @@ public class AnalysisModuleTests extends ModuleTestCase {
assertThat(patternReplaceCharFilterFactory.getReplacement(), equalTo("replacedSample $1"));
// check custom class name (my)
- analyzer = analysisService.analyzer("custom4").analyzer();
+ analyzer = indexAnalyzers.get("custom4").analyzer();
assertThat(analyzer, instanceOf(CustomAnalyzer.class));
CustomAnalyzer custom4 = (CustomAnalyzer) analyzer;
assertThat(custom4.tokenFilters()[0], instanceOf(MyFilterTokenFilterFactory.class));
@@ -333,7 +334,7 @@ public class AnalysisModuleTests extends ModuleTestCase {
.put(IndexMetaData.SETTING_VERSION_CREATED, "1")
.build();
try {
- getAnalysisService(settings);
+ getIndexAnalyzers(settings);
fail("This should fail with IllegalArgumentException because the analyzers name starts with _");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), either(equalTo("analyzer name must not start with '_'. got \"_invalid_name\""))
@@ -350,7 +351,7 @@ public class AnalysisModuleTests extends ModuleTestCase {
.put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_3_5))
.build();
try {
- getAnalysisService(settings);
+ getIndexAnalyzers(settings);
fail("This should fail with IllegalArgumentException because the analyzers alias starts with _");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("analyzer name must not start with '_'. got \"_invalid_name\""));
@@ -365,7 +366,7 @@ public class AnalysisModuleTests extends ModuleTestCase {
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
try {
- getAnalysisService(settings);
+ getIndexAnalyzers(settings);
fail("Analyzer should fail if it has position_offset_gap");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("Option [position_offset_gap] in Custom Analyzer [custom] " +