Diffstat (limited to 'test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.json')
-rw-r--r--  test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.json | 54
1 file changed, 54 insertions(+), 0 deletions(-)
diff --git a/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.json b/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.json
new file mode 100644
index 0000000000..38937a9b5a
--- /dev/null
+++ b/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.json
@@ -0,0 +1,54 @@
+{
+ "index":{
+ "analysis":{
+ "tokenizer":{
+ "standard":{
+ "type":"standard"
+ }
+ },
+ "filter":{
+ "stop":{
+ "type":"stop",
+ "stopwords":["test-stop"]
+ },
+ "stop2":{
+ "type":"stop",
+ "stopwords":["stop2-1", "stop2-2"]
+ },
+ "my":{
+ "type":"myfilter"
+ },
+ "dict_dec":{
+ "type":"dictionary_decompounder",
+ "word_list":["donau", "dampf", "schiff", "spargel", "creme", "suppe"]
+ }
+ },
+ "analyzer":{
+ "standard":{
+ "type":"standard",
+ "stopwords":["test1", "test2", "test3"]
+ },
+ "custom1":{
+ "tokenizer":"standard",
+ "filter":["stop", "stop2"]
+ },
+ "custom4":{
+ "tokenizer":"standard",
+ "filter":["my"]
+ },
+ "custom6":{
+ "tokenizer":"standard",
+ "position_increment_gap": 256
+ },
+ "czechAnalyzerWithStemmer":{
+ "tokenizer":"standard",
+ "filter":["standard", "lowercase", "stop", "czech_stem"]
+ },
+ "decompoundingAnalyzer":{
+ "tokenizer":"standard",
+ "filter":["dict_dec"]
+ }
+ }
+ }
+ }
+}
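
For context, the file above defines test analysis settings: custom tokenizers and token filters (stop, stop2, my, dict_dec) and analyzers (custom1, custom4, custom6, czechAnalyzerWithStemmer, decompoundingAnalyzer). As a minimal sketch of how such settings could be exercised once applied to an index, assuming a hypothetical index name test_index (the "myfilter" type is a test-only filter presumably registered by the accompanying test code), one could call the standard _analyze API:

GET /test_index/_analyze
{
  "analyzer": "decompoundingAnalyzer",
  "text": "donaudampfschiff spargelcremesuppe"
}

With the dict_dec dictionary_decompounder filter configured above, such a request would be expected to emit the compound parts (donau, dampf, schiff, spargel, creme, suppe) in addition to the original tokens.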