-rw-r--r--  buildSrc/version.properties | 2
-rw-r--r--  core/licenses/lucene-analyzers-common-6.4.1.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-analyzers-common-6.5.0-snapshot-f919485.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-backward-codecs-6.4.1.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-backward-codecs-6.5.0-snapshot-f919485.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-core-6.4.1.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-core-6.5.0-snapshot-f919485.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-grouping-6.4.1.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-grouping-6.5.0-snapshot-f919485.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-highlighter-6.4.1.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-highlighter-6.5.0-snapshot-f919485.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-join-6.4.1.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-join-6.5.0-snapshot-f919485.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-memory-6.4.1.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-memory-6.5.0-snapshot-f919485.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-misc-6.4.1.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-misc-6.5.0-snapshot-f919485.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-queries-6.4.1.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-queries-6.5.0-snapshot-f919485.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-queryparser-6.4.1.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-queryparser-6.5.0-snapshot-f919485.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-sandbox-6.4.1.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-sandbox-6.5.0-snapshot-f919485.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-spatial-6.4.1.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-spatial-6.5.0-snapshot-f919485.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-spatial-extras-6.4.1.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-spatial-extras-6.5.0-snapshot-f919485.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-spatial3d-6.4.1.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-spatial3d-6.5.0-snapshot-f919485.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-suggest-6.4.1.jar.sha1 | 1
-rw-r--r--  core/licenses/lucene-suggest-6.5.0-snapshot-f919485.jar.sha1 | 1
-rw-r--r--  core/src/main/java/org/elasticsearch/Version.java | 4
-rw-r--r--  core/src/main/java/org/elasticsearch/index/analysis/FlattenGraphTokenFilterFactory.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterGraphTokenFilterFactory.java | 101
-rw-r--r--  core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactory.java | 9
-rw-r--r--  core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java | 2
-rw-r--r--  core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHits.java | 63
-rw-r--r--  core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java | 3
-rw-r--r--  core/src/main/resources/org/elasticsearch/bootstrap/security.policy | 4
-rw-r--r--  core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy | 2
-rw-r--r--  core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java | 3
-rw-r--r--  core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java | 33
-rw-r--r--  core/src/test/java/org/elasticsearch/index/search/MatchQueryIT.java | 1
-rw-r--r--  core/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHitsTests.java | 7
-rw-r--r--  modules/lang-expression/licenses/lucene-expressions-6.4.1.jar.sha1 | 1
-rw-r--r--  modules/lang-expression/licenses/lucene-expressions-6.5.0-snapshot-f919485.jar.sha1 | 1
-rw-r--r--  modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java | 24
-rw-r--r--  modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java | 10
-rw-r--r--  modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java | 15
-rw-r--r--  modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java | 16
-rw-r--r--  plugins/analysis-icu/licenses/lucene-analyzers-icu-6.4.1.jar.sha1 | 1
-rw-r--r--  plugins/analysis-icu/licenses/lucene-analyzers-icu-6.5.0-snapshot-f919485.jar.sha1 | 1
-rw-r--r--  plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.4.1.jar.sha1 | 1
-rw-r--r--  plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.5.0-snapshot-f919485.jar.sha1 | 1
-rw-r--r--  plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.4.1.jar.sha1 | 1
-rw-r--r--  plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.5.0-snapshot-f919485.jar.sha1 | 1
-rw-r--r--  plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.4.1.jar.sha1 | 1
-rw-r--r--  plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.5.0-snapshot-f919485.jar.sha1 | 1
-rw-r--r--  plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.4.1.jar.sha1 | 1
-rw-r--r--  plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.5.0-snapshot-f919485.jar.sha1 | 1
-rw-r--r--  plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.4.1.jar.sha1 | 1
-rw-r--r--  plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.5.0-snapshot-f919485.jar.sha1 | 1
-rw-r--r--  test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java | 2
66 files changed, 224 insertions, 129 deletions
diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index 741a5d0c50..1e996b3900 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -1,6 +1,6 @@
# When updating elasticsearch, please update 'rest' version in core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
elasticsearch = 6.0.0-alpha1
-lucene = 6.4.1
+lucene = 6.5.0-snapshot-f919485
# optional dependencies
spatial4j = 0.6
diff --git a/core/licenses/lucene-analyzers-common-6.4.1.jar.sha1 b/core/licenses/lucene-analyzers-common-6.4.1.jar.sha1
deleted file mode 100644
index 5b51b32b77..0000000000
--- a/core/licenses/lucene-analyzers-common-6.4.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c6f0f593503080204e9d33189cdc59320f55db37 \ No newline at end of file
diff --git a/core/licenses/lucene-analyzers-common-6.5.0-snapshot-f919485.jar.sha1 b/core/licenses/lucene-analyzers-common-6.5.0-snapshot-f919485.jar.sha1
new file mode 100644
index 0000000000..e53ca6efba
--- /dev/null
+++ b/core/licenses/lucene-analyzers-common-6.5.0-snapshot-f919485.jar.sha1
@@ -0,0 +1 @@
+886c1da9adc3347f61ab95ecbf4dbeeaa0e7acb2 \ No newline at end of file
diff --git a/core/licenses/lucene-backward-codecs-6.4.1.jar.sha1 b/core/licenses/lucene-backward-codecs-6.4.1.jar.sha1
deleted file mode 100644
index 481e10fdb0..0000000000
--- a/core/licenses/lucene-backward-codecs-6.4.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b0ab8aca2b0025b8733411778b6d27afe1c451f5 \ No newline at end of file
diff --git a/core/licenses/lucene-backward-codecs-6.5.0-snapshot-f919485.jar.sha1 b/core/licenses/lucene-backward-codecs-6.5.0-snapshot-f919485.jar.sha1
new file mode 100644
index 0000000000..6025e4b484
--- /dev/null
+++ b/core/licenses/lucene-backward-codecs-6.5.0-snapshot-f919485.jar.sha1
@@ -0,0 +1 @@
+df9e94f63ad7d9188f14820c435ea1dc3c28d87a \ No newline at end of file
diff --git a/core/licenses/lucene-core-6.4.1.jar.sha1 b/core/licenses/lucene-core-6.4.1.jar.sha1
deleted file mode 100644
index e02b643c8a..0000000000
--- a/core/licenses/lucene-core-6.4.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-2a18924b9e0ed86b318902cb475a0b9ca4d7be5b \ No newline at end of file
diff --git a/core/licenses/lucene-core-6.5.0-snapshot-f919485.jar.sha1 b/core/licenses/lucene-core-6.5.0-snapshot-f919485.jar.sha1
new file mode 100644
index 0000000000..ada60f0596
--- /dev/null
+++ b/core/licenses/lucene-core-6.5.0-snapshot-f919485.jar.sha1
@@ -0,0 +1 @@
+3539f8dc9c3ed8ebe90afcb3daa2e9afcf5108d1 \ No newline at end of file
diff --git a/core/licenses/lucene-grouping-6.4.1.jar.sha1 b/core/licenses/lucene-grouping-6.4.1.jar.sha1
deleted file mode 100644
index 98fd2f55e2..0000000000
--- a/core/licenses/lucene-grouping-6.4.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-8297adfa469abd571079ee75a1645fc5124fff5b \ No newline at end of file
diff --git a/core/licenses/lucene-grouping-6.5.0-snapshot-f919485.jar.sha1 b/core/licenses/lucene-grouping-6.5.0-snapshot-f919485.jar.sha1
new file mode 100644
index 0000000000..e117e6d96f
--- /dev/null
+++ b/core/licenses/lucene-grouping-6.5.0-snapshot-f919485.jar.sha1
@@ -0,0 +1 @@
+da76338e4f299963da9d7ab33dae7586dfc902c2 \ No newline at end of file
diff --git a/core/licenses/lucene-highlighter-6.4.1.jar.sha1 b/core/licenses/lucene-highlighter-6.4.1.jar.sha1
deleted file mode 100644
index d61105265a..0000000000
--- a/core/licenses/lucene-highlighter-6.4.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-95c0f76fc7893240483b25a5c420bed77a10d05d \ No newline at end of file
diff --git a/core/licenses/lucene-highlighter-6.5.0-snapshot-f919485.jar.sha1 b/core/licenses/lucene-highlighter-6.5.0-snapshot-f919485.jar.sha1
new file mode 100644
index 0000000000..e23602806b
--- /dev/null
+++ b/core/licenses/lucene-highlighter-6.5.0-snapshot-f919485.jar.sha1
@@ -0,0 +1 @@
+f6318d120236c7ac03fca6bf98825b4cb4347fc8 \ No newline at end of file
diff --git a/core/licenses/lucene-join-6.4.1.jar.sha1 b/core/licenses/lucene-join-6.4.1.jar.sha1
deleted file mode 100644
index d63fed772f..0000000000
--- a/core/licenses/lucene-join-6.4.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6d2f1ff94dba19b9bc6f15930b7104b890cab1ce \ No newline at end of file
diff --git a/core/licenses/lucene-join-6.5.0-snapshot-f919485.jar.sha1 b/core/licenses/lucene-join-6.5.0-snapshot-f919485.jar.sha1
new file mode 100644
index 0000000000..a8408f6acf
--- /dev/null
+++ b/core/licenses/lucene-join-6.5.0-snapshot-f919485.jar.sha1
@@ -0,0 +1 @@
+68f045ff272e10c307fe25a1867c2948b614b57c \ No newline at end of file
diff --git a/core/licenses/lucene-memory-6.4.1.jar.sha1 b/core/licenses/lucene-memory-6.4.1.jar.sha1
deleted file mode 100644
index 808557a1bc..0000000000
--- a/core/licenses/lucene-memory-6.4.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-825a946902f03a38257851733da908949d69f3da \ No newline at end of file
diff --git a/core/licenses/lucene-memory-6.5.0-snapshot-f919485.jar.sha1 b/core/licenses/lucene-memory-6.5.0-snapshot-f919485.jar.sha1
new file mode 100644
index 0000000000..acfe1ea6ca
--- /dev/null
+++ b/core/licenses/lucene-memory-6.5.0-snapshot-f919485.jar.sha1
@@ -0,0 +1 @@
+b58a7a15267614a9a14f7cf6257454e0c24b146d \ No newline at end of file
diff --git a/core/licenses/lucene-misc-6.4.1.jar.sha1 b/core/licenses/lucene-misc-6.4.1.jar.sha1
deleted file mode 100644
index ae47a860bb..0000000000
--- a/core/licenses/lucene-misc-6.4.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4d147a6f0fcfc54630260d1bb3deecfc0d0d10f7 \ No newline at end of file
diff --git a/core/licenses/lucene-misc-6.5.0-snapshot-f919485.jar.sha1 b/core/licenses/lucene-misc-6.5.0-snapshot-f919485.jar.sha1
new file mode 100644
index 0000000000..3175fbad81
--- /dev/null
+++ b/core/licenses/lucene-misc-6.5.0-snapshot-f919485.jar.sha1
@@ -0,0 +1 @@
+d5f00fcd00fee6906b563d201bc00bdea7a92baa \ No newline at end of file
diff --git a/core/licenses/lucene-queries-6.4.1.jar.sha1 b/core/licenses/lucene-queries-6.4.1.jar.sha1
deleted file mode 100644
index 0e015acc9a..0000000000
--- a/core/licenses/lucene-queries-6.4.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6de41d984c16185a244b52c4d069b00f5b2b120f \ No newline at end of file
diff --git a/core/licenses/lucene-queries-6.5.0-snapshot-f919485.jar.sha1 b/core/licenses/lucene-queries-6.5.0-snapshot-f919485.jar.sha1
new file mode 100644
index 0000000000..82c3080bd8
--- /dev/null
+++ b/core/licenses/lucene-queries-6.5.0-snapshot-f919485.jar.sha1
@@ -0,0 +1 @@
+2664901a494d87e9f4cef65be14cca918da7c4f5 \ No newline at end of file
diff --git a/core/licenses/lucene-queryparser-6.4.1.jar.sha1 b/core/licenses/lucene-queryparser-6.4.1.jar.sha1
deleted file mode 100644
index ee7eb78d6c..0000000000
--- a/core/licenses/lucene-queryparser-6.4.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-1fc5795a072770a2c47dce11a3c85a80f3437af6 \ No newline at end of file
diff --git a/core/licenses/lucene-queryparser-6.5.0-snapshot-f919485.jar.sha1 b/core/licenses/lucene-queryparser-6.5.0-snapshot-f919485.jar.sha1
new file mode 100644
index 0000000000..c95d59d535
--- /dev/null
+++ b/core/licenses/lucene-queryparser-6.5.0-snapshot-f919485.jar.sha1
@@ -0,0 +1 @@
+476a79293f9a15ea1ee5f93684587205d03480d1 \ No newline at end of file
diff --git a/core/licenses/lucene-sandbox-6.4.1.jar.sha1 b/core/licenses/lucene-sandbox-6.4.1.jar.sha1
deleted file mode 100644
index caa6302e36..0000000000
--- a/core/licenses/lucene-sandbox-6.4.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d4a49664668c58aa23aba74717f3d74a61378d7c \ No newline at end of file
diff --git a/core/licenses/lucene-sandbox-6.5.0-snapshot-f919485.jar.sha1 b/core/licenses/lucene-sandbox-6.5.0-snapshot-f919485.jar.sha1
new file mode 100644
index 0000000000..b5065a8bee
--- /dev/null
+++ b/core/licenses/lucene-sandbox-6.5.0-snapshot-f919485.jar.sha1
@@ -0,0 +1 @@
+f4dd70223178cca067b0cade4e58c4d82bec87d6 \ No newline at end of file
diff --git a/core/licenses/lucene-spatial-6.4.1.jar.sha1 b/core/licenses/lucene-spatial-6.4.1.jar.sha1
deleted file mode 100644
index 250c504c96..0000000000
--- a/core/licenses/lucene-spatial-6.4.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d6ceb47fdea913a8f468a240a6ea307368094463 \ No newline at end of file
diff --git a/core/licenses/lucene-spatial-6.5.0-snapshot-f919485.jar.sha1 b/core/licenses/lucene-spatial-6.5.0-snapshot-f919485.jar.sha1
new file mode 100644
index 0000000000..3e255fc8a8
--- /dev/null
+++ b/core/licenses/lucene-spatial-6.5.0-snapshot-f919485.jar.sha1
@@ -0,0 +1 @@
+72c4ec5d811480164db556b54c7a76bd3ea16bd6 \ No newline at end of file
diff --git a/core/licenses/lucene-spatial-extras-6.4.1.jar.sha1 b/core/licenses/lucene-spatial-extras-6.4.1.jar.sha1
deleted file mode 100644
index e30811d727..0000000000
--- a/core/licenses/lucene-spatial-extras-6.4.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5b764e5f95f610f79622ca0e957bfc23f094e4c7 \ No newline at end of file
diff --git a/core/licenses/lucene-spatial-extras-6.5.0-snapshot-f919485.jar.sha1 b/core/licenses/lucene-spatial-extras-6.5.0-snapshot-f919485.jar.sha1
new file mode 100644
index 0000000000..f27eb0c244
--- /dev/null
+++ b/core/licenses/lucene-spatial-extras-6.5.0-snapshot-f919485.jar.sha1
@@ -0,0 +1 @@
+f7af3755fdd09df7c258c655aff03ddef9536a04 \ No newline at end of file
diff --git a/core/licenses/lucene-spatial3d-6.4.1.jar.sha1 b/core/licenses/lucene-spatial3d-6.4.1.jar.sha1
deleted file mode 100644
index 9f5354e398..0000000000
--- a/core/licenses/lucene-spatial3d-6.4.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-2c58459e671040046305dbee698fce1e6a1de71d \ No newline at end of file
diff --git a/core/licenses/lucene-spatial3d-6.5.0-snapshot-f919485.jar.sha1 b/core/licenses/lucene-spatial3d-6.5.0-snapshot-f919485.jar.sha1
new file mode 100644
index 0000000000..12990b49d2
--- /dev/null
+++ b/core/licenses/lucene-spatial3d-6.5.0-snapshot-f919485.jar.sha1
@@ -0,0 +1 @@
+2bf820109203b990e93a05dade8dcebec6aeb71a \ No newline at end of file
diff --git a/core/licenses/lucene-suggest-6.4.1.jar.sha1 b/core/licenses/lucene-suggest-6.4.1.jar.sha1
deleted file mode 100644
index d35a2b88d2..0000000000
--- a/core/licenses/lucene-suggest-6.4.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-47014b400916eea259645a9e3118558ef6e95441 \ No newline at end of file
diff --git a/core/licenses/lucene-suggest-6.5.0-snapshot-f919485.jar.sha1 b/core/licenses/lucene-suggest-6.5.0-snapshot-f919485.jar.sha1
new file mode 100644
index 0000000000..a99ede54a5
--- /dev/null
+++ b/core/licenses/lucene-suggest-6.5.0-snapshot-f919485.jar.sha1
@@ -0,0 +1 @@
+fc1f32923ee68761ee05051f4ef6f4a4ab3acdec \ No newline at end of file
diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java
index 5bd58357ed..74d2ea872c 100644
--- a/core/src/main/java/org/elasticsearch/Version.java
+++ b/core/src/main/java/org/elasticsearch/Version.java
@@ -112,9 +112,11 @@ public class Version implements Comparable<Version> {
public static final Version V_5_2_1_UNRELEASED = new Version(V_5_2_1_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_4_1);
public static final int V_5_3_0_ID_UNRELEASED = 5030099;
public static final Version V_5_3_0_UNRELEASED = new Version(V_5_3_0_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_4_1);
+ public static final int V_5_4_0_ID_UNRELEASED = 5040099;
+ public static final Version V_5_4_0_UNRELEASED = new Version(V_5_4_0_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_5_0);
public static final int V_6_0_0_alpha1_ID_UNRELEASED = 6000001;
public static final Version V_6_0_0_alpha1_UNRELEASED =
- new Version(V_6_0_0_alpha1_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_4_1);
+ new Version(V_6_0_0_alpha1_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_5_0);
public static final Version CURRENT = V_6_0_0_alpha1_UNRELEASED;
// unreleased versions must be added to the above list with the suffix _UNRELEASED (with the exception of CURRENT)
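For reference (not part of the patch): the integer id passed to new Version(...) appears to pack major/minor/revision/build into decimal fields, so 5040099 corresponds to 5.4.0 with build 99 marking an unreleased version. A minimal sketch of that assumed layout:

    // Hypothetical standalone sketch; the field layout is an assumption inferred from the ids above.
    public class VersionIdSketch {
        public static void main(String[] args) {
            int id = 5040099;                 // V_5_4_0_ID_UNRELEASED from the hunk above
            int major = id / 1000000;         // 5
            int minor = (id / 10000) % 100;   // 4
            int revision = (id / 100) % 100;  // 0
            int build = id % 100;             // 99, used here for unreleased builds
            System.out.println(major + "." + minor + "." + revision + " (build " + build + ")");
        }
    }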
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/FlattenGraphTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/FlattenGraphTokenFilterFactory.java
index 3af472f54b..6c9487a2cb 100644
--- a/core/src/main/java/org/elasticsearch/index/analysis/FlattenGraphTokenFilterFactory.java
+++ b/core/src/main/java/org/elasticsearch/index/analysis/FlattenGraphTokenFilterFactory.java
@@ -20,7 +20,7 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.synonym.FlattenGraphFilter;
+import org.apache.lucene.analysis.core.FlattenGraphFilter;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterGraphTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterGraphTokenFilterFactory.java
new file mode 100644
index 0000000000..7cdc215f1b
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterGraphTokenFilterFactory.java
@@ -0,0 +1,101 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.analysis;
+
+import org.apache.lucene.analysis.CharArraySet;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.miscellaneous.WordDelimiterGraphFilter;
+import org.apache.lucene.analysis.miscellaneous.WordDelimiterIterator;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.index.IndexSettings;
+
+import java.util.List;
+import java.util.Set;
+
+import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.CATENATE_ALL;
+import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.CATENATE_NUMBERS;
+import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.CATENATE_WORDS;
+import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.GENERATE_NUMBER_PARTS;
+import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.GENERATE_WORD_PARTS;
+import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.PRESERVE_ORIGINAL;
+import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.SPLIT_ON_CASE_CHANGE;
+import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.SPLIT_ON_NUMERICS;
+import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.STEM_ENGLISH_POSSESSIVE;
+import static org.elasticsearch.index.analysis.WordDelimiterTokenFilterFactory.parseTypes;
+
+public class WordDelimiterGraphTokenFilterFactory extends AbstractTokenFilterFactory {
+
+ private final byte[] charTypeTable;
+ private final int flags;
+ private final CharArraySet protoWords;
+
+ public WordDelimiterGraphTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
+ super(indexSettings, name, settings);
+
+ // Sample Format for the type table:
+ // $ => DIGIT
+ // % => DIGIT
+ // . => DIGIT
+ // \u002C => DIGIT
+ // \u200D => ALPHANUM
+ List<String> charTypeTableValues = Analysis.getWordList(env, settings, "type_table");
+ if (charTypeTableValues == null) {
+ this.charTypeTable = WordDelimiterIterator.DEFAULT_WORD_DELIM_TABLE;
+ } else {
+ this.charTypeTable = parseTypes(charTypeTableValues);
+ }
+ int flags = 0;
+ // If set, causes parts of words to be generated: "PowerShot" => "Power" "Shot"
+ flags |= getFlag(GENERATE_WORD_PARTS, settings, "generate_word_parts", true);
+ // If set, causes number subwords to be generated: "500-42" => "500" "42"
+ flags |= getFlag(GENERATE_NUMBER_PARTS, settings, "generate_number_parts", true);
+ // If set, causes maximum runs of word parts to be catenated: "wi-fi" => "wifi"
+ flags |= getFlag(CATENATE_WORDS, settings, "catenate_words", false);
+ // If set, causes maximum runs of number parts to be catenated: "500-42" => "50042"
+ flags |= getFlag(CATENATE_NUMBERS, settings, "catenate_numbers", false);
+ // If set, causes all subword parts to be catenated: "wi-fi-4000" => "wifi4000"
+ flags |= getFlag(CATENATE_ALL, settings, "catenate_all", false);
+ // 1, causes "PowerShot" to be two tokens; ("Power-Shot" remains two parts regards)
+ flags |= getFlag(SPLIT_ON_CASE_CHANGE, settings, "split_on_case_change", true);
+ // If set, includes original words in subwords: "500-42" => "500" "42" "500-42"
+ flags |= getFlag(PRESERVE_ORIGINAL, settings, "preserve_original", false);
+ // 1, causes "j2se" to be three tokens; "j" "2" "se"
+ flags |= getFlag(SPLIT_ON_NUMERICS, settings, "split_on_numerics", true);
+ // If set, causes trailing "'s" to be removed for each subword: "O'Neil's" => "O", "Neil"
+ flags |= getFlag(STEM_ENGLISH_POSSESSIVE, settings, "stem_english_possessive", true);
+ // If not null, this is the set of tokens to protect from being delimited
+ Set<?> protectedWords = Analysis.getWordSet(env, indexSettings.getIndexVersionCreated(), settings, "protected_words");
+ this.protoWords = protectedWords == null ? null : CharArraySet.copy(protectedWords);
+ this.flags = flags;
+ }
+
+ @Override
+ public TokenStream create(TokenStream tokenStream) {
+ return new WordDelimiterGraphFilter(tokenStream, charTypeTable, flags, protoWords);
+ }
+
+ private int getFlag(int flag, Settings settings, String key, boolean defaultValue) {
+ if (settings.getAsBoolean(key, defaultValue)) {
+ return flag;
+ }
+ return 0;
+ }
+}
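The factory above only ORs the configured flags into a bitmask and hands them, plus the char type table and protected words, to Lucene's WordDelimiterGraphFilter. A minimal standalone sketch of driving that filter directly (illustrative only, not part of the patch; uses the flag constants the factory imports from WordDelimiterFilter and the filter constructor that falls back to the default char type table):

    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.core.WhitespaceTokenizer;
    import org.apache.lucene.analysis.miscellaneous.WordDelimiterGraphFilter;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

    import java.io.StringReader;

    import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.CATENATE_WORDS;
    import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.GENERATE_WORD_PARTS;
    import static org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter.SPLIT_ON_CASE_CHANGE;

    public class WordDelimiterGraphSketch {
        public static void main(String[] args) throws Exception {
            int flags = GENERATE_WORD_PARTS | SPLIT_ON_CASE_CHANGE | CATENATE_WORDS;
            WhitespaceTokenizer tokenizer = new WhitespaceTokenizer();
            tokenizer.setReader(new StringReader("PowerShot wi-fi"));
            // null protected-words set; default char type table applies
            TokenStream stream = new WordDelimiterGraphFilter(tokenizer, flags, null);
            CharTermAttribute term = stream.addAttribute(CharTermAttribute.class);
            stream.reset();
            while (stream.incrementToken()) {
                // roughly: PowerShot, Power, Shot, wifi, wi, fi (exact order/positions depend on the graph)
                System.out.println(term.toString());
            }
            stream.end();
            stream.close();
        }
    }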
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactory.java
index 387c2c9e7f..09882072ee 100644
--- a/core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactory.java
+++ b/core/src/main/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactory.java
@@ -113,7 +113,7 @@ public class WordDelimiterTokenFilterFactory extends AbstractTokenFilterFactory
/**
* parses a list of MappingCharFilter style rules into a custom byte[] type table
*/
- private byte[] parseTypes(Collection<String> rules) {
+ static byte[] parseTypes(Collection<String> rules) {
SortedMap<Character, Byte> typeMap = new TreeMap<>();
for (String rule : rules) {
Matcher m = typePattern.matcher(rule);
@@ -137,7 +137,7 @@ public class WordDelimiterTokenFilterFactory extends AbstractTokenFilterFactory
return types;
}
- private Byte parseType(String s) {
+ private static Byte parseType(String s) {
if (s.equals("LOWER"))
return WordDelimiterFilter.LOWER;
else if (s.equals("UPPER"))
@@ -154,9 +154,8 @@ public class WordDelimiterTokenFilterFactory extends AbstractTokenFilterFactory
return null;
}
- char[] out = new char[256];
-
- private String parseString(String s) {
+ private static String parseString(String s) {
+ char[] out = new char[256];
int readPos = 0;
int len = s.length();
int writePos = 0;
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java
index 51f8f2b42b..48b6a1127a 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java
@@ -79,7 +79,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
protected void setScorer(Scorer scorer) {}
@Override
- public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException {
+ public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) {
assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName());
final boolean sortMissingLast = sortMissingLast(missingValue) ^ reversed;
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java
index 4684399a23..390a5493e2 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java
@@ -64,7 +64,7 @@ public class DoubleValuesComparatorSource extends IndexFieldData.XFieldComparato
protected void setScorer(Scorer scorer) {}
@Override
- public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException {
+ public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) {
assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName());
final double dMissingValue = (Double) missingObject(missingValue, reversed);
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java
index ba9b031ced..0546a5e5e8 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java
@@ -56,7 +56,7 @@ public class FloatValuesComparatorSource extends IndexFieldData.XFieldComparator
}
@Override
- public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException {
+ public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) {
assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName());
final float dMissingValue = (Float) missingObject(missingValue, reversed);
diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java
index b2fd25e544..d652673308 100644
--- a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java
+++ b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java
@@ -55,7 +55,7 @@ public class LongValuesComparatorSource extends IndexFieldData.XFieldComparatorS
}
@Override
- public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException {
+ public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) {
assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName());
final Long dMissingValue = (Long) missingObject(missingValue, reversed);
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHits.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHits.java
index e60bf94dbc..baa8c45e14 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHits.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHits.java
@@ -23,7 +23,6 @@ import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldDocs;
-import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.Lucene;
@@ -102,42 +101,38 @@ public class InternalTopHits extends InternalMetricsAggregation implements TopHi
final TopDocs reducedTopDocs;
final TopDocs[] shardDocs;
- try {
- if (topDocs instanceof TopFieldDocs) {
- Sort sort = new Sort(((TopFieldDocs) topDocs).fields);
- shardDocs = new TopFieldDocs[aggregations.size()];
- for (int i = 0; i < shardDocs.length; i++) {
- InternalTopHits topHitsAgg = (InternalTopHits) aggregations.get(i);
- shardDocs[i] = (TopFieldDocs) topHitsAgg.topDocs;
- shardHits[i] = topHitsAgg.searchHits;
- }
- reducedTopDocs = TopDocs.merge(sort, from, size, (TopFieldDocs[]) shardDocs);
- } else {
- shardDocs = new TopDocs[aggregations.size()];
- for (int i = 0; i < shardDocs.length; i++) {
- InternalTopHits topHitsAgg = (InternalTopHits) aggregations.get(i);
- shardDocs[i] = topHitsAgg.topDocs;
- shardHits[i] = topHitsAgg.searchHits;
- }
- reducedTopDocs = TopDocs.merge(from, size, shardDocs);
+ if (topDocs instanceof TopFieldDocs) {
+ Sort sort = new Sort(((TopFieldDocs) topDocs).fields);
+ shardDocs = new TopFieldDocs[aggregations.size()];
+ for (int i = 0; i < shardDocs.length; i++) {
+ InternalTopHits topHitsAgg = (InternalTopHits) aggregations.get(i);
+ shardDocs[i] = (TopFieldDocs) topHitsAgg.topDocs;
+ shardHits[i] = topHitsAgg.searchHits;
}
-
- final int[] tracker = new int[shardHits.length];
- SearchHit[] hits = new SearchHit[reducedTopDocs.scoreDocs.length];
- for (int i = 0; i < reducedTopDocs.scoreDocs.length; i++) {
- ScoreDoc scoreDoc = reducedTopDocs.scoreDocs[i];
- int position;
- do {
- position = tracker[scoreDoc.shardIndex]++;
- } while (shardDocs[scoreDoc.shardIndex].scoreDocs[position] != scoreDoc);
- hits[i] = shardHits[scoreDoc.shardIndex].getAt(position);
+ reducedTopDocs = TopDocs.merge(sort, from, size, (TopFieldDocs[]) shardDocs);
+ } else {
+ shardDocs = new TopDocs[aggregations.size()];
+ for (int i = 0; i < shardDocs.length; i++) {
+ InternalTopHits topHitsAgg = (InternalTopHits) aggregations.get(i);
+ shardDocs[i] = topHitsAgg.topDocs;
+ shardHits[i] = topHitsAgg.searchHits;
}
- return new InternalTopHits(name, from, size, reducedTopDocs, new SearchHits(hits, reducedTopDocs.totalHits,
- reducedTopDocs.getMaxScore()),
- pipelineAggregators(), getMetaData());
- } catch (IOException e) {
- throw ExceptionsHelper.convertToElastic(e);
+ reducedTopDocs = TopDocs.merge(from, size, shardDocs);
+ }
+
+ final int[] tracker = new int[shardHits.length];
+ SearchHit[] hits = new SearchHit[reducedTopDocs.scoreDocs.length];
+ for (int i = 0; i < reducedTopDocs.scoreDocs.length; i++) {
+ ScoreDoc scoreDoc = reducedTopDocs.scoreDocs[i];
+ int position;
+ do {
+ position = tracker[scoreDoc.shardIndex]++;
+ } while (shardDocs[scoreDoc.shardIndex].scoreDocs[position] != scoreDoc);
+ hits[i] = shardHits[scoreDoc.shardIndex].getAt(position);
}
+ return new InternalTopHits(name, from, size, reducedTopDocs, new SearchHits(hits, reducedTopDocs.totalHits,
+ reducedTopDocs.getMaxScore()),
+ pipelineAggregators(), getMetaData());
}
@Override
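For context (not part of the patch): the reduce above relies on TopDocs.merge stamping each merged ScoreDoc with the index of the shard it came from in the passed array, which is what the tracker/do-while lookup then uses to find the matching SearchHit. A minimal sketch of that merge behaviour with plain Lucene:

    import org.apache.lucene.search.ScoreDoc;
    import org.apache.lucene.search.TopDocs;

    public class TopDocsMergeSketch {
        public static void main(String[] args) {
            TopDocs shard0 = new TopDocs(2, new ScoreDoc[] {
                new ScoreDoc(0, 3.0f), new ScoreDoc(1, 1.0f) }, 3.0f);
            TopDocs shard1 = new TopDocs(1, new ScoreDoc[] {
                new ScoreDoc(7, 2.0f) }, 2.0f);

            // merge(from, size, shards): orders by score and sets scoreDoc.shardIndex
            TopDocs merged = TopDocs.merge(0, 3, new TopDocs[] { shard0, shard1 });
            for (ScoreDoc doc : merged.scoreDocs) {
                System.out.println("doc=" + doc.doc + " score=" + doc.score + " shard=" + doc.shardIndex);
            }
        }
    }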
diff --git a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java
index 92a197e062..a896039f7a 100644
--- a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java
+++ b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java
@@ -554,8 +554,7 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
}
@Override
- public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed)
- throws IOException {
+ public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) {
return new FieldComparator.DoubleComparator(numHits, null, null) {
@Override
protected NumericDocValues getNumericDocValues(LeafReaderContext context, String field)
diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy
index 6e71f6a46c..4614d627e0 100644
--- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy
+++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy
@@ -31,7 +31,7 @@ grant codeBase "${codebase.securesm-1.1.jar}" {
//// Very special jar permissions:
//// These are dangerous permissions that we don't want to grant to everything.
-grant codeBase "${codebase.lucene-core-6.4.1.jar}" {
+grant codeBase "${codebase.lucene-core-6.5.0-snapshot-f919485.jar}" {
// needed to allow MMapDirectory's "unmap hack" (die unmap hack, die)
// java 8 package
permission java.lang.RuntimePermission "accessClassInPackage.sun.misc";
@@ -42,7 +42,7 @@ grant codeBase "${codebase.lucene-core-6.4.1.jar}" {
permission java.lang.RuntimePermission "accessDeclaredMembers";
};
-grant codeBase "${codebase.lucene-misc-6.4.1.jar}" {
+grant codeBase "${codebase.lucene-misc-6.5.0-snapshot-f919485.jar}" {
// needed to allow shard shrinking to use hard-links if possible via lucenes HardlinkCopyDirectoryWrapper
permission java.nio.file.LinkPermission "hard";
};
diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
index f96fb685a2..82061d17e3 100644
--- a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
+++ b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
@@ -33,7 +33,7 @@ grant codeBase "${codebase.securemock-1.2.jar}" {
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
};
-grant codeBase "${codebase.lucene-test-framework-6.4.1.jar}" {
+grant codeBase "${codebase.lucene-test-framework-6.5.0-snapshot-f919485.jar}" {
// needed by RamUsageTester
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
// needed for testing hardlinks in StoreRecoveryTests since we install MockFS
diff --git a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java
index 032ebc392c..2b6482ec5d 100644
--- a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java
@@ -242,7 +242,8 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
assertThat(booleanQuery.clauses().get(0).getOccur(), equalTo(BooleanClause.Occur.MUST));
assertThat(booleanQuery.clauses().get(0).getQuery(), instanceOf(TermsQuery.class));
TermsQuery termsQuery = (TermsQuery) booleanQuery.clauses().get(0).getQuery();
- Query rewrittenTermsQuery = termsQuery.rewrite(null);
+ // we need to rewrite once for TermsQuery -> TermInSetQuery and then again for TermInSetQuery -> ConstantScoreQuery
+ Query rewrittenTermsQuery = termsQuery.rewrite(null).rewrite(null);
assertThat(rewrittenTermsQuery, instanceOf(ConstantScoreQuery.class));
ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) rewrittenTermsQuery;
assertThat(constantScoreQuery.getQuery(), instanceOf(BooleanQuery.class));
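The double rewrite() in the test exists because a single rewrite step only unwraps one layer, while Lucene normally rewrites until a fixed point. A minimal sketch of that idiom (illustrative only, not part of the patch; essentially what IndexSearcher.rewrite does internally):

    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.search.Query;

    import java.io.IOException;

    public final class RewriteToFixpoint {
        // Rewrite repeatedly until rewrite() returns the same instance.
        public static Query rewriteFully(Query query, IndexReader reader) throws IOException {
            Query rewritten = query;
            for (Query next = rewritten.rewrite(reader); next != rewritten; next = rewritten.rewrite(reader)) {
                rewritten = next;
            }
            return rewritten;
        }
    }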
diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
index a98d90c04e..96c51d6f94 100644
--- a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
@@ -40,6 +40,7 @@ import org.apache.lucene.search.SynonymQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.search.WildcardQuery;
+import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
import org.elasticsearch.common.ParsingException;
@@ -397,8 +398,8 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
Query query = queryParser.parse("guinea pig");
Query expectedQuery = new GraphQuery(
new BooleanQuery.Builder()
- .add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "guinea")), defaultOp))
- .add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "pig")), defaultOp))
+ .add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "guinea")), Occur.MUST))
+ .add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "pig")), Occur.MUST))
.build(),
new TermQuery(new Term(STRING_FIELD_NAME, "cavy"))
);
@@ -406,19 +407,17 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
// simple with additional tokens
query = queryParser.parse("that guinea pig smells");
- expectedQuery = new GraphQuery(
- new BooleanQuery.Builder()
- .add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "that")), defaultOp))
- .add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "guinea")), defaultOp))
- .add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "pig")), defaultOp))
- .add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "smells")), defaultOp))
- .build(),
- new BooleanQuery.Builder()
- .add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "that")), defaultOp))
- .add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "cavy")), defaultOp))
- .add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "smells")), defaultOp))
- .build()
- );
+ expectedQuery = new BooleanQuery.Builder()
+ .add(new TermQuery(new Term(STRING_FIELD_NAME, "that")), defaultOp)
+ .add(new GraphQuery(
+ new BooleanQuery.Builder()
+ .add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "guinea")), Occur.MUST))
+ .add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "pig")), Occur.MUST))
+ .build(),
+ new TermQuery(new Term(STRING_FIELD_NAME, "cavy"))
+ ), defaultOp)
+ .add(new TermQuery(new Term(STRING_FIELD_NAME, "smells")), defaultOp)
+ .build();
assertThat(query, Matchers.equalTo(expectedQuery));
// complex
@@ -427,8 +426,8 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
.add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "that")), BooleanClause.Occur.MUST))
.add(new BooleanClause(new GraphQuery(
new BooleanQuery.Builder()
- .add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "guinea")), defaultOp))
- .add(new BooleanClause(new TermQuery(new Term(STRING_FIELD_NAME, "pig")), defaultOp))
+ .add(new TermQuery(new Term(STRING_FIELD_NAME, "guinea")), Occur.MUST)
+ .add(new TermQuery(new Term(STRING_FIELD_NAME, "pig")), Occur.MUST)
.build(),
new TermQuery(new Term(STRING_FIELD_NAME, "cavy"))
), BooleanClause.Occur.MUST_NOT))
diff --git a/core/src/test/java/org/elasticsearch/index/search/MatchQueryIT.java b/core/src/test/java/org/elasticsearch/index/search/MatchQueryIT.java
index 2381b8bdc3..3b529fc6ec 100644
--- a/core/src/test/java/org/elasticsearch/index/search/MatchQueryIT.java
+++ b/core/src/test/java/org/elasticsearch/index/search/MatchQueryIT.java
@@ -172,6 +172,7 @@ public class MatchQueryIT extends ESIntegTestCase {
assertSearchHits(searchResponse, "1", "2", "3", "7", "8");
}
+ @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/23102")
public void testCommonTerms() throws ExecutionException, InterruptedException {
String route = "commonTermsTest";
List<IndexRequestBuilder> builders = getDocs();
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHitsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHitsTests.java
index 2a44333638..ba729d3c97 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHitsTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHitsTests.java
@@ -36,7 +36,6 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
-import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
@@ -188,11 +187,7 @@ public class InternalTopHitsTests extends InternalAggregationTestCase<InternalTo
@SuppressWarnings("rawtypes")
FieldComparator[] comparators = new FieldComparator[testInstancesSortFields.length];
for (int i = 0; i < testInstancesSortFields.length; i++) {
- try {
- comparators[i] = testInstancesSortFields[i].getComparator(0, 0);
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
+ comparators[i] = testInstancesSortFields[i].getComparator(0, 0);
}
return (lhs, rhs) -> {
FieldDoc l = (FieldDoc) lhs;
diff --git a/modules/lang-expression/licenses/lucene-expressions-6.4.1.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-6.4.1.jar.sha1
deleted file mode 100644
index b3ef2f5e9f..0000000000
--- a/modules/lang-expression/licenses/lucene-expressions-6.4.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d56305d2ee8b2484262b1704d802470e6d8f8a8f \ No newline at end of file
diff --git a/modules/lang-expression/licenses/lucene-expressions-6.5.0-snapshot-f919485.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-6.5.0-snapshot-f919485.jar.sha1
new file mode 100644
index 0000000000..861da27527
--- /dev/null
+++ b/modules/lang-expression/licenses/lucene-expressions-6.5.0-snapshot-f919485.jar.sha1
@@ -0,0 +1 @@
+89f1c501f5f6504c53527da76aa18796c2b56492 \ No newline at end of file
diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java
index e1511c216a..700fc308e2 100644
--- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java
+++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java
@@ -28,11 +28,13 @@ import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.queries.TermsQuery;
+import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.elasticsearch.common.ParsingException;
@@ -194,12 +196,7 @@ public class PercolatorFieldMapper extends FieldMapper {
}
Query createCandidateQuery(IndexReader indexReader) throws IOException {
- List<Term> extractedTerms = new ArrayList<>();
- // include extractionResultField:failed, because docs with this term have no extractedTermsField
- // and otherwise we would fail to return these docs. Docs that failed query term extraction
- // always need to be verified by MemoryIndex:
- extractedTerms.add(new Term(extractionResultField.name(), EXTRACTION_FAILED));
-
+ List<BytesRef> extractedTerms = new ArrayList<>();
LeafReader reader = indexReader.leaves().get(0).reader();
Fields fields = reader.fields();
for (String field : fields) {
@@ -215,10 +212,19 @@ public class PercolatorFieldMapper extends FieldMapper {
builder.append(fieldBr);
builder.append(FIELD_VALUE_SEPARATOR);
builder.append(term);
- extractedTerms.add(new Term(queryTermsField.name(), builder.toBytesRef()));
+ extractedTerms.add(builder.toBytesRef());
}
}
- return new TermsQuery(extractedTerms);
+ Query extractionSuccess = new TermInSetQuery(queryTermsField.name(), extractedTerms);
+ // include extractionResultField:failed, because docs with this term have no extractedTermsField
+ // and otherwise we would fail to return these docs. Docs that failed query term extraction
+ // always need to be verified by MemoryIndex:
+ Query extractionFailure = new TermQuery(new Term(extractionResultField.name(), EXTRACTION_FAILED));
+
+ return new BooleanQuery.Builder()
+ .add(extractionSuccess, Occur.SHOULD)
+ .add(extractionFailure, Occur.SHOULD)
+ .build();
}
}
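For context (not part of the patch): the candidate query now produced above is a two-clause disjunction, a TermInSetQuery over the extracted encoded terms plus a TermQuery on the extraction-failed marker. A minimal standalone sketch of building that shape with illustrative field names:

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.BooleanClause.Occur;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.TermInSetQuery;
    import org.apache.lucene.search.TermQuery;
    import org.apache.lucene.util.BytesRef;

    import java.util.Arrays;
    import java.util.List;

    public class CandidateQuerySketch {
        public static void main(String[] args) {
            // Illustrative field names and encoded terms; the real ones come from the percolator field type.
            List<BytesRef> extractedTerms = Arrays.asList(
                new BytesRef("field1\u0000value1"), new BytesRef("field2\u0000value2"));

            Query extractionSuccess = new TermInSetQuery("query_terms", extractedTerms);
            Query extractionFailure = new TermQuery(new Term("extraction_result", "failed"));

            Query candidateQuery = new BooleanQuery.Builder()
                .add(extractionSuccess, Occur.SHOULD)
                .add(extractionFailure, Occur.SHOULD)
                .build();
            System.out.println(candidateQuery);
        }
    }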
diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java
index dce0e3dd2a..2f88e54b08 100644
--- a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java
+++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java
@@ -22,7 +22,6 @@ import org.apache.lucene.index.PrefixCodedTerms;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.BlendedTermQuery;
import org.apache.lucene.queries.CommonTermsQuery;
-import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
@@ -32,6 +31,7 @@ import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SynonymQuery;
+import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.spans.SpanFirstQuery;
import org.apache.lucene.search.spans.SpanNearQuery;
@@ -62,7 +62,7 @@ public final class QueryAnalyzer {
map.put(ConstantScoreQuery.class, constantScoreQuery());
map.put(BoostQuery.class, boostQuery());
map.put(TermQuery.class, termQuery());
- map.put(TermsQuery.class, termsQuery());
+ map.put(TermInSetQuery.class, termInSetQuery());
map.put(CommonTermsQuery.class, commonTermsQuery());
map.put(BlendedTermQuery.class, blendedTermQuery());
map.put(PhraseQuery.class, phraseQuery());
@@ -145,11 +145,11 @@ public final class QueryAnalyzer {
});
}
- static Function<Query, Result> termsQuery() {
+ static Function<Query, Result> termInSetQuery() {
return query -> {
- TermsQuery termsQuery = (TermsQuery) query;
+ TermInSetQuery termInSetQuery = (TermInSetQuery) query;
Set<Term> terms = new HashSet<>();
- PrefixCodedTerms.TermIterator iterator = termsQuery.getTermData().iterator();
+ PrefixCodedTerms.TermIterator iterator = termInSetQuery.getTermData().iterator();
for (BytesRef term = iterator.next(); term != null; term = iterator.next()) {
terms.add(new Term(iterator.field(), term));
}
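The extraction function above walks the query's PrefixCodedTerms to recover field/term pairs. A minimal self-contained sketch of the same iteration (illustrative only, not part of the patch):

    import org.apache.lucene.index.PrefixCodedTerms;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.TermInSetQuery;
    import org.apache.lucene.util.BytesRef;

    import java.util.HashSet;
    import java.util.Set;

    public class TermInSetExtractionSketch {
        public static void main(String[] args) {
            TermInSetQuery query = new TermInSetQuery("_field",
                new BytesRef("_term1"), new BytesRef("_term2"));

            Set<Term> terms = new HashSet<>();
            PrefixCodedTerms.TermIterator iterator = query.getTermData().iterator();
            for (BytesRef term = iterator.next(); term != null; term = iterator.next()) {
                terms.add(new Term(iterator.field(), term));
            }
            System.out.println(terms); // e.g. [_field:_term1, _field:_term2]
        }
    }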
diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java
index d47e0a4614..244091595b 100644
--- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java
+++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java
@@ -26,12 +26,13 @@ import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.PrefixCodedTerms;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.memory.MemoryIndex;
-import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.PhraseQuery;
+import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery;
+import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
@@ -207,10 +208,13 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
IndexReader indexReader = memoryIndex.createSearcher().getIndexReader();
- TermsQuery termsQuery = (TermsQuery) fieldType.createCandidateQuery(indexReader);
+ BooleanQuery candidateQuery = (BooleanQuery) fieldType.createCandidateQuery(indexReader);
+ assertEquals(2, candidateQuery.clauses().size());
+ assertEquals(Occur.SHOULD, candidateQuery.clauses().get(0).getOccur());
+ TermInSetQuery termsQuery = (TermInSetQuery) candidateQuery.clauses().get(0).getQuery();
PrefixCodedTerms terms = termsQuery.getTermData();
- assertThat(terms.size(), equalTo(15L));
+ assertThat(terms.size(), equalTo(14L));
PrefixCodedTerms.TermIterator termIterator = terms.iterator();
assertTermIterator(termIterator, "_field3\u0000me", fieldType.queryTermsField.name());
assertTermIterator(termIterator, "_field3\u0000unhide", fieldType.queryTermsField.name());
@@ -226,7 +230,10 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
assertTermIterator(termIterator, "field2\u0000some", fieldType.queryTermsField.name());
assertTermIterator(termIterator, "field2\u0000text", fieldType.queryTermsField.name());
assertTermIterator(termIterator, "field4\u0000123", fieldType.queryTermsField.name());
- assertTermIterator(termIterator, EXTRACTION_FAILED, fieldType.extractionResultField.name());
+
+ assertEquals(Occur.SHOULD, candidateQuery.clauses().get(1).getOccur());
+ assertEquals(new TermQuery(new Term(fieldType.extractionResultField.name(), EXTRACTION_FAILED)),
+ candidateQuery.clauses().get(1).getQuery());
}
private void assertTermIterator(PrefixCodedTerms.TermIterator termIterator, String expectedValue, String expectedField) {
diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java
index 4410ac8012..c00872a6e8 100644
--- a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java
+++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java
@@ -21,7 +21,6 @@ package org.elasticsearch.percolator;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.BlendedTermQuery;
import org.apache.lucene.queries.CommonTermsQuery;
-import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
@@ -31,6 +30,7 @@ import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.SynonymQuery;
+import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.search.spans.SpanFirstQuery;
@@ -71,7 +71,7 @@ public class QueryAnalyzerTests extends ESTestCase {
}
public void testExtractQueryMetadata_termsQuery() {
- TermsQuery termsQuery = new TermsQuery("_field", new BytesRef("_term1"), new BytesRef("_term2"));
+ TermInSetQuery termsQuery = new TermInSetQuery("_field", new BytesRef("_term1"), new BytesRef("_term2"));
Result result = analyze(termsQuery);
assertThat(result.verified, is(true));
List<Term> terms = new ArrayList<>(result.terms);
@@ -81,18 +81,6 @@ public class QueryAnalyzerTests extends ESTestCase {
assertThat(terms.get(0).text(), equalTo("_term1"));
assertThat(terms.get(1).field(), equalTo("_field"));
assertThat(terms.get(1).text(), equalTo("_term2"));
-
- // test with different fields
- termsQuery = new TermsQuery(new Term("_field1", "_term1"), new Term("_field2", "_term2"));
- result = analyze(termsQuery);
- assertThat(result.verified, is(true));
- terms = new ArrayList<>(result.terms);
- Collections.sort(terms);
- assertThat(terms.size(), equalTo(2));
- assertThat(terms.get(0).field(), equalTo("_field1"));
- assertThat(terms.get(0).text(), equalTo("_term1"));
- assertThat(terms.get(1).field(), equalTo("_field2"));
- assertThat(terms.get(1).text(), equalTo("_term2"));
}
public void testExtractQueryMetadata_phraseQuery() {
diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.4.1.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.4.1.jar.sha1
deleted file mode 100644
index adbd64b045..0000000000
--- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.4.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-dae7aa1d7ccb6eaa32d7208d25fe772c029113bd \ No newline at end of file
diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.5.0-snapshot-f919485.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.5.0-snapshot-f919485.jar.sha1
new file mode 100644
index 0000000000..c46c62d53a
--- /dev/null
+++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.5.0-snapshot-f919485.jar.sha1
@@ -0,0 +1 @@
+e430aa3efe4883c74edc01711871870c907f37ca \ No newline at end of file
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.4.1.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.4.1.jar.sha1
deleted file mode 100644
index 31c61e712e..0000000000
--- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.4.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-8acb2fd78d2a4612d677e353b056c89fe700a73a \ No newline at end of file
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.5.0-snapshot-f919485.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.5.0-snapshot-f919485.jar.sha1
new file mode 100644
index 0000000000..19aef8debe
--- /dev/null
+++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.5.0-snapshot-f919485.jar.sha1
@@ -0,0 +1 @@
+f4340c16ce417a688b5b20f6b6624f51683247bd \ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.4.1.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.4.1.jar.sha1
deleted file mode 100644
index ea37b6ed38..0000000000
--- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.4.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5f40ded59cc0a57d2a9fe9d9b9ff6d5dbdb319e6 \ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.5.0-snapshot-f919485.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.5.0-snapshot-f919485.jar.sha1
new file mode 100644
index 0000000000..7ea8298602
--- /dev/null
+++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.5.0-snapshot-f919485.jar.sha1
@@ -0,0 +1 @@
+ccf0f76f1249bc0027e9ebe01953e3663e52a5dc \ No newline at end of file
diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.4.1.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.4.1.jar.sha1
deleted file mode 100644
index 89a03e9fe0..0000000000
--- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.4.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-1aff866b1c191914301af25f818309f7ceb76cd3 \ No newline at end of file
diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.5.0-snapshot-f919485.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.5.0-snapshot-f919485.jar.sha1
new file mode 100644
index 0000000000..a10f5f52b0
--- /dev/null
+++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.5.0-snapshot-f919485.jar.sha1
@@ -0,0 +1 @@
+8cfa9d27ea3d36524d69b78601fe9b9c6d4b9628 \ No newline at end of file
diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.4.1.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.4.1.jar.sha1
deleted file mode 100644
index a3ee881dfd..0000000000
--- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.4.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-52fcba7d7abde7d299ba31b1c5194fca3b1625da \ No newline at end of file
diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.5.0-snapshot-f919485.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.5.0-snapshot-f919485.jar.sha1
new file mode 100644
index 0000000000..6c8e2bac03
--- /dev/null
+++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.5.0-snapshot-f919485.jar.sha1
@@ -0,0 +1 @@
+eb7e1cdab96c107ca256cef75e149139f9b62044 \ No newline at end of file
diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.4.1.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.4.1.jar.sha1
deleted file mode 100644
index cbe0964ac6..0000000000
--- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.4.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-405aeb0b03eca645434cbd23aed31bb74feaece8 \ No newline at end of file
diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.5.0-snapshot-f919485.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.5.0-snapshot-f919485.jar.sha1
new file mode 100644
index 0000000000..68e86b164f
--- /dev/null
+++ b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.5.0-snapshot-f919485.jar.sha1
@@ -0,0 +1 @@
+d0a56789aa72751547c0001a0e950c387b245181 \ No newline at end of file
diff --git a/test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java
index bdf36bbb85..521494a5a5 100644
--- a/test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java
@@ -20,6 +20,7 @@
package org.elasticsearch;
import org.apache.lucene.analysis.en.PorterStemFilterFactory;
+import org.apache.lucene.analysis.miscellaneous.WordDelimiterGraphFilterFactory;
import org.apache.lucene.analysis.reverse.ReverseStringFilterFactory;
import org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory;
import org.apache.lucene.analysis.util.CharFilterFactory;
@@ -249,6 +250,7 @@ public class AnalysisFactoryTestCase extends ESTestCase {
.put("type", KeepTypesFilterFactory.class)
.put("uppercase", UpperCaseTokenFilterFactory.class)
.put("worddelimiter", WordDelimiterTokenFilterFactory.class)
+ .put("worddelimitergraph", WordDelimiterGraphFilterFactory.class)
.put("flattengraph", FlattenGraphTokenFilterFactory.class)
// TODO: these tokenfilters are not yet exposed: useful?