summary | refs | log | tree | commit | diff
path: root/core/src/main/java/org/elasticsearch
diff options
context:
space:
mode:
author: Simon Willnauer <simonw@apache.org> 2017-02-08 14:40:08 +0100
committer: GitHub <noreply@github.com> 2017-02-08 14:40:08 +0100
commit: ecb01c15b9a6645f22f153eb099a377e70e398c8 (patch)
tree: 6281a85417a51ac6265e44d93ff92322fee4b199 /core/src/main/java/org/elasticsearch
parent: 2d6d871f5c70b11c23a5b99b5e3a0bf0cff8c6b0 (diff)
Fold InternalSearchHits and friends into their interfaces (#23042)
We have a bunch of interfaces that have only a single implementation for 6 years now. These interfaces are pretty useless from a SW development perspective and only add unnecessary abstractions. They also require lots of casting in many places where we expect that there is only one concrete implementation. This change removes the interfaces, makes all of the classes final and removes the duplicate `foo` `getFoo` accessors in favor of `getFoo` from these classes.
Diffstat (limited to 'core/src/main/java/org/elasticsearch')
-rw-r--r--core/src/main/java/org/elasticsearch/action/bulk/byscroll/ClientScrollableHitSource.java2
-rw-r--r--core/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java20
-rw-r--r--core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java2
-rw-r--r--core/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java2
-rw-r--r--core/src/main/java/org/elasticsearch/rest/action/document/RestCountAction.java2
-rw-r--r--core/src/main/java/org/elasticsearch/search/SearchHit.java865
-rw-r--r--core/src/main/java/org/elasticsearch/search/SearchHitField.java97
-rw-r--r--core/src/main/java/org/elasticsearch/search/SearchHits.java198
-rw-r--r--core/src/main/java/org/elasticsearch/search/SearchSortValues.java (renamed from core/src/main/java/org/elasticsearch/search/internal/SearchSortValues.java)5
-rw-r--r--core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHits.java17
-rw-r--r--core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregator.java10
-rw-r--r--core/src/main/java/org/elasticsearch/search/collapse/ExpandCollapseSearchResponseListener.java10
-rw-r--r--core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java32
-rw-r--r--core/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java15
-rw-r--r--core/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java10
-rw-r--r--core/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java9
-rw-r--r--core/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainFetchSubPhase.java4
-rw-r--r--core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsContext.java16
-rw-r--r--core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java10
-rw-r--r--core/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesFetchSubPhase.java6
-rw-r--r--core/src/main/java/org/elasticsearch/search/fetch/subphase/ParentFieldSubFetchPhase.java5
-rw-r--r--core/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsFetchSubPhase.java7
-rw-r--r--core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java8
-rw-r--r--core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java2
-rw-r--r--core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PostingsHighlighter.java2
-rw-r--r--core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java2
-rw-r--r--core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java943
-rw-r--r--core/src/main/java/org/elasticsearch/search/internal/InternalSearchHitField.java128
-rw-r--r--core/src/main/java/org/elasticsearch/search/internal/InternalSearchHits.java228
-rw-r--r--core/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java11
-rw-r--r--core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java12
31 files changed, 1128 insertions, 1552 deletions
diff --git a/core/src/main/java/org/elasticsearch/action/bulk/byscroll/ClientScrollableHitSource.java b/core/src/main/java/org/elasticsearch/action/bulk/byscroll/ClientScrollableHitSource.java
index 4f2aefc101..9fc02e29e6 100644
--- a/core/src/main/java/org/elasticsearch/action/bulk/byscroll/ClientScrollableHitSource.java
+++ b/core/src/main/java/org/elasticsearch/action/bulk/byscroll/ClientScrollableHitSource.java
@@ -255,7 +255,7 @@ public class ClientScrollableHitSource extends ScrollableHitSource {
private <T> T fieldValue(String fieldName) {
SearchHitField field = delegate.field(fieldName);
- return field == null ? null : field.value();
+ return field == null ? null : field.getValue();
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/core/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java
index 33c8859e04..5445155603 100644
--- a/core/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java
+++ b/core/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java
@@ -45,8 +45,8 @@ import org.elasticsearch.search.aggregations.pipeline.SiblingPipelineAggregator;
import org.elasticsearch.search.dfs.AggregatedDfs;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.FetchSearchResult;
-import org.elasticsearch.search.internal.InternalSearchHit;
-import org.elasticsearch.search.internal.InternalSearchHits;
+import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.internal.InternalSearchResponse;
import org.elasticsearch.search.profile.ProfileShardResult;
import org.elasticsearch.search.profile.SearchProfileShardResults;
@@ -386,7 +386,7 @@ public class SearchPhaseController extends AbstractComponent {
return InternalSearchResponse.empty();
}
List<? extends AtomicArray.Entry<? extends QuerySearchResultProvider>> fetchResults = fetchResultsArr.asList();
- InternalSearchHits hits = getHits(reducedQueryPhase, ignoreFrom, sortedDocs, fetchResultsArr);
+ SearchHits hits = getHits(reducedQueryPhase, ignoreFrom, sortedDocs, fetchResultsArr);
if (reducedQueryPhase.suggest != null) {
if (!fetchResults.isEmpty()) {
int currentOffset = hits.getHits().length;
@@ -401,7 +401,7 @@ public class SearchPhaseController extends AbstractComponent {
FetchSearchResult fetchResult = searchResultProvider.fetchResult();
int fetchResultIndex = fetchResult.counterGetAndIncrement();
if (fetchResultIndex < fetchResult.hits().internalHits().length) {
- InternalSearchHit hit = fetchResult.hits().internalHits()[fetchResultIndex];
+ SearchHit hit = fetchResult.hits().internalHits()[fetchResultIndex];
CompletionSuggestion.Entry.Option suggestOption =
suggestionOptions.get(scoreDocIndex - currentOffset);
hit.score(shardDoc.score);
@@ -417,8 +417,8 @@ public class SearchPhaseController extends AbstractComponent {
return reducedQueryPhase.buildResponse(hits);
}
- private InternalSearchHits getHits(ReducedQueryPhase reducedQueryPhase, boolean ignoreFrom, ScoreDoc[] sortedDocs,
- AtomicArray<? extends QuerySearchResultProvider> fetchResultsArr) {
+ private SearchHits getHits(ReducedQueryPhase reducedQueryPhase, boolean ignoreFrom, ScoreDoc[] sortedDocs,
+ AtomicArray<? extends QuerySearchResultProvider> fetchResultsArr) {
List<? extends AtomicArray.Entry<? extends QuerySearchResultProvider>> fetchResults = fetchResultsArr.asList();
boolean sorted = false;
int sortScoreIndex = -1;
@@ -445,7 +445,7 @@ public class SearchPhaseController extends AbstractComponent {
// with collapsing we can have more fetch hits than sorted docs
numSearchHits = Math.min(sortedDocs.length, numSearchHits);
// merge hits
- List<InternalSearchHit> hits = new ArrayList<>();
+ List<SearchHit> hits = new ArrayList<>();
if (!fetchResults.isEmpty()) {
for (int i = 0; i < numSearchHits; i++) {
ScoreDoc shardDoc = sortedDocs[i];
@@ -456,7 +456,7 @@ public class SearchPhaseController extends AbstractComponent {
FetchSearchResult fetchResult = fetchResultProvider.fetchResult();
int index = fetchResult.counterGetAndIncrement();
if (index < fetchResult.hits().internalHits().length) {
- InternalSearchHit searchHit = fetchResult.hits().internalHits()[index];
+ SearchHit searchHit = fetchResult.hits().internalHits()[index];
searchHit.score(shardDoc.score);
searchHit.shard(fetchResult.shardTarget());
if (sorted) {
@@ -470,7 +470,7 @@ public class SearchPhaseController extends AbstractComponent {
}
}
}
- return new InternalSearchHits(hits.toArray(new InternalSearchHit[hits.size()]), reducedQueryPhase.totalHits,
+ return new SearchHits(hits.toArray(new SearchHit[hits.size()]), reducedQueryPhase.totalHits,
reducedQueryPhase.maxScore);
}
@@ -596,7 +596,7 @@ public class SearchPhaseController extends AbstractComponent {
* Creates a new search response from the given merged hits.
* @see #merge(boolean, ScoreDoc[], ReducedQueryPhase, AtomicArray)
*/
- public InternalSearchResponse buildResponse(InternalSearchHits hits) {
+ public InternalSearchResponse buildResponse(SearchHits hits) {
return new InternalSearchResponse(hits, aggregations, suggest, shardResults, timedOut, terminatedEarly);
}
diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java
index 6d8d3a42be..13c81c1d5e 100644
--- a/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java
+++ b/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java
@@ -30,14 +30,12 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.search.fetch.FetchSearchResult;
-import org.elasticsearch.search.fetch.QueryFetchSearchResult;
import org.elasticsearch.search.fetch.ShardFetchRequest;
import org.elasticsearch.search.internal.InternalScrollSearchRequest;
import org.elasticsearch.search.internal.InternalSearchResponse;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.query.ScrollQuerySearchResult;
-import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java
index 00eb89a9c0..bbc7ffa250 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java
@@ -91,7 +91,7 @@ public class RestCountAction extends AbstractCatAction {
private Table buildTable(RestRequest request, SearchResponse response) {
Table table = getTableWithHeader(request);
table.startRow();
- table.addCell(response.getHits().totalHits());
+ table.addCell(response.getHits().getTotalHits());
table.endRow();
return table;
diff --git a/core/src/main/java/org/elasticsearch/rest/action/document/RestCountAction.java b/core/src/main/java/org/elasticsearch/rest/action/document/RestCountAction.java
index 5de436b38c..004fab81b4 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/document/RestCountAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/document/RestCountAction.java
@@ -91,7 +91,7 @@ public class RestCountAction extends BaseRestHandler {
if (terminateAfter != DEFAULT_TERMINATE_AFTER) {
builder.field("terminated_early", response.isTerminatedEarly());
}
- builder.field("count", response.getHits().totalHits());
+ builder.field("count", response.getHits().getTotalHits());
buildBroadcastShardsHeader(builder, request, response.getTotalShards(), response.getSuccessfulShards(),
response.getFailedShards(), response.getShardFailures());
diff --git a/core/src/main/java/org/elasticsearch/search/SearchHit.java b/core/src/main/java/org/elasticsearch/search/SearchHit.java
index ef9aef8fbb..8f8d1a98ed 100644
--- a/core/src/main/java/org/elasticsearch/search/SearchHit.java
+++ b/core/src/main/java/org/elasticsearch/search/SearchHit.java
@@ -21,210 +21,889 @@ package org.elasticsearch.search;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.compress.CompressorFactory;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.text.Text;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.mapper.SourceFieldMapper;
+import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
-
+import org.elasticsearch.search.lookup.SourceLookup;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
import java.util.Map;
+import java.util.Objects;
+
+import static java.util.Collections.emptyMap;
+import static java.util.Collections.singletonMap;
+import static java.util.Collections.unmodifiableMap;
+import static org.elasticsearch.common.lucene.Lucene.readExplanation;
+import static org.elasticsearch.common.lucene.Lucene.writeExplanation;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
+import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
+import static org.elasticsearch.common.xcontent.XContentParserUtils.parseStoredFieldsValue;
+import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;
+import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken;
+import static org.elasticsearch.search.fetch.subphase.highlight.HighlightField.readHighlightField;
/**
* A single search hit.
*
* @see SearchHits
*/
-public interface SearchHit extends Streamable, ToXContentObject, Iterable<SearchHitField> {
+public final class SearchHit implements Streamable, ToXContentObject, Iterable<SearchHitField> {
- /**
- * The score.
- */
- float score();
+ private transient int docId;
- /**
- * The score.
- */
- float getScore();
+ private static final float DEFAULT_SCORE = Float.NEGATIVE_INFINITY;
+ private float score = DEFAULT_SCORE;
- /**
- * The index of the hit.
- */
- String index();
+ private Text id;
+ private Text type;
- /**
- * The index of the hit.
- */
- String getIndex();
+ private NestedIdentity nestedIdentity;
- /**
- * The id of the document.
- */
- String id();
+ private long version = -1;
- /**
- * The id of the document.
- */
- String getId();
+ private BytesReference source;
+
+ private Map<String, SearchHitField> fields = emptyMap();
+
+ private Map<String, HighlightField> highlightFields = null;
+
+ private SearchSortValues sortValues = SearchSortValues.EMPTY;
+
+ private String[] matchedQueries = Strings.EMPTY_ARRAY;
+
+ private Explanation explanation;
+
+ @Nullable
+ private SearchShardTarget shard;
+
+ private transient String index;
+
+ private Map<String, Object> sourceAsMap;
+ private byte[] sourceAsBytes;
+
+ private Map<String, SearchHits> innerHits;
+
+ private SearchHit() {
+
+ }
+
+ public SearchHit(int docId) {
+ this(docId, null, null, null);
+ }
+
+ public SearchHit(int docId, String id, Text type, Map<String, SearchHitField> fields) {
+ this(docId, id, type, null, fields);
+ }
+
+ public SearchHit(int nestedTopDocId, String id, Text type, NestedIdentity nestedIdentity, Map<String, SearchHitField> fields) {
+ this.docId = nestedTopDocId;
+ if (id != null) {
+ this.id = new Text(id);
+ } else {
+ this.id = null;
+ }
+ this.type = type;
+ this.nestedIdentity = nestedIdentity;
+ this.fields = fields;
+ }
+
+ public int docId() {
+ return this.docId;
+ }
+
+ public void score(float score) {
+ this.score = score;
+ }
/**
- * The type of the document.
+ * The score.
*/
- String type();
+ public float getScore() {
+ return this.score;
+ }
+
+ public void version(long version) {
+ this.version = version;
+ }
/**
- * The type of the document.
+ * The version of the hit.
*/
- String getType();
+ public long getVersion() {
+ return this.version;
+ }
/**
- * If this is a nested hit then nested reference information is returned otherwise <code>null</code> is returned.
+ * The index of the hit.
*/
- NestedIdentity getNestedIdentity();
+ public String getIndex() {
+ return this.index;
+ }
/**
- * The version of the hit.
+ * The id of the document.
*/
- long version();
+ public String getId() {
+ return id != null ? id.string() : null;
+ }
/**
- * The version of the hit.
+ * The type of the document.
*/
- long getVersion();
+ public String getType() {
+ return type != null ? type.string() : null;
+ }
/**
- * Returns bytes reference, also un compress the source if needed.
+ * If this is a nested hit then nested reference information is returned otherwise <code>null</code> is returned.
*/
- BytesReference sourceRef();
+ public NestedIdentity getNestedIdentity() {
+ return nestedIdentity;
+ }
/**
* Returns bytes reference, also un compress the source if needed.
*/
- BytesReference getSourceRef();
+ public BytesReference getSourceRef() {
+ if (this.source == null) {
+ return null;
+ }
+
+ try {
+ this.source = CompressorFactory.uncompressIfNeeded(this.source);
+ return this.source;
+ } catch (IOException e) {
+ throw new ElasticsearchParseException("failed to decompress source", e);
+ }
+ }
/**
- * The source of the document (can be <tt>null</tt>). Note, its a copy of the source
- * into a byte array, consider using {@link #sourceRef()} so there won't be a need to copy.
+ * Sets representation, might be compressed....
*/
- byte[] source();
+ public SearchHit sourceRef(BytesReference source) {
+ this.source = source;
+ this.sourceAsBytes = null;
+ this.sourceAsMap = null;
+ return this;
+ }
/**
* Is the source available or not. A source with no fields will return true. This will return false if {@code fields} doesn't contain
* {@code _source} or if source is disabled in the mapping.
*/
- boolean hasSource();
-
- /**
- * The source of the document as a map (can be <tt>null</tt>).
- */
- Map<String, Object> getSource();
+ public boolean hasSource() {
+ return source != null;
+ }
/**
* The source of the document as string (can be <tt>null</tt>).
*/
- String sourceAsString();
+ public String getSourceAsString() {
+ if (source == null) {
+ return null;
+ }
+ try {
+ return XContentHelper.convertToJson(getSourceRef(), false);
+ } catch (IOException e) {
+ throw new ElasticsearchParseException("failed to convert source to a json string");
+ }
+ }
- /**
- * The source of the document as string (can be <tt>null</tt>).
- */
- String getSourceAsString();
/**
* The source of the document as a map (can be <tt>null</tt>).
*/
- Map<String, Object> sourceAsMap() throws ElasticsearchParseException;
+ public Map<String, Object> getSourceAsMap() {
+ if (source == null) {
+ return null;
+ }
+ if (sourceAsMap != null) {
+ return sourceAsMap;
+ }
- /**
- * If enabled, the explanation of the search hit.
- */
- Explanation explanation();
+ sourceAsMap = SourceLookup.sourceAsMap(source);
+ return sourceAsMap;
+ }
- /**
- * If enabled, the explanation of the search hit.
- */
- Explanation getExplanation();
+ @Override
+ public Iterator<SearchHitField> iterator() {
+ return fields.values().iterator();
+ }
/**
* The hit field matching the given field name.
*/
- SearchHitField field(String fieldName);
+ public SearchHitField field(String fieldName) {
+ return getFields().get(fieldName);
+ }
/**
* A map of hit fields (from field name to hit fields) if additional fields
* were required to be loaded.
*/
- Map<String, SearchHitField> fields();
+ public Map<String, SearchHitField> getFields() {
+ return fields == null ? emptyMap() : fields;
+ }
- /**
- * A map of hit fields (from field name to hit fields) if additional fields
- * were required to be loaded.
- */
- Map<String, SearchHitField> getFields();
+ // returns the fields without handling null cases
+ public Map<String, SearchHitField> fieldsOrNull() {
+ return fields;
+ }
- /**
- * A map of highlighted fields.
- */
- Map<String, HighlightField> highlightFields();
+ public void fields(Map<String, SearchHitField> fields) {
+ this.fields = fields;
+ }
/**
* A map of highlighted fields.
*/
- Map<String, HighlightField> getHighlightFields();
+ public Map<String, HighlightField> getHighlightFields() {
+ return highlightFields == null ? emptyMap() : highlightFields;
+ }
- /**
- * An array of the sort values used.
- */
- Object[] sortValues();
+ public void highlightFields(Map<String, HighlightField> highlightFields) {
+ this.highlightFields = highlightFields;
+ }
+
+ public void sortValues(Object[] sortValues, DocValueFormat[] sortValueFormats) {
+ sortValues(new SearchSortValues(sortValues, sortValueFormats));
+ }
+
+ public void sortValues(SearchSortValues sortValues) {
+ this.sortValues = sortValues;
+ }
/**
* An array of the sort values used.
*/
- Object[] getSortValues();
+ public Object[] getSortValues() {
+ return sortValues.sortValues();
+ }
/**
- * The set of query and filter names the query matched with. Mainly makes sense for compound filters and queries.
+ * If enabled, the explanation of the search hit.
*/
- String[] matchedQueries();
+ public Explanation getExplanation() {
+ return explanation;
+ }
- /**
- * The set of query and filter names the query matched with. Mainly makes sense for compound filters and queries.
- */
- String[] getMatchedQueries();
+ public void explanation(Explanation explanation) {
+ this.explanation = explanation;
+ }
/**
* The shard of the search hit.
*/
- SearchShardTarget shard();
+ public SearchShardTarget getShard() {
+ return shard;
+ }
+
+ public void shard(SearchShardTarget target) {
+ this.shard = target;
+ if (target != null) {
+ this.index = target.getIndex();
+ }
+ }
+
+ public void matchedQueries(String[] matchedQueries) {
+ this.matchedQueries = matchedQueries;
+ }
/**
- * The shard of the search hit.
+ * The set of query and filter names the query matched with. Mainly makes sense for compound filters and queries.
*/
- SearchShardTarget getShard();
+ public String[] getMatchedQueries() {
+ return this.matchedQueries;
+ }
/**
* @return Inner hits or <code>null</code> if there are none
*/
- Map<String, SearchHits> getInnerHits();
+ @SuppressWarnings("unchecked")
+ public Map<String, SearchHits> getInnerHits() {
+ return (Map) innerHits;
+ }
+
+ public void setInnerHits(Map<String, SearchHits> innerHits) {
+ this.innerHits = innerHits;
+ }
+
+ public static class Fields {
+ static final String _INDEX = "_index";
+ static final String _TYPE = "_type";
+ static final String _ID = "_id";
+ static final String _VERSION = "_version";
+ static final String _SCORE = "_score";
+ static final String FIELDS = "fields";
+ static final String HIGHLIGHT = "highlight";
+ static final String SORT = "sort";
+ static final String MATCHED_QUERIES = "matched_queries";
+ static final String _EXPLANATION = "_explanation";
+ static final String VALUE = "value";
+ static final String DESCRIPTION = "description";
+ static final String DETAILS = "details";
+ static final String INNER_HITS = "inner_hits";
+ static final String _SHARD = "_shard";
+ static final String _NODE = "_node";
+ }
+
+ // public because we render hit as part of completion suggestion option
+ public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) throws IOException {
+ List<SearchHitField> metaFields = new ArrayList<>();
+ List<SearchHitField> otherFields = new ArrayList<>();
+ if (fields != null && !fields.isEmpty()) {
+ for (SearchHitField field : fields.values()) {
+ if (field.getValues().isEmpty()) {
+ continue;
+ }
+ if (field.isMetadataField()) {
+ metaFields.add(field);
+ } else {
+ otherFields.add(field);
+ }
+ }
+ }
+
+ // For inner_hit hits shard is null and that is ok, because the parent search hit has all this information.
+ // Even if this was included in the inner_hit hits this would be the same, so better leave it out.
+ if (getExplanation() != null && shard != null) {
+ builder.field(Fields._SHARD, shard.getShardId());
+ builder.field(Fields._NODE, shard.getNodeIdText());
+ }
+ if (nestedIdentity != null) {
+ nestedIdentity.toXContent(builder, params);
+ } else {
+ if (index != null) {
+ builder.field(Fields._INDEX, index);
+ }
+ if (type != null) {
+ builder.field(Fields._TYPE, type);
+ }
+ if (id != null) {
+ builder.field(Fields._ID, id);
+ }
+ }
+ if (version != -1) {
+ builder.field(Fields._VERSION, version);
+ }
+ if (Float.isNaN(score)) {
+ builder.nullField(Fields._SCORE);
+ } else {
+ builder.field(Fields._SCORE, score);
+ }
+ for (SearchHitField field : metaFields) {
+ Object value = field.getValue();
+ builder.field(field.getName(), value);
+ }
+ if (source != null) {
+ XContentHelper.writeRawField(SourceFieldMapper.NAME, source, builder, params);
+ }
+ if (!otherFields.isEmpty()) {
+ builder.startObject(Fields.FIELDS);
+ for (SearchHitField field : otherFields) {
+ builder.startArray(field.getName());
+ for (Object value : field.getValues()) {
+ builder.value(value);
+ }
+ builder.endArray();
+ }
+ builder.endObject();
+ }
+ if (highlightFields != null && !highlightFields.isEmpty()) {
+ builder.startObject(Fields.HIGHLIGHT);
+ for (HighlightField field : highlightFields.values()) {
+ field.toXContent(builder, params);
+ }
+ builder.endObject();
+ }
+ sortValues.toXContent(builder, params);
+ if (matchedQueries.length > 0) {
+ builder.startArray(Fields.MATCHED_QUERIES);
+ for (String matchedFilter : matchedQueries) {
+ builder.value(matchedFilter);
+ }
+ builder.endArray();
+ }
+ if (getExplanation() != null) {
+ builder.field(Fields._EXPLANATION);
+ buildExplanation(builder, getExplanation());
+ }
+ if (innerHits != null) {
+ builder.startObject(Fields.INNER_HITS);
+ for (Map.Entry<String, SearchHits> entry : innerHits.entrySet()) {
+ builder.startObject(entry.getKey());
+ entry.getValue().toXContent(builder, params);
+ builder.endObject();
+ }
+ builder.endObject();
+ }
+ return builder;
+ }
+
+ public static SearchHit fromXContent(XContentParser parser) throws IOException {
+ XContentParser.Token token;
+ ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
+ String currentFieldName = null;
+ String type = null, id = null;
+ String index = null;
+ float score = DEFAULT_SCORE;
+ long version = -1;
+ SearchSortValues sortValues = SearchSortValues.EMPTY;
+ NestedIdentity nestedIdentity = null;
+ Map<String, HighlightField> highlightFields = new HashMap<>();
+ BytesReference parsedSource = null;
+ List<String> matchedQueries = new ArrayList<>();
+ Map<String, SearchHitField> fields = new HashMap<>();
+ Explanation explanation = null;
+ ShardId shardId = null;
+ String nodeId = null;
+ Map<String, SearchHits> innerHits = null;
+ while((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+ if (token == XContentParser.Token.FIELD_NAME) {
+ currentFieldName = parser.currentName();
+ } else if (token.isValue()) {
+ if (Fields._TYPE.equals(currentFieldName)) {
+ type = parser.text();
+ } else if (Fields._INDEX.equals(currentFieldName)) {
+ index = parser.text();
+ } else if (Fields._ID.equals(currentFieldName)) {
+ id = parser.text();
+ } else if (Fields._SCORE.equals(currentFieldName)) {
+ score = parser.floatValue();
+ } else if (Fields._VERSION.equals(currentFieldName)) {
+ version = parser.longValue();
+ } else if (Fields._SHARD.equals(currentFieldName)) {
+ shardId = ShardId.fromString(parser.text());
+ } else if (Fields._NODE.equals(currentFieldName)) {
+ nodeId = parser.text();
+ } else if (MapperService.isMetadataField(currentFieldName)) {
+ List<Object> values = new ArrayList<>();
+ values.add(parseStoredFieldsValue(parser));
+ fields.put(currentFieldName, new SearchHitField(currentFieldName, values));
+ } else {
+ throwUnknownField(currentFieldName, parser.getTokenLocation());
+ }
+ } else if (token == XContentParser.Token.VALUE_NULL) {
+ if (Fields._SCORE.equals(currentFieldName)) {
+ score = Float.NaN;
+ } else {
+ throwUnknownField(currentFieldName, parser.getTokenLocation());
+ }
+ } else if (token == XContentParser.Token.START_OBJECT) {
+ if (SourceFieldMapper.NAME.equals(currentFieldName)) {
+ try (XContentBuilder builder = XContentBuilder.builder(parser.contentType().xContent())) {
+ //the original document gets slightly modified: whitespaces or pretty printing are not preserved,
+ //it all depends on the current builder settings
+ builder.copyCurrentStructure(parser);
+ parsedSource = builder.bytes();
+ }
+ } else if (Fields.HIGHLIGHT.equals(currentFieldName)) {
+ while((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+ HighlightField highlightField = HighlightField.fromXContent(parser);
+ highlightFields.put(highlightField.getName(), highlightField);
+ }
+ } else if (Fields.FIELDS.equals(currentFieldName)) {
+ while((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+ String fieldName = parser.currentName();
+ List<Object> values = new ArrayList<>();
+ ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser::getTokenLocation);
+ while((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+ values.add(parseStoredFieldsValue(parser));
+ }
+ fields.put(fieldName, new SearchHitField(fieldName, values));
+ }
+ } else if (Fields._EXPLANATION.equals(currentFieldName)) {
+ explanation = parseExplanation(parser);
+ } else if (Fields.INNER_HITS.equals(currentFieldName)) {
+ innerHits = new HashMap<>();
+ while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+ // parse the key
+ ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
+ String name = parser.currentName();
+ innerHits.put(name, SearchHits.fromXContent(parser));
+ parser.nextToken();
+ ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.currentToken(), parser::getTokenLocation);
+ }
+ } else if (NestedIdentity.Fields._NESTED.equals(currentFieldName)) {
+ nestedIdentity = NestedIdentity.fromXContent(parser);
+ } else {
+ throwUnknownField(currentFieldName, parser.getTokenLocation());
+ }
+ } else if (token == XContentParser.Token.START_ARRAY) {
+ if (Fields.SORT.equals(currentFieldName)) {
+ sortValues = SearchSortValues.fromXContent(parser);
+ } else if (Fields.MATCHED_QUERIES.equals(currentFieldName)) {
+ while((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+ matchedQueries.add(parser.text());
+ }
+ } else {
+ throwUnknownField(currentFieldName, parser.getTokenLocation());
+ }
+ } else {
+ throwUnknownToken(token, parser.getTokenLocation());
+ }
+ }
+ SearchHit searchHit = new SearchHit(-1, id, new Text(type), nestedIdentity, Collections.emptyMap());
+ searchHit.index = index;
+ searchHit.score(score);
+ searchHit.version(version);
+ searchHit.sortValues(sortValues);
+ searchHit.highlightFields(highlightFields);
+ searchHit.sourceRef(parsedSource);
+ searchHit.explanation(explanation);
+ searchHit.setInnerHits(innerHits);
+ if (matchedQueries.size() > 0) {
+ searchHit.matchedQueries(matchedQueries.toArray(new String[matchedQueries.size()]));
+ }
+ if (shardId != null && nodeId != null) {
+ searchHit.shard(new SearchShardTarget(nodeId, shardId));
+ }
+ searchHit.fields(fields);
+ return searchHit;
+ }
+
+ private static Explanation parseExplanation(XContentParser parser) throws IOException {
+ // Recursively parses a Lucene Explanation ("value", "description", "details")
+ // from the current START_OBJECT token; inverse of buildExplanation below.
+ ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
+ XContentParser.Token token;
+ Float value = null;
+ String description = null;
+ List<Explanation> details = new ArrayList<>();
+ while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+ ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, () -> parser.getTokenLocation());
+ String currentFieldName = parser.currentName();
+ token = parser.nextToken();
+ if (Fields.VALUE.equals(currentFieldName)) {
+ value = parser.floatValue();
+ } else if (Fields.DESCRIPTION.equals(currentFieldName)) {
+ description = parser.textOrNull();
+ } else if (Fields.DETAILS.equals(currentFieldName)) {
+ ensureExpectedToken(XContentParser.Token.START_ARRAY, token, () -> parser.getTokenLocation());
+ // each array element is itself a full explanation object
+ while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+ details.add(parseExplanation(parser));
+ }
+ } else {
+ throwUnknownField(currentFieldName, parser.getTokenLocation());
+ }
+ }
+ // value and description are mandatory: buildExplanation always emits both
+ if (value == null) {
+ throw new ParsingException(parser.getTokenLocation(), "missing explanation value");
+ }
+ if (description == null) {
+ throw new ParsingException(parser.getTokenLocation(), "missing explanation description");
+ }
+ return Explanation.match(value, description, details);
+ }
+
+ private void buildExplanation(XContentBuilder builder, Explanation explanation) throws IOException {
+ // Renders an Explanation as a nested object; the structure matches what
+ // parseExplanation consumes ("value", "description", optional "details" array).
+ builder.startObject();
+ builder.field(Fields.VALUE, explanation.getValue());
+ builder.field(Fields.DESCRIPTION, explanation.getDescription());
+ Explanation[] innerExps = explanation.getDetails();
+ if (innerExps != null) {
+ builder.startArray(Fields.DETAILS);
+ for (Explanation exp : innerExps) {
+ buildExplanation(builder, exp);
+ }
+ builder.endArray();
+ }
+ builder.endObject();
+
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ // Wraps the hit in its own object; the body is delegated to toInnerXContent.
+ builder.startObject();
+ toInnerXContent(builder, params);
+ builder.endObject();
+ return builder;
+ }
+
+ // Stream factory: creates an empty hit and populates it via readFrom.
+ public static SearchHit readSearchHit(StreamInput in) throws IOException {
+ SearchHit hit = new SearchHit();
+ hit.readFrom(in);
+ return hit;
+ }
+
+ @Override
+ public void readFrom(StreamInput in) throws IOException {
+ // Deserializes a hit from the transport stream; field order must mirror writeTo below.
+ score = in.readFloat();
+ id = in.readOptionalText();
+ type = in.readOptionalText();
+ nestedIdentity = in.readOptionalWriteable(NestedIdentity::new);
+ version = in.readLong();
+ source = in.readBytesReference();
+ // an empty source on the wire encodes "no source"
+ if (source.length() == 0) {
+ source = null;
+ }
+ if (in.readBoolean()) {
+ explanation = readExplanation(in);
+ }
+ // stored fields: sizes 0 and 1 use emptyMap/singletonMap to avoid a HashMap allocation
+ int size = in.readVInt();
+ if (size == 0) {
+ fields = emptyMap();
+ } else if (size == 1) {
+ SearchHitField hitField = SearchHitField.readSearchHitField(in);
+ fields = singletonMap(hitField.getName(), hitField);
+ } else {
+ Map<String, SearchHitField> fields = new HashMap<>();
+ for (int i = 0; i < size; i++) {
+ SearchHitField hitField = SearchHitField.readSearchHitField(in);
+ fields.put(hitField.getName(), hitField);
+ }
+ this.fields = unmodifiableMap(fields);
+ }
+
+ // highlight fields: same small-size optimization as above
+ size = in.readVInt();
+ if (size == 0) {
+ highlightFields = emptyMap();
+ } else if (size == 1) {
+ HighlightField field = readHighlightField(in);
+ highlightFields = singletonMap(field.name(), field);
+ } else {
+ Map<String, HighlightField> highlightFields = new HashMap<>();
+ for (int i = 0; i < size; i++) {
+ HighlightField field = readHighlightField(in);
+ highlightFields.put(field.name(), field);
+ }
+ this.highlightFields = unmodifiableMap(highlightFields);
+ }
+
+ sortValues = new SearchSortValues(in);
+
+ size = in.readVInt();
+ if (size > 0) {
+ matchedQueries = new String[size];
+ for (int i = 0; i < size; i++) {
+ matchedQueries[i] = in.readString();
+ }
+ }
+ // we call the setter here because that also sets the local index parameter
+ shard(in.readOptionalWriteable(SearchShardTarget::new));
+ size = in.readVInt();
+ if (size > 0) {
+ innerHits = new HashMap<>(size);
+ for (int i = 0; i < size; i++) {
+ String key = in.readString();
+ SearchHits value = SearchHits.readSearchHits(in);
+ innerHits.put(key, value);
+ }
+ }
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ // Serializes the hit to the transport stream; field order must mirror readFrom above.
+ out.writeFloat(score);
+ out.writeOptionalText(id);
+ out.writeOptionalText(type);
+ out.writeOptionalWriteable(nestedIdentity);
+ out.writeLong(version);
+ out.writeBytesReference(source);
+ if (explanation == null) {
+ out.writeBoolean(false);
+ } else {
+ out.writeBoolean(true);
+ writeExplanation(out, explanation);
+ }
+ // null maps are encoded as a zero entry count
+ if (fields == null) {
+ out.writeVInt(0);
+ } else {
+ out.writeVInt(fields.size());
+ for (SearchHitField hitField : getFields().values()) {
+ hitField.writeTo(out);
+ }
+ }
+ if (highlightFields == null) {
+ out.writeVInt(0);
+ } else {
+ out.writeVInt(highlightFields.size());
+ for (HighlightField highlightField : highlightFields.values()) {
+ highlightField.writeTo(out);
+ }
+ }
+ sortValues.writeTo(out);
+
+ // both branches write the count, so readers always find a vint here
+ if (matchedQueries.length == 0) {
+ out.writeVInt(0);
+ } else {
+ out.writeVInt(matchedQueries.length);
+ for (String matchedFilter : matchedQueries) {
+ out.writeString(matchedFilter);
+ }
+ }
+ out.writeOptionalWriteable(shard);
+ if (innerHits == null) {
+ out.writeVInt(0);
+ } else {
+ out.writeVInt(innerHits.size());
+ for (Map.Entry<String, SearchHits> entry : innerHits.entrySet()) {
+ out.writeString(entry.getKey());
+ entry.getValue().writeTo(out);
+ }
+ }
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ // Identity fast path, consistent with NestedIdentity.equals in this file.
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+ SearchHit other = (SearchHit) obj;
+ // Compares the same fields hashCode uses; matchedQueries needs Arrays.equals
+ // for element-wise array comparison.
+ return Objects.equals(id, other.id)
+ && Objects.equals(type, other.type)
+ && Objects.equals(nestedIdentity, other.nestedIdentity)
+ && Objects.equals(version, other.version)
+ && Objects.equals(source, other.source)
+ && Objects.equals(fields, other.fields)
+ && Objects.equals(getHighlightFields(), other.getHighlightFields())
+ && Arrays.equals(matchedQueries, other.matchedQueries)
+ && Objects.equals(explanation, other.explanation)
+ && Objects.equals(shard, other.shard)
+ && Objects.equals(innerHits, other.innerHits);
+ }
+
+ @Override
+ public int hashCode() {
+ // Same fields as equals(); matchedQueries hashed via Arrays.hashCode for array content.
+ return Objects.hash(id, type, nestedIdentity, version, source, fields, getHighlightFields(), Arrays.hashCode(matchedQueries),
+ explanation, shard, innerHits);
+ }
/**
* Encapsulates the nested identity of a hit.
*/
- interface NestedIdentity {
+ public static final class NestedIdentity implements Writeable, ToXContent {
+
+ private Text field;
+ private int offset;
+ private NestedIdentity child;
+
+ public NestedIdentity(String field, int offset, NestedIdentity child) {
+ this.field = new Text(field);
+ this.offset = offset;
+ this.child = child;
+ }
+
+ // Wire-format constructor; field and child are optional on the stream.
+ NestedIdentity(StreamInput in) throws IOException {
+ field = in.readOptionalText();
+ offset = in.readInt();
+ child = in.readOptionalWriteable(NestedIdentity::new);
+ }
/**
* Returns the nested field in the source this hit originates from
*/
- Text getField();
+ public Text getField() {
+ return field;
+ }
/**
* Returns the offset in the nested array of objects in the source this hit
*/
- int getOffset();
+ public int getOffset() {
+ return offset;
+ }
/**
* Returns the next child nested level if there is any, otherwise <code>null</code> is returned.
*
* In the case of mappings with multiple levels of nested object fields
*/
- NestedIdentity getChild();
+ public NestedIdentity getChild() {
+ return child;
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ // Must mirror the StreamInput constructor above.
+ out.writeOptionalText(field);
+ out.writeInt(offset);
+ out.writeOptionalWriteable(child);
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.field(Fields._NESTED);
+ return innerToXContent(builder, params);
+ }
+
+ /**
+ * Rendering of the inner XContent object without the leading field name. This way the structure innerToXContent renders and
+ * fromXContent parses correspond to each other.
+ */
+ XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ if (field != null) {
+ builder.field(Fields._NESTED_FIELD, field);
+ }
+ if (offset != -1) {
+ builder.field(Fields._NESTED_OFFSET, offset);
+ }
+ if (child != null) {
+ // recurse: the child renders its own "_nested" field inside this object
+ builder = child.toXContent(builder, params);
+ }
+ builder.endObject();
+ return builder;
+ }
+
+ // Parses the object rendered by innerToXContent: "field", "offset", optional "_nested" child.
+ private static final ConstructingObjectParser<NestedIdentity, Void> PARSER = new ConstructingObjectParser<>(
+ "nested_identity",
+ ctorArgs -> new NestedIdentity((String) ctorArgs[0], (int) ctorArgs[1], (NestedIdentity) ctorArgs[2]));
+ static {
+ PARSER.declareString(constructorArg(), new ParseField(Fields._NESTED_FIELD));
+ PARSER.declareInt(constructorArg(), new ParseField(Fields._NESTED_OFFSET));
+ PARSER.declareObject(optionalConstructorArg(), PARSER, new ParseField(Fields._NESTED));
+ }
+
+ public static NestedIdentity fromXContent(XContentParser parser) {
+ return PARSER.apply(parser, null);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+ NestedIdentity other = (NestedIdentity) obj;
+ return Objects.equals(field, other.field) &&
+ Objects.equals(offset, other.offset) &&
+ Objects.equals(child, other.child);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(field, offset, child);
+ }
+
+ // Field names used by both the renderer and the parser above.
+ public static class Fields {
+ static final String _NESTED = "_nested";
+ static final String _NESTED_FIELD = "field";
+ static final String _NESTED_OFFSET = "offset";
+ }
}
}
diff --git a/core/src/main/java/org/elasticsearch/search/SearchHitField.java b/core/src/main/java/org/elasticsearch/search/SearchHitField.java
index 5747bbebef..fbb211b90d 100644
--- a/core/src/main/java/org/elasticsearch/search/SearchHitField.java
+++ b/core/src/main/java/org/elasticsearch/search/SearchHitField.java
@@ -19,49 +19,108 @@
package org.elasticsearch.search;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.index.mapper.MapperService;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
import java.util.List;
+import java.util.Objects;
/**
* A single field name and values part of a {@link SearchHit}.
*
* @see SearchHit
*/
-public interface SearchHitField extends Streamable, Iterable<Object> {
+public final class SearchHitField implements Streamable, Iterable<Object> {
- /**
- * The name of the field.
- */
- String name();
+ private String name;
+ private List<Object> values;
- /**
- * The name of the field.
- */
- String getName();
+ // No-arg constructor used only by readSearchHitField for stream deserialization.
+ private SearchHitField() {
+ }
- /**
- * The first value of the hit.
- */
- <V> V value();
+ public SearchHitField(String name, List<Object> values) {
+ this.name = name;
+ this.values = values;
+ }
/**
- * The first value of the hit.
+ * The name of the field.
*/
- <V> V getValue();
+ public String getName() {
+ return name;
+ }
/**
- * The field values.
+ * The first value of the hit.
*/
- List<Object> values();
+ public <V> V getValue() {
+ if (values == null || values.isEmpty()) {
+ return null;
+ }
+ // unchecked cast: the caller chooses the expected value type
+ return (V)values.get(0);
+ }
/**
* The field values.
*/
- List<Object> getValues();
+ public List<Object> getValues() {
+ return values;
+ }
/**
* @return The field is a metadata field
*/
- boolean isMetadataField();
+ public boolean isMetadataField() {
+ return MapperService.isMetadataField(name);
+ }
+
+ @Override
+ public Iterator<Object> iterator() {
+ return values.iterator();
+ }
+
+ // Stream factory: creates an empty instance and populates it via readFrom.
+ public static SearchHitField readSearchHitField(StreamInput in) throws IOException {
+ SearchHitField result = new SearchHitField();
+ result.readFrom(in);
+ return result;
+ }
+
+ @Override
+ public void readFrom(StreamInput in) throws IOException {
+ // Wire format: name, then a vint-counted list of generic values; mirrors writeTo.
+ name = in.readString();
+ int size = in.readVInt();
+ values = new ArrayList<>(size);
+ for (int i = 0; i < size; i++) {
+ values.add(in.readGenericValue());
+ }
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeString(name);
+ out.writeVInt(values.size());
+ for (Object value : values) {
+ out.writeGenericValue(value);
+ }
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+ SearchHitField other = (SearchHitField) obj;
+ return Objects.equals(name, other.name)
+ && Objects.equals(values, other.values);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name, values);
+ }
}
diff --git a/core/src/main/java/org/elasticsearch/search/SearchHits.java b/core/src/main/java/org/elasticsearch/search/SearchHits.java
index 400e2ebc44..ada09d2e38 100644
--- a/core/src/main/java/org/elasticsearch/search/SearchHits.java
+++ b/core/src/main/java/org/elasticsearch/search/SearchHits.java
@@ -19,48 +19,202 @@
package org.elasticsearch.search;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
-/**
- * The hits of a search request.
- *
- *
- */
-public interface SearchHits extends Streamable, ToXContent, Iterable<SearchHit> {
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Objects;
- /**
- * The total number of hits that matches the search request.
- */
- long totalHits();
+import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
+import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;
+
+public final class SearchHits implements Streamable, ToXContent, Iterable<SearchHit> {
+
+ public static SearchHits empty() {
+ // We shouldn't use static final instance, since that could directly be returned by native transport clients
+ return new SearchHits(EMPTY, 0, 0);
+ }
+
+ public static final SearchHit[] EMPTY = new SearchHit[0];
+
+ private SearchHit[] hits;
+
+ public long totalHits;
+
+ private float maxScore;
+
+ // No-arg constructor used by readSearchHits for stream deserialization.
+ SearchHits() {
+
+ }
+
+ public SearchHits(SearchHit[] hits, long totalHits, float maxScore) {
+ this.hits = hits;
+ this.totalHits = totalHits;
+ this.maxScore = maxScore;
+ }
+
+ // Assigns the given shard target to every hit in this result set.
+ public void shardTarget(SearchShardTarget shardTarget) {
+ for (SearchHit hit : hits) {
+ hit.shard(shardTarget);
+ }
+ }
/**
* The total number of hits that matches the search request.
*/
- long getTotalHits();
+ public long getTotalHits() {
+ return totalHits;
+ }
- /**
- * The maximum score of this query.
- */
- float maxScore();
/**
* The maximum score of this query.
*/
- float getMaxScore();
+ public float getMaxScore() {
+ return maxScore;
+ }
/**
* The hits of the search request (based on the search type, and from / size provided).
*/
- SearchHit[] hits();
+ public SearchHit[] getHits() {
+ return this.hits;
+ }
/**
* Return the hit as the provided position.
*/
- SearchHit getAt(int position);
+ public SearchHit getAt(int position) {
+ return hits[position];
+ }
- /**
- * The hits of the search request (based on the search type, and from / size provided).
- */
- SearchHit[] getHits();
+ @Override
+ public Iterator<SearchHit> iterator() {
+ return Arrays.stream(getHits()).iterator();
+ }
+
+ public SearchHit[] internalHits() {
+ return this.hits;
+ }
+
+ // JSON field names shared by toXContent and fromXContent.
+ static final class Fields {
+ static final String HITS = "hits";
+ static final String TOTAL = "total";
+ static final String MAX_SCORE = "max_score";
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ // Renders: "hits": { "total": ..., "max_score": <float or null>, "hits": [ ... ] }
+ builder.startObject(Fields.HITS);
+ builder.field(Fields.TOTAL, totalHits);
+ if (Float.isNaN(maxScore)) {
+ builder.nullField(Fields.MAX_SCORE);
+ } else {
+ builder.field(Fields.MAX_SCORE, maxScore);
+ }
+ builder.field(Fields.HITS);
+ builder.startArray();
+ for (SearchHit hit : hits) {
+ hit.toXContent(builder, params);
+ }
+ builder.endArray();
+ builder.endObject();
+ return builder;
+ }
+
+ // Inverse of toXContent; a null max_score is read back as NaN.
+ public static SearchHits fromXContent(XContentParser parser) throws IOException {
+ if (parser.currentToken() != XContentParser.Token.START_OBJECT) {
+ parser.nextToken();
+ ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
+ }
+ XContentParser.Token token = parser.currentToken();
+ String currentFieldName = null;
+ List<SearchHit> hits = new ArrayList<>();
+ long totalHits = 0;
+ float maxScore = 0f;
+ while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+ if (token == XContentParser.Token.FIELD_NAME) {
+ currentFieldName = parser.currentName();
+ } else if (token.isValue()) {
+ if (Fields.TOTAL.equals(currentFieldName)) {
+ totalHits = parser.longValue();
+ } else if (Fields.MAX_SCORE.equals(currentFieldName)) {
+ maxScore = parser.floatValue();
+ } else {
+ throwUnknownField(currentFieldName, parser.getTokenLocation());
+ }
+ } else if (token == XContentParser.Token.VALUE_NULL) {
+ if (Fields.MAX_SCORE.equals(currentFieldName)) {
+ maxScore = Float.NaN; // NaN gets rendered as null-field
+ } else {
+ throwUnknownField(currentFieldName, parser.getTokenLocation());
+ }
+ } else if (token == XContentParser.Token.START_ARRAY) {
+ while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+ hits.add(SearchHit.fromXContent(parser));
+ }
+ }
+ }
+ SearchHits searchHits = new SearchHits(hits.toArray(new SearchHit[hits.size()]), totalHits,
+ maxScore);
+ return searchHits;
+ }
+
+
+ // Stream factory: creates an empty instance and populates it via readFrom.
+ public static SearchHits readSearchHits(StreamInput in) throws IOException {
+ SearchHits hits = new SearchHits();
+ hits.readFrom(in);
+ return hits;
+ }
+
+ @Override
+ public void readFrom(StreamInput in) throws IOException {
+ // Wire format: vlong total, float max score, vint-counted hit array; mirrors writeTo.
+ totalHits = in.readVLong();
+ maxScore = in.readFloat();
+ int size = in.readVInt();
+ if (size == 0) {
+ hits = EMPTY;
+ } else {
+ hits = new SearchHit[size];
+ for (int i = 0; i < hits.length; i++) {
+ hits[i] = SearchHit.readSearchHit(in);
+ }
+ }
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeVLong(totalHits);
+ out.writeFloat(maxScore);
+ out.writeVInt(hits.length);
+ if (hits.length > 0) {
+ for (SearchHit hit : hits) {
+ hit.writeTo(out);
+ }
+ }
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+ SearchHits other = (SearchHits) obj;
+ return Objects.equals(totalHits, other.totalHits)
+ && Objects.equals(maxScore, other.maxScore)
+ && Arrays.equals(hits, other.hits);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(totalHits, maxScore, Arrays.hashCode(hits));
+ }
}
diff --git a/core/src/main/java/org/elasticsearch/search/internal/SearchSortValues.java b/core/src/main/java/org/elasticsearch/search/SearchSortValues.java
index 9aa29d7768..d3d55ff481 100644
--- a/core/src/main/java/org/elasticsearch/search/internal/SearchSortValues.java
+++ b/core/src/main/java/org/elasticsearch/search/SearchSortValues.java
@@ -17,7 +17,7 @@
* under the License.
*/
-package org.elasticsearch.search.internal;
+package org.elasticsearch.search;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.io.stream.StreamInput;
@@ -27,8 +27,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
-import org.elasticsearch.search.DocValueFormat;
-import org.elasticsearch.search.internal.InternalSearchHit.Fields;
+import org.elasticsearch.search.SearchHit.Fields;
import java.io.IOException;
import java.util.Arrays;
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHits.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHits.java
index 3f712f99b3..e60bf94dbc 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHits.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHits.java
@@ -32,8 +32,7 @@ import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.InternalMetricsAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
-import org.elasticsearch.search.internal.InternalSearchHit;
-import org.elasticsearch.search.internal.InternalSearchHits;
+import org.elasticsearch.search.SearchHit;
import java.io.IOException;
import java.util.Arrays;
@@ -47,9 +46,9 @@ public class InternalTopHits extends InternalMetricsAggregation implements TopHi
private int from;
private int size;
private TopDocs topDocs;
- private InternalSearchHits searchHits;
+ private SearchHits searchHits;
- public InternalTopHits(String name, int from, int size, TopDocs topDocs, InternalSearchHits searchHits,
+ public InternalTopHits(String name, int from, int size, TopDocs topDocs, SearchHits searchHits,
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
super(name, pipelineAggregators, metaData);
this.from = from;
@@ -67,7 +66,7 @@ public class InternalTopHits extends InternalMetricsAggregation implements TopHi
size = in.readVInt();
topDocs = Lucene.readTopDocs(in);
assert topDocs != null;
- searchHits = InternalSearchHits.readSearchHits(in);
+ searchHits = SearchHits.readSearchHits(in);
}
@Override
@@ -98,7 +97,7 @@ public class InternalTopHits extends InternalMetricsAggregation implements TopHi
@Override
public InternalAggregation doReduce(List<InternalAggregation> aggregations, ReduceContext reduceContext) {
- InternalSearchHits[] shardHits = new InternalSearchHits[aggregations.size()];
+ SearchHits[] shardHits = new SearchHits[aggregations.size()];
final TopDocs reducedTopDocs;
final TopDocs[] shardDocs;
@@ -124,16 +123,16 @@ public class InternalTopHits extends InternalMetricsAggregation implements TopHi
}
final int[] tracker = new int[shardHits.length];
- InternalSearchHit[] hits = new InternalSearchHit[reducedTopDocs.scoreDocs.length];
+ SearchHit[] hits = new SearchHit[reducedTopDocs.scoreDocs.length];
for (int i = 0; i < reducedTopDocs.scoreDocs.length; i++) {
ScoreDoc scoreDoc = reducedTopDocs.scoreDocs[i];
int position;
do {
position = tracker[scoreDoc.shardIndex]++;
} while (shardDocs[scoreDoc.shardIndex].scoreDocs[position] != scoreDoc);
- hits[i] = (InternalSearchHit) shardHits[scoreDoc.shardIndex].getAt(position);
+ hits[i] = shardHits[scoreDoc.shardIndex].getAt(position);
}
- return new InternalTopHits(name, from, size, reducedTopDocs, new InternalSearchHits(hits, reducedTopDocs.totalHits,
+ return new InternalTopHits(name, from, size, reducedTopDocs, new SearchHits(hits, reducedTopDocs.totalHits,
reducedTopDocs.getMaxScore()),
pipelineAggregators(), getMetaData());
} catch (IOException e) {
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregator.java
index 9204174773..6b8e441335 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregator.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregator.java
@@ -41,8 +41,8 @@ import org.elasticsearch.search.aggregations.metrics.MetricsAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.fetch.FetchSearchResult;
-import org.elasticsearch.search.internal.InternalSearchHit;
-import org.elasticsearch.search.internal.InternalSearchHits;
+import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SubSearchContext;
import org.elasticsearch.search.rescore.RescoreSearchContext;
@@ -165,10 +165,10 @@ public class TopHitsAggregator extends MetricsAggregator {
subSearchContext.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length);
fetchPhase.execute(subSearchContext);
FetchSearchResult fetchResult = subSearchContext.fetchResult();
- InternalSearchHit[] internalHits = fetchResult.fetchResult().hits().internalHits();
+ SearchHit[] internalHits = fetchResult.fetchResult().hits().internalHits();
for (int i = 0; i < internalHits.length; i++) {
ScoreDoc scoreDoc = topDocs.scoreDocs[i];
- InternalSearchHit searchHitFields = internalHits[i];
+ SearchHit searchHitFields = internalHits[i];
searchHitFields.shard(subSearchContext.shardTarget());
searchHitFields.score(scoreDoc.score);
if (scoreDoc instanceof FieldDoc) {
@@ -190,7 +190,7 @@ public class TopHitsAggregator extends MetricsAggregator {
} else {
topDocs = Lucene.EMPTY_TOP_DOCS;
}
- return new InternalTopHits(name, subSearchContext.from(), subSearchContext.size(), topDocs, InternalSearchHits.empty(),
+ return new InternalTopHits(name, subSearchContext.from(), subSearchContext.size(), topDocs, SearchHits.empty(),
pipelineAggregators(), metaData());
}
diff --git a/core/src/main/java/org/elasticsearch/search/collapse/ExpandCollapseSearchResponseListener.java b/core/src/main/java/org/elasticsearch/search/collapse/ExpandCollapseSearchResponseListener.java
index 9a45b7a9fe..b9caa5216c 100644
--- a/core/src/main/java/org/elasticsearch/search/collapse/ExpandCollapseSearchResponseListener.java
+++ b/core/src/main/java/org/elasticsearch/search/collapse/ExpandCollapseSearchResponseListener.java
@@ -29,7 +29,6 @@ import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.builder.SearchSourceBuilder;
-import org.elasticsearch.search.internal.InternalSearchHit;
import java.util.HashMap;
import java.util.Objects;
@@ -56,9 +55,8 @@ public class ExpandCollapseSearchResponseListener implements BiConsumer<SearchRe
return ;
}
for (SearchHit hit : searchResponse.getHits()) {
- InternalSearchHit internalHit = (InternalSearchHit) hit;
BoolQueryBuilder groupQuery = new BoolQueryBuilder();
- Object collapseValue = internalHit.field(collapseBuilder.getField()).getValue();
+ Object collapseValue = hit.field(collapseBuilder.getField()).getValue();
if (collapseValue != null) {
groupQuery.filter(QueryBuilders.matchQuery(collapseBuilder.getField(), collapseValue));
} else {
@@ -75,10 +73,10 @@ public class ExpandCollapseSearchResponseListener implements BiConsumer<SearchRe
.source(sourceBuilder);
SearchResponse groupResponse = client.search(groupRequest).actionGet();
SearchHits innerHits = groupResponse.getHits();
- if (internalHit.getInnerHits() == null) {
- internalHit.setInnerHits(new HashMap<>(1));
+ if (hit.getInnerHits() == null) {
+ hit.setInnerHits(new HashMap<>(1));
}
- internalHit.getInnerHits().put(collapseBuilder.getInnerHit().getName(), innerHits);
+ hit.getInnerHits().put(collapseBuilder.getInnerHit().getName(), innerHits);
}
}
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java
index dcf55872e3..83af0b9abd 100644
--- a/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java
+++ b/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java
@@ -46,9 +46,7 @@ import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchPhase;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.InnerHitsFetchSubPhase;
-import org.elasticsearch.search.internal.InternalSearchHit;
-import org.elasticsearch.search.internal.InternalSearchHitField;
-import org.elasticsearch.search.internal.InternalSearchHits;
+import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.tasks.TaskCancelledException;
@@ -134,7 +132,7 @@ public class FetchPhase implements SearchPhase {
}
}
- InternalSearchHit[] hits = new InternalSearchHit[context.docIdsToLoadSize()];
+ SearchHit[] hits = new SearchHit[context.docIdsToLoadSize()];
FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext();
for (int index = 0; index < context.docIdsToLoadSize(); index++) {
if(context.isCancelled()) {
@@ -145,7 +143,7 @@ public class FetchPhase implements SearchPhase {
LeafReaderContext subReaderContext = context.searcher().getIndexReader().leaves().get(readerIndex);
int subDocId = docId - subReaderContext.docBase;
- final InternalSearchHit searchHit;
+ final SearchHit searchHit;
try {
int rootDocId = findRootDocumentIfNested(context, subReaderContext, subDocId);
if (rootDocId != -1) {
@@ -168,7 +166,7 @@ public class FetchPhase implements SearchPhase {
fetchSubPhase.hitsExecute(context, hits);
}
- context.fetchResult().hits(new InternalSearchHits(hits, context.queryResult().topDocs().totalHits, context.queryResult().topDocs().getMaxScore()));
+ context.fetchResult().hits(new SearchHits(hits, context.queryResult().topDocs().totalHits, context.queryResult().topDocs().getMaxScore()));
}
private int findRootDocumentIfNested(SearchContext context, LeafReaderContext subReaderContext, int subDocId) throws IOException {
@@ -181,9 +179,9 @@ public class FetchPhase implements SearchPhase {
return -1;
}
- private InternalSearchHit createSearchHit(SearchContext context, FieldsVisitor fieldsVisitor, int docId, int subDocId, LeafReaderContext subReaderContext) {
+ private SearchHit createSearchHit(SearchContext context, FieldsVisitor fieldsVisitor, int docId, int subDocId, LeafReaderContext subReaderContext) {
if (fieldsVisitor == null) {
- return new InternalSearchHit(docId);
+ return new SearchHit(docId);
}
loadStoredFields(context, subReaderContext, fieldsVisitor, subDocId);
fieldsVisitor.postProcess(context.mapperService());
@@ -192,7 +190,7 @@ public class FetchPhase implements SearchPhase {
if (!fieldsVisitor.fields().isEmpty()) {
searchFields = new HashMap<>(fieldsVisitor.fields().size());
for (Map.Entry<String, List<Object>> entry : fieldsVisitor.fields().entrySet()) {
- searchFields.put(entry.getKey(), new InternalSearchHitField(entry.getKey(), entry.getValue()));
+ searchFields.put(entry.getKey(), new SearchHitField(entry.getKey(), entry.getValue()));
}
}
@@ -203,7 +201,7 @@ public class FetchPhase implements SearchPhase {
} else {
typeText = documentMapper.typeText();
}
- InternalSearchHit searchHit = new InternalSearchHit(docId, fieldsVisitor.uid().id(), typeText, searchFields);
+ SearchHit searchHit = new SearchHit(docId, fieldsVisitor.uid().id(), typeText, searchFields);
// Set _source if requested.
SourceLookup sourceLookup = context.lookup().source();
sourceLookup.setSegmentAndDocument(subReaderContext, subDocId);
@@ -213,7 +211,7 @@ public class FetchPhase implements SearchPhase {
return searchHit;
}
- private InternalSearchHit createNestedSearchHit(SearchContext context, int nestedTopDocId, int nestedSubDocId, int rootSubDocId, Set<String> fieldNames, List<String> fieldNamePatterns, LeafReaderContext subReaderContext) throws IOException {
+ private SearchHit createNestedSearchHit(SearchContext context, int nestedTopDocId, int nestedSubDocId, int rootSubDocId, Set<String> fieldNames, List<String> fieldNamePatterns, LeafReaderContext subReaderContext) throws IOException {
// Also if highlighting is requested on nested documents we need to fetch the _source from the root document,
// otherwise highlighting will attempt to fetch the _source from the nested doc, which will fail,
// because the entire _source is only stored with the root document.
@@ -228,7 +226,7 @@ public class FetchPhase implements SearchPhase {
ObjectMapper nestedObjectMapper = documentMapper.findNestedObjectMapper(nestedSubDocId, context, subReaderContext);
assert nestedObjectMapper != null;
- InternalSearchHit.InternalNestedIdentity nestedIdentity = getInternalNestedIdentity(context, nestedSubDocId, subReaderContext, documentMapper, nestedObjectMapper);
+ SearchHit.NestedIdentity nestedIdentity = getInternalNestedIdentity(context, nestedSubDocId, subReaderContext, documentMapper, nestedObjectMapper);
BytesReference source = rootFieldsVisitor.source();
if (source != null) {
@@ -271,7 +269,7 @@ public class FetchPhase implements SearchPhase {
context.lookup().source().setSourceContentType(contentType);
}
- return new InternalSearchHit(nestedTopDocId, rootFieldsVisitor.uid().id(), documentMapper.typeText(), nestedIdentity, searchFields);
+ return new SearchHit(nestedTopDocId, rootFieldsVisitor.uid().id(), documentMapper.typeText(), nestedIdentity, searchFields);
}
private Map<String, SearchHitField> getSearchFields(SearchContext context, int nestedSubDocId, Set<String> fieldNames, List<String> fieldNamePatterns, LeafReaderContext subReaderContext) {
@@ -285,7 +283,7 @@ public class FetchPhase implements SearchPhase {
if (!nestedFieldsVisitor.fields().isEmpty()) {
searchFields = new HashMap<>(nestedFieldsVisitor.fields().size());
for (Map.Entry<String, List<Object>> entry : nestedFieldsVisitor.fields().entrySet()) {
- searchFields.put(entry.getKey(), new InternalSearchHitField(entry.getKey(), entry.getValue()));
+ searchFields.put(entry.getKey(), new SearchHitField(entry.getKey(), entry.getValue()));
}
}
}
@@ -293,12 +291,12 @@ public class FetchPhase implements SearchPhase {
return searchFields;
}
- private InternalSearchHit.InternalNestedIdentity getInternalNestedIdentity(SearchContext context, int nestedSubDocId, LeafReaderContext subReaderContext, DocumentMapper documentMapper, ObjectMapper nestedObjectMapper) throws IOException {
+ private SearchHit.NestedIdentity getInternalNestedIdentity(SearchContext context, int nestedSubDocId, LeafReaderContext subReaderContext, DocumentMapper documentMapper, ObjectMapper nestedObjectMapper) throws IOException {
int currentParent = nestedSubDocId;
ObjectMapper nestedParentObjectMapper;
ObjectMapper current = nestedObjectMapper;
String originalName = nestedObjectMapper.name();
- InternalSearchHit.InternalNestedIdentity nestedIdentity = null;
+ SearchHit.NestedIdentity nestedIdentity = null;
do {
Query parentFilter;
nestedParentObjectMapper = documentMapper.findParentObjectMapper(current);
@@ -335,7 +333,7 @@ public class FetchPhase implements SearchPhase {
currentParent = nextParent;
current = nestedObjectMapper = nestedParentObjectMapper;
int currentPrefix = current == null ? 0 : current.name().length() + 1;
- nestedIdentity = new InternalSearchHit.InternalNestedIdentity(originalName.substring(currentPrefix), offset, nestedIdentity);
+ nestedIdentity = new SearchHit.NestedIdentity(originalName.substring(currentPrefix), offset, nestedIdentity);
if (current != null) {
originalName = current.name();
}
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java b/core/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java
index 6e844a5d9d..1e2def8cc6 100644
--- a/core/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java
+++ b/core/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java
@@ -23,10 +23,9 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchShardTarget;
-import org.elasticsearch.search.internal.InternalSearchHits;
+import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.query.QuerySearchResultProvider;
-import org.elasticsearch.transport.TransportResponse;
import java.io.IOException;
@@ -34,7 +33,7 @@ public class FetchSearchResult extends QuerySearchResultProvider {
private long id;
private SearchShardTarget shardTarget;
- private InternalSearchHits hits;
+ private SearchHits hits;
// client side counter
private transient int counter;
@@ -72,19 +71,19 @@ public class FetchSearchResult extends QuerySearchResultProvider {
this.shardTarget = shardTarget;
}
- public void hits(InternalSearchHits hits) {
+ public void hits(SearchHits hits) {
assert assertNoSearchTarget(hits);
this.hits = hits;
}
- private boolean assertNoSearchTarget(InternalSearchHits hits) {
- for (SearchHit hit : hits.hits()) {
+ private boolean assertNoSearchTarget(SearchHits hits) {
+ for (SearchHit hit : hits.getHits()) {
assert hit.getShard() == null : "expected null but got: " + hit.getShard();
}
return true;
}
- public InternalSearchHits hits() {
+ public SearchHits hits() {
return hits;
}
@@ -107,7 +106,7 @@ public class FetchSearchResult extends QuerySearchResultProvider {
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
id = in.readLong();
- hits = InternalSearchHits.readSearchHits(in);
+ hits = SearchHits.readSearchHits(in);
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java
index 1783652d12..6f34eba212 100644
--- a/core/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java
+++ b/core/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java
@@ -22,7 +22,7 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexSearcher;
-import org.elasticsearch.search.internal.InternalSearchHit;
+import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.internal.SearchContext;
import java.util.HashMap;
@@ -34,20 +34,20 @@ import java.util.Map;
public interface FetchSubPhase {
class HitContext {
- private InternalSearchHit hit;
+ private SearchHit hit;
private IndexSearcher searcher;
private LeafReaderContext readerContext;
private int docId;
private Map<String, Object> cache;
- public void reset(InternalSearchHit hit, LeafReaderContext context, int docId, IndexSearcher searcher) {
+ public void reset(SearchHit hit, LeafReaderContext context, int docId, IndexSearcher searcher) {
this.hit = hit;
this.readerContext = context;
this.docId = docId;
this.searcher = searcher;
}
- public InternalSearchHit hit() {
+ public SearchHit hit() {
return hit;
}
@@ -82,5 +82,5 @@ public interface FetchSubPhase {
default void hitExecute(SearchContext context, HitContext hitContext) {}
- default void hitsExecute(SearchContext context, InternalSearchHit[] hits) {}
+ default void hitsExecute(SearchContext context, SearchHit[] hits) {}
}
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java
index d6f3318a8b..031f7a6240 100644
--- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java
+++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java
@@ -23,7 +23,6 @@ import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.fetch.FetchSubPhase;
-import org.elasticsearch.search.internal.InternalSearchHitField;
import org.elasticsearch.search.internal.SearchContext;
import java.util.ArrayList;
@@ -55,10 +54,10 @@ public final class DocValueFieldsFetchSubPhase implements FetchSubPhase {
if (hitContext.hit().fieldsOrNull() == null) {
hitContext.hit().fields(new HashMap<>(2));
}
- SearchHitField hitField = hitContext.hit().fields().get(field);
+ SearchHitField hitField = hitContext.hit().getFields().get(field);
if (hitField == null) {
- hitField = new InternalSearchHitField(field, new ArrayList<>(2));
- hitContext.hit().fields().put(field, hitField);
+ hitField = new SearchHitField(field, new ArrayList<>(2));
+ hitContext.hit().getFields().put(field, hitField);
}
MappedFieldType fieldType = context.mapperService().fullName(field);
if (fieldType != null) {
@@ -67,7 +66,7 @@ public final class DocValueFieldsFetchSubPhase implements FetchSubPhase {
AtomicFieldData data = context.fieldData().getForField(fieldType).load(hitContext.readerContext());
ScriptDocValues<?> values = data.getScriptValues();
values.setNextDocId(hitContext.docId());
- hitField.values().addAll(values);
+ hitField.getValues().addAll(values);
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainFetchSubPhase.java
index 4ad7db0c75..5aabaf644e 100644
--- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainFetchSubPhase.java
+++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/ExplainFetchSubPhase.java
@@ -46,8 +46,8 @@ public final class ExplainFetchSubPhase implements FetchSubPhase {
// we use the top level doc id, since we work with the top level searcher
hitContext.hit().explanation(explanation);
} catch (IOException e) {
- throw new FetchPhaseExecutionException(context, "Failed to explain doc [" + hitContext.hit().type() + "#"
- + hitContext.hit().id() + "]", e);
+ throw new FetchPhaseExecutionException(context, "Failed to explain doc [" + hitContext.hit().getType() + "#"
+ + hitContext.hit().getId() + "]", e);
} finally {
context.clearReleasables(SearchContext.Lifetime.COLLECTION);
}
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsContext.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsContext.java
index d3de22f720..5c76328610 100644
--- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsContext.java
+++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsContext.java
@@ -50,7 +50,7 @@ import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.UidFieldMapper;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.fetch.FetchSubPhase;
-import org.elasticsearch.search.internal.InternalSearchHit;
+import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SubSearchContext;
@@ -287,10 +287,10 @@ public final class InnerHitsContext {
public TopDocs topDocs(SearchContext context, FetchSubPhase.HitContext hitContext) throws IOException {
final Query hitQuery;
if (isParentHit(hitContext.hit())) {
- String field = ParentFieldMapper.joinField(hitContext.hit().type());
- hitQuery = new DocValuesTermsQuery(field, hitContext.hit().id());
+ String field = ParentFieldMapper.joinField(hitContext.hit().getType());
+ hitQuery = new DocValuesTermsQuery(field, hitContext.hit().getId());
} else if (isChildHit(hitContext.hit())) {
- DocumentMapper hitDocumentMapper = mapperService.documentMapper(hitContext.hit().type());
+ DocumentMapper hitDocumentMapper = mapperService.documentMapper(hitContext.hit().getType());
final String parentType = hitDocumentMapper.parentFieldMapper().type();
SearchHitField parentField = hitContext.hit().field(ParentFieldMapper.NAME);
if (parentField == null) {
@@ -328,12 +328,12 @@ public final class InnerHitsContext {
}
}
- private boolean isParentHit(InternalSearchHit hit) {
- return hit.type().equals(documentMapper.parentFieldMapper().type());
+ private boolean isParentHit(SearchHit hit) {
+ return hit.getType().equals(documentMapper.parentFieldMapper().type());
}
- private boolean isChildHit(InternalSearchHit hit) {
- DocumentMapper hitDocumentMapper = mapperService.documentMapper(hit.type());
+ private boolean isChildHit(SearchHit hit) {
+ DocumentMapper hitDocumentMapper = mapperService.documentMapper(hit.getType());
return documentMapper.type().equals(hitDocumentMapper.parentFieldMapper().type());
}
}
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java
index 23c63bc7ee..48294bd82c 100644
--- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java
+++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java
@@ -26,8 +26,8 @@ import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.FetchSubPhase;
-import org.elasticsearch.search.internal.InternalSearchHit;
-import org.elasticsearch.search.internal.InternalSearchHits;
+import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@@ -47,7 +47,7 @@ public final class InnerHitsFetchSubPhase implements FetchSubPhase {
if ((context.innerHits() != null && context.innerHits().getInnerHits().size() > 0) == false) {
return;
}
- Map<String, InternalSearchHits> results = new HashMap<>();
+ Map<String, SearchHits> results = new HashMap<>();
for (Map.Entry<String, InnerHitsContext.BaseInnerHits> entry : context.innerHits().getInnerHits().entrySet()) {
InnerHitsContext.BaseInnerHits innerHits = entry.getValue();
TopDocs topDocs;
@@ -64,10 +64,10 @@ public final class InnerHitsFetchSubPhase implements FetchSubPhase {
innerHits.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length);
fetchPhase.execute(innerHits);
FetchSearchResult fetchResult = innerHits.fetchResult();
- InternalSearchHit[] internalHits = fetchResult.fetchResult().hits().internalHits();
+ SearchHit[] internalHits = fetchResult.fetchResult().hits().internalHits();
for (int i = 0; i < internalHits.length; i++) {
ScoreDoc scoreDoc = topDocs.scoreDocs[i];
- InternalSearchHit searchHitFields = internalHits[i];
+ SearchHit searchHitFields = internalHits[i];
searchHitFields.score(scoreDoc.score);
if (scoreDoc instanceof FieldDoc) {
FieldDoc fieldDoc = (FieldDoc) scoreDoc;
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesFetchSubPhase.java
index 18eacffdc7..e38090ee4d 100644
--- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesFetchSubPhase.java
+++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesFetchSubPhase.java
@@ -28,7 +28,7 @@ import org.apache.lucene.util.Bits;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.search.fetch.FetchSubPhase;
-import org.elasticsearch.search.internal.InternalSearchHit;
+import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SearchContext.Lifetime;
@@ -42,7 +42,7 @@ import java.util.Map;
public final class MatchedQueriesFetchSubPhase implements FetchSubPhase {
@Override
- public void hitsExecute(SearchContext context, InternalSearchHit[] hits) {
+ public void hitsExecute(SearchContext context, SearchHit[] hits) {
if (hits.length == 0 ||
// in case the request has only suggest, parsed query is null
context.parsedQuery() == null) {
@@ -71,7 +71,7 @@ public final class MatchedQueriesFetchSubPhase implements FetchSubPhase {
Bits matchingDocs = null;
final IndexReader indexReader = context.searcher().getIndexReader();
for (int i = 0; i < hits.length; ++i) {
- InternalSearchHit hit = hits[i];
+ SearchHit hit = hits[i];
int hitReaderIndex = ReaderUtil.subIndex(hit.docId(), indexReader.leaves());
if (readerIndex != hitReaderIndex) {
readerIndex = hitReaderIndex;
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/ParentFieldSubFetchPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/ParentFieldSubFetchPhase.java
index 5cd19096be..00b9686702 100644
--- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/ParentFieldSubFetchPhase.java
+++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/ParentFieldSubFetchPhase.java
@@ -26,7 +26,6 @@ import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.index.mapper.ParentFieldMapper;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.fetch.FetchSubPhase;
-import org.elasticsearch.search.internal.InternalSearchHitField;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@@ -41,7 +40,7 @@ public final class ParentFieldSubFetchPhase implements FetchSubPhase {
if (context.storedFieldsContext() != null && context.storedFieldsContext().fetchFields() == false) {
return ;
}
- ParentFieldMapper parentFieldMapper = context.mapperService().documentMapper(hitContext.hit().type()).parentFieldMapper();
+ ParentFieldMapper parentFieldMapper = context.mapperService().documentMapper(hitContext.hit().getType()).parentFieldMapper();
if (parentFieldMapper.active() == false) {
return;
}
@@ -57,7 +56,7 @@ public final class ParentFieldSubFetchPhase implements FetchSubPhase {
fields = new HashMap<>();
hitContext.hit().fields(fields);
}
- fields.put(ParentFieldMapper.NAME, new InternalSearchHitField(ParentFieldMapper.NAME, Collections.singletonList(parentId)));
+ fields.put(ParentFieldMapper.NAME, new SearchHitField(ParentFieldMapper.NAME, Collections.singletonList(parentId)));
}
public static String getParentId(ParentFieldMapper fieldMapper, LeafReader reader, int docId) {
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsFetchSubPhase.java
index 93415f5350..c272ab6dbf 100644
--- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsFetchSubPhase.java
+++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsFetchSubPhase.java
@@ -21,7 +21,6 @@ package org.elasticsearch.search.fetch.subphase;
import org.elasticsearch.script.LeafSearchScript;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.fetch.FetchSubPhase;
-import org.elasticsearch.search.internal.InternalSearchHitField;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@@ -63,7 +62,7 @@ public final class ScriptFieldsFetchSubPhase implements FetchSubPhase {
hitContext.hit().fields(new HashMap<>(2));
}
- SearchHitField hitField = hitContext.hit().fields().get(scriptField.name());
+ SearchHitField hitField = hitContext.hit().getFields().get(scriptField.name());
if (hitField == null) {
final List<Object> values;
if (value instanceof Collection) {
@@ -72,8 +71,8 @@ public final class ScriptFieldsFetchSubPhase implements FetchSubPhase {
} else {
values = Collections.singletonList(value);
}
- hitField = new InternalSearchHitField(scriptField.name(), values);
- hitContext.hit().fields().put(scriptField.name(), hitField);
+ hitField = new SearchHitField(scriptField.name(), values);
+ hitContext.hit().getFields().put(scriptField.name(), hitField);
}
}
}
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java
index e89d87e4fb..701b981e0f 100644
--- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java
+++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java
@@ -57,17 +57,17 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase {
for (SearchContextHighlight.Field field : context.highlight().fields()) {
Collection<String> fieldNamesToHighlight;
if (Regex.isSimpleMatchPattern(field.field())) {
- DocumentMapper documentMapper = context.mapperService().documentMapper(hitContext.hit().type());
+ DocumentMapper documentMapper = context.mapperService().documentMapper(hitContext.hit().getType());
fieldNamesToHighlight = documentMapper.mappers().simpleMatchToFullName(field.field());
} else {
fieldNamesToHighlight = Collections.singletonList(field.field());
}
if (context.highlight().forceSource(field)) {
- SourceFieldMapper sourceFieldMapper = context.mapperService().documentMapper(hitContext.hit().type()).sourceMapper();
+ SourceFieldMapper sourceFieldMapper = context.mapperService().documentMapper(hitContext.hit().getType()).sourceMapper();
if (!sourceFieldMapper.enabled()) {
throw new IllegalArgumentException("source is forced for fields " + fieldNamesToHighlight
- + " but type [" + hitContext.hit().type() + "] has disabled _source");
+ + " but type [" + hitContext.hit().getType() + "] has disabled _source");
}
}
@@ -129,7 +129,7 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase {
}
private FieldMapper getMapperForField(String fieldName, SearchContext searchContext, HitContext hitContext) {
- DocumentMapper documentMapper = searchContext.mapperService().documentMapper(hitContext.hit().type());
+ DocumentMapper documentMapper = searchContext.mapperService().documentMapper(hitContext.hit().getType());
// TODO: no need to lookup the doc mapper with unambiguous field names? just look at the mapper service
return documentMapper.mappers().smartNameFieldMapper(fieldName);
}
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java
index 127a008f9c..deb1464b70 100644
--- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java
+++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java
@@ -101,7 +101,7 @@ public class PlainHighlighter implements Highlighter {
int numberOfFragments = field.fieldOptions().numberOfFragments() == 0 ? 1 : field.fieldOptions().numberOfFragments();
ArrayList<TextFragment> fragsList = new ArrayList<>();
List<Object> textsToHighlight;
- Analyzer analyzer = context.mapperService().documentMapper(hitContext.hit().type()).mappers().indexAnalyzer();
+ Analyzer analyzer = context.mapperService().documentMapper(hitContext.hit().getType()).mappers().indexAnalyzer();
try {
textsToHighlight = HighlightUtils.loadFieldValues(field, mapper, context, hitContext);
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PostingsHighlighter.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PostingsHighlighter.java
index 330fb908cc..34997912fe 100644
--- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PostingsHighlighter.java
+++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PostingsHighlighter.java
@@ -78,7 +78,7 @@ public class PostingsHighlighter implements Highlighter {
List<Snippet> snippets = new ArrayList<>();
int numberOfFragments;
try {
- Analyzer analyzer = context.mapperService().documentMapper(hitContext.hit().type()).mappers().indexAnalyzer();
+ Analyzer analyzer = context.mapperService().documentMapper(hitContext.hit().getType()).mappers().indexAnalyzer();
List<Object> fieldValues = HighlightUtils.loadFieldValues(field, fieldMapper, context, hitContext);
CustomPostingsHighlighter highlighter;
if (field.fieldOptions().numberOfFragments() == 0) {
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java
index 4afce9700e..2ffa1a28ef 100644
--- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java
+++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java
@@ -78,7 +78,7 @@ public class UnifiedHighlighter implements Highlighter {
int numberOfFragments;
try {
Analyzer analyzer =
- context.mapperService().documentMapper(hitContext.hit().type()).mappers().indexAnalyzer();
+ context.mapperService().documentMapper(hitContext.hit().getType()).mappers().indexAnalyzer();
List<Object> fieldValues = HighlightUtils.loadFieldValues(field, fieldMapper, context, hitContext);
fieldValues = fieldValues.stream().map(obj -> {
if (obj instanceof BytesRef) {
diff --git a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java
deleted file mode 100644
index 3487312b72..0000000000
--- a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java
+++ /dev/null
@@ -1,943 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.internal;
-
-import org.apache.lucene.search.Explanation;
-import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParsingException;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.compress.CompressorFactory;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.text.Text;
-import org.elasticsearch.common.xcontent.ConstructingObjectParser;
-import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentHelper;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.mapper.MapperService;
-import org.elasticsearch.index.mapper.SourceFieldMapper;
-import org.elasticsearch.index.shard.ShardId;
-import org.elasticsearch.search.DocValueFormat;
-import org.elasticsearch.search.SearchHit;
-import org.elasticsearch.search.SearchHitField;
-import org.elasticsearch.search.SearchHits;
-import org.elasticsearch.search.SearchShardTarget;
-import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
-import org.elasticsearch.search.lookup.SourceLookup;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-
-import static java.util.Collections.emptyMap;
-import static java.util.Collections.singletonMap;
-import static java.util.Collections.unmodifiableMap;
-import static org.elasticsearch.common.lucene.Lucene.readExplanation;
-import static org.elasticsearch.common.lucene.Lucene.writeExplanation;
-import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
-import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
-import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
-import static org.elasticsearch.common.xcontent.XContentParserUtils.parseStoredFieldsValue;
-import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;
-import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken;
-import static org.elasticsearch.search.fetch.subphase.highlight.HighlightField.readHighlightField;
-import static org.elasticsearch.search.internal.InternalSearchHitField.readSearchHitField;
-
-public class InternalSearchHit implements SearchHit {
-
- private transient int docId;
-
- private static final float DEFAULT_SCORE = Float.NEGATIVE_INFINITY;
- private float score = DEFAULT_SCORE;
-
- private Text id;
- private Text type;
-
- private InternalNestedIdentity nestedIdentity;
-
- private long version = -1;
-
- private BytesReference source;
-
- private Map<String, SearchHitField> fields = emptyMap();
-
- private Map<String, HighlightField> highlightFields = null;
-
- private SearchSortValues sortValues = SearchSortValues.EMPTY;
-
- private String[] matchedQueries = Strings.EMPTY_ARRAY;
-
- private Explanation explanation;
-
- @Nullable
- private SearchShardTarget shard;
-
- private transient String index;
-
- private Map<String, Object> sourceAsMap;
- private byte[] sourceAsBytes;
-
- private Map<String, InternalSearchHits> innerHits;
-
- private InternalSearchHit() {
-
- }
-
- public InternalSearchHit(int docId) {
- this(docId, null, null, null);
- }
-
- public InternalSearchHit(int docId, String id, Text type, Map<String, SearchHitField> fields) {
- this(docId, id, type, null, fields);
- }
-
- public InternalSearchHit(int nestedTopDocId, String id, Text type, InternalNestedIdentity nestedIdentity, Map<String, SearchHitField> fields) {
- this.docId = nestedTopDocId;
- if (id != null) {
- this.id = new Text(id);
- } else {
- this.id = null;
- }
- this.type = type;
- this.nestedIdentity = nestedIdentity;
- this.fields = fields;
- }
-
- public int docId() {
- return this.docId;
- }
-
- public void score(float score) {
- this.score = score;
- }
-
- @Override
- public float score() {
- return this.score;
- }
-
- @Override
- public float getScore() {
- return score();
- }
-
- public void version(long version) {
- this.version = version;
- }
-
- @Override
- public long version() {
- return this.version;
- }
-
- @Override
- public long getVersion() {
- return this.version;
- }
-
- @Override
- public String index() {
- return this.index;
- }
-
- @Override
- public String getIndex() {
- return index();
- }
-
- @Override
- public String id() {
- return id != null ? id.string() : null;
- }
-
- @Override
- public String getId() {
- return id();
- }
-
- @Override
- public String type() {
- return type != null ? type.string() : null;
- }
-
- @Override
- public String getType() {
- return type();
- }
-
- @Override
- public NestedIdentity getNestedIdentity() {
- return nestedIdentity;
- }
-
- /**
- * Returns bytes reference, also un compress the source if needed.
- */
- @Override
- public BytesReference sourceRef() {
- if (this.source == null) {
- return null;
- }
-
- try {
- this.source = CompressorFactory.uncompressIfNeeded(this.source);
- return this.source;
- } catch (IOException e) {
- throw new ElasticsearchParseException("failed to decompress source", e);
- }
- }
-
- /**
- * Sets representation, might be compressed....
- */
- public InternalSearchHit sourceRef(BytesReference source) {
- this.source = source;
- this.sourceAsBytes = null;
- this.sourceAsMap = null;
- return this;
- }
-
- @Override
- public BytesReference getSourceRef() {
- return sourceRef();
- }
-
- @Override
- public byte[] source() {
- if (source == null) {
- return null;
- }
- if (sourceAsBytes != null) {
- return sourceAsBytes;
- }
- this.sourceAsBytes = BytesReference.toBytes(sourceRef());
- return this.sourceAsBytes;
- }
-
- @Override
- public boolean hasSource() {
- return source != null;
- }
-
- @Override
- public Map<String, Object> getSource() {
- return sourceAsMap();
- }
-
- @Override
- public String sourceAsString() {
- if (source == null) {
- return null;
- }
- try {
- return XContentHelper.convertToJson(sourceRef(), false);
- } catch (IOException e) {
- throw new ElasticsearchParseException("failed to convert source to a json string");
- }
- }
-
- @Override
- public String getSourceAsString() {
- return sourceAsString();
- }
-
- @Override
- public Map<String, Object> sourceAsMap() throws ElasticsearchParseException {
- if (source == null) {
- return null;
- }
- if (sourceAsMap != null) {
- return sourceAsMap;
- }
-
- sourceAsMap = SourceLookup.sourceAsMap(source);
- return sourceAsMap;
- }
-
- @Override
- public Iterator<SearchHitField> iterator() {
- return fields.values().iterator();
- }
-
- @Override
- public SearchHitField field(String fieldName) {
- return fields().get(fieldName);
- }
-
- @Override
- public Map<String, SearchHitField> fields() {
- return fields == null ? emptyMap() : fields;
- }
-
- // returns the fields without handling null cases
- public Map<String, SearchHitField> fieldsOrNull() {
- return fields;
- }
-
- @Override
- public Map<String, SearchHitField> getFields() {
- return fields();
- }
-
- public void fields(Map<String, SearchHitField> fields) {
- this.fields = fields;
- }
-
- @Override
- public Map<String, HighlightField> highlightFields() {
- return highlightFields == null ? emptyMap() : highlightFields;
- }
-
- @Override
- public Map<String, HighlightField> getHighlightFields() {
- return highlightFields();
- }
-
- public void highlightFields(Map<String, HighlightField> highlightFields) {
- this.highlightFields = highlightFields;
- }
-
- public void sortValues(Object[] sortValues, DocValueFormat[] sortValueFormats) {
- sortValues(new SearchSortValues(sortValues, sortValueFormats));
- }
-
- public void sortValues(SearchSortValues sortValues) {
- this.sortValues = sortValues;
- }
-
- @Override
- public Object[] sortValues() {
- return sortValues.sortValues();
- }
-
- @Override
- public Object[] getSortValues() {
- return sortValues();
- }
-
- @Override
- public Explanation explanation() {
- return explanation;
- }
-
- @Override
- public Explanation getExplanation() {
- return explanation();
- }
-
- public void explanation(Explanation explanation) {
- this.explanation = explanation;
- }
-
- @Override
- public SearchShardTarget shard() {
- return shard;
- }
-
- @Override
- public SearchShardTarget getShard() {
- return shard();
- }
-
- public void shard(SearchShardTarget target) {
- this.shard = target;
- if (target != null) {
- this.index = target.getIndex();
- }
- }
-
- public void matchedQueries(String[] matchedQueries) {
- this.matchedQueries = matchedQueries;
- }
-
- @Override
- public String[] matchedQueries() {
- return this.matchedQueries;
- }
-
- @Override
- public String[] getMatchedQueries() {
- return this.matchedQueries;
- }
-
- @Override
- @SuppressWarnings("unchecked")
- public Map<String, SearchHits> getInnerHits() {
- return (Map) innerHits;
- }
-
- public void setInnerHits(Map<String, InternalSearchHits> innerHits) {
- this.innerHits = innerHits;
- }
-
- public static class Fields {
- static final String _INDEX = "_index";
- static final String _TYPE = "_type";
- static final String _ID = "_id";
- static final String _VERSION = "_version";
- static final String _SCORE = "_score";
- static final String FIELDS = "fields";
- static final String HIGHLIGHT = "highlight";
- static final String SORT = "sort";
- static final String MATCHED_QUERIES = "matched_queries";
- static final String _EXPLANATION = "_explanation";
- static final String VALUE = "value";
- static final String DESCRIPTION = "description";
- static final String DETAILS = "details";
- static final String INNER_HITS = "inner_hits";
- static final String _SHARD = "_shard";
- static final String _NODE = "_node";
- }
-
- // public because we render hit as part of completion suggestion option
- public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) throws IOException {
- List<SearchHitField> metaFields = new ArrayList<>();
- List<SearchHitField> otherFields = new ArrayList<>();
- if (fields != null && !fields.isEmpty()) {
- for (SearchHitField field : fields.values()) {
- if (field.values().isEmpty()) {
- continue;
- }
- if (field.isMetadataField()) {
- metaFields.add(field);
- } else {
- otherFields.add(field);
- }
- }
- }
-
- // For inner_hit hits shard is null and that is ok, because the parent search hit has all this information.
- // Even if this was included in the inner_hit hits this would be the same, so better leave it out.
- if (explanation() != null && shard != null) {
- builder.field(Fields._SHARD, shard.getShardId());
- builder.field(Fields._NODE, shard.getNodeIdText());
- }
- if (nestedIdentity != null) {
- nestedIdentity.toXContent(builder, params);
- } else {
- if (index != null) {
- builder.field(Fields._INDEX, index);
- }
- if (type != null) {
- builder.field(Fields._TYPE, type);
- }
- if (id != null) {
- builder.field(Fields._ID, id);
- }
- }
- if (version != -1) {
- builder.field(Fields._VERSION, version);
- }
- if (Float.isNaN(score)) {
- builder.nullField(Fields._SCORE);
- } else {
- builder.field(Fields._SCORE, score);
- }
- for (SearchHitField field : metaFields) {
- Object value = field.value();
- builder.field(field.name(), value);
- }
- if (source != null) {
- XContentHelper.writeRawField(SourceFieldMapper.NAME, source, builder, params);
- }
- if (!otherFields.isEmpty()) {
- builder.startObject(Fields.FIELDS);
- for (SearchHitField field : otherFields) {
- builder.startArray(field.name());
- for (Object value : field.getValues()) {
- builder.value(value);
- }
- builder.endArray();
- }
- builder.endObject();
- }
- if (highlightFields != null && !highlightFields.isEmpty()) {
- builder.startObject(Fields.HIGHLIGHT);
- for (HighlightField field : highlightFields.values()) {
- field.toXContent(builder, params);
- }
- builder.endObject();
- }
- sortValues.toXContent(builder, params);
- if (matchedQueries.length > 0) {
- builder.startArray(Fields.MATCHED_QUERIES);
- for (String matchedFilter : matchedQueries) {
- builder.value(matchedFilter);
- }
- builder.endArray();
- }
- if (explanation() != null) {
- builder.field(Fields._EXPLANATION);
- buildExplanation(builder, explanation());
- }
- if (innerHits != null) {
- builder.startObject(Fields.INNER_HITS);
- for (Map.Entry<String, InternalSearchHits> entry : innerHits.entrySet()) {
- builder.startObject(entry.getKey());
- entry.getValue().toXContent(builder, params);
- builder.endObject();
- }
- builder.endObject();
- }
- return builder;
- }
-
- public static InternalSearchHit fromXContent(XContentParser parser) throws IOException {
- XContentParser.Token token;
- ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
- String currentFieldName = null;
- String type = null, id = null;
- String index = null;
- float score = DEFAULT_SCORE;
- long version = -1;
- SearchSortValues sortValues = SearchSortValues.EMPTY;
- InternalNestedIdentity nestedIdentity = null;
- Map<String, HighlightField> highlightFields = new HashMap<>();
- BytesReference parsedSource = null;
- List<String> matchedQueries = new ArrayList<>();
- Map<String, SearchHitField> fields = new HashMap<>();
- Explanation explanation = null;
- ShardId shardId = null;
- String nodeId = null;
- Map<String, InternalSearchHits> innerHits = null;
- while((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
- if (token == XContentParser.Token.FIELD_NAME) {
- currentFieldName = parser.currentName();
- } else if (token.isValue()) {
- if (Fields._TYPE.equals(currentFieldName)) {
- type = parser.text();
- } else if (Fields._INDEX.equals(currentFieldName)) {
- index = parser.text();
- } else if (Fields._ID.equals(currentFieldName)) {
- id = parser.text();
- } else if (Fields._SCORE.equals(currentFieldName)) {
- score = parser.floatValue();
- } else if (Fields._VERSION.equals(currentFieldName)) {
- version = parser.longValue();
- } else if (Fields._SHARD.equals(currentFieldName)) {
- shardId = ShardId.fromString(parser.text());
- } else if (Fields._NODE.equals(currentFieldName)) {
- nodeId = parser.text();
- } else if (MapperService.isMetadataField(currentFieldName)) {
- List<Object> values = new ArrayList<>();
- values.add(parseStoredFieldsValue(parser));
- fields.put(currentFieldName, new InternalSearchHitField(currentFieldName, values));
- } else {
- throwUnknownField(currentFieldName, parser.getTokenLocation());
- }
- } else if (token == XContentParser.Token.VALUE_NULL) {
- if (Fields._SCORE.equals(currentFieldName)) {
- score = Float.NaN;
- } else {
- throwUnknownField(currentFieldName, parser.getTokenLocation());
- }
- } else if (token == XContentParser.Token.START_OBJECT) {
- if (SourceFieldMapper.NAME.equals(currentFieldName)) {
- try (XContentBuilder builder = XContentBuilder.builder(parser.contentType().xContent())) {
- //the original document gets slightly modified: whitespaces or pretty printing are not preserved,
- //it all depends on the current builder settings
- builder.copyCurrentStructure(parser);
- parsedSource = builder.bytes();
- }
- } else if (Fields.HIGHLIGHT.equals(currentFieldName)) {
- while((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
- HighlightField highlightField = HighlightField.fromXContent(parser);
- highlightFields.put(highlightField.getName(), highlightField);
- }
- } else if (Fields.FIELDS.equals(currentFieldName)) {
- while((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
- String fieldName = parser.currentName();
- List<Object> values = new ArrayList<>();
- ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser::getTokenLocation);
- while((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
- values.add(parseStoredFieldsValue(parser));
- }
- fields.put(fieldName, new InternalSearchHitField(fieldName, values));
- }
- } else if (Fields._EXPLANATION.equals(currentFieldName)) {
- explanation = parseExplanation(parser);
- } else if (Fields.INNER_HITS.equals(currentFieldName)) {
- innerHits = new HashMap<>();
- while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
- // parse the key
- ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
- String name = parser.currentName();
- innerHits.put(name, InternalSearchHits.fromXContent(parser));
- parser.nextToken();
- ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.currentToken(), parser::getTokenLocation);
- }
- } else if (InternalNestedIdentity.Fields._NESTED.equals(currentFieldName)) {
- nestedIdentity = InternalNestedIdentity.fromXContent(parser);
- } else {
- throwUnknownField(currentFieldName, parser.getTokenLocation());
- }
- } else if (token == XContentParser.Token.START_ARRAY) {
- if (Fields.SORT.equals(currentFieldName)) {
- sortValues = SearchSortValues.fromXContent(parser);
- } else if (Fields.MATCHED_QUERIES.equals(currentFieldName)) {
- while((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
- matchedQueries.add(parser.text());
- }
- } else {
- throwUnknownField(currentFieldName, parser.getTokenLocation());
- }
- } else {
- throwUnknownToken(token, parser.getTokenLocation());
- }
- }
- InternalSearchHit internalSearchHit = new InternalSearchHit(-1, id, new Text(type), nestedIdentity, Collections.emptyMap());
- internalSearchHit.index = index;
- internalSearchHit.score(score);
- internalSearchHit.version(version);
- internalSearchHit.sortValues(sortValues);
- internalSearchHit.highlightFields(highlightFields);
- internalSearchHit.sourceRef(parsedSource);
- internalSearchHit.explanation(explanation);
- internalSearchHit.setInnerHits(innerHits);
- if (matchedQueries.size() > 0) {
- internalSearchHit.matchedQueries(matchedQueries.toArray(new String[matchedQueries.size()]));
- }
- if (shardId != null && nodeId != null) {
- internalSearchHit.shard(new SearchShardTarget(nodeId, shardId));
- }
- internalSearchHit.fields(fields);
- return internalSearchHit;
- }
-
- private static Explanation parseExplanation(XContentParser parser) throws IOException {
- ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
- XContentParser.Token token;
- Float value = null;
- String description = null;
- List<Explanation> details = new ArrayList<>();
- while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
- ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, () -> parser.getTokenLocation());
- String currentFieldName = parser.currentName();
- token = parser.nextToken();
- if (Fields.VALUE.equals(currentFieldName)) {
- value = parser.floatValue();
- } else if (Fields.DESCRIPTION.equals(currentFieldName)) {
- description = parser.textOrNull();
- } else if (Fields.DETAILS.equals(currentFieldName)) {
- ensureExpectedToken(XContentParser.Token.START_ARRAY, token, () -> parser.getTokenLocation());
- while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
- details.add(parseExplanation(parser));
- }
- } else {
- throwUnknownField(currentFieldName, parser.getTokenLocation());
- }
- }
- if (value == null) {
- throw new ParsingException(parser.getTokenLocation(), "missing explanation value");
- }
- if (description == null) {
- throw new ParsingException(parser.getTokenLocation(), "missing explanation description");
- }
- return Explanation.match(value, description, details);
- }
-
- private void buildExplanation(XContentBuilder builder, Explanation explanation) throws IOException {
- builder.startObject();
- builder.field(Fields.VALUE, explanation.getValue());
- builder.field(Fields.DESCRIPTION, explanation.getDescription());
- Explanation[] innerExps = explanation.getDetails();
- if (innerExps != null) {
- builder.startArray(Fields.DETAILS);
- for (Explanation exp : innerExps) {
- buildExplanation(builder, exp);
- }
- builder.endArray();
- }
- builder.endObject();
-
- }
-
- @Override
- public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
- builder.startObject();
- toInnerXContent(builder, params);
- builder.endObject();
- return builder;
- }
-
- public static InternalSearchHit readSearchHit(StreamInput in) throws IOException {
- InternalSearchHit hit = new InternalSearchHit();
- hit.readFrom(in);
- return hit;
- }
-
- @Override
- public void readFrom(StreamInput in) throws IOException {
- score = in.readFloat();
- id = in.readOptionalText();
- type = in.readOptionalText();
- nestedIdentity = in.readOptionalWriteable(InternalNestedIdentity::new);
- version = in.readLong();
- source = in.readBytesReference();
- if (source.length() == 0) {
- source = null;
- }
- if (in.readBoolean()) {
- explanation = readExplanation(in);
- }
- int size = in.readVInt();
- if (size == 0) {
- fields = emptyMap();
- } else if (size == 1) {
- SearchHitField hitField = readSearchHitField(in);
- fields = singletonMap(hitField.name(), hitField);
- } else {
- Map<String, SearchHitField> fields = new HashMap<>();
- for (int i = 0; i < size; i++) {
- SearchHitField hitField = readSearchHitField(in);
- fields.put(hitField.name(), hitField);
- }
- this.fields = unmodifiableMap(fields);
- }
-
- size = in.readVInt();
- if (size == 0) {
- highlightFields = emptyMap();
- } else if (size == 1) {
- HighlightField field = readHighlightField(in);
- highlightFields = singletonMap(field.name(), field);
- } else {
- Map<String, HighlightField> highlightFields = new HashMap<>();
- for (int i = 0; i < size; i++) {
- HighlightField field = readHighlightField(in);
- highlightFields.put(field.name(), field);
- }
- this.highlightFields = unmodifiableMap(highlightFields);
- }
-
- sortValues = new SearchSortValues(in);
-
- size = in.readVInt();
- if (size > 0) {
- matchedQueries = new String[size];
- for (int i = 0; i < size; i++) {
- matchedQueries[i] = in.readString();
- }
- }
- // we call the setter here because that also sets the local index parameter
- shard(in.readOptionalWriteable(SearchShardTarget::new));
- size = in.readVInt();
- if (size > 0) {
- innerHits = new HashMap<>(size);
- for (int i = 0; i < size; i++) {
- String key = in.readString();
- InternalSearchHits value = InternalSearchHits.readSearchHits(in);
- innerHits.put(key, value);
- }
- }
- }
-
- @Override
- public void writeTo(StreamOutput out) throws IOException {
- out.writeFloat(score);
- out.writeOptionalText(id);
- out.writeOptionalText(type);
- out.writeOptionalWriteable(nestedIdentity);
- out.writeLong(version);
- out.writeBytesReference(source);
- if (explanation == null) {
- out.writeBoolean(false);
- } else {
- out.writeBoolean(true);
- writeExplanation(out, explanation);
- }
- if (fields == null) {
- out.writeVInt(0);
- } else {
- out.writeVInt(fields.size());
- for (SearchHitField hitField : fields().values()) {
- hitField.writeTo(out);
- }
- }
- if (highlightFields == null) {
- out.writeVInt(0);
- } else {
- out.writeVInt(highlightFields.size());
- for (HighlightField highlightField : highlightFields.values()) {
- highlightField.writeTo(out);
- }
- }
- sortValues.writeTo(out);
-
- if (matchedQueries.length == 0) {
- out.writeVInt(0);
- } else {
- out.writeVInt(matchedQueries.length);
- for (String matchedFilter : matchedQueries) {
- out.writeString(matchedFilter);
- }
- }
- out.writeOptionalWriteable(shard);
- if (innerHits == null) {
- out.writeVInt(0);
- } else {
- out.writeVInt(innerHits.size());
- for (Map.Entry<String, InternalSearchHits> entry : innerHits.entrySet()) {
- out.writeString(entry.getKey());
- entry.getValue().writeTo(out);
- }
- }
- }
-
- @Override
- public boolean equals(Object obj) {
- if (obj == null || getClass() != obj.getClass()) {
- return false;
- }
- InternalSearchHit other = (InternalSearchHit) obj;
- return Objects.equals(id, other.id)
- && Objects.equals(type, other.type)
- && Objects.equals(nestedIdentity, other.nestedIdentity)
- && Objects.equals(version, other.version)
- && Objects.equals(source, other.source)
- && Objects.equals(fields, other.fields)
- && Objects.equals(highlightFields(), other.highlightFields())
- && Arrays.equals(matchedQueries, other.matchedQueries)
- && Objects.equals(explanation, other.explanation)
- && Objects.equals(shard, other.shard)
- && Objects.equals(innerHits, other.innerHits);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(id, type, nestedIdentity, version, source, fields, highlightFields(), Arrays.hashCode(matchedQueries),
- explanation, shard, innerHits);
- }
-
- public static final class InternalNestedIdentity implements NestedIdentity, Writeable, ToXContent {
-
- private Text field;
- private int offset;
- private InternalNestedIdentity child;
-
- public InternalNestedIdentity(String field, int offset, InternalNestedIdentity child) {
- this.field = new Text(field);
- this.offset = offset;
- this.child = child;
- }
-
- InternalNestedIdentity(StreamInput in) throws IOException {
- field = in.readOptionalText();
- offset = in.readInt();
- child = in.readOptionalWriteable(InternalNestedIdentity::new);
- }
-
- @Override
- public Text getField() {
- return field;
- }
-
- @Override
- public int getOffset() {
- return offset;
- }
-
- @Override
- public NestedIdentity getChild() {
- return child;
- }
-
- @Override
- public void writeTo(StreamOutput out) throws IOException {
- out.writeOptionalText(field);
- out.writeInt(offset);
- out.writeOptionalWriteable(child);
- }
-
- @Override
- public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
- builder.field(Fields._NESTED);
- return innerToXContent(builder, params);
- }
-
- /**
- * Rendering of the inner XContent object without the leading field name. This way the structure innerToXContent renders and
- * fromXContent parses correspond to each other.
- */
- XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
- builder.startObject();
- if (field != null) {
- builder.field(Fields._NESTED_FIELD, field);
- }
- if (offset != -1) {
- builder.field(Fields._NESTED_OFFSET, offset);
- }
- if (child != null) {
- builder = child.toXContent(builder, params);
- }
- builder.endObject();
- return builder;
- }
-
- private static final ConstructingObjectParser<InternalNestedIdentity, Void> PARSER = new ConstructingObjectParser<>(
- "nested_identity",
- ctorArgs -> new InternalNestedIdentity((String) ctorArgs[0], (int) ctorArgs[1], (InternalNestedIdentity) ctorArgs[2]));
- static {
- PARSER.declareString(constructorArg(), new ParseField(Fields._NESTED_FIELD));
- PARSER.declareInt(constructorArg(), new ParseField(Fields._NESTED_OFFSET));
- PARSER.declareObject(optionalConstructorArg(), PARSER, new ParseField(Fields._NESTED));
- }
-
- public static InternalNestedIdentity fromXContent(XContentParser parser) {
- return PARSER.apply(parser, null);
- }
-
- @Override
- public boolean equals(Object obj) {
- if (this == obj) {
- return true;
- }
- if (obj == null || getClass() != obj.getClass()) {
- return false;
- }
- InternalNestedIdentity other = (InternalNestedIdentity) obj;
- return Objects.equals(field, other.field) &&
- Objects.equals(offset, other.offset) &&
- Objects.equals(child, other.child);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(field, offset, child);
- }
-
- public static class Fields {
- static final String _NESTED = "_nested";
- static final String _NESTED_FIELD = "field";
- static final String _NESTED_OFFSET = "offset";
- }
- }
-}
diff --git a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHitField.java b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHitField.java
deleted file mode 100644
index d1f94969bd..0000000000
--- a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHitField.java
+++ /dev/null
@@ -1,128 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.internal;
-
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.index.mapper.MapperService;
-import org.elasticsearch.search.SearchHitField;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Objects;
-
-public class InternalSearchHitField implements SearchHitField {
-
- private String name;
- private List<Object> values;
-
- private InternalSearchHitField() {
- }
-
- public InternalSearchHitField(String name, List<Object> values) {
- this.name = name;
- this.values = values;
- }
-
- @Override
- public String name() {
- return name;
- }
-
- @Override
- public String getName() {
- return name();
- }
-
- @Override
- public Object value() {
- if (values == null || values.isEmpty()) {
- return null;
- }
- return values.get(0);
- }
-
- @Override
- public Object getValue() {
- return value();
- }
-
- @Override
- public List<Object> values() {
- return values;
- }
-
- @Override
- public List<Object> getValues() {
- return values();
- }
-
- @Override
- public boolean isMetadataField() {
- return MapperService.isMetadataField(name);
- }
-
- @Override
- public Iterator<Object> iterator() {
- return values.iterator();
- }
-
- public static InternalSearchHitField readSearchHitField(StreamInput in) throws IOException {
- InternalSearchHitField result = new InternalSearchHitField();
- result.readFrom(in);
- return result;
- }
-
- @Override
- public void readFrom(StreamInput in) throws IOException {
- name = in.readString();
- int size = in.readVInt();
- values = new ArrayList<>(size);
- for (int i = 0; i < size; i++) {
- values.add(in.readGenericValue());
- }
- }
-
- @Override
- public void writeTo(StreamOutput out) throws IOException {
- out.writeString(name);
- out.writeVInt(values.size());
- for (Object value : values) {
- out.writeGenericValue(value);
- }
- }
-
- @Override
- public boolean equals(Object obj) {
- if (obj == null || getClass() != obj.getClass()) {
- return false;
- }
- InternalSearchHitField other = (InternalSearchHitField) obj;
- return Objects.equals(name, other.name)
- && Objects.equals(values, other.values);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(name, values);
- }
-} \ No newline at end of file
diff --git a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHits.java b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHits.java
deleted file mode 100644
index 834476b543..0000000000
--- a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHits.java
+++ /dev/null
@@ -1,228 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.internal;
-
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.search.SearchHit;
-import org.elasticsearch.search.SearchHits;
-import org.elasticsearch.search.SearchShardTarget;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Objects;
-
-import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
-import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;
-import static org.elasticsearch.search.internal.InternalSearchHit.readSearchHit;
-
-public class InternalSearchHits implements SearchHits {
-
- public static InternalSearchHits empty() {
- // We shouldn't use static final instance, since that could directly be returned by native transport clients
- return new InternalSearchHits(EMPTY, 0, 0);
- }
-
- public static final InternalSearchHit[] EMPTY = new InternalSearchHit[0];
-
- private InternalSearchHit[] hits;
-
- public long totalHits;
-
- private float maxScore;
-
- InternalSearchHits() {
-
- }
-
- public InternalSearchHits(InternalSearchHit[] hits, long totalHits, float maxScore) {
- this.hits = hits;
- this.totalHits = totalHits;
- this.maxScore = maxScore;
- }
-
- public void shardTarget(SearchShardTarget shardTarget) {
- for (InternalSearchHit hit : hits) {
- hit.shard(shardTarget);
- }
- }
-
- @Override
- public long totalHits() {
- return totalHits;
- }
-
- @Override
- public long getTotalHits() {
- return totalHits();
- }
-
- @Override
- public float maxScore() {
- return this.maxScore;
- }
-
- @Override
- public float getMaxScore() {
- return maxScore();
- }
-
- @Override
- public SearchHit[] hits() {
- return this.hits;
- }
-
- @Override
- public SearchHit getAt(int position) {
- return hits[position];
- }
-
- @Override
- public SearchHit[] getHits() {
- return hits();
- }
-
- @Override
- public Iterator<SearchHit> iterator() {
- return Arrays.stream(hits()).iterator();
- }
-
- public InternalSearchHit[] internalHits() {
- return this.hits;
- }
-
- static final class Fields {
- static final String HITS = "hits";
- static final String TOTAL = "total";
- static final String MAX_SCORE = "max_score";
- }
-
- @Override
- public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
- builder.startObject(Fields.HITS);
- builder.field(Fields.TOTAL, totalHits);
- if (Float.isNaN(maxScore)) {
- builder.nullField(Fields.MAX_SCORE);
- } else {
- builder.field(Fields.MAX_SCORE, maxScore);
- }
- builder.field(Fields.HITS);
- builder.startArray();
- for (SearchHit hit : hits) {
- hit.toXContent(builder, params);
- }
- builder.endArray();
- builder.endObject();
- return builder;
- }
-
- public static InternalSearchHits fromXContent(XContentParser parser) throws IOException {
- if (parser.currentToken() != XContentParser.Token.START_OBJECT) {
- parser.nextToken();
- ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
- }
- XContentParser.Token token = parser.currentToken();
- String currentFieldName = null;
- List<InternalSearchHit> hits = new ArrayList<>();
- long totalHits = 0;
- float maxScore = 0f;
- while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
- if (token == XContentParser.Token.FIELD_NAME) {
- currentFieldName = parser.currentName();
- } else if (token.isValue()) {
- if (Fields.TOTAL.equals(currentFieldName)) {
- totalHits = parser.longValue();
- } else if (Fields.MAX_SCORE.equals(currentFieldName)) {
- maxScore = parser.floatValue();
- } else {
- throwUnknownField(currentFieldName, parser.getTokenLocation());
- }
- } else if (token == XContentParser.Token.VALUE_NULL) {
- if (Fields.MAX_SCORE.equals(currentFieldName)) {
- maxScore = Float.NaN; // NaN gets rendered as null-field
- } else {
- throwUnknownField(currentFieldName, parser.getTokenLocation());
- }
- } else if (token == XContentParser.Token.START_ARRAY) {
- while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
- hits.add(InternalSearchHit.fromXContent(parser));
- }
- }
- }
- InternalSearchHits internalSearchHits = new InternalSearchHits(hits.toArray(new InternalSearchHit[hits.size()]), totalHits,
- maxScore);
- return internalSearchHits;
- }
-
-
- public static InternalSearchHits readSearchHits(StreamInput in) throws IOException {
- InternalSearchHits hits = new InternalSearchHits();
- hits.readFrom(in);
- return hits;
- }
-
- @Override
- public void readFrom(StreamInput in) throws IOException {
- totalHits = in.readVLong();
- maxScore = in.readFloat();
- int size = in.readVInt();
- if (size == 0) {
- hits = EMPTY;
- } else {
- hits = new InternalSearchHit[size];
- for (int i = 0; i < hits.length; i++) {
- hits[i] = readSearchHit(in);
- }
- }
- }
-
- @Override
- public void writeTo(StreamOutput out) throws IOException {
- out.writeVLong(totalHits);
- out.writeFloat(maxScore);
- out.writeVInt(hits.length);
- if (hits.length > 0) {
- for (InternalSearchHit hit : hits) {
- hit.writeTo(out);
- }
- }
- }
-
- @Override
- public boolean equals(Object obj) {
- if (obj == null || getClass() != obj.getClass()) {
- return false;
- }
- InternalSearchHits other = (InternalSearchHits) obj;
- return Objects.equals(totalHits, other.totalHits)
- && Objects.equals(maxScore, other.maxScore)
- && Arrays.equals(hits, other.hits);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(totalHits, maxScore, Arrays.hashCode(hits));
- }
-}
diff --git a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java
index 871d176ffc..d396d14e98 100644
--- a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java
+++ b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java
@@ -19,7 +19,6 @@
package org.elasticsearch.search.internal;
-import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
@@ -36,15 +35,13 @@ import java.io.IOException;
import java.util.Collections;
import java.util.Map;
-import static org.elasticsearch.search.internal.InternalSearchHits.readSearchHits;
-
public class InternalSearchResponse implements Streamable, ToXContent {
public static InternalSearchResponse empty() {
- return new InternalSearchResponse(InternalSearchHits.empty(), null, null, null, false, null);
+ return new InternalSearchResponse(SearchHits.empty(), null, null, null, false, null);
}
- private InternalSearchHits hits;
+ private SearchHits hits;
private InternalAggregations aggregations;
@@ -59,7 +56,7 @@ public class InternalSearchResponse implements Streamable, ToXContent {
private InternalSearchResponse() {
}
- public InternalSearchResponse(InternalSearchHits hits, InternalAggregations aggregations, Suggest suggest,
+ public InternalSearchResponse(SearchHits hits, InternalAggregations aggregations, Suggest suggest,
SearchProfileShardResults profileResults, boolean timedOut, Boolean terminatedEarly) {
this.hits = hits;
this.aggregations = aggregations;
@@ -125,7 +122,7 @@ public class InternalSearchResponse implements Streamable, ToXContent {
@Override
public void readFrom(StreamInput in) throws IOException {
- hits = readSearchHits(in);
+ hits = SearchHits.readSearchHits(in);
if (in.readBoolean()) {
aggregations = InternalAggregations.readAggregations(in);
}
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java
index c86c056522..1d8da83271 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java
@@ -25,12 +25,10 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.search.internal.InternalSearchHit;
-import org.elasticsearch.search.internal.InternalSearchHits;
+import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.suggest.Suggest;
import java.io.IOException;
-import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
@@ -194,7 +192,7 @@ public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSug
public static class Option extends Suggest.Suggestion.Entry.Option {
private Map<String, Set<CharSequence>> contexts;
private ScoreDoc doc;
- private InternalSearchHit hit;
+ private SearchHit hit;
public Option(int docID, Text text, float score, Map<String, Set<CharSequence>> contexts) {
super(text, score);
@@ -221,7 +219,7 @@ public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSug
return doc;
}
- public InternalSearchHit getHit() {
+ public SearchHit getHit() {
return hit;
}
@@ -229,7 +227,7 @@ public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSug
this.doc.shardIndex = shardIndex;
}
- public void setHit(InternalSearchHit hit) {
+ public void setHit(SearchHit hit) {
this.hit = hit;
}
@@ -260,7 +258,7 @@ public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSug
super.readFrom(in);
this.doc = Lucene.readScoreDoc(in);
if (in.readBoolean()) {
- this.hit = InternalSearchHit.readSearchHit(in);
+ this.hit = SearchHit.readSearchHit(in);
}
int contextSize = in.readInt();
this.contexts = new LinkedHashMap<>(contextSize);