Remove SearchHit#internalHits (#25653)
This method does exactly what getHits() does and is used in only a few places,
so it can safely be removed. It seems to be a leftover from when
InternalSearchHits was folded into the SearchHits interface, which didn't
contain this method.
cbuescher authored Jul 12, 2017
1 parent b5e8113 commit ad01a67
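As context for the hunks below, a minimal sketch of the duplication the commit message describes. This is not the real SearchHits source: the body of internalHits() is taken from the SearchHits.java hunk further down, and getHits() returning the same backing field is an assumption based on the commit message stating the two methods are identical.

    import org.elasticsearch.search.SearchHit;

    // Sketch only: illustrates why removing internalHits() is safe.
    class SearchHitsSketch {
        private final SearchHit[] hits;

        SearchHitsSketch(SearchHit[] hits) {
            this.hits = hits;
        }

        public SearchHit[] getHits() {
            return this.hits;               // accessor that call sites switch to
        }

        public SearchHit[] internalHits() { // removed by this commit
            return this.hits;               // identical to getHits()
        }
    }

Call sites therefore change mechanically, e.g. fetchResult.hits().internalHits()[index] becomes fetchResult.hits().getHits()[index], as the diffs below show.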
Showing 6 changed files with 14 additions and 17 deletions.
@@ -21,6 +21,7 @@

import com.carrotsearch.hppc.IntArrayList;
import com.carrotsearch.hppc.ObjectObjectHashMap;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.CollectionStatistics;
import org.apache.lucene.search.FieldDoc;
@@ -329,9 +330,9 @@ public InternalSearchResponse merge(boolean ignoreFrom, ReducedQueryPhase reduce
}
FetchSearchResult fetchResult = searchResultProvider.fetchResult();
final int index = fetchResult.counterGetAndIncrement();
- assert index < fetchResult.hits().internalHits().length : "not enough hits fetched. index [" + index + "] length: "
-     + fetchResult.hits().internalHits().length;
- SearchHit hit = fetchResult.hits().internalHits()[index];
+ assert index < fetchResult.hits().getHits().length : "not enough hits fetched. index [" + index + "] length: "
+     + fetchResult.hits().getHits().length;
+ SearchHit hit = fetchResult.hits().getHits()[index];
CompletionSuggestion.Entry.Option suggestOption =
suggestionOptions.get(scoreDocIndex - currentOffset);
hit.score(shardDoc.score);
@@ -381,9 +382,9 @@ private SearchHits getHits(ReducedQueryPhase reducedQueryPhase, boolean ignoreFr
}
FetchSearchResult fetchResult = fetchResultProvider.fetchResult();
final int index = fetchResult.counterGetAndIncrement();
- assert index < fetchResult.hits().internalHits().length : "not enough hits fetched. index [" + index + "] length: "
-     + fetchResult.hits().internalHits().length;
- SearchHit searchHit = fetchResult.hits().internalHits()[index];
+ assert index < fetchResult.hits().getHits().length : "not enough hits fetched. index [" + index + "] length: "
+     + fetchResult.hits().getHits().length;
+ SearchHit searchHit = fetchResult.hits().getHits()[index];
searchHit.score(shardDoc.score);
searchHit.shard(fetchResult.getSearchShardTarget());
if (sorted) {
4 changes: 0 additions & 4 deletions core/src/main/java/org/elasticsearch/search/SearchHits.java
@@ -95,10 +95,6 @@ public Iterator<SearchHit> iterator() {
return Arrays.stream(getHits()).iterator();
}

- public SearchHit[] internalHits() {
-     return this.hits;
- }

public static final class Fields {
public static final String HITS = "hits";
public static final String TOTAL = "total";
@@ -33,6 +33,8 @@
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.util.LongObjectPagedHashMap;
+ import org.elasticsearch.search.SearchHit;
+ import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
@@ -41,8 +43,6 @@
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.fetch.FetchSearchResult;
- import org.elasticsearch.search.SearchHit;
- import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SubSearchContext;
import org.elasticsearch.search.rescore.RescoreSearchContext;
@@ -165,7 +165,7 @@ public InternalAggregation buildAggregation(long owningBucketOrdinal) {
subSearchContext.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length);
fetchPhase.execute(subSearchContext);
FetchSearchResult fetchResult = subSearchContext.fetchResult();
- SearchHit[] internalHits = fetchResult.fetchResult().hits().internalHits();
+ SearchHit[] internalHits = fetchResult.fetchResult().hits().getHits();
for (int i = 0; i < internalHits.length; i++) {
ScoreDoc scoreDoc = topDocs.scoreDocs[i];
SearchHit searchHitFields = internalHits[i];
@@ -66,7 +66,7 @@ public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOExcept
innerHits.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length);
fetchPhase.execute(innerHits);
FetchSearchResult fetchResult = innerHits.fetchResult();
- SearchHit[] internalHits = fetchResult.fetchResult().hits().internalHits();
+ SearchHit[] internalHits = fetchResult.fetchResult().hits().getHits();
for (int j = 0; j < internalHits.length; j++) {
ScoreDoc scoreDoc = topDoc.scoreDocs[j];
SearchHit searchHitFields = internalHits[j];
@@ -350,7 +350,7 @@ public void run() throws IOException {
mockSearchPhaseContext.assertNoFailure();
assertNotNull(responseRef.get());
assertEquals(2, responseRef.get().getHits().totalHits);
- assertEquals(1, responseRef.get().getHits().internalHits().length);
+ assertEquals(1, responseRef.get().getHits().getHits().length);
assertEquals(84, responseRef.get().getHits().getAt(0).docId());
assertEquals(0, responseRef.get().getFailedShards());
assertEquals(2, responseRef.get().getSuccessfulShards());
@@ -178,14 +178,14 @@ protected void assertReduced(InternalTopHits reduced, List<InternalTopHits> inpu
SearchHits internalHits = inputs.get(input).getHits();
totalHits += internalHits.getTotalHits();
maxScore = max(maxScore, internalHits.getMaxScore());
- for (int i = 0; i < internalHits.internalHits().length; i++) {
+ for (int i = 0; i < internalHits.getHits().length; i++) {
ScoreDoc doc = inputs.get(input).getTopDocs().scoreDocs[i];
if (testInstancesLookSortedByField) {
doc = new FieldDoc(doc.doc, doc.score, ((FieldDoc) doc).fields, input);
} else {
doc = new ScoreDoc(doc.doc, doc.score, input);
}
- allHits.add(new Tuple<>(doc, internalHits.internalHits()[i]));
+ allHits.add(new Tuple<>(doc, internalHits.getHits()[i]));
}
}
allHits.sort(comparing(Tuple::v1, scoreDocComparator()));
