Fixed bug where non-percolator docs end up in the search hits.
In the case that a document without a percolator field is matched when using the `percolate` query, the fetch phase can fail because the percolator can't resolve any query from that document.

Closes #29429
martijnvg committed Apr 10, 2018
1 parent 22b144a commit 684f3d8
Showing 2 changed files with 77 additions and 2 deletions.
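
Before the diff itself, here is a self-contained sketch of the situation the fix guards against: a search hit backed by a plain document (one without a stored percolator query) reaches the percolator's sub fetch phase, and the query store resolves it to null. The class name, the query-store lambda, and the doc ids below are illustrative stand-ins rather than code from this commit; the snippet only shows the shape of the added null check.

import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

import java.util.function.IntFunction;

public class SkipNonPercolatorHitsSketch {

    public static void main(String[] args) {
        // Hypothetical per-segment query store: doc 0 carries a percolator query,
        // doc 1 is a regular document and therefore resolves to null.
        IntFunction<Query> queryStore =
            docId -> docId == 0 ? new TermQuery(new Term("field", "value")) : null;

        for (int segmentDocId : new int[] {0, 1}) {
            Query query = queryStore.apply(segmentDocId);
            if (query == null) {
                // Not a percolator document: before this commit the null query reached
                // the percolator's in-memory searcher and broke the fetch phase.
                continue;
            }
            System.out.println("doc " + segmentDocId + " would be percolated with: " + query);
        }
    }
}

The new tests in PercolatorMatchedSlotSubFetchPhaseTests exercise the real code path for the same case (the "No query" block further down), asserting that such hits simply get no matched-slot field instead of failing the fetch phase.
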
@@ -57,7 +57,11 @@ final class PercolatorMatchedSlotSubFetchPhase implements FetchSubPhase {
 
     @Override
     public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException {
-        List<PercolateQuery> percolateQueries = locatePercolatorQuery(context.query());
+        innerHitsExecute(context.query(), context.searcher(), hits);
+    }
+
+    static void innerHitsExecute(Query mainQuery, IndexSearcher indexSearcher, SearchHit[] hits) throws IOException {
+        List<PercolateQuery> percolateQueries = locatePercolatorQuery(mainQuery);
         if (percolateQueries.isEmpty()) {
             return;
         }
@@ -81,11 +85,15 @@ public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException {
             }
 
             PercolateQuery.QueryStore queryStore = percolateQuery.getQueryStore();
-            List<LeafReaderContext> ctxs = context.searcher().getIndexReader().leaves();
+            List<LeafReaderContext> ctxs = indexSearcher.getIndexReader().leaves();
             for (SearchHit hit : hits) {
                 LeafReaderContext ctx = ctxs.get(ReaderUtil.subIndex(hit.docId(), ctxs));
                 int segmentDocId = hit.docId() - ctx.docBase;
                 Query query = queryStore.getQueries(ctx).apply(segmentDocId);
+                if (query == null) {
+                    // This is not a document with a percolator field.
+                    continue;
+                }
 
                 TopDocs topDocs = percolatorIndexSearcher.search(query, memoryIndexMaxDoc, new Sort(SortField.FIELD_DOC));
                 if (topDocs.totalHits == 0) {
@@ -18,15 +18,82 @@
  */
 package org.elasticsearch.percolator;
 
+import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.index.memory.MemoryIndex;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.FixedBitSet;
+import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.test.ESTestCase;
 
+import java.util.Collections;
 import java.util.stream.IntStream;
 
 public class PercolatorMatchedSlotSubFetchPhaseTests extends ESTestCase {
 
+    public void testHitsExecute() throws Exception {
+        try (Directory directory = newDirectory()) {
+            // Need a one doc index:
+            try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
+                Document document = new Document();
+                indexWriter.addDocument(document);
+            }
+
+            try (DirectoryReader reader = DirectoryReader.open(directory)) {
+                IndexSearcher indexSearcher = new IndexSearcher(reader);
+
+                // A match:
+                {
+                    SearchHit[] hits = new SearchHit[]{new SearchHit(0)};
+                    PercolateQuery.QueryStore queryStore = ctx -> docId -> new TermQuery(new Term("field", "value"));
+                    MemoryIndex memoryIndex = new MemoryIndex();
+                    memoryIndex.addField("field", "value", new WhitespaceAnalyzer());
+                    PercolateQuery percolateQuery = new PercolateQuery("_name", queryStore, Collections.emptyList(),
+                        new MatchAllDocsQuery(), memoryIndex.createSearcher(), new MatchNoDocsQuery());
+
+                    PercolatorMatchedSlotSubFetchPhase.innerHitsExecute(percolateQuery, indexSearcher, hits);
+                    assertNotNull(hits[0].field(PercolatorMatchedSlotSubFetchPhase.FIELD_NAME_PREFIX));
+                    assertEquals(0, (int) hits[0].field(PercolatorMatchedSlotSubFetchPhase.FIELD_NAME_PREFIX).getValue());
+                }
+
+                // No match:
+                {
+                    SearchHit[] hits = new SearchHit[]{new SearchHit(0)};
+                    PercolateQuery.QueryStore queryStore = ctx -> docId -> new TermQuery(new Term("field", "value"));
+                    MemoryIndex memoryIndex = new MemoryIndex();
+                    memoryIndex.addField("field", "value1", new WhitespaceAnalyzer());
+                    PercolateQuery percolateQuery = new PercolateQuery("_name", queryStore, Collections.emptyList(),
+                        new MatchAllDocsQuery(), memoryIndex.createSearcher(), new MatchNoDocsQuery());
+
+                    PercolatorMatchedSlotSubFetchPhase.innerHitsExecute(percolateQuery, indexSearcher, hits);
+                    assertNull(hits[0].field(PercolatorMatchedSlotSubFetchPhase.FIELD_NAME_PREFIX));
+                }
+
+                // No query:
+                {
+                    SearchHit[] hits = new SearchHit[]{new SearchHit(0)};
+                    PercolateQuery.QueryStore queryStore = ctx -> docId -> null;
+                    MemoryIndex memoryIndex = new MemoryIndex();
+                    memoryIndex.addField("field", "value", new WhitespaceAnalyzer());
+                    PercolateQuery percolateQuery = new PercolateQuery("_name", queryStore, Collections.emptyList(),
+                        new MatchAllDocsQuery(), memoryIndex.createSearcher(), new MatchNoDocsQuery());
+
+                    PercolatorMatchedSlotSubFetchPhase.innerHitsExecute(percolateQuery, indexSearcher, hits);
+                    assertNull(hits[0].field(PercolatorMatchedSlotSubFetchPhase.FIELD_NAME_PREFIX));
+                }
+            }
+        }
+    }
+
     public void testConvertTopDocsToSlots() {
         ScoreDoc[] scoreDocs = new ScoreDoc[randomInt(128)];
         for (int i = 0; i < scoreDocs.length; i++) {
