Add a builder for SearchContext #47198

Closed · wants to merge 5 commits
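
This PR replaces ad-hoc cloning and subclassing of SearchContext with an explicit builder. As a quick orientation, here is a condensed sketch of the construction pattern the PR introduces, pieced together from the percolator hunk further down; it is a sketch of the proposed API as it appears in this diff, not authoritative documentation, and the parentContext/query variables are assumed to be in scope:

// Sketch only: mirrors the SearchContext.Builder usage proposed in this PR.
// Every builder argument below is copied from an existing parent context.
SearchContext.Builder builder = new SearchContext.Builder(parentContext.id(),
    parentContext.getTask(),
    parentContext.nodeId(),
    parentContext.indexShard(),
    parentContext.getQueryShardContext(),
    parentContext.searcher(),
    parentContext.fetchPhase(),
    parentContext.shardTarget().getClusterAlias(),
    parentContext.numberOfShards(),
    parentContext::getRelativeTimeInMillis,   // relative-time supplier
    parentContext.source());
builder.setQuery(new ParsedQuery(query));     // optional setters before build
SearchContext subContext = builder.build(() -> {}); // callback runs on release

The hunks below show this pattern replacing the old SubSearchContext clone in the percolator, plus the follow-on cleanups (try-with-resources contexts in validate/explain, SearchContext accessors in the slow log).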
@@ -48,6 +48,7 @@
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SearchContext.Lifetime;

import java.io.IOException;
import java.util.List;
@@ -76,29 +77,28 @@ public void doValidate(QueryShardContext queryShardContext) {

@Override
public void build(SearchContext context, InnerHitsContext innerHitsContext) throws IOException {
QueryShardContext queryShardContext = context.getQueryShardContext();
ParentJoinFieldMapper joinFieldMapper = ParentJoinFieldMapper.getMapper(context.mapperService());
if (joinFieldMapper == null) {
assert innerHitBuilder.isIgnoreUnmapped() : "should be validated first";
return;
}
String name = innerHitBuilder.getName() != null ? innerHitBuilder.getName() : typeName;
JoinFieldInnerHitSubContext joinFieldInnerHits =
new JoinFieldInnerHitSubContext(name, context, typeName, fetchChildInnerHits, joinFieldMapper);
setupInnerHitsContext(queryShardContext, joinFieldInnerHits);
SearchContext subSearchContext = createSubSearchContext(new QueryShardContext(context.getQueryShardContext()), context);
JoinFieldInnerHitsSubContext joinFieldInnerHits =
new JoinFieldInnerHitsSubContext(name, subSearchContext, typeName, fetchChildInnerHits, joinFieldMapper);
innerHitsContext.addInnerHitDefinition(joinFieldInnerHits);
}

static final class JoinFieldInnerHitSubContext extends InnerHitsContext.InnerHitSubContext {
static final class JoinFieldInnerHitsSubContext extends InnerHitsContext.InnerHitsSubContext {
private final String typeName;
private final boolean fetchChildInnerHits;
private final ParentJoinFieldMapper joinFieldMapper;

JoinFieldInnerHitSubContext(String name,
SearchContext context,
String typeName,
boolean fetchChildInnerHits,
ParentJoinFieldMapper joinFieldMapper) {
JoinFieldInnerHitsSubContext(String name,
SearchContext context,
String typeName,
boolean fetchChildInnerHits,
ParentJoinFieldMapper joinFieldMapper) {
super(name, context);
this.typeName = typeName;
this.fetchChildInnerHits = fetchChildInnerHits;
@@ -111,13 +111,13 @@ public TopDocsAndMaxScore[] topDocs(SearchHit[] hits) throws IOException {
TopDocsAndMaxScore[] result = new TopDocsAndMaxScore[hits.length];
for (int i = 0; i < hits.length; i++) {
SearchHit hit = hits[i];
String joinName = getSortedDocValue(joinFieldMapper.name(), this, hit.docId());
String joinName = getSortedDocValue(joinFieldMapper.name(), searchContext, hit.docId());
if (joinName == null) {
result[i] = new TopDocsAndMaxScore(Lucene.EMPTY_TOP_DOCS, Float.NaN);
continue;
}

QueryShardContext qsc = getQueryShardContext();
QueryShardContext qsc = searchContext.getQueryShardContext();
ParentIdFieldMapper parentIdFieldMapper =
joinFieldMapper.getParentIdFieldMapper(typeName, fetchChildInnerHits == false);
if (parentIdFieldMapper == null) {
@@ -135,14 +135,16 @@ public TopDocsAndMaxScore[] topDocs(SearchHit[] hits) throws IOException {
.add(joinFieldMapper.fieldType().termQuery(typeName, qsc), BooleanClause.Occur.FILTER)
.build();
} else {
String parentId = getSortedDocValue(parentIdFieldMapper.name(), this, hit.docId());
q = mapperService().fullName(IdFieldMapper.NAME).termQuery(parentId, qsc);
String parentId = getSortedDocValue(parentIdFieldMapper.name(), searchContext, hit.docId());
q = searchContext.getQueryShardContext()
.getMapperService().fullName(IdFieldMapper.NAME).termQuery(parentId, qsc);
}

Weight weight = searcher().createWeight(searcher().rewrite(q), ScoreMode.COMPLETE_NO_SCORES, 1f);
if (size() == 0) {
Weight weight = searchContext.searcher()
.createWeight(searchContext.searcher().rewrite(q), ScoreMode.COMPLETE_NO_SCORES, 1f);
if (searchContext.size() == 0) {
TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
for (LeafReaderContext ctx : searcher().getIndexReader().leaves()) {
for (LeafReaderContext ctx : searchContext.searcher().getIndexReader().leaves()) {
intersect(weight, innerHitQueryWeight, totalHitCountCollector, ctx);
}
result[i] = new TopDocsAndMaxScore(
@@ -151,26 +153,27 @@ public TopDocsAndMaxScore[] topDocs(SearchHit[] hits) throws IOException {
Lucene.EMPTY_SCORE_DOCS
), Float.NaN);
} else {
int topN = Math.min(from() + size(), searcher().getIndexReader().maxDoc());
int topN = Math.min(searchContext.from() + searchContext.size(),
searchContext.searcher().getIndexReader().maxDoc());
TopDocsCollector<?> topDocsCollector;
MaxScoreCollector maxScoreCollector = null;
if (sort() != null) {
topDocsCollector = TopFieldCollector.create(sort().sort, topN, Integer.MAX_VALUE);
if (trackScores()) {
if (searchContext.sort() != null) {
topDocsCollector = TopFieldCollector.create(searchContext.sort().sort, topN, Integer.MAX_VALUE);
if (searchContext.trackScores()) {
maxScoreCollector = new MaxScoreCollector();
}
} else {
topDocsCollector = TopScoreDocCollector.create(topN, Integer.MAX_VALUE);
maxScoreCollector = new MaxScoreCollector();
}
try {
for (LeafReaderContext ctx : searcher().getIndexReader().leaves()) {
for (LeafReaderContext ctx : searchContext.searcher().getIndexReader().leaves()) {
intersect(weight, innerHitQueryWeight, MultiCollector.wrap(topDocsCollector, maxScoreCollector), ctx);
}
} finally {
clearReleasables(Lifetime.COLLECTION);
searchContext.clearReleasables(Lifetime.COLLECTION);
}
TopDocs topDocs = topDocsCollector.topDocs(from(), size());
TopDocs topDocs = topDocsCollector.topDocs(searchContext.from(), searchContext.size());
float maxScore = Float.NaN;
if (maxScoreCollector != null) {
maxScore = maxScoreCollector.getMaxScore();
@@ -24,6 +24,7 @@
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.InnerHitBuilder;
@@ -470,7 +471,12 @@ public void testUseMaxDocInsteadOfSize() throws Exception {
assertAcked(prepareCreate("index1")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
client().admin().indices().prepareUpdateSettings("index1")
.setSettings(Collections.singletonMap(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey(), ArrayUtil.MAX_ARRAY_LENGTH))
.setSettings(
Settings.builder()
.put(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey(), ArrayUtil.MAX_ARRAY_LENGTH)
.put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), ArrayUtil.MAX_ARRAY_LENGTH)
.build()
)
.get();
List<IndexRequestBuilder> requests = new ArrayList<>();
requests.add(createIndexRequest("index1", "parent", "1", null));
@@ -39,7 +39,6 @@
import org.elasticsearch.search.fetch.subphase.highlight.Highlighter;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SubSearchContext;

import java.io.IOException;
import java.util.ArrayList;
@@ -99,9 +98,8 @@ public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException {
for (Object matchedSlot : field.getValues()) {
int slot = (int) matchedSlot;
BytesReference document = percolateQuery.getDocuments().get(slot);
SubSearchContext subSearchContext =
createSubSearchContext(context, percolatorLeafReaderContext, document, slot);
subSearchContext.parsedQuery(new ParsedQuery(query));
SearchContext subSearchContext =
createSubSearchContext(context, query, percolatorLeafReaderContext, document, slot);
hitContext.reset(
new SearchHit(slot, "unknown", Collections.emptyMap()),
percolatorLeafReaderContext, slot, percolatorIndexSearcher
@@ -166,12 +164,28 @@ static List<PercolateQuery> locatePercolatorQuery(Query query) {
return Collections.emptyList();
}

private SubSearchContext createSubSearchContext(SearchContext context, LeafReaderContext leafReaderContext,
BytesReference source, int docId) {
SubSearchContext subSearchContext = new SubSearchContext(context);
subSearchContext.highlight(new SearchContextHighlight(context.highlight().fields()));
private SearchContext createSubSearchContext(SearchContext parentContext,
Query query,
LeafReaderContext leafReaderContext,
BytesReference source,
int docId) {
SearchContext.Builder builder = new SearchContext.Builder(parentContext.id(),
parentContext.getTask(),
parentContext.nodeId(),
parentContext.indexShard(),
parentContext.getQueryShardContext(),
parentContext.searcher(),
parentContext.fetchPhase(),
parentContext.shardTarget().getClusterAlias(),
parentContext.numberOfShards(),
parentContext::getRelativeTimeInMillis,
parentContext.source());
Comment on lines +172 to +182
Contributor: Would this work as a simple copy constructor? Or do we need to be picky about which member variables we're copying over?
Contributor (author): I tried a different approach in #47733. We don't need a full search context to run the highlight phase. With this change we wouldn't need to clone the search context at all.

// Enforce highlighting by source, because MemoryIndex doesn't support stored fields.
subSearchContext.highlight().globalForceSource(true);
SearchContextHighlight highlight = new SearchContextHighlight(parentContext.highlight().fields());
highlight.globalForceSource(true);
builder.setHighlight(highlight);
builder.setQuery(new ParsedQuery(query));
SearchContext subSearchContext = builder.build(() -> {});
subSearchContext.lookup().source().setSegmentAndDocument(leafReaderContext, docId);
subSearchContext.lookup().source().setSource(source);
return subSearchContext;
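
For comparison with the review thread above, which asks whether a plain copy constructor would do instead of the builder: a hypothetical sketch of that alternative. SearchContext has no such copy constructor in this PR; the setters shown are the ones the old SubSearchContext-based code used, and everything else is assumed.

// Hypothetical copy-constructor variant (NOT part of this PR): clone all of the
// parent's members in one step, then override only what the percolator needs.
private SearchContext createSubSearchContext(SearchContext parentContext, Query query,
                                             LeafReaderContext leafReaderContext,
                                             BytesReference source, int docId) {
    SearchContext subSearchContext = new SearchContext(parentContext); // assumed copy constructor
    SearchContextHighlight highlight = new SearchContextHighlight(parentContext.highlight().fields());
    highlight.globalForceSource(true); // MemoryIndex doesn't support stored fields
    subSearchContext.highlight(highlight);
    subSearchContext.parsedQuery(new ParsedQuery(query));
    subSearchContext.lookup().source().setSegmentAndDocument(leafReaderContext, docId);
    subSearchContext.lookup().source().setSource(source);
    return subSearchContext;
}

The trade-off the thread hints at: a copy constructor hides which state is actually carried over, while the builder makes every copied field explicit at the call site.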
@@ -55,12 +55,13 @@

import static java.util.Collections.singleton;
import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.elasticsearch.test.TestSearchContext.SHARD_TARGET;
import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
import static org.hamcrest.Matchers.instanceOf;

public class RankEvalResponseTests extends ESTestCase {
private static final SearchShardTarget SHARD_TARGET =
new SearchShardTarget("test", new ShardId("test", "test", 0), null, OriginalIndices.NONE);

private static final Exception[] RANDOM_EXCEPTIONS = new Exception[] {
new ClusterBlockException(singleton(NoMasterBlockService.NO_MASTER_BLOCK_WRITES)),
@@ -37,13 +37,12 @@
import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.query.Rewriteable;
import org.elasticsearch.indices.IndexClosedException;
import org.elasticsearch.search.SearchService;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.internal.AliasFilter;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.ShardSearchRequest;
@@ -196,11 +195,11 @@ protected ShardValidateQueryResponse shardOperation(ShardValidateQueryRequest re
String error = null;
ShardSearchRequest shardSearchLocalRequest = new ShardSearchRequest(request.shardId(),
request.nowInMillis(), request.filteringAliases());
SearchContext searchContext = searchService.createSearchContext(shardSearchLocalRequest, SearchService.NO_TIMEOUT);
try {
ParsedQuery parsedQuery = searchContext.getQueryShardContext().toQuery(request.query());
searchContext.parsedQuery(parsedQuery);
searchContext.preProcess(request.rewrite());
shardSearchLocalRequest.source(new SearchSourceBuilder().query(request.query()));
try (SearchContext searchContext = searchService.createSearchContext(shardSearchLocalRequest, SearchService.NO_TIMEOUT)) {
if (request.rewrite()) {
searchContext.rewriteQuery();
}
valid = true;
explanation = explain(searchContext, request.rewrite());
} catch (QueryShardException|ParsingException e) {
Expand All @@ -209,8 +208,6 @@ protected ShardValidateQueryResponse shardOperation(ShardValidateQueryRequest re
} catch (AssertionError e) {
valid = false;
error = e.getMessage();
} finally {
Releasables.close(searchContext);
}

return new ShardValidateQueryResponse(request.shardId(), valid, explanation, error);
@@ -41,6 +41,7 @@
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.SearchService;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.internal.AliasFilter;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.ShardSearchRequest;
@@ -109,17 +110,17 @@ protected void asyncShardOperation(ExplainRequest request, ShardId shardId,
@Override
protected ExplainResponse shardOperation(ExplainRequest request, ShardId shardId) throws IOException {
ShardSearchRequest shardSearchLocalRequest = new ShardSearchRequest(shardId, request.nowInMillis, request.filteringAlias());
SearchContext context = searchService.createSearchContext(shardSearchLocalRequest, SearchService.NO_TIMEOUT);
shardSearchLocalRequest.source(new SearchSourceBuilder().query(request.query()));
Engine.GetResult result = null;
try {
try (SearchContext context = searchService.createSearchContext(shardSearchLocalRequest, SearchService.NO_TIMEOUT)) {
context.rewriteQuery();
// No need to check the type, IndexShard#get does it for us
Term uidTerm = new Term(IdFieldMapper.NAME, Uid.encodeId(request.id()));
result = context.indexShard().get(new Engine.Get(false, false, request.id(), uidTerm));
if (!result.exists()) {
return new ExplainResponse(shardId.getIndexName(), request.id(), false);
}
context.parsedQuery(context.getQueryShardContext().toQuery(request.query()));
context.preProcess(true);
context.rewriteQuery();
int topLevelDocId = result.docIdAndVersion().docId + result.docIdAndVersion().docBase;
Explanation explanation = context.searcher().explain(context.query(), topLevelDocId);
for (RescoreContext ctx : context.rescore()) {
@@ -139,7 +140,7 @@ protected ExplainResponse shardOperation(ExplainRequest request, ShardId shardId) throws IOException {
} catch (IOException e) {
throw new ElasticsearchException("Could not explain", e);
} finally {
Releasables.close(result, context);
Releasables.close(result);
}
}

12 changes: 6 additions & 6 deletions server/src/main/java/org/elasticsearch/index/SearchSlowLog.java
@@ -160,7 +160,7 @@ static final class SearchSlowLogMessage extends ESLogMessage {

private static Map<String, Object> prepareMap(SearchContext context, long tookInNanos) {
Map<String, Object> messageFields = new HashMap<>();
messageFields.put("message", context.indexShard().shardId());
messageFields.put("message", context.shardId());
messageFields.put("took", TimeValue.timeValueNanos(tookInNanos));
messageFields.put("took_millis", TimeUnit.NANOSECONDS.toMillis(tookInNanos));
if (context.queryResult().getTotalHits() != null) {
@@ -173,8 +173,8 @@ private static Map<String, Object> prepareMap(SearchContext context, long tookInNanos) {
messageFields.put("search_type", context.searchType());
messageFields.put("total_shards", context.numberOfShards());

if (context.request().source() != null) {
String source = escapeJson(context.request().source().toString(FORMAT_PARAMS));
if (context.source() != null) {
String source = escapeJson(context.source().toString(FORMAT_PARAMS));

messageFields.put("source", source);
} else {
@@ -188,7 +188,7 @@ private static Map<String, Object> prepareMap(SearchContext context, long tookInNanos) {
// Message will be used in plaintext logs
private static String message(SearchContext context, long tookInNanos) {
StringBuilder sb = new StringBuilder();
sb.append(context.indexShard().shardId())
sb.append(context.shardId())
.append(" ")
.append("took[").append(TimeValue.timeValueNanos(tookInNanos)).append("], ")
.append("took_millis[").append(TimeUnit.NANOSECONDS.toMillis(tookInNanos)).append("], ")
@@ -208,8 +208,8 @@ private static String message(SearchContext context, long tookInNanos) {
}
sb.append("search_type[").append(context.searchType()).append("], total_shards[")
.append(context.numberOfShards()).append("], ");
if (context.request().source() != null) {
sb.append("source[").append(context.request().source().toString(FORMAT_PARAMS)).append("], ");
if (context.source() != null) {
sb.append("source[").append(context.source().toString(FORMAT_PARAMS)).append("], ");
} else {
sb.append("source[], ");
}