Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Teach _search to read search time mappings #59316

Closed
Closed
13 changes: 12 additions & 1 deletion server/src/main/java/org/elasticsearch/index/IndexService.java
Original file line number Diff line number Diff line change
Expand Up @@ -584,14 +584,25 @@ public IndexSettings getIndexSettings() {
* {@link IndexReader}-specific optimizations, such as rewriting containing range queries.
*/
public QueryShardContext newQueryShardContext(int shardId, IndexSearcher searcher, LongSupplier nowInMillis, String clusterAlias) {
    // Delegates to the runtime-mappings-aware overload, passing null (no extra mappings).
    return newQueryShardContext(shardId, searcher, nowInMillis, clusterAlias, null);
}

public QueryShardContext newQueryShardContext(
int shardId,
IndexSearcher searcher,
LongSupplier nowInMillis,
String clusterAlias,
Map<String, Object> runtimeMappings
) {
final SearchIndexNameMatcher indexNameMatcher =
new SearchIndexNameMatcher(index().getName(), clusterAlias, clusterService, expressionResolver);
return new QueryShardContext(
shardId, indexSettings, bigArrays, indexCache.bitsetFilterCache(), indexFieldData::getForField, mapperService(),
similarityService(), scriptService, xContentRegistry, namedWriteableRegistry, client, searcher, nowInMillis, clusterAlias,
indexNameMatcher, allowExpensiveQueries, valuesSourceRegistry);
indexNameMatcher, allowExpensiveQueries, valuesSourceRegistry, runtimeMappings);
}


nik9000 marked this conversation as resolved.
Show resolved Hide resolved
/**
* The {@link ThreadPool} to use for this index.
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -103,14 +103,17 @@ public Builder put(MetadataFieldMapper.Builder<?> mapper) {
return this;
}

public DocumentMapper build(MapperService mapperService) {
public Mapping buildMapping(Version indexVersionCreated) {
Objects.requireNonNull(rootObjectMapper, "Mapper builder must have the root object mapper set");
Mapping mapping = new Mapping(
mapperService.getIndexSettings().getIndexVersionCreated(),
return new Mapping(
indexVersionCreated,
rootObjectMapper,
metadataMappers.values().toArray(new MetadataFieldMapper[metadataMappers.values().size()]),
meta);
return new DocumentMapper(mapperService, mapping);
}

public DocumentMapper build(MapperService mapperService) {
return new DocumentMapper(mapperService, buildMapping(mapperService.getIndexSettings().getIndexVersionCreated()));
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -80,11 +80,15 @@ public DocumentMapper parse(@Nullable String type, CompressedXContent source) th
if (mapping == null) {
mapping = new HashMap<>();
}
return parse(type, mapping);
return parse(type, mapping).build(mapperService);
}

/**
 * Parses the given mapping definition into a {@link Mapping}.
 *
 * NOTE(review): builds against {@code Version.CURRENT} rather than the index's
 * creation version — presumably intentional for search-time mappings; confirm.
 */
public Mapping parseMapping(String type, Map<String, Object> mapping) {
    DocumentMapper.Builder docBuilder = parse(type, mapping);
    return docBuilder.buildMapping(Version.CURRENT);
}

@SuppressWarnings({"unchecked"})
private DocumentMapper parse(String type, Map<String, Object> mapping) throws MapperParsingException {
private DocumentMapper.Builder parse(String type, Map<String, Object> mapping) throws MapperParsingException {
if (type == null) {
throw new MapperParsingException("Failed to derive type");
}
Expand Down Expand Up @@ -134,7 +138,7 @@ private DocumentMapper parse(String type, Map<String, Object> mapping) throws Ma

checkNoRemainingFields(mapping, parserContext.indexVersionCreated(), "Root mapping definition has unsupported parameters: ");

return docBuilder.build(mapperService);
return docBuilder;
}

public static void checkNoRemainingFields(String fieldName, Map<?, ?> fieldNodeMap, Version indexVersionCreated) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -449,6 +449,14 @@ protected final void doXContentAnalyzers(XContentBuilder builder, boolean includ
}
}

/**
 * Called when this {@linkplain Mapper} is parsed on the {@code _search}
 * request to check if this field can be a runtime field. Defaults to
 * {@code false}; subclasses that support being defined in a search
 * request's {@code runtime_mappings} section are expected to override this.
 */
public boolean isRuntimeField() {
    return false;
}

protected static String indexOptionToString(IndexOptions indexOption) {
switch (indexOption) {
case DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -171,5 +171,4 @@ public final String simpleName() {
/**
 * Return the merge of {@code mergeWith} into this.
 * Both {@code this} and {@code mergeWith} will be left unmodified.
 *
 * @param mergeWith the mapper to merge into this one
 * @return a new mapper representing the merged result
 */
public abstract Mapper merge(Mapper mergeWith);

nik9000 marked this conversation as resolved.
Show resolved Hide resolved
}
Original file line number Diff line number Diff line change
Expand Up @@ -618,6 +618,65 @@ public Analyzer searchQuoteAnalyzer() {
return this.searchQuoteAnalyzer;
}

/**
* Builds a {@linkplain Function} to lookup mappers for a request, adding
* any {@code extraMapping} provided.
nik9000 marked this conversation as resolved.
Show resolved Hide resolved
* @param runtimeMappings extra mappings parse and to add to the request
* lookup or {@code null} if there aren't any extra mappings
*/
public Function<String, MappedFieldType> newFieldTypeLookup(Map<String, Object> runtimeMappings) {
if (runtimeMappings == null || runtimeMappings.size() == 0) {
return this::fieldType;
}
Mapper.BuilderContext builderContext = new Mapper.BuilderContext(indexSettings.getSettings(), new ContentPath(0));
Collection<ObjectMapper> objectMappers = new ArrayList<>();
Collection<FieldMapper> fieldMappers = new ArrayList<>();
Collection<FieldAliasMapper> fieldAliasMappers = new ArrayList<>();
for (Map.Entry<String, Object> runtimeEntry : runtimeMappings.entrySet()) {
@SuppressWarnings("unchecked") // Safe because that is how we deserialized it
Map<String, Object> definition = (Map<String, Object>) runtimeEntry.getValue();
String type = (String) definition.remove("type");
if (type == null) {
throw new IllegalArgumentException("[type] is required for runtime mapping [" + runtimeEntry.getKey() + "]");
}
Mapper.TypeParser parser = documentMapperParser().parserContext().typeParser(type);
if (parser == null) {
throw new IllegalArgumentException("[" + type + "] is unknown type for runtime mapping [" + runtimeEntry.getKey() + "]");
}
Mapper.Builder<?> builder = parser.parse(runtimeEntry.getKey(), definition, documentMapperParser().parserContext());
Mapper mapper = builder.build(builderContext);

MapperUtils.collect(mapper, objectMappers, fieldMappers, fieldAliasMappers);
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can you expand on why we need this? I was expecting that alias or objects would be caught anyways by your checks below, that only runtime fields can be defined.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This should allow us to collect the sub-mappers under objects. I do need to make sure there is a test for though.


}
// We don't do anything with the collected ObjectMappers
Map<String, MappedFieldType> extra = new HashMap<>();
for (FieldMapper fm : fieldMappers) {
if (false == fm.isRuntimeField()) {
throw new IllegalArgumentException(
"[" + fm.typeName() + "] are not supported in runtime mappings"
);
}
MappedFieldType fromIndexMapping = fieldType(fm.name());
if (fromIndexMapping != null) {
throw new IllegalArgumentException(
"[" + fm.name() + "] can't be defined in the search's runtime mappings and the index's mappings"
);
}
extra.put(fm.name(), fm.fieldType());
}
if (false == fieldAliasMappers.isEmpty()) {
throw new IllegalArgumentException("aliases are not supported in runtime mappings");
}
return fullName -> {
MappedFieldType searchTime = extra.get(fullName);
javanna marked this conversation as resolved.
Show resolved Hide resolved
if (searchTime != null) {
return searchTime;
}
return fieldType(fullName);
};
}

/**
* Returns <code>true</code> if fielddata is enabled for the {@link IdFieldMapper} field, <code>false</code> otherwise.
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,7 @@
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.BooleanSupplier;
import java.util.function.Function;
import java.util.function.LongSupplier;
import java.util.function.Predicate;

Expand Down Expand Up @@ -102,6 +103,7 @@ public class QueryShardContext extends QueryRewriteContext {
private boolean mapUnmappedFieldAsString;
private NestedScope nestedScope;
private final ValuesSourceRegistry valuesSourceRegistry;
private final Function<String, MappedFieldType> lookupFieldType;

public QueryShardContext(int shardId,
IndexSettings indexSettings,
Expand All @@ -119,18 +121,20 @@ public QueryShardContext(int shardId,
String clusterAlias,
Predicate<String> indexNameMatcher,
BooleanSupplier allowExpensiveQueries,
ValuesSourceRegistry valuesSourceRegistry) {
ValuesSourceRegistry valuesSourceRegistry,
Map<String, Object> runtimeMappings) {
this(shardId, indexSettings, bigArrays, bitsetFilterCache, indexFieldDataLookup, mapperService, similarityService,
scriptService, xContentRegistry, namedWriteableRegistry, client, searcher, nowInMillis, indexNameMatcher,
new Index(RemoteClusterAware.buildRemoteIndexName(clusterAlias, indexSettings.getIndex().getName()),
indexSettings.getIndex().getUUID()), allowExpensiveQueries, valuesSourceRegistry);
scriptService, xContentRegistry, namedWriteableRegistry, client, searcher, nowInMillis, indexNameMatcher,
new Index(RemoteClusterAware.buildRemoteIndexName(clusterAlias, indexSettings.getIndex().getName()),
indexSettings.getIndex().getUUID()), allowExpensiveQueries, valuesSourceRegistry,
mapperService.newFieldTypeLookup(runtimeMappings));
}

/** Creates a copy of {@code source}, sharing all of its state including the field-type lookup. */
public QueryShardContext(QueryShardContext source) {
    this(source.shardId, source.indexSettings, source.bigArrays, source.bitsetFilterCache, source.indexFieldDataService,
        source.mapperService, source.similarityService, source.scriptService, source.getXContentRegistry(),
        source.getWriteableRegistry(), source.client, source.searcher, source.nowInMillis, source.indexNameMatcher,
        source.fullyQualifiedIndex, source.allowExpensiveQueries, source.valuesSourceRegistry, source.lookupFieldType);
}

private QueryShardContext(int shardId,
Expand All @@ -149,7 +153,8 @@ private QueryShardContext(int shardId,
Predicate<String> indexNameMatcher,
Index fullyQualifiedIndex,
BooleanSupplier allowExpensiveQueries,
ValuesSourceRegistry valuesSourceRegistry) {
ValuesSourceRegistry valuesSourceRegistry,
Function<String, MappedFieldType> lookupFieldType) {
super(xContentRegistry, namedWriteableRegistry, client, nowInMillis);
this.shardId = shardId;
this.similarityService = similarityService;
Expand All @@ -166,6 +171,7 @@ private QueryShardContext(int shardId,
this.fullyQualifiedIndex = fullyQualifiedIndex;
this.allowExpensiveQueries = allowExpensiveQueries;
this.valuesSourceRegistry = valuesSourceRegistry;
this.lookupFieldType = lookupFieldType;
}

private void reset() {
Expand Down Expand Up @@ -241,7 +247,7 @@ public MappedFieldType fieldMapper(String name) {
if (name.equals(TypeFieldMapper.NAME)) {
deprecationLogger.deprecate("query_with_types", TYPES_DEPRECATION_MESSAGE);
}
return failIfFieldMappingNotFound(name, mapperService.fieldType(name));
return failIfFieldMappingNotFound(name, lookupFieldType.apply(name));
nik9000 marked this conversation as resolved.
Show resolved Hide resolved
}

public ObjectMapper getObjectMapper(String name) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -175,8 +175,9 @@ final class DefaultSearchContext extends SearchContext {
engineSearcher.getQueryCache(), engineSearcher.getQueryCachingPolicy(), lowLevelCancellation);
this.relativeTimeSupplier = relativeTimeSupplier;
this.timeout = timeout;
Map<String, Object> runtimeMappings = request.source() == null ? null : request.source().runtimeMappings();
queryShardContext = indexService.newQueryShardContext(request.shardId().id(), searcher,
request::nowInMillis, shardTarget.getClusterAlias());
request::nowInMillis, shardTarget.getClusterAlias(), runtimeMappings);
queryBoost = request.indexBoost();
this.lowLevelCancellation = lowLevelCancellation;
}
Expand Down Expand Up @@ -775,7 +776,7 @@ public FetchSearchResult fetchResult() {

@Override
public MappedFieldType fieldType(String name) {
return mapperService().fieldType(name);
return queryShardContext.fieldMapper(name);
nik9000 marked this conversation as resolved.
Show resolved Hide resolved
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
package org.elasticsearch.search.builder;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
Expand Down Expand Up @@ -62,6 +63,7 @@
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder;
Expand Down Expand Up @@ -107,6 +109,7 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R
public static final ParseField SEARCH_AFTER = new ParseField("search_after");
public static final ParseField COLLAPSE = new ParseField("collapse");
public static final ParseField SLICE = new ParseField("slice");
public static final ParseField RUNTIME_MAPPINGS = new ParseField("runtime_mappings");

public static SearchSourceBuilder fromXContent(XContentParser parser) throws IOException {
return fromXContent(parser, true);
Expand Down Expand Up @@ -185,6 +188,8 @@ public static HighlightBuilder highlight() {

private CollapseBuilder collapse = null;

private Map<String, Object> runtimeMappings;

/**
* Constructs a new search source builder.
*/
Expand Down Expand Up @@ -239,6 +244,10 @@ public SearchSourceBuilder(StreamInput in) throws IOException {
sliceBuilder = in.readOptionalWriteable(SliceBuilder::new);
collapse = in.readOptionalWriteable(CollapseBuilder::new);
trackTotalHitsUpTo = in.readOptionalInt();
if (in.getVersion().onOrAfter(Version.V_8_0_0)) {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

add a TODO to update the version once the branch is merged?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

👍

// TODO update version after backporting runtime fields
runtimeMappings = in.readMap();
}
}

@Override
Expand Down Expand Up @@ -293,6 +302,12 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalWriteable(sliceBuilder);
out.writeOptionalWriteable(collapse);
out.writeOptionalInt(trackTotalHitsUpTo);
if (out.getVersion().onOrAfter(Version.V_8_0_0)) {
// TODO update version after backporting runtime fields
out.writeMap(runtimeMappings);
} else {
throw new IllegalArgumentException("[" + RUNTIME_MAPPINGS.getPreferredName() + "] are not supported on nodes older than 8.0.0");
}
}

/**
Expand Down Expand Up @@ -895,6 +910,21 @@ public List<String> stats() {
return stats;
}

/**
 * Extra runtime field mappings to apply at search time. The map is stored
 * as-is (not copied); {@code null} means no extra mappings.
 *
 * @return this builder, for chaining
 */
public SearchSourceBuilder runtimeMappings(Map<String, Object> runtimeMappings) {
    this.runtimeMappings = runtimeMappings;
    return this;
}

/**
 * Extra runtime field mappings to apply at search time, or {@code null}
 * if none were set.
 */
public Map<String, Object> runtimeMappings() {
    return runtimeMappings;
}

public SearchSourceBuilder ext(List<SearchExtBuilder> searchExtBuilders) {
this.extBuilders = Objects.requireNonNull(searchExtBuilders, "searchExtBuilders must not be null");
return this;
Expand Down Expand Up @@ -996,6 +1026,7 @@ private SearchSourceBuilder shallowCopy(QueryBuilder queryBuilder, QueryBuilder
rewrittenBuilder.version = version;
rewrittenBuilder.seqNoAndPrimaryTerm = seqNoAndPrimaryTerm;
rewrittenBuilder.collapse = collapse;
rewrittenBuilder.runtimeMappings = runtimeMappings;
return rewrittenBuilder;
}

Expand Down Expand Up @@ -1104,6 +1135,8 @@ public void parseXContent(XContentParser parser, boolean checkTrailingTokens) th
sliceBuilder = SliceBuilder.fromXContent(parser);
} else if (COLLAPSE.match(currentFieldName, parser.getDeprecationHandler())) {
collapse = CollapseBuilder.fromXContent(parser);
} else if (RUNTIME_MAPPINGS.match(currentFieldName, parser.getDeprecationHandler())) {
runtimeMappings = parser.map();
} else {
throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
parser.getTokenLocation());
Expand Down Expand Up @@ -1551,7 +1584,8 @@ public boolean equals(Object obj) {
&& Objects.equals(profile, other.profile)
&& Objects.equals(extBuilders, other.extBuilders)
&& Objects.equals(collapse, other.collapse)
&& Objects.equals(trackTotalHitsUpTo, other.trackTotalHitsUpTo);
&& Objects.equals(trackTotalHitsUpTo, other.trackTotalHitsUpTo)
&& Objects.equals(runtimeMappings, other.runtimeMappings);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOExcept

for (FieldAndFormat fieldAndFormat : context.docValuesContext().fields()) {
String field = fieldAndFormat.field;
MappedFieldType fieldType = context.mapperService().fieldType(field);
MappedFieldType fieldType = context.fieldType(field);
nik9000 marked this conversation as resolved.
Show resolved Hide resolved
if (fieldType != null) {
final IndexFieldData<?> indexFieldData = context.getForField(fieldType);
final boolean isNanosecond;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,7 @@ public void setupCreateIndexRequestAndAliasValidator() {
queryShardContext = new QueryShardContext(0,
new IndexSettings(IndexMetadata.builder("test").settings(indexSettings).build(), indexSettings),
BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, xContentRegistry(), writableRegistry(),
null, null, () -> randomNonNegativeLong(), null, null, () -> true, null);
null, null, () -> randomNonNegativeLong(), null, null, () -> true, null, null);
}

private ClusterState createClusterState(String name, int numShards, int numReplicas, Settings settings) {
Expand Down
Loading