Remove last DocumentMapper reference from MappingLookup (elastic#67157)
As part of elastic#66295 we made QueryShardContext perform mapping lookups through MappingLookup rather than MapperService. That helps because MapperService relies on DocumentMapper, which may change throughout the execution of a search request. At search time, the percolate query also needs to parse documents, which made us add a parse method to MappingLookup. That parse method currently relies on calling DocumentMapper#parseDocument through a function, but we would rather make this easier to follow (see https://github.com/elastic/elasticsearch/pull/66295/files#r544639868).

We recently removed the need to provide the entire DocumentMapper to DocumentParser#parse, opening up the possibility of using DocumentParser directly when a document needs to be parsed at query time. This commit adds everything that is needed (namely Mapping, IndexSettings and IndexAnalyzers) to MappingLookup so that it can parse a document through DocumentParser without relying on DocumentMapper.
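
To make the new shape concrete, here is a minimal, self-contained sketch in plain Java. All types below are simplified stand-ins for the real Elasticsearch classes, and the name of MappingLookup's search-time parse method is an assumption for illustration; the fromMapping arguments and getter names match the diff further down.

// Toy stand-ins: the real classes are far richer than these.
final class Mapping {}
final class IndexSettings {}
final class IndexAnalyzers {}
final class SourceToParse {}
final class ParsedDocument {}

final class DocumentParser {
    // The parser now needs only the lookup; it pulls its former arguments
    // (Mapping, IndexSettings, IndexAnalyzers) out of it.
    ParsedDocument parseDocument(SourceToParse source, MappingLookup lookup) {
        Mapping mapping = lookup.getMapping();
        IndexSettings indexSettings = lookup.getIndexSettings();
        IndexAnalyzers indexAnalyzers = lookup.getIndexAnalyzers();
        // ... actual document parsing elided ...
        return new ParsedDocument();
    }
}

final class MappingLookup {
    private final Mapping mapping;
    private final DocumentParser documentParser;
    private final IndexSettings indexSettings;
    private final IndexAnalyzers indexAnalyzers;

    // Mirrors the new call site in DocumentMapper's constructor:
    // MappingLookup.fromMapping(mapping, documentParser, indexSettings, indexAnalyzers)
    static MappingLookup fromMapping(Mapping mapping, DocumentParser documentParser,
                                     IndexSettings indexSettings, IndexAnalyzers indexAnalyzers) {
        return new MappingLookup(mapping, documentParser, indexSettings, indexAnalyzers);
    }

    private MappingLookup(Mapping mapping, DocumentParser documentParser,
                          IndexSettings indexSettings, IndexAnalyzers indexAnalyzers) {
        this.mapping = mapping;
        this.documentParser = documentParser;
        this.indexSettings = indexSettings;
        this.indexAnalyzers = indexAnalyzers;
    }

    // Hypothetical method name: the search-time parse entry point no longer
    // routes through a function wrapping DocumentMapper#parseDocument; it
    // calls DocumentParser directly.
    ParsedDocument parseDocument(SourceToParse source) {
        return documentParser.parseDocument(source, this);
    }

    Mapping getMapping() { return mapping; }
    IndexSettings getIndexSettings() { return indexSettings; }
    IndexAnalyzers getIndexAnalyzers() { return indexAnalyzers; }
}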

As a bonus, given that MappingLookup now holds a reference to these three additional objects, DocumentMapper can rely on MappingLookup to retrieve them rather than holding its own duplicate references.
Along the same lines, given that MappingLookup holds everything needed to parse a document, the signature of DocumentParser#parse can be simplified by replacing most of its arguments with MappingLookup and retrieving what is needed from it.
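
Concretely, the main parse entry point shrinks from five parameters to two. The shapes below are transcribed from the DocumentParser hunks further down (declarations only, bodies elided):

// Before: every dependency was passed alongside the lookup.
ParsedDocument parseDocument(SourceToParse source,
                             Mapping mapping,
                             MappingLookup mappingLookup,
                             IndexSettings indexSettings,
                             IndexAnalyzers indexAnalyzers) throws MapperParsingException;

// After: MappingLookup carries the Mapping, IndexSettings and IndexAnalyzers.
ParsedDocument parseDocument(SourceToParse source,
                             MappingLookup mappingLookup) throws MapperParsingException;
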
javanna committed Jan 12, 2021
1 parent 1e73818 commit 196cdc5
Showing 20 changed files with 245 additions and 307 deletions.
@@ -1119,8 +1119,8 @@ private void duelRun(PercolateQuery.QueryStore queryStore, MemoryIndex memoryInd
     }
 
     private void addQuery(Query query, List<ParseContext.Document> docs) {
-        ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(documentMapper.mapping(),
-            documentMapper.mappers(), mapperService.getIndexSettings(), mapperService.getIndexAnalyzers(), null, null, null, null);
+        ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(
+            documentMapper.mappers(), null, null, null, null);
         fieldMapper.processQuery(query, parseContext);
         ParseContext.Document queryDocument = parseContext.doc();
         // Add to string representation of the query to make debugging easier:

@@ -181,8 +181,8 @@ public void testExtractTerms() throws Exception {
 
         DocumentMapper documentMapper = mapperService.documentMapper("doc");
         PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName);
-        ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(documentMapper.mapping(),
-            documentMapper.mappers(), mapperService.getIndexSettings(), mapperService.getIndexAnalyzers(), null, null, null, null);
+        ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(documentMapper.mappers(),
+            null, null, null, null);
         fieldMapper.processQuery(bq.build(), parseContext);
         ParseContext.Document document = parseContext.doc();
 
@@ -203,8 +203,7 @@ public void testExtractTerms() throws Exception {
         bq.add(termQuery1, Occur.MUST);
         bq.add(termQuery2, Occur.MUST);
 
-        parseContext = new ParseContext.InternalParseContext(documentMapper.mapping(),
-            documentMapper.mappers(), mapperService.getIndexSettings(), mapperService.getIndexAnalyzers(), null, null, null, null);
+        parseContext = new ParseContext.InternalParseContext(documentMapper.mappers(), null, null, null, null);
         fieldMapper.processQuery(bq.build(), parseContext);
         document = parseContext.doc();
 
@@ -233,8 +232,8 @@ public void testExtractRanges() throws Exception {
 
         DocumentMapper documentMapper = mapperService.documentMapper("doc");
         PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName);
-        ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(documentMapper.mapping(),
-            documentMapper.mappers(), mapperService.getIndexSettings(), mapperService.getIndexAnalyzers(), null, null, null, null);
+        ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(
+            documentMapper.mappers(), null, null, null, null);
         fieldMapper.processQuery(bq.build(), parseContext);
         ParseContext.Document document = parseContext.doc();
 
@@ -259,8 +258,7 @@ public void testExtractRanges() throws Exception {
             .rangeQuery(15, 20, true, true, null, null, null, context);
         bq.add(rangeQuery2, Occur.MUST);
 
-        parseContext = new ParseContext.InternalParseContext(documentMapper.mapping(),
-            documentMapper.mappers(), mapperService.getIndexSettings(), mapperService.getIndexAnalyzers(), null, null, null, null);
+        parseContext = new ParseContext.InternalParseContext(documentMapper.mappers(), null, null, null, null);
         fieldMapper.processQuery(bq.build(), parseContext);
         document = parseContext.doc();
 
@@ -283,8 +281,8 @@ public void testExtractTermsAndRanges_failed() throws Exception {
         TermRangeQuery query = new TermRangeQuery("field1", new BytesRef("a"), new BytesRef("z"), true, true);
         DocumentMapper documentMapper = mapperService.documentMapper("doc");
         PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName);
-        ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(documentMapper.mapping(),
-            documentMapper.mappers(), mapperService.getIndexSettings(), mapperService.getIndexAnalyzers(), null, null, null, null);
+        ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(documentMapper.mappers(),
+            null, null, null, null);
         fieldMapper.processQuery(query, parseContext);
         ParseContext.Document document = parseContext.doc();
 
@@ -298,8 +296,8 @@ public void testExtractTermsAndRanges_partial() throws Exception {
         PhraseQuery phraseQuery = new PhraseQuery("field", "term");
         DocumentMapper documentMapper = mapperService.documentMapper("doc");
         PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName);
-        ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(documentMapper.mapping(),
-            documentMapper.mappers(), mapperService.getIndexSettings(), mapperService.getIndexAnalyzers(), null, null, null, null);
+        ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(documentMapper.mappers(),
+            null, null, null, null);
         fieldMapper.processQuery(phraseQuery, parseContext);
         ParseContext.Document document = parseContext.doc();

@@ -97,11 +97,8 @@ public DocumentMapper build() {
     private final String type;
     private final Text typeText;
     private final CompressedXContent mappingSource;
-    private final Mapping mapping;
     private final DocumentParser documentParser;
-    private final MappingLookup fieldMappers;
-    private final IndexSettings indexSettings;
-    private final IndexAnalyzers indexAnalyzers;
+    private final MappingLookup mappingLookup;
     private final MetadataFieldMapper[] deleteTombstoneMetadataFieldMappers;
     private final MetadataFieldMapper[] noopTombstoneMetadataFieldMappers;
 
@@ -111,11 +108,8 @@ private DocumentMapper(IndexSettings indexSettings,
                            Mapping mapping) {
         this.type = mapping.root().name();
         this.typeText = new Text(this.type);
-        this.mapping = mapping;
         this.documentParser = documentParser;
-        this.indexSettings = indexSettings;
-        this.indexAnalyzers = indexAnalyzers;
-        this.fieldMappers = MappingLookup.fromMapping(mapping, this::parse);
+        this.mappingLookup = MappingLookup.fromMapping(mapping, documentParser, indexSettings, indexAnalyzers);
 
         try {
             mappingSource = new CompressedXContent(this, XContentType.JSON, ToXContent.EMPTY_PARAMS);
@@ -134,7 +128,7 @@ private DocumentMapper(IndexSettings indexSettings,
     }
 
     public Mapping mapping() {
-        return mapping;
+        return mappingLookup.getMapping();
     }
 
     public String type() {
@@ -146,19 +140,19 @@ public Text typeText() {
     }
 
     public Map<String, Object> meta() {
-        return mapping.meta;
+        return mapping().meta;
     }
 
     public CompressedXContent mappingSource() {
         return this.mappingSource;
     }
 
     public RootObjectMapper root() {
-        return mapping.root;
+        return mapping().root;
     }
 
     public <T extends MetadataFieldMapper> T metadataMapper(Class<T> type) {
-        return mapping.metadataMapper(type);
+        return mapping().metadataMapper(type);
     }
 
     public IndexFieldMapper indexMapper() {
@@ -190,24 +184,23 @@ public boolean hasNestedObjects() {
     }
 
     public MappingLookup mappers() {
-        return this.fieldMappers;
+        return this.mappingLookup;
     }
 
     public ParsedDocument parse(SourceToParse source) throws MapperParsingException {
-        return documentParser.parseDocument(source, mapping, fieldMappers, indexSettings, indexAnalyzers);
+        return documentParser.parseDocument(source, mappingLookup);
     }
 
     public ParsedDocument createDeleteTombstoneDoc(String index, String type, String id) throws MapperParsingException {
         final SourceToParse emptySource = new SourceToParse(index, type, id, new BytesArray("{}"), XContentType.JSON);
-        return documentParser.parseDocument(emptySource, mapping, deleteTombstoneMetadataFieldMappers, fieldMappers,
-            indexSettings, indexAnalyzers).toTombstone();
+        return documentParser.parseDocument(emptySource, deleteTombstoneMetadataFieldMappers, mappingLookup).toTombstone();
     }
 
     public ParsedDocument createNoopTombstoneDoc(String index, String reason) throws MapperParsingException {
         final String id = ""; // _id won't be used.
         final SourceToParse sourceToParse = new SourceToParse(index, type, id, new BytesArray("{}"), XContentType.JSON);
-        final ParsedDocument parsedDoc = documentParser.parseDocument(sourceToParse, mapping, noopTombstoneMetadataFieldMappers,
-            fieldMappers, indexSettings, indexAnalyzers).toTombstone();
+        final ParsedDocument parsedDoc = documentParser.parseDocument(sourceToParse, noopTombstoneMetadataFieldMappers, mappingLookup)
+            .toTombstone();
         // Store the reason of a noop as a raw string in the _source field
         final BytesRef byteRef = new BytesRef(reason);
         parsedDoc.rootDoc().add(new StoredField(SourceFieldMapper.NAME, byteRef.bytes, byteRef.offset, byteRef.length));
@@ -300,12 +293,12 @@ public String getNestedParent(String path) {
     }
 
     public DocumentMapper merge(Mapping mapping, MergeReason reason) {
-        Mapping merged = this.mapping.merge(mapping, reason);
-        return new DocumentMapper(this.indexSettings, this.indexAnalyzers, this.documentParser, merged);
+        Mapping merged = this.mapping().merge(mapping, reason);
+        return new DocumentMapper(mappingLookup.getIndexSettings(), mappingLookup.getIndexAnalyzers(), documentParser, merged);
     }
 
     public void validate(IndexSettings settings, boolean checkLimits) {
-        this.mapping.validate(this.fieldMappers);
+        this.mapping().validate(this.mappingLookup);
         if (settings.getIndexMetadata().isRoutingPartitionedIndex()) {
             if (routingFieldMapper().required() == false) {
                 throw new IllegalArgumentException("mapping type [" + type() + "] must have routing "
@@ -316,13 +309,13 @@ public void validate(IndexSettings settings, boolean checkLimits) {
             throw new IllegalArgumentException("cannot have nested fields when index sort is activated");
         }
         if (checkLimits) {
-            this.fieldMappers.checkLimits(settings);
+            this.mappingLookup.checkLimits(settings);
         }
     }
 
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        return mapping.toXContent(builder, params);
+        return mapping().toXContent(builder, params);
     }
 
     @Override
@@ -331,9 +324,8 @@ public String toString() {
             "type='" + type + '\'' +
             ", typeText=" + typeText +
             ", mappingSource=" + mappingSource +
-            ", mapping=" + mapping +
             ", documentParser=" + documentParser +
-            ", fieldMappers=" + fieldMappers +
+            ", mappingLookup=" + mappingLookup +
             ", objectMappers=" + mappers().objectMappers() +
             ", hasNestedObjects=" + hasNestedObjects() +
             ", deleteTombstoneMetadataFieldMappers=" + Arrays.toString(deleteTombstoneMetadataFieldMappers) +

@@ -40,8 +40,6 @@
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.analysis.IndexAnalyzers;
 import org.elasticsearch.index.query.QueryShardContext;
 
 /** A parser for documents, given mappings from a DocumentMapper */
@@ -60,34 +58,26 @@ final class DocumentParser {
     }
 
     ParsedDocument parseDocument(SourceToParse source,
-                                 Mapping mapping,
-                                 MappingLookup mappingLookup,
-                                 IndexSettings indexSettings,
-                                 IndexAnalyzers indexAnalyzers) throws MapperParsingException {
-        return parseDocument(source, mapping, mapping.metadataMappers, mappingLookup, indexSettings, indexAnalyzers);
+                                 MappingLookup mappingLookup) throws MapperParsingException {
+        return parseDocument(source, mappingLookup.getMapping().metadataMappers, mappingLookup);
     }
 
     ParsedDocument parseDocument(SourceToParse source,
-                                 Mapping mapping,
                                  MetadataFieldMapper[] metadataFieldsMappers,
-                                 MappingLookup mappingLookup,
-                                 IndexSettings indexSettings,
-                                 IndexAnalyzers indexAnalyzers) throws MapperParsingException {
+                                 MappingLookup mappingLookup) throws MapperParsingException {
         validateType(source, mappingLookup.getType());
         final ParseContext.InternalParseContext context;
         final XContentType xContentType = source.getXContentType();
         try (XContentParser parser = XContentHelper.createParser(xContentRegistry,
             LoggingDeprecationHandler.INSTANCE, source.source(), xContentType)) {
-            context = new ParseContext.InternalParseContext(mapping,
+            context = new ParseContext.InternalParseContext(
                 mappingLookup,
-                indexSettings,
-                indexAnalyzers,
                 dateParserContext,
                 dynamicRuntimeFieldsBuilder,
                 source,
                 parser);
             validateStart(parser);
-            internalParseDocument(mapping.root(), metadataFieldsMappers, context, parser);
+            internalParseDocument(mappingLookup.getMapping().root(), metadataFieldsMappers, context, parser);
             validateEnd(parser);
         } catch (Exception e) {
             throw wrapInMapperParsingException(source, e);
@@ -99,7 +89,7 @@ ParsedDocument parseDocument(SourceToParse source,
 
         context.postParse();
 
-        return parsedDocument(source, context, createDynamicUpdate(mapping, mappingLookup,
+        return parsedDocument(source, context, createDynamicUpdate(mappingLookup,
             context.getDynamicMappers(), context.getDynamicRuntimeFields()));
     }
 
@@ -180,7 +170,6 @@ private static boolean isEmptyDoc(RootObjectMapper root, XContentParser parser)
         return false;
     }
 
-
     private static ParsedDocument parsedDocument(SourceToParse source, ParseContext.InternalParseContext context, Mapping update) {
         return new ParsedDocument(
             context.version(),
@@ -195,7 +184,6 @@ private static ParsedDocument parsedDocument(SourceToParse source, ParseContext.
         );
     }
 
-
     private static MapperParsingException wrapInMapperParsingException(SourceToParse source, Exception e) {
         // if its already a mapper parsing exception, no need to wrap it...
         if (e instanceof MapperParsingException) {
@@ -234,25 +222,23 @@ private static String[] splitAndValidatePath(String fullFieldPath) {
     }
 
     /** Creates a Mapping containing any dynamically added fields, or returns null if there were no dynamic mappings. */
-    static Mapping createDynamicUpdate(Mapping mapping,
-                                       MappingLookup mappingLookup,
+    static Mapping createDynamicUpdate(MappingLookup mappingLookup,
                                        List<Mapper> dynamicMappers,
                                        List<RuntimeFieldType> dynamicRuntimeFields) {
         if (dynamicMappers.isEmpty() && dynamicRuntimeFields.isEmpty()) {
             return null;
         }
         RootObjectMapper root;
         if (dynamicMappers.isEmpty() == false) {
-            root = createDynamicUpdate(mapping.root, mappingLookup, dynamicMappers);
+            root = createDynamicUpdate(mappingLookup, dynamicMappers);
         } else {
-            root = mapping.root.copyAndReset();
+            root = mappingLookup.getMapping().root().copyAndReset();
         }
         root.addRuntimeFields(dynamicRuntimeFields);
-        return mapping.mappingUpdate(root);
+        return mappingLookup.getMapping().mappingUpdate(root);
     }
 
-    private static RootObjectMapper createDynamicUpdate(RootObjectMapper root,
-                                                        MappingLookup mappingLookup,
+    private static RootObjectMapper createDynamicUpdate(MappingLookup mappingLookup,
                                                         List<Mapper> dynamicMappers) {
 
         // We build a mapping by first sorting the mappers, so that all mappers containing a common prefix
@@ -262,7 +248,7 @@ private static RootObjectMapper createDynamicUpdate(RootObjectMapper root,
         Iterator<Mapper> dynamicMapperItr = dynamicMappers.iterator();
         List<ObjectMapper> parentMappers = new ArrayList<>();
         Mapper firstUpdate = dynamicMapperItr.next();
-        parentMappers.add(createUpdate(root, splitAndValidatePath(firstUpdate.name()), 0, firstUpdate));
+        parentMappers.add(createUpdate(mappingLookup.getMapping().root(), splitAndValidatePath(firstUpdate.name()), 0, firstUpdate));
         Mapper previousMapper = null;
         while (dynamicMapperItr.hasNext()) {
             Mapper newMapper = dynamicMapperItr.next();
@@ -308,7 +294,6 @@ private static RootObjectMapper createDynamicUpdate(RootObjectMapper root,
         return (RootObjectMapper)parentMappers.get(0);
     }
 
-
     private static void popMappers(List<ObjectMapper> parentMappers, int keepBefore, boolean merge) {
         assert keepBefore >= 1; // never remove the root mapper
         // pop off parent mappers not needed by the current mapper,

@@ -167,8 +167,4 @@ Iterable<MappedFieldType> filter(Predicate<MappedFieldType> predicate) {
         return () -> Stream.concat(fullNameToFieldType.values().stream(), dynamicKeyLookup.fieldTypes())
             .distinct().filter(predicate).iterator();
     }
-
-    public String getType() {
-        return type;
-    }
 }

@@ -19,6 +19,7 @@
 
 package org.elasticsearch.index.mapper;
 
+import org.elasticsearch.Version;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.ToXContentFragment;
@@ -30,6 +31,7 @@
 import java.io.IOException;
 import java.io.UncheckedIOException;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.Map;
@@ -42,6 +44,11 @@
  */
 public final class Mapping implements ToXContentFragment {
 
+    public static final Mapping EMPTY = new Mapping(
+        new RootObjectMapper.Builder("_doc", Version.CURRENT).build(new ContentPath()),
+        new MetadataFieldMapper[0],
+        Collections.emptyMap());
+
     final RootObjectMapper root;
     final MetadataFieldMapper[] metadataMappers;
     final Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappersMap;