Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix merging component templates with a mix of dotted and nested object mapper definitions #106077

Merged
merged 10 commits into from
Apr 8, 2024
7 changes: 7 additions & 0 deletions docs/changelog/106077.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
pr: 106077
summary: Fix merging component templates with a mix of dotted and nested object mapper
definitions
area: Mapping
type: bug
issues:
- 105482
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,11 @@ public void testBasics() throws Exception {
.endObject()
);

Mapping parsedMapping = createMapperService(mapping).parseMapping("type", new CompressedXContent(mapping));
Mapping parsedMapping = createMapperService(mapping).parseMapping(
"type",
MapperService.MergeReason.MAPPING_UPDATE,
new CompressedXContent(mapping)
);
assertEquals(mapping, parsedMapping.toCompressedXContent().toString());
assertNotNull(parsedMapping.getMetadataMapperByClass(RankFeatureMetaFieldMapper.class));
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@
import org.elasticsearch.index.mapper.LuceneDocument;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.mapper.TestDocumentParserContext;
Expand Down Expand Up @@ -206,7 +207,7 @@ public void init() throws Exception {
.endObject()
.endObject()
);
mapperService.merge("doc", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE);
mapperService.merge("doc", new CompressedXContent(mapper), MergeReason.MAPPING_UPDATE);
}

private void addQueryFieldMappings() throws Exception {
Expand All @@ -223,7 +224,7 @@ private void addQueryFieldMappings() throws Exception {
.endObject()
.endObject()
);
mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE);
mapperService.merge("doc", new CompressedXContent(percolatorMapper), MergeReason.MAPPING_UPDATE);
fieldType = (PercolatorFieldMapper.PercolatorFieldType) mapperService.fieldType(fieldName);
}

Expand Down Expand Up @@ -699,7 +700,7 @@ public void testAllowNoAdditionalSettings() throws Exception {
MapperParsingException e = expectThrows(
MapperParsingException.class,
() -> indexServiceWithoutSettings.mapperService()
.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE)
.merge("doc", new CompressedXContent(percolatorMapper), MergeReason.MAPPING_UPDATE)
);
assertThat(e.getMessage(), containsString("Mapping definition for [" + fieldName + "] has unsupported parameters: [index : no]"));
}
Expand All @@ -722,7 +723,7 @@ public void testMultiplePercolatorFields() throws Exception {
.endObject()
.endObject()
);
mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE);
mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MergeReason.MAPPING_UPDATE);

QueryBuilder queryBuilder = matchQuery("field", "value");
ParsedDocument doc = mapperService.documentMapper()
Expand Down Expand Up @@ -763,7 +764,7 @@ public void testNestedPercolatorField() throws Exception {
.endObject()
.endObject()
);
mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE);
mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MergeReason.MAPPING_UPDATE);

QueryBuilder queryBuilder = matchQuery("field", "value");
ParsedDocument doc = mapperService.documentMapper()
Expand Down Expand Up @@ -912,7 +913,7 @@ public void testEmptyName() throws Exception {
);
MapperParsingException e = expectThrows(
MapperParsingException.class,
() -> mapperService.parseMapping("type1", new CompressedXContent(mapping))
() -> mapperService.parseMapping("type1", MergeReason.MAPPING_UPDATE, new CompressedXContent(mapping))
);
assertThat(e.getMessage(), containsString("field name cannot be an empty string"));
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,7 @@ private static ClusterState applyRequest(
final CompressedXContent mappingUpdateSource = request.source();
final Metadata metadata = currentState.metadata();
final List<IndexMetadata> updateList = new ArrayList<>();
MergeReason reason = request.autoUpdate() ? MergeReason.MAPPING_AUTO_UPDATE : MergeReason.MAPPING_UPDATE;
for (Index index : request.indices()) {
MapperService mapperService = indexMapperServices.get(index);
// IMPORTANT: always get the metadata from the state since it get's batched
Expand All @@ -147,13 +148,8 @@ private static ClusterState applyRequest(
updateList.add(indexMetadata);
// try and parse it (no need to add it here) so we can bail early in case of parsing exception
// first, simulate: just call merge and ignore the result
Mapping mapping = mapperService.parseMapping(MapperService.SINGLE_MAPPING_NAME, mappingUpdateSource);
MapperService.mergeMappings(
mapperService.documentMapper(),
mapping,
request.autoUpdate() ? MergeReason.MAPPING_AUTO_UPDATE : MergeReason.MAPPING_UPDATE,
mapperService.getIndexSettings()
);
Mapping mapping = mapperService.parseMapping(MapperService.SINGLE_MAPPING_NAME, reason, mappingUpdateSource);
MapperService.mergeMappings(mapperService.documentMapper(), mapping, reason, mapperService.getIndexSettings());
}
Metadata.Builder builder = Metadata.builder(metadata);
boolean updated = false;
Expand All @@ -169,11 +165,7 @@ private static ClusterState applyRequest(
if (existingMapper != null) {
existingSource = existingMapper.mappingSource();
}
DocumentMapper mergedMapper = mapperService.merge(
MapperService.SINGLE_MAPPING_NAME,
mappingUpdateSource,
request.autoUpdate() ? MergeReason.MAPPING_AUTO_UPDATE : MergeReason.MAPPING_UPDATE
);
DocumentMapper mergedMapper = mapperService.merge(MapperService.SINGLE_MAPPING_NAME, mappingUpdateSource, reason);
CompressedXContent updatedSource = mergedMapper.mappingSource();

if (existingSource != null) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.xcontent.FilterXContentParserWrapper;
import org.elasticsearch.xcontent.FlatteningXContentParser;
import org.elasticsearch.xcontent.XContentParser;
Expand Down Expand Up @@ -613,7 +614,14 @@ public final MapperBuilderContext createDynamicMapperBuilderContext() {
if (objectMapper instanceof PassThroughObjectMapper passThroughObjectMapper) {
containsDimensions = passThroughObjectMapper.containsDimensions();
}
return new MapperBuilderContext(p, mappingLookup().isSourceSynthetic(), false, containsDimensions, dynamic);
return new MapperBuilderContext(
p,
mappingLookup().isSourceSynthetic(),
false,
containsDimensions,
dynamic,
MergeReason.MAPPING_UPDATE
);
}

public abstract XContentParser parser();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@

import org.elasticsearch.common.Strings;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.index.mapper.MapperService.MergeReason;

import java.util.Objects;

Expand All @@ -22,32 +23,39 @@ public class MapperBuilderContext {
* The root context, to be used when building a tree of mappers
*/
public static MapperBuilderContext root(boolean isSourceSynthetic, boolean isDataStream) {
    // Delegate to the MergeReason-aware overload; callers that do not specify a
    // reason get the default MAPPING_UPDATE behavior.
    return root(isSourceSynthetic, isDataStream, MergeReason.MAPPING_UPDATE);
}

public static MapperBuilderContext root(boolean isSourceSynthetic, boolean isDataStream, MergeReason mergeReason) {
return new MapperBuilderContext(null, isSourceSynthetic, isDataStream, false, ObjectMapper.Defaults.DYNAMIC, mergeReason);
felixbarny marked this conversation as resolved.
Show resolved Hide resolved
}

private final String path;
private final boolean isSourceSynthetic;
private final boolean isDataStream;
private final boolean parentObjectContainsDimensions;
private final ObjectMapper.Dynamic dynamic;
private final MergeReason mergeReason;

MapperBuilderContext(String path) {
    // Convenience constructor with defaults: no synthetic source, not a data stream,
    // no dimension-bearing parent, default dynamic handling, MAPPING_UPDATE merge reason.
    this(path, false, false, false, ObjectMapper.Defaults.DYNAMIC, MergeReason.MAPPING_UPDATE);
}

MapperBuilderContext(
    String path,
    boolean isSourceSynthetic,
    boolean isDataStream,
    boolean parentObjectContainsDimensions,
    ObjectMapper.Dynamic dynamic,
    MergeReason mergeReason
) {
    // dynamic is dereferenced by getDynamic()/createChildContext callers; fail fast here.
    Objects.requireNonNull(dynamic, "dynamic must not be null");
    this.path = path;
    this.isSourceSynthetic = isSourceSynthetic;
    this.isDataStream = isDataStream;
    this.parentObjectContainsDimensions = parentObjectContainsDimensions;
    this.dynamic = dynamic;
    this.mergeReason = mergeReason;
}

/**
Expand Down Expand Up @@ -79,7 +87,8 @@ public MapperBuilderContext createChildContext(
this.isSourceSynthetic,
this.isDataStream,
parentObjectContainsDimensions,
getDynamic(dynamic)
getDynamic(dynamic),
this.mergeReason
);
}

Expand Down Expand Up @@ -121,4 +130,12 @@ public boolean parentObjectContainsDimensions() {
public ObjectMapper.Dynamic getDynamic() {
    // Effective dynamic setting for mappers built under this context; the
    // constructor enforces non-null, so callers never receive null here.
    return this.dynamic;
}

/**
* The merge reason to use when merging mappers while building the mapper.
* See also {@link ObjectMapper.Builder#buildMappers(MapperBuilderContext)}.
*/
felixbarny marked this conversation as resolved.
Show resolved Hide resolved
public MergeReason getMergeReason() {
return mergeReason;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@

package org.elasticsearch.index.mapper;

import org.elasticsearch.index.mapper.MapperService.MergeReason;

/**
* Holds context used when merging mappings.
* As the merge process also involves building merged {@link Mapper.Builder}s,
Expand All @@ -23,11 +25,18 @@ private MapperMergeContext(MapperBuilderContext mapperBuilderContext, NewFieldsB
this.newFieldsBudget = newFieldsBudget;
}

static MapperMergeContext root(boolean isSourceSynthetic, boolean isDataStream, long newFieldsBudget) {
    // Overload without an explicit reason: default to MAPPING_UPDATE.
    return MapperMergeContext.root(isSourceSynthetic, isDataStream, MergeReason.MAPPING_UPDATE, newFieldsBudget);
}

/**
* The root context, to be used when merging a tree of mappers
*/
public static MapperMergeContext root(boolean isSourceSynthetic, boolean isDataStream, long newFieldsBudget) {
return new MapperMergeContext(MapperBuilderContext.root(isSourceSynthetic, isDataStream), NewFieldsBudget.of(newFieldsBudget));
public static MapperMergeContext root(boolean isSourceSynthetic, boolean isDataStream, MergeReason mergeReason, long newFieldsBudget) {
felixbarny marked this conversation as resolved.
Show resolved Hide resolved
return new MapperMergeContext(
MapperBuilderContext.root(isSourceSynthetic, isDataStream, mergeReason),
NewFieldsBudget.of(newFieldsBudget)
);
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -310,7 +310,7 @@ public void updateMapping(final IndexMetadata currentIndexMetadata, final IndexM
if (newMappingMetadata != null) {
String type = newMappingMetadata.type();
CompressedXContent incomingMappingSource = newMappingMetadata.source();
Mapping incomingMapping = parseMapping(type, incomingMappingSource);
Mapping incomingMapping = parseMapping(type, MergeReason.MAPPING_UPDATE, incomingMappingSource);
DocumentMapper previousMapper;
synchronized (this) {
previousMapper = this.mapper;
Expand Down Expand Up @@ -366,7 +366,7 @@ boolean assertNoUpdateRequired(final IndexMetadata newIndexMetadata) {
// that the incoming mappings are the same as the current ones: we need to
// parse the incoming mappings into a DocumentMapper and check that its
// serialization is the same as the existing mapper
Mapping newMapping = parseMapping(mapping.type(), mapping.source());
Mapping newMapping = parseMapping(mapping.type(), MergeReason.MAPPING_UPDATE, mapping.source());
final CompressedXContent currentSource = this.mapper.mappingSource();
final CompressedXContent newSource = newMapping.toCompressedXContent();
if (Objects.equals(currentSource, newSource) == false
Expand Down Expand Up @@ -533,7 +533,7 @@ public DocumentMapper merge(String type, CompressedXContent mappingSource, Merge
}

private synchronized DocumentMapper doMerge(String type, MergeReason reason, Map<String, Object> mappingSourceAsMap) {
Mapping incomingMapping = parseMapping(type, mappingSourceAsMap);
Mapping incomingMapping = parseMapping(type, reason, mappingSourceAsMap);
Mapping mapping = mergeMappings(this.mapper, incomingMapping, reason, this.indexSettings);
// TODO: In many cases the source here is equal to mappingSource so we need not serialize again.
// We should identify these cases reliably and save expensive serialization here
Expand All @@ -542,7 +542,7 @@ private synchronized DocumentMapper doMerge(String type, MergeReason reason, Map
return newMapper;
}
this.mapper = newMapper;
assert assertSerialization(newMapper);
assert assertSerialization(newMapper, reason);
return newMapper;
}

Expand All @@ -552,9 +552,9 @@ private DocumentMapper newDocumentMapper(Mapping mapping, MergeReason reason, Co
return newMapper;
}

public Mapping parseMapping(String mappingType, CompressedXContent mappingSource) {
public Mapping parseMapping(String mappingType, MergeReason reason, CompressedXContent mappingSource) {
try {
return mappingParser.parse(mappingType, mappingSource);
return mappingParser.parse(mappingType, reason, mappingSource);
} catch (Exception e) {
throw new MapperParsingException("Failed to parse mapping: {}", e, e.getMessage());
}
Expand All @@ -564,12 +564,13 @@ public Mapping parseMapping(String mappingType, CompressedXContent mappingSource
* A method to parse mapping from a source in a map form.
*
* @param mappingType the mapping type
* @param reason the merge reason to use when merging mappers while building the mapper
* @param mappingSource mapping source already converted to a map form, but not yet processed otherwise
* @return a parsed mapping
*/
public Mapping parseMapping(String mappingType, Map<String, Object> mappingSource) {
public Mapping parseMapping(String mappingType, MergeReason reason, Map<String, Object> mappingSource) {
try {
return mappingParser.parse(mappingType, mappingSource);
return mappingParser.parse(mappingType, reason, mappingSource);
} catch (Exception e) {
throw new MapperParsingException("Failed to parse mapping: {}", e, e.getMessage());
}
Expand Down Expand Up @@ -619,10 +620,10 @@ static Mapping mergeMappings(DocumentMapper currentMapper, Mapping incomingMappi
return newMapping;
}

private boolean assertSerialization(DocumentMapper mapper) {
private boolean assertSerialization(DocumentMapper mapper, MergeReason reason) {
// capture the source now, it may change due to concurrent parsing
final CompressedXContent mappingSource = mapper.mappingSource();
Mapping newMapping = parseMapping(mapper.type(), mappingSource);
Mapping newMapping = parseMapping(mapper.type(), reason, mappingSource);
if (newMapping.toCompressedXContent().equals(mappingSource) == false) {
throw new AssertionError(
"Mapping serialization result is different from source. \n--> Source ["
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,7 @@ public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() {
* @return the resulting merged mapping.
*/
Mapping merge(Mapping mergeWith, MergeReason reason, long newFieldsBudget) {
MapperMergeContext mergeContext = MapperMergeContext.root(isSourceSynthetic(), false, newFieldsBudget);
MapperMergeContext mergeContext = MapperMergeContext.root(isSourceSynthetic(), false, reason, newFieldsBudget);
RootObjectMapper mergedRoot = root.merge(mergeWith.root, reason, mergeContext);

// When merging metadata fields as part of applying an index template, new field definitions
Expand Down Expand Up @@ -176,7 +176,7 @@ Mapping merge(Mapping mergeWith, MergeReason reason, long newFieldsBudget) {
* @param fieldsBudget the maximum number of fields this mapping may have
*/
public Mapping withFieldsBudget(long fieldsBudget) {
MapperMergeContext mergeContext = MapperMergeContext.root(isSourceSynthetic(), false, fieldsBudget);
MapperMergeContext mergeContext = MapperMergeContext.root(isSourceSynthetic(), false, MergeReason.MAPPING_RECOVERY, fieldsBudget);
// get a copy of the root mapper, without any fields
RootObjectMapper shallowRoot = root.withoutMappers();
// calling merge on the shallow root to ensure we're only adding as many fields as allowed by the fields budget
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.index.IndexMode;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.xcontent.XContentType;

import java.util.Collections;
Expand Down Expand Up @@ -79,20 +80,25 @@ static Map<String, Object> convertToMap(CompressedXContent source) {
}

Mapping parse(@Nullable String type, CompressedXContent source) throws MapperParsingException {
    // No merge reason supplied: fall back to MAPPING_UPDATE.
    return this.parse(type, MergeReason.MAPPING_UPDATE, source);
}

Mapping parse(@Nullable String type, MergeReason reason, CompressedXContent source) throws MapperParsingException {
    // Convert the compressed source to a map once, then delegate to the map-based parser.
    Map<String, Object> mapping = convertToMap(source);
    return parse(type, reason, mapping);
}

/**
* A method to parse mapping from a source in a map form.
*
* @param type the mapping type
* @param reason the merge reason to use when merging mappers while building the mapper
* @param mappingSource mapping source already converted to a map form, but not yet processed otherwise
* @return a parsed mapping
* @throws MapperParsingException in case of parsing error
*/
@SuppressWarnings("unchecked")
Mapping parse(@Nullable String type, Map<String, Object> mappingSource) throws MapperParsingException {
Mapping parse(@Nullable String type, MergeReason reason, Map<String, Object> mappingSource) throws MapperParsingException {
if (mappingSource.isEmpty()) {
if (type == null) {
throw new MapperParsingException("malformed mapping, no type name found");
Expand Down Expand Up @@ -178,7 +184,7 @@ Mapping parse(@Nullable String type, Map<String, Object> mappingSource) throws M
}

return new Mapping(
rootObjectMapper.build(MapperBuilderContext.root(isSourceSynthetic, isDataStream)),
rootObjectMapper.build(MapperBuilderContext.root(isSourceSynthetic, isDataStream, reason)),
metadataMappers.values().toArray(new MetadataFieldMapper[0]),
meta
);
Expand Down
Loading