Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Better validation of mapping JSON #7534

Merged
merged 1 commit into from
Nov 12, 2014
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,8 @@
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedString;
import org.elasticsearch.common.geo.ShapesAvailability;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
Expand All @@ -37,10 +39,37 @@
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatService;
import org.elasticsearch.index.codec.postingsformat.PostingsFormatService;
import org.elasticsearch.index.mapper.core.*;
import org.elasticsearch.index.mapper.core.BinaryFieldMapper;
import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
import org.elasticsearch.index.mapper.core.ByteFieldMapper;
import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
import org.elasticsearch.index.mapper.core.FloatFieldMapper;
import org.elasticsearch.index.mapper.core.IntegerFieldMapper;
import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.Murmur3FieldMapper;
import org.elasticsearch.index.mapper.core.ShortFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.TokenCountFieldMapper;
import org.elasticsearch.index.mapper.core.TypeParsers;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper;
import org.elasticsearch.index.mapper.internal.*;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.mapper.internal.AnalyzerMapper;
import org.elasticsearch.index.mapper.internal.BoostFieldMapper;
import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;
import org.elasticsearch.index.mapper.internal.IdFieldMapper;
import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.mapper.internal.RoutingFieldMapper;
import org.elasticsearch.index.mapper.internal.SizeFieldMapper;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.internal.TTLFieldMapper;
import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.mapper.internal.VersionFieldMapper;
import org.elasticsearch.index.mapper.ip.IpFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
Expand All @@ -63,6 +92,7 @@
public class DocumentMapperParser extends AbstractIndexComponent {

final AnalysisService analysisService;
private static final ESLogger logger = Loggers.getLogger(DocumentMapperParser.class);
private final PostingsFormatService postingsFormatService;
private final DocValuesFormatService docValuesFormatService;
private final SimilarityLookupService similarityLookupService;
Expand Down Expand Up @@ -248,13 +278,13 @@ private DocumentMapper parse(String type, Map<String, Object> mapping, String de
} else if ("transform".equals(fieldName)) {
iterator.remove();
if (fieldNode instanceof Map) {
parseTransform(docBuilder, (Map<String, Object>) fieldNode);
parseTransform(docBuilder, (Map<String, Object>) fieldNode, parserContext.indexVersionCreated());
} else if (fieldNode instanceof List) {
for (Object transformItem: (List)fieldNode) {
if (!(transformItem instanceof Map)) {
throw new MapperParsingException("Elements of transform list must be objects but one was: " + fieldNode);
}
parseTransform(docBuilder, (Map<String, Object>) transformItem);
parseTransform(docBuilder, (Map<String, Object>) transformItem, parserContext.indexVersionCreated());
}
} else {
throw new MapperParsingException("Transform must be an object or an array but was: " + fieldNode);
Expand All @@ -263,7 +293,10 @@ private DocumentMapper parse(String type, Map<String, Object> mapping, String de
Mapper.TypeParser typeParser = rootTypeParsers.get(fieldName);
if (typeParser != null) {
iterator.remove();
docBuilder.put(typeParser.parse(fieldName, (Map<String, Object>) fieldNode, parserContext));
Map<String, Object> fieldNodeMap = (Map<String, Object>) fieldNode;
docBuilder.put(typeParser.parse(fieldName, fieldNodeMap, parserContext));
fieldNodeMap.remove("type");
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Only _field_names actually builds the content type, and I wonder if it would not be better to remove the "type" parameter in the _field_names parser instead of doing it here for all root mappers. Otherwise it might look like you can define a type for different root mappers:

PUT testidx
{
  "mappings": {
    "doc": {
      "_analyzer": {
        "type": "geo_point",
        "path": "custom_path"
      }
    }
  }
}

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The type parameter is used in a few other places, including the nested mapping. My thinking was that it's the DocumentMapperParser that actually parses the type (since it uses it to work out the correct type parser to use), so it should remove it. Otherwise, every implementation of the type mappers would need to remove it themselves, since if it's not removed from the map, the map won't be empty and we will throw an exception saying 'type' has not been parsed.

Hope that makes sense?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Could you extract this "type" string to a constant? (and replace other references to this string to a reference to the constant)

checkNoRemainingFields(fieldName, fieldNodeMap, parserContext.indexVersionCreated());
}
}
}
Expand All @@ -274,9 +307,8 @@ private DocumentMapper parse(String type, Map<String, Object> mapping, String de
}
docBuilder.meta(attributes);

if (!mapping.isEmpty()) {
throw new MapperParsingException("Root type mapping not empty after parsing! Remaining fields: " + getRemainingFields(mapping));
}
checkNoRemainingFields(mapping, parserContext.indexVersionCreated(), "Root mapping definition has unsupported parameters: ");

if (!docBuilder.hasIndexAnalyzer()) {
docBuilder.indexAnalyzer(analysisService.defaultIndexAnalyzer());
}
Expand All @@ -293,16 +325,30 @@ private DocumentMapper parse(String type, Map<String, Object> mapping, String de
return documentMapper;
}

private String getRemainingFields(Map<String, ?> map) {
/**
 * Convenience overload of {@link #checkNoRemainingFields(Map, Version, String)} that
 * builds the standard "unsupported parameters" message for a named field.
 *
 * @param fieldName           name of the field whose mapping definition was just parsed
 * @param fieldNodeMap        the mapping fragment after parsing; expected to be empty
 * @param indexVersionCreated the version the owning index was created with
 */
public static void checkNoRemainingFields(String fieldName, Map<String, Object> fieldNodeMap, Version indexVersionCreated) {
    String message = "Mapping definition for [" + fieldName + "] has unsupported parameters: ";
    checkNoRemainingFields(fieldNodeMap, indexVersionCreated, message);
}

public static void checkNoRemainingFields(Map<String, Object> fieldNodeMap, Version indexVersionCreated, String message) {
if (!fieldNodeMap.isEmpty()) {
if (indexVersionCreated.onOrAfter(Version.V_2_0_0)) {
throw new MapperParsingException(message + getRemainingFields(fieldNodeMap));
} else {
logger.debug(message + "{}", getRemainingFields(fieldNodeMap));
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

or just message + getRemainingFields(fieldNodeMap)?

}
}
}

/**
 * Renders the entries still present in the given map as " [key : value]" pairs, for use in
 * error and log messages about unparsed mapping parameters.
 *
 * @param map the leftover mapping parameters
 * @return a string listing each remaining key/value pair
 */
private static String getRemainingFields(Map<String, ?> map) {
    StringBuilder remainingFields = new StringBuilder();
    // Iterate entries directly instead of keySet() + get(key): one lookup per entry.
    for (Map.Entry<String, ?> entry : map.entrySet()) {
        // append(Object) uses String.valueOf semantics, so a null value prints as
        // "null" instead of throwing NPE (which an explicit .toString() would).
        remainingFields.append(" [").append(entry.getKey()).append(" : ").append(entry.getValue()).append("]");
    }
    return remainingFields.toString();
}

@SuppressWarnings("unchecked")
private void parseTransform(DocumentMapper.Builder docBuilder, Map<String, Object> transformConfig) {
private void parseTransform(DocumentMapper.Builder docBuilder, Map<String, Object> transformConfig, Version indexVersionCreated) {
ScriptParameterParser scriptParameterParser = new ScriptParameterParser();
scriptParameterParser.parseConfig(transformConfig, true);

Expand All @@ -319,9 +365,7 @@ private void parseTransform(DocumentMapper.Builder docBuilder, Map<String, Objec
Map<String, Object> params = (Map<String, Object>)transformConfig.remove("params");
docBuilder.transform(scriptService, script, scriptType, scriptLang, params);
}
if (!transformConfig.isEmpty()) {
throw new MapperParsingException("Unrecognized parameter in transform config: " + getRemainingFields(transformConfig));
}
checkNoRemainingFields(transformConfig, indexVersionCreated, "Transform config has unsupported parameters: ");
}

private Tuple<String, Map<String, Object>> extractMapping(String type, String source) throws MapperParsingException {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@
import org.elasticsearch.index.mapper.*;

import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

Expand Down Expand Up @@ -104,11 +105,13 @@ public static class TypeParser implements Mapper.TypeParser {
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
BinaryFieldMapper.Builder builder = binaryField(name);
parseField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("compress") && fieldNode != null) {
builder.compress(nodeBooleanValue(fieldNode));
iterator.remove();
} else if (fieldName.equals("compress_threshold") && fieldNode != null) {
if (fieldNode instanceof Number) {
builder.compressThreshold(((Number) fieldNode).longValue());
Expand All @@ -117,6 +120,7 @@ public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext
builder.compressThreshold(ByteSizeValue.parseBytesSizeValue(fieldNode.toString()).bytes());
builder.compress(true);
}
iterator.remove();
}
}
return builder;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@
import org.elasticsearch.index.similarity.SimilarityProvider;

import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

Expand Down Expand Up @@ -107,14 +108,16 @@ public static class TypeParser implements Mapper.TypeParser {
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
BooleanFieldMapper.Builder builder = booleanField(name);
parseField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
if (propNode == null) {
throw new MapperParsingException("Property [null_value] cannot be null.");
}
builder.nullValue(nodeBooleanValue(propNode));
iterator.remove();
}
}
return builder;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@
import org.elasticsearch.index.similarity.SimilarityProvider;

import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

Expand Down Expand Up @@ -105,14 +106,16 @@ public static class TypeParser implements Mapper.TypeParser {
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
ByteFieldMapper.Builder builder = byteField(name);
parseNumberField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
if (propNode == null) {
throw new MapperParsingException("Property [null_value] cannot be null.");
}
builder.nullValue(nodeByteValue(propNode));
iterator.remove();
}
}
return builder;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,13 +30,20 @@
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.*;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.NumberType;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperException;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.search.suggest.completion.AnalyzingCompletionLookupProvider;
import org.elasticsearch.search.suggest.completion.CompletionPostingsFormatProvider;
Expand All @@ -46,7 +53,12 @@
import org.elasticsearch.search.suggest.context.ContextMapping.ContextConfig;

import java.io.IOException;
import java.util.*;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;

import static org.elasticsearch.index.mapper.MapperBuilders.completionField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
Expand Down Expand Up @@ -147,7 +159,8 @@ public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
CompletionFieldMapper.Builder builder = completionField(name);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = entry.getKey();
Object fieldNode = entry.getValue();
if (fieldName.equals("type")) {
Expand All @@ -157,24 +170,32 @@ public static class TypeParser implements Mapper.TypeParser {
NamedAnalyzer analyzer = getNamedAnalyzer(parserContext, fieldNode.toString());
builder.indexAnalyzer(analyzer);
builder.searchAnalyzer(analyzer);
iterator.remove();
} else if (Fields.INDEX_ANALYZER.match(fieldName)) {
builder.indexAnalyzer(getNamedAnalyzer(parserContext, fieldNode.toString()));
iterator.remove();
} else if (Fields.SEARCH_ANALYZER.match(fieldName)) {
builder.searchAnalyzer(getNamedAnalyzer(parserContext, fieldNode.toString()));
iterator.remove();
} else if (fieldName.equals(Fields.PAYLOADS)) {
builder.payloads(Boolean.parseBoolean(fieldNode.toString()));
iterator.remove();
} else if (Fields.PRESERVE_SEPARATORS.match(fieldName)) {
builder.preserveSeparators(Boolean.parseBoolean(fieldNode.toString()));
iterator.remove();
} else if (Fields.PRESERVE_POSITION_INCREMENTS.match(fieldName)) {
builder.preservePositionIncrements(Boolean.parseBoolean(fieldNode.toString()));
iterator.remove();
} else if (Fields.MAX_INPUT_LENGTH.match(fieldName)) {
builder.maxInputLength(Integer.parseInt(fieldNode.toString()));
iterator.remove();
} else if ("fields".equals(fieldName) || "path".equals(fieldName)) {
parseMultiField(builder, name, parserContext, fieldName, fieldNode);
if (parseMultiField(builder, name, parserContext, fieldName, fieldNode)) {
iterator.remove();
}
} else if (fieldName.equals(Fields.CONTEXT)) {
builder.contextMapping(ContextBuilder.loadMappings(fieldNode));
} else {
throw new MapperParsingException("Unknown field [" + fieldName + "]");
builder.contextMapping(ContextBuilder.loadMappings(fieldNode, parserContext.indexVersionCreated()));
iterator.remove();
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@
import org.joda.time.DateTimeZone;

import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
Expand Down Expand Up @@ -149,20 +150,25 @@ public static class TypeParser implements Mapper.TypeParser {
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
DateFieldMapper.Builder builder = dateField(name);
parseNumberField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
if (propNode == null) {
throw new MapperParsingException("Property [null_value] cannot be null.");
}
builder.nullValue(propNode.toString());
iterator.remove();
} else if (propName.equals("format")) {
builder.dateTimeFormatter(parseDateTimeFormatter(propNode));
iterator.remove();
} else if (propName.equals("numeric_resolution")) {
builder.timeUnit(TimeUnit.valueOf(propNode.toString().toUpperCase(Locale.ROOT)));
iterator.remove();
} else if (propName.equals("locale")) {
builder.locale(LocaleUtils.parse(propNode.toString()));
iterator.remove();
}
}
return builder;
Expand Down
Loading