From d84c643f5880a88d2e5c38b487123161f3e01fe0 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 1 Apr 2016 11:07:35 +0200 Subject: [PATCH] Use the new points API to index numeric fields. #17746 This makes all numeric fields including `date`, `ip` and `token_count` use points instead of the inverted index as a lookup structure. This is expected to perform worse for exact queries, but faster for range queries. It also requires less storage. Notes about how the change works: - Numeric mappers have been split into a legacy version that is essentially the current mapper, and a new version that uses points, eg. LegacyDateFieldMapper and DateFieldMapper. - Since new and old fields have the same names, the decision about which one to use is made based on the index creation version. - If you try to force using a legacy field on a new index or a field that uses points on an old index, you will get an exception. - IP addresses now support IPv6 via Lucene's InetAddressPoint and store them in SORTED_SET doc values using the same encoding (fixed length of 16 bytes and sortable). - The internal MappedFieldType that is stored by the new mappers does not have any of the points-related properties set. Instead, it keeps setting the index options when parsing the `index` property of mappings and does `if (fieldType.indexOptions() != IndexOptions.NONE) { // add point field }` when parsing documents. Known issues that won't fix: - You can't use numeric fields in significant terms aggregations anymore since this requires document frequencies, which points do not record. - Term queries on numeric fields will now return constant scores instead of giving better scores to the rare values. Known issues that we could work around (in follow-up PRs, this one is too large already): - Range queries on `ip` addresses only work if both the lower and upper bounds are inclusive (exclusive bounds are not exposed in Lucene). 
We could either decide to implement it, or drop range support entirely and tell users to query subnets using the CIDR notation instead. - Since IP addresses now use a different representation for doc values, aggregations will fail when running a terms aggregation on an ip field on a list of indices that contains both pre-5.0 and 5.0 indices. - The ip range aggregation does not work on the new ip field. We need to either implement range aggs for SORTED_SET doc values or drop support for ip ranges and tell users to use filters instead. #17700 Closes #16751 Closes #17007 Closes #11513 --- .../resources/checkstyle_suppressions.xml | 18 +- .../resources/forbidden/es-all-signatures.txt | 3 + .../classic/MapperQueryParser.java | 12 +- .../action/fieldstats/FieldStats.java | 168 ++- .../index/mapper/CustomDocValuesField.java | 81 ++ .../index/mapper/DocumentParser.java | 103 +- .../index/mapper/MappedFieldType.java | 11 +- .../index/mapper/core/BinaryFieldMapper.java | 3 +- .../index/mapper/core/DateFieldMapper.java | 528 +++++----- .../index/mapper/core/KeywordFieldMapper.java | 17 +- ...Mapper.java => LegacyByteFieldMapper.java} | 19 +- .../mapper/core/LegacyDateFieldMapper.java | 617 +++++++++++ ...pper.java => LegacyDoubleFieldMapper.java} | 19 +- ...apper.java => LegacyFloatFieldMapper.java} | 19 +- ...per.java => LegacyIntegerFieldMapper.java} | 19 +- ...Mapper.java => LegacyLongFieldMapper.java} | 19 +- .../mapper/core/LegacyNumberFieldMapper.java | 366 +++++++ ...apper.java => LegacyShortFieldMapper.java} | 19 +- .../core/LegacyTokenCountFieldMapper.java | 202 ++++ .../index/mapper/core/NumberFieldMapper.java | 961 +++++++++++++----- .../index/mapper/core/StringFieldMapper.java | 15 +- .../index/mapper/core/TextFieldMapper.java | 15 + .../mapper/core/TokenCountFieldMapper.java | 68 +- .../index/mapper/core/TypeParsers.java | 3 +- .../mapper/geo/BaseGeoPointFieldMapper.java | 40 +- .../index/mapper/geo/GeoPointFieldMapper.java | 8 +- 
.../mapper/geo/GeoPointFieldMapperLegacy.java | 12 +- .../index/mapper/internal/TTLFieldMapper.java | 8 +- .../mapper/internal/TimestampFieldMapper.java | 14 +- .../index/mapper/ip/IpFieldMapper.java | 403 ++++---- .../index/mapper/ip/LegacyIpFieldMapper.java | 361 +++++++ .../index/mapper/object/RootObjectMapper.java | 2 +- .../index/query/RangeQueryBuilder.java | 10 +- .../functionscore/DecayFunctionBuilder.java | 26 +- .../elasticsearch/indices/IndicesModule.java | 17 +- .../elasticsearch/search/DocValueFormat.java | 14 +- .../aggregations/support/ValueType.java | 2 +- .../ValuesSourceAggregatorBuilder.java | 3 +- .../fieldstats/FieldStatsTests.java | 5 +- .../fielddata/AbstractFieldDataTestCase.java | 24 +- .../fielddata/IndexFieldDataServiceTests.java | 31 +- .../index/mapper/DynamicMappingTests.java | 90 +- .../index/mapper/MapperServiceTests.java | 4 +- .../mapper/boost/CustomBoostMappingTests.java | 7 - .../mapper/copyto/CopyToMapperTests.java | 4 +- .../mapper/core/DateFieldMapperTests.java | 254 +++++ .../index/mapper/core/DateFieldTypeTests.java | 34 +- ...sts.java => LegacyByteFieldTypeTests.java} | 4 +- .../mapper/core/LegacyDateFieldTypeTests.java | 150 +++ ...s.java => LegacyDoubleFieldTypeTests.java} | 6 +- ...ts.java => LegacyFloatFieldTypeTests.java} | 6 +- ....java => LegacyIntegerFieldTypeTests.java} | 6 +- ...sts.java => LegacyLongFieldTypeTests.java} | 6 +- ...ts.java => LegacyShortFieldTypeTests.java} | 4 +- .../LegacyTokenCountFieldMapperTests.java | 109 ++ .../mapper/core/NumberFieldMapperTests.java | 319 ++++++ .../mapper/core/NumberFieldTypeTests.java | 52 + .../core/TokenCountFieldMapperTests.java | 7 +- ...Tests.java => LegacyDateMappingTests.java} | 58 +- .../GenericStoreDynamicTemplateTests.java | 8 +- .../mapper/geo/GeoPointFieldMapperTests.java | 112 +- .../mapper/geo/GeoPointFieldTypeTests.java | 4 +- .../internal/TimestampFieldTypeTests.java | 6 +- .../index/mapper/ip/IpFieldMapperTests.java | 220 ++++ 
.../index/mapper/ip/IpFieldTypeTests.java | 75 ++ ...ngTests.java => LegacyIpMappingTests.java} | 30 +- .../lucene/StoredNumericValuesTests.java | 19 +- .../mapper/multifield/MultiFieldTests.java | 2 - ...ericTests.java => LegacyNumericTests.java} | 156 +-- .../mapper/update/UpdateMappingTests.java | 15 +- .../index/query/AbstractQueryTestCase.java | 11 +- .../query/AbstractTermQueryTestCase.java | 44 - .../index/query/FuzzyQueryBuilderTests.java | 17 +- .../MatchPhrasePrefixQueryBuilderTests.java | 4 +- .../query/MatchPhraseQueryBuilderTests.java | 4 +- .../index/query/MatchQueryBuilderTests.java | 14 +- .../query/MultiMatchQueryBuilderTests.java | 4 +- .../query/QueryStringQueryBuilderTests.java | 12 +- .../index/query/RandomQueryBuilder.java | 15 +- .../index/query/RangeQueryBuilderTests.java | 168 ++- .../query/SpanTermQueryBuilderTests.java | 24 + .../index/query/TermQueryBuilderTests.java | 68 +- .../indices/IndicesOptionsIntegrationIT.java | 2 - .../percolator/PercolatorIT.java | 2 +- .../aggregations/bucket/DateHistogramIT.java | 4 +- .../bucket/DateHistogramOffsetIT.java | 4 +- .../aggregations/bucket/IPv4RangeTests.java | 6 +- .../search/aggregations/bucket/IpTermsIT.java | 54 + .../aggregations/bucket/ShardReduceIT.java | 1 + .../bucket/SignificantTermsIT.java | 18 +- .../SignificantTermsSignificanceScoreIT.java | 15 +- .../bucket/TermsShardMinDocCountIT.java | 2 +- .../pipeline/DateDerivativeIT.java | 2 +- .../search/simple/SimpleSearchIT.java | 22 +- .../search/sort/AbstractSortTestCase.java | 2 +- .../SharedSignificantTermsTestMethods.java | 2 +- docs/reference/mapping/types/date.asciidoc | 6 - docs/reference/mapping/types/ip.asciidoc | 50 +- docs/reference/mapping/types/numeric.asciidoc | 6 - docs/reference/migration/migrate_5_0.asciidoc | 4 + .../migrate_5_0/aggregations.asciidoc | 13 + .../migration/migrate_5_0/mapping.asciidoc | 40 + .../ExpressionScriptEngineService.java | 4 +- .../messy/tests/IPv4RangeTests.java | 92 +- 
.../mapper/attachments/AttachmentMapper.java | 5 +- .../MultifieldAttachmentMapperTests.java | 10 - .../mapper/murmur3/Murmur3FieldMapper.java | 68 +- .../index/mapper/size/SizeFieldMapper.java | 40 +- .../index/mapper/size/SizeMappingTests.java | 12 +- 109 files changed, 5348 insertions(+), 1575 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/index/mapper/CustomDocValuesField.java rename core/src/main/java/org/elasticsearch/index/mapper/core/{ByteFieldMapper.java => LegacyByteFieldMapper.java} (93%) create mode 100644 core/src/main/java/org/elasticsearch/index/mapper/core/LegacyDateFieldMapper.java rename core/src/main/java/org/elasticsearch/index/mapper/core/{DoubleFieldMapper.java => LegacyDoubleFieldMapper.java} (93%) rename core/src/main/java/org/elasticsearch/index/mapper/core/{FloatFieldMapper.java => LegacyFloatFieldMapper.java} (93%) rename core/src/main/java/org/elasticsearch/index/mapper/core/{IntegerFieldMapper.java => LegacyIntegerFieldMapper.java} (93%) rename core/src/main/java/org/elasticsearch/index/mapper/core/{LongFieldMapper.java => LegacyLongFieldMapper.java} (93%) create mode 100644 core/src/main/java/org/elasticsearch/index/mapper/core/LegacyNumberFieldMapper.java rename core/src/main/java/org/elasticsearch/index/mapper/core/{ShortFieldMapper.java => LegacyShortFieldMapper.java} (93%) create mode 100644 core/src/main/java/org/elasticsearch/index/mapper/core/LegacyTokenCountFieldMapper.java create mode 100644 core/src/main/java/org/elasticsearch/index/mapper/ip/LegacyIpFieldMapper.java create mode 100644 core/src/test/java/org/elasticsearch/index/mapper/core/DateFieldMapperTests.java rename core/src/test/java/org/elasticsearch/index/mapper/core/{ByteFieldTypeTests.java => LegacyByteFieldTypeTests.java} (91%) create mode 100644 core/src/test/java/org/elasticsearch/index/mapper/core/LegacyDateFieldTypeTests.java rename core/src/test/java/org/elasticsearch/index/mapper/core/{DoubleFieldTypeTests.java => 
LegacyDoubleFieldTypeTests.java} (89%) rename core/src/test/java/org/elasticsearch/index/mapper/core/{FloatFieldTypeTests.java => LegacyFloatFieldTypeTests.java} (89%) rename core/src/test/java/org/elasticsearch/index/mapper/core/{IntegerFieldTypeTests.java => LegacyIntegerFieldTypeTests.java} (88%) rename core/src/test/java/org/elasticsearch/index/mapper/core/{LongFieldTypeTests.java => LegacyLongFieldTypeTests.java} (89%) rename core/src/test/java/org/elasticsearch/index/mapper/core/{ShortFieldTypeTests.java => LegacyShortFieldTypeTests.java} (91%) create mode 100644 core/src/test/java/org/elasticsearch/index/mapper/core/LegacyTokenCountFieldMapperTests.java create mode 100644 core/src/test/java/org/elasticsearch/index/mapper/core/NumberFieldMapperTests.java create mode 100644 core/src/test/java/org/elasticsearch/index/mapper/core/NumberFieldTypeTests.java rename core/src/test/java/org/elasticsearch/index/mapper/date/{SimpleDateMappingTests.java => LegacyDateMappingTests.java} (91%) create mode 100644 core/src/test/java/org/elasticsearch/index/mapper/ip/IpFieldMapperTests.java create mode 100644 core/src/test/java/org/elasticsearch/index/mapper/ip/IpFieldTypeTests.java rename core/src/test/java/org/elasticsearch/index/mapper/ip/{SimpleIpMappingTests.java => LegacyIpMappingTests.java} (79%) rename core/src/test/java/org/elasticsearch/index/mapper/numeric/{SimpleNumericTests.java => LegacyNumericTests.java} (77%) create mode 100644 core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpTermsIT.java create mode 100644 docs/reference/migration/migrate_5_0/aggregations.asciidoc diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index 0575472215f0d..6ea0c03de736f 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -471,12 +471,12 @@ - - - - + + + + - + @@ -1070,8 +1070,8 @@ - - + + @@ -1087,12 +1087,12 
@@ - + - + diff --git a/buildSrc/src/main/resources/forbidden/es-all-signatures.txt b/buildSrc/src/main/resources/forbidden/es-all-signatures.txt index d258c09891176..e31a7020282eb 100644 --- a/buildSrc/src/main/resources/forbidden/es-all-signatures.txt +++ b/buildSrc/src/main/resources/forbidden/es-all-signatures.txt @@ -28,3 +28,6 @@ java.security.MessageDigest#clone() @ use org.elasticsearch.common.hash.MessageD @defaultMessage this should not have been added to lucene in the first place org.apache.lucene.index.IndexReader#getCombinedCoreAndDeletesKey() + +@defaultMessage Soon to be removed +org.apache.lucene.document.FieldType#numericType() diff --git a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java index 53304d6d79400..9dc5071839cdc 100644 --- a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java +++ b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java @@ -41,6 +41,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.core.DateFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyDateFieldMapper; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.support.QueryParsers; @@ -336,11 +337,12 @@ private Query getRangeQuerySingle(String field, String part1, String part2, try { Query rangeQuery; - if (currentFieldType instanceof DateFieldMapper.DateFieldType && settings.timeZone() != null) { - DateFieldMapper.DateFieldType dateFieldType = - (DateFieldMapper.DateFieldType) this.currentFieldType; - rangeQuery = dateFieldType.rangeQuery(part1, part2, startInclusive, endInclusive, - settings.timeZone(), null); + if (currentFieldType instanceof LegacyDateFieldMapper.DateFieldType && settings.timeZone() != null) { + LegacyDateFieldMapper.DateFieldType 
dateFieldType = (LegacyDateFieldMapper.DateFieldType) this.currentFieldType; + rangeQuery = dateFieldType.rangeQuery(part1, part2, startInclusive, endInclusive, settings.timeZone(), null); + } else if (currentFieldType instanceof DateFieldMapper.DateFieldType && settings.timeZone() != null) { + DateFieldMapper.DateFieldType dateFieldType = (DateFieldMapper.DateFieldType) this.currentFieldType; + rangeQuery = dateFieldType.rangeQuery(part1, part2, startInclusive, endInclusive, settings.timeZone(), null); } else { rangeQuery = currentFieldType.rangeQuery(part1, part2, startInclusive, endInclusive); } diff --git a/core/src/main/java/org/elasticsearch/action/fieldstats/FieldStats.java b/core/src/main/java/org/elasticsearch/action/fieldstats/FieldStats.java index 6f54c28ae4716..e842d412642df 100644 --- a/core/src/main/java/org/elasticsearch/action/fieldstats/FieldStats.java +++ b/core/src/main/java/org/elasticsearch/action/fieldstats/FieldStats.java @@ -19,23 +19,26 @@ package org.elasticsearch.action.fieldstats; +import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.StringHelper; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; +import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.elasticsearch.index.mapper.ip.IpFieldMapper; -import org.joda.time.DateTime; import java.io.IOException; +import java.net.InetAddress; +import java.net.UnknownHostException; -public abstract class FieldStats> implements Streamable, ToXContent { +public abstract class FieldStats implements Streamable, ToXContent { - 
private byte type; + private final byte type; private long maxDoc; private long docCount; private long sumDocFreq; @@ -43,7 +46,8 @@ public abstract class FieldStats> implements Streamable, protected T minValue; protected T maxValue; - protected FieldStats() { + protected FieldStats(int type) { + this.type = (byte) type; } protected FieldStats(int type, long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq) { @@ -148,17 +152,6 @@ public T getMaxValue() { */ protected abstract T valueOf(String value, String optionalFormat); - /** - * @param value - * The value to be converted to a String - * @param optionalFormat - * A string describing how to print the specified value. Whether - * this parameter is supported depends on the implementation. If - * optionalFormat is specified and the implementation doesn't - * support it an {@link UnsupportedOperationException} is thrown - */ - public abstract String stringValueOf(Object value, String optionalFormat); - /** * Merges the provided stats into this stats instance. 
*/ @@ -181,6 +174,8 @@ public void append(FieldStats stats) { } } + protected abstract int compare(T a, T b); + /** * @return true if this instance matches with the provided index constraint, otherwise false is returned */ @@ -188,9 +183,9 @@ public boolean match(IndexConstraint constraint) { int cmp; T value = valueOf(constraint.getValue(), constraint.getOptionalFormat()); if (constraint.getProperty() == IndexConstraint.Property.MIN) { - cmp = minValue.compareTo(value); + cmp = compare(minValue, value); } else if (constraint.getProperty() == IndexConstraint.Property.MAX) { - cmp = maxValue.compareTo(value); + cmp = compare(maxValue, value); } else { throw new IllegalArgumentException("Unsupported property [" + constraint.getProperty() + "]"); } @@ -246,9 +241,25 @@ public void writeTo(StreamOutput out) throws IOException { out.writeLong(sumTotalTermFreq); } - public static class Long extends FieldStats { + private static abstract class ComparableFieldStats> extends FieldStats { + protected ComparableFieldStats(int type) { + super(type); + } + + protected ComparableFieldStats(int type, long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq) { + super(type, maxDoc, docCount, sumDocFreq, sumTotalTermFreq); + } + + @Override + protected int compare(T a, T b) { + return a.compareTo(b); + } + } + + public static class Long extends ComparableFieldStats { public Long() { + super(0); } public Long(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq, long minValue, long maxValue) { @@ -287,18 +298,6 @@ protected java.lang.Long valueOf(String value, String optionalFormat) { return java.lang.Long.valueOf(value); } - @Override - public String stringValueOf(Object value, String optionalFormat) { - if (optionalFormat != null) { - throw new UnsupportedOperationException("custom format isn't supported"); - } - if (value instanceof Number) { - return java.lang.Long.toString(((Number) value).longValue()); - } else { - throw new 
IllegalArgumentException("value must be a Long: " + value); - } - } - @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); @@ -315,9 +314,10 @@ public void writeTo(StreamOutput out) throws IOException { } - public static final class Double extends FieldStats { + public static final class Double extends ComparableFieldStats { public Double() { + super(2); } public Double(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq, double minValue, double maxValue) { @@ -352,18 +352,6 @@ protected java.lang.Double valueOf(String value, String optionalFormat) { return java.lang.Double.valueOf(value); } - @Override - public String stringValueOf(Object value, String optionalFormat) { - if (optionalFormat != null) { - throw new UnsupportedOperationException("custom format isn't supported"); - } - if (value instanceof Number) { - return java.lang.Double.toString(((Number) value).doubleValue()); - } else { - throw new IllegalArgumentException("value must be a Double: " + value); - } - } - @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); @@ -380,9 +368,10 @@ public void writeTo(StreamOutput out) throws IOException { } - public static final class Text extends FieldStats { + public static final class Text extends ComparableFieldStats { public Text() { + super(3); } public Text(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq, BytesRef minValue, BytesRef maxValue) { @@ -421,18 +410,6 @@ protected BytesRef valueOf(String value, String optionalFormat) { return new BytesRef(value); } - @Override - public String stringValueOf(Object value, String optionalFormat) { - if (optionalFormat != null) { - throw new UnsupportedOperationException("custom format isn't supported"); - } - if (value instanceof BytesRef) { - return ((BytesRef) value).utf8ToString(); - } else { - throw new IllegalArgumentException("value must be a BytesRef: " + value); - } - } - @Override protected void 
toInnerXContent(XContentBuilder builder) throws IOException { builder.field(Fields.MIN_VALUE, getMinValueAsString()); @@ -486,25 +463,6 @@ protected java.lang.Long valueOf(String value, String optionalFormat) { return dateFormatter.parser().parseMillis(value); } - @Override - public String stringValueOf(Object value, String optionalFormat) { - FormatDateTimeFormatter dateFormatter = this.dateFormatter; - if (optionalFormat != null) { - dateFormatter = Joda.forPattern(optionalFormat); - } - long millis; - if (value instanceof java.lang.Long) { - millis = ((java.lang.Long) value).longValue(); - } else if (value instanceof DateTime) { - millis = ((DateTime) value).getMillis(); - } else if (value instanceof BytesRef) { - millis = dateFormatter.parser().parseMillis(((BytesRef) value).utf8ToString()); - } else { - throw new IllegalArgumentException("value must be either a DateTime or a long: " + value); - } - return dateFormatter.printer().print(millis); - } - @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); @@ -519,25 +477,59 @@ public void writeTo(StreamOutput out) throws IOException { } - public static class Ip extends Long { + public static class Ip extends FieldStats { - public Ip(int maxDoc, int docCount, long sumDocFreq, long sumTotalTermFreq, long minValue, long maxValue) { - super(maxDoc, docCount, sumDocFreq, sumTotalTermFreq, minValue, maxValue); - } + private InetAddress minValue, maxValue; - protected Ip(int type, long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq, long minValue, long maxValue) { - super(type, maxDoc, docCount, sumDocFreq, sumTotalTermFreq, minValue, maxValue); + public Ip(int maxDoc, int docCount, long sumDocFreq, long sumTotalTermFreq, + InetAddress minValue, InetAddress maxValue) { + super(4, maxDoc, docCount, sumDocFreq, sumTotalTermFreq); + this.minValue = minValue; + this.maxValue = maxValue; } public Ip() { + super(4); + } + + @Override + public String getMinValueAsString() { + 
return NetworkAddress.format(minValue); + } + + @Override + public String getMaxValueAsString() { + return NetworkAddress.format(maxValue); } @Override - public String stringValueOf(Object value, String optionalFormat) { - if (value instanceof BytesRef) { - return super.stringValueOf(IpFieldMapper.ipToLong(((BytesRef) value).utf8ToString()), optionalFormat); + protected InetAddress valueOf(String value, String optionalFormat) { + try { + return InetAddress.getByName(value); + } catch (UnknownHostException e) { + throw new RuntimeException(e); } - return super.stringValueOf(value, optionalFormat); + } + + @Override + protected int compare(InetAddress a, InetAddress b) { + byte[] ab = InetAddressPoint.encode(a); + byte[] bb = InetAddressPoint.encode(b); + return StringHelper.compare(ab.length, ab, 0, bb, 0); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + minValue = valueOf(in.readString(), null); + maxValue = valueOf(in.readString(), null); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(NetworkAddress.format(minValue)); + out.writeString(NetworkAddress.format(maxValue)); } } @@ -557,10 +549,12 @@ public static FieldStats read(StreamInput in) throws IOException { case 3: stats = new Text(); break; + case 4: + stats = new Ip(); + break; default: throw new IllegalArgumentException("Illegal type [" + type + "]"); } - stats.type = type; stats.readFrom(in); return stats; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/CustomDocValuesField.java b/core/src/main/java/org/elasticsearch/index/mapper/CustomDocValuesField.java new file mode 100644 index 0000000000000..737c8ddaabfa2 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/mapper/CustomDocValuesField.java @@ -0,0 +1,81 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.mapper; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.document.FieldType; +import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.IndexableFieldType; + +import java.io.Reader; + +// used for binary and geo fields +public abstract class CustomDocValuesField implements IndexableField { + + public static final FieldType TYPE = new FieldType(); + static { + TYPE.setDocValuesType(DocValuesType.BINARY); + TYPE.freeze(); + } + + private final String name; + + public CustomDocValuesField(String name) { + this.name = name; + } + + @Override + public String name() { + return name; + } + + @Override + public IndexableFieldType fieldType() { + return TYPE; + } + + @Override + public float boost() { + return 1f; + } + + @Override + public String stringValue() { + return null; + } + + @Override + public Reader readerValue() { + return null; + } + + @Override + public Number numericValue() { + return null; + } + + @Override + public TokenStream tokenStream(Analyzer analyzer, TokenStream reuse) { + return null; + } + +} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java 
b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 0f9c31d75dcd2..70219516147af 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -29,6 +29,7 @@ import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.CloseableThreadLocal; +import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.xcontent.XContentHelper; @@ -37,13 +38,14 @@ import org.elasticsearch.index.mapper.core.BinaryFieldMapper; import org.elasticsearch.index.mapper.core.BooleanFieldMapper; import org.elasticsearch.index.mapper.core.DateFieldMapper; -import org.elasticsearch.index.mapper.core.DateFieldMapper.DateFieldType; -import org.elasticsearch.index.mapper.core.DoubleFieldMapper; -import org.elasticsearch.index.mapper.core.FloatFieldMapper; -import org.elasticsearch.index.mapper.core.IntegerFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyDateFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyDoubleFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyFloatFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyIntegerFieldMapper; import org.elasticsearch.index.mapper.core.KeywordFieldMapper; import org.elasticsearch.index.mapper.core.KeywordFieldMapper.KeywordFieldType; -import org.elasticsearch.index.mapper.core.LongFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyLongFieldMapper; +import org.elasticsearch.index.mapper.core.NumberFieldMapper; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.core.StringFieldMapper.StringFieldType; import org.elasticsearch.index.mapper.core.TextFieldMapper; @@ -622,44 +624,93 @@ private static Mapper.Builder createBuilderFromFieldType(final ParseContext if 
(builder == null) { builder = new KeywordFieldMapper.Builder(currentFieldName); } - } else if (fieldType instanceof DateFieldType) { - builder = context.root().findTemplateBuilder(context, currentFieldName, "date"); - if (builder == null) { - builder = new DateFieldMapper.Builder(currentFieldName); - } - } else if (fieldType.numericType() != null) { - switch (fieldType.numericType()) { - case LONG: + } else { + switch (fieldType.typeName()) { + case "date": + builder = context.root().findTemplateBuilder(context, currentFieldName, "date"); + if (builder == null) { + builder = newDateBuilder(currentFieldName, null, Version.indexCreated(context.indexSettings())); + } + break; + case "long": builder = context.root().findTemplateBuilder(context, currentFieldName, "long"); if (builder == null) { - builder = new LongFieldMapper.Builder(currentFieldName); + builder = newLongBuilder(currentFieldName, Version.indexCreated(context.indexSettings())); } break; - case DOUBLE: + case "double": builder = context.root().findTemplateBuilder(context, currentFieldName, "double"); if (builder == null) { - builder = new DoubleFieldMapper.Builder(currentFieldName); + builder = newDoubleBuilder(currentFieldName, Version.indexCreated(context.indexSettings())); } break; - case INT: + case "integer": builder = context.root().findTemplateBuilder(context, currentFieldName, "integer"); if (builder == null) { - builder = new IntegerFieldMapper.Builder(currentFieldName); + builder = newIntBuilder(currentFieldName, Version.indexCreated(context.indexSettings())); } break; - case FLOAT: + case "float": builder = context.root().findTemplateBuilder(context, currentFieldName, "float"); if (builder == null) { - builder = new FloatFieldMapper.Builder(currentFieldName); + builder = newFloatBuilder(currentFieldName, Version.indexCreated(context.indexSettings())); } break; default: - throw new AssertionError("Unexpected numeric type " + fieldType.numericType()); + break; } } return builder; } + private 
static Mapper.Builder newLongBuilder(String name, Version indexCreated) { + if (indexCreated.onOrAfter(Version.V_5_0_0)) { + return new NumberFieldMapper.Builder(name, NumberFieldMapper.NumberType.LONG); + } else { + return new LegacyLongFieldMapper.Builder(name); + } + } + + private static Mapper.Builder newIntBuilder(String name, Version indexCreated) { + if (indexCreated.onOrAfter(Version.V_5_0_0)) { + return new NumberFieldMapper.Builder(name, NumberFieldMapper.NumberType.INTEGER); + } else { + return new LegacyIntegerFieldMapper.Builder(name); + } + } + + private static Mapper.Builder newDoubleBuilder(String name, Version indexCreated) { + if (indexCreated.onOrAfter(Version.V_5_0_0)) { + return new NumberFieldMapper.Builder(name, NumberFieldMapper.NumberType.DOUBLE); + } else { + return new LegacyDoubleFieldMapper.Builder(name); + } + } + + private static Mapper.Builder newFloatBuilder(String name, Version indexCreated) { + if (indexCreated.onOrAfter(Version.V_5_0_0)) { + return new NumberFieldMapper.Builder(name, NumberFieldMapper.NumberType.FLOAT); + } else { + return new LegacyFloatFieldMapper.Builder(name); + } + } + + private static Mapper.Builder newDateBuilder(String name, FormatDateTimeFormatter dateTimeFormatter, Version indexCreated) { + if (indexCreated.onOrAfter(Version.V_5_0_0)) { + DateFieldMapper.Builder builder = new DateFieldMapper.Builder(name); + if (dateTimeFormatter != null) { + builder.dateTimeFormatter(dateTimeFormatter); + } + return builder; + } else { + LegacyDateFieldMapper.Builder builder = new LegacyDateFieldMapper.Builder(name); + if (dateTimeFormatter != null) { + builder.dateTimeFormatter(dateTimeFormatter); + } + return builder; + } + } + private static Mapper.Builder createBuilderFromDynamicValue(final ParseContext context, XContentParser.Token token, String currentFieldName) throws IOException { if (token == XContentParser.Token.VALUE_STRING) { // do a quick test to see if its fits a dynamic template, if so, use it. 
@@ -681,7 +732,7 @@ private static Mapper.Builder createBuilderFromDynamicValue(final ParseCont dateTimeFormatter.parser().parseMillis(text); Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "date"); if (builder == null) { - builder = new DateFieldMapper.Builder(currentFieldName).dateTimeFormatter(dateTimeFormatter); + builder = newDateBuilder(currentFieldName, dateTimeFormatter, Version.indexCreated(context.indexSettings())); } return builder; } catch (Exception e) { @@ -696,7 +747,7 @@ private static Mapper.Builder createBuilderFromDynamicValue(final ParseCont Long.parseLong(text); Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long"); if (builder == null) { - builder = new LongFieldMapper.Builder(currentFieldName); + builder = newLongBuilder(currentFieldName, Version.indexCreated(context.indexSettings())); } return builder; } catch (NumberFormatException e) { @@ -706,7 +757,7 @@ private static Mapper.Builder createBuilderFromDynamicValue(final ParseCont Double.parseDouble(text); Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double"); if (builder == null) { - builder = new FloatFieldMapper.Builder(currentFieldName); + builder = newFloatBuilder(currentFieldName, Version.indexCreated(context.indexSettings())); } return builder; } catch (NumberFormatException e) { @@ -724,7 +775,7 @@ private static Mapper.Builder createBuilderFromDynamicValue(final ParseCont if (numberType == XContentParser.NumberType.INT || numberType == XContentParser.NumberType.LONG) { Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long"); if (builder == null) { - builder = new LongFieldMapper.Builder(currentFieldName); + builder = newLongBuilder(currentFieldName, Version.indexCreated(context.indexSettings())); } return builder; } else if (numberType == XContentParser.NumberType.FLOAT || numberType == XContentParser.NumberType.DOUBLE) { 
@@ -733,7 +784,7 @@ private static Mapper.Builder createBuilderFromDynamicValue(final ParseCont // no templates are defined, we use float by default instead of double // since this is much more space-efficient and should be enough most of // the time - builder = new FloatFieldMapper.Builder(currentFieldName); + builder = newFloatBuilder(currentFieldName, Version.indexCreated(context.indexSettings())); } return builder; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 58045c730527e..2dc54f1f55c16 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -31,7 +31,6 @@ import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.RegexpQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.search.BoostQuery; @@ -358,15 +357,7 @@ public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod me } public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) { - if (numericType() != null) { - throw new QueryShardException(context, "Cannot use regular expression to filter numeric field [" + name + "]"); - } - - RegexpQuery query = new RegexpQuery(new Term(name(), indexedValueForSearch(value)), flags, maxDeterminizedStates); - if (method != null) { - query.setRewriteMethod(method); - } - return query; + throw new QueryShardException(context, "Can only use regular expression on keyword and text fields - not on [" + name + "] which is of type [" + typeName() + "]"); } public Query nullValueQuery() { diff --git 
a/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java index 7f140da21f437..ed01aff290ec5 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.plain.BytesBinaryDVIndexFieldData; +import org.elasticsearch.index.mapper.CustomDocValuesField; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; @@ -178,7 +179,7 @@ protected String contentType() { return CONTENT_TYPE; } - public static class CustomBinaryDocValuesField extends NumberFieldMapper.CustomNumericDocValuesField { + public static class CustomBinaryDocValuesField extends CustomDocValuesField { private final ObjectArrayList bytesList; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java index 9254e221432f7..a5d06a64e1f9c 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java @@ -20,38 +20,38 @@ package org.elasticsearch.index.mapper.core; import org.apache.lucene.document.Field; +import org.apache.lucene.document.LongPoint; +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.document.StoredField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.MultiFields; -import org.apache.lucene.index.Terms; -import org.apache.lucene.search.LegacyNumericRangeQuery; +import 
org.apache.lucene.index.PointValues; +import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.LegacyNumericUtils; import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.Numbers; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; +import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.util.LocaleUtils; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; +import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.mapper.core.LongFieldMapper.CustomLongNumericField; +import org.elasticsearch.index.mapper.core.LegacyNumberFieldMapper.Defaults; +import org.elasticsearch.index.mapper.internal.AllFieldMapper; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.internal.SearchContext; import org.joda.time.DateTimeZone; @@ -63,37 +63,24 @@ import java.util.Map; import 
java.util.Objects; import java.util.concurrent.Callable; -import java.util.concurrent.TimeUnit; import static org.elasticsearch.index.mapper.core.TypeParsers.parseDateTimeFormatter; -import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField; -public class DateFieldMapper extends NumberFieldMapper { +/** A {@link FieldMapper} for dates. */ +public class DateFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll { public static final String CONTENT_TYPE = "date"; + public static final FormatDateTimeFormatter DEFAULT_DATE_TIME_FORMATTER = Joda.forPattern( + "strict_date_optional_time||epoch_millis", Locale.ROOT); - public static class Defaults extends NumberFieldMapper.Defaults { - public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("strict_date_optional_time||epoch_millis", Locale.ROOT); - public static final TimeUnit TIME_UNIT = TimeUnit.MILLISECONDS; - public static final DateFieldType FIELD_TYPE = new DateFieldType(); - - static { - FIELD_TYPE.freeze(); - } - - public static final String NULL_VALUE = null; - } - - public static class Builder extends NumberFieldMapper.Builder { - - protected String nullValue = Defaults.NULL_VALUE; + public static class Builder extends FieldMapper.Builder { + private Boolean ignoreMalformed; private Locale locale; public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); + super(name, new DateFieldType(), new DateFieldType()); builder = this; - // do *NOT* rely on the default locale locale = Locale.ROOT; } @@ -102,14 +89,19 @@ public DateFieldType fieldType() { return (DateFieldType)fieldType; } - public Builder timeUnit(TimeUnit timeUnit) { - fieldType().setTimeUnit(timeUnit); - return this; + public Builder ignoreMalformed(boolean ignoreMalformed) { + this.ignoreMalformed = ignoreMalformed; + return builder; } - public Builder nullValue(String nullValue) { - this.nullValue = nullValue; - return this; + protected 
Explicit ignoreMalformed(BuilderContext context) { + if (ignoreMalformed != null) { + return new Explicit<>(ignoreMalformed, true); + } + if (context.indexSettings() != null) { + return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false); + } + return Defaults.IGNORE_MALFORMED; } public Builder dateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { @@ -117,71 +109,69 @@ public Builder dateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { return this; } - @Override - public DateFieldMapper build(BuilderContext context) { - setupFieldType(context); - fieldType.setNullValue(nullValue); - DateFieldMapper fieldMapper = new DateFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), - coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - return (DateFieldMapper) fieldMapper.includeInAll(includeInAll); + public void locale(Locale locale) { + this.locale = locale; } @Override protected void setupFieldType(BuilderContext context) { + super.setupFieldType(context); FormatDateTimeFormatter dateTimeFormatter = fieldType().dateTimeFormatter; if (!locale.equals(dateTimeFormatter.locale())) { - fieldType().setDateTimeFormatter(new FormatDateTimeFormatter(dateTimeFormatter.format(), dateTimeFormatter.parser(), dateTimeFormatter.printer(), locale)); + fieldType().setDateTimeFormatter( new FormatDateTimeFormatter(dateTimeFormatter.format(), + dateTimeFormatter.parser(), dateTimeFormatter.printer(), locale)); } - super.setupFieldType(context); - } - - public Builder locale(Locale locale) { - this.locale = locale; - return this; } @Override - protected int maxPrecisionStep() { - return 64; + public DateFieldMapper build(BuilderContext context) { + setupFieldType(context); + DateFieldMapper fieldMapper = new DateFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), + context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); + return (DateFieldMapper) 
fieldMapper.includeInAll(includeInAll); } } public static class TypeParser implements Mapper.TypeParser { + + public TypeParser() { + } + @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - DateFieldMapper.Builder builder = new DateFieldMapper.Builder(name); - parseNumberField(builder, name, node, parserContext); - boolean configuredFormat = false; + public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { + if (parserContext.indexVersionCreated().before(Version.V_5_0_0)) { + return new LegacyDateFieldMapper.TypeParser().parse(name, node, parserContext); + } + Builder builder = new Builder(name); + TypeParsers.parseField(builder, name, node, parserContext); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); - String propName = Strings.toUnderscoreCase(entry.getKey()); + String propName = entry.getKey(); Object propNode = entry.getValue(); if (propName.equals("null_value")) { if (propNode == null) { throw new MapperParsingException("Property [null_value] cannot be null."); } - builder.nullValue(propNode.toString()); + builder.nullValue(propNode.toString()); iterator.remove(); - } else if (propName.equals("format")) { - builder.dateTimeFormatter(parseDateTimeFormatter(propNode)); - configuredFormat = true; - iterator.remove(); - } else if (propName.equals("numeric_resolution")) { - builder.timeUnit(TimeUnit.valueOf(propNode.toString().toUpperCase(Locale.ROOT))); + } else if (propName.equals("ignore_malformed")) { + builder.ignoreMalformed(TypeParsers.nodeBooleanValue("ignore_malformed", propNode, parserContext)); iterator.remove(); } else if (propName.equals("locale")) { builder.locale(LocaleUtils.parse(propNode.toString())); iterator.remove(); + } else if (propName.equals("format")) { + builder.dateTimeFormatter(parseDateTimeFormatter(propNode)); + 
iterator.remove(); + } else if (TypeParsers.parseMultiField(builder, name, parserContext, propName, propNode)) { + iterator.remove(); } } - if (!configuredFormat) { - builder.dateTimeFormatter(Defaults.DATE_TIME_FORMATTER); - } return builder; } } - public static class DateFieldType extends NumberFieldType { + public static final class DateFieldType extends MappedFieldType { final class LateParsingQuery extends Query { @@ -192,7 +182,8 @@ final class LateParsingQuery extends Query { final DateTimeZone timeZone; final DateMathParser forcedDateParser; - public LateParsingQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, DateTimeZone timeZone, DateMathParser forcedDateParser) { + public LateParsingQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, + DateTimeZone timeZone, DateMathParser forcedDateParser) { this.lowerTerm = lowerTerm; this.upperTerm = upperTerm; this.includeLower = includeLower; @@ -244,23 +235,24 @@ public String toString(String s) { } } - protected FormatDateTimeFormatter dateTimeFormatter = Defaults.DATE_TIME_FORMATTER; - protected TimeUnit timeUnit = Defaults.TIME_UNIT; - protected DateMathParser dateMathParser = new DateMathParser(dateTimeFormatter); + protected FormatDateTimeFormatter dateTimeFormatter; + protected DateMathParser dateMathParser; - public DateFieldType() { - super(LegacyNumericType.LONG); + DateFieldType() { + super(); + setTokenized(false); + setHasDocValues(true); + setOmitNorms(true); + setDateTimeFormatter(DEFAULT_DATE_TIME_FORMATTER); } - protected DateFieldType(DateFieldType ref) { - super(ref); - this.dateTimeFormatter = ref.dateTimeFormatter; - this.timeUnit = ref.timeUnit; - this.dateMathParser = ref.dateMathParser; + DateFieldType(DateFieldType other) { + super(other); + setDateTimeFormatter(other.dateTimeFormatter); } @Override - public DateFieldType clone() { + public MappedFieldType clone() { return new DateFieldType(this); } @@ -269,13 +261,12 @@ 
public boolean equals(Object o) { if (!super.equals(o)) return false; DateFieldType that = (DateFieldType) o; return Objects.equals(dateTimeFormatter.format(), that.dateTimeFormatter.format()) && - Objects.equals(dateTimeFormatter.locale(), that.dateTimeFormatter.locale()) && - Objects.equals(timeUnit, that.timeUnit); + Objects.equals(dateTimeFormatter.locale(), that.dateTimeFormatter.locale()); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), dateTimeFormatter.format(), timeUnit); + return Objects.hash(super.hashCode(), dateTimeFormatter.format(), dateTimeFormatter.locale()); } @Override @@ -289,13 +280,12 @@ public void checkCompatibility(MappedFieldType fieldType, List conflicts if (strict) { DateFieldType other = (DateFieldType)fieldType; if (Objects.equals(dateTimeFormatter().format(), other.dateTimeFormatter().format()) == false) { - conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [format] across all types."); + conflicts.add("mapper [" + name() + + "] is used by multiple types. Set update_all_types to true to update [format] across all types."); } if (Objects.equals(dateTimeFormatter().locale(), other.dateTimeFormatter().locale()) == false) { - conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [locale] across all types."); - } - if (Objects.equals(timeUnit(), other.timeUnit()) == false) { - conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [numeric_resolution] across all types."); + conflicts.add("mapper [" + name() + + "] is used by multiple types. 
Set update_all_types to true to update [locale] across all types."); } } } @@ -310,48 +300,35 @@ public void setDateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { this.dateMathParser = new DateMathParser(dateTimeFormatter); } - public TimeUnit timeUnit() { - return timeUnit; - } - - public void setTimeUnit(TimeUnit timeUnit) { - checkIfFrozen(); - this.timeUnit = timeUnit; - this.dateMathParser = new DateMathParser(dateTimeFormatter); - } - protected DateMathParser dateMathParser() { return dateMathParser; } - private long parseValue(Object value) { - if (value instanceof Number) { - return ((Number) value).longValue(); - } - if (value instanceof BytesRef) { - return dateTimeFormatter().parser().parseMillis(((BytesRef) value).utf8ToString()); - } - return dateTimeFormatter().parser().parseMillis(value.toString()); - } - - protected long parseStringValue(String value) { + long parse(String value) { return dateTimeFormatter().parser().parseMillis(value); } @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - LegacyNumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); + public Query termQuery(Object value, @Nullable QueryShardContext context) { + Query query = innerRangeQuery(value, value, true, true, null, null); + if (boost() != 1f) { + query = new BoostQuery(query, boost()); + } + return query; } @Override - public Object valueForSearch(Object value) { - Long val = (Long) value; - if (val == null) { - return null; + public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + long baseLo = parseToMilliseconds(value, false, null, dateMathParser); + long baseHi = parseToMilliseconds(value, true, null, dateMathParser); + long delta; + try { + delta = fuzziness.asTimeValue().millis(); + } catch (Exception e) { + // not a time format + delta = fuzziness.asLong(); } - 
return dateTimeFormatter().printer().print(val); + return LongPoint.newRangeQuery(name(), baseLo - delta, baseHi + delta); } @Override @@ -359,45 +336,75 @@ public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, null, null); } - @Override - public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { - long iValue = parseValue(value); - long iSim; - try { - iSim = fuzziness.asTimeValue().millis(); - } catch (Exception e) { - // not a time format - iSim = fuzziness.asLong(); + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, + @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) { + return new LateParsingQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser); + } + + Query innerRangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, + @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) { + DateMathParser parser = forcedDateParser == null + ? 
dateMathParser + : forcedDateParser; + long l, u; + if (lowerTerm == null) { + l = Long.MIN_VALUE; + } else { + l = parseToMilliseconds(lowerTerm, !includeLower, timeZone, parser); + if (includeLower == false) { + ++l; + } } - return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), - iValue - iSim, - iValue + iSim, - true, true); + if (upperTerm == null) { + u = Long.MAX_VALUE; + } else { + u = parseToMilliseconds(upperTerm, includeUpper, timeZone, parser); + if (includeUpper == false) { + --u; + } + } + return LongPoint.newRangeQuery(name(), l, u); } - @Override - public FieldStats stats(IndexReader reader) throws IOException { - int maxDoc = reader.maxDoc(); - Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name()); - if (terms == null) { - return null; + public long parseToMilliseconds(Object value, boolean roundUp, + @Nullable DateTimeZone zone, @Nullable DateMathParser forcedDateParser) { + DateMathParser dateParser = dateMathParser(); + if (forcedDateParser != null) { + dateParser = forcedDateParser; + } + + String strValue; + if (value instanceof BytesRef) { + strValue = ((BytesRef) value).utf8ToString(); + } else { + strValue = value.toString(); } - long minValue = LegacyNumericUtils.getMinLong(terms); - long maxValue = LegacyNumericUtils.getMaxLong(terms); - return new FieldStats.Date( - maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue, dateTimeFormatter() - ); + return dateParser.parse(strValue, now(), roundUp, zone); } - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) { - return new LateParsingQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser); + private static Callable now() { + return () -> { + final SearchContext context = SearchContext.current(); + return context != null + ? 
context.nowInMillis() + : System.currentTimeMillis(); + }; } - private Query innerRangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) { - return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), - lowerTerm == null ? null : parseToMilliseconds(lowerTerm, !includeLower, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser), - upperTerm == null ? null : parseToMilliseconds(upperTerm, includeUpper, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser), - includeLower, includeUpper); + @Override + public FieldStats.Date stats(IndexReader reader) throws IOException { + String field = name(); + long size = PointValues.size(reader, field); + if (size == 0) { + return null; + } + int docCount = PointValues.getDocCount(reader, field); + byte[] min = PointValues.getMinPackedValue(reader, field); + byte[] max = PointValues.getMaxPackedValue(reader, field); + return new FieldStats.Date(reader.maxDoc(),docCount, -1L, size, + LongPoint.decodeDimension(min, 0), + LongPoint.decodeDimension(max, 0), + dateTimeFormatter()); } @Override @@ -409,14 +416,13 @@ public Relation isFieldWithinQuery(IndexReader reader, dateParser = this.dateMathParser; } - Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name()); - if (terms == null) { - // no terms, so nothing matches + if (PointValues.size(reader, name()) == 0) { + // no points, so nothing matches return Relation.DISJOINT; } - long minValue = LegacyNumericUtils.getMinLong(terms); - long maxValue = LegacyNumericUtils.getMaxLong(terms); + long minValue = LongPoint.decodeDimension(PointValues.getMinPackedValue(reader, name()), 0); + long maxValue = LongPoint.decodeDimension(PointValues.getMaxPackedValue(reader, name()), 0); long fromInclusive = Long.MIN_VALUE; if (from != null) { @@ -449,31 +455,21 @@ public Relation isFieldWithinQuery(IndexReader reader, } 
} - public long parseToMilliseconds(Object value, boolean inclusive, @Nullable DateTimeZone zone, @Nullable DateMathParser forcedDateParser) { - if (value instanceof Long) { - return ((Long) value).longValue(); - } - - DateMathParser dateParser = dateMathParser(); - if (forcedDateParser != null) { - dateParser = forcedDateParser; - } - - String strValue; - if (value instanceof BytesRef) { - strValue = ((BytesRef) value).utf8ToString(); - } else { - strValue = value.toString(); - } - return dateParser.parse(strValue, now(), inclusive, zone); - } - @Override public IndexFieldData.Builder fielddataBuilder() { failIfNoDocValues(); return new DocValuesIndexFieldData.Builder().numericType(NumericType.LONG); } + @Override + public Object valueForSearch(Object value) { + Long val = (Long) value; + if (val == null) { + return null; + } + return dateTimeFormatter().printer().print(val); + } + @Override public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) { FormatDateTimeFormatter dateTimeFormatter = this.dateTimeFormatter; @@ -487,9 +483,20 @@ public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZ } } - protected DateFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Explicit ignoreMalformed,Explicit coerce, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, indexSettings, multiFields, copyTo); + private Boolean includeInAll; + + private Explicit ignoreMalformed; + + private DateFieldMapper( + String simpleName, + MappedFieldType fieldType, + MappedFieldType defaultFieldType, + Explicit ignoreMalformed, + Settings indexSettings, + MultiFields multiFields, + CopyTo copyTo) { + super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); + this.ignoreMalformed = ignoreMalformed; } @Override @@ -497,121 +504,126 @@ public DateFieldType fieldType() { return 
(DateFieldType) super.fieldType(); } - private static Callable now() { - return new Callable() { - @Override - public Long call() { - final SearchContext context = SearchContext.current(); - return context != null - ? context.nowInMillis() - : System.currentTimeMillis(); - } - }; + @Override + protected String contentType() { + return fieldType.typeName(); } @Override - protected boolean customBoost() { - return true; + protected DateFieldMapper clone() { + return (DateFieldMapper) super.clone(); } @Override - protected void innerParseCreateField(ParseContext context, List fields) throws IOException { - String dateAsString = null; - float boost = fieldType().boost(); + public Mapper includeInAll(Boolean includeInAll) { + if (includeInAll != null) { + DateFieldMapper clone = clone(); + clone.includeInAll = includeInAll; + return clone; + } else { + return this; + } + } + + @Override + public Mapper includeInAllIfNotSet(Boolean includeInAll) { + if (includeInAll != null && this.includeInAll == null) { + DateFieldMapper clone = clone(); + clone.includeInAll = includeInAll; + return clone; + } else { + return this; + } + } + + @Override + public Mapper unsetIncludeInAll() { + if (includeInAll != null) { + DateFieldMapper clone = clone(); + clone.includeInAll = null; + return clone; + } else { + return this; + } + } + + @Override + protected void parseCreateField(ParseContext context, List fields) throws IOException { + String dateAsString; if (context.externalValueSet()) { - Object externalValue = context.externalValue(); - dateAsString = (String) externalValue; - if (dateAsString == null) { - dateAsString = fieldType().nullValueAsString(); + Object dateAsObject = context.externalValue(); + if (dateAsObject == null) { + dateAsString = null; + } else { + dateAsString = dateAsObject.toString(); } } else { - XContentParser parser = context.parser(); - XContentParser.Token token = parser.currentToken(); - if (token == XContentParser.Token.VALUE_NULL) { - dateAsString = 
fieldType().nullValueAsString(); - } else if (token == XContentParser.Token.VALUE_NUMBER) { - dateAsString = parser.text(); - } else if (token == XContentParser.Token.START_OBJECT - && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - String currentFieldName = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else { - if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) { - if (token == XContentParser.Token.VALUE_NULL) { - dateAsString = fieldType().nullValueAsString(); - } else { - dateAsString = parser.text(); - } - } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) { - boost = parser.floatValue(); - } else { - throw new IllegalArgumentException("unknown property [" + currentFieldName + "]"); - } - } - } + dateAsString = context.parser().text(); + } + + if (dateAsString == null) { + dateAsString = fieldType().nullValueAsString(); + } + + if (dateAsString == null) { + return; + } + + long timestamp; + try { + timestamp = fieldType().parse(dateAsString); + } catch (IllegalArgumentException e) { + if (ignoreMalformed.value()) { + return; } else { - dateAsString = parser.text(); + throw e; } } - Long value = null; - if (dateAsString != null) { - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), dateAsString, boost); - } - value = fieldType().parseStringValue(dateAsString); + if (context.includeInAll(includeInAll, this)) { + context.allEntries().addText(fieldType().name(), dateAsString, fieldType().boost()); } - if (value != null) { - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - CustomLongNumericField field = new CustomLongNumericField(value, fieldType()); - if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - 
field.setBoost(boost); - } - fields.add(field); - } - if (fieldType().hasDocValues()) { - addDocValue(context, fields, value); - } + if (fieldType().indexOptions() != IndexOptions.NONE) { + fields.add(new LongPoint(fieldType().name(), timestamp)); + } + if (fieldType().hasDocValues()) { + fields.add(new SortedNumericDocValuesField(fieldType().name(), timestamp)); + } + if (fieldType().stored()) { + fields.add(new StoredField(fieldType().name(), timestamp)); } } @Override - protected String contentType() { - return CONTENT_TYPE; + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { + super.doMerge(mergeWith, updateAllTypes); + DateFieldMapper other = (DateFieldMapper) mergeWith; + this.includeInAll = other.includeInAll; + if (other.ignoreMalformed.explicit()) { + this.ignoreMalformed = other.ignoreMalformed; + } } @Override protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || fieldType().numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) { - builder.field("precision_step", fieldType().numericPrecisionStep()); - } - builder.field("format", fieldType().dateTimeFormatter().format()); - if (includeDefaults || fieldType().nullValueAsString() != null) { - builder.field("null_value", fieldType().nullValueAsString()); + if (includeDefaults || ignoreMalformed.explicit()) { + builder.field("ignore_malformed", ignoreMalformed.value()); } if (includeInAll != null) { builder.field("include_in_all", includeInAll); } else if (includeDefaults) { builder.field("include_in_all", false); } - - if (includeDefaults || fieldType().timeUnit() != Defaults.TIME_UNIT) { - builder.field("numeric_resolution", fieldType().timeUnit().name().toLowerCase(Locale.ROOT)); + if (includeDefaults + || fieldType().dateTimeFormatter().format().equals(DEFAULT_DATE_TIME_FORMATTER.format()) == false) { + builder.field("format", 
fieldType().dateTimeFormatter().format()); } - // only serialize locale if needed, ROOT is the default, so no need to serialize that case as well... - if (fieldType().dateTimeFormatter().locale() != null && fieldType().dateTimeFormatter().locale() != Locale.ROOT) { + if (includeDefaults + || fieldType().dateTimeFormatter().locale() != Locale.ROOT) { builder.field("locale", fieldType().dateTimeFormatter().locale()); - } else if (includeDefaults) { - if (fieldType().dateTimeFormatter().locale() == null) { - builder.field("locale", Locale.ROOT); - } else { - builder.field("locale", fieldType().dateTimeFormatter().locale()); - } } } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/KeywordFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/KeywordFieldMapper.java index 78823a5343d69..92028ae56fa93 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/KeywordFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/KeywordFieldMapper.java @@ -22,15 +22,18 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.RegexpQuery; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; import org.elasticsearch.index.mapper.FieldMapper; import 
org.elasticsearch.index.mapper.MappedFieldType; @@ -38,6 +41,7 @@ import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.internal.AllFieldMapper; +import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; import java.util.Iterator; @@ -45,7 +49,6 @@ import java.util.Map; import static org.elasticsearch.index.mapper.core.TypeParsers.parseField; -import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField; /** * A field mapper for keywords. This mapper accepts strings and indexes them as-is. @@ -170,6 +173,16 @@ public IndexFieldData.Builder fielddataBuilder() { failIfNoDocValues(); return new DocValuesIndexFieldData.Builder(); } + + @Override + public Query regexpQuery(String value, int flags, int maxDeterminizedStates, + @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) { + RegexpQuery query = new RegexpQuery(new Term(name(), indexedValueForSearch(value)), flags, maxDeterminizedStates); + if (method != null) { + query.setRewriteMethod(method); + } + return query; + } } private Boolean includeInAll; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyByteFieldMapper.java similarity index 93% rename from core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java rename to core/src/main/java/org/elasticsearch/index/mapper/core/LegacyByteFieldMapper.java index 6036e04381010..bff46f21f06f8 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyByteFieldMapper.java @@ -57,11 +57,11 @@ /** * */ -public class ByteFieldMapper extends NumberFieldMapper { +public class LegacyByteFieldMapper extends LegacyNumberFieldMapper { public static final String CONTENT_TYPE = "byte"; - public static 
class Defaults extends NumberFieldMapper.Defaults { + public static class Defaults extends LegacyNumberFieldMapper.Defaults { public static final MappedFieldType FIELD_TYPE = new ByteFieldType(); static { @@ -69,7 +69,7 @@ public static class Defaults extends NumberFieldMapper.Defaults { } } - public static class Builder extends NumberFieldMapper.Builder { + public static class Builder extends LegacyNumberFieldMapper.Builder { public Builder(String name) { super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_8_BIT); @@ -77,11 +77,14 @@ public Builder(String name) { } @Override - public ByteFieldMapper build(BuilderContext context) { + public LegacyByteFieldMapper build(BuilderContext context) { + if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0)) { + throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); + } setupFieldType(context); - ByteFieldMapper fieldMapper = new ByteFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), + LegacyByteFieldMapper fieldMapper = new LegacyByteFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - return (ByteFieldMapper) fieldMapper.includeInAll(includeInAll); + return (LegacyByteFieldMapper) fieldMapper.includeInAll(includeInAll); } @Override @@ -93,7 +96,7 @@ protected int maxPrecisionStep() { public static class TypeParser implements Mapper.TypeParser { @Override public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - ByteFieldMapper.Builder builder = new ByteFieldMapper.Builder(name); + LegacyByteFieldMapper.Builder builder = new LegacyByteFieldMapper.Builder(name); parseNumberField(builder, name, node, parserContext); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); @@ -189,7 +192,7 @@ public IndexFieldData.Builder fielddataBuilder() { } } - 
protected ByteFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, + protected LegacyByteFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Explicit ignoreMalformed, Explicit coerce, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, indexSettings, multiFields, copyTo); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyDateFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyDateFieldMapper.java new file mode 100644 index 0000000000000..f6b8a0dffef81 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyDateFieldMapper.java @@ -0,0 +1,617 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.mapper.core; + +import org.apache.lucene.document.Field; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.Terms; +import org.apache.lucene.search.LegacyNumericRangeQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.LegacyNumericUtils; +import org.elasticsearch.Version; +import org.elasticsearch.action.fieldstats.FieldStats; +import org.elasticsearch.common.Explicit; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.joda.DateMathParser; +import org.elasticsearch.common.joda.FormatDateTimeFormatter; +import org.elasticsearch.common.joda.Joda; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.common.util.LocaleUtils; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; +import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.ParseContext; +import org.elasticsearch.index.mapper.core.LegacyLongFieldMapper.CustomLongNumericField; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.internal.SearchContext; +import org.joda.time.DateTimeZone; + +import java.io.IOException; +import java.util.Iterator; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.Callable; +import 
java.util.concurrent.TimeUnit; + +import static org.elasticsearch.index.mapper.core.TypeParsers.parseDateTimeFormatter; +import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField; + +public class LegacyDateFieldMapper extends LegacyNumberFieldMapper { + + public static final String CONTENT_TYPE = "date"; + + public static class Defaults extends LegacyNumberFieldMapper.Defaults { + public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("strict_date_optional_time||epoch_millis", Locale.ROOT); + public static final TimeUnit TIME_UNIT = TimeUnit.MILLISECONDS; + public static final DateFieldType FIELD_TYPE = new DateFieldType(); + + static { + FIELD_TYPE.freeze(); + } + + public static final String NULL_VALUE = null; + } + + public static class Builder extends LegacyNumberFieldMapper.Builder { + + protected String nullValue = Defaults.NULL_VALUE; + + private Locale locale; + + public Builder(String name) { + super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); + builder = this; + // do *NOT* rely on the default locale + locale = Locale.ROOT; + } + + @Override + public DateFieldType fieldType() { + return (DateFieldType)fieldType; + } + + public Builder timeUnit(TimeUnit timeUnit) { + fieldType().setTimeUnit(timeUnit); + return this; + } + + public Builder nullValue(String nullValue) { + this.nullValue = nullValue; + return this; + } + + public Builder dateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { + fieldType().setDateTimeFormatter(dateTimeFormatter); + return this; + } + + @Override + public LegacyDateFieldMapper build(BuilderContext context) { + if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0)) { + throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); + } + setupFieldType(context); + fieldType.setNullValue(nullValue); + LegacyDateFieldMapper fieldMapper = new LegacyDateFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), + 
coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); + return (LegacyDateFieldMapper) fieldMapper.includeInAll(includeInAll); + } + + @Override + protected void setupFieldType(BuilderContext context) { + FormatDateTimeFormatter dateTimeFormatter = fieldType().dateTimeFormatter; + if (!locale.equals(dateTimeFormatter.locale())) { + fieldType().setDateTimeFormatter(new FormatDateTimeFormatter(dateTimeFormatter.format(), dateTimeFormatter.parser(), dateTimeFormatter.printer(), locale)); + } + super.setupFieldType(context); + } + + public Builder locale(Locale locale) { + this.locale = locale; + return this; + } + + @Override + protected int maxPrecisionStep() { + return 64; + } + } + + public static class TypeParser implements Mapper.TypeParser { + @Override + public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { + LegacyDateFieldMapper.Builder builder = new LegacyDateFieldMapper.Builder(name); + parseNumberField(builder, name, node, parserContext); + boolean configuredFormat = false; + for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { + Map.Entry entry = iterator.next(); + String propName = Strings.toUnderscoreCase(entry.getKey()); + Object propNode = entry.getValue(); + if (propName.equals("null_value")) { + if (propNode == null) { + throw new MapperParsingException("Property [null_value] cannot be null."); + } + builder.nullValue(propNode.toString()); + iterator.remove(); + } else if (propName.equals("format")) { + builder.dateTimeFormatter(parseDateTimeFormatter(propNode)); + configuredFormat = true; + iterator.remove(); + } else if (propName.equals("numeric_resolution")) { + builder.timeUnit(TimeUnit.valueOf(propNode.toString().toUpperCase(Locale.ROOT))); + iterator.remove(); + } else if (propName.equals("locale")) { + builder.locale(LocaleUtils.parse(propNode.toString())); + iterator.remove(); + } + } + if (!configuredFormat) { + 
builder.dateTimeFormatter(Defaults.DATE_TIME_FORMATTER); + } + return builder; + } + } + + public static class DateFieldType extends NumberFieldType { + + final class LateParsingQuery extends Query { + + final Object lowerTerm; + final Object upperTerm; + final boolean includeLower; + final boolean includeUpper; + final DateTimeZone timeZone; + final DateMathParser forcedDateParser; + + public LateParsingQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, DateTimeZone timeZone, DateMathParser forcedDateParser) { + this.lowerTerm = lowerTerm; + this.upperTerm = upperTerm; + this.includeLower = includeLower; + this.includeUpper = includeUpper; + this.timeZone = timeZone; + this.forcedDateParser = forcedDateParser; + } + + @Override + public Query rewrite(IndexReader reader) throws IOException { + Query rewritten = super.rewrite(reader); + if (rewritten != this) { + return rewritten; + } + return innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser); + } + + // Even though we only cache rewritten queries it is good to let all queries implement hashCode() and equals(): + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!super.equals(o)) return false; + + LateParsingQuery that = (LateParsingQuery) o; + if (includeLower != that.includeLower) return false; + if (includeUpper != that.includeUpper) return false; + if (lowerTerm != null ? !lowerTerm.equals(that.lowerTerm) : that.lowerTerm != null) return false; + if (upperTerm != null ? !upperTerm.equals(that.upperTerm) : that.upperTerm != null) return false; + if (timeZone != null ? 
!timeZone.equals(that.timeZone) : that.timeZone != null) return false; + + return true; + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), lowerTerm, upperTerm, includeLower, includeUpper, timeZone); + } + + @Override + public String toString(String s) { + final StringBuilder sb = new StringBuilder(); + return sb.append(name()).append(':') + .append(includeLower ? '[' : '{') + .append((lowerTerm == null) ? "*" : lowerTerm.toString()) + .append(" TO ") + .append((upperTerm == null) ? "*" : upperTerm.toString()) + .append(includeUpper ? ']' : '}') + .toString(); + } + } + + protected FormatDateTimeFormatter dateTimeFormatter = Defaults.DATE_TIME_FORMATTER; + protected TimeUnit timeUnit = Defaults.TIME_UNIT; + protected DateMathParser dateMathParser = new DateMathParser(dateTimeFormatter); + + public DateFieldType() { + super(LegacyNumericType.LONG); + } + + protected DateFieldType(DateFieldType ref) { + super(ref); + this.dateTimeFormatter = ref.dateTimeFormatter; + this.timeUnit = ref.timeUnit; + this.dateMathParser = ref.dateMathParser; + } + + @Override + public DateFieldType clone() { + return new DateFieldType(this); + } + + @Override + public boolean equals(Object o) { + if (!super.equals(o)) return false; + DateFieldType that = (DateFieldType) o; + return Objects.equals(dateTimeFormatter.format(), that.dateTimeFormatter.format()) && + Objects.equals(dateTimeFormatter.locale(), that.dateTimeFormatter.locale()) && + Objects.equals(timeUnit, that.timeUnit); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), dateTimeFormatter.format(), timeUnit); + } + + @Override + public String typeName() { + return CONTENT_TYPE; + } + + @Override + public void checkCompatibility(MappedFieldType fieldType, List conflicts, boolean strict) { + super.checkCompatibility(fieldType, conflicts, strict); + if (strict) { + DateFieldType other = (DateFieldType)fieldType; + if (Objects.equals(dateTimeFormatter().format(), 
other.dateTimeFormatter().format()) == false) { + conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [format] across all types."); + } + if (Objects.equals(dateTimeFormatter().locale(), other.dateTimeFormatter().locale()) == false) { + conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [locale] across all types."); + } + if (Objects.equals(timeUnit(), other.timeUnit()) == false) { + conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [numeric_resolution] across all types."); + } + } + } + + public FormatDateTimeFormatter dateTimeFormatter() { + return dateTimeFormatter; + } + + public void setDateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { + checkIfFrozen(); + this.dateTimeFormatter = dateTimeFormatter; + this.dateMathParser = new DateMathParser(dateTimeFormatter); + } + + public TimeUnit timeUnit() { + return timeUnit; + } + + public void setTimeUnit(TimeUnit timeUnit) { + checkIfFrozen(); + this.timeUnit = timeUnit; + this.dateMathParser = new DateMathParser(dateTimeFormatter); + } + + protected DateMathParser dateMathParser() { + return dateMathParser; + } + + private long parseValue(Object value) { + if (value instanceof Number) { + return ((Number) value).longValue(); + } + if (value instanceof BytesRef) { + return dateTimeFormatter().parser().parseMillis(((BytesRef) value).utf8ToString()); + } + return dateTimeFormatter().parser().parseMillis(value.toString()); + } + + protected long parseStringValue(String value) { + return dateTimeFormatter().parser().parseMillis(value); + } + + @Override + public BytesRef indexedValueForSearch(Object value) { + BytesRefBuilder bytesRef = new BytesRefBuilder(); + LegacyNumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match + return bytesRef.get(); + } + + @Override + public Object valueForSearch(Object value) { + 
Long val = (Long) value; + if (val == null) { + return null; + } + return dateTimeFormatter().printer().print(val); + } + + @Override + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { + return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, null, null); + } + + @Override + public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + long iValue = parseValue(value); + long iSim; + try { + iSim = fuzziness.asTimeValue().millis(); + } catch (Exception e) { + // not a time format + iSim = fuzziness.asLong(); + } + return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), + iValue - iSim, + iValue + iSim, + true, true); + } + + @Override + public FieldStats stats(IndexReader reader) throws IOException { + int maxDoc = reader.maxDoc(); + Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name()); + if (terms == null) { + return null; + } + long minValue = LegacyNumericUtils.getMinLong(terms); + long maxValue = LegacyNumericUtils.getMaxLong(terms); + return new FieldStats.Date( + maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue, dateTimeFormatter() + ); + } + + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) { + return new LateParsingQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser); + } + + private Query innerRangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) { + return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), + lowerTerm == null ? null : parseToMilliseconds(lowerTerm, !includeLower, timeZone, forcedDateParser == null ? 
dateMathParser : forcedDateParser), + upperTerm == null ? null : parseToMilliseconds(upperTerm, includeUpper, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser), + includeLower, includeUpper); + } + + @Override + public Relation isFieldWithinQuery(IndexReader reader, + Object from, Object to, + boolean includeLower, boolean includeUpper, + DateTimeZone timeZone, DateMathParser dateParser) throws IOException { + if (dateParser == null) { + dateParser = this.dateMathParser; + } + + Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name()); + if (terms == null) { + // no terms, so nothing matches + return Relation.DISJOINT; + } + + long minValue = LegacyNumericUtils.getMinLong(terms); + long maxValue = LegacyNumericUtils.getMaxLong(terms); + + long fromInclusive = Long.MIN_VALUE; + if (from != null) { + fromInclusive = parseToMilliseconds(from, !includeLower, timeZone, dateParser); + if (includeLower == false) { + if (fromInclusive == Long.MAX_VALUE) { + return Relation.DISJOINT; + } + ++fromInclusive; + } + } + + long toInclusive = Long.MAX_VALUE; + if (to != null) { + toInclusive = parseToMilliseconds(to, includeUpper, timeZone, dateParser); + if (includeUpper == false) { + if (toInclusive == Long.MIN_VALUE) { + return Relation.DISJOINT; + } + --toInclusive; + } + } + + if (minValue >= fromInclusive && maxValue <= toInclusive) { + return Relation.WITHIN; + } else if (maxValue < fromInclusive || minValue > toInclusive) { + return Relation.DISJOINT; + } else { + return Relation.INTERSECTS; + } + } + + public long parseToMilliseconds(Object value, boolean inclusive, @Nullable DateTimeZone zone, @Nullable DateMathParser forcedDateParser) { + if (value instanceof Long) { + return ((Long) value).longValue(); + } + + DateMathParser dateParser = dateMathParser(); + if (forcedDateParser != null) { + dateParser = forcedDateParser; + } + + String strValue; + if (value instanceof BytesRef) { + strValue = ((BytesRef) value).utf8ToString(); + 
} else { + strValue = value.toString(); + } + return dateParser.parse(strValue, now(), inclusive, zone); + } + + @Override + public IndexFieldData.Builder fielddataBuilder() { + failIfNoDocValues(); + return new DocValuesIndexFieldData.Builder().numericType(NumericType.LONG); + } + + @Override + public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) { + FormatDateTimeFormatter dateTimeFormatter = this.dateTimeFormatter; + if (format != null) { + dateTimeFormatter = Joda.forPattern(format); + } + if (timeZone == null) { + timeZone = DateTimeZone.UTC; + } + return new DocValueFormat.DateTime(dateTimeFormatter, timeZone); + } + } + + protected LegacyDateFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Explicit ignoreMalformed,Explicit coerce, + Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { + super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, indexSettings, multiFields, copyTo); + } + + @Override + public DateFieldType fieldType() { + return (DateFieldType) super.fieldType(); + } + + private static Callable now() { + return new Callable() { + @Override + public Long call() { + final SearchContext context = SearchContext.current(); + return context != null + ? 
context.nowInMillis() + : System.currentTimeMillis(); + } + }; + } + + @Override + protected boolean customBoost() { + return true; + } + + @Override + protected void innerParseCreateField(ParseContext context, List fields) throws IOException { + String dateAsString = null; + float boost = fieldType().boost(); + if (context.externalValueSet()) { + Object externalValue = context.externalValue(); + dateAsString = (String) externalValue; + if (dateAsString == null) { + dateAsString = fieldType().nullValueAsString(); + } + } else { + XContentParser parser = context.parser(); + XContentParser.Token token = parser.currentToken(); + if (token == XContentParser.Token.VALUE_NULL) { + dateAsString = fieldType().nullValueAsString(); + } else if (token == XContentParser.Token.VALUE_NUMBER) { + dateAsString = parser.text(); + } else if (token == XContentParser.Token.START_OBJECT + && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { + String currentFieldName = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else { + if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) { + if (token == XContentParser.Token.VALUE_NULL) { + dateAsString = fieldType().nullValueAsString(); + } else { + dateAsString = parser.text(); + } + } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) { + boost = parser.floatValue(); + } else { + throw new IllegalArgumentException("unknown property [" + currentFieldName + "]"); + } + } + } + } else { + dateAsString = parser.text(); + } + } + + Long value = null; + if (dateAsString != null) { + if (context.includeInAll(includeInAll, this)) { + context.allEntries().addText(fieldType().name(), dateAsString, boost); + } + value = fieldType().parseStringValue(dateAsString); + } + + if (value != null) { + if (fieldType().indexOptions() != IndexOptions.NONE || 
fieldType().stored()) { + CustomLongNumericField field = new CustomLongNumericField(value, fieldType()); + if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { + field.setBoost(boost); + } + fields.add(field); + } + if (fieldType().hasDocValues()) { + addDocValue(context, fields, value); + } + } + } + + @Override + protected String contentType() { + return CONTENT_TYPE; + } + + @Override + protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { + super.doXContentBody(builder, includeDefaults, params); + + if (includeDefaults || fieldType().numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) { + builder.field("precision_step", fieldType().numericPrecisionStep()); + } + builder.field("format", fieldType().dateTimeFormatter().format()); + if (includeDefaults || fieldType().nullValueAsString() != null) { + builder.field("null_value", fieldType().nullValueAsString()); + } + if (includeInAll != null) { + builder.field("include_in_all", includeInAll); + } else if (includeDefaults) { + builder.field("include_in_all", false); + } + + if (includeDefaults || fieldType().timeUnit() != Defaults.TIME_UNIT) { + builder.field("numeric_resolution", fieldType().timeUnit().name().toLowerCase(Locale.ROOT)); + } + // only serialize locale if needed, ROOT is the default, so no need to serialize that case as well... 
+ if (fieldType().dateTimeFormatter().locale() != null && fieldType().dateTimeFormatter().locale() != Locale.ROOT) { + builder.field("locale", fieldType().dateTimeFormatter().locale()); + } else if (includeDefaults) { + if (fieldType().dateTimeFormatter().locale() == null) { + builder.field("locale", Locale.ROOT); + } else { + builder.field("locale", fieldType().dateTimeFormatter().locale()); + } + } + } +} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyDoubleFieldMapper.java similarity index 93% rename from core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java rename to core/src/main/java/org/elasticsearch/index/mapper/core/LegacyDoubleFieldMapper.java index 11b1dcadb0699..f87370a78a8c5 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyDoubleFieldMapper.java @@ -59,11 +59,11 @@ /** * */ -public class DoubleFieldMapper extends NumberFieldMapper { +public class LegacyDoubleFieldMapper extends LegacyNumberFieldMapper { public static final String CONTENT_TYPE = "double"; - public static class Defaults extends NumberFieldMapper.Defaults { + public static class Defaults extends LegacyNumberFieldMapper.Defaults { public static final MappedFieldType FIELD_TYPE = new DoubleFieldType(); static { @@ -71,7 +71,7 @@ public static class Defaults extends NumberFieldMapper.Defaults { } } - public static class Builder extends NumberFieldMapper.Builder { + public static class Builder extends LegacyNumberFieldMapper.Builder { public Builder(String name) { super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); @@ -79,11 +79,14 @@ public Builder(String name) { } @Override - public DoubleFieldMapper build(BuilderContext context) { + public LegacyDoubleFieldMapper build(BuilderContext context) { + if 
(context.indexCreatedVersion().onOrAfter(Version.V_5_0_0)) { + throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); + } setupFieldType(context); - DoubleFieldMapper fieldMapper = new DoubleFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), + LegacyDoubleFieldMapper fieldMapper = new LegacyDoubleFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - return (DoubleFieldMapper) fieldMapper.includeInAll(includeInAll); + return (LegacyDoubleFieldMapper) fieldMapper.includeInAll(includeInAll); } @Override @@ -95,7 +98,7 @@ protected int maxPrecisionStep() { public static class TypeParser implements Mapper.TypeParser { @Override public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - DoubleFieldMapper.Builder builder = new DoubleFieldMapper.Builder(name); + LegacyDoubleFieldMapper.Builder builder = new LegacyDoubleFieldMapper.Builder(name); parseNumberField(builder, name, node, parserContext); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); @@ -199,7 +202,7 @@ public IndexFieldData.Builder fielddataBuilder() { } } - protected DoubleFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Explicit ignoreMalformed, + protected LegacyDoubleFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Explicit ignoreMalformed, Explicit coerce, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, indexSettings, multiFields, copyTo); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyFloatFieldMapper.java similarity index 93% rename 
from core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java rename to core/src/main/java/org/elasticsearch/index/mapper/core/LegacyFloatFieldMapper.java index c984cb0cb0d5d..8d442ea70c9cd 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyFloatFieldMapper.java @@ -58,11 +58,11 @@ /** * */ -public class FloatFieldMapper extends NumberFieldMapper { +public class LegacyFloatFieldMapper extends LegacyNumberFieldMapper { public static final String CONTENT_TYPE = "float"; - public static class Defaults extends NumberFieldMapper.Defaults { + public static class Defaults extends LegacyNumberFieldMapper.Defaults { public static final MappedFieldType FIELD_TYPE = new FloatFieldType(); static { @@ -70,7 +70,7 @@ public static class Defaults extends NumberFieldMapper.Defaults { } } - public static class Builder extends NumberFieldMapper.Builder { + public static class Builder extends LegacyNumberFieldMapper.Builder { public Builder(String name) { super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT); @@ -78,11 +78,14 @@ public Builder(String name) { } @Override - public FloatFieldMapper build(BuilderContext context) { + public LegacyFloatFieldMapper build(BuilderContext context) { + if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0)) { + throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); + } setupFieldType(context); - FloatFieldMapper fieldMapper = new FloatFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), + LegacyFloatFieldMapper fieldMapper = new LegacyFloatFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - return (FloatFieldMapper) fieldMapper.includeInAll(includeInAll); + return (LegacyFloatFieldMapper) fieldMapper.includeInAll(includeInAll); } 
@Override @@ -94,7 +97,7 @@ protected int maxPrecisionStep() { public static class TypeParser implements Mapper.TypeParser { @Override public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - FloatFieldMapper.Builder builder = new FloatFieldMapper.Builder(name); + LegacyFloatFieldMapper.Builder builder = new LegacyFloatFieldMapper.Builder(name); parseNumberField(builder, name, node, parserContext); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); @@ -184,7 +187,7 @@ public IndexFieldData.Builder fielddataBuilder() { } } - protected FloatFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, + protected LegacyFloatFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Explicit ignoreMalformed, Explicit coerce, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, indexSettings, multiFields, copyTo); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyIntegerFieldMapper.java similarity index 93% rename from core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java rename to core/src/main/java/org/elasticsearch/index/mapper/core/LegacyIntegerFieldMapper.java index c4e9ea958d136..c0ff90a4dc543 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyIntegerFieldMapper.java @@ -59,11 +59,11 @@ /** * */ -public class IntegerFieldMapper extends NumberFieldMapper { +public class LegacyIntegerFieldMapper extends LegacyNumberFieldMapper { public static final String CONTENT_TYPE = "integer"; - public static class Defaults extends NumberFieldMapper.Defaults { + public static class 
Defaults extends LegacyNumberFieldMapper.Defaults { public static final MappedFieldType FIELD_TYPE = new IntegerFieldType(); static { @@ -71,7 +71,7 @@ public static class Defaults extends NumberFieldMapper.Defaults { } } - public static class Builder extends NumberFieldMapper.Builder { + public static class Builder extends LegacyNumberFieldMapper.Builder { public Builder(String name) { super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT); @@ -84,12 +84,15 @@ public Builder nullValue(int nullValue) { } @Override - public IntegerFieldMapper build(BuilderContext context) { + public LegacyIntegerFieldMapper build(BuilderContext context) { + if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0)) { + throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); + } setupFieldType(context); - IntegerFieldMapper fieldMapper = new IntegerFieldMapper(name, fieldType, defaultFieldType, + LegacyIntegerFieldMapper fieldMapper = new LegacyIntegerFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - return (IntegerFieldMapper) fieldMapper.includeInAll(includeInAll); + return (LegacyIntegerFieldMapper) fieldMapper.includeInAll(includeInAll); } @Override protected int maxPrecisionStep() { @@ -100,7 +103,7 @@ protected int maxPrecisionStep() { public static class TypeParser implements Mapper.TypeParser { @Override public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - IntegerFieldMapper.Builder builder = new IntegerFieldMapper.Builder(name); + LegacyIntegerFieldMapper.Builder builder = new LegacyIntegerFieldMapper.Builder(name); parseNumberField(builder, name, node, parserContext); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); @@ -190,7 +193,7 @@ public IndexFieldData.Builder fielddataBuilder() { } } - protected 
IntegerFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, + protected LegacyIntegerFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Explicit ignoreMalformed, Explicit coerce, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, indexSettings, multiFields, copyTo); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyLongFieldMapper.java similarity index 93% rename from core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java rename to core/src/main/java/org/elasticsearch/index/mapper/core/LegacyLongFieldMapper.java index 05591a5b1c198..b1b924bb152f4 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyLongFieldMapper.java @@ -59,11 +59,11 @@ /** * */ -public class LongFieldMapper extends NumberFieldMapper { +public class LegacyLongFieldMapper extends LegacyNumberFieldMapper { public static final String CONTENT_TYPE = "long"; - public static class Defaults extends NumberFieldMapper.Defaults { + public static class Defaults extends LegacyNumberFieldMapper.Defaults { public static final MappedFieldType FIELD_TYPE = new LongFieldType(); static { @@ -71,7 +71,7 @@ public static class Defaults extends NumberFieldMapper.Defaults { } } - public static class Builder extends NumberFieldMapper.Builder { + public static class Builder extends LegacyNumberFieldMapper.Builder { public Builder(String name) { super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); @@ -84,11 +84,14 @@ public Builder nullValue(long nullValue) { } @Override - public LongFieldMapper build(BuilderContext context) { + public LegacyLongFieldMapper build(BuilderContext context) { + if 
(context.indexCreatedVersion().onOrAfter(Version.V_5_0_0)) { + throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); + } setupFieldType(context); - LongFieldMapper fieldMapper = new LongFieldMapper(name, fieldType, defaultFieldType, + LegacyLongFieldMapper fieldMapper = new LegacyLongFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - return (LongFieldMapper) fieldMapper.includeInAll(includeInAll); + return (LegacyLongFieldMapper) fieldMapper.includeInAll(includeInAll); } @Override @@ -100,7 +103,7 @@ protected int maxPrecisionStep() { public static class TypeParser implements Mapper.TypeParser { @Override public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - LongFieldMapper.Builder builder = new LongFieldMapper.Builder(name); + LegacyLongFieldMapper.Builder builder = new LegacyLongFieldMapper.Builder(name); parseNumberField(builder, name, node, parserContext); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); @@ -189,7 +192,7 @@ public IndexFieldData.Builder fielddataBuilder() { } } - protected LongFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, + protected LegacyLongFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Explicit ignoreMalformed, Explicit coerce, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, indexSettings, multiFields, copyTo); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyNumberFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyNumberFieldMapper.java new file mode 100644 index 0000000000000..28e68ce59d59f --- /dev/null +++ 
b/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyNumberFieldMapper.java @@ -0,0 +1,366 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.mapper.core; + +import java.io.IOException; +import java.io.Reader; +import java.util.List; + +import org.apache.lucene.analysis.LegacyNumericTokenStream; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.search.Query; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Explicit; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.ParseContext; +import 
org.elasticsearch.index.mapper.internal.AllFieldMapper; +import org.elasticsearch.search.DocValueFormat; +import org.joda.time.DateTimeZone; + +/** + * + */ +public abstract class LegacyNumberFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll { + // this is private since it has a different default + private static final Setting COERCE_SETTING = + Setting.boolSetting("index.mapping.coerce", true, Property.IndexScope); + + public static class Defaults { + + public static final int PRECISION_STEP_8_BIT = Integer.MAX_VALUE; // 1tpv: 256 terms at most, not useful + public static final int PRECISION_STEP_16_BIT = 8; // 2tpv + public static final int PRECISION_STEP_32_BIT = 8; // 4tpv + public static final int PRECISION_STEP_64_BIT = 16; // 4tpv + + public static final Explicit IGNORE_MALFORMED = new Explicit<>(false, false); + public static final Explicit COERCE = new Explicit<>(true, false); + } + + public abstract static class Builder extends FieldMapper.Builder { + + private Boolean ignoreMalformed; + + private Boolean coerce; + + public Builder(String name, MappedFieldType fieldType, int defaultPrecisionStep) { + super(name, fieldType, fieldType); + this.fieldType.setNumericPrecisionStep(defaultPrecisionStep); + } + + public T precisionStep(int precisionStep) { + fieldType.setNumericPrecisionStep(precisionStep); + return builder; + } + + public T ignoreMalformed(boolean ignoreMalformed) { + this.ignoreMalformed = ignoreMalformed; + return builder; + } + + protected Explicit ignoreMalformed(BuilderContext context) { + if (ignoreMalformed != null) { + return new Explicit<>(ignoreMalformed, true); + } + if (context.indexSettings() != null) { + return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false); + } + return Defaults.IGNORE_MALFORMED; + } + + public T coerce(boolean coerce) { + this.coerce = coerce; + return builder; + } + + protected Explicit coerce(BuilderContext context) { + if (coerce != null) { + return new 
Explicit<>(coerce, true); + } + if (context.indexSettings() != null) { + return new Explicit<>(COERCE_SETTING.get(context.indexSettings()), false); + } + return Defaults.COERCE; + } + + protected void setupFieldType(BuilderContext context) { + super.setupFieldType(context); + int precisionStep = fieldType.numericPrecisionStep(); + if (precisionStep <= 0 || precisionStep >= maxPrecisionStep()) { + fieldType.setNumericPrecisionStep(Integer.MAX_VALUE); + } + } + + protected abstract int maxPrecisionStep(); + } + + public static abstract class NumberFieldType extends MappedFieldType { + + public NumberFieldType(LegacyNumericType numericType) { + setTokenized(false); + setOmitNorms(true); + setIndexOptions(IndexOptions.DOCS); + setStoreTermVectors(false); + setNumericType(numericType); + } + + protected NumberFieldType(NumberFieldType ref) { + super(ref); + } + + @Override + public void checkCompatibility(MappedFieldType other, + List conflicts, boolean strict) { + super.checkCompatibility(other, conflicts, strict); + if (numericPrecisionStep() != other.numericPrecisionStep()) { + conflicts.add("mapper [" + name() + "] has different [precision_step] values"); + } + } + + public abstract NumberFieldType clone(); + + @Override + public abstract Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions); + + @Override + public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) { + if (timeZone != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones"); + } + if (format == null) { + return DocValueFormat.RAW; + } else { + return new DocValueFormat.Decimal(format); + } + } + } + + protected Boolean includeInAll; + + protected Explicit ignoreMalformed; + + protected Explicit coerce; + + protected LegacyNumberFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, + Explicit 
ignoreMalformed, Explicit coerce, Settings indexSettings, + MultiFields multiFields, CopyTo copyTo) { + super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); + assert fieldType.tokenized() == false; + this.ignoreMalformed = ignoreMalformed; + this.coerce = coerce; + } + + @Override + protected LegacyNumberFieldMapper clone() { + return (LegacyNumberFieldMapper) super.clone(); + } + + @Override + public Mapper includeInAll(Boolean includeInAll) { + if (includeInAll != null) { + LegacyNumberFieldMapper clone = clone(); + clone.includeInAll = includeInAll; + return clone; + } else { + return this; + } + } + + @Override + public Mapper includeInAllIfNotSet(Boolean includeInAll) { + if (includeInAll != null && this.includeInAll == null) { + LegacyNumberFieldMapper clone = clone(); + clone.includeInAll = includeInAll; + return clone; + } else { + return this; + } + } + + @Override + public Mapper unsetIncludeInAll() { + if (includeInAll != null) { + LegacyNumberFieldMapper clone = clone(); + clone.includeInAll = null; + return clone; + } else { + return this; + } + } + + @Override + protected void parseCreateField(ParseContext context, List fields) throws IOException { + RuntimeException e = null; + try { + innerParseCreateField(context, fields); + } catch (IllegalArgumentException e1) { + e = e1; + } catch (MapperParsingException e2) { + e = e2; + } + + if (e != null && !ignoreMalformed.value()) { + throw e; + } + } + + protected abstract void innerParseCreateField(ParseContext context, List fields) throws IOException; + + protected final void addDocValue(ParseContext context, List fields, long value) { + fields.add(new SortedNumericDocValuesField(fieldType().name(), value)); + } + + /** + * Converts an object value into a double + */ + public static double parseDoubleValue(Object value) { + if (value instanceof Number) { + return ((Number) value).doubleValue(); + } + + if (value instanceof BytesRef) { + return 
Double.parseDouble(((BytesRef) value).utf8ToString()); + } + + return Double.parseDouble(value.toString()); + } + + /** + * Converts an object value into a long + */ + public static long parseLongValue(Object value) { + if (value instanceof Number) { + return ((Number) value).longValue(); + } + + if (value instanceof BytesRef) { + return Long.parseLong(((BytesRef) value).utf8ToString()); + } + + return Long.parseLong(value.toString()); + } + + @Override + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { + super.doMerge(mergeWith, updateAllTypes); + LegacyNumberFieldMapper nfmMergeWith = (LegacyNumberFieldMapper) mergeWith; + + this.includeInAll = nfmMergeWith.includeInAll; + if (nfmMergeWith.ignoreMalformed.explicit()) { + this.ignoreMalformed = nfmMergeWith.ignoreMalformed; + } + if (nfmMergeWith.coerce.explicit()) { + this.coerce = nfmMergeWith.coerce; + } + } + + // used so we can use a numeric field in a document that is then parsed twice! + public abstract static class CustomNumericField extends Field { + + private ThreadLocal tokenStream = new ThreadLocal() { + @Override + protected LegacyNumericTokenStream initialValue() { + return new LegacyNumericTokenStream(fieldType().numericPrecisionStep()); + } + }; + + private static ThreadLocal tokenStream4 = new ThreadLocal() { + @Override + protected LegacyNumericTokenStream initialValue() { + return new LegacyNumericTokenStream(4); + } + }; + + private static ThreadLocal tokenStream8 = new ThreadLocal() { + @Override + protected LegacyNumericTokenStream initialValue() { + return new LegacyNumericTokenStream(8); + } + }; + + private static ThreadLocal tokenStream16 = new ThreadLocal() { + @Override + protected LegacyNumericTokenStream initialValue() { + return new LegacyNumericTokenStream(16); + } + }; + + private static ThreadLocal tokenStreamMax = new ThreadLocal() { + @Override + protected LegacyNumericTokenStream initialValue() { + return new LegacyNumericTokenStream(Integer.MAX_VALUE); + }
}; + + public CustomNumericField(Number value, MappedFieldType fieldType) { + super(fieldType.name(), fieldType); + if (value != null) { + this.fieldsData = value; + } + } + + protected LegacyNumericTokenStream getCachedStream() { + if (fieldType().numericPrecisionStep() == 4) { + return tokenStream4.get(); + } else if (fieldType().numericPrecisionStep() == 8) { + return tokenStream8.get(); + } else if (fieldType().numericPrecisionStep() == 16) { + return tokenStream16.get(); + } else if (fieldType().numericPrecisionStep() == Integer.MAX_VALUE) { + return tokenStreamMax.get(); + } + return tokenStream.get(); + } + + @Override + public String stringValue() { + return null; + } + + @Override + public Reader readerValue() { + return null; + } + + public abstract String numericAsString(); + } + + @Override + protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { + super.doXContentBody(builder, includeDefaults, params); + + if (includeDefaults || ignoreMalformed.explicit()) { + builder.field("ignore_malformed", ignoreMalformed.value()); + } + if (includeDefaults || coerce.explicit()) { + builder.field("coerce", coerce.value()); + } + } +} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyShortFieldMapper.java similarity index 93% rename from core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java rename to core/src/main/java/org/elasticsearch/index/mapper/core/LegacyShortFieldMapper.java index 6902e26e0fd0d..7f476f61ee312 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyShortFieldMapper.java @@ -58,12 +58,12 @@ /** * */ -public class ShortFieldMapper extends NumberFieldMapper { +public class LegacyShortFieldMapper extends LegacyNumberFieldMapper { public static final String 
CONTENT_TYPE = "short"; public static final int DEFAULT_PRECISION_STEP = 8; - public static class Defaults extends NumberFieldMapper.Defaults { + public static class Defaults extends LegacyNumberFieldMapper.Defaults { public static final MappedFieldType FIELD_TYPE = new ShortFieldType(); static { @@ -71,7 +71,7 @@ public static class Defaults extends NumberFieldMapper.Defaults { } } - public static class Builder extends NumberFieldMapper.Builder { + public static class Builder extends LegacyNumberFieldMapper.Builder { public Builder(String name) { super(name, Defaults.FIELD_TYPE, DEFAULT_PRECISION_STEP); @@ -79,12 +79,15 @@ public Builder(String name) { } @Override - public ShortFieldMapper build(BuilderContext context) { + public LegacyShortFieldMapper build(BuilderContext context) { + if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0)) { + throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); + } setupFieldType(context); - ShortFieldMapper fieldMapper = new ShortFieldMapper(name, fieldType, defaultFieldType, + LegacyShortFieldMapper fieldMapper = new LegacyShortFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - return (ShortFieldMapper) fieldMapper.includeInAll(includeInAll); + return (LegacyShortFieldMapper) fieldMapper.includeInAll(includeInAll); } @Override @@ -96,7 +99,7 @@ protected int maxPrecisionStep() { public static class TypeParser implements Mapper.TypeParser { @Override public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - ShortFieldMapper.Builder builder = new ShortFieldMapper.Builder(name); + LegacyShortFieldMapper.Builder builder = new LegacyShortFieldMapper.Builder(name); parseNumberField(builder, name, node, parserContext); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); @@ 
-193,7 +196,7 @@ public IndexFieldData.Builder fielddataBuilder() { } } - protected ShortFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, + protected LegacyShortFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Explicit ignoreMalformed, Explicit coerce, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, indexSettings, multiFields, copyTo); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyTokenCountFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyTokenCountFieldMapper.java new file mode 100644 index 0000000000000..d5a527c370a48 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/LegacyTokenCountFieldMapper.java @@ -0,0 +1,202 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.mapper.core; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; +import org.apache.lucene.document.Field; +import org.elasticsearch.Version; +import org.elasticsearch.common.Explicit; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.ParseContext; +import org.elasticsearch.index.mapper.core.StringFieldMapper.ValueAndBoost; + +import java.io.IOException; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import static org.apache.lucene.index.IndexOptions.NONE; +import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeIntegerValue; +import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField; + +/** + * A {@link FieldMapper} that takes a string and writes a count of the tokens in that string + * to the index. In most ways the mapper acts just like an {@link LegacyIntegerFieldMapper}. 
+ */ +public class LegacyTokenCountFieldMapper extends LegacyIntegerFieldMapper { + public static final String CONTENT_TYPE = "token_count"; + + public static class Defaults extends LegacyIntegerFieldMapper.Defaults { + + } + + public static class Builder extends LegacyNumberFieldMapper.Builder { + private NamedAnalyzer analyzer; + + public Builder(String name) { + super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT); + builder = this; + } + + public Builder analyzer(NamedAnalyzer analyzer) { + this.analyzer = analyzer; + return this; + } + + public NamedAnalyzer analyzer() { + return analyzer; + } + + @Override + public LegacyTokenCountFieldMapper build(BuilderContext context) { + if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0)) { + throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); + } + setupFieldType(context); + LegacyTokenCountFieldMapper fieldMapper = new LegacyTokenCountFieldMapper(name, fieldType, defaultFieldType, + ignoreMalformed(context), coerce(context), context.indexSettings(), + analyzer, multiFieldsBuilder.build(this, context), copyTo); + return (LegacyTokenCountFieldMapper) fieldMapper.includeInAll(includeInAll); + } + + @Override + protected int maxPrecisionStep() { + return 32; + } + } + + public static class TypeParser implements Mapper.TypeParser { + @Override + @SuppressWarnings("unchecked") + public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { + LegacyTokenCountFieldMapper.Builder builder = new LegacyTokenCountFieldMapper.Builder(name); + for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { + Map.Entry entry = iterator.next(); + String propName = Strings.toUnderscoreCase(entry.getKey()); + Object propNode = entry.getValue(); + if (propName.equals("null_value")) { + builder.nullValue(nodeIntegerValue(propNode)); + iterator.remove(); + } else if (propName.equals("analyzer")) { + NamedAnalyzer analyzer = 
parserContext.analysisService().analyzer(propNode.toString()); + if (analyzer == null) { + throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + name + "]"); + } + builder.analyzer(analyzer); + iterator.remove(); + } + } + parseNumberField(builder, name, node, parserContext); + if (builder.analyzer() == null) { + throw new MapperParsingException("Analyzer must be set for field [" + name + "] but wasn't."); + } + return builder; + } + } + + private NamedAnalyzer analyzer; + + protected LegacyTokenCountFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Explicit ignoreMalformed, + Explicit coerce, Settings indexSettings, NamedAnalyzer analyzer, MultiFields multiFields, CopyTo copyTo) { + super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, indexSettings, multiFields, copyTo); + this.analyzer = analyzer; + } + + @Override + protected void parseCreateField(ParseContext context, List fields) throws IOException { + ValueAndBoost valueAndBoost = StringFieldMapper.parseCreateFieldForString(context, null /* Our null value is an int so we convert*/, fieldType().boost()); + if (valueAndBoost.value() == null && fieldType().nullValue() == null) { + return; + } + + if (fieldType().indexOptions() != NONE || fieldType().stored() || fieldType().hasDocValues()) { + int count; + if (valueAndBoost.value() == null) { + count = fieldType().nullValue(); + } else { + count = countPositions(analyzer, simpleName(), valueAndBoost.value()); + } + addIntegerFields(context, fields, count, valueAndBoost.boost()); + } + } + + /** + * Count position increments in a token stream. Package private for testing.
+ * @param analyzer analyzer to create token stream + * @param fieldName field name to pass to analyzer + * @param fieldValue field value to pass to analyzer + * @return number of position increments in a token stream + * @throws IOException if tokenStream throws it + */ + static int countPositions(Analyzer analyzer, String fieldName, String fieldValue) throws IOException { + try (TokenStream tokenStream = analyzer.tokenStream(fieldName, fieldValue)) { + int count = 0; + PositionIncrementAttribute position = tokenStream.addAttribute(PositionIncrementAttribute.class); + tokenStream.reset(); + while (tokenStream.incrementToken()) { + count += position.getPositionIncrement(); + } + tokenStream.end(); + count += position.getPositionIncrement(); + return count; + } + } + + /** + * Name of analyzer. + * @return name of analyzer + */ + public String analyzer() { + return analyzer.name(); + } + + @Override + protected String contentType() { + return CONTENT_TYPE; + } + + @Override + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { + super.doMerge(mergeWith, updateAllTypes); + this.analyzer = ((LegacyTokenCountFieldMapper) mergeWith).analyzer; + } + + @Override + protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { + super.doXContentBody(builder, includeDefaults, params); + + builder.field("analyzer", analyzer()); + } + + @Override + public boolean isGenerated() { + return true; + } + +} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java index 9c12c52b47f70..316324c1dc9a8 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java @@ -19,79 +19,70 @@ package org.elasticsearch.index.mapper.core; -import org.apache.lucene.analysis.Analyzer; -import 
org.apache.lucene.analysis.LegacyNumericTokenStream; -import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.document.DoublePoint; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; +import org.apache.lucene.document.FloatPoint; +import org.apache.lucene.document.IntPoint; +import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.SortedNumericDocValuesField; -import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.document.StoredField; import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.IndexableFieldType; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.PointValues; +import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.NumericUtils; import org.elasticsearch.Version; +import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexNumericFieldData; +import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import 
org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; +import org.elasticsearch.index.mapper.core.LegacyNumberFieldMapper.Defaults; import org.elasticsearch.index.mapper.internal.AllFieldMapper; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.DocValueFormat; import org.joda.time.DateTimeZone; import java.io.IOException; -import java.io.Reader; +import java.util.ArrayList; +import java.util.Iterator; import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** A {@link FieldMapper} for numeric types: byte, short, int, long, float and double. */ +public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll { -/** - * - */ -public abstract class NumberFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll { // this is private since it has a different default private static final Setting COERCE_SETTING = - Setting.boolSetting("index.mapping.coerce", true, Property.IndexScope); - - public static class Defaults { - - public static final int PRECISION_STEP_8_BIT = Integer.MAX_VALUE; // 1tpv: 256 terms at most, not useful - public static final int PRECISION_STEP_16_BIT = 8; // 2tpv - public static final int PRECISION_STEP_32_BIT = 8; // 4tpv - public static final int PRECISION_STEP_64_BIT = 16; // 4tpv + Setting.boolSetting("index.mapping.coerce", true, Property.IndexScope); - public static final Explicit IGNORE_MALFORMED = new Explicit<>(false, false); - public static final Explicit COERCE = new Explicit<>(true, false); - } - - public abstract static class Builder extends FieldMapper.Builder { + public static class Builder extends FieldMapper.Builder { private Boolean ignoreMalformed; - private Boolean coerce; - public Builder(String name, MappedFieldType fieldType, int defaultPrecisionStep) { - super(name, fieldType, fieldType); - 
this.fieldType.setNumericPrecisionStep(defaultPrecisionStep); + public Builder(String name, NumberType type) { + super(name, new NumberFieldType(type), new NumberFieldType(type)); + builder = this; } - public T precisionStep(int precisionStep) { - fieldType.setNumericPrecisionStep(precisionStep); - return builder; - } - - public T ignoreMalformed(boolean ignoreMalformed) { + public Builder ignoreMalformed(boolean ignoreMalformed) { this.ignoreMalformed = ignoreMalformed; return builder; } @@ -106,7 +97,7 @@ protected Explicit ignoreMalformed(BuilderContext context) { return Defaults.IGNORE_MALFORMED; } - public T coerce(boolean coerce) { + public Builder coerce(boolean coerce) { this.coerce = coerce; return builder; } @@ -121,49 +112,658 @@ protected Explicit coerce(BuilderContext context) { return Defaults.COERCE; } + @Override protected void setupFieldType(BuilderContext context) { super.setupFieldType(context); - int precisionStep = fieldType.numericPrecisionStep(); - if (precisionStep <= 0 || precisionStep >= maxPrecisionStep()) { - fieldType.setNumericPrecisionStep(Integer.MAX_VALUE); + } + + @Override + public NumberFieldMapper build(BuilderContext context) { + setupFieldType(context); + NumberFieldMapper fieldMapper = new NumberFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), + coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); + return (NumberFieldMapper) fieldMapper.includeInAll(includeInAll); + } + } + + public static class TypeParser implements Mapper.TypeParser { + + final NumberType type; + + public TypeParser(NumberType type) { + this.type = type; + } + + @Override + public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { + if (parserContext.indexVersionCreated().before(Version.V_5_0_0)) { + switch (type) { + case BYTE: + return new LegacyByteFieldMapper.TypeParser().parse(name, node, parserContext); + case SHORT: + return new 
LegacyShortFieldMapper.TypeParser().parse(name, node, parserContext); + case INTEGER: + return new LegacyIntegerFieldMapper.TypeParser().parse(name, node, parserContext); + case LONG: + return new LegacyLongFieldMapper.TypeParser().parse(name, node, parserContext); + case FLOAT: + return new LegacyFloatFieldMapper.TypeParser().parse(name, node, parserContext); + case DOUBLE: + return new LegacyDoubleFieldMapper.TypeParser().parse(name, node, parserContext); + default: + throw new AssertionError(); + } + } + Builder builder = new Builder(name, type); + TypeParsers.parseField(builder, name, node, parserContext); + for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { + Map.Entry entry = iterator.next(); + String propName = entry.getKey(); + Object propNode = entry.getValue(); + if (propName.equals("null_value")) { + if (propNode == null) { + throw new MapperParsingException("Property [null_value] cannot be null."); + } + builder.nullValue(type.parse(propNode)); + iterator.remove(); + } else if (propName.equals("ignore_malformed")) { + builder.ignoreMalformed(TypeParsers.nodeBooleanValue("ignore_malformed", propNode, parserContext)); + iterator.remove(); + } else if (propName.equals("coerce")) { + builder.coerce(TypeParsers.nodeBooleanValue("coerce", propNode, parserContext)); + iterator.remove(); + } } + return builder; } + } + + public enum NumberType { + FLOAT("float", NumericType.FLOAT) { + @Override + Float parse(Object value) { + if (value instanceof Number) { + return ((Number) value).floatValue(); + } + if (value instanceof BytesRef) { + value = ((BytesRef) value).utf8ToString(); + } + return Float.parseFloat(value.toString()); + } + + @Override + Float parse(XContentParser parser, boolean coerce) throws IOException { + return parser.floatValue(coerce); + } + + @Override + Query termQuery(String field, Object value) { + float v = parse(value); + return FloatPoint.newExactQuery(field, v); + } + + @Override + Query termsQuery(String field, 
List values) { + float[] v = new float[values.size()]; + for (int i = 0; i < values.size(); ++i) { + v[i] = parse(values.get(i)); + } + return FloatPoint.newSetQuery(field, v); + } + + @Override + Query rangeQuery(String field, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { + float l = Float.NEGATIVE_INFINITY; + float u = Float.POSITIVE_INFINITY; + if (lowerTerm != null) { + l = parse(lowerTerm); + if (includeLower == false) { + l = Math.nextUp(l); + } + } + if (upperTerm != null) { + u = parse(upperTerm); + if (includeUpper == false) { + u = Math.nextDown(u); + } + } + return FloatPoint.newRangeQuery(field, l, u); + } + + @Override + Query fuzzyQuery(String field, Object value, Fuzziness fuzziness) { + float base = parse(value); + float delta = fuzziness.asFloat(); + return rangeQuery(field, base - delta, base + delta, true, true); + } + + @Override + public List createFields(String name, Number value, boolean indexed, boolean docValued, boolean stored) { + List fields = new ArrayList<>(); + if (indexed) { + fields.add(new FloatPoint(name, value.floatValue())); + } + if (docValued) { + fields.add(new SortedNumericDocValuesField(name, NumericUtils.floatToSortableInt(value.floatValue()))); + } + if (stored) { + fields.add(new StoredField(name, value.floatValue())); + } + return fields; + } - protected abstract int maxPrecisionStep(); + @Override + FieldStats.Double stats(IndexReader reader, String field) throws IOException { + long size = PointValues.size(reader, field); + if (size == 0) { + return null; + } + int docCount = PointValues.getDocCount(reader, field); + byte[] min = PointValues.getMinPackedValue(reader, field); + byte[] max = PointValues.getMaxPackedValue(reader, field); + return new FieldStats.Double(reader.maxDoc(),docCount, -1L, size, + FloatPoint.decodeDimension(min, 0), + FloatPoint.decodeDimension(max, 0)); + } + }, + DOUBLE("double", NumericType.DOUBLE) { + @Override + Double parse(Object value) { + if (value 
instanceof Number) { + return ((Number) value).doubleValue(); + } + if (value instanceof BytesRef) { + value = ((BytesRef) value).utf8ToString(); + } + return Double.parseDouble(value.toString()); + } + + @Override + Double parse(XContentParser parser, boolean coerce) throws IOException { + return parser.doubleValue(coerce); + } + + @Override + Query termQuery(String field, Object value) { + double v = parse(value); + return DoublePoint.newExactQuery(field, v); + } + + @Override + Query termsQuery(String field, List values) { + double[] v = new double[values.size()]; + for (int i = 0; i < values.size(); ++i) { + v[i] = parse(values.get(i)); + } + return DoublePoint.newSetQuery(field, v); + } + + @Override + Query rangeQuery(String field, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { + double l = Double.NEGATIVE_INFINITY; + double u = Double.POSITIVE_INFINITY; + if (lowerTerm != null) { + l = parse(lowerTerm); + if (includeLower == false) { + l = Math.nextUp(l); + } + } + if (upperTerm != null) { + u = parse(upperTerm); + if (includeUpper == false) { + u = Math.nextDown(u); + } + } + return DoublePoint.newRangeQuery(field, l, u); + } + + @Override + Query fuzzyQuery(String field, Object value, Fuzziness fuzziness) { + double base = parse(value); + double delta = fuzziness.asFloat(); + return rangeQuery(field, base - delta, base + delta, true, true); + } + + @Override + public List createFields(String name, Number value, boolean indexed, boolean docValued, boolean stored) { + List fields = new ArrayList<>(); + if (indexed) { + fields.add(new DoublePoint(name, value.doubleValue())); + } + if (docValued) { + fields.add(new SortedNumericDocValuesField(name, NumericUtils.doubleToSortableLong(value.doubleValue()))); + } + if (stored) { + fields.add(new StoredField(name, value.doubleValue())); + } + return fields; + } + + @Override + FieldStats.Double stats(IndexReader reader, String field) throws IOException { + long size = 
PointValues.size(reader, field); + if (size == 0) { + return null; + } + int docCount = PointValues.getDocCount(reader, field); + byte[] min = PointValues.getMinPackedValue(reader, field); + byte[] max = PointValues.getMaxPackedValue(reader, field); + return new FieldStats.Double(reader.maxDoc(),docCount, -1L, size, + DoublePoint.decodeDimension(min, 0), + DoublePoint.decodeDimension(max, 0)); + } + }, + BYTE("byte", NumericType.BYTE) { + @Override + Byte parse(Object value) { + if (value instanceof Byte) { + return (Byte) value; + } + if (value instanceof BytesRef) { + value = ((BytesRef) value).utf8ToString(); + } + return Byte.parseByte(value.toString()); + } + + @Override + Short parse(XContentParser parser, boolean coerce) throws IOException { + int value = parser.intValue(coerce); + if (value < Byte.MIN_VALUE || value > Byte.MAX_VALUE) { + throw new IllegalArgumentException("Value [" + value + "] is out of range for a byte"); + } + return (short) value; + } + + @Override + Query termQuery(String field, Object value) { + return INTEGER.termQuery(field, value); + } + + @Override + Query termsQuery(String field, List values) { + return INTEGER.termsQuery(field, values); + } + + @Override + Query rangeQuery(String field, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { + return INTEGER.rangeQuery(field, lowerTerm, upperTerm, includeLower, includeUpper); + } + + @Override + Query fuzzyQuery(String field, Object value, Fuzziness fuzziness) { + return INTEGER.fuzzyQuery(field, value, fuzziness); + } + + @Override + public List createFields(String name, Number value, boolean indexed, boolean docValued, boolean stored) { + return INTEGER.createFields(name, value, indexed, docValued, stored); + } + + @Override + FieldStats.Long stats(IndexReader reader, String field) throws IOException { + return (FieldStats.Long) INTEGER.stats(reader, field); + } + + @Override + Number valueForSearch(Number value) { + return value.byteValue(); + } + }, 
+ SHORT("short", NumericType.SHORT) { + @Override + Short parse(Object value) { + if (value instanceof Number) { + return ((Number) value).shortValue(); + } + if (value instanceof BytesRef) { + value = ((BytesRef) value).utf8ToString(); + } + return Short.parseShort(value.toString()); + } + + @Override + Short parse(XContentParser parser, boolean coerce) throws IOException { + int value = parser.intValue(coerce); + if (value < Short.MIN_VALUE || value > Short.MAX_VALUE) { + throw new IllegalArgumentException("Value [" + value + "] is out of range for a short"); + } + return (short) value; + } + + @Override + Query termQuery(String field, Object value) { + return INTEGER.termQuery(field, value); + } + + @Override + Query termsQuery(String field, List values) { + return INTEGER.termsQuery(field, values); + } + + @Override + Query rangeQuery(String field, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { + return INTEGER.rangeQuery(field, lowerTerm, upperTerm, includeLower, includeUpper); + } + + @Override + Query fuzzyQuery(String field, Object value, Fuzziness fuzziness) { + return INTEGER.fuzzyQuery(field, value, fuzziness); + } + + @Override + public List createFields(String name, Number value, boolean indexed, boolean docValued, boolean stored) { + return INTEGER.createFields(name, value, indexed, docValued, stored); + } + + @Override + FieldStats.Long stats(IndexReader reader, String field) throws IOException { + return (FieldStats.Long) INTEGER.stats(reader, field); + } + + @Override + Number valueForSearch(Number value) { + return value.shortValue(); + } + }, + INTEGER("integer", NumericType.INT) { + @Override + Integer parse(Object value) { + if (value instanceof Number) { + return ((Number) value).intValue(); + } + if (value instanceof BytesRef) { + value = ((BytesRef) value).utf8ToString(); + } + return Integer.parseInt(value.toString()); + } + + @Override + Integer parse(XContentParser parser, boolean coerce) throws 
IOException { + return parser.intValue(coerce); + } + + @Override + Query termQuery(String field, Object value) { + int v = parse(value); + return IntPoint.newExactQuery(field, v); + } + + @Override + Query termsQuery(String field, List values) { + int[] v = new int[values.size()]; + for (int i = 0; i < values.size(); ++i) { + v[i] = parse(values.get(i)); + } + return IntPoint.newSetQuery(field, v); + } + + @Override + Query rangeQuery(String field, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { + int l = Integer.MIN_VALUE; + int u = Integer.MAX_VALUE; + if (lowerTerm != null) { + l = parse(lowerTerm); + if (includeLower == false) { + if (l == Integer.MAX_VALUE) { + return new MatchNoDocsQuery(); + } + ++l; + } + } + if (upperTerm != null) { + u = parse(upperTerm); + if (includeUpper == false) { + if (u == Integer.MIN_VALUE) { + return new MatchNoDocsQuery(); + } + --u; + } + } + return IntPoint.newRangeQuery(field, l, u); + } + + @Override + Query fuzzyQuery(String field, Object value, Fuzziness fuzziness) { + int base = parse(value); + int delta = fuzziness.asInt(); + return rangeQuery(field, base - delta, base + delta, true, true); + } + + @Override + public List createFields(String name, Number value, boolean indexed, boolean docValued, boolean stored) { + List fields = new ArrayList<>(); + if (indexed) { + fields.add(new IntPoint(name, value.intValue())); + } + if (docValued) { + fields.add(new SortedNumericDocValuesField(name, value.intValue())); + } + if (stored) { + fields.add(new StoredField(name, value.intValue())); + } + return fields; + } + + @Override + FieldStats.Long stats(IndexReader reader, String field) throws IOException { + long size = PointValues.size(reader, field); + if (size == 0) { + return null; + } + int docCount = PointValues.getDocCount(reader, field); + byte[] min = PointValues.getMinPackedValue(reader, field); + byte[] max = PointValues.getMaxPackedValue(reader, field); + return new 
FieldStats.Long(reader.maxDoc(),docCount, -1L, size, + IntPoint.decodeDimension(min, 0), + IntPoint.decodeDimension(max, 0)); + } + }, + LONG("long", NumericType.LONG) { + @Override + Long parse(Object value) { + if (value instanceof Number) { + return ((Number) value).longValue(); + } + if (value instanceof BytesRef) { + value = ((BytesRef) value).utf8ToString(); + } + return Long.parseLong(value.toString()); + } + + @Override + Long parse(XContentParser parser, boolean coerce) throws IOException { + return parser.longValue(coerce); + } + + @Override + Query termQuery(String field, Object value) { + long v = parse(value); + return LongPoint.newExactQuery(field, v); + } + + @Override + Query termsQuery(String field, List values) { + long[] v = new long[values.size()]; + for (int i = 0; i < values.size(); ++i) { + v[i] = parse(values.get(i)); + } + return LongPoint.newSetQuery(field, v); + } + + @Override + Query rangeQuery(String field, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { + long l = Long.MIN_VALUE; + long u = Long.MAX_VALUE; + if (lowerTerm != null) { + l = parse(lowerTerm); + if (includeLower == false) { + if (l == Long.MAX_VALUE) { + return new MatchNoDocsQuery(); + } + ++l; + } + } + if (upperTerm != null) { + u = parse(upperTerm); + if (includeUpper == false) { + if (u == Long.MIN_VALUE) { + return new MatchNoDocsQuery(); + } + --u; + } + } + return LongPoint.newRangeQuery(field, l, u); + } + + @Override + Query fuzzyQuery(String field, Object value, Fuzziness fuzziness) { + long base = parse(value); + long delta = fuzziness.asLong(); + return rangeQuery(field, base - delta, base + delta, true, true); + } + + @Override + public List createFields(String name, Number value, boolean indexed, boolean docValued, boolean stored) { + List fields = new ArrayList<>(); + if (indexed) { + fields.add(new LongPoint(name, value.longValue())); + } + if (docValued) { + fields.add(new SortedNumericDocValuesField(name, 
value.longValue())); + } + if (stored) { + fields.add(new StoredField(name, value.longValue())); + } + return fields; + } + + @Override + FieldStats.Long stats(IndexReader reader, String field) throws IOException { + long size = PointValues.size(reader, field); + if (size == 0) { + return null; + } + int docCount = PointValues.getDocCount(reader, field); + byte[] min = PointValues.getMinPackedValue(reader, field); + byte[] max = PointValues.getMaxPackedValue(reader, field); + return new FieldStats.Long(reader.maxDoc(),docCount, -1L, size, + LongPoint.decodeDimension(min, 0), + LongPoint.decodeDimension(max, 0)); + } + }; + + private final String name; + private final NumericType numericType; + + NumberType(String name, NumericType numericType) { + this.name = name; + this.numericType = numericType; + } + + /** Get the associated type name. */ + public final String typeName() { + return name; + } + /** Get the associated numerit type */ + final NumericType numericType() { + return numericType; + } + abstract Query termQuery(String field, Object value); + abstract Query termsQuery(String field, List values); + abstract Query rangeQuery(String field, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper); + abstract Query fuzzyQuery(String field, Object value, Fuzziness fuzziness); + abstract Number parse(XContentParser parser, boolean coerce) throws IOException; + abstract Number parse(Object value); + public abstract List createFields(String name, Number value, boolean indexed, boolean docValued, boolean stored); + abstract FieldStats stats(IndexReader reader, String field) throws IOException; + Number valueForSearch(Number value) { + return value; + } } - public static abstract class NumberFieldType extends MappedFieldType { + public static final class NumberFieldType extends MappedFieldType { - public NumberFieldType(LegacyNumericType numericType) { + NumberType type; + + public NumberFieldType(NumberType type) { + super(); + this.type = 
Objects.requireNonNull(type); setTokenized(false); + setHasDocValues(true); setOmitNorms(true); - setIndexOptions(IndexOptions.DOCS); - setStoreTermVectors(false); - setNumericType(numericType); } - protected NumberFieldType(NumberFieldType ref) { - super(ref); + NumberFieldType(NumberFieldType other) { + super(other); + this.type = other.type; + } + + @Override + public MappedFieldType clone() { + return new NumberFieldType(this); } @Override - public void checkCompatibility(MappedFieldType other, - List conflicts, boolean strict) { - super.checkCompatibility(other, conflicts, strict); - if (numericPrecisionStep() != other.numericPrecisionStep()) { - conflicts.add("mapper [" + name() + "] has different [precision_step] values"); + public String typeName() { + return type.name; + } + + @Override + public Query termQuery(Object value, QueryShardContext context) { + Query query = type.termQuery(name(), value); + if (boost() != 1f) { + query = new BoostQuery(query, boost()); + } + return query; + } + + @Override + public Query termsQuery(List values, QueryShardContext context) { + Query query = type.termsQuery(name(), values); + if (boost() != 1f) { + query = new BoostQuery(query, boost()); + } + return query; + } + + @Override + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { + Query query = type.rangeQuery(name(), lowerTerm, upperTerm, includeLower, includeUpper); + if (boost() != 1f) { + query = new BoostQuery(query, boost()); } + return query; + } + + @Override + public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + return type.fuzzyQuery(name(), value, fuzziness); } - public abstract NumberFieldType clone(); + @Override + public FieldStats stats(IndexReader reader) throws IOException { + return type.stats(reader, name()); + } + + @Override + public IndexFieldData.Builder fielddataBuilder() { + failIfNoDocValues(); + return new 
DocValuesIndexFieldData.Builder().numericType(type.numericType()); + } @Override - public abstract Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions); + public Object valueForSearch(Object value) { + if (value == null) { + return null; + } + return type.valueForSearch((Number) value); + } @Override - public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) { + public DocValueFormat docValueFormat(String format, DateTimeZone timeZone) { if (timeZone != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones"); + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + + "] does not support custom time zones"); } if (format == null) { return DocValueFormat.RAW; @@ -173,21 +773,36 @@ public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZ } } - protected Boolean includeInAll; + private Boolean includeInAll; - protected Explicit ignoreMalformed; + private Explicit ignoreMalformed; - protected Explicit coerce; + private Explicit coerce; - protected NumberFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Explicit ignoreMalformed, Explicit coerce, Settings indexSettings, - MultiFields multiFields, CopyTo copyTo) { + private NumberFieldMapper( + String simpleName, + MappedFieldType fieldType, + MappedFieldType defaultFieldType, + Explicit ignoreMalformed, + Explicit coerce, + Settings indexSettings, + MultiFields multiFields, + CopyTo copyTo) { super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); - assert fieldType.tokenized() == false; this.ignoreMalformed = ignoreMalformed; this.coerce = coerce; } + @Override + public NumberFieldType fieldType() { + return (NumberFieldType) super.fieldType(); + } + + @Override + protected String contentType() { + return fieldType.typeName(); + } + 
@Override protected NumberFieldMapper clone() { return (NumberFieldMapper) super.clone(); @@ -228,192 +843,67 @@ public Mapper unsetIncludeInAll() { @Override protected void parseCreateField(ParseContext context, List fields) throws IOException { - RuntimeException e = null; - try { - innerParseCreateField(context, fields); - } catch (IllegalArgumentException e1) { - e = e1; - } catch (MapperParsingException e2) { - e = e2; - } - - if (e != null && !ignoreMalformed.value()) { - throw e; + XContentParser parser = context.parser(); + Object value; + Number numericValue = null; + if (context.externalValueSet()) { + value = context.externalValue(); + } else if (parser.currentToken() == Token.VALUE_NULL) { + value = null; + } else if (coerce.value() + && parser.currentToken() == Token.VALUE_STRING + && parser.textLength() == 0) { + value = null; + } else { + value = parser.textOrNull(); + if (value != null) { + try { + numericValue = fieldType().type.parse(parser, coerce.value()); + } catch (IllegalArgumentException e) { + if (ignoreMalformed.value()) { + return; + } else { + throw e; + } + } + } } - } - - protected abstract void innerParseCreateField(ParseContext context, List fields) throws IOException; - - protected final void addDocValue(ParseContext context, List fields, long value) { - fields.add(new SortedNumericDocValuesField(fieldType().name(), value)); - } - /** - * Converts an object value into a double - */ - public static double parseDoubleValue(Object value) { - if (value instanceof Number) { - return ((Number) value).doubleValue(); + if (value == null) { + value = fieldType().nullValue(); } - if (value instanceof BytesRef) { - return Double.parseDouble(((BytesRef) value).utf8ToString()); + if (value == null) { + return; } - return Double.parseDouble(value.toString()); - } - - /** - * Converts an object value into a long - */ - public static long parseLongValue(Object value) { - if (value instanceof Number) { - return ((Number) value).longValue(); + if 
(numericValue == null) { + numericValue = fieldType().type.parse(value); } - if (value instanceof BytesRef) { - return Long.parseLong(((BytesRef) value).utf8ToString()); + if (context.includeInAll(includeInAll, this)) { + context.allEntries().addText(fieldType().name(), value.toString(), fieldType().boost()); } - return Long.parseLong(value.toString()); + boolean indexed = fieldType().indexOptions() != IndexOptions.NONE; + boolean docValued = fieldType().hasDocValues(); + boolean stored = fieldType().stored(); + fields.addAll(fieldType().type.createFields(fieldType().name(), numericValue, indexed, docValued, stored)); } @Override protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { super.doMerge(mergeWith, updateAllTypes); - NumberFieldMapper nfmMergeWith = (NumberFieldMapper) mergeWith; - - this.includeInAll = nfmMergeWith.includeInAll; - if (nfmMergeWith.ignoreMalformed.explicit()) { - this.ignoreMalformed = nfmMergeWith.ignoreMalformed; + NumberFieldMapper other = (NumberFieldMapper) mergeWith; + this.includeInAll = other.includeInAll; + if (other.ignoreMalformed.explicit()) { + this.ignoreMalformed = other.ignoreMalformed; } - if (nfmMergeWith.coerce.explicit()) { - this.coerce = nfmMergeWith.coerce; + if (other.coerce.explicit()) { + this.coerce = other.coerce; } } - // used to we can use a numeric field in a document that is then parsed twice! 
- public abstract static class CustomNumericField extends Field { - - private ThreadLocal tokenStream = new ThreadLocal() { - @Override - protected LegacyNumericTokenStream initialValue() { - return new LegacyNumericTokenStream(fieldType().numericPrecisionStep()); - } - }; - - private static ThreadLocal tokenStream4 = new ThreadLocal() { - @Override - protected LegacyNumericTokenStream initialValue() { - return new LegacyNumericTokenStream(4); - } - }; - - private static ThreadLocal tokenStream8 = new ThreadLocal() { - @Override - protected LegacyNumericTokenStream initialValue() { - return new LegacyNumericTokenStream(8); - } - }; - - private static ThreadLocal tokenStream16 = new ThreadLocal() { - @Override - protected LegacyNumericTokenStream initialValue() { - return new LegacyNumericTokenStream(16); - } - }; - - private static ThreadLocal tokenStreamMax = new ThreadLocal() { - @Override - protected LegacyNumericTokenStream initialValue() { - return new LegacyNumericTokenStream(Integer.MAX_VALUE); - } - }; - - public CustomNumericField(Number value, MappedFieldType fieldType) { - super(fieldType.name(), fieldType); - if (value != null) { - this.fieldsData = value; - } - } - - protected LegacyNumericTokenStream getCachedStream() { - if (fieldType().numericPrecisionStep() == 4) { - return tokenStream4.get(); - } else if (fieldType().numericPrecisionStep() == 8) { - return tokenStream8.get(); - } else if (fieldType().numericPrecisionStep() == 16) { - return tokenStream16.get(); - } else if (fieldType().numericPrecisionStep() == Integer.MAX_VALUE) { - return tokenStreamMax.get(); - } - return tokenStream.get(); - } - - @Override - public String stringValue() { - return null; - } - - @Override - public Reader readerValue() { - return null; - } - - public abstract String numericAsString(); - } - - public static abstract class CustomNumericDocValuesField implements IndexableField { - - public static final FieldType TYPE = new FieldType(); - static { - 
TYPE.setDocValuesType(DocValuesType.BINARY); - TYPE.freeze(); - } - - private final String name; - - public CustomNumericDocValuesField(String name) { - this.name = name; - } - - @Override - public String name() { - return name; - } - - @Override - public IndexableFieldType fieldType() { - return TYPE; - } - - @Override - public float boost() { - return 1f; - } - - @Override - public String stringValue() { - return null; - } - - @Override - public Reader readerValue() { - return null; - } - - @Override - public Number numericValue() { - return null; - } - - @Override - public TokenStream tokenStream(Analyzer analyzer, TokenStream reuse) { - return null; - } - - } - @Override protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); @@ -424,5 +914,10 @@ protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, if (includeDefaults || coerce.explicit()) { builder.field("coerce", coerce.value()); } + if (includeInAll != null) { + builder.field("include_in_all", includeInAll); + } else if (includeDefaults) { + builder.field("include_in_all", false); + } } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java index fd9bd27160dad..c2370a4f96d86 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java @@ -22,9 +22,13 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.RegexpQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; 
+import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.ESLogger; @@ -44,6 +48,7 @@ import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.internal.AllFieldMapper; +import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; import java.util.Arrays; @@ -55,7 +60,6 @@ import java.util.Set; import static org.apache.lucene.index.IndexOptions.NONE; -import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField; import static org.elasticsearch.index.mapper.core.TypeParsers.parseTextField; public class StringFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll { @@ -470,6 +474,15 @@ public IndexFieldData.Builder fielddataBuilder() { + "use significant memory."); } } + + @Override + public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) { + RegexpQuery query = new RegexpQuery(new Term(name(), indexedValueForSearch(value)), flags, maxDeterminizedStates); + if (method != null) { + query.setRewriteMethod(method); + } + return query; + } } private Boolean includeInAll; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TextFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TextFieldMapper.java index 91cf363ce3426..bb766262e507f 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/TextFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TextFieldMapper.java @@ -21,7 +21,11 @@ import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; +import 
org.apache.lucene.search.RegexpQuery; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -36,6 +40,7 @@ import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.internal.AllFieldMapper; +import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; import java.util.Iterator; @@ -295,6 +300,16 @@ public IndexFieldData.Builder fielddataBuilder() { } return new PagedBytesIndexFieldData.Builder(fielddataMinFrequency, fielddataMaxFrequency, fielddataMinSegmentSize); } + + @Override + public Query regexpQuery(String value, int flags, int maxDeterminizedStates, + @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) { + RegexpQuery query = new RegexpQuery(new Term(name(), indexedValueForSearch(value)), flags, maxDeterminizedStates); + if (method != null) { + query.setRewriteMethod(method); + } + return query; + } } private Boolean includeInAll; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java index 3374444175937..41581cc48df17 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java @@ -23,7 +23,8 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.document.Field; -import org.elasticsearch.common.Explicit; +import org.apache.lucene.index.IndexOptions; +import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ 
-33,33 +34,31 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.mapper.core.StringFieldMapper.ValueAndBoost; import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.Map; -import static org.apache.lucene.index.IndexOptions.NONE; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeIntegerValue; -import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField; +import static org.elasticsearch.index.mapper.core.TypeParsers.parseField; /** * A {@link FieldMapper} that takes a string and writes a count of the tokens in that string - * to the index. In most ways the mapper acts just like an {@link IntegerFieldMapper}. + * to the index. In most ways the mapper acts just like an {@link LegacyIntegerFieldMapper}. */ -public class TokenCountFieldMapper extends IntegerFieldMapper { +public class TokenCountFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "token_count"; - public static class Defaults extends IntegerFieldMapper.Defaults { - + public static class Defaults { + public static final MappedFieldType FIELD_TYPE = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER); } - public static class Builder extends NumberFieldMapper.Builder { + public static class Builder extends FieldMapper.Builder { private NamedAnalyzer analyzer; public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT); + super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); builder = this; } @@ -75,15 +74,8 @@ public NamedAnalyzer analyzer() { @Override public TokenCountFieldMapper build(BuilderContext context) { setupFieldType(context); - TokenCountFieldMapper fieldMapper = new TokenCountFieldMapper(name, fieldType, defaultFieldType, - ignoreMalformed(context), coerce(context), 
context.indexSettings(), - analyzer, multiFieldsBuilder.build(this, context), copyTo); - return (TokenCountFieldMapper) fieldMapper.includeInAll(includeInAll); - } - - @Override - protected int maxPrecisionStep() { - return 32; + return new TokenCountFieldMapper(name, fieldType, defaultFieldType, + context.indexSettings(), analyzer, multiFieldsBuilder.build(this, context), copyTo); } } @@ -91,6 +83,9 @@ public static class TypeParser implements Mapper.TypeParser { @Override @SuppressWarnings("unchecked") public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { + if (parserContext.indexVersionCreated().before(Version.V_5_0_0)) { + return new LegacyTokenCountFieldMapper.TypeParser().parse(name, node, parserContext); + } TokenCountFieldMapper.Builder builder = new TokenCountFieldMapper.Builder(name); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); @@ -108,7 +103,7 @@ public Mapper.Builder parse(String name, Map node, ParserContext iterator.remove(); } } - parseNumberField(builder, name, node, parserContext); + parseField(builder, name, node, parserContext); if (builder.analyzer() == null) { throw new MapperParsingException("Analyzer must be set for field [" + name + "] but wasn't."); } @@ -118,28 +113,32 @@ public Mapper.Builder parse(String name, Map node, ParserContext private NamedAnalyzer analyzer; - protected TokenCountFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Explicit ignoreMalformed, - Explicit coerce, Settings indexSettings, NamedAnalyzer analyzer, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, indexSettings, multiFields, copyTo); + protected TokenCountFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, + Settings indexSettings, NamedAnalyzer analyzer, MultiFields multiFields, CopyTo 
copyTo) { + super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); this.analyzer = analyzer; } @Override protected void parseCreateField(ParseContext context, List fields) throws IOException { - ValueAndBoost valueAndBoost = StringFieldMapper.parseCreateFieldForString(context, null /* Out null value is an int so we convert*/, fieldType().boost()); - if (valueAndBoost.value() == null && fieldType().nullValue() == null) { - return; + final String value; + if (context.externalValueSet()) { + value = context.externalValue().toString(); + } else { + value = context.parser().textOrNull(); } - if (fieldType().indexOptions() != NONE || fieldType().stored() || fieldType().hasDocValues()) { - int count; - if (valueAndBoost.value() == null) { - count = fieldType().nullValue(); - } else { - count = countPositions(analyzer, simpleName(), valueAndBoost.value()); - } - addIntegerFields(context, fields, count, valueAndBoost.boost()); + final int tokenCount; + if (value == null) { + tokenCount = (Integer) fieldType().nullValue(); + } else { + tokenCount = countPositions(analyzer, name(), value); } + + boolean indexed = fieldType().indexOptions() != IndexOptions.NONE; + boolean docValued = fieldType().hasDocValues(); + boolean stored = fieldType().stored(); + fields.addAll(NumberFieldMapper.NumberType.INTEGER.createFields(fieldType().name(), tokenCount, indexed, docValued, stored)); } /** @@ -186,7 +185,6 @@ protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { @Override protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); - builder.field("analyzer", analyzer()); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java index bc7c97bc4ff24..5e140399addb1 100644 --- 
a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java @@ -81,7 +81,8 @@ public static boolean nodeBooleanValue(String name, Object node, Mapper.TypePars } } - public static void parseNumberField(NumberFieldMapper.Builder builder, String name, Map numberNode, Mapper.TypeParser.ParserContext parserContext) { + @Deprecated // for legacy ints only + public static void parseNumberField(LegacyNumberFieldMapper.Builder builder, String name, Map numberNode, Mapper.TypeParser.ParserContext parserContext) { parseField(builder, name, numberNode, parserContext); for (Iterator> iterator = numberNode.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java index 7284d437dbe19..1d6e971efffa6 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java @@ -43,8 +43,9 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.mapper.core.DoubleFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyDoubleFieldMapper; import org.elasticsearch.index.mapper.core.KeywordFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyNumberFieldMapper; import org.elasticsearch.index.mapper.core.NumberFieldMapper; import org.elasticsearch.index.mapper.object.ArrayValueMapperParser; import org.elasticsearch.search.DocValueFormat; @@ -145,25 +146,32 @@ protected Explicit ignoreMalformed(BuilderContext context) { } public abstract Y build(BuilderContext context, String simpleName, MappedFieldType fieldType, MappedFieldType 
defaultFieldType, - Settings indexSettings, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, + Settings indexSettings, FieldMapper latMapper, FieldMapper lonMapper, KeywordFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo); public Y build(Mapper.BuilderContext context) { GeoPointFieldType geoPointFieldType = (GeoPointFieldType)fieldType; - DoubleFieldMapper latMapper = null; - DoubleFieldMapper lonMapper = null; + FieldMapper latMapper = null; + FieldMapper lonMapper = null; context.path().add(name); if (enableLatLon) { - NumberFieldMapper.Builder latMapperBuilder = new DoubleFieldMapper.Builder(Names.LAT).includeInAll(false); - NumberFieldMapper.Builder lonMapperBuilder = new DoubleFieldMapper.Builder(Names.LON).includeInAll(false); - if (precisionStep != null) { - latMapperBuilder.precisionStep(precisionStep); - lonMapperBuilder.precisionStep(precisionStep); + if (context.indexCreatedVersion().before(Version.V_5_0_0)) { + LegacyNumberFieldMapper.Builder latMapperBuilder = new LegacyDoubleFieldMapper.Builder(Names.LAT).includeInAll(false); + LegacyNumberFieldMapper.Builder lonMapperBuilder = new LegacyDoubleFieldMapper.Builder(Names.LON).includeInAll(false); + if (precisionStep != null) { + latMapperBuilder.precisionStep(precisionStep); + lonMapperBuilder.precisionStep(precisionStep); + } + latMapper = (LegacyDoubleFieldMapper) latMapperBuilder.includeInAll(false).store(fieldType.stored()).docValues(false).build(context); + lonMapper = (LegacyDoubleFieldMapper) lonMapperBuilder.includeInAll(false).store(fieldType.stored()).docValues(false).build(context); + } else { + latMapper = new NumberFieldMapper.Builder(Names.LAT, NumberFieldMapper.NumberType.DOUBLE) + .includeInAll(false).store(fieldType.stored()).docValues(false).build(context); + lonMapper = new NumberFieldMapper.Builder(Names.LON, NumberFieldMapper.NumberType.DOUBLE) + .includeInAll(false).store(fieldType.stored()).docValues(false).build(context); } - 
latMapper = (DoubleFieldMapper) latMapperBuilder.includeInAll(false).store(fieldType.stored()).docValues(false).build(context); - lonMapper = (DoubleFieldMapper) lonMapperBuilder.includeInAll(false).store(fieldType.stored()).docValues(false).build(context); geoPointFieldType.setLatLonEnabled(latMapper.fieldType(), lonMapper.fieldType()); } KeywordFieldMapper geoHashMapper = null; @@ -361,16 +369,16 @@ public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZ } } - protected DoubleFieldMapper latMapper; + protected FieldMapper latMapper; - protected DoubleFieldMapper lonMapper; + protected FieldMapper lonMapper; protected KeywordFieldMapper geoHashMapper; protected Explicit ignoreMalformed; protected BaseGeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, - DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, KeywordFieldMapper geoHashMapper, + FieldMapper latMapper, FieldMapper lonMapper, KeywordFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo) { super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); this.latMapper = latMapper; @@ -542,8 +550,8 @@ protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, public FieldMapper updateFieldType(Map fullNameToFieldType) { BaseGeoPointFieldMapper updated = (BaseGeoPointFieldMapper) super.updateFieldType(fullNameToFieldType); KeywordFieldMapper geoUpdated = geoHashMapper == null ? null : (KeywordFieldMapper) geoHashMapper.updateFieldType(fullNameToFieldType); - DoubleFieldMapper latUpdated = latMapper == null ? null : (DoubleFieldMapper) latMapper.updateFieldType(fullNameToFieldType); - DoubleFieldMapper lonUpdated = lonMapper == null ? null : (DoubleFieldMapper) lonMapper.updateFieldType(fullNameToFieldType); + FieldMapper latUpdated = latMapper == null ? 
null : latMapper.updateFieldType(fullNameToFieldType); + FieldMapper lonUpdated = lonMapper == null ? null : lonMapper.updateFieldType(fullNameToFieldType); if (updated == this && geoUpdated == geoHashMapper && latUpdated == latMapper diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java index f46addcabc37d..4623b3f950866 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java @@ -28,11 +28,11 @@ import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.mapper.core.DoubleFieldMapper; import org.elasticsearch.index.mapper.core.KeywordFieldMapper; import java.io.IOException; @@ -78,8 +78,8 @@ public Builder(String name) { @Override public GeoPointFieldMapper build(BuilderContext context, String simpleName, MappedFieldType fieldType, - MappedFieldType defaultFieldType, Settings indexSettings, DoubleFieldMapper latMapper, - DoubleFieldMapper lonMapper, KeywordFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, + MappedFieldType defaultFieldType, Settings indexSettings, FieldMapper latMapper, + FieldMapper lonMapper, KeywordFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo) { fieldType.setTokenized(false); if (context.indexCreatedVersion().before(Version.V_2_3_0)) { @@ -109,7 +109,7 @@ public static class TypeParser extends BaseGeoPointFieldMapper.TypeParser { } public 
GeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, - DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, + FieldMapper latMapper, FieldMapper lonMapper, KeywordFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo) { super(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper, geoHashMapper, multiFields, ignoreMalformed, copyTo); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java index 7e952c2bb2ceb..bfaad41cbe31b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java @@ -38,9 +38,9 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.mapper.core.DoubleFieldMapper; +import org.elasticsearch.index.mapper.CustomDocValuesField; +import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.core.KeywordFieldMapper; -import org.elasticsearch.index.mapper.core.NumberFieldMapper.CustomNumericDocValuesField; import org.elasticsearch.index.mapper.object.ArrayValueMapperParser; import java.io.IOException; @@ -108,8 +108,8 @@ protected Explicit coerce(BuilderContext context) { @Override public GeoPointFieldMapperLegacy build(BuilderContext context, String simpleName, MappedFieldType fieldType, - MappedFieldType defaultFieldType, Settings indexSettings, DoubleFieldMapper latMapper, - DoubleFieldMapper lonMapper, KeywordFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, + MappedFieldType defaultFieldType, Settings indexSettings, FieldMapper latMapper, + FieldMapper lonMapper, 
KeywordFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo) { fieldType.setTokenized(false); setupFieldType(context); @@ -266,7 +266,7 @@ public GeoPoint decode(long latBits, long lonBits, GeoPoint out) { protected Explicit coerce; public GeoPointFieldMapperLegacy(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, - DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, + FieldMapper latMapper, FieldMapper lonMapper, KeywordFieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, Explicit coerce, CopyTo copyTo) { super(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper, geoHashMapper, multiFields, @@ -335,7 +335,7 @@ protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, } } - public static class CustomGeoPointDocValuesField extends CustomNumericDocValuesField { + public static class CustomGeoPointDocValuesField extends CustomDocValuesField { private final ObjectHashSet points; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java index 13dd7205e37ae..76fc4544028de 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java @@ -34,7 +34,7 @@ import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.SourceToParse; -import org.elasticsearch.index.mapper.core.LongFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyLongFieldMapper; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; @@ -51,7 +51,7 @@ public class TTLFieldMapper extends MetadataFieldMapper { public static final String NAME = "_ttl"; public static final String CONTENT_TYPE = 
"_ttl"; - public static class Defaults extends LongFieldMapper.Defaults { + public static class Defaults extends LegacyLongFieldMapper.Defaults { public static final String NAME = TTLFieldMapper.CONTENT_TYPE; public static final TTLFieldType TTL_FIELD_TYPE = new TTLFieldType(); @@ -127,7 +127,7 @@ public MetadataFieldMapper getDefault(Settings indexSettings, MappedFieldType fi } } - public static final class TTLFieldType extends LongFieldMapper.LongFieldType { + public static final class TTLFieldType extends LegacyLongFieldMapper.LongFieldType { public TTLFieldType() { } @@ -226,7 +226,7 @@ protected void parseCreateField(ParseContext context, List fields) throws throw new AlreadyExpiredException(context.index(), context.type(), context.id(), timestamp, ttl, now); } // the expiration timestamp (timestamp + ttl) is set as field - fields.add(new LongFieldMapper.CustomLongNumericField(expire, fieldType())); + fields.add(new LegacyLongFieldMapper.CustomLongNumericField(expire, fieldType())); } } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java index 0e277d95543d6..b563e8fc64902 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java @@ -34,8 +34,8 @@ import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.mapper.core.DateFieldMapper; -import org.elasticsearch.index.mapper.core.LongFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyDateFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyLongFieldMapper; import java.io.IOException; import java.util.ArrayList; @@ -52,7 +52,7 @@ public class TimestampFieldMapper extends 
MetadataFieldMapper { public static final String CONTENT_TYPE = "_timestamp"; public static final String DEFAULT_DATE_TIME_FORMAT = "epoch_millis||strictDateOptionalTime"; - public static class Defaults extends DateFieldMapper.Defaults { + public static class Defaults extends LegacyDateFieldMapper.Defaults { public static final String NAME = "_timestamp"; // TODO: this should be removed @@ -86,8 +86,8 @@ public Builder(MappedFieldType existing, Settings settings) { } @Override - public DateFieldMapper.DateFieldType fieldType() { - return (DateFieldMapper.DateFieldType)fieldType; + public LegacyDateFieldMapper.DateFieldType fieldType() { + return (LegacyDateFieldMapper.DateFieldType)fieldType; } public Builder enabled(EnabledAttributeMapper enabledState) { @@ -169,7 +169,7 @@ public MetadataFieldMapper getDefault(Settings indexSettings, MappedFieldType fi } } - public static final class TimestampFieldType extends DateFieldMapper.DateFieldType { + public static final class TimestampFieldType extends LegacyDateFieldMapper.DateFieldType { public TimestampFieldType() {} @@ -242,7 +242,7 @@ protected void parseCreateField(ParseContext context, List fields) throws if (enabledState.enabled) { long timestamp = context.sourceToParse().timestamp(); if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - fields.add(new LongFieldMapper.CustomLongNumericField(timestamp, fieldType())); + fields.add(new LegacyLongFieldMapper.CustomLongNumericField(timestamp, fieldType())); } if (fieldType().hasDocValues()) { fields.add(new NumericDocValuesField(fieldType().name(), timestamp)); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java index b973c8f60ee4e..17f204cb82079 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java @@ -19,136 +19,109 @@ package 
org.elasticsearch.index.mapper.ip; -import org.apache.lucene.analysis.LegacyNumericTokenStream; import org.apache.lucene.document.Field; +import org.apache.lucene.document.InetAddressPoint; +import org.apache.lucene.document.SortedSetDocValuesField; +import org.apache.lucene.document.StoredField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.Terms; -import org.apache.lucene.search.LegacyNumericRangeQuery; +import org.apache.lucene.index.PointValues; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.LegacyNumericUtils; import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.Numbers; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.network.Cidrs; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; +import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.mapper.core.LongFieldMapper; -import org.elasticsearch.index.mapper.core.LongFieldMapper.CustomLongNumericField; -import 
org.elasticsearch.index.mapper.core.NumberFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyNumberFieldMapper.Defaults; +import org.elasticsearch.index.mapper.core.TypeParsers; +import org.elasticsearch.index.mapper.internal.AllFieldMapper; +import org.elasticsearch.index.mapper.ip.LegacyIpFieldMapper; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.bucket.range.ipv4.InternalIPv4Range; import org.joda.time.DateTimeZone; import java.io.IOException; +import java.net.InetAddress; +import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.regex.Pattern; -import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField; - -/** - * - */ -public class IpFieldMapper extends NumberFieldMapper { +/** A {@link FieldMapper} for ip addresses. */ +public class IpFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll { public static final String CONTENT_TYPE = "ip"; - public static final long MAX_IP = 4294967296L; - - public static String longToIp(long longIp) { - int octet3 = (int) ((longIp >> 24) % 256); - int octet2 = (int) ((longIp >> 16) % 256); - int octet1 = (int) ((longIp >> 8) % 256); - int octet0 = (int) ((longIp) % 256); - return octet3 + "." + octet2 + "." + octet1 + "." 
+ octet0; - } - - private static final Pattern pattern = Pattern.compile("\\."); - - public static long ipToLong(String ip) { - try { - if (!InetAddresses.isInetAddress(ip)) { - throw new IllegalArgumentException("failed to parse ip [" + ip + "], not a valid ip address"); - } - String[] octets = pattern.split(ip); - if (octets.length != 4) { - throw new IllegalArgumentException("failed to parse ip [" + ip + "], not a valid ipv4 address (4 dots)"); - } - return (Long.parseLong(octets[0]) << 24) + (Integer.parseInt(octets[1]) << 16) + - (Integer.parseInt(octets[2]) << 8) + Integer.parseInt(octets[3]); - } catch (Exception e) { - if (e instanceof IllegalArgumentException) { - throw (IllegalArgumentException) e; - } - throw new IllegalArgumentException("failed to parse ip [" + ip + "]", e); - } - } - public static class Defaults extends NumberFieldMapper.Defaults { - public static final String NULL_VALUE = null; + public static class Builder extends FieldMapper.Builder { - public static final MappedFieldType FIELD_TYPE = new IpFieldType(); + private Boolean ignoreMalformed; - static { - FIELD_TYPE.freeze(); + public Builder(String name) { + super(name, new IpFieldType(), new IpFieldType()); + builder = this; } - } - public static class Builder extends NumberFieldMapper.Builder { - - protected String nullValue = Defaults.NULL_VALUE; + public Builder ignoreMalformed(boolean ignoreMalformed) { + this.ignoreMalformed = ignoreMalformed; + return builder; + } - public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); - builder = this; + protected Explicit ignoreMalformed(BuilderContext context) { + if (ignoreMalformed != null) { + return new Explicit<>(ignoreMalformed, true); + } + if (context.indexSettings() != null) { + return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false); + } + return Defaults.IGNORE_MALFORMED; } @Override public IpFieldMapper build(BuilderContext context) { setupFieldType(context); - 
IpFieldMapper fieldMapper = new IpFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), + IpFieldMapper fieldMapper = new IpFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); return (IpFieldMapper) fieldMapper.includeInAll(includeInAll); } - - @Override - protected int maxPrecisionStep() { - return 64; - } } public static class TypeParser implements Mapper.TypeParser { + + public TypeParser() { + } + @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - IpFieldMapper.Builder builder = new Builder(name); - parseNumberField(builder, name, node, parserContext); + public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { + if (parserContext.indexVersionCreated().before(Version.V_5_0_0)) { + return new LegacyIpFieldMapper.TypeParser().parse(name, node, parserContext); + } + Builder builder = new Builder(name); + TypeParsers.parseField(builder, name, node, parserContext); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); - String propName = Strings.toUnderscoreCase(entry.getKey()); + String propName = entry.getKey(); Object propNode = entry.getValue(); if (propName.equals("null_value")) { if (propNode == null) { throw new MapperParsingException("Property [null_value] cannot be null."); } - builder.nullValue(propNode.toString()); + builder.nullValue(InetAddresses.forString(propNode.toString())); + iterator.remove(); + } else if (propName.equals("ignore_malformed")) { + builder.ignoreMalformed(TypeParsers.nodeBooleanValue("ignore_malformed", propNode, parserContext)); + iterator.remove(); + } else if (TypeParsers.parseMultiField(builder, name, parserContext, propName, propNode)) { iterator.remove(); } } @@ -156,17 +129,20 @@ public Mapper.Builder 
parse(String name, Map node, ParserContext } } - public static final class IpFieldType extends LongFieldMapper.LongFieldType { + public static final class IpFieldType extends MappedFieldType { - public IpFieldType() { + IpFieldType() { + super(); + setTokenized(false); + setHasDocValues(true); } - protected IpFieldType(IpFieldType ref) { - super(ref); + IpFieldType(IpFieldType other) { + super(other); } @Override - public NumberFieldType clone() { + public MappedFieldType clone() { return new IpFieldType(this); } @@ -175,95 +151,100 @@ public String typeName() { return CONTENT_TYPE; } - /** - * IPs should return as a string. - */ - @Override - public Object valueForSearch(Object value) { - Long val = (Long) value; - if (val == null) { - return null; + private InetAddress parse(Object value) { + if (value instanceof InetAddress) { + return (InetAddress) value; + } else { + if (value instanceof BytesRef) { + value = ((BytesRef) value).utf8ToString(); + } + return InetAddresses.forString(value.toString()); } - return longToIp(val); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - LegacyNumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); } @Override public Query termQuery(Object value, @Nullable QueryShardContext context) { - if (value != null) { - String term; + if (value instanceof InetAddress) { + return InetAddressPoint.newExactQuery(name(), (InetAddress) value); + } else { if (value instanceof BytesRef) { - term = ((BytesRef) value).utf8ToString(); - } else { - term = value.toString(); + value = ((BytesRef) value).utf8ToString(); } - long[] fromTo; - // assume that the term is either a CIDR range or the - // term is a single IPv4 address; if either of these - // assumptions is wrong, the CIDR parsing will fail - // anyway, and that is okay + String term = value.toString(); if (term.contains("/")) { - // treat the term as 
if it is in CIDR notation - fromTo = Cidrs.cidrMaskToMinMax(term); - } else { - // treat the term as if it is a single IPv4, and - // apply a CIDR mask equivalent to the host route - fromTo = Cidrs.cidrMaskToMinMax(term + "/32"); - } - if (fromTo != null) { - return rangeQuery(fromTo[0] == 0 ? null : fromTo[0], - fromTo[1] == InternalIPv4Range.MAX_IP ? null : fromTo[1], true, false); + String[] fields = term.split("/"); + if (fields.length == 2) { + InetAddress address = InetAddresses.forString(fields[0]); + int prefixLength = Integer.parseInt(fields[1]); + return InetAddressPoint.newPrefixQuery(name(), address, prefixLength); + } else { + throw new IllegalArgumentException("Expected [ip/prefix] but was [" + term + "]"); + } } + InetAddress address = InetAddresses.forString(term); + return InetAddressPoint.newExactQuery(name(), address); } - return super.termQuery(value, context); } @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { - return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), - lowerTerm == null ? null : parseValue(lowerTerm), - upperTerm == null ? 
null : parseValue(upperTerm), - includeLower, includeUpper); + if (includeLower == false || includeUpper == false) { + // TODO: should we drop range support entirely + throw new IllegalArgumentException("range on ip addresses only supports inclusive bounds"); + } + InetAddress lower; + if (lowerTerm == null) { + lower = InetAddressPoint.decode(new byte[16]); + } else { + lower = parse(lowerTerm); + } + + InetAddress upper; + if (upperTerm == null) { + byte[] bytes = new byte[16]; + Arrays.fill(bytes, (byte) 255); + upper = InetAddressPoint.decode(bytes); + } else { + upper = parse(upperTerm); + } + + return InetAddressPoint.newRangeQuery(name(), lower, upper); } @Override public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { - long iValue = parseValue(value); - long iSim; - try { - iSim = ipToLong(fuzziness.asString()); - } catch (IllegalArgumentException e) { - iSim = fuzziness.asLong(); - } - return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), - iValue - iSim, - iValue + iSim, - true, true); + InetAddress base = parse(value); + int mask = fuzziness.asInt(); + return InetAddressPoint.newPrefixQuery(name(), base, mask); } @Override - public FieldStats stats(IndexReader reader) throws IOException { - int maxDoc = reader.maxDoc(); - Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name()); - if (terms == null) { + public FieldStats.Ip stats(IndexReader reader) throws IOException { + String field = name(); + long size = PointValues.size(reader, field); + if (size == 0) { return null; } - long minValue = LegacyNumericUtils.getMinLong(terms); - long maxValue = LegacyNumericUtils.getMaxLong(terms); - return new FieldStats.Ip(maxDoc, terms.getDocCount(), terms.getSumDocFreq(), - terms.getSumTotalTermFreq(), minValue, maxValue); + int docCount = PointValues.getDocCount(reader, field); + byte[] min = PointValues.getMinPackedValue(reader, field); + byte[] max = 
PointValues.getMaxPackedValue(reader, field); + return new FieldStats.Ip(reader.maxDoc(),docCount, -1L, size, + InetAddressPoint.decode(min), + InetAddressPoint.decode(max)); } @Override public IndexFieldData.Builder fielddataBuilder() { failIfNoDocValues(); - return new DocValuesIndexFieldData.Builder().numericType(NumericType.LONG); + return new DocValuesIndexFieldData.Builder(); + } + + @Override + public Object valueForSearch(Object value) { + if (value == null) { + return null; + } + return DocValueFormat.IP.format((BytesRef) value); } @Override @@ -279,79 +260,139 @@ public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZ } } - protected IpFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Explicit ignoreMalformed, Explicit coerce, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, indexSettings, multiFields, copyTo); + private Boolean includeInAll; + + private Explicit ignoreMalformed; + + private IpFieldMapper( + String simpleName, + MappedFieldType fieldType, + MappedFieldType defaultFieldType, + Explicit ignoreMalformed, + Settings indexSettings, + MultiFields multiFields, + CopyTo copyTo) { + super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); + this.ignoreMalformed = ignoreMalformed; } - private static long parseValue(Object value) { - if (value instanceof Number) { - return ((Number) value).longValue(); + @Override + public IpFieldType fieldType() { + return (IpFieldType) super.fieldType(); + } + + @Override + protected String contentType() { + return fieldType.typeName(); + } + + @Override + protected IpFieldMapper clone() { + return (IpFieldMapper) super.clone(); + } + + @Override + public Mapper includeInAll(Boolean includeInAll) { + if (includeInAll != null) { + IpFieldMapper clone = clone(); + clone.includeInAll = includeInAll; + return clone; + } else { + 
return this; } - if (value instanceof BytesRef) { - return ipToLong(((BytesRef) value).utf8ToString()); + } + + @Override + public Mapper includeInAllIfNotSet(Boolean includeInAll) { + if (includeInAll != null && this.includeInAll == null) { + IpFieldMapper clone = clone(); + clone.includeInAll = includeInAll; + return clone; + } else { + return this; } - return ipToLong(value.toString()); } @Override - protected void innerParseCreateField(ParseContext context, List fields) throws IOException { - String ipAsString; + public Mapper unsetIncludeInAll() { + if (includeInAll != null) { + IpFieldMapper clone = clone(); + clone.includeInAll = null; + return clone; + } else { + return this; + } + } + + @Override + protected void parseCreateField(ParseContext context, List fields) throws IOException { + Object addressAsObject; if (context.externalValueSet()) { - ipAsString = (String) context.externalValue(); - if (ipAsString == null) { - ipAsString = fieldType().nullValueAsString(); - } + addressAsObject = context.externalValue(); } else { - if (context.parser().currentToken() == XContentParser.Token.VALUE_NULL) { - ipAsString = fieldType().nullValueAsString(); - } else { - ipAsString = context.parser().text(); - } + addressAsObject = context.parser().text(); + } + + if (addressAsObject == null) { + addressAsObject = fieldType().nullValue(); } - if (ipAsString == null) { + if (addressAsObject == null) { return; } + + String addressAsString = addressAsObject.toString(); + InetAddress address; + if (addressAsObject instanceof InetAddress) { + address = (InetAddress) addressAsObject; + } else { + try { + address = InetAddresses.forString(addressAsString); + } catch (IllegalArgumentException e) { + if (ignoreMalformed.value()) { + return; + } else { + throw e; + } + } + } + if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), ipAsString, fieldType().boost()); + context.allEntries().addText(fieldType().name(), addressAsString, 
fieldType().boost()); } - final long value = ipToLong(ipAsString); - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - CustomLongNumericField field = new CustomLongNumericField(value, fieldType()); - if (fieldType.boost() != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - field.setBoost(fieldType().boost()); - } - fields.add(field); + if (fieldType().indexOptions() != IndexOptions.NONE) { + fields.add(new InetAddressPoint(fieldType().name(), address)); } if (fieldType().hasDocValues()) { - addDocValue(context, fields, value); + fields.add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(InetAddressPoint.encode(address)))); + } + if (fieldType().stored()) { + fields.add(new StoredField(fieldType().name(), new BytesRef(InetAddressPoint.encode(address)))); } } @Override - protected String contentType() { - return CONTENT_TYPE; + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { + super.doMerge(mergeWith, updateAllTypes); + IpFieldMapper other = (IpFieldMapper) mergeWith; + this.includeInAll = other.includeInAll; + if (other.ignoreMalformed.explicit()) { + this.ignoreMalformed = other.ignoreMalformed; + } } @Override protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || fieldType().numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) { - builder.field("precision_step", fieldType().numericPrecisionStep()); - } - if (includeDefaults || fieldType().nullValueAsString() != null) { - builder.field("null_value", fieldType().nullValueAsString()); + if (includeDefaults || ignoreMalformed.explicit()) { + builder.field("ignore_malformed", ignoreMalformed.value()); } if (includeInAll != null) { builder.field("include_in_all", includeInAll); } else if (includeDefaults) { builder.field("include_in_all", false); } - } - } diff --git 
a/core/src/main/java/org/elasticsearch/index/mapper/ip/LegacyIpFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/ip/LegacyIpFieldMapper.java new file mode 100644 index 0000000000000..860b7aaeb9d0c --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/mapper/ip/LegacyIpFieldMapper.java @@ -0,0 +1,361 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.mapper.ip; + +import org.apache.lucene.analysis.LegacyNumericTokenStream; +import org.apache.lucene.document.Field; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.Terms; +import org.apache.lucene.search.LegacyNumericRangeQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.LegacyNumericUtils; +import org.elasticsearch.Version; +import org.elasticsearch.action.fieldstats.FieldStats; +import org.elasticsearch.common.Explicit; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.network.Cidrs; +import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; +import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.ParseContext; +import org.elasticsearch.index.mapper.core.LegacyLongFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyLongFieldMapper.CustomLongNumericField; +import org.elasticsearch.index.mapper.core.LegacyNumberFieldMapper; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.bucket.range.ipv4.InternalIPv4Range; +import org.joda.time.DateTimeZone; + +import java.io.IOException; +import 
java.net.InetAddress; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.regex.Pattern; + +import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField; + +/** + * + */ +public class LegacyIpFieldMapper extends LegacyNumberFieldMapper { + + public static final String CONTENT_TYPE = "ip"; + public static final long MAX_IP = 4294967296L; + + public static String longToIp(long longIp) { + int octet3 = (int) ((longIp >> 24) % 256); + int octet2 = (int) ((longIp >> 16) % 256); + int octet1 = (int) ((longIp >> 8) % 256); + int octet0 = (int) ((longIp) % 256); + return octet3 + "." + octet2 + "." + octet1 + "." + octet0; + } + + private static final Pattern pattern = Pattern.compile("\\."); + + public static long ipToLong(String ip) { + try { + if (!InetAddresses.isInetAddress(ip)) { + throw new IllegalArgumentException("failed to parse ip [" + ip + "], not a valid ip address"); + } + String[] octets = pattern.split(ip); + if (octets.length != 4) { + throw new IllegalArgumentException("failed to parse ip [" + ip + "], not a valid ipv4 address (4 dots)"); + } + return (Long.parseLong(octets[0]) << 24) + (Integer.parseInt(octets[1]) << 16) + + (Integer.parseInt(octets[2]) << 8) + Integer.parseInt(octets[3]); + } catch (Exception e) { + if (e instanceof IllegalArgumentException) { + throw (IllegalArgumentException) e; + } + throw new IllegalArgumentException("failed to parse ip [" + ip + "]", e); + } + } + + public static class Defaults extends LegacyNumberFieldMapper.Defaults { + public static final String NULL_VALUE = null; + + public static final MappedFieldType FIELD_TYPE = new IpFieldType(); + + static { + FIELD_TYPE.freeze(); + } + } + + public static class Builder extends LegacyNumberFieldMapper.Builder { + + protected String nullValue = Defaults.NULL_VALUE; + + public Builder(String name) { + super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); + builder = this; + } + + @Override + public 
LegacyIpFieldMapper build(BuilderContext context) { + if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0)) { + throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); + } + setupFieldType(context); + LegacyIpFieldMapper fieldMapper = new LegacyIpFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), + coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); + return (LegacyIpFieldMapper) fieldMapper.includeInAll(includeInAll); + } + + @Override + protected int maxPrecisionStep() { + return 64; + } + } + + public static class TypeParser implements Mapper.TypeParser { + @Override + public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { + LegacyIpFieldMapper.Builder builder = new Builder(name); + parseNumberField(builder, name, node, parserContext); + for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { + Map.Entry entry = iterator.next(); + String propName = Strings.toUnderscoreCase(entry.getKey()); + Object propNode = entry.getValue(); + if (propName.equals("null_value")) { + if (propNode == null) { + throw new MapperParsingException("Property [null_value] cannot be null."); + } + builder.nullValue(propNode.toString()); + iterator.remove(); + } + } + return builder; + } + } + + public static final class IpFieldType extends LegacyLongFieldMapper.LongFieldType { + + public IpFieldType() { + } + + protected IpFieldType(IpFieldType ref) { + super(ref); + } + + @Override + public NumberFieldType clone() { + return new IpFieldType(this); + } + + @Override + public String typeName() { + return CONTENT_TYPE; + } + + /** + * IPs should return as a string. 
+ */ + @Override + public Object valueForSearch(Object value) { + Long val = (Long) value; + if (val == null) { + return null; + } + return longToIp(val); + } + + @Override + public BytesRef indexedValueForSearch(Object value) { + BytesRefBuilder bytesRef = new BytesRefBuilder(); + LegacyNumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match + return bytesRef.get(); + } + + @Override + public Query termQuery(Object value, @Nullable QueryShardContext context) { + if (value != null) { + String term; + if (value instanceof BytesRef) { + term = ((BytesRef) value).utf8ToString(); + } else { + term = value.toString(); + } + long[] fromTo; + // assume that the term is either a CIDR range or the + // term is a single IPv4 address; if either of these + // assumptions is wrong, the CIDR parsing will fail + // anyway, and that is okay + if (term.contains("/")) { + // treat the term as if it is in CIDR notation + fromTo = Cidrs.cidrMaskToMinMax(term); + } else { + // treat the term as if it is a single IPv4, and + // apply a CIDR mask equivalent to the host route + fromTo = Cidrs.cidrMaskToMinMax(term + "/32"); + } + if (fromTo != null) { + return rangeQuery(fromTo[0] == 0 ? null : fromTo[0], + fromTo[1] == InternalIPv4Range.MAX_IP ? null : fromTo[1], true, false); + } + } + return super.termQuery(value, context); + } + + @Override + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { + return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), + lowerTerm == null ? null : parseValue(lowerTerm), + upperTerm == null ? 
null : parseValue(upperTerm), + includeLower, includeUpper); + } + + @Override + public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + long iValue = parseValue(value); + long iSim; + try { + iSim = ipToLong(fuzziness.asString()); + } catch (IllegalArgumentException e) { + iSim = fuzziness.asLong(); + } + return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), + iValue - iSim, + iValue + iSim, + true, true); + } + + @Override + public FieldStats stats(IndexReader reader) throws IOException { + int maxDoc = reader.maxDoc(); + Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name()); + if (terms == null) { + return null; + } + long minValue = LegacyNumericUtils.getMinLong(terms); + long maxValue = LegacyNumericUtils.getMaxLong(terms); + return new FieldStats.Ip(maxDoc, terms.getDocCount(), terms.getSumDocFreq(), + terms.getSumTotalTermFreq(), + InetAddress.getByName(longToIp(minValue)), + InetAddress.getByName(longToIp(maxValue))); + } + + @Override + public IndexFieldData.Builder fielddataBuilder() { + failIfNoDocValues(); + return new DocValuesIndexFieldData.Builder().numericType(NumericType.LONG); + } + + @Override + public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) { + if (format != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats"); + } + if (timeZone != null) { + throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + + "] does not support custom time zones"); + } + return DocValueFormat.IP; + } + } + + protected LegacyIpFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, + Explicit ignoreMalformed, Explicit coerce, + Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { + super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, indexSettings, 
multiFields, copyTo); + } + + private static long parseValue(Object value) { + if (value instanceof Number) { + return ((Number) value).longValue(); + } + if (value instanceof BytesRef) { + return ipToLong(((BytesRef) value).utf8ToString()); + } + return ipToLong(value.toString()); + } + + @Override + protected void innerParseCreateField(ParseContext context, List fields) throws IOException { + String ipAsString; + if (context.externalValueSet()) { + ipAsString = (String) context.externalValue(); + if (ipAsString == null) { + ipAsString = fieldType().nullValueAsString(); + } + } else { + if (context.parser().currentToken() == XContentParser.Token.VALUE_NULL) { + ipAsString = fieldType().nullValueAsString(); + } else { + ipAsString = context.parser().text(); + } + } + + if (ipAsString == null) { + return; + } + if (context.includeInAll(includeInAll, this)) { + context.allEntries().addText(fieldType().name(), ipAsString, fieldType().boost()); + } + + final long value = ipToLong(ipAsString); + if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { + CustomLongNumericField field = new CustomLongNumericField(value, fieldType()); + if (fieldType.boost() != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { + field.setBoost(fieldType().boost()); + } + fields.add(field); + } + if (fieldType().hasDocValues()) { + addDocValue(context, fields, value); + } + } + + @Override + protected String contentType() { + return CONTENT_TYPE; + } + + @Override + protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { + super.doXContentBody(builder, includeDefaults, params); + + if (includeDefaults || fieldType().numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) { + builder.field("precision_step", fieldType().numericPrecisionStep()); + } + if (includeDefaults || fieldType().nullValueAsString() != null) { + builder.field("null_value", 
fieldType().nullValueAsString()); + } + if (includeInAll != null) { + builder.field("include_in_all", includeInAll); + } else if (includeDefaults) { + builder.field("include_in_all", false); + } + + } + +} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java index 7e5dc3d28f5ab..c13276dbe6249 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java @@ -54,7 +54,7 @@ public class RootObjectMapper extends ObjectMapper { public static class Defaults { public static final FormatDateTimeFormatter[] DYNAMIC_DATE_TIME_FORMATTERS = new FormatDateTimeFormatter[]{ - DateFieldMapper.Defaults.DATE_TIME_FORMATTER, + DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, Joda.getStrictStandardDateFormatter() }; public static final boolean DATE_DETECTION = true; diff --git a/core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java index 0f8bbff959e9d..cae4997fa60f6 100644 --- a/core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java @@ -37,6 +37,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.core.DateFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyDateFieldMapper; import org.joda.time.DateTimeZone; import java.io.IOException; @@ -437,7 +438,14 @@ protected Query doToQuery(QueryShardContext context) throws IOException { Query query = null; MappedFieldType mapper = context.fieldMapper(this.fieldName); if (mapper != null) { - if (mapper instanceof DateFieldMapper.DateFieldType) { + if (mapper instanceof LegacyDateFieldMapper.DateFieldType) { + DateMathParser 
forcedDateParser = null; + if (this.format != null) { + forcedDateParser = new DateMathParser(this.format); + } + query = ((LegacyDateFieldMapper.DateFieldType) mapper).rangeQuery(from, to, includeLower, includeUpper, + timeZone, forcedDateParser); + } else if (mapper instanceof DateFieldMapper.DateFieldType) { DateMathParser forcedDateParser = null; if (this.format != null) { forcedDateParser = new DateMathParser(this.format); diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java index c3a0bca67b1dd..3b6b850aa1aa7 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java @@ -44,6 +44,8 @@ import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.core.DateFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyDateFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyNumberFieldMapper; import org.elasticsearch.index.mapper.core.NumberFieldMapper; import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper; import org.elasticsearch.index.query.QueryShardContext; @@ -198,12 +200,14 @@ private AbstractDistanceScoreFunction parseVariable(String fieldName, XContentPa // dates and time and geo need special handling parser.nextToken(); - if (fieldType instanceof DateFieldMapper.DateFieldType) { - return parseDateVariable(parser, context, (DateFieldMapper.DateFieldType) fieldType, mode); + if (fieldType instanceof LegacyDateFieldMapper.DateFieldType + || fieldType instanceof DateFieldMapper.DateFieldType) { + return parseDateVariable(parser, context, fieldType, mode); } else if (fieldType instanceof BaseGeoPointFieldMapper.GeoPointFieldType) { - return parseGeoVariable(parser, 
context, (BaseGeoPointFieldMapper.GeoPointFieldType) fieldType, mode); - } else if (fieldType instanceof NumberFieldMapper.NumberFieldType) { - return parseNumberVariable(parser, context, (NumberFieldMapper.NumberFieldType) fieldType, mode); + return parseGeoVariable(parser, context, fieldType, mode); + } else if (fieldType instanceof LegacyNumberFieldMapper.NumberFieldType + || fieldType instanceof NumberFieldMapper.NumberFieldType) { + return parseNumberVariable(parser, context, fieldType, mode); } else { throw new ParsingException(parser.getTokenLocation(), "field [{}] is of type [{}], but only numeric types are supported.", fieldName, fieldType); @@ -211,7 +215,7 @@ private AbstractDistanceScoreFunction parseVariable(String fieldName, XContentPa } private AbstractDistanceScoreFunction parseNumberVariable(XContentParser parser, QueryShardContext context, - NumberFieldMapper.NumberFieldType fieldType, MultiValueMode mode) throws IOException { + MappedFieldType fieldType, MultiValueMode mode) throws IOException { XContentParser.Token token; String parameterName = null; double scale = 0; @@ -246,7 +250,7 @@ private AbstractDistanceScoreFunction parseNumberVariable(XContentParser parser, } private AbstractDistanceScoreFunction parseGeoVariable(XContentParser parser, QueryShardContext context, - BaseGeoPointFieldMapper.GeoPointFieldType fieldType, MultiValueMode mode) throws IOException { + MappedFieldType fieldType, MultiValueMode mode) throws IOException { XContentParser.Token token; String parameterName = null; GeoPoint origin = new GeoPoint(); @@ -280,7 +284,7 @@ private AbstractDistanceScoreFunction parseGeoVariable(XContentParser parser, Qu } private AbstractDistanceScoreFunction parseDateVariable(XContentParser parser, QueryShardContext context, - DateFieldMapper.DateFieldType dateFieldType, MultiValueMode mode) throws IOException { + MappedFieldType dateFieldType, MultiValueMode mode) throws IOException { XContentParser.Token token; String parameterName = 
null; String scaleString = null; @@ -306,7 +310,11 @@ private AbstractDistanceScoreFunction parseDateVariable(XContentParser parser, Q if (originString == null) { origin = context.nowInMillis(); } else { - origin = dateFieldType.parseToMilliseconds(originString, false, null, null); + if (dateFieldType instanceof LegacyDateFieldMapper.DateFieldType) { + origin = ((LegacyDateFieldMapper.DateFieldType) dateFieldType).parseToMilliseconds(originString, false, null, null); + } else { + origin = ((DateFieldMapper.DateFieldType) dateFieldType).parseToMilliseconds(originString, false, null, null); + } } if (scaleString == null) { diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java index e1330f025446f..1ff198c6cbf6a 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java @@ -28,18 +28,14 @@ import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.core.BinaryFieldMapper; import org.elasticsearch.index.mapper.core.BooleanFieldMapper; -import org.elasticsearch.index.mapper.core.ByteFieldMapper; import org.elasticsearch.index.mapper.core.CompletionFieldMapper; import org.elasticsearch.index.mapper.core.DateFieldMapper; -import org.elasticsearch.index.mapper.core.DoubleFieldMapper; -import org.elasticsearch.index.mapper.core.FloatFieldMapper; -import org.elasticsearch.index.mapper.core.IntegerFieldMapper; import org.elasticsearch.index.mapper.core.KeywordFieldMapper; -import org.elasticsearch.index.mapper.core.LongFieldMapper; -import org.elasticsearch.index.mapper.core.ShortFieldMapper; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.core.TextFieldMapper; import org.elasticsearch.index.mapper.core.TokenCountFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyTokenCountFieldMapper; +import 
org.elasticsearch.index.mapper.core.NumberFieldMapper; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper; import org.elasticsearch.index.mapper.internal.AllFieldMapper; @@ -87,12 +83,9 @@ public IndicesModule() { } private void registerBuiltInMappers() { - registerMapper(ByteFieldMapper.CONTENT_TYPE, new ByteFieldMapper.TypeParser()); - registerMapper(ShortFieldMapper.CONTENT_TYPE, new ShortFieldMapper.TypeParser()); - registerMapper(IntegerFieldMapper.CONTENT_TYPE, new IntegerFieldMapper.TypeParser()); - registerMapper(LongFieldMapper.CONTENT_TYPE, new LongFieldMapper.TypeParser()); - registerMapper(FloatFieldMapper.CONTENT_TYPE, new FloatFieldMapper.TypeParser()); - registerMapper(DoubleFieldMapper.CONTENT_TYPE, new DoubleFieldMapper.TypeParser()); + for (NumberFieldMapper.NumberType type : NumberFieldMapper.NumberType.values()) { + registerMapper(type.typeName(), new NumberFieldMapper.TypeParser(type)); + } registerMapper(BooleanFieldMapper.CONTENT_TYPE, new BooleanFieldMapper.TypeParser()); registerMapper(BinaryFieldMapper.CONTENT_TYPE, new BinaryFieldMapper.TypeParser()); registerMapper(DateFieldMapper.CONTENT_TYPE, new DateFieldMapper.TypeParser()); diff --git a/core/src/main/java/org/elasticsearch/search/DocValueFormat.java b/core/src/main/java/org/elasticsearch/search/DocValueFormat.java index 3b9d3f678b95e..98cb388c16f9f 100644 --- a/core/src/main/java/org/elasticsearch/search/DocValueFormat.java +++ b/core/src/main/java/org/elasticsearch/search/DocValueFormat.java @@ -19,6 +19,7 @@ package org.elasticsearch.search; +import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.index.Term; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.geo.GeoHashUtils; @@ -28,14 +29,18 @@ import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; +import 
org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.index.mapper.ip.IpFieldMapper; +import org.elasticsearch.index.mapper.ip.LegacyIpFieldMapper; import org.joda.time.DateTimeZone; import java.io.IOException; +import java.net.InetAddress; import java.text.DecimalFormat; import java.text.DecimalFormatSymbols; import java.text.NumberFormat; import java.text.ParseException; +import java.util.Arrays; import java.util.Locale; import java.util.Objects; import java.util.concurrent.Callable; @@ -238,7 +243,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public String format(long value) { - return IpFieldMapper.longToIp(value); + return LegacyIpFieldMapper.longToIp(value); } @Override @@ -248,12 +253,15 @@ public String format(double value) { @Override public String format(BytesRef value) { - throw new UnsupportedOperationException(); + byte[] bytes = Arrays.copyOfRange(value.bytes, value.offset, value.offset + value.length); + InetAddress inet = InetAddressPoint.decode(bytes); + return NetworkAddress.format(inet); } @Override public long parseLong(String value, boolean roundUp, Callable now) { - return IpFieldMapper.ipToLong(value); + // TODO: throw exception in 6.0 + return LegacyIpFieldMapper.ipToLong(value); } @Override diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java index 13ed5e6435def..1d509d40e1da2 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java @@ -64,7 +64,7 @@ public boolean isNumeric() { } }, DATE((byte) 5, "date", "date", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, - new DocValueFormat.DateTime(DateFieldMapper.Defaults.DATE_TIME_FORMATTER, DateTimeZone.UTC)) { + new DocValueFormat.DateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, 
DateTimeZone.UTC)) { @Override public boolean isNumeric() { return true; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorBuilder.java index a84f782456de4..486fae0d52acf 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorBuilder.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; @@ -441,4 +442,4 @@ protected final boolean doEquals(Object obj) { } protected abstract boolean innerEquals(Object obj); -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java b/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java index bbef8dbcca749..b9c892587a046 100644 --- a/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java +++ b/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.fieldstats.IndexConstraint; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.core.DateFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyDateFieldMapper; import org.elasticsearch.test.ESSingleNodeTestCase; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -305,9 +306,9 @@ public void testNumberFiltering() { public void testDateFiltering() { DateTime dateTime1 = new DateTime(2014, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC); - 
String dateTime1Str = DateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().print(dateTime1); + String dateTime1Str = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().print(dateTime1); DateTime dateTime2 = new DateTime(2014, 1, 2, 0, 0, 0, 0, DateTimeZone.UTC); - String dateTime2Str = DateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().print(dateTime2); + String dateTime2Str = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().print(dateTime2); createIndex("test1", Settings.EMPTY, "type", "value", "type=date"); client().prepareIndex("test1", "test").setSource("value", dateTime1Str).get(); diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java index 877dbc836251d..3849b78604c2f 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java @@ -43,12 +43,12 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper.BuilderContext; import org.elasticsearch.index.mapper.core.BinaryFieldMapper; -import org.elasticsearch.index.mapper.core.ByteFieldMapper; -import org.elasticsearch.index.mapper.core.DoubleFieldMapper; -import org.elasticsearch.index.mapper.core.FloatFieldMapper; -import org.elasticsearch.index.mapper.core.IntegerFieldMapper; -import org.elasticsearch.index.mapper.core.LongFieldMapper; -import org.elasticsearch.index.mapper.core.ShortFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyByteFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyDoubleFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyFloatFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyIntegerFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyLongFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyShortFieldMapper; import 
org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; @@ -106,17 +106,17 @@ public > IFD getForField(String type, String field if (type.equals("string")) { fieldType = new StringFieldMapper.Builder(fieldName).tokenized(false).fielddata(docValues == false).docValues(docValues).build(context).fieldType(); } else if (type.equals("float")) { - fieldType = new FloatFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); + fieldType = new LegacyFloatFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); } else if (type.equals("double")) { - fieldType = new DoubleFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); + fieldType = new LegacyDoubleFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); } else if (type.equals("long")) { - fieldType = new LongFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); + fieldType = new LegacyLongFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); } else if (type.equals("int")) { - fieldType = new IntegerFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); + fieldType = new LegacyIntegerFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); } else if (type.equals("short")) { - fieldType = new ShortFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); + fieldType = new LegacyShortFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); } else if (type.equals("byte")) { - fieldType = new ByteFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); + fieldType = new LegacyByteFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); } else if (type.equals("geo_point")) { if 
(indexService.getIndexSettings().getIndexVersionCreated().before(Version.V_2_2_0)) { fieldType = new GeoPointFieldMapperLegacy.Builder(fieldName).docValues(docValues).build(context).fieldType(); diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java index e891f9fab002e..00bd827da856a 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java @@ -39,13 +39,14 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper.BuilderContext; import org.elasticsearch.index.mapper.core.BooleanFieldMapper; -import org.elasticsearch.index.mapper.core.ByteFieldMapper; -import org.elasticsearch.index.mapper.core.DoubleFieldMapper; -import org.elasticsearch.index.mapper.core.FloatFieldMapper; -import org.elasticsearch.index.mapper.core.IntegerFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyByteFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyDoubleFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyFloatFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyIntegerFieldMapper; import org.elasticsearch.index.mapper.core.KeywordFieldMapper; -import org.elasticsearch.index.mapper.core.LongFieldMapper; -import org.elasticsearch.index.mapper.core.ShortFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyLongFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyShortFieldMapper; +import org.elasticsearch.index.mapper.core.NumberFieldMapper; import org.elasticsearch.index.mapper.core.TextFieldMapper; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; @@ -79,22 +80,24 @@ public void testGetForFieldDefaults() { assertTrue(fd instanceof SortedSetDVOrdinalsIndexFieldData); for 
(MappedFieldType mapper : Arrays.asList( - new ByteFieldMapper.Builder("int").build(ctx).fieldType(), - new ShortFieldMapper.Builder("int").build(ctx).fieldType(), - new IntegerFieldMapper.Builder("int").build(ctx).fieldType(), - new LongFieldMapper.Builder("long").build(ctx).fieldType() + new NumberFieldMapper.Builder("int", NumberFieldMapper.NumberType.BYTE).build(ctx).fieldType(), + new NumberFieldMapper.Builder("int", NumberFieldMapper.NumberType.SHORT).build(ctx).fieldType(), + new NumberFieldMapper.Builder("int", NumberFieldMapper.NumberType.INTEGER).build(ctx).fieldType(), + new NumberFieldMapper.Builder("long", NumberFieldMapper.NumberType.LONG).build(ctx).fieldType() )) { ifdService.clear(); fd = ifdService.getForField(mapper); assertTrue(fd instanceof SortedNumericDVIndexFieldData); } - final MappedFieldType floatMapper = new FloatFieldMapper.Builder("float").build(ctx).fieldType(); + final MappedFieldType floatMapper = new NumberFieldMapper.Builder("float", NumberFieldMapper.NumberType.FLOAT) + .build(ctx).fieldType(); ifdService.clear(); fd = ifdService.getForField(floatMapper); assertTrue(fd instanceof SortedNumericDVIndexFieldData); - final MappedFieldType doubleMapper = new DoubleFieldMapper.Builder("double").build(ctx).fieldType(); + final MappedFieldType doubleMapper = new NumberFieldMapper.Builder("double", NumberFieldMapper.NumberType.DOUBLE) + .build(ctx).fieldType(); ifdService.clear(); fd = ifdService.getForField(doubleMapper); assertTrue(fd instanceof SortedNumericDVIndexFieldData); @@ -194,11 +197,11 @@ private void doTestRequireDocValues(MappedFieldType ft) { } public void testRequireDocValuesOnLongs() { - doTestRequireDocValues(new LongFieldMapper.LongFieldType()); + doTestRequireDocValues(new LegacyLongFieldMapper.LongFieldType()); } public void testRequireDocValuesOnDoubles() { - doTestRequireDocValues(new DoubleFieldMapper.DoubleFieldType()); + doTestRequireDocValues(new LegacyDoubleFieldMapper.DoubleFieldType()); } public void 
testRequireDocValuesOnBools() { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java index 477e48ae5d20d..3b7a931ae625c 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java @@ -33,11 +33,8 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.core.DateFieldMapper; import org.elasticsearch.index.mapper.core.DateFieldMapper.DateFieldType; -import org.elasticsearch.index.mapper.core.DoubleFieldMapper; -import org.elasticsearch.index.mapper.core.FloatFieldMapper; -import org.elasticsearch.index.mapper.core.IntegerFieldMapper; -import org.elasticsearch.index.mapper.core.LongFieldMapper; -import org.elasticsearch.index.mapper.core.LongFieldMapper.LongFieldType; +import org.elasticsearch.index.mapper.core.NumberFieldMapper; +import org.elasticsearch.index.mapper.core.NumberFieldMapper.NumberFieldType; import org.elasticsearch.index.mapper.core.TextFieldMapper; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -45,6 +42,7 @@ import static java.util.Collections.emptyMap; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.nullValue; @@ -427,10 +425,10 @@ public void testComplexArray() throws Exception { public void testReuseExistingMappings() throws IOException, Exception { IndexService indexService = createIndex("test", Settings.EMPTY, "type", "my_field1", "type=text,store=true", - "my_field2", "type=integer,precision_step=10", + "my_field2", "type=integer,store=false", "my_field3", "type=long,doc_values=false", - "my_field4", "type=float,index_options=freqs", - "my_field5", 
"type=double,precision_step=14", + "my_field4", "type=float,index=false", + "my_field5", "type=double,store=true", "my_field6", "type=date,doc_values=false"); // Even if the dynamic type of our new field is long, we already have a mapping for the same field @@ -483,21 +481,22 @@ public void testReuseExistingMappings() throws IOException, Exception { // since we already have a mapping of type integer assertNotNull(myField2Mapper); // same type - assertTrue(myField2Mapper instanceof IntegerFieldMapper); + assertEquals("integer", ((FieldMapper) myField2Mapper).fieldType().typeName()); // and same option - assertEquals(10, ((IntegerFieldMapper) myField2Mapper).fieldType().numericPrecisionStep()); + assertFalse(((FieldMapper) myField2Mapper).fieldType().stored()); assertNotNull(myField3Mapper); - assertTrue(myField3Mapper instanceof LongFieldMapper); - assertFalse(((LongFieldType) ((LongFieldMapper) myField3Mapper).fieldType()).hasDocValues()); + assertTrue(myField3Mapper instanceof NumberFieldMapper); + assertFalse(((NumberFieldType) ((NumberFieldMapper) myField3Mapper).fieldType()).hasDocValues()); assertNotNull(myField4Mapper); - assertTrue(myField4Mapper instanceof FloatFieldMapper); - assertEquals(IndexOptions.DOCS_AND_FREQS, ((FieldMapper) myField4Mapper).fieldType().indexOptions()); + assertTrue(myField4Mapper instanceof NumberFieldMapper); + assertEquals(IndexOptions.NONE, ((FieldMapper) myField4Mapper).fieldType().indexOptions()); assertNotNull(myField5Mapper); - assertTrue(myField5Mapper instanceof DoubleFieldMapper); - assertEquals(14, ((DoubleFieldMapper) myField5Mapper).fieldType().numericPrecisionStep()); + + assertTrue(myField5Mapper instanceof NumberFieldMapper); + assertTrue(((NumberFieldMapper) myField5Mapper).fieldType().stored()); assertNotNull(myField6Mapper); assertTrue(myField6Mapper instanceof DateFieldMapper); @@ -584,9 +583,60 @@ private void doTestDefaultFloatingPointMappings(DocumentMapper mapper, XContentB ParsedDocument parsedDocument = 
mapper.parse("index", "type", "id", source); Mapping update = parsedDocument.dynamicMappingsUpdate(); assertNotNull(update); - assertThat(update.root().getMapper("foo"), instanceOf(FloatFieldMapper.class)); - assertThat(update.root().getMapper("bar"), instanceOf(FloatFieldMapper.class)); - assertThat(update.root().getMapper("baz"), instanceOf(FloatFieldMapper.class)); - assertThat(update.root().getMapper("quux"), instanceOf(FloatFieldMapper.class)); + assertThat(((FieldMapper) update.root().getMapper("foo")).fieldType().typeName(), equalTo("float")); + assertThat(((FieldMapper) update.root().getMapper("bar")).fieldType().typeName(), equalTo("float")); + assertThat(((FieldMapper) update.root().getMapper("baz")).fieldType().typeName(), equalTo("float")); + assertThat(((FieldMapper) update.root().getMapper("quux")).fieldType().typeName(), equalTo("float")); + } + + public void testNumericDetectionEnabled() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .field("numeric_detection", true) + .endObject().endObject().string(); + + IndexService index = createIndex("test"); + client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get(); + DocumentMapper defaultMapper = index.mapperService().documentMapper("type"); + + ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("s_long", "100") + .field("s_double", "100.0") + .endObject() + .bytes()); + assertNotNull(doc.dynamicMappingsUpdate()); + client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get(); + + defaultMapper = index.mapperService().documentMapper("type"); + FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("s_long"); + assertThat(mapper.fieldType().typeName(), equalTo("long")); + + mapper = defaultMapper.mappers().smartNameFieldMapper("s_double"); + 
assertThat(mapper.fieldType().typeName(), equalTo("float")); + } + + public void testNumericDetectionDefault() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .endObject().endObject().string(); + + IndexService index = createIndex("test"); + client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get(); + DocumentMapper defaultMapper = index.mapperService().documentMapper("type"); + + ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("s_long", "100") + .field("s_double", "100.0") + .endObject() + .bytes()); + assertNotNull(doc.dynamicMappingsUpdate()); + assertAcked(client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get()); + + defaultMapper = index.mapperService().documentMapper("type"); + FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("s_long"); + assertThat(mapper, instanceOf(TextFieldMapper.class)); + + mapper = defaultMapper.mappers().smartNameFieldMapper("s_double"); + assertThat(mapper, instanceOf(TextFieldMapper.class)); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index 3a5de78c496eb..d43d48729140f 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -29,7 +29,7 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.index.mapper.core.KeywordFieldMapper.KeywordFieldType; -import org.elasticsearch.index.mapper.core.LongFieldMapper.LongFieldType; +import org.elasticsearch.index.mapper.core.NumberFieldMapper.NumberFieldType; import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Rule; import 
org.junit.rules.ExpectedException; @@ -188,7 +188,7 @@ public void testMappingDepthExceedsLimit() throws Throwable { public void testUnmappedFieldType() { MapperService mapperService = createIndex("index").mapperService(); assertThat(mapperService.unmappedFieldType("keyword"), instanceOf(KeywordFieldType.class)); - assertThat(mapperService.unmappedFieldType("long"), instanceOf(LongFieldType.class)); + assertThat(mapperService.unmappedFieldType("long"), instanceOf(NumberFieldType.class)); // back compat assertThat(mapperService.unmappedFieldType("string"), instanceOf(KeywordFieldType.class)); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java index 490477d67e7c2..9974fd5b69554 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java @@ -176,19 +176,12 @@ public void testBackCompatFieldMappingBoostValues() throws Exception { assertThat(doc.rootDoc().getField("s_field").boost(), equalTo(1f)); assertThat(doc.rootDoc().getField("s_field").fieldType().omitNorms(), equalTo(true)); assertThat(doc.rootDoc().getField("l_field").boost(), equalTo(1f)); - assertThat(doc.rootDoc().getField("l_field").fieldType().omitNorms(), equalTo(true)); assertThat(doc.rootDoc().getField("i_field").boost(), equalTo(1f)); - assertThat(doc.rootDoc().getField("i_field").fieldType().omitNorms(), equalTo(true)); assertThat(doc.rootDoc().getField("sh_field").boost(), equalTo(1f)); - assertThat(doc.rootDoc().getField("sh_field").fieldType().omitNorms(), equalTo(true)); assertThat(doc.rootDoc().getField("b_field").boost(), equalTo(1f)); - assertThat(doc.rootDoc().getField("b_field").fieldType().omitNorms(), equalTo(true)); assertThat(doc.rootDoc().getField("d_field").boost(), equalTo(1f)); - 
assertThat(doc.rootDoc().getField("d_field").fieldType().omitNorms(), equalTo(true)); assertThat(doc.rootDoc().getField("f_field").boost(), equalTo(1f)); - assertThat(doc.rootDoc().getField("f_field").fieldType().omitNorms(), equalTo(true)); assertThat(doc.rootDoc().getField("date_field").boost(), equalTo(1f)); - assertThat(doc.rootDoc().getField("date_field").fieldType().omitNorms(), equalTo(true)); } } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java index 0e4f0a34af60e..d4434b974231c 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java @@ -36,7 +36,7 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.mapper.core.LongFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyLongFieldMapper; import org.elasticsearch.index.mapper.core.TextFieldMapper; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -131,7 +131,7 @@ public void testCopyToFieldsParsing() throws Exception { docMapper = index.mapperService().documentMapper("type1"); fieldMapper = docMapper.mappers().getMapper("new_field"); - assertThat(fieldMapper, instanceOf(LongFieldMapper.class)); + assertThat(fieldMapper.fieldType().typeName(), equalTo("long")); } public void testCopyToFieldsInnerObjectParsing() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/DateFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/DateFieldMapperTests.java new file mode 100644 index 0000000000000..9f09e3e2e132d --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/DateFieldMapperTests.java @@ -0,0 +1,254 @@ +/* + * Licensed to 
Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.mapper.core; + +import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.IndexableField; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.junit.Before; + +import java.io.IOException; + +import static org.hamcrest.Matchers.containsString; + +public class DateFieldMapperTests extends ESSingleNodeTestCase { + + IndexService indexService; + DocumentMapperParser parser; + + @Before + public void before() { + indexService = createIndex("test"); + parser = indexService.mapperService().documentMapperParser(); + } + + public void testDefaults() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "date").endObject().endObject() + 
.endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "2016-03-11") + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(2, fields.length); + IndexableField pointField = fields[0]; + assertEquals(1, pointField.fieldType().pointDimensionCount()); + assertEquals(8, pointField.fieldType().pointNumBytes()); + assertFalse(pointField.fieldType().stored()); + assertEquals(1457654400000L, pointField.numericValue().longValue()); + IndexableField dvField = fields[1]; + assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType()); + assertEquals(1457654400000L, dvField.numericValue().longValue()); + assertFalse(dvField.fieldType().stored()); + } + + public void testNotIndexed() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "date").field("index", false).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "2016-03-11") + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(1, fields.length); + IndexableField dvField = fields[0]; + assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType()); + } + + public void testNoDocValues() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", 
"date").field("doc_values", false).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "2016-03-11") + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(1, fields.length); + IndexableField pointField = fields[0]; + assertEquals(1, pointField.fieldType().pointDimensionCount()); + } + + public void testStore() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "date").field("store", true).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "2016-03-11") + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(3, fields.length); + IndexableField pointField = fields[0]; + assertEquals(1, pointField.fieldType().pointDimensionCount()); + IndexableField dvField = fields[1]; + assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType()); + IndexableField storedField = fields[2]; + assertTrue(storedField.fieldType().stored()); + assertEquals(1457654400000L, storedField.numericValue().longValue()); + } + + public void testIgnoreMalformed() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "date").endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper 
= parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ThrowingRunnable runnable = () -> mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "2016-03-99") + .endObject() + .bytes()); + MapperParsingException e = expectThrows(MapperParsingException.class, runnable); + assertThat(e.getCause().getMessage(), containsString("Cannot parse \"2016-03-99\"")); + + mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "date") + .field("ignore_malformed", true).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping)); + + ParsedDocument doc = mapper2.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", ":1") + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(0, fields.length); + } + + public void testIncludeInAll() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "date").endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "2016-03-11") + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("_all"); + assertEquals(1, fields.length); + assertEquals("2016-03-11", fields[0].stringValue()); + + mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "date") + .field("include_in_all", 
false).endObject().endObject() + .endObject().endObject().string(); + + mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "2016-03-11") + .endObject() + .bytes()); + + fields = doc.rootDoc().getFields("_all"); + assertEquals(0, fields.length); + } + + public void testChangeFormat() throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "date") + .field("format", "epoch_second").endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", 1457654400) + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(2, fields.length); + IndexableField pointField = fields[0]; + assertEquals(1457654400000L, pointField.numericValue().longValue()); + } + + public void testChangeLocale() throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "date").field("locale", "fr").endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", 1457654400) + .endObject() + .bytes()); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/DateFieldTypeTests.java 
b/core/src/test/java/org/elasticsearch/index/mapper/core/DateFieldTypeTests.java index d68c444ec4a53..64fe5781ecf9d 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/DateFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/DateFieldTypeTests.java @@ -18,8 +18,10 @@ */ package org.elasticsearch.index.mapper.core; -import org.apache.lucene.document.Field.Store; -import org.apache.lucene.document.LegacyLongField; +import java.io.IOException; +import java.util.Locale; + +import org.apache.lucene.document.LongPoint; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; @@ -37,10 +39,6 @@ import org.joda.time.DateTimeZone; import org.junit.Before; -import java.io.IOException; -import java.util.Locale; -import java.util.concurrent.TimeUnit; - public class DateFieldTypeTests extends FieldTypeTestCase { @Override protected MappedFieldType createDefaultFieldType() { @@ -53,19 +51,13 @@ public void setupProperties() { addModifier(new Modifier("format", true) { @Override public void modify(MappedFieldType ft) { - ((DateFieldMapper.DateFieldType) ft).setDateTimeFormatter(Joda.forPattern("basic_week_date", Locale.ROOT)); + ((DateFieldType) ft).setDateTimeFormatter(Joda.forPattern("basic_week_date", Locale.ROOT)); } }); addModifier(new Modifier("locale", true) { @Override public void modify(MappedFieldType ft) { - ((DateFieldMapper.DateFieldType) ft).setDateTimeFormatter(Joda.forPattern("date_optional_time", Locale.CANADA)); - } - }); - addModifier(new Modifier("numeric_resolution", true) { - @Override - public void modify(MappedFieldType ft) { - ((DateFieldMapper.DateFieldType)ft).setTimeUnit(TimeUnit.HOURS); + ((DateFieldType) ft).setDateTimeFormatter(Joda.forPattern("date_optional_time", Locale.CANADA)); } }); } @@ -105,10 +97,10 @@ private void doTestIsFieldWithinQuery(DateFieldType ft, DirectoryReader reader, public void testIsFieldWithinQuery() 
throws IOException { Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null)); - long instant1 = DateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime("2015-10-12").getMillis(); - long instant2 = DateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime("2016-04-03").getMillis(); + long instant1 = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime("2015-10-12").getMillis(); + long instant2 = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime("2016-04-03").getMillis(); Document doc = new Document(); - LegacyLongField field = new LegacyLongField("my_date", instant1, Store.NO); + LongPoint field = new LongPoint("my_date", instant1); doc.add(field); w.addDocument(doc); field.setLongValue(instant2); @@ -116,7 +108,7 @@ public void testIsFieldWithinQuery() throws IOException { DirectoryReader reader = DirectoryReader.open(w); DateFieldType ft = new DateFieldType(); ft.setName("my_date"); - DateMathParser alternateFormat = new DateMathParser(DateFieldMapper.Defaults.DATE_TIME_FORMATTER); + DateMathParser alternateFormat = new DateMathParser(LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER); doTestIsFieldWithinQuery(ft, reader, null, null); doTestIsFieldWithinQuery(ft, reader, null, alternateFormat); doTestIsFieldWithinQuery(ft, reader, DateTimeZone.UTC, null); @@ -126,7 +118,7 @@ public void testIsFieldWithinQuery() throws IOException { public void testValueFormat() { MappedFieldType ft = createDefaultFieldType(); - long instant = DateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime("2015-10-12T14:10:55").getMillis(); + long instant = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime("2015-10-12T14:10:55").getMillis(); assertEquals("2015-10-12T14:10:55.000Z", ft.docValueFormat(null, DateTimeZone.UTC).format(instant)); assertEquals("2015-10-12T15:10:55.000+01:00", @@ -137,14 +129,14 @@ public void testValueFormat() { 
ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12T14:10:55", false, null)); assertEquals(instant, ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12T14:10:55", true, null)); - assertEquals(DateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime("2015-10-13").getMillis() - 1, + assertEquals(LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime("2015-10-13").getMillis() - 1, ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12||/d", true, null)); } public void testValueForSearch() { MappedFieldType ft = createDefaultFieldType(); String date = "2015-10-12T12:09:55.000Z"; - long instant = DateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime(date).getMillis(); + long instant = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime(date).getMillis(); assertEquals(date, ft.valueForSearch(instant)); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/ByteFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/LegacyByteFieldTypeTests.java similarity index 91% rename from core/src/test/java/org/elasticsearch/index/mapper/core/ByteFieldTypeTests.java rename to core/src/test/java/org/elasticsearch/index/mapper/core/LegacyByteFieldTypeTests.java index 02f78e991de9d..720fc5b435729 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/ByteFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/LegacyByteFieldTypeTests.java @@ -22,10 +22,10 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.junit.Before; -public class ByteFieldTypeTests extends FieldTypeTestCase { +public class LegacyByteFieldTypeTests extends FieldTypeTestCase { @Override protected MappedFieldType createDefaultFieldType() { - return new ByteFieldMapper.ByteFieldType(); + return new LegacyByteFieldMapper.ByteFieldType(); } @Before diff --git 
a/core/src/test/java/org/elasticsearch/index/mapper/core/LegacyDateFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/LegacyDateFieldTypeTests.java new file mode 100644 index 0000000000000..6c47f40c0c075 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/LegacyDateFieldTypeTests.java @@ -0,0 +1,150 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.index.mapper.core; + +import org.apache.lucene.document.Field.Store; +import org.apache.lucene.document.LegacyLongField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.MultiReader; +import org.apache.lucene.store.Directory; +import org.apache.lucene.util.IOUtils; +import org.elasticsearch.common.joda.DateMathParser; +import org.elasticsearch.common.joda.Joda; +import org.elasticsearch.index.mapper.FieldTypeTestCase; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedFieldType.Relation; +import org.elasticsearch.index.mapper.ParseContext.Document; +import org.elasticsearch.index.mapper.core.LegacyDateFieldMapper.DateFieldType; +import org.joda.time.DateTimeZone; +import org.junit.Before; + +import java.io.IOException; +import java.util.Locale; +import java.util.concurrent.TimeUnit; + +public class LegacyDateFieldTypeTests extends FieldTypeTestCase { + @Override + protected MappedFieldType createDefaultFieldType() { + return new LegacyDateFieldMapper.DateFieldType(); + } + + @Before + public void setupProperties() { + setDummyNullValue(10); + addModifier(new Modifier("format", true) { + @Override + public void modify(MappedFieldType ft) { + ((LegacyDateFieldMapper.DateFieldType) ft).setDateTimeFormatter(Joda.forPattern("basic_week_date", Locale.ROOT)); + } + }); + addModifier(new Modifier("locale", true) { + @Override + public void modify(MappedFieldType ft) { + ((LegacyDateFieldMapper.DateFieldType) ft).setDateTimeFormatter(Joda.forPattern("date_optional_time", Locale.CANADA)); + } + }); + addModifier(new Modifier("numeric_resolution", true) { + @Override + public void modify(MappedFieldType ft) { + ((LegacyDateFieldMapper.DateFieldType)ft).setTimeUnit(TimeUnit.HOURS); + } + }); + } + + public void 
testIsFieldWithinQueryEmptyReader() throws IOException { + IndexReader reader = new MultiReader(); + DateFieldType ft = new DateFieldType(); + ft.setName("my_date"); + assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", + randomBoolean(), randomBoolean(), null, null)); + } + + private void doTestIsFieldWithinQuery(DateFieldType ft, DirectoryReader reader, + DateTimeZone zone, DateMathParser alternateFormat) throws IOException { + assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02", + randomBoolean(), randomBoolean(), null, null)); + assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-06-20", + randomBoolean(), randomBoolean(), null, null)); + assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-02-12", + randomBoolean(), randomBoolean(), null, null)); + assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2014-01-02", "2015-02-12", + randomBoolean(), randomBoolean(), null, null)); + assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2016-05-11", "2016-08-30", + randomBoolean(), randomBoolean(), null, null)); + assertEquals(Relation.WITHIN, ft.isFieldWithinQuery(reader, "2015-09-25", "2016-05-29", + randomBoolean(), randomBoolean(), null, null)); + assertEquals(Relation.WITHIN, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", + true, true, null, null)); + assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", + false, false, null, null)); + assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", + false, true, null, null)); + assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", + true, false, null, null)); + } + + public void testIsFieldWithinQuery() throws IOException { + Directory dir = newDirectory(); + IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null)); + long instant1 
= LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime("2015-10-12").getMillis(); + long instant2 = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime("2016-04-03").getMillis(); + Document doc = new Document(); + LegacyLongField field = new LegacyLongField("my_date", instant1, Store.NO); + doc.add(field); + w.addDocument(doc); + field.setLongValue(instant2); + w.addDocument(doc); + DirectoryReader reader = DirectoryReader.open(w); + DateFieldType ft = new DateFieldType(); + ft.setName("my_date"); + DateMathParser alternateFormat = new DateMathParser(LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER); + doTestIsFieldWithinQuery(ft, reader, null, null); + doTestIsFieldWithinQuery(ft, reader, null, alternateFormat); + doTestIsFieldWithinQuery(ft, reader, DateTimeZone.UTC, null); + doTestIsFieldWithinQuery(ft, reader, DateTimeZone.UTC, alternateFormat); + IOUtils.close(reader, w, dir); + } + + public void testValueFormat() { + MappedFieldType ft = createDefaultFieldType(); + long instant = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime("2015-10-12T14:10:55").getMillis(); + assertEquals("2015-10-12T14:10:55.000Z", + ft.docValueFormat(null, DateTimeZone.UTC).format(instant)); + assertEquals("2015-10-12T15:10:55.000+01:00", + ft.docValueFormat(null, DateTimeZone.forOffsetHours(1)).format(instant)); + assertEquals("2015", + createDefaultFieldType().docValueFormat("YYYY", DateTimeZone.UTC).format(instant)); + assertEquals(instant, + ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12T14:10:55", false, null)); + assertEquals(instant, + ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12T14:10:55", true, null)); + assertEquals(LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime("2015-10-13").getMillis() - 1, + ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12||/d", true, null)); + } + + public void testValueForSearch() { + MappedFieldType ft = 
createDefaultFieldType(); + String date = "2015-10-12T12:09:55.000Z"; + long instant = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime(date).getMillis(); + assertEquals(date, ft.valueForSearch(instant)); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/DoubleFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/LegacyDoubleFieldTypeTests.java similarity index 89% rename from core/src/test/java/org/elasticsearch/index/mapper/core/DoubleFieldTypeTests.java rename to core/src/test/java/org/elasticsearch/index/mapper/core/LegacyDoubleFieldTypeTests.java index 5d6543a346d7c..65660776d29c2 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/DoubleFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/LegacyDoubleFieldTypeTests.java @@ -21,15 +21,15 @@ import org.elasticsearch.index.mapper.FieldTypeTestCase; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType.Relation; -import org.elasticsearch.index.mapper.core.DoubleFieldMapper.DoubleFieldType; +import org.elasticsearch.index.mapper.core.LegacyDoubleFieldMapper.DoubleFieldType; import org.junit.Before; import java.io.IOException; -public class DoubleFieldTypeTests extends FieldTypeTestCase { +public class LegacyDoubleFieldTypeTests extends FieldTypeTestCase { @Override protected MappedFieldType createDefaultFieldType() { - return new DoubleFieldMapper.DoubleFieldType(); + return new LegacyDoubleFieldMapper.DoubleFieldType(); } @Before diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/FloatFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/LegacyFloatFieldTypeTests.java similarity index 89% rename from core/src/test/java/org/elasticsearch/index/mapper/core/FloatFieldTypeTests.java rename to core/src/test/java/org/elasticsearch/index/mapper/core/LegacyFloatFieldTypeTests.java index 
9a265b373f91a..d75f958345833 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/FloatFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/LegacyFloatFieldTypeTests.java @@ -21,15 +21,15 @@ import org.elasticsearch.index.mapper.FieldTypeTestCase; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType.Relation; -import org.elasticsearch.index.mapper.core.FloatFieldMapper.FloatFieldType; +import org.elasticsearch.index.mapper.core.LegacyFloatFieldMapper.FloatFieldType; import org.junit.Before; import java.io.IOException; -public class FloatFieldTypeTests extends FieldTypeTestCase { +public class LegacyFloatFieldTypeTests extends FieldTypeTestCase { @Override protected MappedFieldType createDefaultFieldType() { - return new FloatFieldMapper.FloatFieldType(); + return new LegacyFloatFieldMapper.FloatFieldType(); } @Before diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/IntegerFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/LegacyIntegerFieldTypeTests.java similarity index 88% rename from core/src/test/java/org/elasticsearch/index/mapper/core/IntegerFieldTypeTests.java rename to core/src/test/java/org/elasticsearch/index/mapper/core/LegacyIntegerFieldTypeTests.java index a8527f9c78b32..9f3fb41e47605 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/IntegerFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/LegacyIntegerFieldTypeTests.java @@ -21,15 +21,15 @@ import org.elasticsearch.index.mapper.FieldTypeTestCase; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType.Relation; -import org.elasticsearch.index.mapper.core.IntegerFieldMapper.IntegerFieldType; +import org.elasticsearch.index.mapper.core.LegacyIntegerFieldMapper.IntegerFieldType; import org.junit.Before; import java.io.IOException; -public class 
IntegerFieldTypeTests extends FieldTypeTestCase { +public class LegacyIntegerFieldTypeTests extends FieldTypeTestCase { @Override protected MappedFieldType createDefaultFieldType() { - return new IntegerFieldMapper.IntegerFieldType(); + return new LegacyIntegerFieldMapper.IntegerFieldType(); } @Before diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/LongFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/LegacyLongFieldTypeTests.java similarity index 89% rename from core/src/test/java/org/elasticsearch/index/mapper/core/LongFieldTypeTests.java rename to core/src/test/java/org/elasticsearch/index/mapper/core/LegacyLongFieldTypeTests.java index 765afcc549217..a52c72a5cc3ba 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/LongFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/LegacyLongFieldTypeTests.java @@ -21,15 +21,15 @@ import org.elasticsearch.index.mapper.FieldTypeTestCase; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType.Relation; -import org.elasticsearch.index.mapper.core.LongFieldMapper.LongFieldType; +import org.elasticsearch.index.mapper.core.LegacyLongFieldMapper.LongFieldType; import org.junit.Before; import java.io.IOException; -public class LongFieldTypeTests extends FieldTypeTestCase { +public class LegacyLongFieldTypeTests extends FieldTypeTestCase { @Override protected MappedFieldType createDefaultFieldType() { - return new LongFieldMapper.LongFieldType(); + return new LegacyLongFieldMapper.LongFieldType(); } @Before diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/ShortFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/LegacyShortFieldTypeTests.java similarity index 91% rename from core/src/test/java/org/elasticsearch/index/mapper/core/ShortFieldTypeTests.java rename to 
core/src/test/java/org/elasticsearch/index/mapper/core/LegacyShortFieldTypeTests.java index 5a93add577627..7501eb5cf1f5a 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/ShortFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/LegacyShortFieldTypeTests.java @@ -22,10 +22,10 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.junit.Before; -public class ShortFieldTypeTests extends FieldTypeTestCase { +public class LegacyShortFieldTypeTests extends FieldTypeTestCase { @Override protected MappedFieldType createDefaultFieldType() { - return new ShortFieldMapper.ShortFieldType(); + return new LegacyShortFieldMapper.ShortFieldType(); } @Before diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/LegacyTokenCountFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/LegacyTokenCountFieldMapperTests.java new file mode 100644 index 0000000000000..ea5bdab132ac0 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/LegacyTokenCountFieldMapperTests.java @@ -0,0 +1,109 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.mapper.core; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.CannedTokenStream; +import org.apache.lucene.analysis.MockTokenizer; +import org.apache.lucene.analysis.Token; +import org.apache.lucene.analysis.TokenStream; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; + +import static org.hamcrest.Matchers.equalTo; + +/** + * Test for {@link LegacyTokenCountFieldMapper}. 
+ */ +public class LegacyTokenCountFieldMapperTests extends ESSingleNodeTestCase { + + private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build(); + + @Override + protected Collection<Class<? extends Plugin>> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + + public void testMerge() throws IOException { + String stage1Mapping = XContentFactory.jsonBuilder().startObject() + .startObject("person") + .startObject("properties") + .startObject("tc") + .field("type", "token_count") + .field("analyzer", "keyword") + .endObject() + .endObject() + .endObject().endObject().string(); + MapperService mapperService = createIndex("test", BW_SETTINGS).mapperService(); + DocumentMapper stage1 = mapperService.merge("person", new CompressedXContent(stage1Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); + + String stage2Mapping = XContentFactory.jsonBuilder().startObject() + .startObject("person") + .startObject("properties") + .startObject("tc") + .field("type", "token_count") + .field("analyzer", "standard") + .endObject() + .endObject() + .endObject().endObject().string(); + DocumentMapper stage2 = mapperService.merge("person", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); + + // previous mapper has not been modified + assertThat(((LegacyTokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("keyword")); + // but the new one has the change + assertThat(((LegacyTokenCountFieldMapper) stage2.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("standard")); + } + + public void testCountPositions() throws IOException { + // We're looking to make sure that we: + Token t1 = new Token(); // Don't count tokens without an increment + t1.setPositionIncrement(0); + Token t2 = new Token(); + t2.setPositionIncrement(1); // Count normal tokens with one increment + Token t3 = new Token(); + t3.setPositionIncrement(2); // Count funny
tokens with more than one increment + int finalTokenIncrement = 4; // Count the final token increment on the rare token streams that have them + Token[] tokens = new Token[] {t1, t2, t3}; + Collections.shuffle(Arrays.asList(tokens), random()); + final TokenStream tokenStream = new CannedTokenStream(finalTokenIncrement, 0, tokens); + // TODO: we have no CannedAnalyzer? + Analyzer analyzer = new Analyzer() { + @Override + public TokenStreamComponents createComponents(String fieldName) { + return new TokenStreamComponents(new MockTokenizer(), tokenStream); + } + }; + assertThat(LegacyTokenCountFieldMapper.countPositions(analyzer, "", ""), equalTo(7)); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/NumberFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/NumberFieldMapperTests.java new file mode 100644 index 0000000000000..0f0f5a3321352 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/NumberFieldMapperTests.java @@ -0,0 +1,319 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.mapper.core; + +import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.IndexableField; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.junit.Before; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +import static org.hamcrest.Matchers.containsString; + +public class NumberFieldMapperTests extends ESSingleNodeTestCase { + + private static final Set<String> TYPES = new HashSet<>(Arrays.asList("byte", "short", "integer", "long", "float", "double")); + + IndexService indexService; + DocumentMapperParser parser; + + @Before + public void before() { + indexService = createIndex("test"); + parser = indexService.mapperService().documentMapperParser(); + } + + public void testDefaults() throws Exception { + for (String type : TYPES) { + doTestDefaults(type); + } + } + + public void doTestDefaults(String type) throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", type).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", 123) + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(2, fields.length); + IndexableField
pointField = fields[0]; + assertEquals(1, pointField.fieldType().pointDimensionCount()); + assertFalse(pointField.fieldType().stored()); + assertEquals(123, pointField.numericValue().doubleValue(), 0d); + IndexableField dvField = fields[1]; + assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType()); + assertFalse(dvField.fieldType().stored()); + } + + public void testNotIndexed() throws Exception { + for (String type : TYPES) { + doTestNotIndexed(type); + } + } + + public void doTestNotIndexed(String type) throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", type).field("index", false).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", 123) + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(1, fields.length); + IndexableField dvField = fields[0]; + assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType()); + } + + public void testNoDocValues() throws Exception { + for (String type : TYPES) { + doTestNoDocValues(type); + } + } + + public void doTestNoDocValues(String type) throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", type).field("doc_values", false).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() +
.field("field", 123) + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(1, fields.length); + IndexableField pointField = fields[0]; + assertEquals(1, pointField.fieldType().pointDimensionCount()); + assertEquals(123, pointField.numericValue().doubleValue(), 0d); + } + + public void testStore() throws Exception { + for (String type : TYPES) { + doTestStore(type); + } + } + + public void doTestStore(String type) throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", type).field("store", true).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", 123) + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(3, fields.length); + IndexableField pointField = fields[0]; + assertEquals(1, pointField.fieldType().pointDimensionCount()); + assertEquals(123, pointField.numericValue().doubleValue(), 0d); + IndexableField dvField = fields[1]; + assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType()); + IndexableField storedField = fields[2]; + assertTrue(storedField.fieldType().stored()); + assertEquals(123, storedField.numericValue().doubleValue(), 0d); + } + + public void testCoerce() throws Exception { + for (String type : TYPES) { + doTestCoerce(type); + } + } + + public void doTestCoerce(String type) throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", type).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = 
parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "123") + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(2, fields.length); + IndexableField pointField = fields[0]; + assertEquals(1, pointField.fieldType().pointDimensionCount()); + assertEquals(123, pointField.numericValue().doubleValue(), 0d); + IndexableField dvField = fields[1]; + assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType()); + + mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", type).field("coerce", false).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper2.mappingSource().toString()); + + ThrowingRunnable runnable = () -> mapper2.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "123") + .endObject() + .bytes()); + MapperParsingException e = expectThrows(MapperParsingException.class, runnable); + assertThat(e.getCause().getMessage(), containsString("passed as String")); + } + + public void testIgnoreMalformed() throws Exception { + for (String type : TYPES) { + doTestIgnoreMalformed(type); + } + } + + public void doTestIgnoreMalformed(String type) throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", type).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ThrowingRunnable runnable = () -> mapper.parse("test", 
"type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "a") + .endObject() + .bytes()); + MapperParsingException e = expectThrows(MapperParsingException.class, runnable); + assertThat(e.getCause().getMessage(), containsString("For input string: \"a\"")); + + mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", type).field("ignore_malformed", true).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping)); + + ParsedDocument doc = mapper2.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "a") + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(0, fields.length); + } + + public void testIncludeInAll() throws Exception { + for (String type : TYPES) { + doTestIncludeInAll(type); + } + } + + public void doTestIncludeInAll(String type) throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", type).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", 123) + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("_all"); + assertEquals(1, fields.length); + assertEquals("123", fields[0].stringValue()); + + mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", type) + .field("include_in_all", false).endObject().endObject() + .endObject().endObject().string(); + + mapper = parser.parse("type", new 
CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", 123) + .endObject() + .bytes()); + + fields = doc.rootDoc().getFields("_all"); + assertEquals(0, fields.length); + } + + public void testRejectNorms() throws IOException { + // not supported as of 5.0 + for (String type : Arrays.asList("byte", "short", "integer", "long", "float", "double")) { + DocumentMapperParser parser = createIndex("index-" + type).mapperService().documentMapperParser(); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("foo") + .field("type", type) + .field("norms", random().nextBoolean()) + .endObject() + .endObject().endObject().endObject().string(); + MapperParsingException e = expectThrows(MapperParsingException.class, + () -> parser.parse("type", new CompressedXContent(mapping))); + assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters: [norms")); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/NumberFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/NumberFieldTypeTests.java new file mode 100644 index 0000000000000..4bb73d857ae47 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/NumberFieldTypeTests.java @@ -0,0 +1,52 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.mapper.core; + +import com.carrotsearch.randomizedtesting.generators.RandomPicks; + +import org.elasticsearch.index.mapper.FieldTypeTestCase; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedFieldType.Relation; +import org.elasticsearch.index.mapper.core.NumberFieldMapper.NumberType; +import org.junit.Before; + +import java.io.IOException; + +public class NumberFieldTypeTests extends FieldTypeTestCase { + + NumberType type; + + @Before + public void pickType() { + type = RandomPicks.randomFrom(random(), NumberFieldMapper.NumberType.values()); + } + + @Override + protected MappedFieldType createDefaultFieldType() { + return new NumberFieldMapper.NumberFieldType(type); + } + + public void testIsFieldWithinQuery() throws IOException { + MappedFieldType ft = createDefaultFieldType(); + // current impl ignores args and should always return INTERSECTS + assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null, randomDouble(), randomDouble(), + randomBoolean(), randomBoolean(), null, null)); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java index e8fe39e9bbb9b..0a27a7ebb319b 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java @@ -27,7 +27,6 @@ import 
org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -52,7 +51,8 @@ public void testMerge() throws IOException { .endObject() .endObject().endObject().string(); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper stage1 = mapperService.merge("person", new CompressedXContent(stage1Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); + DocumentMapper stage1 = mapperService.merge("person", + new CompressedXContent(stage1Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); String stage2Mapping = XContentFactory.jsonBuilder().startObject() .startObject("person") @@ -63,7 +63,8 @@ public void testMerge() throws IOException { .endObject() .endObject() .endObject().endObject().string(); - DocumentMapper stage2 = mapperService.merge("person", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); + DocumentMapper stage2 = mapperService.merge("person", + new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); // previous mapper has not been modified assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("keyword")); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/date/LegacyDateMappingTests.java similarity index 91% rename from core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java rename to core/src/test/java/org/elasticsearch/index/mapper/date/LegacyDateMappingTests.java index 6eea0e3be3a96..7172c42b32f89 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/mapper/date/LegacyDateMappingTests.java @@ -45,11 +45,13 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.mapper.core.DateFieldMapper; -import org.elasticsearch.index.mapper.core.LongFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyDateFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyLongFieldMapper; import org.elasticsearch.index.mapper.core.TextFieldMapper; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.TestSearchContext; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -57,6 +59,7 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Collection; import java.util.List; import java.util.Locale; import java.util.Map; @@ -70,14 +73,21 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -public class SimpleDateMappingTests extends ESSingleNodeTestCase { +public class LegacyDateMappingTests extends ESSingleNodeTestCase { + + private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build(); + + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } public void testAutomaticDateParser() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").endObject() .endObject().endObject().string(); - IndexService index = createIndex("test"); + IndexService index = createIndex("test", BW_SETTINGS); client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get(); 
DocumentMapper defaultMapper = index.mapperService().documentMapper("type"); @@ -95,12 +105,12 @@ public void testAutomaticDateParser() throws Exception { defaultMapper = index.mapperService().documentMapper("type"); FieldMapper fieldMapper = defaultMapper.mappers().smartNameFieldMapper("date_field1"); - assertThat(fieldMapper, instanceOf(DateFieldMapper.class)); - DateFieldMapper dateFieldMapper = (DateFieldMapper)fieldMapper; + assertThat(fieldMapper, instanceOf(LegacyDateFieldMapper.class)); + LegacyDateFieldMapper dateFieldMapper = (LegacyDateFieldMapper)fieldMapper; assertEquals("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis", dateFieldMapper.fieldType().dateTimeFormatter().format()); assertEquals(1265587200000L, dateFieldMapper.fieldType().dateTimeFormatter().parser().parseMillis("1265587200000")); fieldMapper = defaultMapper.mappers().smartNameFieldMapper("date_field2"); - assertThat(fieldMapper, instanceOf(DateFieldMapper.class)); + assertThat(fieldMapper, instanceOf(LegacyDateFieldMapper.class)); fieldMapper = defaultMapper.mappers().smartNameFieldMapper("wrong_date1"); assertThat(fieldMapper, instanceOf(TextFieldMapper.class)); @@ -168,16 +178,7 @@ public void reset() { int i = 0; private DocumentMapper mapper(String indexName, String type, String mapping) throws IOException { - return mapper(indexName, type, mapping, Version.CURRENT); - } - - private DocumentMapper mapper(String indexName, String type, String mapping, Version version) throws IOException { - IndexService index; - if (version.equals(Version.CURRENT)) { - index = createIndex(indexName); - } else { - index = createIndex(indexName, Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build()); - } + IndexService index = createIndex(indexName, BW_SETTINGS); client().admin().indices().preparePutMapping(indexName).setType(type).setSource(mapping).get(); return index.mapperService().documentMapper(type); } @@ -253,7 +254,7 @@ public void testHourFormat() throws Exception { 
.field("date_field", "10:00:00") .endObject() .bytes()); - assertThat(((LongFieldMapper.CustomLongNumericField) doc.rootDoc().getField("date_field")).numericAsString(), equalTo(Long.toString(new DateTime(TimeValue.timeValueHours(10).millis(), DateTimeZone.UTC).getMillis()))); + assertThat(((LegacyLongFieldMapper.CustomLongNumericField) doc.rootDoc().getField("date_field")).numericAsString(), equalTo(Long.toString(new DateTime(TimeValue.timeValueHours(10).millis(), DateTimeZone.UTC).getMillis()))); LegacyNumericRangeQuery rangeQuery; try { @@ -279,7 +280,7 @@ public void testDayWithoutYearFormat() throws Exception { .field("date_field", "Jan 02 10:00:00") .endObject() .bytes()); - assertThat(((LongFieldMapper.CustomLongNumericField) doc.rootDoc().getField("date_field")).numericAsString(), equalTo(Long.toString(new DateTime(TimeValue.timeValueHours(34).millis(), DateTimeZone.UTC).getMillis()))); + assertThat(((LegacyLongFieldMapper.CustomLongNumericField) doc.rootDoc().getField("date_field")).numericAsString(), equalTo(Long.toString(new DateTime(TimeValue.timeValueHours(34).millis(), DateTimeZone.UTC).getMillis()))); LegacyNumericRangeQuery rangeQuery; try { @@ -336,7 +337,10 @@ public void testIgnoreMalformedOption() throws Exception { } // Unless the global ignore_malformed option is set to true - Settings indexSettings = Settings.builder().put("index.mapping.ignore_malformed", true).build(); + Settings indexSettings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0) + .put("index.mapping.ignore_malformed", true) + .build(); defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -379,16 +383,16 @@ public void testThatMergingWorks() throws Exception { DocumentMapper defaultMapper = mapper("test1", "type", initialMapping); DocumentMapper mergeMapper = 
mapper("test2", "type", updatedMapping); - assertThat(defaultMapper.mappers().getMapper("field"), is(instanceOf(DateFieldMapper.class))); - DateFieldMapper initialDateFieldMapper = (DateFieldMapper) defaultMapper.mappers().getMapper("field"); + assertThat(defaultMapper.mappers().getMapper("field"), is(instanceOf(LegacyDateFieldMapper.class))); + LegacyDateFieldMapper initialDateFieldMapper = (LegacyDateFieldMapper) defaultMapper.mappers().getMapper("field"); Map config = getConfigurationViaXContent(initialDateFieldMapper); assertThat(config.get("format"), is("EEE MMM dd HH:mm:ss.S Z yyyy||EEE MMM dd HH:mm:ss.SSS Z yyyy")); defaultMapper = defaultMapper.merge(mergeMapper.mapping(), false); - assertThat(defaultMapper.mappers().getMapper("field"), is(instanceOf(DateFieldMapper.class))); + assertThat(defaultMapper.mappers().getMapper("field"), is(instanceOf(LegacyDateFieldMapper.class))); - DateFieldMapper mergedFieldMapper = (DateFieldMapper) defaultMapper.mappers().getMapper("field"); + LegacyDateFieldMapper mergedFieldMapper = (LegacyDateFieldMapper) defaultMapper.mappers().getMapper("field"); Map mergedConfig = getConfigurationViaXContent(mergedFieldMapper); assertThat(mergedConfig.get("format"), is("EEE MMM dd HH:mm:ss.S Z yyyy||EEE MMM dd HH:mm:ss.SSS Z yyyy||yyyy-MM-dd'T'HH:mm:ss.SSSZZ")); } @@ -409,7 +413,7 @@ public void testDefaultDocValues() throws Exception { assertEquals(DocValuesType.SORTED_NUMERIC, docValuesType(doc, "date_field")); } - private Map getConfigurationViaXContent(DateFieldMapper dateFieldMapper) throws IOException { + private Map getConfigurationViaXContent(LegacyDateFieldMapper dateFieldMapper) throws IOException { XContentBuilder builder = JsonXContent.contentBuilder().startObject(); dateFieldMapper.toXContent(builder, ToXContent.EMPTY_PARAMS).endObject(); Map dateFieldMapperMap; @@ -462,9 +466,9 @@ public void testThatNewIndicesOnlyAllowStrictDates() throws Exception { .startObject("properties").startObject("date_field").field("type", 
"date").endObject().endObject() .endObject().endObject().string(); - IndexService index = createIndex("test"); + IndexService index = createIndex("test", BW_SETTINGS); client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get(); - assertDateFormat(DateFieldMapper.Defaults.DATE_TIME_FORMATTER.format()); + assertDateFormat(LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.format()); DocumentMapper defaultMapper = index.mapperService().documentMapper("type"); // also test normal date diff --git a/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/genericstore/GenericStoreDynamicTemplateTests.java b/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/genericstore/GenericStoreDynamicTemplateTests.java index da5c53f46f956..b013f9b4a5685 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/genericstore/GenericStoreDynamicTemplateTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/genericstore/GenericStoreDynamicTemplateTests.java @@ -55,9 +55,11 @@ public void testSimple() throws Exception { FieldMapper fieldMapper = docMapper.mappers().getMapper("name"); assertThat(fieldMapper.fieldType().stored(), equalTo(true)); - f = doc.getField("age"); - assertThat(f.name(), equalTo("age")); - assertThat(f.fieldType().stored(), equalTo(true)); + boolean stored = false; + for (IndexableField field : doc.getFields("age")) { + stored |= field.fieldType().stored(); + } + assertTrue(stored); fieldMapper = docMapper.mappers().getMapper("age"); assertThat(fieldMapper.fieldType().stored(), equalTo(true)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java index 1a89380d951ad..a535c4ec3dac3 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.index.mapper.geo; +import org.apache.lucene.index.IndexableField; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; @@ -428,21 +429,44 @@ public void testArrayLatLonValues() throws Exception { .endObject() .bytes()); - assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(2)); - assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2)); - assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2)); - assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3)); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().getFields("point")[0].stringValue(), equalTo("1.2,1.3")); - } else { + if (version.onOrAfter(Version.V_5_0_0)) { + assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(4)); + assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(4)); + + // point field for 1st value + assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2)); + assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3)); + // stored field for 1st value + assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.2)); + assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.3)); + // indexed hash assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3))); - } - assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4)); - assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5)); - if (version.before(Version.V_2_2_0)) { - 
assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5")); - } else { + + // point field for 2nd value + assertThat(doc.rootDoc().getFields("point.lat")[2].numericValue().doubleValue(), equalTo(1.4)); + assertThat(doc.rootDoc().getFields("point.lon")[2].numericValue().doubleValue(), equalTo(1.5)); + // stored field for 2nd value + assertThat(doc.rootDoc().getFields("point.lat")[3].numericValue().doubleValue(), equalTo(1.4)); + assertThat(doc.rootDoc().getFields("point.lon")[3].numericValue().doubleValue(), equalTo(1.5)); + // indexed hash assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(mortonHash(1.4, 1.5))); + } else { + assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(2)); + assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2)); + assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2)); + assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3)); + if (version.before(Version.V_2_2_0)) { + assertThat(doc.rootDoc().getFields("point")[0].stringValue(), equalTo("1.2,1.3")); + } else { + assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3))); + } + assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4)); + assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5)); + if (version.before(Version.V_2_2_0)) { + assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5")); + } else { + assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(mortonHash(1.4, 1.5))); + } } } @@ -514,17 +538,28 @@ public void testLatLonInOneValueArray() throws Exception { .endObject() .bytes()); - assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(2)); - assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2)); - 
assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2)); - assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3)); + if (version.before(Version.V_5_0_0)) { + assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(2)); + assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2)); + assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2)); + assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3)); + assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4)); + assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5)); + } else { + IndexableField[] latPoints = doc.rootDoc().getFields("point.lat"); + IndexableField[] lonPoints = doc.rootDoc().getFields("point.lon"); + assertThat(latPoints.length, equalTo(4)); + assertThat(lonPoints.length, equalTo(4)); + assertThat(latPoints[0].numericValue().doubleValue(), equalTo(1.2)); + assertThat(lonPoints[0].numericValue().doubleValue(), equalTo(1.3)); + assertThat(latPoints[2].numericValue().doubleValue(), equalTo(1.4)); + assertThat(lonPoints[2].numericValue().doubleValue(), equalTo(1.5)); + } if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().getFields("point")[0].stringValue(), equalTo("1.2,1.3")); } else { assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3))); } - assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4)); - assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5)); if (version.before(Version.V_2_2_0)) { assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5")); } else { @@ -625,20 +660,35 @@ public void testLonLatArrayArrayStored() throws Exception { .endObject() .bytes()); - 
assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(2)); - assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2)); - assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2)); - assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3)); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); + if (version.before(Version.V_5_0_0)) { + assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(2)); + assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2)); + assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2)); + assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3)); + if (version.before(Version.V_2_2_0)) { + assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); + } else { + assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3))); + } + assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4)); + assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5)); + if (version.before(Version.V_2_2_0)) { + assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); + } else { + assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(mortonHash(1.4, 1.5))); + } } else { + assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(4)); + assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(4)); + assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2)); + assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.2)); + assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3)); + 
assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.3)); assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3))); - } - assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4)); - assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5)); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); - } else { + assertThat(doc.rootDoc().getFields("point.lat")[2].numericValue().doubleValue(), equalTo(1.4)); + assertThat(doc.rootDoc().getFields("point.lat")[3].numericValue().doubleValue(), equalTo(1.4)); + assertThat(doc.rootDoc().getFields("point.lon")[2].numericValue().doubleValue(), equalTo(1.5)); + assertThat(doc.rootDoc().getFields("point.lon")[3].numericValue().doubleValue(), equalTo(1.5)); assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(mortonHash(1.4, 1.5))); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldTypeTests.java index 6934d06a509f7..0c209c25d5230 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldTypeTests.java @@ -20,7 +20,7 @@ import org.elasticsearch.index.mapper.FieldTypeTestCase; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.core.DoubleFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyDoubleFieldMapper; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.junit.Before; @@ -41,7 +41,7 @@ public void modify(MappedFieldType ft) { addModifier(new Modifier("lat_lon", false) { @Override public void modify(MappedFieldType ft) { - ((BaseGeoPointFieldMapper.GeoPointFieldType)ft).setLatLonEnabled(new 
DoubleFieldMapper.DoubleFieldType(), new DoubleFieldMapper.DoubleFieldType()); + ((BaseGeoPointFieldMapper.GeoPointFieldType)ft).setLatLonEnabled(new LegacyDoubleFieldMapper.DoubleFieldType(), new LegacyDoubleFieldMapper.DoubleFieldType()); } }); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/internal/TimestampFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/internal/TimestampFieldTypeTests.java index 1ce9375f94ffc..deb21b434cad5 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/internal/TimestampFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/internal/TimestampFieldTypeTests.java @@ -20,9 +20,9 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.core.DateFieldMapper; -import org.elasticsearch.index.mapper.core.DateFieldTypeTests; +import org.elasticsearch.index.mapper.core.LegacyDateFieldTypeTests; -public class TimestampFieldTypeTests extends DateFieldTypeTests { +public class TimestampFieldTypeTests extends LegacyDateFieldTypeTests { @Override protected MappedFieldType createDefaultFieldType() { return new TimestampFieldMapper.TimestampFieldType(); @@ -32,7 +32,7 @@ protected MappedFieldType createDefaultFieldType() { public void testValueForSearch() { MappedFieldType ft = createDefaultFieldType(); String date = "2015-10-12T12:09:55.000Z"; - long instant = DateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime(date).getMillis(); + long instant = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(date).getMillis(); assertEquals(instant, ft.valueForSearch(instant)); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ip/IpFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ip/IpFieldMapperTests.java new file mode 100644 index 0000000000000..3bb96cce31ef6 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/mapper/ip/IpFieldMapperTests.java @@ -0,0 +1,220 @@ +/* + * 
Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.mapper.ip; + +import org.apache.lucene.document.InetAddressPoint; +import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.junit.Before; + +import static org.hamcrest.Matchers.containsString; + +import java.net.InetAddress; + +public class IpFieldMapperTests extends ESSingleNodeTestCase { + + IndexService indexService; + DocumentMapperParser parser; + + @Before + public void before() { + indexService = createIndex("test"); + parser = indexService.mapperService().documentMapperParser(); + } + + public void testDefaults() throws Exception { + String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "ip").endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "::1") + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(2, fields.length); + IndexableField pointField = fields[0]; + assertEquals(1, pointField.fieldType().pointDimensionCount()); + assertEquals(16, pointField.fieldType().pointNumBytes()); + assertFalse(pointField.fieldType().stored()); + assertEquals(new BytesRef(InetAddressPoint.encode(InetAddresses.forString("::1"))), pointField.binaryValue()); + IndexableField dvField = fields[1]; + assertEquals(DocValuesType.SORTED_SET, dvField.fieldType().docValuesType()); + assertEquals(new BytesRef(InetAddressPoint.encode(InetAddresses.forString("::1"))), dvField.binaryValue()); + assertFalse(dvField.fieldType().stored()); + } + + public void testNotIndexed() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "ip").field("index", false).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "::1") + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(1, fields.length); + IndexableField dvField = fields[0]; + assertEquals(DocValuesType.SORTED_SET, 
dvField.fieldType().docValuesType()); + } + + public void testNoDocValues() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "ip").field("doc_values", false).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "::1") + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(1, fields.length); + IndexableField pointField = fields[0]; + assertEquals(1, pointField.fieldType().pointDimensionCount()); + assertEquals(new BytesRef(InetAddressPoint.encode(InetAddresses.forString("::1"))), pointField.binaryValue()); + } + + public void testStore() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "ip").field("store", true).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "::1") + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(3, fields.length); + IndexableField pointField = fields[0]; + assertEquals(1, pointField.fieldType().pointDimensionCount()); + IndexableField dvField = fields[1]; + assertEquals(DocValuesType.SORTED_SET, dvField.fieldType().docValuesType()); + IndexableField storedField = fields[2]; + assertTrue(storedField.fieldType().stored()); + assertEquals(new 
BytesRef(InetAddressPoint.encode(InetAddress.getByName("::1"))), + storedField.binaryValue()); + } + + public void testIgnoreMalformed() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "ip").endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ThrowingRunnable runnable = () -> mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", ":1") + .endObject() + .bytes()); + MapperParsingException e = expectThrows(MapperParsingException.class, runnable); + assertThat(e.getCause().getMessage(), containsString("':1' is not an IP string literal")); + + mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "ip").field("ignore_malformed", true).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping)); + + ParsedDocument doc = mapper2.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", ":1") + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(0, fields.length); + } + + public void testIncludeInAll() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "ip").endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "::1") + 
.endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("_all"); + assertEquals(1, fields.length); + assertEquals("::1", fields[0].stringValue()); + + mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "ip") + .field("include_in_all", false).endObject().endObject() + .endObject().endObject().string(); + + mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "::1") + .endObject() + .bytes()); + + fields = doc.rootDoc().getFields("_all"); + assertEquals(0, fields.length); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ip/IpFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ip/IpFieldTypeTests.java new file mode 100644 index 0000000000000..1c5bbd9dd512e --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/mapper/ip/IpFieldTypeTests.java @@ -0,0 +1,75 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.index.mapper.ip; + +import java.net.InetAddress; + +import org.apache.lucene.document.InetAddressPoint; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.index.mapper.FieldTypeTestCase; +import org.elasticsearch.index.mapper.MappedFieldType; + +public class IpFieldTypeTests extends FieldTypeTestCase { + @Override + protected MappedFieldType createDefaultFieldType() { + return new IpFieldMapper.IpFieldType(); + } + + public void testValueFormat() throws Exception { + MappedFieldType ft = createDefaultFieldType(); + String ip = "2001:db8::2:1"; + BytesRef asBytes = new BytesRef(InetAddressPoint.encode(InetAddress.getByName(ip))); + assertEquals(ip, ft.docValueFormat(null, null).format(asBytes)); + + ip = "192.168.1.7"; + asBytes = new BytesRef(InetAddressPoint.encode(InetAddress.getByName(ip))); + assertEquals(ip, ft.docValueFormat(null, null).format(asBytes)); + } + + public void testValueForSearch() throws Exception { + MappedFieldType ft = createDefaultFieldType(); + String ip = "2001:db8::2:1"; + BytesRef asBytes = new BytesRef(InetAddressPoint.encode(InetAddresses.forString(ip))); + assertEquals(ip, ft.valueForSearch(asBytes)); + + ip = "192.168.1.7"; + asBytes = new BytesRef(InetAddressPoint.encode(InetAddresses.forString(ip))); + assertEquals(ip, ft.valueForSearch(asBytes)); + } + + public void testTermQuery() { + MappedFieldType ft = createDefaultFieldType(); + ft.setName("field"); + + String ip = "2001:db8::2:1"; + assertEquals(InetAddressPoint.newExactQuery("field", InetAddresses.forString(ip)), ft.termQuery(ip, null)); + + ip = "192.168.1.7"; + assertEquals(InetAddressPoint.newExactQuery("field", InetAddresses.forString(ip)), ft.termQuery(ip, null)); + + ip = "2001:db8::2:1"; + String prefix = ip + "/64"; + assertEquals(InetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 64), ft.termQuery(prefix, null)); + + ip = "192.168.1.7"; + 
prefix = ip + "/16"; + assertEquals(InetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 16), ft.termQuery(prefix, null)); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ip/SimpleIpMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ip/LegacyIpMappingTests.java similarity index 79% rename from core/src/test/java/org/elasticsearch/index/mapper/ip/SimpleIpMappingTests.java rename to core/src/test/java/org/elasticsearch/index/mapper/ip/LegacyIpMappingTests.java index 0e6aea56f7ed9..35eac98f58087 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ip/SimpleIpMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ip/LegacyIpMappingTests.java @@ -19,13 +19,19 @@ package org.elasticsearch.index.mapper.ip; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; + +import java.util.Collection; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; @@ -36,14 +42,21 @@ /** * */ -public class SimpleIpMappingTests extends ESSingleNodeTestCase { +public class LegacyIpMappingTests extends ESSingleNodeTestCase { + + private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build(); + + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } public void testSimpleMapping() throws Exception { String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("ip").field("type", "ip").endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + DocumentMapper defaultMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -56,12 +69,12 @@ public void testSimpleMapping() throws Exception { } public void testThatValidIpCanBeConvertedToLong() throws Exception { - assertThat(IpFieldMapper.ipToLong("127.0.0.1"), is(2130706433L)); + assertThat(LegacyIpFieldMapper.ipToLong("127.0.0.1"), is(2130706433L)); } public void testThatInvalidIpThrowsException() throws Exception { try { - IpFieldMapper.ipToLong("127.0.011.1111111"); + LegacyIpFieldMapper.ipToLong("127.0.011.1111111"); fail("Expected ip address parsing to fail but did not happen"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("not a valid ip address")); @@ -70,7 +83,7 @@ public void testThatInvalidIpThrowsException() throws Exception { public void testThatIpv6AddressThrowsException() throws Exception { try { - IpFieldMapper.ipToLong("2001:db8:0:8d3:0:8a2e:70:7344"); + LegacyIpFieldMapper.ipToLong("2001:db8:0:8d3:0:8a2e:70:7344"); fail("Expected ip address parsing to fail but did not happen"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("not a valid ipv4 address")); @@ -83,7 +96,7 @@ public void testIgnoreMalformedOption() throws Exception { .field("ignore_malformed", false).endObject().startObject("field3").field("type", "ip").endObject().endObject().endObject() .endObject().string(); - DocumentMapper defaultMapper = 
createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + DocumentMapper defaultMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject().field("field1", "").field("field2", "10.20.30.40").endObject().bytes()); @@ -104,7 +117,10 @@ public void testIgnoreMalformedOption() throws Exception { } // Unless the global ignore_malformed option is set to true - Settings indexSettings = Settings.builder().put("index.mapping.ignore_malformed", true).build(); + Settings indexSettings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0) + .put("index.mapping.ignore_malformed", true) + .build(); defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject().field("field3", "").endObject().bytes()); assertThat(doc.rootDoc().getField("field3"), nullValue()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java b/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java index fc900e3c33122..a4d66a388a6eb 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/lucene/StoredNumericValuesTests.java @@ -54,8 +54,9 @@ public void testBytesAndNumericRepresentation() throws Exception { .startObject("field5").field("type", "long").field("store", true).endObject() .startObject("field6").field("type", "double").field("store", true).endObject() .startObject("field7").field("type", "ip").field("store", true).endObject() - .startObject("field8").field("type", "date").field("store", true).endObject() - 
.startObject("field9").field("type", "boolean").field("store", true).endObject() + .startObject("field8").field("type", "ip").field("store", true).endObject() + .startObject("field9").field("type", "date").field("store", true).endObject() + .startObject("field10").field("type", "boolean").field("store", true).endObject() .endObject() .endObject() .endObject() @@ -71,8 +72,9 @@ public void testBytesAndNumericRepresentation() throws Exception { .startArray("field5").value(1).value(2).value(3).endArray() .field("field6", 1.1) .field("field7", "192.168.1.1") - .field("field8", "2016-04-05") - .field("field9", true) + .field("field8", "2001:db8::2:1") + .field("field9", "2016-04-05") + .field("field10", true) .endObject() .bytes()); @@ -85,7 +87,7 @@ public void testBytesAndNumericRepresentation() throws Exception { Collections.emptySet(), Collections.singletonList("field*"), false); searcher.doc(0, fieldsVisitor); fieldsVisitor.postProcess(mapper); - assertThat(fieldsVisitor.fields().size(), equalTo(9)); + assertThat(fieldsVisitor.fields().size(), equalTo(10)); assertThat(fieldsVisitor.fields().get("field1").size(), equalTo(1)); assertThat(fieldsVisitor.fields().get("field1").get(0), equalTo((byte) 1)); @@ -110,10 +112,13 @@ public void testBytesAndNumericRepresentation() throws Exception { assertThat(fieldsVisitor.fields().get("field7").get(0), equalTo("192.168.1.1")); assertThat(fieldsVisitor.fields().get("field8").size(), equalTo(1)); - assertThat(fieldsVisitor.fields().get("field8").get(0), equalTo("2016-04-05T00:00:00.000Z")); + assertThat(fieldsVisitor.fields().get("field8").get(0), equalTo("2001:db8::2:1")); assertThat(fieldsVisitor.fields().get("field9").size(), equalTo(1)); - assertThat(fieldsVisitor.fields().get("field9").get(0), equalTo(true)); + assertThat(fieldsVisitor.fields().get("field9").get(0), equalTo("2016-04-05T00:00:00.000Z")); + + assertThat(fieldsVisitor.fields().get("field10").size(), equalTo(1)); + 
assertThat(fieldsVisitor.fields().get("field10").get(0), equalTo(true)); reader.close(); writer.close(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java index 9d33a1129f5fa..70af77513ab88 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java @@ -42,9 +42,7 @@ import java.io.IOException; import java.util.Arrays; -import java.util.Collections; import java.util.Map; -import java.util.TreeMap; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java b/core/src/test/java/org/elasticsearch/index/mapper/numeric/LegacyNumericTests.java similarity index 77% rename from core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java rename to core/src/test/java/org/elasticsearch/index/mapper/numeric/LegacyNumericTests.java index e48af8ba9ec77..812324a1423cd 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/numeric/LegacyNumericTests.java @@ -37,9 +37,9 @@ import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.mapper.core.FloatFieldMapper; -import org.elasticsearch.index.mapper.core.LongFieldMapper; -import org.elasticsearch.index.mapper.core.NumberFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyFloatFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyLongFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyNumberFieldMapper; 
import org.elasticsearch.index.mapper.core.TextFieldMapper; import org.elasticsearch.index.mapper.string.SimpleStringMappingTests; import org.elasticsearch.plugins.Plugin; @@ -58,64 +58,15 @@ /** */ -public class SimpleNumericTests extends ESSingleNodeTestCase { +public class LegacyNumericTests extends ESSingleNodeTestCase { + + private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build(); @Override protected Collection> getPlugins() { return pluginList(InternalSettingsPlugin.class); } - public void testNumericDetectionEnabled() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .field("numeric_detection", true) - .endObject().endObject().string(); - - IndexService index = createIndex("test"); - client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get(); - DocumentMapper defaultMapper = index.mapperService().documentMapper("type"); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("s_long", "100") - .field("s_double", "100.0") - .endObject() - .bytes()); - assertNotNull(doc.dynamicMappingsUpdate()); - client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get(); - - defaultMapper = index.mapperService().documentMapper("type"); - FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("s_long"); - assertThat(mapper, instanceOf(LongFieldMapper.class)); - - mapper = defaultMapper.mappers().smartNameFieldMapper("s_double"); - assertThat(mapper, instanceOf(FloatFieldMapper.class)); - } - - public void testNumericDetectionDefault() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .endObject().endObject().string(); - - IndexService index = createIndex("test"); - 
client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get(); - DocumentMapper defaultMapper = index.mapperService().documentMapper("type"); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("s_long", "100") - .field("s_double", "100.0") - .endObject() - .bytes()); - assertNotNull(doc.dynamicMappingsUpdate()); - assertAcked(client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get()); - - defaultMapper = index.mapperService().documentMapper("type"); - FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("s_long"); - assertThat(mapper, instanceOf(TextFieldMapper.class)); - - mapper = defaultMapper.mappers().smartNameFieldMapper("s_double"); - assertThat(mapper, instanceOf(TextFieldMapper.class)); - } - public void testIgnoreMalformedOption() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") @@ -158,7 +109,9 @@ public void testIgnoreMalformedOption() throws Exception { } // Unless the global ignore_malformed option is set to true - Settings indexSettings = Settings.builder().put("index.mapping.ignore_malformed", true).build(); + Settings indexSettings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0) + .put("index.mapping.ignore_malformed", true).build(); defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -182,7 +135,7 @@ public void testIgnoreMalformedOption() throws Exception { public void testCoerceOption() throws Exception { String [] nonFractionNumericFieldTypes={"integer","long","short"}; //Test co-ercion policies on all non-fraction numerics - DocumentMapperParser parser = 
createIndex("test").mapperService().documentMapperParser(); + DocumentMapperParser parser = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser(); for (String nonFractionNumericFieldType : nonFractionNumericFieldTypes) { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") @@ -277,46 +230,7 @@ public void testCoerceOption() throws Exception { } } - public void testDocValues() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("int1") - .field("type", "integer") - .endObject() - .startObject("int2") - .field("type", "integer") - .field("index", false) - .endObject() - .startObject("double1") - .field("type", "double") - .endObject() - .startObject("double2") - .field("type", "integer") - .field("index", false) - .endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("int1", "1234") - .field("double1", "1234") - .field("int2", "1234") - .field("double2", "1234") - .endObject() - .bytes()); - Document doc = parsedDoc.rootDoc(); - assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "int1")); - assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "double1")); - assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "int2")); - assertEquals(DocValuesType.SORTED_NUMERIC, SimpleStringMappingTests.docValuesType(doc, "double2")); - - } - - public void testBwCompatDocValues() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") .startObject("int1") @@ -336,8 +250,7 
@@ public void testBwCompatDocValues() throws Exception { .endObject() .endObject().endObject().string(); - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_2_0).build(); - DocumentMapper defaultMapper = createIndex("test", oldIndexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + DocumentMapper defaultMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -368,7 +281,7 @@ public void testUnIndex() throws IOException { .endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + DocumentMapper defaultMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); assertEquals("{\"type\":{\"properties\":{\"double\":{\"type\":\"double\",\"index\":false},\"int\":{\"type\":\"integer\",\"index\":false}}}}", defaultMapper.mapping().toString()); @@ -427,7 +340,7 @@ public void testDocValuesOnNested() throws Exception { .endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + DocumentMapper defaultMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -459,7 +372,7 @@ public void testPrecisionStepDefaultsDetected() throws Exception { .field("date_detection", true) .endObject().endObject().string(); - DocumentMapper mapper = 
createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + DocumentMapper mapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -472,10 +385,10 @@ public void testPrecisionStepDefaultsDetected() throws Exception { assertEquals(1, doc.docs().size()); Document luceneDoc = doc.docs().get(0); - assertPrecisionStepEquals(NumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("long")); + assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("long")); assertThat(luceneDoc.getField("double").numericValue(), instanceOf(Float.class)); - assertPrecisionStepEquals(NumberFieldMapper.Defaults.PRECISION_STEP_32_BIT, luceneDoc.getField("double")); - assertPrecisionStepEquals(NumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("date")); + assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_32_BIT, luceneDoc.getField("double")); + assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("date")); } /** Test default precision step for numeric types */ @@ -510,7 +423,7 @@ public void testPrecisionStepDefaultsMapped() throws Exception { .endObject() .endObject().endObject().string(); - DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + DocumentMapper mapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -528,16 +441,16 @@ public void testPrecisionStepDefaultsMapped() throws Exception { assertEquals(1, doc.docs().size()); Document luceneDoc = doc.docs().get(0); - 
assertPrecisionStepEquals(NumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("long")); - assertPrecisionStepEquals(NumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("double")); - assertPrecisionStepEquals(NumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("date")); - assertPrecisionStepEquals(NumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("ip")); + assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("long")); + assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("double")); + assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("date")); + assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("ip")); - assertPrecisionStepEquals(NumberFieldMapper.Defaults.PRECISION_STEP_32_BIT, luceneDoc.getField("int")); - assertPrecisionStepEquals(NumberFieldMapper.Defaults.PRECISION_STEP_32_BIT, luceneDoc.getField("float")); + assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_32_BIT, luceneDoc.getField("int")); + assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_32_BIT, luceneDoc.getField("float")); - assertPrecisionStepEquals(NumberFieldMapper.Defaults.PRECISION_STEP_16_BIT, luceneDoc.getField("short")); - assertPrecisionStepEquals(NumberFieldMapper.Defaults.PRECISION_STEP_8_BIT, luceneDoc.getField("byte")); + assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_16_BIT, luceneDoc.getField("short")); + assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_8_BIT, luceneDoc.getField("byte")); } /** Test precision step set to silly explicit values */ @@ -580,7 +493,7 @@ public void testPrecisionStepExplicit() throws Exception { .endObject() .endObject().endObject().string(); - DocumentMapper mapper = 
createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + DocumentMapper mapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -681,21 +594,6 @@ private void doTestAnalyzerBackCompat(String type) throws Exception { parser.parse("type", new CompressedXContent(mappingWithTV)); // no exception } - public void testRejectNorms() throws IOException { - // not supported as of 5.0 - for (String type : Arrays.asList("byte", "short", "integer", "long", "float", "double")) { - DocumentMapperParser parser = createIndex("index-" + type).mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("foo") - .field("type", type) - .field("norms", random().nextBoolean()) - .endObject() - .endObject().endObject().endObject().string(); - MapperParsingException e = expectThrows(MapperParsingException.class, () -> parser.parse("type", new CompressedXContent(mapping))); - assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters: [norms")); - } - } public void testIgnoreFielddata() throws IOException { for (String type : Arrays.asList("byte", "short", "integer", "long", "float", "double")) { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java index e1b8dc1b4aad8..17cb8a84664fa 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java @@ -28,9 +28,9 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; import 
org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService.MergeReason; -import org.elasticsearch.index.mapper.core.LongFieldMapper; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; @@ -132,17 +132,18 @@ public void testConflictSameType() throws Exception { mapperService.merge("type", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("mapper [foo] of different type, current_type [long], merged_type [double]")); + assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]")); } try { mapperService.merge("type", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("mapper [foo] of different type, current_type [long], merged_type [double]")); + assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]")); } - assertTrue(mapperService.documentMapper("type").mapping().root().getMapper("foo") instanceof LongFieldMapper); + assertThat(((FieldMapper) mapperService.documentMapper("type").mapping().root().getMapper("foo")).fieldType().typeName(), + equalTo("long")); } public void testConflictNewType() throws Exception { @@ -171,7 +172,8 @@ public void testConflictNewType() throws Exception { assertTrue(e.getMessage(), e.getMessage().contains("mapper [foo] cannot be changed from type [long] to [double]")); } - assertTrue(mapperService.documentMapper("type1").mapping().root().getMapper("foo") instanceof LongFieldMapper); + assertThat(((FieldMapper) 
mapperService.documentMapper("type1").mapping().root().getMapper("foo")).fieldType().typeName(), + equalTo("long")); assertNull(mapperService.documentMapper("type2")); } @@ -206,7 +208,8 @@ public void testConflictNewTypeUpdate() throws Exception { assertTrue(e.getMessage(), e.getMessage().contains("mapper [foo] cannot be changed from type [long] to [double]")); } - assertTrue(mapperService.documentMapper("type1").mapping().root().getMapper("foo") instanceof LongFieldMapper); + assertThat(((FieldMapper) mapperService.documentMapper("type1").mapping().root().getMapper("foo")).fieldType().typeName(), + equalTo("long")); assertNotNull(mapperService.documentMapper("type2")); assertNull(mapperService.documentMapper("type2").mapping().root().getMapper("foo")); } diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java index be2f7697a68de..aa6aa51982f0d 100644 --- a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java @@ -120,6 +120,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.concurrent.ExecutionException; @@ -172,6 +173,12 @@ protected static Index getIndex() { return index; } + private static Version indexVersionCreated; + + protected static Version getIndexVersionCreated() { + return indexVersionCreated; + } + private static String[] currentTypes; protected static String[] getCurrentTypes() { @@ -193,14 +200,14 @@ protected static SearchModule getSearchModule() { @BeforeClass public static void init() throws IOException { // we have to prefer CURRENT since with the range of versions we support it's rather unlikely to get the current actually. - Version version = randomBoolean() ? 
Version.CURRENT : VersionUtils.randomVersionBetween(random(), Version.V_2_0_0_beta1, Version.CURRENT); + indexVersionCreated = randomBoolean() ? Version.CURRENT : VersionUtils.randomVersionBetween(random(), Version.V_2_0_0_beta1, Version.CURRENT); Settings settings = Settings.builder() .put("node.name", AbstractQueryTestCase.class.toString()) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false) .build(); Settings indexSettings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); + .put(IndexMetaData.SETTING_VERSION_CREATED, indexVersionCreated).build(); final ThreadPool threadPool = new ThreadPool(settings); index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_"); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings); diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractTermQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractTermQueryTestCase.java index 161e7582bb836..0fe35530234f7 100644 --- a/core/src/test/java/org/elasticsearch/index/query/AbstractTermQueryTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/query/AbstractTermQueryTestCase.java @@ -25,50 +25,6 @@ import java.util.Map; public abstract class AbstractTermQueryTestCase> extends AbstractQueryTestCase { - @Override - protected final QB doCreateTestQueryBuilder() { - String fieldName = null; - Object value; - switch (randomIntBetween(0, 3)) { - case 0: - if (randomBoolean()) { - fieldName = BOOLEAN_FIELD_NAME; - } - value = randomBoolean(); - break; - case 1: - if (randomBoolean()) { - fieldName = STRING_FIELD_NAME; - } - if (frequently()) { - value = randomAsciiOfLengthBetween(1, 10); - } else { - // generate unicode string in 10% of cases - JsonStringEncoder encoder = JsonStringEncoder.getInstance(); - value = new String(encoder.quoteAsString(randomUnicodeOfLength(10))); - } - break; - case 2: - if 
(randomBoolean()) { - fieldName = INT_FIELD_NAME; - } - value = randomInt(10000); - break; - case 3: - if (randomBoolean()) { - fieldName = DOUBLE_FIELD_NAME; - } - value = randomDouble(); - break; - default: - throw new UnsupportedOperationException(); - } - - if (fieldName == null) { - fieldName = randomAsciiOfLengthBetween(1, 10); - } - return createQueryBuilder(fieldName, value); - } protected abstract QB createQueryBuilder(String fieldName, Object value); diff --git a/core/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java index b14d5f5077658..8a4e27af4d939 100644 --- a/core/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/FuzzyQueryBuilderTests.java @@ -19,17 +19,21 @@ package org.elasticsearch.index.query; +import org.apache.lucene.document.IntPoint; import org.apache.lucene.index.Term; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.LegacyNumericRangeQuery; +import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; +import org.elasticsearch.Version; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.unit.Fuzziness; import org.hamcrest.Matchers; import java.io.IOException; +import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -60,7 +64,7 @@ protected FuzzyQueryBuilder doCreateTestQueryBuilder() { @Override protected void doAssertLuceneQuery(FuzzyQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { if (isNumericFieldName(queryBuilder.fieldName()) || queryBuilder.fieldName().equals(DATE_FIELD_NAME)) { - assertThat(query, instanceOf(LegacyNumericRangeQuery.class)); + assertThat(query, 
either(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class))); } else { assertThat(query, instanceOf(FuzzyQuery.class)); } @@ -139,10 +143,13 @@ public void testToQueryWithNumericField() throws IOException { " }\n" + "}\n"; Query parsedQuery = parseQuery(query).toQuery(createShardContext()); - assertThat(parsedQuery, instanceOf(LegacyNumericRangeQuery.class)); - LegacyNumericRangeQuery fuzzyQuery = (LegacyNumericRangeQuery) parsedQuery; - assertThat(fuzzyQuery.getMin().longValue(), equalTo(7L)); - assertThat(fuzzyQuery.getMax().longValue(), equalTo(17L)); + Query expected; + if (getIndexVersionCreated().onOrAfter(Version.V_5_0_0)) { + expected = IntPoint.newRangeQuery(INT_FIELD_NAME, 7, 17); + } else { + expected = LegacyNumericRangeQuery.newIntRange(INT_FIELD_NAME, 7, 17, true, true); + } + assertEquals(expected, parsedQuery); } public void testFromJson() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java index ed2d8f2d1d0ef..a6b5b65097f33 100644 --- a/core/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; @@ -70,7 +71,8 @@ protected void doAssertLuceneQuery(MatchPhrasePrefixQueryBuilder queryBuilder, Q throws IOException { assertThat(query, notNullValue()); assertThat(query, - either(instanceOf(BooleanQuery.class)).or(instanceOf(MultiPhrasePrefixQuery.class)).or(instanceOf(TermQuery.class))); + 
either(instanceOf(BooleanQuery.class)).or(instanceOf(MultiPhrasePrefixQuery.class)) + .or(instanceOf(TermQuery.class)).or(instanceOf(PointRangeQuery.class))); } public void testIllegalValues() { diff --git a/core/src/test/java/org/elasticsearch/index/query/MatchPhraseQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MatchPhraseQueryBuilderTests.java index 0249b7c8f15a2..7ee35b15af25e 100644 --- a/core/src/test/java/org/elasticsearch/index/query/MatchPhraseQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MatchPhraseQueryBuilderTests.java @@ -21,6 +21,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.PhraseQuery; +import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; @@ -66,7 +67,8 @@ protected MatchPhraseQueryBuilder doCreateTestQueryBuilder() { @Override protected void doAssertLuceneQuery(MatchPhraseQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { assertThat(query, notNullValue()); - assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(PhraseQuery.class)).or(instanceOf(TermQuery.class))); + assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(PhraseQuery.class)) + .or(instanceOf(TermQuery.class)).or(instanceOf(PointRangeQuery.class))); } public void testIllegalValues() { diff --git a/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index c67dcc0650953..4639d40310793 100644 --- a/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -26,6 +26,7 @@ import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.PhraseQuery; +import 
org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.elasticsearch.common.ParseFieldMatcher; @@ -126,15 +127,18 @@ protected void doAssertLuceneQuery(MatchQueryBuilder queryBuilder, Query query, switch (queryBuilder.type()) { case BOOLEAN: assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(ExtendedCommonTermsQuery.class)) - .or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class)).or(instanceOf(LegacyNumericRangeQuery.class))); + .or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class)) + .or(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class))); break; case PHRASE: assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(PhraseQuery.class)) - .or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class)).or(instanceOf(LegacyNumericRangeQuery.class))); + .or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class)) + .or(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class))); break; case PHRASE_PREFIX: assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(MultiPhrasePrefixQuery.class)) - .or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class)).or(instanceOf(LegacyNumericRangeQuery.class))); + .or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class)) + .or(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class))); break; } @@ -214,6 +218,10 @@ protected void doAssertLuceneQuery(MatchQueryBuilder queryBuilder, Query query, assertEquals(value - width, numericRangeQuery.getMin().doubleValue(), width * .1); assertEquals(value + width, numericRangeQuery.getMax().doubleValue(), width * .1); } + + if (query instanceof PointRangeQuery) { + // TODO + } } public void testIllegalValues() { diff --git a/core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java 
b/core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java index 238a186394dd0..f82a2844f25fc 100644 --- a/core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java @@ -29,6 +29,7 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.PhraseQuery; +import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.elasticsearch.common.lucene.all.AllTermQuery; @@ -134,7 +135,8 @@ protected void doAssertLuceneQuery(MultiMatchQueryBuilder queryBuilder, Query qu .or(instanceOf(FuzzyQuery.class)).or(instanceOf(MultiPhrasePrefixQuery.class)) .or(instanceOf(MatchAllDocsQuery.class)).or(instanceOf(ExtendedCommonTermsQuery.class)) .or(instanceOf(MatchNoDocsQuery.class)).or(instanceOf(PhraseQuery.class)) - .or(instanceOf(LegacyNumericRangeQuery.class))); + .or(instanceOf(LegacyNumericRangeQuery.class)) + .or(instanceOf(PointRangeQuery.class))); } public void testIllegaArguments() { diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index 4cb31bd2a2019..2738d14ef0207 100644 --- a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.query; +import org.apache.lucene.document.IntPoint; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; @@ -32,6 +33,7 @@ import org.apache.lucene.search.RegexpQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.automaton.TooComplexToDeterminizeException; +import 
org.elasticsearch.Version; import org.elasticsearch.common.lucene.all.AllTermQuery; import org.hamcrest.Matchers; import org.joda.time.DateTimeZone; @@ -349,9 +351,13 @@ public void testToQueryRegExpQueryTooComplex() throws Exception { public void testToQueryNumericRangeQuery() throws Exception { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); Query query = queryStringQuery("12~0.2").defaultField(INT_FIELD_NAME).toQuery(createShardContext()); - LegacyNumericRangeQuery fuzzyQuery = (LegacyNumericRangeQuery) query; - assertThat(fuzzyQuery.getMin().longValue(), equalTo(12L)); - assertThat(fuzzyQuery.getMax().longValue(), equalTo(12L)); + if (getIndexVersionCreated().onOrAfter(Version.V_5_0_0)) { + assertEquals(IntPoint.newExactQuery(INT_FIELD_NAME, 12), query); + } else { + LegacyNumericRangeQuery fuzzyQuery = (LegacyNumericRangeQuery) query; + assertThat(fuzzyQuery.getMin().longValue(), equalTo(12L)); + assertThat(fuzzyQuery.getMax().longValue(), equalTo(12L)); + } } public void testTimezone() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/query/RandomQueryBuilder.java b/core/src/test/java/org/elasticsearch/index/query/RandomQueryBuilder.java index 33f2bb4532cd6..f99a202eb88d6 100644 --- a/core/src/test/java/org/elasticsearch/index/query/RandomQueryBuilder.java +++ b/core/src/test/java/org/elasticsearch/index/query/RandomQueryBuilder.java @@ -62,7 +62,7 @@ public static MultiTermQueryBuilder createMultiTermQuery(Random r) { // for now, only use String Rangequeries for MultiTerm test, numeric and date makes little sense // see issue #12123 for discussion MultiTermQueryBuilder multiTermQueryBuilder; - switch(RandomInts.randomIntBetween(r, 0, 5)) { + switch(RandomInts.randomIntBetween(r, 0, 3)) { case 0: RangeQueryBuilder stringRangeQuery = new RangeQueryBuilder(AbstractQueryTestCase.STRING_FIELD_NAME); stringRangeQuery.from("a" + RandomStrings.randomAsciiOfLengthBetween(r, 1, 10)); @@ -70,21 
+70,12 @@ public static MultiTermQueryBuilder createMultiTermQuery(Random r) { multiTermQueryBuilder = stringRangeQuery; break; case 1: - RangeQueryBuilder numericRangeQuery = new RangeQueryBuilder(AbstractQueryTestCase.INT_FIELD_NAME); - numericRangeQuery.from(RandomInts.randomIntBetween(r, 1, 100)); - numericRangeQuery.to(RandomInts.randomIntBetween(r, 101, 200)); - multiTermQueryBuilder = numericRangeQuery; - break; - case 2: - multiTermQueryBuilder = new FuzzyQueryBuilder(AbstractQueryTestCase.INT_FIELD_NAME, RandomInts.randomInt(r, 1000)); - break; - case 3: multiTermQueryBuilder = new FuzzyQueryBuilder(AbstractQueryTestCase.STRING_FIELD_NAME, RandomStrings.randomAsciiOfLengthBetween(r, 1, 10)); break; - case 4: + case 2: multiTermQueryBuilder = new PrefixQueryBuilderTests().createTestQueryBuilder(); break; - case 5: + case 3: multiTermQueryBuilder = new WildcardQueryBuilderTests().createTestQueryBuilder(); break; default: diff --git a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java index af0dd487536e0..2a317fd1d35c5 100644 --- a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java @@ -19,7 +19,10 @@ package org.elasticsearch.index.query; +import org.apache.lucene.document.IntPoint; +import org.apache.lucene.document.LongPoint; import org.apache.lucene.search.LegacyNumericRangeQuery; +import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermRangeQuery; import org.elasticsearch.ElasticsearchParseException; @@ -37,6 +40,7 @@ import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; 
import static org.hamcrest.Matchers.is; @@ -122,13 +126,40 @@ protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query, } else if (queryBuilder.fieldName().equals(DATE_FIELD_NAME)) { //we can't properly test unmapped dates because LateParsingQuery is package private } else if (queryBuilder.fieldName().equals(INT_FIELD_NAME)) { - assertThat(query, instanceOf(LegacyNumericRangeQuery.class)); - LegacyNumericRangeQuery numericRangeQuery = (LegacyNumericRangeQuery) query; - assertThat(numericRangeQuery.getField(), equalTo(queryBuilder.fieldName())); - assertThat(numericRangeQuery.getMin(), equalTo(queryBuilder.from())); - assertThat(numericRangeQuery.getMax(), equalTo(queryBuilder.to())); - assertThat(numericRangeQuery.includesMin(), equalTo(queryBuilder.includeLower())); - assertThat(numericRangeQuery.includesMax(), equalTo(queryBuilder.includeUpper())); + assertThat(query, either(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class))); + if (query instanceof LegacyNumericRangeQuery) { + LegacyNumericRangeQuery numericRangeQuery = (LegacyNumericRangeQuery) query; + assertThat(numericRangeQuery.getField(), equalTo(queryBuilder.fieldName())); + assertThat(numericRangeQuery.getMin(), equalTo(queryBuilder.from())); + assertThat(numericRangeQuery.getMax(), equalTo(queryBuilder.to())); + assertThat(numericRangeQuery.includesMin(), equalTo(queryBuilder.includeLower())); + assertThat(numericRangeQuery.includesMax(), equalTo(queryBuilder.includeUpper())); + } else { + Integer min = (Integer) queryBuilder.from(); + Integer max = (Integer) queryBuilder.to(); + int minInt, maxInt; + if (min == null) { + minInt = Integer.MIN_VALUE; + } else { + minInt = min.intValue(); + if (queryBuilder.includeLower() == false && minInt != Integer.MAX_VALUE) { + minInt++; + } + } + if (max == null) { + maxInt = Integer.MAX_VALUE; + } else { + maxInt = max.intValue(); + if (queryBuilder.includeUpper() == false && maxInt != Integer.MIN_VALUE) { + 
maxInt--; + } + } + try { + assertEquals(IntPoint.newRangeQuery(INT_FIELD_NAME, minInt, maxInt), query); + }catch(AssertionError e) { + throw e; + } + } } else { throw new UnsupportedOperationException(); } @@ -194,13 +225,17 @@ public void testToQueryNumericField() throws IOException { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); Query parsedQuery = rangeQuery(INT_FIELD_NAME).from(23).to(54).includeLower(true).includeUpper(false).toQuery(createShardContext()); // since age is automatically registered in data, we encode it as numeric - assertThat(parsedQuery, instanceOf(LegacyNumericRangeQuery.class)); - LegacyNumericRangeQuery rangeQuery = (LegacyNumericRangeQuery) parsedQuery; - assertThat(rangeQuery.getField(), equalTo(INT_FIELD_NAME)); - assertThat(rangeQuery.getMin().intValue(), equalTo(23)); - assertThat(rangeQuery.getMax().intValue(), equalTo(54)); - assertThat(rangeQuery.includesMin(), equalTo(true)); - assertThat(rangeQuery.includesMax(), equalTo(false)); + assertThat(parsedQuery, either(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class))); + if (parsedQuery instanceof LegacyNumericRangeQuery) { + LegacyNumericRangeQuery rangeQuery = (LegacyNumericRangeQuery) parsedQuery; + assertThat(rangeQuery.getField(), equalTo(INT_FIELD_NAME)); + assertThat(rangeQuery.getMin().intValue(), equalTo(23)); + assertThat(rangeQuery.getMax().intValue(), equalTo(54)); + assertThat(rangeQuery.includesMin(), equalTo(true)); + assertThat(rangeQuery.includesMax(), equalTo(false)); + } else { + assertEquals(IntPoint.newRangeQuery(INT_FIELD_NAME, 23, 53), parsedQuery); + } } public void testDateRangeQueryFormat() throws IOException { @@ -216,15 +251,22 @@ public void testDateRangeQueryFormat() throws IOException { " }\n" + "}"; Query parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null); - assertThat(parsedQuery, instanceOf(LegacyNumericRangeQuery.class)); - - // Min value was 
01/01/2012 (dd/MM/yyyy) - DateTime min = DateTime.parse("2012-01-01T00:00:00.000+00"); - assertThat(((LegacyNumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis())); - - // Max value was 2030 (yyyy) - DateTime max = DateTime.parse("2030-01-01T00:00:00.000+00"); - assertThat(((LegacyNumericRangeQuery) parsedQuery).getMax().longValue(), is(max.getMillis())); + assertThat(parsedQuery, either(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class))); + + if (parsedQuery instanceof LegacyNumericRangeQuery) { + // Min value was 01/01/2012 (dd/MM/yyyy) + DateTime min = DateTime.parse("2012-01-01T00:00:00.000+00"); + assertThat(((LegacyNumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis())); + + // Max value was 2030 (yyyy) + DateTime max = DateTime.parse("2030-01-01T00:00:00.000+00"); + assertThat(((LegacyNumericRangeQuery) parsedQuery).getMax().longValue(), is(max.getMillis())); + } else { + assertEquals(LongPoint.newRangeQuery(DATE_FIELD_NAME, + DateTime.parse("2012-01-01T00:00:00.000+00").getMillis(), + DateTime.parse("2030-01-01T00:00:00.000+00").getMillis() - 1), + parsedQuery); + } // Test Invalid format query = "{\n" + @@ -255,16 +297,23 @@ public void testDateRangeBoundaries() throws IOException { " }\n" + "}\n"; Query parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null); - assertThat(parsedQuery, instanceOf(LegacyNumericRangeQuery.class)); - LegacyNumericRangeQuery rangeQuery = (LegacyNumericRangeQuery) parsedQuery; - - DateTime min = DateTime.parse("2014-11-01T00:00:00.000+00"); - assertThat(rangeQuery.getMin().longValue(), is(min.getMillis())); - assertTrue(rangeQuery.includesMin()); - - DateTime max = DateTime.parse("2014-12-08T23:59:59.999+00"); - assertThat(rangeQuery.getMax().longValue(), is(max.getMillis())); - assertTrue(rangeQuery.includesMax()); + assertThat(parsedQuery, either(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class))); + if 
(parsedQuery instanceof LegacyNumericRangeQuery) { + LegacyNumericRangeQuery rangeQuery = (LegacyNumericRangeQuery) parsedQuery; + + DateTime min = DateTime.parse("2014-11-01T00:00:00.000+00"); + assertThat(rangeQuery.getMin().longValue(), is(min.getMillis())); + assertTrue(rangeQuery.includesMin()); + + DateTime max = DateTime.parse("2014-12-08T23:59:59.999+00"); + assertThat(rangeQuery.getMax().longValue(), is(max.getMillis())); + assertTrue(rangeQuery.includesMax()); + } else { + assertEquals(LongPoint.newRangeQuery(DATE_FIELD_NAME, + DateTime.parse("2014-11-01T00:00:00.000+00").getMillis(), + DateTime.parse("2014-12-08T23:59:59.999+00").getMillis()), + parsedQuery); + } query = "{\n" + " \"range\" : {\n" + @@ -275,16 +324,23 @@ public void testDateRangeBoundaries() throws IOException { " }\n" + "}"; parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null); - assertThat(parsedQuery, instanceOf(LegacyNumericRangeQuery.class)); - rangeQuery = (LegacyNumericRangeQuery) parsedQuery; - - min = DateTime.parse("2014-11-30T23:59:59.999+00"); - assertThat(rangeQuery.getMin().longValue(), is(min.getMillis())); - assertFalse(rangeQuery.includesMin()); - - max = DateTime.parse("2014-12-08T00:00:00.000+00"); - assertThat(rangeQuery.getMax().longValue(), is(max.getMillis())); - assertFalse(rangeQuery.includesMax()); + assertThat(parsedQuery, either(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class))); + if (parsedQuery instanceof LegacyNumericRangeQuery) { + LegacyNumericRangeQuery rangeQuery = (LegacyNumericRangeQuery) parsedQuery; + + DateTime min = DateTime.parse("2014-11-30T23:59:59.999+00"); + assertThat(rangeQuery.getMin().longValue(), is(min.getMillis())); + assertFalse(rangeQuery.includesMin()); + + DateTime max = DateTime.parse("2014-12-08T00:00:00.000+00"); + assertThat(rangeQuery.getMax().longValue(), is(max.getMillis())); + assertFalse(rangeQuery.includesMax()); + } else { + 
assertEquals(LongPoint.newRangeQuery(DATE_FIELD_NAME, + DateTime.parse("2014-11-30T23:59:59.999+00").getMillis() + 1, + DateTime.parse("2014-12-08T00:00:00.000+00").getMillis() - 1), + parsedQuery); + } } public void testDateRangeQueryTimezone() throws IOException { @@ -300,17 +356,21 @@ public void testDateRangeQueryTimezone() throws IOException { " }\n" + "}"; Query parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null); - assertThat(parsedQuery, instanceOf(LegacyNumericRangeQuery.class)); - - // Min value was 2012-01-01 (UTC) so we need to remove one hour - DateTime min = DateTime.parse("2012-01-01T00:00:00.000+01:00"); - // Max value is when we started the test. So it should be some ms from now - DateTime max = new DateTime(startDate, DateTimeZone.UTC); - - assertThat(((LegacyNumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis())); - - // We should not have a big difference here (should be some ms) - assertThat(((LegacyNumericRangeQuery) parsedQuery).getMax().longValue() - max.getMillis(), lessThanOrEqualTo(60000L)); + if (parsedQuery instanceof PointRangeQuery) { + // TODO what can we assert + } else { + assertThat(parsedQuery, instanceOf(LegacyNumericRangeQuery.class)); + + // Min value was 2012-01-01 (UTC) so we need to remove one hour + DateTime min = DateTime.parse("2012-01-01T00:00:00.000+01:00"); + // Max value is when we started the test. 
So it should be some ms from now + DateTime max = new DateTime(startDate, DateTimeZone.UTC); + + assertThat(((LegacyNumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis())); + + // We should not have a big difference here (should be some ms) + assertThat(((LegacyNumericRangeQuery) parsedQuery).getMax().longValue() - max.getMillis(), lessThanOrEqualTo(60000L)); + } query = "{\n" + " \"range\" : {\n" + diff --git a/core/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java index 03551f8c6aefa..546c8536ad98e 100644 --- a/core/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/SpanTermQueryBuilderTests.java @@ -26,6 +26,8 @@ import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.index.mapper.MappedFieldType; +import com.fasterxml.jackson.core.io.JsonStringEncoder; + import java.io.IOException; import static org.hamcrest.CoreMatchers.equalTo; @@ -33,6 +35,28 @@ public class SpanTermQueryBuilderTests extends AbstractTermQueryTestCase { + @Override + protected SpanTermQueryBuilder doCreateTestQueryBuilder() { + String fieldName = null; + Object value; + + if (randomBoolean()) { + fieldName = STRING_FIELD_NAME; + } + if (frequently()) { + value = randomAsciiOfLengthBetween(1, 10); + } else { + // generate unicode string in 10% of cases + JsonStringEncoder encoder = JsonStringEncoder.getInstance(); + value = new String(encoder.quoteAsString(randomUnicodeOfLength(10))); + } + + if (fieldName == null) { + fieldName = randomAsciiOfLengthBetween(1, 10); + } + return createQueryBuilder(fieldName, value); + } + @Override protected SpanTermQueryBuilder createQueryBuilder(String fieldName, Object value) { return new SpanTermQueryBuilder(fieldName, value); diff --git a/core/src/test/java/org/elasticsearch/index/query/TermQueryBuilderTests.java 
b/core/src/test/java/org/elasticsearch/index/query/TermQueryBuilderTests.java index ebf2fca6f5f3a..bc024686d651d 100644 --- a/core/src/test/java/org/elasticsearch/index/query/TermQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/TermQueryBuilderTests.java @@ -20,19 +20,69 @@ package org.elasticsearch.index.query; import org.apache.lucene.index.Term; +import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.index.mapper.MappedFieldType; +import com.fasterxml.jackson.core.io.JsonStringEncoder; + import java.io.IOException; +import static org.hamcrest.Matchers.either; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.Matchers.is; public class TermQueryBuilderTests extends AbstractTermQueryTestCase { + + @Override + protected TermQueryBuilder doCreateTestQueryBuilder() { + String fieldName = null; + Object value; + switch (randomIntBetween(0, 3)) { + case 0: + if (randomBoolean()) { + fieldName = BOOLEAN_FIELD_NAME; + } + value = randomBoolean(); + break; + case 1: + if (randomBoolean()) { + fieldName = STRING_FIELD_NAME; + } + if (frequently()) { + value = randomAsciiOfLengthBetween(1, 10); + } else { + // generate unicode string in 10% of cases + JsonStringEncoder encoder = JsonStringEncoder.getInstance(); + value = new String(encoder.quoteAsString(randomUnicodeOfLength(10))); + } + break; + case 2: + if (randomBoolean()) { + fieldName = INT_FIELD_NAME; + } + value = randomInt(10000); + break; + case 3: + if (randomBoolean()) { + fieldName = DOUBLE_FIELD_NAME; + } + value = randomDouble(); + break; + default: + throw new UnsupportedOperationException(); + } + + if (fieldName == null) { + fieldName = randomAsciiOfLengthBetween(1, 10); + } + return 
createQueryBuilder(fieldName, value); + } + /** * @return a TermQuery with random field name and value, optional random boost and queryname */ @@ -43,15 +93,19 @@ protected TermQueryBuilder createQueryBuilder(String fieldName, Object value) { @Override protected void doAssertLuceneQuery(TermQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { - assertThat(query, instanceOf(TermQuery.class)); - TermQuery termQuery = (TermQuery) query; - assertThat(termQuery.getTerm().field(), equalTo(queryBuilder.fieldName())); + assertThat(query, either(instanceOf(TermQuery.class)).or(instanceOf(PointRangeQuery.class))); MappedFieldType mapper = context.fieldMapper(queryBuilder.fieldName()); - if (mapper != null) { - Term term = ((TermQuery) mapper.termQuery(queryBuilder.value(), null)).getTerm(); - assertThat(termQuery.getTerm(), equalTo(term)); + if (query instanceof TermQuery) { + TermQuery termQuery = (TermQuery) query; + assertThat(termQuery.getTerm().field(), equalTo(queryBuilder.fieldName())); + if (mapper != null) { + Term term = ((TermQuery) mapper.termQuery(queryBuilder.value(), null)).getTerm(); + assertThat(termQuery.getTerm(), equalTo(term)); + } else { + assertThat(termQuery.getTerm().bytes(), equalTo(BytesRefs.toBytesRef(queryBuilder.value()))); + } } else { - assertThat(termQuery.getTerm().bytes(), equalTo(BytesRefs.toBytesRef(queryBuilder.value()))); + assertEquals(query, mapper.termQuery(queryBuilder.value(), null)); } } diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java index 6fa9d86ac5877..10bdbeb887b9e 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java @@ -52,8 +52,6 @@ import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.index.IndexNotFoundException; import 
org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.suggest.SuggestBuilder; -import org.elasticsearch.search.suggest.SuggestBuilders; import org.elasticsearch.test.ESIntegTestCase; import java.util.Collection; diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java index 799b84dd6d229..7b1a93c48d62e 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java @@ -170,7 +170,7 @@ public void testSimple1() throws Exception { } public void testSimple2() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=long,doc_values=true", "field2", "type=text")); + assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=long", "field2", "type=text")); ensureGreen(); // introduce the doc diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index c2ac2078c06af..76b5558df80f2 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -79,7 +79,7 @@ private DateTime date(int month, int day) { } private DateTime date(String date) { - return DateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime(date); + return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(date); } private static String format(DateTime date, String pattern) { @@ -144,7 +144,7 @@ private static String getBucketKeyAsString(DateTime key) { } private static String getBucketKeyAsString(DateTime key, DateTimeZone tz) { - return Joda.forPattern(DateFieldMapper.Defaults.DATE_TIME_FORMATTER.format()).printer().withZone(tz).print(key); + return 
Joda.forPattern(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format()).printer().withZone(tz).print(key); } public void testSingleValuedField() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java index 2200e0e30ca96..d7c2bf1c4609c 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java @@ -18,10 +18,8 @@ */ package org.elasticsearch.search.aggregations.bucket; -import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.core.DateFieldMapper; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; @@ -57,7 +55,7 @@ public class DateHistogramOffsetIT extends ESIntegTestCase { private static final String DATE_FORMAT = "yyyy-MM-dd:hh-mm-ss"; private DateTime date(String date) { - return DateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime(date); + return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(date); } @Override diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IPv4RangeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IPv4RangeTests.java index 507e877c0079a..c72e220cf5e82 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IPv4RangeTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IPv4RangeTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.common.network.Cidrs; -import org.elasticsearch.index.mapper.ip.IpFieldMapper; 
+import org.elasticsearch.index.mapper.ip.LegacyIpFieldMapper; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; import org.elasticsearch.search.aggregations.bucket.range.ipv4.IPv4RangeAggregatorBuilder; import org.elasticsearch.search.aggregations.bucket.range.ipv4.IPv4RangeAggregatorBuilder.Range; @@ -44,8 +44,8 @@ protected IPv4RangeAggregatorBuilder createTestAggregatorBuilder() { if (randomBoolean()) { factory.addRange(new Range(key, from, to)); } else { - String fromAsStr = Double.isInfinite(from) ? null : IpFieldMapper.longToIp((long) from); - String toAsStr = Double.isInfinite(to) ? null : IpFieldMapper.longToIp((long) to); + String fromAsStr = Double.isInfinite(from) ? null : LegacyIpFieldMapper.longToIp((long) from); + String toAsStr = Double.isInfinite(to) ? null : LegacyIpFieldMapper.longToIp((long) to); factory.addRange(new Range(key, fromAsStr, toAsStr)); } } else { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpTermsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpTermsIT.java new file mode 100644 index 0000000000000..4a03303c66e12 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpTermsIT.java @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.search.aggregations.bucket; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; + +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.bucket.terms.Terms; + +public class IpTermsIT extends AbstractTermsTestCase { + + public void testBasics() throws Exception { + assertAcked(prepareCreate("index").addMapping("type", "ip", "type=ip")); + indexRandom(true, + client().prepareIndex("index", "type", "1").setSource("ip", "192.168.1.7"), + client().prepareIndex("index", "type", "2").setSource("ip", "192.168.1.7"), + client().prepareIndex("index", "type", "3").setSource("ip", "2001:db8::2:1")); + + SearchResponse response = client().prepareSearch("index").addAggregation( + AggregationBuilders.terms("my_terms").field("ip").executionHint(randomExecutionHint())).get(); + assertSearchResponse(response); + Terms terms = response.getAggregations().get("my_terms"); + assertEquals(2, terms.getBuckets().size()); + + Terms.Bucket bucket1 = terms.getBuckets().get(0); + assertEquals(2, bucket1.getDocCount()); + assertEquals("192.168.1.7", bucket1.getKey()); + assertEquals("192.168.1.7", bucket1.getKeyAsString()); + + Terms.Bucket bucket2 = terms.getBuckets().get(1); + assertEquals(1, bucket2.getDocCount()); + assertEquals("2001:db8::2:1", bucket2.getKey()); + assertEquals("2001:db8::2:1", bucket2.getKeyAsString()); + } + +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardReduceIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardReduceIT.java index 7567e3c144166..2a180d0cfc37f 100644 --- 
a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardReduceIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardReduceIT.java @@ -235,6 +235,7 @@ public void testDateRange() throws Exception { assertThat(histo.getBuckets().size(), equalTo(4)); } + @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/17700") public void testIpRange() throws Exception { SearchResponse response = client().prepareSearch("idx") .setQuery(QueryBuilders.matchAllQuery()) diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsIT.java index 676761d682535..f3ca8aa0b7b65 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsIT.java @@ -69,15 +69,15 @@ public Settings indexSettings() { .build(); } - public static final int MUSIC_CATEGORY=1; - public static final int OTHER_CATEGORY=2; - public static final int SNOWBOARDING_CATEGORY=3; + public static final String MUSIC_CATEGORY="1"; + public static final String OTHER_CATEGORY="2"; + public static final String SNOWBOARDING_CATEGORY="3"; @Override public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("test").setSettings(SETTING_NUMBER_OF_SHARDS, 5, SETTING_NUMBER_OF_REPLICAS, 0).addMapping("fact", "_routing", "required=true", "routing_id", "type=keyword", "fact_category", - "type=integer,index=true", "description", "type=text,fielddata=true")); + "type=keyword,index=true", "description", "type=text,fielddata=true")); createIndex("idx_unmapped"); ensureGreen(); @@ -123,12 +123,12 @@ public void testStructuredAnalysis() throws Exception { .actionGet(); assertSearchResponse(response); SignificantTerms topTerms = response.getAggregations().get("mySignificantTerms"); - Number topCategory = (Number) 
topTerms.getBuckets().iterator().next().getKey(); - assertTrue(topCategory.equals(new Long(SNOWBOARDING_CATEGORY))); + String topCategory = (String) topTerms.getBuckets().iterator().next().getKey(); + assertTrue(topCategory.equals(SNOWBOARDING_CATEGORY)); } public void testStructuredAnalysisWithIncludeExclude() throws Exception { - long[] excludeTerms = { MUSIC_CATEGORY }; + String[] excludeTerms = { MUSIC_CATEGORY }; SearchResponse response = client().prepareSearch("test") .setSearchType(SearchType.QUERY_AND_FETCH) .setQuery(new TermQueryBuilder("_all", "paul")) @@ -139,8 +139,8 @@ public void testStructuredAnalysisWithIncludeExclude() throws Exception { .actionGet(); assertSearchResponse(response); SignificantTerms topTerms = response.getAggregations().get("mySignificantTerms"); - Number topCategory = (Number) topTerms.getBuckets().iterator().next().getKey(); - assertTrue(topCategory.equals(new Long(OTHER_CATEGORY))); + String topCategory = topTerms.getBuckets().iterator().next().getKeyAsString(); + assertTrue(topCategory.equals(OTHER_CATEGORY)); } public void testIncludeExclude() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index d245cd7bcc652..ff68c66957439 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -94,7 +94,7 @@ public String randomExecutionHint() { } public void testPlugin() throws Exception { - String type = randomBoolean() ? "text" : "long"; + String type = randomBoolean() ? 
"text" : "keyword"; String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}"; SharedSignificantTermsTestMethods.index01Docs(type, settings, this); SearchResponse response = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE) @@ -257,7 +257,7 @@ public String[] getNames() { } public void testXContentResponse() throws Exception { - String type = randomBoolean() ? "text" : "long"; + String type = randomBoolean() ? "text" : "keyword"; String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}"; SharedSignificantTermsTestMethods.index01Docs(type, settings, this); SearchResponse response = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE) @@ -279,12 +279,7 @@ public void testXContentResponse() throws Exception { XContentBuilder responseBuilder = XContentFactory.jsonBuilder(); classes.toXContent(responseBuilder, null); - String result = null; - if (type.equals("long")) { - result = "\"class\"{\"doc_count_error_upper_bound\":0,\"sum_other_doc_count\":0,\"buckets\":[{\"key\":\"0\",\"doc_count\":4,\"sig_terms\":{\"doc_count\":4,\"buckets\":[{\"key\":0,\"doc_count\":4,\"score\":0.39999999999999997,\"bg_count\":5}]}},{\"key\":\"1\",\"doc_count\":3,\"sig_terms\":{\"doc_count\":3,\"buckets\":[{\"key\":1,\"doc_count\":3,\"score\":0.75,\"bg_count\":4}]}}]}"; - } else { - result = "\"class\"{\"doc_count_error_upper_bound\":0,\"sum_other_doc_count\":0,\"buckets\":[{\"key\":\"0\",\"doc_count\":4,\"sig_terms\":{\"doc_count\":4,\"buckets\":[{\"key\":\"0\",\"doc_count\":4,\"score\":0.39999999999999997,\"bg_count\":5}]}},{\"key\":\"1\",\"doc_count\":3,\"sig_terms\":{\"doc_count\":3,\"buckets\":[{\"key\":\"1\",\"doc_count\":3,\"score\":0.75,\"bg_count\":4}]}}]}"; - } + String result = 
"\"class\"{\"doc_count_error_upper_bound\":0,\"sum_other_doc_count\":0,\"buckets\":[{\"key\":\"0\",\"doc_count\":4,\"sig_terms\":{\"doc_count\":4,\"buckets\":[{\"key\":\"0\",\"doc_count\":4,\"score\":0.39999999999999997,\"bg_count\":5}]}},{\"key\":\"1\",\"doc_count\":3,\"sig_terms\":{\"doc_count\":3,\"buckets\":[{\"key\":\"1\",\"doc_count\":3,\"score\":0.75,\"bg_count\":4}]}}]}"; assertThat(responseBuilder.string(), equalTo(result)); } @@ -333,7 +328,7 @@ public void testDeletesIssue7951() throws Exception { } public void testBackgroundVsSeparateSet() throws Exception { - String type = randomBoolean() ? "text" : "long"; + String type = randomBoolean() ? "text" : "keyword"; String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}"; SharedSignificantTermsTestMethods.index01Docs(type, settings, this); testBackgroundVsSeparateSet(new MutualInformation(true, true), new MutualInformation(true, false)); @@ -460,7 +455,7 @@ private void indexEqualTestData() throws ExecutionException, InterruptedExceptio } public void testScriptScore() throws ExecutionException, InterruptedException, IOException { - indexRandomFrequencies01(randomBoolean() ? "text" : "long"); + indexRandomFrequencies01(randomBoolean() ? 
"text" : "keyword"); ScriptHeuristic scriptHeuristic = getScriptSignificanceHeuristic(); ensureYellow(); SearchResponse response = client().prepareSearch(INDEX_NAME) diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java index dad55a20828bc..408a9137eb80d 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java @@ -53,7 +53,7 @@ public String randomExecutionHint() { public void testShardMinDocCountSignificantTermsTest() throws Exception { String textMappings; if (randomBoolean()) { - textMappings = "type=long"; + textMappings = "type=keyword"; } else { textMappings = "type=text,fielddata=true"; } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java index 73ec2c9c0bdc0..d1495cc7321cd 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java @@ -59,7 +59,7 @@ private DateTime date(int month, int day) { } private DateTime date(String date) { - return DateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime(date); + return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(date); } private static String format(DateTime date, String pattern) { diff --git a/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java b/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java index 136c1fba2e06b..ab1e463590dfe 100644 --- a/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java +++ 
b/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java @@ -102,7 +102,7 @@ public void testSimpleIp() throws Exception { client().prepareIndex("test", "type1", "1").setSource("from", "192.168.0.5", "to", "192.168.0.10").setRefresh(true).execute().actionGet(); SearchResponse search = client().prepareSearch() - .setQuery(boolQuery().must(rangeQuery("from").lt("192.168.0.7")).must(rangeQuery("to").gt("192.168.0.7"))) + .setQuery(boolQuery().must(rangeQuery("from").lte("192.168.0.7")).must(rangeQuery("to").gte("192.168.0.7"))) .execute().actionGet(); assertHitCount(search, 1L); @@ -122,6 +122,7 @@ public void testIpCidr() throws Exception { client().prepareIndex("test", "type1", "2").setSource("ip", "192.168.0.2").execute().actionGet(); client().prepareIndex("test", "type1", "3").setSource("ip", "192.168.0.3").execute().actionGet(); client().prepareIndex("test", "type1", "4").setSource("ip", "192.168.1.4").execute().actionGet(); + client().prepareIndex("test", "type1", "5").setSource("ip", "2001:db8::ff00:42:8329").execute().actionGet(); refresh(); SearchResponse search = client().prepareSearch() @@ -154,6 +155,21 @@ public void testIpCidr() throws Exception { .execute().actionGet(); assertHitCount(search, 4L); + search = client().prepareSearch() + .setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "2001:db8::ff00:42:8329/128"))) + .execute().actionGet(); + assertHitCount(search, 1L); + + search = client().prepareSearch() + .setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "2001:db8::/64"))) + .execute().actionGet(); + assertHitCount(search, 1L); + + search = client().prepareSearch() + .setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "::/0"))) + .execute().actionGet(); + assertHitCount(search, 5L); + search = client().prepareSearch() .setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.1.5/32"))) .execute().actionGet(); @@ -161,7 +177,7 @@ public void testIpCidr() throws Exception { 
assertFailures(client().prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "0/0/0/0/0"))), RestStatus.BAD_REQUEST, - containsString("invalid IPv4/CIDR; expected [a.b.c.d, e] but was [[0, 0, 0, 0, 0]]")); + containsString("Expected [ip/prefix] but was [0/0/0/0/0]")); } public void testSimpleId() { @@ -351,7 +367,7 @@ public void testQueryNumericFieldWithRegex() throws Exception { client().prepareSearch("idx").setQuery(QueryBuilders.regexpQuery("num", "34")).get(); fail("SearchPhaseExecutionException should have been thrown"); } catch (SearchPhaseExecutionException ex) { - assertThat(ex.getCause().getCause().getMessage(), equalTo("Cannot use regular expression to filter numeric field [num]")); + assertThat(ex.getCause().getCause().getMessage(), containsString("Can only use regular expression on keyword and text fields")); } } diff --git a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index c1e22df933482..d25abd12f354a 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -40,7 +40,7 @@ import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper.BuilderContext; -import org.elasticsearch.index.mapper.core.DoubleFieldMapper.DoubleFieldType; +import org.elasticsearch.index.mapper.core.LegacyDoubleFieldMapper.DoubleFieldType; import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.mapper.object.ObjectMapper.Nested; import org.elasticsearch.index.query.QueryParseContext; diff --git a/core/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java b/core/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java index 
e37de0e674650..7fc1490de73f6 100644 --- a/core/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java +++ b/core/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java @@ -48,7 +48,7 @@ public class SharedSignificantTermsTestMethods { public static final String CLASS_FIELD = "class"; public static void aggregateAndCheckFromSeveralShards(ESIntegTestCase testCase) throws ExecutionException, InterruptedException { - String type = ESTestCase.randomBoolean() ? "text" : "long"; + String type = ESTestCase.randomBoolean() ? "text" : "keyword"; String settings = "{\"index.number_of_shards\": 5, \"index.number_of_replicas\": 0}"; index01Docs(type, settings, testCase); testCase.ensureGreen(); diff --git a/docs/reference/mapping/types/date.asciidoc b/docs/reference/mapping/types/date.asciidoc index 46fa599ef28cd..85e86e6a6a145 100644 --- a/docs/reference/mapping/types/date.asciidoc +++ b/docs/reference/mapping/types/date.asciidoc @@ -129,15 +129,9 @@ The following parameters are accepted by `date` fields: which is substituted for any explicit `null` values. Defaults to `null`, which means the field is treated as missing. -<>:: - - Controls the number of extra terms that are indexed to make - <> faster. Defaults to `16`. - <>:: Whether the field value should be stored and retrievable separately from the <> field. Accepts `true` or `false` (default). 
- diff --git a/docs/reference/mapping/types/ip.asciidoc b/docs/reference/mapping/types/ip.asciidoc index 007a64e3f71be..9786ebf4fd876 100644 --- a/docs/reference/mapping/types/ip.asciidoc +++ b/docs/reference/mapping/types/ip.asciidoc @@ -1,9 +1,8 @@ [[ip]] -=== IPv4 datatype +=== IP datatype -An `ip` field is really a <> field which accepts -https://en.wikipedia.org/wiki/IPv4[IPv4] addresses and indexes them as long -values: +An `ip` field can index/store either https://en.wikipedia.org/wiki/IPv4[IPv4] or +https://en.wikipedia.org/wiki/IPv6[IPv6] addresses. [source,js] -------------------------------------------------- @@ -28,11 +27,8 @@ PUT my_index/my_type/1 GET my_index/_search { "query": { - "range": { - "ip_addr": { - "gte": "192.168.1.0", - "lt": "192.168.2.0" - } + "term": { + "ip_addr": "192.168.0.0/16" } } } @@ -75,16 +71,40 @@ The following parameters are accepted by `ip` fields: Accepts an IPv4 value which is substituted for any explicit `null` values. Defaults to `null`, which means the field is treated as missing. -<>:: - - Controls the number of extra terms that are indexed to make - <> faster. Defaults to `16`. - <>:: Whether the field value should be stored and retrievable separately from the <> field. Accepts `true` or `false` (default). +==== Querying `ip` fields + +The most common way to query ip addresses is to use the +https://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing#CIDR_notation[CIDR] +notation: `[ip_address]/[prefix_length]`. For instance: + +[source,js] +-------------------------------------------------- +GET my_index/_search +{ + "query": { + "term": { + "ip_addr": "192.168.0.0/16" + } + } +} +-------------------------------------------------- + +or -NOTE: IPv6 addresses are not supported yet. 
+[source,js] +-------------------------------------------------- +GET my_index/_search +{ + "query": { + "term": { + "ip_addr": "2001:db8::/48" + } + } +} +-------------------------------------------------- diff --git a/docs/reference/mapping/types/numeric.asciidoc b/docs/reference/mapping/types/numeric.asciidoc index ffe3bf057f4be..25d14386b8cad 100644 --- a/docs/reference/mapping/types/numeric.asciidoc +++ b/docs/reference/mapping/types/numeric.asciidoc @@ -79,12 +79,6 @@ The following parameters are accepted by numeric types: substituted for any explicit `null` values. Defaults to `null`, which means the field is treated as missing. -<>:: - - Controls the number of extra terms that are indexed to make - <> faster. The default depends on the - numeric `type`. - <>:: Whether the field value should be stored and retrievable separately from diff --git a/docs/reference/migration/migrate_5_0.asciidoc b/docs/reference/migration/migrate_5_0.asciidoc index 29d41851f3589..9d2370a983c53 100644 --- a/docs/reference/migration/migrate_5_0.asciidoc +++ b/docs/reference/migration/migrate_5_0.asciidoc @@ -41,6 +41,8 @@ way to do this is to upgrade to Elasticsearch 2.3 or later and to use the * <> * <> * <> +* <> + include::migrate_5_0/search.asciidoc[] @@ -65,3 +67,5 @@ include::migrate_5_0/packaging.asciidoc[] include::migrate_5_0/plugins.asciidoc[] include::migrate_5_0/fs.asciidoc[] + +include::migrate_5_0/aggregations.asciidoc[] diff --git a/docs/reference/migration/migrate_5_0/aggregations.asciidoc b/docs/reference/migration/migrate_5_0/aggregations.asciidoc new file mode 100644 index 0000000000000..32a8bbd35d2f1 --- /dev/null +++ b/docs/reference/migration/migrate_5_0/aggregations.asciidoc @@ -0,0 +1,13 @@ +[[breaking_50_aggregations_changes]] +=== Aggregation changes + +==== Significant terms on numeric fields + +Numeric fields have been refactored to use a different data structure that +performs better for range queries. 
However, since this data structure does +not record document frequencies, numeric fields can no longer be used for +significant terms aggregations. It is recommended to use <> +fields instead, either directly or through a <> +if the numeric representation is still needed for sorting, range queries or +numeric aggregations like +<>. diff --git a/docs/reference/migration/migrate_5_0/mapping.asciidoc b/docs/reference/migration/migrate_5_0/mapping.asciidoc index 23298cd733c7b..98a33d80aa945 100644 --- a/docs/reference/migration/migrate_5_0/mapping.asciidoc +++ b/docs/reference/migration/migrate_5_0/mapping.asciidoc @@ -36,6 +36,46 @@ String mappings now have the following default mappings: This allows to perform full-text search on the original field name and to sort and run aggregations on the sub keyword field. +==== Numeric fields + +Numeric fields are now indexed with a completely different data-structure, called +BKD tree, that is expected to require less disk space and be faster for range +queries than the previous way that numerics were indexed. + +Term queries will return constant scores now, while they used to return higher +scores for rare terms due to the contribution of the document frequency, which +this new BKD structure does not record. If scoring is needed, then it is advised +to map the numeric fields as <> too. + +Note that this <> mapping does not need to replace the numeric +mapping. For instance if you need both sorting and scoring on your numeric field, +you could map it both as a number and a `keyword` using <>: + +[source,js] +-------------------------------------------------- +PUT /my_index +{ + "mappings": { + "my_type": { + "properties": { + "my_number": { + "type": "long", + "fields": { + "keyword": { + "type": "keyword" + } + } + } + } + } + } +} +-------------------------------------------------- +// AUTOSENSE + +Also the `precision_step` parameter is now irrelevant and will be rejected on +indices that are created on or after 5.0.
+ ==== `index` property On all field datatypes (except for the deprecated `string` field), the `index` diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java index 72adbd7d448ed..d78f80bfe497c 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java @@ -36,6 +36,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.core.DateFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyDateFieldMapper; import org.elasticsearch.script.ClassPermission; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; @@ -246,7 +247,8 @@ protected ValueSource getMethodValueSource(MappedFieldType fieldType, IndexField } protected ValueSource getDateMethodValueSource(MappedFieldType fieldType, IndexFieldData fieldData, String fieldName, String methodName, int calendarType) { - if (!(fieldType instanceof DateFieldMapper.DateFieldType)) { + if (fieldType instanceof LegacyDateFieldMapper.DateFieldType == false + && fieldType instanceof DateFieldMapper.DateFieldType == false) { throw new IllegalArgumentException("Member method [" + methodName + "] can only be used with a date field type, not the field [" + fieldName + "]."); } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IPv4RangeTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IPv4RangeTests.java index 1fb16f6c56df1..23ecfad39b882 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IPv4RangeTests.java +++ 
b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IPv4RangeTests.java @@ -18,9 +18,10 @@ */ package org.elasticsearch.messy.tests; +import org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.index.mapper.ip.IpFieldMapper; +import org.elasticsearch.index.mapper.ip.LegacyIpFieldMapper; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.groovy.GroovyPlugin; @@ -51,6 +52,7 @@ /** * */ +@AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/17700") @ESIntegTestCase.SuiteScopeTestCase public class IPv4RangeTests extends ESIntegTestCase { @@ -143,23 +145,23 @@ public void testSingleValueField() throws Exception { assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(bucket.getFromAsString(), nullValue()); assertThat(bucket.getToAsString(), equalTo("10.0.0.100")); - assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100"))); + assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100"))); assertThat(bucket.getDocCount(), equalTo(100L)); bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200")); assertThat(bucket.getFromAsString(), equalTo("10.0.0.100")); - assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100"))); + assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100"))); assertThat(bucket.getToAsString(), equalTo("10.0.0.200")); - assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200"))); + assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) 
LegacyIpFieldMapper.ipToLong("10.0.0.200"))); assertThat(bucket.getDocCount(), equalTo(100L)); bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*")); assertThat(bucket.getFromAsString(), equalTo("10.0.0.200")); - assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200"))); + assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200"))); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); assertThat(bucket.getToAsString(), nullValue()); assertThat(bucket.getDocCount(), equalTo(55L)); @@ -185,18 +187,18 @@ public void testSingleValueFieldWithMaskRange() throws Exception { Range.Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("10.0.0.0/25")); - assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.0"))); + assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.0"))); assertThat(bucket.getFromAsString(), equalTo("10.0.0.0")); - assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.128"))); + assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.128"))); assertThat(bucket.getToAsString(), equalTo("10.0.0.128")); assertThat(bucket.getDocCount(), equalTo(128L)); bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("10.0.0.128/25")); - assertThat((long) ((Number) bucket.getFrom()).doubleValue(), equalTo(IpFieldMapper.ipToLong("10.0.0.128"))); + assertThat((long) ((Number) bucket.getFrom()).doubleValue(), equalTo(LegacyIpFieldMapper.ipToLong("10.0.0.128"))); assertThat(bucket.getFromAsString(), equalTo("10.0.0.128")); - assertThat((long) ((Number) 
bucket.getTo()).doubleValue(), equalTo(IpFieldMapper.ipToLong("10.0.1.0"))); // range is exclusive on the to side + assertThat((long) ((Number) bucket.getTo()).doubleValue(), equalTo(LegacyIpFieldMapper.ipToLong("10.0.1.0"))); // range is exclusive on the to side assertThat(bucket.getToAsString(), equalTo("10.0.1.0")); assertThat(bucket.getDocCount(), equalTo(127L)); // include 10.0.0.128 } @@ -225,23 +227,23 @@ public void testSingleValueFieldWithCustomKey() throws Exception { assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(bucket.getFromAsString(), nullValue()); assertThat(bucket.getToAsString(), equalTo("10.0.0.100")); - assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100"))); + assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100"))); assertThat(bucket.getDocCount(), equalTo(100L)); bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("r2")); assertThat(bucket.getFromAsString(), equalTo("10.0.0.100")); - assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100"))); + assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100"))); assertThat(bucket.getToAsString(), equalTo("10.0.0.200")); - assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200"))); + assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200"))); assertThat(bucket.getDocCount(), equalTo(100L)); bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("r3")); assertThat(bucket.getFromAsString(), equalTo("10.0.0.200")); - assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200"))); + 
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200"))); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); assertThat(bucket.getToAsString(), nullValue()); assertThat(bucket.getDocCount(), equalTo(55L)); @@ -275,7 +277,7 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(bucket.getFromAsString(), nullValue()); assertThat(bucket.getToAsString(), equalTo("10.0.0.100")); - assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100"))); + assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100"))); assertThat(bucket.getDocCount(), equalTo(100L)); Sum sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); @@ -288,9 +290,9 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200")); assertThat(bucket.getFromAsString(), equalTo("10.0.0.100")); - assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100"))); + assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100"))); assertThat(bucket.getToAsString(), equalTo("10.0.0.200")); - assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200"))); + assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200"))); assertThat(bucket.getDocCount(), equalTo(100L)); sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); @@ -303,7 +305,7 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { assertThat(bucket, notNullValue()); 
assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*")); assertThat(bucket.getFromAsString(), equalTo("10.0.0.200")); - assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200"))); + assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200"))); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); assertThat(bucket.getToAsString(), nullValue()); assertThat(bucket.getDocCount(), equalTo(55L)); @@ -336,23 +338,23 @@ public void testSingleValuedFieldWithValueScript() throws Exception { assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(bucket.getFromAsString(), nullValue()); assertThat(bucket.getToAsString(), equalTo("10.0.0.100")); - assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100"))); + assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100"))); assertThat(bucket.getDocCount(), equalTo(100L)); bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200")); assertThat(bucket.getFromAsString(), equalTo("10.0.0.100")); - assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100"))); + assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100"))); assertThat(bucket.getToAsString(), equalTo("10.0.0.200")); - assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200"))); + assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200"))); assertThat(bucket.getDocCount(), equalTo(100L)); bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*")); 
assertThat(bucket.getFromAsString(), equalTo("10.0.0.200")); - assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200"))); + assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200"))); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); assertThat(bucket.getToAsString(), nullValue()); assertThat(bucket.getDocCount(), equalTo(55L)); @@ -399,23 +401,23 @@ public void testMultiValuedField() throws Exception { assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(bucket.getFromAsString(), nullValue()); assertThat(bucket.getToAsString(), equalTo("10.0.0.100")); - assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100"))); + assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100"))); assertThat(bucket.getDocCount(), equalTo(100L)); bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200")); assertThat(bucket.getFromAsString(), equalTo("10.0.0.100")); - assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100"))); + assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100"))); assertThat(bucket.getToAsString(), equalTo("10.0.0.200")); - assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200"))); + assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200"))); assertThat(bucket.getDocCount(), equalTo(101L)); bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*")); assertThat(bucket.getFromAsString(), equalTo("10.0.0.200")); - 
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200"))); + assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200"))); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); assertThat(bucket.getToAsString(), nullValue()); assertThat(bucket.getDocCount(), equalTo(56L)); @@ -443,23 +445,23 @@ public void testMultiValuedFieldWithValueScript() throws Exception { assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(bucket.getFromAsString(), nullValue()); assertThat(bucket.getToAsString(), equalTo("10.0.0.100")); - assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100"))); + assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100"))); assertThat(bucket.getDocCount(), equalTo(100L)); bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200")); assertThat(bucket.getFromAsString(), equalTo("10.0.0.100")); - assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100"))); + assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100"))); assertThat(bucket.getToAsString(), equalTo("10.0.0.200")); - assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200"))); + assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200"))); assertThat(bucket.getDocCount(), equalTo(101L)); bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*")); assertThat(bucket.getFromAsString(), equalTo("10.0.0.200")); - assertThat(((Number) bucket.getFrom()).doubleValue(), 
equalTo((double) IpFieldMapper.ipToLong("10.0.0.200"))); + assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200"))); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); assertThat(bucket.getToAsString(), nullValue()); assertThat(bucket.getDocCount(), equalTo(56L)); @@ -487,23 +489,23 @@ public void testScriptSingleValue() throws Exception { assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(bucket.getFromAsString(), nullValue()); assertThat(bucket.getToAsString(), equalTo("10.0.0.100")); - assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100"))); + assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100"))); assertThat(bucket.getDocCount(), equalTo(100L)); bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200")); assertThat(bucket.getFromAsString(), equalTo("10.0.0.100")); - assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100"))); + assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100"))); assertThat(bucket.getToAsString(), equalTo("10.0.0.200")); - assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200"))); + assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200"))); assertThat(bucket.getDocCount(), equalTo(100L)); bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*")); assertThat(bucket.getFromAsString(), equalTo("10.0.0.200")); - assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200"))); + assertThat(((Number) 
bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200"))); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); assertThat(bucket.getToAsString(), nullValue()); assertThat(bucket.getDocCount(), equalTo(55L)); @@ -531,23 +533,23 @@ public void testScriptMultiValued() throws Exception { assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(bucket.getFromAsString(), nullValue()); assertThat(bucket.getToAsString(), equalTo("10.0.0.100")); - assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100"))); + assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100"))); assertThat(bucket.getDocCount(), equalTo(100L)); bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200")); assertThat(bucket.getFromAsString(), equalTo("10.0.0.100")); - assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100"))); + assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100"))); assertThat(bucket.getToAsString(), equalTo("10.0.0.200")); - assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200"))); + assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200"))); assertThat(bucket.getDocCount(), equalTo(101L)); bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*")); assertThat(bucket.getFromAsString(), equalTo("10.0.0.200")); - assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200"))); + assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) 
LegacyIpFieldMapper.ipToLong("10.0.0.200"))); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); assertThat(bucket.getToAsString(), nullValue()); assertThat(bucket.getDocCount(), equalTo(56L)); @@ -577,23 +579,23 @@ public void testUnmapped() throws Exception { assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(bucket.getFromAsString(), nullValue()); assertThat(bucket.getToAsString(), equalTo("10.0.0.100")); - assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100"))); + assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100"))); assertThat(bucket.getDocCount(), equalTo(0L)); bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200")); assertThat(bucket.getFromAsString(), equalTo("10.0.0.100")); - assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100"))); + assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100"))); assertThat(bucket.getToAsString(), equalTo("10.0.0.200")); - assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200"))); + assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200"))); assertThat(bucket.getDocCount(), equalTo(0L)); bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*")); assertThat(bucket.getFromAsString(), equalTo("10.0.0.200")); - assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200"))); + assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200"))); assertThat(((Number) 
bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); assertThat(bucket.getToAsString(), nullValue()); assertThat(bucket.getDocCount(), equalTo(0L)); @@ -623,23 +625,23 @@ public void testPartiallyUnmapped() throws Exception { assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY)); assertThat(bucket.getFromAsString(), nullValue()); assertThat(bucket.getToAsString(), equalTo("10.0.0.100")); - assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100"))); + assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100"))); assertThat(bucket.getDocCount(), equalTo(100L)); bucket = buckets.get(1); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200")); assertThat(bucket.getFromAsString(), equalTo("10.0.0.100")); - assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100"))); + assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100"))); assertThat(bucket.getToAsString(), equalTo("10.0.0.200")); - assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200"))); + assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200"))); assertThat(bucket.getDocCount(), equalTo(100L)); bucket = buckets.get(2); assertThat(bucket, notNullValue()); assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*")); assertThat(bucket.getFromAsString(), equalTo("10.0.0.200")); - assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200"))); + assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200"))); assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY)); 
assertThat(bucket.getToAsString(), nullValue()); assertThat(bucket.getDocCount(), equalTo(55L)); diff --git a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java index aadc0df83e984..4878dc0f2cdbb 100644 --- a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java +++ b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java @@ -39,7 +39,8 @@ import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.DateFieldMapper; -import org.elasticsearch.index.mapper.core.IntegerFieldMapper; +import org.elasticsearch.index.mapper.core.NumberFieldMapper; +import org.elasticsearch.index.mapper.core.NumberFieldMapper.NumberType; import org.elasticsearch.index.mapper.core.TextFieldMapper; import java.io.IOException; @@ -141,7 +142,7 @@ public static class Builder extends FieldMapper.Builder contentTypeBuilder = new TextFieldMapper.Builder(FieldNames.CONTENT_TYPE); - private Mapper.Builder contentLengthBuilder = new IntegerFieldMapper.Builder(FieldNames.CONTENT_LENGTH); + private Mapper.Builder contentLengthBuilder = new NumberFieldMapper.Builder(FieldNames.CONTENT_LENGTH, NumberType.INTEGER); private Mapper.Builder languageBuilder = new TextFieldMapper.Builder(FieldNames.LANGUAGE); diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java index 6b781a6f4a653..956bea4fe0699 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java +++ 
b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java @@ -29,10 +29,7 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.core.DateFieldMapper; -import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.core.TextFieldMapper; -import org.elasticsearch.threadpool.ThreadPool; -import org.junit.After; import org.junit.Before; import java.nio.charset.StandardCharsets; @@ -49,7 +46,6 @@ public class MultifieldAttachmentMapperTests extends AttachmentUnitTestCase { private DocumentMapperParser mapperParser; - private ThreadPool threadPool; @Before public void setupMapperParser() throws Exception { @@ -57,11 +53,6 @@ public void setupMapperParser() throws Exception { } - @After - public void cleanup() throws InterruptedException { - terminate(threadPool); - } - public void testSimpleMappings() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/multifield/multifield-mapping.json"); DocumentMapper docMapper = mapperParser.parse("person", new CompressedXContent(mapping)); @@ -94,7 +85,6 @@ public void testExternalValues() throws Exception { String forcedName = "dummyname.txt"; String bytes = Base64.encodeBytes(originalText.getBytes(StandardCharsets.ISO_8859_1)); - threadPool = new ThreadPool("testing-only"); MapperService mapperService = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()); diff --git a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java index 84a903e0946e4..2a802500907f4 100644 --- a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java +++ 
b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java @@ -24,46 +24,46 @@ import java.util.Map; import org.apache.lucene.document.Field; +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.document.StoredField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.Explicit; +import org.elasticsearch.Version; import org.elasticsearch.common.hash.MurmurHash3; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; +import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; +import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.mapper.core.LongFieldMapper; -import org.elasticsearch.index.mapper.core.NumberFieldMapper; +import org.elasticsearch.index.mapper.core.TypeParsers; -import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField; - -public class Murmur3FieldMapper extends LongFieldMapper { +public class Murmur3FieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "murmur3"; - public static class Defaults extends LongFieldMapper.Defaults { + public static class Defaults { public static final MappedFieldType FIELD_TYPE = new Murmur3FieldType(); static { FIELD_TYPE.freeze(); } } - public static class Builder extends NumberFieldMapper.Builder { + public static class Builder extends FieldMapper.Builder { public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Integer.MAX_VALUE); + super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); builder = this; - 
builder.precisionStep(Integer.MAX_VALUE); } @Override public Murmur3FieldMapper build(BuilderContext context) { setupFieldType(context); - Murmur3FieldMapper fieldMapper = new Murmur3FieldMapper(name, fieldType, defaultFieldType, - ignoreMalformed(context), coerce(context), + return new Murmur3FieldMapper(name, fieldType, defaultFieldType, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - return (Murmur3FieldMapper) fieldMapper.includeInAll(includeInAll); } @Override @@ -74,11 +74,6 @@ protected void setupFieldType(BuilderContext context) { fieldType.setHasDocValues(true); defaultFieldType.setHasDocValues(true); } - - @Override - protected int maxPrecisionStep() { - return 64; - } } public static class TypeParser implements Mapper.TypeParser { @@ -94,18 +89,18 @@ public static class TypeParser implements Mapper.TypeParser { throw new MapperParsingException("Setting [index] cannot be modified for field [" + name + "]"); } - parseNumberField(builder, name, node, parserContext); - // Because this mapper extends LongFieldMapper the null_value field will be added to the JSON when transferring cluster state - // between nodes so we have to remove the entry here so that the validation doesn't fail - // TODO should murmur3 support null_value? at the moment if a user sets null_value it has to be silently ignored since we can't - // determine whether the JSON is the original JSON from the user or if its the serialised cluster state being passed between nodes. -// node.remove("null_value"); + if (parserContext.indexVersionCreated().before(Version.V_5_0_0)) { + node.remove("precision_step"); + } + + TypeParsers.parseField(builder, name, node, parserContext); + return builder; } } // this only exists so a check can be done to match the field type to using murmur3 hashing... 
- public static class Murmur3FieldType extends LongFieldMapper.LongFieldType { + public static class Murmur3FieldType extends MappedFieldType { public Murmur3FieldType() { } @@ -113,16 +108,26 @@ protected Murmur3FieldType(Murmur3FieldType ref) { super(ref); } + @Override + public String typeName() { + return CONTENT_TYPE; + } + @Override public Murmur3FieldType clone() { return new Murmur3FieldType(this); } + + @Override + public IndexFieldData.Builder fielddataBuilder() { + failIfNoDocValues(); + return new DocValuesIndexFieldData.Builder().numericType(NumericType.LONG); + } } protected Murmur3FieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Explicit ignoreMalformed, Explicit coerce, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, indexSettings, multiFields, copyTo); + super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); } @Override @@ -131,7 +136,8 @@ protected String contentType() { } @Override - protected void innerParseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) + throws IOException { final Object value; if (context.externalValueSet()) { value = context.externalValue(); @@ -141,9 +147,11 @@ protected void innerParseCreateField(ParseContext context, List fields) t if (value != null) { final BytesRef bytes = new BytesRef(value.toString()); final long hash = MurmurHash3.hash128(bytes.bytes, bytes.offset, bytes.length, 0, new MurmurHash3.Hash128()).h1; - super.innerParseCreateField(context.createExternalValueContext(hash), fields); + fields.add(new SortedNumericDocValuesField(fieldType().name(), hash)); + if (fieldType().stored()) { + fields.add(new StoredField(name(), hash)); + } } - } @Override diff --git a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java 
b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java index 622cdff82e236..3cadb9500e572 100644 --- a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java +++ b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java @@ -20,6 +20,8 @@ package org.elasticsearch.index.mapper.size; import org.apache.lucene.document.Field; +import org.apache.lucene.index.IndexOptions; +import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; @@ -29,7 +31,8 @@ import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.mapper.core.IntegerFieldMapper; +import org.elasticsearch.index.mapper.core.LegacyIntegerFieldMapper; +import org.elasticsearch.index.mapper.core.NumberFieldMapper; import org.elasticsearch.index.mapper.internal.EnabledAttributeMapper; import java.io.IOException; @@ -44,18 +47,25 @@ public class SizeFieldMapper extends MetadataFieldMapper { public static final String NAME = "_size"; public static final String CONTENT_TYPE = "_size"; - public static class Defaults extends IntegerFieldMapper.Defaults { + public static class Defaults { public static final EnabledAttributeMapper ENABLED_STATE = EnabledAttributeMapper.UNSET_DISABLED; - public static final MappedFieldType SIZE_FIELD_TYPE = IntegerFieldMapper.Defaults.FIELD_TYPE.clone(); + public static final MappedFieldType SIZE_FIELD_TYPE = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER); + public static final MappedFieldType LEGACY_SIZE_FIELD_TYPE = LegacyIntegerFieldMapper.Defaults.FIELD_TYPE.clone(); static { SIZE_FIELD_TYPE.setStored(true); - SIZE_FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_32_BIT); 
SIZE_FIELD_TYPE.setName(NAME); SIZE_FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); SIZE_FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); SIZE_FIELD_TYPE.freeze(); + + LEGACY_SIZE_FIELD_TYPE.setStored(true); + LEGACY_SIZE_FIELD_TYPE.setNumericPrecisionStep(LegacyIntegerFieldMapper.Defaults.PRECISION_STEP_32_BIT); + LEGACY_SIZE_FIELD_TYPE.setName(NAME); + LEGACY_SIZE_FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); + LEGACY_SIZE_FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); + LEGACY_SIZE_FIELD_TYPE.freeze(); } } @@ -63,8 +73,10 @@ public static class Builder extends MetadataFieldMapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - Builder builder = new Builder(parserContext.mapperService().fullName(NAME)); + Builder builder = new Builder(parserContext.mapperService().fullName(NAME), parserContext.indexVersionCreated()); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); String fieldName = Strings.toUnderscoreCase(entry.getKey()); @@ -106,11 +118,11 @@ public MetadataFieldMapper getDefault(Settings indexSettings, MappedFieldType fi private EnabledAttributeMapper enabledState; private SizeFieldMapper(Settings indexSettings, MappedFieldType mappedFieldType) { - this(Defaults.ENABLED_STATE, mappedFieldType == null ? Defaults.SIZE_FIELD_TYPE : mappedFieldType, indexSettings); + this(Defaults.ENABLED_STATE, mappedFieldType == null ? 
Defaults.LEGACY_SIZE_FIELD_TYPE : mappedFieldType, indexSettings); } private SizeFieldMapper(EnabledAttributeMapper enabled, MappedFieldType fieldType, Settings indexSettings) { - super(NAME, fieldType, Defaults.SIZE_FIELD_TYPE, indexSettings); + super(NAME, fieldType, Defaults.LEGACY_SIZE_FIELD_TYPE, indexSettings); this.enabledState = enabled; } @@ -148,7 +160,15 @@ protected void parseCreateField(ParseContext context, List fields) throws if (context.source() == null) { return; } - fields.add(new IntegerFieldMapper.CustomIntegerNumericField(context.source().length(), fieldType())); + final int value = context.source().length(); + if (Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) { + fields.add(new LegacyIntegerFieldMapper.CustomIntegerNumericField(value, fieldType())); + } else { + boolean indexed = fieldType().indexOptions() != IndexOptions.NONE; + boolean docValued = fieldType().hasDocValues(); + boolean stored = fieldType().stored(); + fields.addAll(NumberFieldMapper.NumberType.INTEGER.createFields(name(), value, indexed, docValued, stored)); + } } @Override diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java index 174520cfada54..a55ab2e50bbc9 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java @@ -39,6 +39,8 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; +import org.apache.lucene.index.IndexableField; + public class SizeMappingTests extends ESSingleNodeTestCase { IndexService indexService; @@ -67,8 +69,14 @@ public void testSizeEnabled() throws Exception { .bytes(); ParsedDocument doc = docMapper.parse(SourceToParse.source(source).type("type").id("1")); - 
assertThat(doc.rootDoc().getField("_size").fieldType().stored(), equalTo(true)); - assertThat(doc.rootDoc().getField("_size").tokenStream(docMapper.mappers().indexAnalyzer(), null), notNullValue()); + boolean stored = false; + boolean points = false; + for (IndexableField field : doc.rootDoc().getFields("_size")) { + stored |= field.fieldType().stored(); + points |= field.fieldType().pointDimensionCount() > 0; + } + assertTrue(stored); + assertTrue(points); } public void testSizeDisabled() throws Exception {