Revert "Deprecate resolution loss on date field (#78921)" (#80003)
This reverts commit d50c9e8.

Relates #78921

Reason: #37962 (comment)
pgomulka authored Oct 28, 2021
1 parent f5e0270 commit 8674678
Showing 13 changed files with 35 additions and 122 deletions.
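For context on the change being reverted: #78921 added a deprecation warning that fired whenever a millisecond-resolution `date` field received a value with a sub-millisecond component (the `instant.getNano() % 1000000 != 0` check being removed from DateFieldMapper below). The snippet that follows is only a minimal, standalone sketch of that resolution loss using plain java.time — the class name and printed values are illustrative, not Elasticsearch code:

    import java.time.Instant;

    // Minimal sketch (plain java.time, not Elasticsearch code) of the resolution
    // loss that the reverted deprecation warning was meant to surface.
    public class ResolutionLossSketch {
        public static void main(String[] args) {
            Instant value = Instant.parse("2015-01-01T12:10:30.123456789Z");

            // What a millisecond-resolution `date` field effectively stores:
            long millis = value.toEpochMilli();            // 1420114230123 -> ".123", sub-millis dropped

            // What a `date_nanos` field can store (epoch nanoseconds):
            long nanos = value.getEpochSecond() * 1_000_000_000L + value.getNano();   // 1420114230123456789

            // The reverted change logged a deprecation warning when this was true
            // for a `date` field:
            boolean subMillisLost = value.getNano() % 1_000_000 != 0;

            System.out.println(millis + " " + nanos + " " + subMillisLost);
        }
    }

With the revert applied, indexing such a value into a `date` field once again truncates silently to millisecond precision, and `date_nanos` remains the field type to use when the sub-millisecond part matters; the test mappings and fixtures below simply go back to what they were before #78921.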
@@ -201,7 +201,7 @@ public void testDateNanosFormatUpgrade() throws IOException {
Request index = new Request("POST", "/" + indexName + "/_doc/");
XContentBuilder doc = XContentBuilder.builder(XContentType.JSON.xContent())
.startObject()
.field("date", "2015-01-01T12:10:30.123Z")
.field("date", "2015-01-01T12:10:30.123456789Z")
.field("date_nanos", "2015-01-01T12:10:30.123456789Z")
.endObject();
index.addParameter("refresh", "true");
@@ -9,7 +9,7 @@
index: timetest
body:
mappings:
"properties": { "my_time": {"type": "date_nanos", "format": "strict_date_optional_time_nanos"}}
"properties": { "my_time": {"type": "date", "format": "strict_date_optional_time_nanos"}}

- do:
ingest.put_pipeline:
@@ -8,7 +8,7 @@ setup:
mappings:
properties:
mydate:
type: date_nanos
type: date
format: "uuuu-MM-dd'T'HH:mm:ss.SSSSSSSSSZZZZZ"

- do:
server/src/main/java/org/elasticsearch/index/IndexMode.java (3 changes: 1 addition & 2 deletions)
@@ -117,8 +117,7 @@ public void completeMappings(MappingParserContext context, Map<String, Object> m
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER,
context.scriptCompiler(),
DateFieldMapper.IGNORE_MALFORMED_SETTING.get(context.getSettings()),
context.getIndexSettings().getIndexVersionCreated(),
context.getIndexSettings().getIndex().getName()
context.getIndexSettings().getIndexVersionCreated()
)
);
}
@@ -74,7 +74,6 @@ public final class DateFieldMapper extends FieldMapper {
"strict_date_optional_time_nanos||epoch_millis"
);
private static final DateMathParser EPOCH_MILLIS_PARSER = DateFormatter.forPattern("epoch_millis").toDateMathParser();
private final String indexName;

public enum Resolution {
MILLISECONDS(CONTENT_TYPE, NumericType.DATE) {
@@ -238,7 +237,6 @@ public static class Builder extends FieldMapper.Builder {
private final Parameter<String> nullValue = Parameter.stringParam("null_value", false, m -> toType(m).nullValueAsString, null)
.acceptsNull();
private final Parameter<Boolean> ignoreMalformed;
private String indexName;

private final Parameter<Script> script = Parameter.scriptParam(m -> toType(m).script);
private final Parameter<String> onScriptError = Parameter.onScriptErrorParam(m -> toType(m).onScriptError, script);
@@ -253,15 +251,13 @@ public Builder(
DateFormatter dateFormatter,
ScriptCompiler scriptCompiler,
boolean ignoreMalformedByDefault,
Version indexCreatedVersion,
String indexName
Version indexCreatedVersion
) {
super(name);
this.resolution = resolution;
this.indexCreatedVersion = indexCreatedVersion;
this.scriptCompiler = Objects.requireNonNull(scriptCompiler);
this.ignoreMalformed = Parameter.boolParam("ignore_malformed", true, m -> toType(m).ignoreMalformed, ignoreMalformedByDefault);
this.indexName = indexName;

this.script.precludesParameters(nullValue, ignoreMalformed);
addScriptValidation(script, index, docValues);
@@ -301,13 +297,12 @@ protected List<Parameter<?>> getParameters() {
return List.of(index, docValues, store, format, locale, nullValue, ignoreMalformed, script, onScriptError, meta);
}

private Long parseNullValue(DateFieldType fieldType, String indexName) {
private Long parseNullValue(DateFieldType fieldType) {
if (nullValue.getValue() == null) {
return null;
}
try {
final String fieldName = fieldType.name();
return fieldType.parseNullValueWithDeprecation(nullValue.getValue(), fieldName, indexName);
return fieldType.parse(nullValue.getValue());
} catch (Exception e) {
if (indexCreatedVersion.onOrAfter(Version.V_8_0_0)) {
throw new MapperParsingException("Error parsing [null_value] on field [" + name() + "]: " + e.getMessage(), e);
@@ -340,7 +335,7 @@ public DateFieldMapper build(MapperBuilderContext context) {
meta.getValue()
);

Long nullTimestamp = parseNullValue(ft, indexName);
Long nullTimestamp = parseNullValue(ft);
return new DateFieldMapper(name, ft, multiFieldsBuilder.build(this, context), copyTo.build(), nullTimestamp, resolution, this);
}
}
@@ -353,8 +348,7 @@ public DateFieldMapper build(MapperBuilderContext context) {
c.getDateFormatter(),
c.scriptCompiler(),
ignoreMalformedByDefault,
c.indexVersionCreated(),
c.getIndexSettings() != null ? c.getIndexSettings().getIndex().getName() : null
c.indexVersionCreated()
);
});

@@ -366,8 +360,7 @@ public DateFieldMapper build(MapperBuilderContext context) {
c.getDateFormatter(),
c.scriptCompiler(),
ignoreMalformedByDefault,
c.indexVersionCreated(),
c.getIndexSettings() != null ? c.getIndexSettings().getIndex().getName() : null
c.indexVersionCreated()
);
});

@@ -432,42 +425,7 @@ protected DateMathParser dateMathParser() {

// Visible for testing.
public long parse(String value) {
final Instant instant = getInstant(value);
return resolution.convert(instant);
}

public long parseWithDeprecation(String value, String fieldName, String indexName) {
final Instant instant = getInstant(value);
if (resolution == Resolution.MILLISECONDS && instant.getNano() % 1000000 != 0) {
DEPRECATION_LOGGER.warn(
DeprecationCategory.MAPPINGS,
"date_field_with_nanos",
"You are attempting to store a nanosecond resolution on a field [{}] of type date on index [{}]. "
+ "The nanosecond part was lost. Use date_nanos field type.",
fieldName,
indexName
);
}
return resolution.convert(instant);
}

public long parseNullValueWithDeprecation(String value, String fieldName, String indexName) {
final Instant instant = getInstant(value);
if (resolution == Resolution.MILLISECONDS && instant.getNano() % 1000000 != 0) {
DEPRECATION_LOGGER.warn(
DeprecationCategory.MAPPINGS,
"date_field_with_nanos",
"You are attempting to set null_value with a nanosecond resolution on a field [{}] of type date on index [{}]. "
+ "The nanosecond part was lost. Use date_nanos field type.",
fieldName,
indexName
);
}
return resolution.convert(instant);
}

private Instant getInstant(String value) {
return DateFormatters.from(dateTimeFormatter().parse(value), dateTimeFormatter().locale()).toInstant();
return resolution.convert(DateFormatters.from(dateTimeFormatter().parse(value), dateTimeFormatter().locale()).toInstant());
}

/**
@@ -783,14 +741,11 @@ private DateFieldMapper(
this.script = builder.script.get();
this.scriptCompiler = builder.scriptCompiler;
this.scriptValues = builder.scriptValues();
this.indexName = builder.indexName;
}

@Override
public FieldMapper.Builder getMergeBuilder() {
return new Builder(simpleName(), resolution, null, scriptCompiler, ignoreMalformedByDefault, indexCreatedVersion, indexName).init(
this
);
return new Builder(simpleName(), resolution, null, scriptCompiler, ignoreMalformedByDefault, indexCreatedVersion).init(this);
}

@Override
@@ -815,9 +770,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOException {
timestamp = nullValue;
} else {
try {
final String fieldName = fieldType().name();
final String indexName = context.indexSettings().getIndex().getName();
timestamp = fieldType().parseWithDeprecation(dateAsString, fieldName, indexName);
timestamp = fieldType().parse(dateAsString);
} catch (IllegalArgumentException | ElasticsearchParseException | DateTimeException | ArithmeticException e) {
if (ignoreMalformed) {
context.addIgnoredField(mappedFieldType.name());
@@ -362,8 +362,7 @@ public void newDynamicDateField(DocumentParserContext context, String name, Date
dateTimeFormatter,
ScriptCompiler.NONE,
ignoreMalformed,
context.indexSettings().getIndexVersionCreated(),
context.indexSettings().getIndex().getName()
context.indexSettings().getIndexVersionCreated()
),
context
);
@@ -672,8 +672,7 @@ public void testRolloverClusterStateForDataStream() throws Exception {
null,
ScriptCompiler.NONE,
false,
Version.CURRENT,
"indexName"
Version.CURRENT
).build(MapperBuilderContext.ROOT);
ClusterService clusterService = ClusterServiceUtils.createClusterService(testThreadPool);
Environment env = mock(Environment.class);
@@ -688,8 +687,7 @@ public void testRolloverClusterStateForDataStream() throws Exception {
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER,
ScriptCompiler.NONE,
true,
Version.CURRENT,
"indexName"
Version.CURRENT
)
);
MetadataFieldMapper dtfm = getDataStreamTimestampFieldMapper();
@@ -8,7 +8,6 @@

package org.elasticsearch.index.mapper;

import org.apache.logging.log4j.Level;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
@@ -140,22 +139,6 @@ public void testIgnoreMalformed() throws IOException {
testIgnoreMalformedForValue("-522000000", "long overflow", "date_optional_time");
}

public void testResolutionLossDeprecation() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "date")));

ParsedDocument doc = mapper.parse(source(b -> b.field("field", "2018-10-03T14:42:44.123456+0000")));

assertWarnings(
true,
new DeprecationWarning(
Level.WARN,
"You are attempting to store a nanosecond resolution "
+ "on a field [field] of type date on index [index]. "
+ "The nanosecond part was lost. Use date_nanos field type."
)
);
}

private void testIgnoreMalformedForValue(String value, String expectedCause, String dateFormat) throws IOException {

DocumentMapper mapper = createDocumentMapper(fieldMapping((builder) -> dateFieldMapping(builder, dateFormat)));
@@ -419,11 +402,11 @@ public void testFetchMillisFromIso8601() throws IOException {
}

public void testFetchMillisFromIso8601Nanos() throws IOException {
assertFetch(dateNanosMapperService(), "field", randomIs8601Nanos(MAX_NANOS), null);
assertFetch(dateMapperService(), "field", randomIs8601Nanos(MAX_ISO_DATE), null);
}

public void testFetchMillisFromIso8601NanosFormatted() throws IOException {
assertFetch(dateNanosMapperService(), "field", randomIs8601Nanos(MAX_NANOS), "strict_date_optional_time_nanos");
assertFetch(dateMapperService(), "field", randomIs8601Nanos(MAX_ISO_DATE), "strict_date_optional_time_nanos");
}

/**
@@ -434,8 +417,7 @@ public void testFetchMillisFromIso8601NanosFormatted() throws IOException {
* way.
*/
public void testFetchMillisFromRoundedNanos() throws IOException {
assertFetch(dateMapperService(), "field", randomDecimalMillis(MAX_ISO_DATE), null);
assertFetch(dateNanosMapperService(), "field", randomDecimalNanos(MAX_NANOS), null);
assertFetch(dateMapperService(), "field", randomDecimalNanos(MAX_ISO_DATE), null);
}

/**
@@ -543,7 +525,7 @@ protected Object generateRandomInputValue(MappedFieldType ft) {
switch (((DateFieldType) ft).resolution()) {
case MILLISECONDS:
if (randomBoolean()) {
return randomDecimalMillis(MAX_ISO_DATE);
return randomIs8601Nanos(MAX_ISO_DATE);
}
return randomLongBetween(0, Long.MAX_VALUE);
case NANOSECONDS:
@@ -576,10 +558,6 @@ private String randomDecimalNanos(long maxMillis) {
return Long.toString(randomLongBetween(0, maxMillis)) + "." + between(0, 999999);
}

private String randomDecimalMillis(long maxMillis) {
return Long.toString(randomLongBetween(0, maxMillis));
}

public void testScriptAndPrecludedParameters() {
{
Exception e = expectThrows(MapperParsingException.class, () -> createDocumentMapper(fieldMapping(b -> {
@@ -14,7 +14,6 @@
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.json.JsonXContent;
@@ -102,9 +101,7 @@ private void createTemplateWithAllowAutoCreate(Boolean allowAutoCreate) throws I

private Response indexDocument() throws IOException {
final Request indexDocumentRequest = new Request("POST", "recipe_kr/_doc");
final Instant now = Instant.now();
final String time = DateFormatter.forPattern("strict_date_optional_time").format(now);
indexDocumentRequest.setJsonEntity("{ \"@timestamp\": \"" + time + "\", \"name\": \"Kimchi\" }");
indexDocumentRequest.setJsonEntity("{ \"@timestamp\": \"" + Instant.now() + "\", \"name\": \"Kimchi\" }");
return client().performRequest(indexDocumentRequest);
}
}
@@ -12,7 +12,6 @@
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.test.rest.ESRestTestCase;

import java.time.ZoneOffset;
@@ -309,7 +308,7 @@ public void testHRDSplit() throws Exception {

for (int i = 1; i <= 100; i++) {
ZonedDateTime time = baseTime.plusHours(i);
String formattedTime = DateFormatter.forPattern("strict_date_optional_time").format(time);
String formattedTime = time.format(DateTimeFormatter.ISO_DATE_TIME);
if (i % 50 == 0) {
// Anomaly has 100 docs, but we don't care about the value
for (int j = 0; j < 100; j++) {
@@ -210,8 +210,8 @@ teardown:
test_alias: {}
mappings:
properties:
date:
type: date_nanos
time:
type: date
user:
type: keyword
stars:
@@ -48,8 +48,8 @@ teardown:
test_alias: {}
mappings:
properties:
date:
type: date_nanos
time:
type: date
user:
type: keyword
stars:
@@ -107,8 +107,8 @@ teardown:
test_alias: {}
mappings:
properties:
date:
type: date_nanos
time:
type: date
user:
type: keyword
stars: