Deprecate resolution loss on date field (#78921) backport(#79355)
When a value with nanosecond precision is stored in a date field, the
nanosecond part is silently lost because the field only keeps millisecond
resolution. The date_nanos field type should be used instead.
This commit emits a deprecation warning to notify users of the precision loss.

closes #37962
backports #78921
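
The behaviour being deprecated boils down to millisecond truncation when parsing into a date field. Below is a minimal, self-contained java.time sketch of the check that the new parseWithDeprecation method performs (illustration only, not Elasticsearch mapper code; the class name is made up, while the sample timestamp is the one used in the upgrade test further down):

    import java.time.Instant;

    public class ResolutionLossDemo {
        public static void main(String[] args) {
            // The same nanosecond-precision timestamp used in the upgrade test below.
            Instant instant = Instant.parse("2015-01-01T12:10:30.123456789Z");

            // A date field stores epoch milliseconds, so anything finer than a millisecond is dropped.
            long epochMillis = instant.toEpochMilli();
            System.out.println(Instant.ofEpochMilli(epochMillis)); // 2015-01-01T12:10:30.123Z

            // The deprecation check fires exactly when that truncation loses information,
            // i.e. when the nanosecond-of-second part is not a whole number of milliseconds.
            boolean losesPrecision = instant.getNano() % 1_000_000 != 0;
            System.out.println(losesPrecision); // true -> a deprecation warning would be logged
        }
    }

In the commit itself the condition is resolution == Resolution.MILLISECONDS && instant.getNano() % 1000000 != 0; when it holds, the sub-millisecond digits are dropped and a deprecation warning is emitted.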
pgomulka authored Oct 18, 2021
1 parent b7968e6 commit ff6e589
Showing 13 changed files with 101 additions and 34 deletions.
@@ -219,7 +219,7 @@ public void testDateNanosFormatUpgrade() throws IOException {
Request index = new Request("POST", "/" + indexName + "/_doc/");
XContentBuilder doc = XContentBuilder.builder(XContentType.JSON.xContent())
.startObject()
.field("date", "2015-01-01T12:10:30.123456789Z")
.field("date", "2015-01-01T12:10:30.123Z")
.field("date_nanos", "2015-01-01T12:10:30.123456789Z")
.endObject();
index.addParameter("refresh", "true");
@@ -9,7 +9,7 @@
index: timetest
body:
mappings:
"properties": { "my_time": {"type": "date", "format": "strict_date_optional_time_nanos"}}
"properties": { "my_time": {"type": "date_nanos", "format": "strict_date_optional_time_nanos"}}

- do:
ingest.put_pipeline:
@@ -8,7 +8,7 @@ setup:
mappings:
properties:
mydate:
type: date
type: date_nanos
format: "uuuu-MM-dd'T'HH:mm:ss.SSSSSSSSSZZZZZ"

- do:
@@ -75,6 +75,7 @@ public final class DateFieldMapper extends FieldMapper {
public static final DateFormatter DEFAULT_DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_optional_time||epoch_millis");
public static final DateFormatter DEFAULT_DATE_TIME_NANOS_FORMATTER =
DateFormatter.forPattern("strict_date_optional_time_nanos||epoch_millis");
private final String indexName;

public enum Resolution {
MILLISECONDS(CONTENT_TYPE, NumericType.DATE) {
@@ -234,6 +235,7 @@ public static class Builder extends FieldMapper.Builder {
private final Parameter<String> nullValue
= Parameter.stringParam("null_value", false, m -> toType(m).nullValueAsString, null).acceptsNull();
private final Parameter<Boolean> ignoreMalformed;
private String indexName;

private final Parameter<Script> script = Parameter.scriptParam(m -> toType(m).script);
private final Parameter<String> onScriptError = Parameter.onScriptErrorParam(m -> toType(m).onScriptError, script);
@@ -244,13 +246,14 @@ public static class Builder extends FieldMapper.Builder {

public Builder(String name, Resolution resolution, DateFormatter dateFormatter,
ScriptCompiler scriptCompiler,
boolean ignoreMalformedByDefault, Version indexCreatedVersion) {
boolean ignoreMalformedByDefault, Version indexCreatedVersion, String indexName) {
super(name);
this.resolution = resolution;
this.indexCreatedVersion = indexCreatedVersion;
this.scriptCompiler = Objects.requireNonNull(scriptCompiler);
this.ignoreMalformed
= Parameter.boolParam("ignore_malformed", true, m -> toType(m).ignoreMalformed, ignoreMalformedByDefault);
this.indexName = indexName;

DateFormatter defaultFormat = resolution == Resolution.NANOSECONDS && indexCreatedVersion.onOrAfter(Version.V_7_0_0) ?
DEFAULT_DATE_TIME_NANOS_FORMATTER : DEFAULT_DATE_TIME_FORMATTER;
@@ -292,12 +295,13 @@ protected List<Parameter<?>> getParameters() {
return Arrays.asList(index, docValues, store, format, locale, nullValue, ignoreMalformed, script, onScriptError, boost, meta);
}

private Long parseNullValue(DateFieldType fieldType) {
private Long parseNullValue(DateFieldType fieldType, String indexName) {
if (nullValue.getValue() == null) {
return null;
}
try {
return fieldType.parse(nullValue.getValue());
final String fieldName = fieldType.name();
return fieldType.parseNullValueWithDeprecation(nullValue.getValue(), fieldName, indexName);
} catch (Exception e) {
DEPRECATION_LOGGER.critical(DeprecationCategory.MAPPINGS, "date_mapper_null_field",
"Error parsing [" + nullValue.getValue() + "] as date in [null_value] on field [" + name() + "]);"
@@ -311,7 +315,7 @@ public DateFieldMapper build(MapperBuilderContext context) {
DateFieldType ft = new DateFieldType(context.buildFullName(name()), index.getValue(), store.getValue(), docValues.getValue(),
buildFormatter(), resolution, nullValue.getValue(), scriptValues(), meta.getValue());
ft.setBoost(boost.getValue());
Long nullTimestamp = parseNullValue(ft);
Long nullTimestamp = parseNullValue(ft, indexName);
return new DateFieldMapper(name, ft, multiFieldsBuilder.build(this, context),
copyTo.build(), nullTimestamp, resolution, this);
}
@@ -320,13 +324,15 @@ public DateFieldMapper build(MapperBuilderContext context) {
public static final TypeParser MILLIS_PARSER = new TypeParser((n, c) -> {
boolean ignoreMalformedByDefault = IGNORE_MALFORMED_SETTING.get(c.getSettings());
return new Builder(n, Resolution.MILLISECONDS, c.getDateFormatter(), c.scriptCompiler(),
ignoreMalformedByDefault, c.indexVersionCreated());
ignoreMalformedByDefault, c.indexVersionCreated(),
c.getIndexSettings() != null ? c.getIndexSettings().getIndex().getName() : null);
});

public static final TypeParser NANOS_PARSER = new TypeParser((n, c) -> {
boolean ignoreMalformedByDefault = IGNORE_MALFORMED_SETTING.get(c.getSettings());
return new Builder(n, Resolution.NANOSECONDS, c.getDateFormatter(), c.scriptCompiler(),
ignoreMalformedByDefault, c.indexVersionCreated());
ignoreMalformedByDefault, c.indexVersionCreated(),
c.getIndexSettings() != null ? c.getIndexSettings().getIndex().getName() : null);
});

public static final class DateFieldType extends MappedFieldType {
@@ -382,7 +388,31 @@ protected DateMathParser dateMathParser() {

// Visible for testing.
public long parse(String value) {
return resolution.convert(DateFormatters.from(dateTimeFormatter().parse(value), dateTimeFormatter().locale()).toInstant());
final Instant instant = getInstant(value);
return resolution.convert(instant);
}

public long parseWithDeprecation(String value, String fieldName, String indexName) {
final Instant instant = getInstant(value);
if (resolution == Resolution.MILLISECONDS && instant.getNano() % 1000000 != 0) {
DEPRECATION_LOGGER.warn(DeprecationCategory.MAPPINGS, "date_field_with_nanos",
"You are attempting to store a nanosecond resolution on a field [{}] of type date on index [{}]. " +
"The nanosecond part was lost. Use date_nanos field type.", fieldName, indexName);
}
return resolution.convert(instant);
}

public long parseNullValueWithDeprecation(String value, String fieldName, String indexName) {
final Instant instant = getInstant(value);
if (resolution == Resolution.MILLISECONDS && instant.getNano() % 1000000 != 0) {
DEPRECATION_LOGGER.warn(DeprecationCategory.MAPPINGS, "date_field_with_nanos",
"You are attempting to set null_value with a nanosecond resolution on a field [{}] of type date on index [{}]. " +
"The nanosecond part was lost. Use date_nanos field type.", fieldName, indexName);
}
return resolution.convert(instant);
}
private Instant getInstant(String value) {
return DateFormatters.from(dateTimeFormatter().parse(value), dateTimeFormatter().locale()).toInstant();
}

/**
@@ -666,11 +696,13 @@ private DateFieldMapper(
this.script = builder.script.get();
this.scriptCompiler = builder.scriptCompiler;
this.scriptValues = builder.scriptValues();
this.indexName = builder.indexName;
}

@Override
public FieldMapper.Builder getMergeBuilder() {
return new Builder(simpleName(), resolution, null, scriptCompiler, ignoreMalformedByDefault, indexCreatedVersion).init(this);
return new Builder(simpleName(), resolution, null, scriptCompiler, ignoreMalformedByDefault, indexCreatedVersion, indexName)
.init(this);
}

@Override
@@ -695,7 +727,9 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio
timestamp = nullValue;
} else {
try {
timestamp = fieldType().parse(dateAsString);
final String fieldName = fieldType().name();
final String indexName = context.indexSettings().getIndex().getName();
timestamp = fieldType().parseWithDeprecation(dateAsString, fieldName, indexName);
} catch (IllegalArgumentException | ElasticsearchParseException | DateTimeException | ArithmeticException e) {
if (ignoreMalformed) {
context.addIgnoredField(mappedFieldType.name());
@@ -300,7 +300,8 @@ public void newDynamicDateField(DocumentParserContext context, String name, Date
Settings settings = context.indexSettings().getSettings();
boolean ignoreMalformed = FieldMapper.IGNORE_MALFORMED_SETTING.get(settings);
createDynamicField(new DateFieldMapper.Builder(name, DateFieldMapper.Resolution.MILLISECONDS,
dateTimeFormatter, ScriptCompiler.NONE, ignoreMalformed, context.indexSettings().getIndexVersionCreated()), context);
dateTimeFormatter, ScriptCompiler.NONE, ignoreMalformed, context.indexSettings().getIndexVersionCreated(),
context.indexSettings().getIndex().getName()), context);
}

void newDynamicBinaryField(DocumentParserContext context, String name) throws IOException {
@@ -106,8 +106,7 @@ public Mapping parse(@Nullable String type, CompressedXContent source, String de
}

MappingParserContext parserContext = parserContextSupplier.get();
RootObjectMapper rootObjectMapper
= rootObjectTypeParser.parse(type, mapping, parserContext).build(MapperBuilderContext.ROOT);
RootObjectMapper rootObjectMapper = rootObjectTypeParser.parse(type, mapping, parserContext).build(MapperBuilderContext.ROOT);

Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappers = metadataMappersFunction.apply(type);
Map<String, Object> meta = null;
@@ -565,15 +565,15 @@ public void testRolloverClusterStateForDataStream() throws Exception {
null,
ScriptCompiler.NONE,
false,
Version.CURRENT).build(MapperBuilderContext.ROOT);
Version.CURRENT, "indexName").build(MapperBuilderContext.ROOT);
ClusterService clusterService = ClusterServiceUtils.createClusterService(testThreadPool);
Environment env = mock(Environment.class);
when(env.sharedDataFile()).thenReturn(null);
AllocationService allocationService = mock(AllocationService.class);
when(allocationService.reroute(any(ClusterState.class), any(String.class))).then(i -> i.getArguments()[0]);
RootObjectMapper.Builder root = new RootObjectMapper.Builder("_doc");
root.add(new DateFieldMapper.Builder(dataStream.getTimeStampField().getName(), DateFieldMapper.Resolution.MILLISECONDS,
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, ScriptCompiler.NONE, true, Version.CURRENT));
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, ScriptCompiler.NONE, true, Version.CURRENT, "indexName"));
MetadataFieldMapper dtfm = getDataStreamTimestampFieldMapper();
Mapping mapping = new Mapping(
root.build(MapperBuilderContext.ROOT),
@@ -148,6 +148,17 @@ public void testIgnoreMalformed() throws IOException {
testIgnoreMalformedForValue("-522000000", "long overflow", "date_optional_time");
}

public void testResolutionLossDeprecation() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b
.field("type", "date")));

ParsedDocument doc = mapper.parse(source(b -> b.field("field", "2018-10-03T14:42:44.123456+0000")));

assertWarnings("You are attempting to store a nanosecond resolution " +
"on a field [field] of type date on index [index]. " +
"The nanosecond part was lost. Use date_nanos field type.");
}

private void testIgnoreMalformedForValue(String value, String expectedCause, String dateFormat) throws IOException {

DocumentMapper mapper = createDocumentMapper(fieldMapping((builder)-> dateFieldMapping(builder, dateFormat)));
@@ -403,11 +414,11 @@ public void testFetchMillisFromIso8601() throws IOException {
}

public void testFetchMillisFromIso8601Nanos() throws IOException {
assertFetch(dateMapperService(), "field", randomIs8601Nanos(MAX_ISO_DATE), null);
assertFetch(dateNanosMapperService(), "field", randomIs8601Nanos(MAX_NANOS), null);
}

public void testFetchMillisFromIso8601NanosFormatted() throws IOException {
assertFetch(dateMapperService(), "field", randomIs8601Nanos(MAX_ISO_DATE), "strict_date_optional_time_nanos");
assertFetch(dateNanosMapperService(), "field", randomIs8601Nanos(MAX_NANOS), "strict_date_optional_time_nanos");
}

/**
@@ -418,7 +429,8 @@ public void testFetchMillisFromIso8601NanosFormatted() throws IOException {
* way.
*/
public void testFetchMillisFromRoundedNanos() throws IOException {
assertFetch(dateMapperService(), "field", randomDecimalNanos(MAX_ISO_DATE), null);
assertFetch(dateMapperService(), "field", randomDecimalMillis(MAX_ISO_DATE), null);
assertFetch(dateNanosMapperService(), "field", randomDecimalNanos(MAX_NANOS), null);
}

/**
@@ -531,7 +543,7 @@ protected Object generateRandomInputValue(MappedFieldType ft) {
switch (((DateFieldType) ft).resolution()) {
case MILLISECONDS:
if (randomBoolean()) {
return randomIs8601Nanos(MAX_ISO_DATE);
return randomDecimalMillis(MAX_ISO_DATE);
}
return randomLongBetween(0, Long.MAX_VALUE);
case NANOSECONDS:
@@ -564,6 +576,10 @@ private String randomDecimalNanos(long maxMillis) {
return Long.toString(randomLongBetween(0, maxMillis)) + "." + between(0, 999999);
}

private String randomDecimalMillis(long maxMillis) {
return Long.toString(randomLongBetween(0, maxMillis));
}

public void testScriptAndPrecludedParameters() {
{
Exception e = expectThrows(MapperParsingException.class, () -> createDocumentMapper(fieldMapping(b -> {
@@ -14,6 +14,7 @@
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.json.JsonXContent;
import org.elasticsearch.test.rest.ESRestTestCase;
@@ -101,7 +102,9 @@ private void createTemplateWithAllowAutoCreate(Boolean allowAutoCreate) throws I

private Response indexDocument() throws IOException {
final Request indexDocumentRequest = new Request("POST", "recipe_kr/_doc");
indexDocumentRequest.setJsonEntity("{ \"@timestamp\": \"" + Instant.now() + "\", \"name\": \"Kimchi\" }");
final Instant now = Instant.now();
final String time = DateFormatter.forPattern("strict_date_optional_time").format(now);
indexDocumentRequest.setJsonEntity("{ \"@timestamp\": \"" + time + "\", \"name\": \"Kimchi\" }");
return client().performRequest(indexDocumentRequest);
}
}
@@ -12,6 +12,8 @@
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.test.rest.ESRestTestCase;

import java.time.ZoneOffset;
@@ -280,7 +282,7 @@ public void testHRDSplit() throws Exception {

for (int i = 1; i <= 100; i++) {
ZonedDateTime time = baseTime.plusHours(i);
String formattedTime = time.format(DateTimeFormatter.ISO_DATE_TIME);
String formattedTime = DateFormatter.forPattern("strict_date_optional_time").format(time);
if (i % 50 == 0) {
// Anomaly has 100 docs, but we don't care about the value
for (int j = 0; j < 100; j++) {
@@ -210,8 +210,8 @@ teardown:
test_alias: {}
mappings:
properties:
time:
type: date
date:
type: date_nanos
user:
type: keyword
stars:
@@ -48,8 +48,8 @@ teardown:
test_alias: {}
mappings:
properties:
time:
type: date
date:
type: date_nanos
user:
type: keyword
stars:
@@ -107,8 +107,8 @@ teardown:
test_alias: {}
mappings:
properties:
time:
type: date
date:
type: date_nanos
user:
type: keyword
stars:
