diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregator.java index 27b21a1ebd896..d00bbda46049e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregator.java @@ -98,7 +98,7 @@ public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, if (pointConverter != null) { Number segMax = findLeafMaxValue(ctx.reader(), pointField, pointConverter); if (segMax != null) { - /** + /* * There is no parent aggregator (see {@link MinAggregator#getPointReaderOrNull} * so the ordinal for the bucket is always 0. */ diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregator.java index 38841d7e4d809..96563d917bf8f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregator.java @@ -103,7 +103,7 @@ public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, if (pointConverter != null) { Number segMin = findLeafMinValue(ctx.reader(), pointField, pointConverter); if (segMin != null) { - /** + /* * There is no parent aggregator (see {@link MinAggregator#getPointReaderOrNull} * so the ordinal for the bucket is always 0. 
*/ @@ -190,7 +190,12 @@ static Function getPointReaderOrNull(SearchContext context, Aggr if (fieldType instanceof NumberFieldMapper.NumberFieldType) { converter = ((NumberFieldMapper.NumberFieldType) fieldType)::parsePoint; } else if (fieldType.getClass() == DateFieldMapper.DateFieldType.class) { - converter = (in) -> LongPoint.decodeDimension(in, 0); + DateFieldMapper.DateFieldType dft = (DateFieldMapper.DateFieldType) fieldType; + /* + * Makes sure that nanoseconds decode to milliseconds, just + * like they do when you run the agg without the optimization. + */ + converter = (in) -> dft.resolution().toInstant(LongPoint.decodeDimension(in, 0)).toEpochMilli(); } return converter; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java index 72781f332a007..32c539e43348a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java @@ -46,13 +46,17 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.query.QueryShardContext; @@ -87,9 
+91,9 @@ import org.elasticsearch.search.lookup.LeafDocLookup; import java.io.IOException; +import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; @@ -97,7 +101,6 @@ import java.util.Map; import java.util.function.BiFunction; import java.util.function.Consumer; -import java.util.function.DoubleConsumer; import java.util.function.Function; import java.util.function.Supplier; @@ -740,41 +743,56 @@ public void testShortcutIsApplicable() { ) ); } - assertNotNull( + for (DateFieldMapper.Resolution resolution : DateFieldMapper.Resolution.values()) { + assertNull( + MinAggregator.getPointReaderOrNull( + mockSearchContext(new MatchAllDocsQuery()), + mockAggregator(), + mockDateValuesSourceConfig("number", true, resolution) + ) + ); + assertNull( + MinAggregator.getPointReaderOrNull( + mockSearchContext(new TermQuery(new Term("foo", "bar"))), + null, + mockDateValuesSourceConfig("number", true, resolution) + ) + ); + assertNull( + MinAggregator.getPointReaderOrNull( + mockSearchContext(null), + mockAggregator(), + mockDateValuesSourceConfig("number", true, resolution) + ) + ); + assertNull( + MinAggregator.getPointReaderOrNull( + mockSearchContext(null), + null, + mockDateValuesSourceConfig("number", false, resolution) + ) + ); + } + // Check that we decode dates "just like" the doc values instance. 
+ Instant expected = Instant.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2020-01-01T00:00:00Z")); + byte[] scratch = new byte[8]; + LongPoint.encodeDimension(DateFieldMapper.Resolution.MILLISECONDS.convert(expected), scratch, 0); + assertThat( MinAggregator.getPointReaderOrNull( mockSearchContext(new MatchAllDocsQuery()), null, - mockDateValuesSourceConfig("number", true) - ) + mockDateValuesSourceConfig("number", true, DateFieldMapper.Resolution.MILLISECONDS) + ).apply(scratch), equalTo(expected.toEpochMilli()) ); - assertNull( + LongPoint.encodeDimension(DateFieldMapper.Resolution.NANOSECONDS.convert(expected), scratch, 0); + assertThat( MinAggregator.getPointReaderOrNull( mockSearchContext(new MatchAllDocsQuery()), - mockAggregator(), - mockDateValuesSourceConfig("number", true) - ) - ); - assertNull( - MinAggregator.getPointReaderOrNull( - mockSearchContext(new TermQuery(new Term("foo", "bar"))), null, - mockDateValuesSourceConfig("number", true) - ) - ); - assertNull( - MinAggregator.getPointReaderOrNull( - mockSearchContext(null), - mockAggregator(), - mockDateValuesSourceConfig("number", true) - ) - ); - assertNull( - MinAggregator.getPointReaderOrNull( - mockSearchContext(null), - null, - mockDateValuesSourceConfig("number", false) - ) + mockDateValuesSourceConfig("number", true, DateFieldMapper.Resolution.NANOSECONDS) + ).apply(scratch), equalTo(expected.toEpochMilli()) ); + } public void testMinShortcutRandom() throws Exception { @@ -799,21 +817,6 @@ public void testMinShortcutRandom() throws Exception { (v) -> DoublePoint.decodeDimension(v, 0)); } - private void testMinCase(IndexSearcher searcher, - AggregationBuilder aggregationBuilder, - MappedFieldType ft, - DoubleConsumer testResult) throws IOException { - Collection queries = Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(ft.name())); - for (Query query : queries) { - MinAggregator aggregator = createAggregator(query, aggregationBuilder, searcher, 
createIndexSettings(), ft); - aggregator.preCollection(); - searcher.search(new MatchAllDocsQuery(), aggregator); - aggregator.postCollection(); - InternalMin result = (InternalMin) aggregator.buildAggregation(0L); - testResult.accept(result.getValue()); - } - } - private void testMinShortcutCase(Supplier randomNumber, Function pointFieldFunc, Function pointConvertFunc) throws IOException { @@ -889,12 +892,17 @@ private ValuesSourceConfig mockNumericValuesSourceConfig(S return config; } - private ValuesSourceConfig mockDateValuesSourceConfig(String fieldName, boolean indexed) { + private ValuesSourceConfig mockDateValuesSourceConfig(String fieldName, boolean indexed, + DateFieldMapper.Resolution resolution) { ValuesSourceConfig config = mock(ValuesSourceConfig.class); - MappedFieldType ft = new DateFieldMapper.Builder(fieldName).fieldType(); - ft.setName(fieldName); - ft.setIndexOptions(indexed ? IndexOptions.DOCS : IndexOptions.NONE); - ft.freeze(); + Mapper.BuilderContext builderContext = new Mapper.BuilderContext( + Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(), + new ContentPath()); + MappedFieldType ft = new DateFieldMapper.Builder(fieldName) + .index(indexed) + .withResolution(resolution) + .build(builderContext) + .fieldType(); when(config.fieldContext()).thenReturn(new FieldContext(fieldName, null, ft)); return config; }