diff --git a/docs/reference/aggregations/bucket/autodatehistogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/autodatehistogram-aggregation.asciidoc
index 3bd430d03d5ac..e371674228bb4 100644
--- a/docs/reference/aggregations/bucket/autodatehistogram-aggregation.asciidoc
+++ b/docs/reference/aggregations/bucket/autodatehistogram-aggregation.asciidoc
@@ -81,7 +81,8 @@ Response:
                     "key": 1425168000000,
                     "doc_count": 2
                 }
-            ]
+            ],
+            "interval": "1M"
         }
     }
 }
@@ -174,7 +175,8 @@ starting at midnight UTC on 1 October 2015:
                     "key": 1443664800000,
                     "doc_count": 1
                 }
-            ]
+            ],
+            "interval": "1h"
         }
     }
 }
@@ -229,7 +231,8 @@ the specified time zone.
                     "key": 1443664800000,
                     "doc_count": 1
                 }
-            ]
+            ],
+            "interval": "1h"
         }
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java
index a07e3302f20c0..4fb4a6bff5b12 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java
@@ -78,17 +78,17 @@ public class AutoDateHistogramAggregationBuilder
     static RoundingInfo[] buildRoundings(DateTimeZone timeZone) {
         RoundingInfo[] roundings = new RoundingInfo[6];
         roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE, timeZone),
-            1000L, 1, 5, 10, 30);
+            1000L, "s", 1, 5, 10, 30);
         roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR, timeZone),
-            60 * 1000L, 1, 5, 10, 30);
+            60 * 1000L, "m", 1, 5, 10, 30);
         roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY, timeZone),
-            60 * 60 * 1000L, 1, 3, 12);
+            60 * 60 * 1000L, "h", 1, 3, 12);
         roundings[3] = new RoundingInfo(createRounding(DateTimeUnit.DAY_OF_MONTH, timeZone),
-            24 * 60 * 60 * 1000L, 1, 7);
+            24 * 60 * 60 * 1000L, "d", 1, 7);
         roundings[4] = new RoundingInfo(createRounding(DateTimeUnit.MONTH_OF_YEAR, timeZone),
-            30 * 24 * 60 * 60 * 1000L, 1, 3);
+            30 * 24 * 60 * 60 * 1000L, "M", 1, 3);
         roundings[5] = new RoundingInfo(createRounding(DateTimeUnit.YEAR_OF_CENTURY, timeZone),
-            365 * 24 * 60 * 60 * 1000L, 1, 5, 10, 20, 50, 100);
+            365 * 24 * 60 * 60 * 1000L, "y", 1, 5, 10, 20, 50, 100);
         return roundings;
     }
 
@@ -200,10 +200,12 @@ public static class RoundingInfo implements Writeable {
         final Rounding rounding;
         final int[] innerIntervals;
         final long roughEstimateDurationMillis;
+        final String unitAbbreviation;
 
-        public RoundingInfo(Rounding rounding, long roughEstimateDurationMillis, int... innerIntervals) {
+        public RoundingInfo(Rounding rounding, long roughEstimateDurationMillis, String unitAbbreviation, int... innerIntervals) {
            this.rounding = rounding;
            this.roughEstimateDurationMillis = roughEstimateDurationMillis;
+           this.unitAbbreviation = unitAbbreviation;
            this.innerIntervals = innerIntervals;
         }
 
@@ -211,6 +213,7 @@ public RoundingInfo(StreamInput in) throws IOException {
             rounding = Rounding.Streams.read(in);
             roughEstimateDurationMillis = in.readVLong();
             innerIntervals = in.readIntArray();
+            unitAbbreviation = in.readString();
         }
 
         @Override
@@ -218,6 +221,7 @@ public void writeTo(StreamOutput out) throws IOException {
             Rounding.Streams.write(rounding, out);
             out.writeVLong(roughEstimateDurationMillis);
             out.writeIntArray(innerIntervals);
+            out.writeString(unitAbbreviation);
         }
 
         public int getMaximumInnerInterval() {
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java
index f86145386f1df..95376710373f8 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java
@@ -181,7 +181,8 @@ public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException {
         InternalAutoDateHistogram.BucketInfo emptyBucketInfo = new InternalAutoDateHistogram.BucketInfo(roundingInfos, roundingIdx,
             buildEmptySubAggregations());
 
-        return new InternalAutoDateHistogram(name, buckets, targetBuckets, emptyBucketInfo, formatter, pipelineAggregators(), metaData());
+        return new InternalAutoDateHistogram(name, buckets, targetBuckets, emptyBucketInfo,
+            formatter, pipelineAggregators(), metaData(), 1);
     }
 
     @Override
@@ -189,7 +190,7 @@ public InternalAggregation buildEmptyAggregation() {
         InternalAutoDateHistogram.BucketInfo emptyBucketInfo = new InternalAutoDateHistogram.BucketInfo(roundingInfos, roundingIdx,
             buildEmptySubAggregations());
         return new InternalAutoDateHistogram(name, Collections.emptyList(), targetBuckets, emptyBucketInfo, formatter,
-            pipelineAggregators(), metaData());
+            pipelineAggregators(), metaData(), 1);
     }
 
     @Override
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java
index 7bc2b9a317838..f2e450942c3ad 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java
@@ -208,15 +208,16 @@ public int hashCode() {
     private final DocValueFormat format;
     private final BucketInfo bucketInfo;
     private final int targetBuckets;
-
+    private long bucketInnerInterval;
     InternalAutoDateHistogram(String name, List<Bucket> buckets, int targetBuckets, BucketInfo emptyBucketInfo, DocValueFormat formatter,
-            List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
+            List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData, long bucketInnerInterval) {
         super(name, pipelineAggregators, metaData);
         this.buckets = buckets;
         this.bucketInfo = emptyBucketInfo;
         this.format = formatter;
         this.targetBuckets = targetBuckets;
+        this.bucketInnerInterval = bucketInnerInterval;
     }
 
     /**
@@ -238,6 +239,13 @@ protected void doWriteTo(StreamOutput out) throws IOException {
         out.writeVInt(targetBuckets);
     }
 
+    public DateHistogramInterval getInterval() {
+
+        RoundingInfo roundingInfo = this.bucketInfo.roundingInfos[this.bucketInfo.roundingIdx];
+        String unitAbbreviation = roundingInfo.unitAbbreviation;
+        return new DateHistogramInterval(Long.toString(bucketInnerInterval) + unitAbbreviation);
+    }
+
     @Override
     public String getWriteableName() {
         return AutoDateHistogramAggregationBuilder.NAME;
@@ -262,7 +270,7 @@ public BucketInfo getBucketInfo() {
 
     @Override
     public InternalAutoDateHistogram create(List<Bucket> buckets) {
-        return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators(), metaData);
+        return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators(), metaData, 1);
     }
 
     @Override
@@ -279,7 +287,6 @@ private static class IteratorAndCurrent {
             this.iterator = iterator;
             current = iterator.next();
         }
-
     }
 
     /**
@@ -365,7 +372,7 @@ private BucketReduceResult mergeBucketsIfNeeded(List<Bucket> reducedBuckets, int
             reduceRoundingInfo = bucketInfo.roundingInfos[reduceRoundingIdx];
             reducedBuckets = mergeBuckets(reducedBuckets, reduceRoundingInfo.rounding, reduceContext);
         }
-        return new BucketReduceResult(reducedBuckets, reduceRoundingInfo, reduceRoundingIdx);
+        return new BucketReduceResult(reducedBuckets, reduceRoundingInfo, reduceRoundingIdx, 1);
     }
 
     private List<Bucket> mergeBuckets(List<Bucket> reducedBuckets, Rounding reduceRounding, ReduceContext reduceContext) {
@@ -403,12 +410,13 @@ private static class BucketReduceResult {
         List<Bucket> buckets;
         RoundingInfo roundingInfo;
         int roundingIdx;
+        long innerInterval;
 
-        BucketReduceResult(List<Bucket> buckets, RoundingInfo roundingInfo, int roundingIdx) {
+        BucketReduceResult(List<Bucket> buckets, RoundingInfo roundingInfo, int roundingIdx, long innerInterval) {
             this.buckets = buckets;
             this.roundingInfo = roundingInfo;
             this.roundingIdx = roundingIdx;
-
+            this.innerInterval = innerInterval;
         }
     }
 
@@ -444,7 +452,7 @@ private BucketReduceResult addEmptyBuckets(BucketReduceResult currentResult, ReduceContext reduceContext) {
             }
             lastBucket = iter.next();
         }
-        return new BucketReduceResult(list, roundingInfo, roundingIdx);
+        return new BucketReduceResult(list, roundingInfo, roundingIdx, currentResult.innerInterval);
     }
 
     static int getAppropriateRounding(long minKey, long maxKey, int roundingIdx,
@@ -507,7 +515,7 @@ public InternalAggregation doReduce(List<InternalAggregation> aggregations, ReduceContext reduceContext) {
                 this.bucketInfo.emptySubAggregations);
 
         return new InternalAutoDateHistogram(getName(), reducedBucketsResult.buckets, targetBuckets, bucketInfo, format,
-                pipelineAggregators(), getMetaData());
+                pipelineAggregators(), getMetaData(), reducedBucketsResult.innerInterval);
     }
 
     private BucketReduceResult maybeMergeConsecutiveBuckets(BucketReduceResult reducedBucketsResult,
@@ -547,7 +555,7 @@ private BucketReduceResult mergeConsecutiveBuckets(List<Bucket> reducedBuckets,
             reduceContext.consumeBucketsAndMaybeBreak(1);
             mergedBuckets.add(sameKeyedBuckets.get(0).reduce(sameKeyedBuckets, roundingInfo.rounding, reduceContext));
         }
-        return new BucketReduceResult(mergedBuckets, roundingInfo, roundingIdx);
+        return new BucketReduceResult(mergedBuckets, roundingInfo, roundingIdx, mergeInterval);
     }
 
     @Override
@@ -557,6 +565,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
             bucket.toXContent(builder, params);
         }
         builder.endArray();
+        builder.field("interval", getInterval().toString());
         return builder;
     }
 
@@ -580,7 +589,7 @@ public InternalAggregation createAggregation(List
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java
     public List<? extends Histogram.Bucket> getBuckets() {
         return buckets;
@@ -47,6 +58,8 @@ public List<? extends Histogram.Bucket> getBuckets() {
         declareMultiBucketAggregationFields(PARSER,
                 parser -> ParsedBucket.fromXContent(parser, false),
                 parser -> ParsedBucket.fromXContent(parser, true));
+        PARSER.declareString((parsed, value) -> parsed.interval = value,
+                new ParseField("interval"));
     }
 
     public static ParsedAutoDateHistogram fromXContent(XContentParser parser, String name) throws IOException {
@@ -55,6 +68,14 @@ public static ParsedAutoDateHistogram fromXContent(XContentParser parser, String name) throws IOException {
         return aggregation;
     }
 
+    @Override
+    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
+        builder = super.doXContentBody(builder, params);
+        builder.field("interval", getInterval());
+        return builder;
+    }
+
+
     public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements Histogram.Bucket {
 
         private Long key;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java
index b7c5bf03ac552..dd3425c20f43c 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java
@@ -80,7 +80,7 @@ protected InternalAutoDateHistogram createTestInstance(String name,
         }
         InternalAggregations subAggregations = new InternalAggregations(Collections.emptyList());
         BucketInfo bucketInfo = new BucketInfo(roundingInfos, randomIntBetween(0, roundingInfos.length - 1), subAggregations);
-        return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators, metaData);
+        return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators, metaData, 1);
     }
 
     /*
@@ -94,11 +94,11 @@ public void testGetAppropriateRoundingUsesCorrectIntervals() {
         // an innerInterval that is quite large, such that targetBuckets * roundings[i].getMaximumInnerInterval()
         // will be larger than the estimate.
         roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE, timeZone),
-            1000L, 1000);
+            1000L, "s", 1000);
         roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR, timeZone),
-            60 * 1000L, 1, 5, 10, 30);
+            60 * 1000L, "m", 1, 5, 10, 30);
         roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY, timeZone),
-            60 * 60 * 1000L, 1, 3, 12);
+            60 * 60 * 1000L, "h", 1, 3, 12);
 
         OffsetDateTime timestamp = Instant.parse("2018-01-01T00:00:01.000Z").atOffset(ZoneOffset.UTC);
         // We want to pass a roundingIdx of zero, because in order to reproduce this bug, we need the function
@@ -198,6 +198,14 @@ protected void assertReduced(InternalAutoDateHistogram reduced, List
                     (oldValue == null ? 0 : oldValue) + bucket.getDocCount());
         }
         assertEquals(expectedCounts, actualCounts);
+
+        DateHistogramInterval expectedInterval;
+        if (reduced.getBuckets().size() == 1) {
+            expectedInterval = reduced.getInterval();
+        } else {
+            expectedInterval = new DateHistogramInterval(innerIntervalToUse+roundingInfo.unitAbbreviation);
+        }
+        assertThat(reduced.getInterval(), equalTo(expectedInterval));
     }
 
     private int getBucketCount(long lowest, long highest, RoundingInfo roundingInfo, long intervalInMillis) {
@@ -252,6 +260,6 @@ protected InternalAutoDateHistogram mutateInstance(InternalAutoDateHistogram instance) {
         default:
             throw new AssertionError("Illegal randomisation branch");
         }
-        return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators, metaData);
+        return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators, metaData, 1);
     }
 }
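
A note for readers of this patch: the "interval" field added to the response is produced by the new InternalAutoDateHistogram.getInterval(), which concatenates the bucket inner interval with the selected rounding's unit abbreviation ("s", "m", "h", "d", "M" or "y"), giving values such as "1M" or "1h" as in the documentation examples above. Shard-level results are created with an inner interval of 1, and the reduce phase then carries the merged inner interval through BucketReduceResult. The standalone class below is a minimal illustrative sketch of that composition; IntervalSketch is hypothetical and not part of the patch.

    // Illustrative sketch only; the real logic lives in InternalAutoDateHistogram.getInterval().
    public class IntervalSketch {

        // e.g. 1 + "M" -> "1M", 3 + "h" -> "3h"; this string is what the
        // "interval" field of the auto_date_histogram response carries.
        static String interval(long bucketInnerInterval, String unitAbbreviation) {
            return Long.toString(bucketInnerInterval) + unitAbbreviation;
        }

        public static void main(String[] args) {
            System.out.println(interval(1, "M")); // prints 1M
            System.out.println(interval(1, "h")); // prints 1h
        }
    }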