From cedd78c3c94fb8f4803b1779d2b419f14e70481e Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Mon, 11 Feb 2019 11:52:26 -0600 Subject: [PATCH] [ML-DataFrame] Add support for (date) histogram pivots (#38725) * [FEATURE][DATA_FRAME] Adding (date) histogram group_by support for pivot * adjusting format for merge * Update DataFramePivotRestIT.java --- .../integration/DataFramePivotRestIT.java | 77 +++++++- .../integration/DataFrameRestTestCase.java | 16 +- .../pivot/DateHistogramGroupSource.java | 177 ++++++++++++++++++ .../transforms/pivot/GroupConfig.java | 12 ++ .../pivot/HistogramGroupSource.java | 97 ++++++++++ .../transforms/pivot/SingleGroupSource.java | 8 +- .../pivot/DateHistogramGroupSourceTests.java | 51 +++++ .../pivot/HistogramGroupSourceTests.java | 38 ++++ 8 files changed, 469 insertions(+), 7 deletions(-) create mode 100644 x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/DateHistogramGroupSource.java create mode 100644 x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/HistogramGroupSource.java create mode 100644 x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/DateHistogramGroupSourceTests.java create mode 100644 x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/HistogramGroupSourceTests.java diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java index cf2bb34e942ec..6cf07fd88e0c2 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java @@ -75,6 
+75,44 @@ public void testSimplePivotWithQuery() throws Exception { assertOnePivotValue(dataFrameIndex + "/_search?q=reviewer:user_26", 3.918918918); } + public void testHistogramPivot() throws Exception { + String transformId = "simpleHistogramPivot"; + String dataFrameIndex = "pivot_reviews_via_histogram"; + + final Request createDataframeTransformRequest = new Request("PUT", DATAFRAME_ENDPOINT + transformId); + + String config = "{" + + " \"source\": \"reviews\"," + + " \"dest\": \"" + dataFrameIndex + "\","; + + + config += " \"pivot\": {" + + " \"group_by\": [ {" + + " \"every_2\": {" + + " \"histogram\": {" + + " \"interval\": 2,\"field\":\"stars\"" + + " } } } ]," + + " \"aggregations\": {" + + " \"avg_rating\": {" + + " \"avg\": {" + + " \"field\": \"stars\"" + + " } } } }" + + "}"; + + + createDataframeTransformRequest.setJsonEntity(config); + Map createDataframeTransformResponse = entityAsMap(client().performRequest(createDataframeTransformRequest)); + assertThat(createDataframeTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + assertTrue(indexExists(dataFrameIndex)); + + startAndWaitForTransform(transformId, dataFrameIndex); + + // we expect 3 documents as there shall be 5 unique star values and we are bucketing every 2 starting at 0 + Map indexStats = getAsMap(dataFrameIndex + "/_stats"); + assertEquals(3, XContentMapValues.extractValue("_all.total.docs.count", indexStats)); + assertOnePivotValue(dataFrameIndex + "/_search?q=every_2:0.0", 1.0); + } + public void testBiggerPivot() throws Exception { String transformId = "biggerPivot"; String dataFrameIndex = "bigger_pivot_reviews"; @@ -149,6 +187,43 @@ public void testBiggerPivot() throws Exception { assertEquals(41, actual.longValue()); } + public void testDateHistogramPivot() throws Exception { + String transformId = "simpleDateHistogramPivot"; + String dataFrameIndex = "pivot_reviews_via_date_histogram"; + + final Request createDataframeTransformRequest = new Request("PUT", 
DATAFRAME_ENDPOINT + transformId); + + String config = "{" + + " \"source\": \"reviews\"," + + " \"dest\": \"" + dataFrameIndex + "\","; + + + config += " \"pivot\": {" + + " \"group_by\": [ {" + + " \"by_day\": {" + + " \"date_histogram\": {" + + " \"interval\": \"1d\",\"field\":\"timestamp\",\"format\":\"yyyy-MM-dd\"" + + " } } } ]," + + " \"aggregations\": {" + + " \"avg_rating\": {" + + " \"avg\": {" + + " \"field\": \"stars\"" + + " } } } }" + + "}"; + + createDataframeTransformRequest.setJsonEntity(config); + Map createDataframeTransformResponse = entityAsMap(client().performRequest(createDataframeTransformRequest)); + assertThat(createDataframeTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + assertTrue(indexExists(dataFrameIndex)); + + startAndWaitForTransform(transformId, dataFrameIndex); + + // we expect 21 documents as there shall be 21 days worth of docs + Map indexStats = getAsMap(dataFrameIndex + "/_stats"); + assertEquals(21, XContentMapValues.extractValue("_all.total.docs.count", indexStats)); + assertOnePivotValue(dataFrameIndex + "/_search?q=by_day:2017-01-15", 3.82); + } + private void startAndWaitForTransform(String transformId, String dataFrameIndex) throws IOException, Exception { // start the transform final Request startTransformRequest = new Request("POST", DATAFRAME_ENDPOINT + transformId + "/_start"); @@ -160,8 +235,6 @@ private void startAndWaitForTransform(String transformId, String dataFrameIndex) refreshIndex(dataFrameIndex); } - - private void waitForDataFrameGeneration(String transformId) throws Exception { assertBusy(() -> { long generation = getDataFrameGeneration(transformId); diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java index 7bb5bee90400a..d31c63de54279 ---
a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java @@ -45,6 +45,9 @@ protected void createReviewsIndex() throws IOException { { builder.startObject("mappings") .startObject("properties") + .startObject("timestamp") + .field("type", "date") + .endObject() .startObject("user_id") .field("type", "keyword") .endObject() @@ -66,11 +69,17 @@ protected void createReviewsIndex() throws IOException { // create index final StringBuilder bulk = new StringBuilder(); + int day = 10; for (int i = 0; i < numDocs; i++) { bulk.append("{\"index\":{\"_index\":\"reviews\"}}\n"); long user = Math.round(Math.pow(i * 31 % 1000, distributionTable[i % distributionTable.length]) % 27); int stars = distributionTable[(i * 33) % distributionTable.length]; long business = Math.round(Math.pow(user * stars, distributionTable[i % distributionTable.length]) % 13); + int hour = randomIntBetween(10, 20); + int min = randomIntBetween(30, 59); + int sec = randomIntBetween(30, 59); + + String date_string = "2017-01-" + day + "T" + hour + ":" + min + ":" + sec + "Z"; bulk.append("{\"user_id\":\"") .append("user_") .append(user) @@ -79,7 +88,9 @@ protected void createReviewsIndex() throws IOException { .append(business) .append("\",\"stars\":") .append(stars) - .append("}\n"); + .append(",\"timestamp\":\"") + .append(date_string) + .append("\"}\n"); if (i % 50 == 0) { bulk.append("\r\n"); @@ -89,6 +100,7 @@ protected void createReviewsIndex() throws IOException { client().performRequest(bulkRequest); // clear the builder bulk.setLength(0); + day += 1; } } bulk.append("\r\n"); @@ -209,4 +221,4 @@ protected static void wipeIndices() throws IOException { } } } -} \ No newline at end of file +} diff --git 
a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/DateHistogramGroupSource.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/DateHistogramGroupSource.java new file mode 100644 index 0000000000000..539b4d221304b --- /dev/null +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/DateHistogramGroupSource.java @@ -0,0 +1,177 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.dataframe.transforms.pivot; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; + +import java.io.IOException; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.util.Objects; + + +public class DateHistogramGroupSource extends SingleGroupSource { + + private static final String NAME = "data_frame_date_histogram_group"; + private static final ParseField TIME_ZONE = new ParseField("time_zone"); + private static final ParseField FORMAT = new ParseField("format"); + + private static final ConstructingObjectParser STRICT_PARSER = createParser(false); + private static final ConstructingObjectParser LENIENT_PARSER = createParser(true); + private long interval = 0; + private DateHistogramInterval dateHistogramInterval; + private String format; + private ZoneId timeZone; + + public 
DateHistogramGroupSource(String field) { + super(field); + } + + public DateHistogramGroupSource(StreamInput in) throws IOException { + super(in); + this.interval = in.readLong(); + this.dateHistogramInterval = in.readOptionalWriteable(DateHistogramInterval::new); + this.timeZone = in.readOptionalZoneId(); + this.format = in.readOptionalString(); + } + + private static ConstructingObjectParser createParser(boolean lenient) { + ConstructingObjectParser parser = new ConstructingObjectParser<>(NAME, lenient, (args) -> { + String field = (String) args[0]; + return new DateHistogramGroupSource(field); + }); + + SingleGroupSource.declareValuesSourceFields(parser, null); + + parser.declareField((histogram, interval) -> { + if (interval instanceof Long) { + histogram.setInterval((long) interval); + } else { + histogram.setDateHistogramInterval((DateHistogramInterval) interval); + } + }, p -> { + if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { + return p.longValue(); + } else { + return new DateHistogramInterval(p.text()); + } + }, HistogramGroupSource.INTERVAL, ObjectParser.ValueType.LONG); + + parser.declareField(DateHistogramGroupSource::setTimeZone, p -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return ZoneId.of(p.text()); + } else { + return ZoneOffset.ofHours(p.intValue()); + } + }, TIME_ZONE, ObjectParser.ValueType.LONG); + + parser.declareString(DateHistogramGroupSource::setFormat, FORMAT); + return parser; + } + + public static DateHistogramGroupSource fromXContent(final XContentParser parser, boolean lenient) throws IOException { + return lenient ? 
LENIENT_PARSER.apply(parser, null) : STRICT_PARSER.apply(parser, null); + } + + public long getInterval() { + return interval; + } + + public void setInterval(long interval) { + if (interval < 1) { + throw new IllegalArgumentException("[interval] must be greater than or equal to 1."); + } + this.interval = interval; + } + + public DateHistogramInterval getDateHistogramInterval() { + return dateHistogramInterval; + } + + public void setDateHistogramInterval(DateHistogramInterval dateHistogramInterval) { + if (dateHistogramInterval == null) { + throw new IllegalArgumentException("[dateHistogramInterval] must not be null"); + } + this.dateHistogramInterval = dateHistogramInterval; + } + + public String getFormat() { + return format; + } + + public void setFormat(String format) { + this.format = format; + } + + public ZoneId getTimeZone() { + return timeZone; + } + + public void setTimeZone(ZoneId timeZone) { + this.timeZone = timeZone; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(field); + out.writeLong(interval); + out.writeOptionalWriteable(dateHistogramInterval); + out.writeOptionalZoneId(timeZone); + out.writeOptionalString(format); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (field != null) { + builder.field(FIELD.getPreferredName(), field); + } + if (dateHistogramInterval == null) { + builder.field(HistogramGroupSource.INTERVAL.getPreferredName(), interval); + } else { + builder.field(HistogramGroupSource.INTERVAL.getPreferredName(), dateHistogramInterval.toString()); + } + if (timeZone != null) { + builder.field(TIME_ZONE.getPreferredName(), timeZone.toString()); + } + if (format != null) { + builder.field(FORMAT.getPreferredName(), format); + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == 
null || getClass() != other.getClass()) { + return false; + } + + final DateHistogramGroupSource that = (DateHistogramGroupSource) other; + + return Objects.equals(this.field, that.field) && + Objects.equals(interval, that.interval) && + Objects.equals(dateHistogramInterval, that.dateHistogramInterval) && + Objects.equals(timeZone, that.timeZone) && + Objects.equals(format, that.format); + } + + @Override + public int hashCode() { + return Objects.hash(field, interval, dateHistogramInterval, timeZone, format); + } +} diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfig.java index e674a809a2281..4792d59cdac59 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfig.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfig.java @@ -43,6 +43,12 @@ public GroupConfig(StreamInput in) throws IOException { case TERMS: groupSource = in.readOptionalWriteable(TermsGroupSource::new); break; + case HISTOGRAM: + groupSource = in.readOptionalWriteable(HistogramGroupSource::new); + break; + case DATE_HISTOGRAM: + groupSource = in.readOptionalWriteable(DateHistogramGroupSource::new); + break; default: throw new IOException("Unknown group type"); } @@ -126,6 +132,12 @@ public static GroupConfig fromXContent(final XContentParser parser, boolean leni case TERMS: groupSource = TermsGroupSource.fromXContent(parser, lenient); break; + case HISTOGRAM: + groupSource = HistogramGroupSource.fromXContent(parser, lenient); + break; + case DATE_HISTOGRAM: + groupSource = DateHistogramGroupSource.fromXContent(parser, lenient); + break; default: throw new ParsingException(parser.getTokenLocation(), "invalid grouping type: " + groupType); } diff --git 
a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/HistogramGroupSource.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/HistogramGroupSource.java new file mode 100644 index 0000000000000..2e6101368619e --- /dev/null +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/HistogramGroupSource.java @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.dataframe.transforms.pivot; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class HistogramGroupSource extends SingleGroupSource { + + static final ParseField INTERVAL = new ParseField("interval"); + private static final String NAME = "data_frame_histogram_group"; + private static final ConstructingObjectParser STRICT_PARSER = createParser(false); + private static final ConstructingObjectParser LENIENT_PARSER = createParser(true); + private final double interval; + + public HistogramGroupSource(String field, double interval) { + super(field); + if (interval <= 0) { + throw new IllegalArgumentException("[interval] must be greater than 0."); + } + this.interval = interval; + } + + public HistogramGroupSource(StreamInput in) throws IOException { + super(in); + interval = in.readDouble(); + } + + 
private static ConstructingObjectParser createParser(boolean lenient) { + ConstructingObjectParser parser = new ConstructingObjectParser<>(NAME, lenient, (args) -> { + String field = (String) args[0]; + double interval = (double) args[1]; + return new HistogramGroupSource(field, interval); + }); + SingleGroupSource.declareValuesSourceFields(parser, null); + parser.declareDouble(optionalConstructorArg(), INTERVAL); + return parser; + } + + public static HistogramGroupSource fromXContent(final XContentParser parser, boolean lenient) throws IOException { + return lenient ? LENIENT_PARSER.apply(parser, null) : STRICT_PARSER.apply(parser, null); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(field); + out.writeDouble(interval); + } + + public double getInterval() { + return interval; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (field != null) { + builder.field(FIELD.getPreferredName(), field); + } + builder.field(INTERVAL.getPreferredName(), interval); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + final HistogramGroupSource that = (HistogramGroupSource) other; + + return Objects.equals(this.field, that.field) && + Objects.equals(this.interval, that.interval); + } + + @Override + public int hashCode() { + return Objects.hash(field, interval); + } +} diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SingleGroupSource.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SingleGroupSource.java index b049666ea8db8..5cd65124f0650 100644 --- 
a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SingleGroupSource.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SingleGroupSource.java @@ -27,7 +27,9 @@ public abstract class SingleGroupSource> implements Writeable, ToXContentObject { public enum Type { - TERMS(0); + TERMS(0), + HISTOGRAM(1), + DATE_HISTOGRAM(2); private final byte id; @@ -53,10 +55,10 @@ public String value() { } } - private static final ParseField FIELD = new ParseField("field"); + protected static final ParseField FIELD = new ParseField("field"); // TODO: add script - private final String field; + protected final String field; static , T> void declareValuesSourceFields(AbstractObjectParser parser, ValueType targetValueType) { diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/DateHistogramGroupSourceTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/DateHistogramGroupSourceTests.java new file mode 100644 index 0000000000000..8e7c6028af5ba --- /dev/null +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/DateHistogramGroupSourceTests.java @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.dataframe.transforms.pivot; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; +import org.elasticsearch.test.AbstractSerializingTestCase; + +import java.io.IOException; + +public class DateHistogramGroupSourceTests extends AbstractSerializingTestCase { + + public static DateHistogramGroupSource randomDateHistogramGroupSource() { + String field = randomAlphaOfLengthBetween(1, 20); + DateHistogramGroupSource dateHistogramGroupSource = new DateHistogramGroupSource(field); + if (randomBoolean()) { + dateHistogramGroupSource.setInterval(randomLongBetween(1, 10_000)); + } else { + dateHistogramGroupSource.setDateHistogramInterval(randomFrom(DateHistogramInterval.days(10), + DateHistogramInterval.minutes(1), DateHistogramInterval.weeks(1))); + } + if (randomBoolean()) { + dateHistogramGroupSource.setTimeZone(randomZone()); + } + if (randomBoolean()) { + dateHistogramGroupSource.setFormat(randomAlphaOfLength(10)); + } + return dateHistogramGroupSource; + } + + @Override + protected DateHistogramGroupSource doParseInstance(XContentParser parser) throws IOException { + return DateHistogramGroupSource.fromXContent(parser, false); + } + + @Override + protected DateHistogramGroupSource createTestInstance() { + return randomDateHistogramGroupSource(); + } + + @Override + protected Reader instanceReader() { + return DateHistogramGroupSource::new; + } + +} diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/HistogramGroupSourceTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/HistogramGroupSourceTests.java new file mode 100644 index 0000000000000..3e2581fca5249 --- /dev/null +++ 
b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/HistogramGroupSourceTests.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.dataframe.transforms.pivot; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractSerializingTestCase; + +import java.io.IOException; + +public class HistogramGroupSourceTests extends AbstractSerializingTestCase { + + public static HistogramGroupSource randomHistogramGroupSource() { + String field = randomAlphaOfLengthBetween(1, 20); + double interval = randomDoubleBetween(Math.nextUp(0), Double.MAX_VALUE, false); + return new HistogramGroupSource(field, interval); + } + + @Override + protected HistogramGroupSource doParseInstance(XContentParser parser) throws IOException { + return HistogramGroupSource.fromXContent(parser, false); + } + + @Override + protected HistogramGroupSource createTestInstance() { + return randomHistogramGroupSource(); + } + + @Override + protected Reader instanceReader() { + return HistogramGroupSource::new; + } + +}