
Merge Aggregations into InternalAggregations (#104896)
This commit merges Aggregations into InternalAggregations in order to remove the unnecessary hierarchy.
iverase authored Jan 30, 2024
1 parent 50afaad commit 7c8bb14
Showing 91 changed files with 408 additions and 458 deletions.
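Before the per-file diffs, here is a minimal consumer-side sketch of what the merge means in practice. It is not taken from this commit: the helper class AggregationAccessSketch, the aggregation name "total", and the index/field names are illustrative assumptions, while prepareSearch, getAggregations and the typed get(name) lookup are the calls exercised by the tests below. The visible effect is that response.getAggregations() now hands back InternalAggregations instead of the removed Aggregations interface; typed retrieval is unchanged.

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.internal.Client;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.metrics.Sum;

// Illustrative helper only; "index", "value" and "total" are made-up names.
final class AggregationAccessSketch {
    static double sumOfValues(Client client) {
        SearchResponse response = client.prepareSearch("index")
            .setSize(0)
            .addAggregation(AggregationBuilders.sum("total").field("value"))
            .get();
        try {
            // Previously typed as Aggregations; after #104896 the response
            // exposes the concrete InternalAggregations directly.
            InternalAggregations aggregations = response.getAggregations();
            Sum total = aggregations.get("total"); // typed lookup works as before
            return total.value();
        } finally {
            response.decRef(); // SearchResponse is ref-counted in recent 8.x releases
        }
    }
}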
@@ -22,8 +22,8 @@
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.RangeQueryBuilder;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.PipelineAggregatorBuilders;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import org.elasticsearch.search.aggregations.bucket.global.Global;
@@ -177,7 +177,7 @@ public void setupSuiteScopeCluster() throws Exception {

public void testStandAloneTimeSeriesAgg() {
assertNoFailuresAndResponse(prepareSearch("index").setSize(0).addAggregation(timeSeries("by_ts")), response -> {
Aggregations aggregations = response.getAggregations();
InternalAggregations aggregations = response.getAggregations();
assertNotNull(aggregations);
InternalTimeSeries timeSeries = aggregations.get("by_ts");
assertThat(
@@ -203,7 +203,7 @@ public void testTimeSeriesGroupedByADimension() {
.subAggregation(timeSeries("by_ts"))
),
response -> {
Aggregations aggregations = response.getAggregations();
InternalAggregations aggregations = response.getAggregations();
assertNotNull(aggregations);
Terms terms = aggregations.get("by_dim");
Set<Map<String, String>> keys = new HashSet<>();
@@ -236,7 +236,7 @@ public void testTimeSeriesGroupedByDateHistogram() {
.subAggregation(timeSeries("by_ts").subAggregation(stats("timestamp").field("@timestamp")))
),
response -> {
Aggregations aggregations = response.getAggregations();
InternalAggregations aggregations = response.getAggregations();
assertNotNull(aggregations);
Histogram histogram = aggregations.get("by_time");
Map<Map<String, String>, Long> keys = new HashMap<>();
@@ -275,7 +275,7 @@ public void testStandAloneTimeSeriesAggWithDimFilter() {
assertNoFailuresAndResponse(
prepareSearch("index").setQuery(queryBuilder).setSize(0).addAggregation(timeSeries("by_ts")),
response -> {
Aggregations aggregations = response.getAggregations();
InternalAggregations aggregations = response.getAggregations();
assertNotNull(aggregations);
InternalTimeSeries timeSeries = aggregations.get("by_ts");
Map<Map<String, String>, Map<Long, Map<String, Double>>> filteredData = dataFilteredByDimension("dim_" + dim, val, include);
@@ -308,7 +308,7 @@ public void testStandAloneTimeSeriesAggWithGlobalAggregation() {
.addAggregation(global("everything").subAggregation(sum("all_sum").field("metric_" + metric)))
.addAggregation(PipelineAggregatorBuilders.sumBucket("total_filter_sum", "by_ts>filter_sum")),
response -> {
Aggregations aggregations = response.getAggregations();
InternalAggregations aggregations = response.getAggregations();
assertNotNull(aggregations);
InternalTimeSeries timeSeries = aggregations.get("by_ts");
Map<Map<String, String>, Map<Long, Map<String, Double>>> filteredData = dataFilteredByDimension("dim_" + dim, val, include);
@@ -353,7 +353,7 @@ public void testStandAloneTimeSeriesAggWithMetricFilter() {
assertNoFailuresAndResponse(
prepareSearch("index").setQuery(queryBuilder).setSize(0).addAggregation(timeSeries("by_ts")),
response -> {
Aggregations aggregations = response.getAggregations();
InternalAggregations aggregations = response.getAggregations();
assertNotNull(aggregations);
InternalTimeSeries timeSeries = aggregations.get("by_ts");
Map<Map<String, String>, Map<Long, Map<String, Double>>> filteredData = dataFilteredByMetric(
@@ -69,7 +69,6 @@ public InternalAggregation reduce(InternalAggregation aggregation, AggregationRe
xDiff = (thisBucketKey.doubleValue() - lastBucketKey.doubleValue()) / xAxisUnits;
}
final List<InternalAggregation> aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false)
.map((p) -> (InternalAggregation) p)
.collect(Collectors.toCollection(ArrayList::new));
aggs.add(new Derivative(name(), gradient, xDiff, formatter, metadata()));
Bucket newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), InternalAggregations.from(aggs));
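One knock-on simplification is visible in the pipeline-reducer hunk above: iterating InternalAggregations now yields InternalAggregation elements directly, so the per-element ".map((p) -> (InternalAggregation) p)" cast the old code needed can be dropped. Below is a standalone sketch of that collection step; SubAggregationCopies and mutableCopy are hypothetical names, not the actual Elasticsearch source, and the stream calls mirror the hunk.

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;

final class SubAggregationCopies {
    // Copy a bucket's sub-aggregations into a mutable list so a derived result
    // (e.g. the Derivative built above) can be appended before the bucket is rebuilt.
    // No cast is needed: the spliterator already produces InternalAggregation.
    static List<InternalAggregation> mutableCopy(InternalAggregations aggregations) {
        return StreamSupport.stream(aggregations.spliterator(), false)
            .collect(Collectors.toCollection(ArrayList::new));
    }

    private SubAggregationCopies() {}
}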
@@ -41,13 +41,13 @@
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationExecutionContext;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorBase;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.CardinalityUpperBound;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.bucket.terms.LongTerms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
@@ -280,7 +280,7 @@ public void testFinalReduce() throws ExecutionException, InterruptedException {
: SearchRequest.subSearchRequest(taskId, originalRequest, Strings.EMPTY_ARRAY, "remote", nowInMillis, true);
assertResponse(client().search(searchRequest), searchResponse -> {
assertEquals(2, searchResponse.getHits().getTotalHits().value);
Aggregations aggregations = searchResponse.getAggregations();
InternalAggregations aggregations = searchResponse.getAggregations();
LongTerms longTerms = aggregations.get("terms");
assertEquals(1, longTerms.getBuckets().size());
});
@@ -296,7 +296,7 @@ public void testFinalReduce() throws ExecutionException, InterruptedException {
);
assertResponse(client().search(searchRequest), searchResponse -> {
assertEquals(2, searchResponse.getHits().getTotalHits().value);
Aggregations aggregations = searchResponse.getAggregations();
InternalAggregations aggregations = searchResponse.getAggregations();
LongTerms longTerms = aggregations.get("terms");
assertEquals(2, longTerms.getBuckets().size());
});
@@ -47,7 +47,7 @@ public void testScroll() {
assertNoFailures(response);

if (respNum == 1) { // initial response.
Aggregations aggregations = response.getAggregations();
InternalAggregations aggregations = response.getAggregations();
assertNotNull(aggregations);
Terms terms = aggregations.get("f");
assertEquals(Math.min(numDocs, 3L), terms.getBucketByKey("0").getDocCount());
@@ -66,7 +66,7 @@ public void testMultipleAggsOnSameField_WithDifferentRequiredValueSourceType() t
prepareSearch("idx").addAggregation(missing("missing_values").field("value"))
.addAggregation(terms("values").field("value").collectMode(aggCollectionMode)),
response -> {
Aggregations aggs = response.getAggregations();
InternalAggregations aggs = response.getAggregations();

Missing missing = aggs.get("missing_values");
assertNotNull(missing);
@@ -43,7 +43,7 @@ public void testMetadataSetOnAggregationResult() throws Exception {
terms("the_terms").setMetadata(metadata).field("name").subAggregation(sum("the_sum").setMetadata(metadata).field("value"))
).addAggregation(maxBucket("the_max_bucket", "the_terms>the_sum").setMetadata(metadata)),
response -> {
Aggregations aggs = response.getAggregations();
InternalAggregations aggs = response.getAggregations();
assertNotNull(aggs);

Terms terms = aggs.get("the_terms");
@@ -52,7 +52,7 @@ public void testMetadataSetOnAggregationResult() throws Exception {

List<? extends Terms.Bucket> buckets = terms.getBuckets();
for (Terms.Bucket bucket : buckets) {
Aggregations subAggs = bucket.getAggregations();
InternalAggregations subAggs = bucket.getAggregations();
assertNotNull(subAggs);

Sum sum = subAggs.get("the_sum");
@@ -19,9 +19,9 @@
import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter;
import org.elasticsearch.search.aggregations.bucket.terms.SignificantTerms;
import org.elasticsearch.search.aggregations.bucket.terms.SignificantTermsAggregatorFactory;
@@ -136,7 +136,7 @@ public void testXContentResponse() throws Exception {
StringTerms classes = response.getAggregations().get("class");
assertThat(classes.getBuckets().size(), equalTo(2));
for (Terms.Bucket classBucket : classes.getBuckets()) {
Map<String, Aggregation> aggs = classBucket.getAggregations().asMap();
Map<String, InternalAggregation> aggs = classBucket.getAggregations().asMap();
assertTrue(aggs.containsKey("sig_terms"));
SignificantTerms agg = (SignificantTerms) aggs.get("sig_terms");
assertThat(agg.getBuckets().size(), equalTo(1));
@@ -331,7 +331,7 @@ public void testBackgroundVsSeparateSet(
double score10Background = sigTerms1.getBucketByKey("0").getSignificanceScore();
double score11Background = sigTerms1.getBucketByKey("1").getSignificanceScore();

Aggregations aggs = response2.getAggregations();
InternalAggregations aggs = response2.getAggregations();

sigTerms0 = (SignificantTerms) ((InternalFilter) aggs.get("0")).getAggregations().getAsMap().get("sig_terms");
double score00SeparateSets = sigTerms0.getBucketByKey("0").getSignificanceScore();
@@ -386,7 +386,7 @@ public void testScoresEqualForPositiveAndNegative(SignificanceHeuristic heuristi
assertThat(classes.getBuckets().size(), equalTo(2));
Iterator<? extends Terms.Bucket> classBuckets = classes.getBuckets().iterator();

Aggregations aggregations = classBuckets.next().getAggregations();
InternalAggregations aggregations = classBuckets.next().getAggregations();
SignificantTerms sigTerms = aggregations.get("mySignificantTerms");

List<? extends SignificantTerms.Bucket> classA = sigTerms.getBuckets();
@@ -19,8 +19,8 @@
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.global.Global;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
@@ -1037,7 +1037,7 @@ public void testInitMapCombineReduceWithParamsAsSubAgg() {
for (Bucket b : buckets) {
assertThat(b, notNullValue());
assertThat(b.getDocCount(), equalTo(1L));
Aggregations subAggs = b.getAggregations();
InternalAggregations subAggs = b.getAggregations();
assertThat(subAggs, notNullValue());
assertThat(subAggs.asList().size(), equalTo(1));
Aggregation subAgg = subAggs.get("scripted");
@@ -19,7 +19,7 @@
import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation;
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
import org.elasticsearch.search.aggregations.metrics.Sum;
import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder;
import org.elasticsearch.test.ESIntegTestCase;
@@ -44,7 +44,7 @@
import static org.hamcrest.core.IsNull.notNullValue;

@ESIntegTestCase.SuiteScopeTestCase
abstract class BucketMetricsPipeLineAggregationTestCase<T extends NumericMetricsAggregation> extends ESIntegTestCase {
abstract class BucketMetricsPipeLineAggregationTestCase<T extends InternalNumericMetricsAggregation> extends ESIntegTestCase {

static final String SINGLE_VALUED_FIELD_NAME = "l_value";

@@ -31,7 +31,7 @@
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.core.IsNull.notNullValue;

public class ExtendedStatsBucketIT extends BucketMetricsPipeLineAggregationTestCase<ExtendedStatsBucket> {
public class ExtendedStatsBucketIT extends BucketMetricsPipeLineAggregationTestCase<InternalExtendedStatsBucket> {

@Override
protected ExtendedStatsBucketPipelineAggregationBuilder BucketMetricsPipelineAgg(String name, String bucketsPath) {
@@ -43,7 +43,7 @@ protected void assertResult(
IntToDoubleFunction buckets,
Function<Integer, String> bucketKeys,
int numBuckets,
ExtendedStatsBucket pipelineBucket
InternalExtendedStatsBucket pipelineBucket
) {
double sum = 0;
int count = 0;
@@ -71,7 +71,7 @@ protected String nestedMetric() {
}

@Override
protected double getNestedMetric(ExtendedStatsBucket bucket) {
protected double getNestedMetric(InternalExtendedStatsBucket bucket) {
return bucket.getAvg();
}

@@ -32,7 +32,7 @@
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.core.IsNull.notNullValue;

public class PercentilesBucketIT extends BucketMetricsPipeLineAggregationTestCase<PercentilesBucket> {
public class PercentilesBucketIT extends BucketMetricsPipeLineAggregationTestCase<InternalPercentilesBucket> {

private static final double[] PERCENTS = { 0.0, 1.0, 25.0, 50.0, 75.0, 99.0, 100.0 };

@@ -46,7 +46,7 @@ protected void assertResult(
IntToDoubleFunction bucketValues,
Function<Integer, String> bucketKeys,
int numBuckets,
PercentilesBucket pipelineBucket
InternalPercentilesBucket pipelineBucket
) {
double[] values = new double[numBuckets];
for (int i = 0; i < numBuckets; ++i) {
@@ -62,7 +62,7 @@ protected String nestedMetric() {
}

@Override
protected double getNestedMetric(PercentilesBucket bucket) {
protected double getNestedMetric(InternalPercentilesBucket bucket) {
return bucket.percentile(50);
}

@@ -14,7 +14,7 @@
import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.statsBucket;
import static org.hamcrest.Matchers.equalTo;

public class StatsBucketIT extends BucketMetricsPipeLineAggregationTestCase<StatsBucket> {
public class StatsBucketIT extends BucketMetricsPipeLineAggregationTestCase<InternalStatsBucket> {

@Override
protected StatsBucketPipelineAggregationBuilder BucketMetricsPipelineAgg(String name, String bucketsPath) {
@@ -26,7 +26,7 @@ protected void assertResult(
IntToDoubleFunction bucketValues,
Function<Integer, String> bucketKeys,
int numBuckets,
StatsBucket pipelineBucket
InternalStatsBucket pipelineBucket
) {
double sum = 0;
int count = 0;
@@ -52,7 +52,7 @@ protected String nestedMetric() {
}

@Override
protected double getNestedMetric(StatsBucket bucket) {
protected double getNestedMetric(InternalStatsBucket bucket) {
return bucket.getAvg();
}
}
@@ -22,7 +22,7 @@
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.range.InternalGeoDistance;
import org.elasticsearch.search.aggregations.bucket.range.Range;
import org.elasticsearch.test.ESIntegTestCase;
@@ -216,7 +216,7 @@ public void testGeoDistanceAggregation() throws IOException {
.addRange(0, 25000)
),
response -> {
Aggregations aggregations = response.getAggregations();
InternalAggregations aggregations = response.getAggregations();
assertNotNull(aggregations);
InternalGeoDistance geoDistance = aggregations.get(name);
assertNotNull(geoDistance);