Fix profiled global agg (backport of #71575) #71634

Merged · 1 commit · Apr 13, 2021
@@ -261,6 +261,11 @@ public Query buildQuery(QueryBuilder builder) throws IOException {
throw new UnsupportedOperationException();
}

@Override
public Query filterQuery(Query query) {
throw new UnsupportedOperationException();
}

@Override
public IndexSettings getIndexSettings() {
throw new UnsupportedOperationException();
@@ -0,0 +1,64 @@
setup:
- do:
bulk:
refresh: true
index: test
body:
- '{"index": {}}'
- '{"name": "one"}'
- '{"index": {}}'
- '{"name": "two"}'
- '{"index": {}}'
- '{"name": "two"}'

---
simple:
- do:
search:
index: test
body:
size: 0
query:
match:
name: two
aggs:
g:
global: {}
aggs:
t:
terms:
field: name.keyword

- match: { aggregations.g.doc_count: 3 }
- length: { aggregations.g.t.buckets: 2 }
- match: { aggregations.g.t.buckets.0.key: two }
- match: { aggregations.g.t.buckets.1.key: one }

---
profile:
- skip:
version: " - 7.99.99"
reason: fixed in 8.0.0 (to be backported to 7.13.0)

- do:
search:
index: test
body:
profile: true
size: 0
query:
match:
name: two
aggs:
g:
global: {}
aggs:
t:
terms:
field: name.keyword

- match: { aggregations.g.doc_count: 3 }
- length: { aggregations.g.t.buckets: 2 }
- match: { aggregations.g.t.buckets.0.key: two }
- match: { aggregations.g.t.buckets.1.key: one }
- match: { profile.shards.0.aggregations.0.description: g }
@@ -983,7 +983,8 @@ private void parseSource(DefaultSearchContext context, SearchSourceBuilder sourc
context.bitsetFilterCache(),
context.indexShard().shardId().hashCode(),
context::getRelativeTimeInMillis,
context::isCancelled
context::isCancelled,
context::buildFilteredQuery
);
context.addReleasable(aggContext);
try {
@@ -8,18 +8,13 @@
package org.elasticsearch.search.aggregations;

import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregator;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.profile.query.CollectorResult;
import org.elasticsearch.search.profile.query.InternalProfileCollector;
import org.elasticsearch.search.query.QueryPhaseExecutionException;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
@@ -32,31 +27,24 @@ public AggregationPhase() {
}

public void preProcess(SearchContext context) {
if (context.aggregations() != null) {
List<Aggregator> collectors = new ArrayList<>();
Aggregator[] aggregators;
try {
aggregators = context.aggregations().factories().createTopLevelAggregators();
for (int i = 0; i < aggregators.length; i++) {
if (aggregators[i] instanceof GlobalAggregator == false) {
collectors.add(aggregators[i]);
}
}
context.aggregations().aggregators(aggregators);
if (collectors.isEmpty() == false) {
Collector collector = MultiBucketCollector.wrap(true, collectors);
((BucketCollector)collector).preCollection();
if (context.getProfilers() != null) {
collector = new InternalProfileCollector(collector, CollectorResult.REASON_AGGREGATION,
// TODO: report on child aggs as well
Collections.emptyList());
}
context.queryCollectors().put(AggregationPhase.class, collector);
}
} catch (IOException e) {
throw new AggregationInitializationException("Could not initialize aggregators", e);
}
if (context.aggregations() == null) {
return;
}
BucketCollector bucketCollector;
try {
context.aggregations().aggregators(context.aggregations().factories().createTopLevelAggregators());
bucketCollector = MultiBucketCollector.wrap(
true,
org.elasticsearch.common.collect.List.of(context.aggregations().aggregators())
);
bucketCollector.preCollection();
} catch (IOException e) {
throw new AggregationInitializationException("Could not initialize aggregators", e);
}
Collector collector = context.getProfilers() == null
? bucketCollector
: new InternalProfileCollector(bucketCollector, CollectorResult.REASON_AGGREGATION, org.elasticsearch.common.collect.List.of());
context.queryCollectors().put(AggregationPhase.class, collector);
}

public void execute(SearchContext context) {
@@ -71,37 +59,6 @@ public void execute(SearchContext context) {
}

Aggregator[] aggregators = context.aggregations().aggregators();
List<Aggregator> globals = new ArrayList<>();
for (int i = 0; i < aggregators.length; i++) {
if (aggregators[i] instanceof GlobalAggregator) {
globals.add(aggregators[i]);
}
}

// optimize the global collector based execution
if (globals.isEmpty() == false) {
BucketCollector globalsCollector = MultiBucketCollector.wrap(false, globals);
Query query = context.buildFilteredQuery(Queries.newMatchAllQuery());

try {
final Collector collector;
if (context.getProfilers() == null) {
collector = globalsCollector;
} else {
InternalProfileCollector profileCollector = new InternalProfileCollector(
globalsCollector, CollectorResult.REASON_AGGREGATION_GLOBAL,
// TODO: report on sub collectors
Collections.emptyList());
collector = profileCollector;
// start a new profile with this collector
context.getProfilers().addQueryProfiler().setCollector(profileCollector);
}
globalsCollector.preCollection();
context.searcher().search(query, collector);
} catch (Exception e) {
throw new QueryPhaseExecutionException(context.shardTarget(), "Failed to execute global aggregators", e);
}
}

List<InternalAggregation> aggregations = new ArrayList<>(aggregators.length);
if (context.aggregations().factories().context() != null) {
@@ -8,11 +8,15 @@
package org.elasticsearch.search.aggregations.bucket.global;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.BulkScorer;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.Weight;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.CardinalityUpperBound;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
@@ -21,22 +25,34 @@
import java.util.Map;

public class GlobalAggregator extends BucketsAggregator implements SingleBucketAggregator {
private final Weight weight;

public GlobalAggregator(String name, AggregatorFactories subFactories, AggregationContext context, Map<String, Object> metadata)
throws IOException {

super(name, subFactories, context, null, CardinalityUpperBound.ONE, metadata);
weight = context.filterQuery(new MatchAllDocsQuery()).createWeight(context.searcher(), scoreMode(), 1.0f);
}

@Override
public LeafBucketCollector getLeafCollector(LeafReaderContext ctx,
final LeafBucketCollector sub) throws IOException {
return new LeafBucketCollectorBase(sub, null) {
public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCollector sub) throws IOException {
// Run sub-aggregations on child documents
BulkScorer scorer = weight.bulkScorer(ctx);
if (scorer == null) {
return LeafBucketCollector.NO_OP_COLLECTOR;
}
scorer.score(new LeafCollector() {
@Override
public void collect(int doc) throws IOException {
collectBucket(sub, doc, 0);
}

@Override
public void collect(int doc, long bucket) throws IOException {
assert bucket == 0 : "global aggregator can only be a top level aggregator";
collectBucket(sub, doc, bucket);
public void setScorer(Scorable scorer) throws IOException {
sub.setScorer(scorer);
}
};
}, ctx.reader().getLiveDocs());
return LeafBucketCollector.NO_OP_COLLECTOR;
}

@Override
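Reviewer note: the heart of the change is that GlobalAggregator now drives its own collection. When the query phase asks it for a leaf collector, it bulk-scores every live document of that segment through its sub-aggregators itself and then hands back NO_OP_COLLECTOR, so documents matching the top-level query are not collected a second time. A minimal sketch of that Lucene pattern, using hypothetical names (MatchAllReplay, replay) rather than the PR's exact wiring:

```java
import java.io.IOException;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.BulkScorer;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Weight;

/**
 * Sketch of the collection pattern the new GlobalAggregator uses: build a
 * Weight once, then bulk-score every live document of each segment into a
 * caller-supplied LeafCollector. In the PR the match-all query additionally
 * goes through AggregationContext#filterQuery so alias and slice filters apply.
 */
final class MatchAllReplay {
    private final Weight weight;

    MatchAllReplay(IndexSearcher searcher) throws IOException {
        // The sketch skips scoring; the real aggregator passes its own scoreMode().
        weight = new MatchAllDocsQuery().createWeight(searcher, ScoreMode.COMPLETE_NO_SCORES, 1.0f);
    }

    /** Push every live doc of this segment through the collector. */
    void replay(LeafReaderContext ctx, LeafCollector collector) throws IOException {
        BulkScorer scorer = weight.bulkScorer(ctx);
        if (scorer == null) {
            return; // no matching docs in this segment
        }
        scorer.score(collector, ctx.reader().getLiveDocs());
    }
}
```

Because this now happens inside the ordinary collector tree registered by preProcess, the existing profiling wrapper sees the global aggregator like any other top-level aggregator, which is what the new REST test above asserts.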
@@ -160,6 +160,13 @@ public final AggregationUsageService getUsageService() {
*/
public abstract Query buildQuery(QueryBuilder builder) throws IOException;

/**
* Add filters from slice or filtered aliases. If you make a new query
* and don't combine it with the {@link #query() top level query} then
* you must provide it to this method.
*/
public abstract Query filterQuery(Query query);

/**
* The settings for the index against which this search is running.
*/
@@ -259,6 +266,7 @@ public static class ProductionAggregationContext extends AggregationContext {
private final int randomSeed;
private final LongSupplier relativeTimeInMillis;
private final Supplier<Boolean> isCancelled;
private final Function<Query, Query> filterQuery;

private final List<Aggregator> releaseMe = new ArrayList<>();

@@ -273,7 +281,8 @@ public ProductionAggregationContext(
BitsetFilterCache bitsetFilterCache,
int randomSeed,
LongSupplier relativeTimeInMillis,
Supplier<Boolean> isCancelled
Supplier<Boolean> isCancelled,
Function<Query, Query> filterQuery
) {
this.context = context;
if (bytesToPreallocate == 0) {
@@ -303,6 +312,7 @@ public ProductionAggregationContext(
this.randomSeed = randomSeed;
this.relativeTimeInMillis = relativeTimeInMillis;
this.isCancelled = isCancelled;
this.filterQuery = filterQuery;
}

@Override
@@ -378,6 +388,11 @@ public Query buildQuery(QueryBuilder builder) throws IOException {
return Rewriteable.rewrite(builder, context, true).toQuery(context);
}

@Override
public Query filterQuery(Query query) {
return filterQuery.apply(query);
}

@Override
public IndexSettings getIndexSettings() {
return context.getIndexSettings();
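As I read the Javadoc of the new hook, the contract is: a query an aggregator builds from scratch, rather than combining with the top-level query, must be routed through filterQuery so filtered aliases and sliced scrolls keep applying. A hedged illustration with illustrative names (FilterQueryContract and its methods are not from the PR):

```java
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.search.aggregations.support.AggregationContext;

final class FilterQueryContract {
    /** Case 1: the extra query is combined with the top-level query, which the
     *  Javadoc implies already carries the alias/slice filters – no extra call. */
    static Query combinedWithTopLevel(AggregationContext context, Query extra) {
        return new BooleanQuery.Builder()
            .add(context.query(), BooleanClause.Occur.MUST)
            .add(extra, BooleanClause.Occur.FILTER)
            .build();
    }

    /** Case 2: a query built from scratch (match-all, as GlobalAggregator does)
     *  must go through filterQuery(...) so the same filters still restrict it. */
    static Query standalone(AggregationContext context) {
        return context.filterQuery(new MatchAllDocsQuery());
    }
}
```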
@@ -18,9 +18,11 @@
import org.elasticsearch.index.mapper.DocCountFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal;
import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter;

import java.io.IOException;
import org.elasticsearch.common.collect.List;
@@ -88,10 +90,10 @@ public void testQueryFiltering() throws IOException {

private void testAggregation(Query query,
CheckedConsumer<RandomIndexWriter, IOException> indexer,
Consumer<InternalGlobal> verify) throws IOException {
GlobalAggregationBuilder aggregationBuilder = new GlobalAggregationBuilder("_name");
Consumer<InternalFilter> verify) throws IOException {
AggregationBuilder builder = new FilterAggregationBuilder("f", new MatchAllQueryBuilder());
MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NUMBER_FIELD, NumberFieldMapper.NumberType.LONG);
MappedFieldType docCountFieldType = new DocCountFieldMapper.DocCountFieldType();
testCase(aggregationBuilder, query, indexer, verify, fieldType, docCountFieldType);
testCase(builder, query, indexer, verify, fieldType, docCountFieldType);
}
}