From 21d9de04a1c00b5c8524ae0639909c134ce0c69f Mon Sep 17 00:00:00 2001
From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com>
Date: Fri, 22 Sep 2023 14:30:34 +0100
Subject: [PATCH] Add a Factory for building blocks and Vectors (#99657)

This commit adds a BlockFactory - an extra level of indirection when
building blocks. The factory couples building with circuit breaking, so
that the breaker can be incremented as blocks and vectors are built.
This PR adds the infrastructure that lets us move operators and
implementations over to the factory incrementally, rather than moving
them all at once.
---
 .../compute/operator/AggregatorBenchmark.java | 12 +-
 .../compute/operator/EvalBenchmark.java       |  7 +-
 x-pack/plugin/esql/compute/build.gradle       |  2 +-
 .../compute/data/BooleanArrayBlock.java       | 17 +-
 .../compute/data/BooleanArrayVector.java      | 10 +-
 .../compute/data/BooleanBigArrayVector.java   |  6 +-
 .../compute/data/BooleanBlock.java            | 16 +-
 .../compute/data/BooleanBlockBuilder.java     | 22 +-
 .../compute/data/BooleanVector.java           |  8 +-
 .../compute/data/BooleanVectorBlock.java      |  7 +-
 .../compute/data/BooleanVectorBuilder.java    | 25 +-
 .../compute/data/BytesRefArrayBlock.java      | 19 +-
 .../compute/data/BytesRefArrayVector.java     |  7 +-
 .../compute/data/BytesRefBlock.java           | 16 +-
 .../compute/data/BytesRefBlockBuilder.java    | 25 +-
 .../compute/data/BytesRefVector.java          |  8 +-
 .../compute/data/BytesRefVectorBlock.java     |  7 +-
 .../compute/data/BytesRefVectorBuilder.java   | 24 +-
 .../compute/data/ConstantBooleanVector.java   |  8 +-
 .../compute/data/ConstantBytesRefVector.java  |  8 +-
 .../compute/data/ConstantDoubleVector.java    |  8 +-
 .../compute/data/ConstantIntVector.java       |  8 +-
 .../compute/data/ConstantLongVector.java      |  8 +-
 .../compute/data/DoubleArrayBlock.java        | 17 +-
 .../compute/data/DoubleArrayVector.java       | 10 +-
 .../compute/data/DoubleBigArrayVector.java    |  6 +-
 .../compute/data/DoubleBlock.java             | 16 +-
 .../compute/data/DoubleBlockBuilder.java      | 22 +-
 .../compute/data/DoubleVector.java            |  8 +-
 .../compute/data/DoubleVectorBlock.java       |  7 +-
 .../compute/data/DoubleVectorBuilder.java     | 25 +-
 .../compute/data/FilterBooleanVector.java     |  7 +-
 .../compute/data/FilterBytesRefVector.java    |  7 +-
 .../compute/data/FilterDoubleVector.java      |  7 +-
 .../compute/data/FilterIntVector.java         |  7 +-
 .../compute/data/FilterLongVector.java        |  7 +-
 .../compute/data/IntArrayBlock.java           | 17 +-
 .../compute/data/IntArrayVector.java          | 10 +-
 .../compute/data/IntBigArrayVector.java       |  6 +-
 .../elasticsearch/compute/data/IntBlock.java  | 16 +-
 .../compute/data/IntBlockBuilder.java         | 22 +-
 .../elasticsearch/compute/data/IntVector.java |  8 +-
 .../compute/data/IntVectorBlock.java          |  7 +-
 .../compute/data/IntVectorBuilder.java        | 25 +-
 .../compute/data/LongArrayBlock.java          | 17 +-
 .../compute/data/LongArrayVector.java         | 10 +-
 .../compute/data/LongBigArrayVector.java      |  6 +-
 .../elasticsearch/compute/data/LongBlock.java | 16 +-
 .../compute/data/LongBlockBuilder.java        | 22 +-
 .../compute/data/LongVector.java              |  8 +-
 .../compute/data/LongVectorBlock.java         |  7 +-
 .../compute/data/LongVectorBuilder.java       | 25 +-
 .../compute/src/main/java/module-info.java    |  2 +
 .../blockhash/BytesRefLongBlockHash.java      |  1 +
 .../aggregation/blockhash/IntBlockHash.java   |  2 +
 .../aggregation/blockhash/LongBlockHash.java  |  3 +-
 .../compute/data/AbstractArrayBlock.java      | 14 +-
 .../compute/data/AbstractBlock.java           | 13 +-
 .../compute/data/AbstractBlockBuilder.java    | 20 +-
 .../compute/data/AbstractFilterBlock.java     |  5 +
 .../compute/data/AbstractFilterVector.java    |  4 +-
 .../compute/data/AbstractVector.java          | 15 +-
 .../compute/data/AbstractVectorBlock.java     |  4 +-
 .../compute/data/AbstractVectorBuilder.java   | 18 +
 .../org/elasticsearch/compute/data/Block.java | 12 +-
 .../compute/data/BlockFactory.java            | 317 ++++
 .../compute/data/BlockFactoryParameters.java  | 23 +
 .../compute/data/ConstantNullBlock.java       |  9 +-
 .../elasticsearch/compute/data/DocBlock.java  |  2 +-
 .../elasticsearch/compute/data/DocVector.java |  2 +-
 .../elasticsearch/compute/data/Vector.java    |  3 +
 .../compute/data/X-ArrayBlock.java.st         | 29 +-
 .../compute/data/X-ArrayVector.java.st        | 19 +-
 .../compute/data/X-BigArrayVector.java.st     |  6 +-
 .../compute/data/X-Block.java.st              | 16 +-
 .../compute/data/X-BlockBuilder.java.st       | 34 +-
 .../compute/data/X-ConstantVector.java.st     |  8 +-
 .../compute/data/X-FilterVector.java.st       |  7 +-
 .../compute/data/X-Vector.java.st             |  8 +-
 .../compute/data/X-VectorBlock.java.st        |  7 +-
 .../compute/data/X-VectorBuilder.java.st      | 41 +-
 .../compute/operator/DriverContext.java       | 11 +-
 .../operator/ThrowingDriverContext.java       |  8 +-
 .../operator/topn/ResultBuilderForDoc.java    |  8 +-
 .../elasticsearch/compute/OperatorTests.java  | 16 +-
 .../compute/TestBlockFactoryParameters.java   | 42 ++
 ...untDistinctIntAggregatorFunctionTests.java |  5 +-
 ...ntDistinctLongAggregatorFunctionTests.java |  5 +-
 .../GroupingAggregatorFunctionTestCase.java   |  8 +-
 .../SumIntAggregatorFunctionTests.java        |  5 +-
 .../SumLongAggregatorFunctionTests.java       |  5 +-
 .../blockhash/BlockHashRandomizedTests.java   |  4 +-
 .../aggregation/blockhash/BlockHashTests.java | 105 +++-
 .../compute/data/BasicBlockTests.java         | 174 ++++--
 .../compute/data/BlockAccountingTests.java    | 61 +-
 .../data/BlockBuilderAppendBlockTests.java    |  2 +-
 .../compute/data/BlockFactoryTests.java       | 564 ++++++++++++++++++
 .../compute/operator/AnyOperatorTestCase.java |  3 +-
 .../compute/operator/AsyncOperatorTests.java  |  4 +-
 .../compute/operator/DriverContextTests.java  |  6 +-
 .../compute/operator/OperatorTestCase.java    | 10 +-
 .../compute/operator/RowOperatorTests.java    |  4 +-
 .../SequenceDoubleBlockSourceOperator.java    |  4 +-
 .../SequenceLongBlockSourceOperator.java      |  4 +-
 .../exchange/ExchangeServiceTests.java        |  4 +-
 .../operator/topn/TopNOperatorTests.java      | 13 +-
 ...search.compute.data.BlockFactoryParameters |  8 +
 .../xpack/esql/lookup/EnrichLookupIT.java     |  4 +-
 .../esql/enrich/EnrichLookupService.java      |  8 +-
 .../esql/planner/LocalExecutionPlanner.java   | 16 +-
 .../xpack/esql/plugin/ComputeService.java     |  8 +-
 .../esql/plugin/EsqlBlockFactoryParams.java   | 45 ++
 .../esql/plugin/TransportEsqlQueryAction.java |  8 +-
 ...search.compute.data.BlockFactoryParameters |  8 +
 .../elasticsearch/xpack/esql/CsvTests.java    |  2 +
 .../function/AbstractFunctionTestCase.java    |  4 +-
 .../xpack/esql/planner/EvalMapperTests.java   |  4 +-
 .../planner/LocalExecutionPlannerTests.java   |  2 +
 118 files changed, 2089 insertions(+), 393 deletions(-)
 create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java
 create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactoryParameters.java
 create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/TestBlockFactoryParameters.java
 create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java
 create mode 100644 x-pack/plugin/esql/compute/src/test/resources/META-INF/services/org.elasticsearch.compute.data.BlockFactoryParameters
 create mode 100644
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlBlockFactoryParams.java create mode 100644 x-pack/plugin/esql/src/main/resources/META-INF/services/org.elasticsearch.compute.data.BlockFactoryParameters diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java index 75fe76ea8fbb0..545960c7003ab 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java @@ -9,6 +9,7 @@ package org.elasticsearch.benchmark.compute.operator; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.AggregatorMode; @@ -23,6 +24,7 @@ import org.elasticsearch.compute.aggregation.SumLongAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.blockhash.BlockHash; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleArrayVector; @@ -139,10 +141,11 @@ private static Operator operator(String grouping, String op, String dataType) { ); default -> throw new IllegalArgumentException("unsupported grouping [" + grouping + "]"); }; + DriverContext driverContext = driverContext(); return new HashAggregationOperator( List.of(supplier(op, dataType, groups.size()).groupingAggregatorFactory(AggregatorMode.SINGLE)), () -> BlockHash.build(groups, BIG_ARRAYS, 16 * 1024, false), - new DriverContext(BigArrays.NON_RECYCLING_INSTANCE) + driverContext ); } @@ -576,4 +579,11 @@ private static void run(String grouping, String op, String blockType, int opCoun operator.finish(); checkExpected(grouping, op, blockType, dataType, operator.getOutput(), opCount); } + + static DriverContext driverContext() { + return new DriverContext( + BigArrays.NON_RECYCLING_INSTANCE, + BlockFactory.getInstance(new NoopCircuitBreaker("noop"), BigArrays.NON_RECYCLING_INSTANCE) + ); + } } diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java index 909bf16815a0d..82c9416515d24 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java @@ -8,8 +8,10 @@ package org.elasticsearch.benchmark.compute.operator; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.IntBlock; @@ -262,6 +264,9 @@ private static void run(String operation) { } static DriverContext driverContext() { - return new DriverContext(BigArrays.NON_RECYCLING_INSTANCE); + return new DriverContext( + BigArrays.NON_RECYCLING_INSTANCE, + BlockFactory.getInstance(new NoopCircuitBreaker("noop"), 
BigArrays.NON_RECYCLING_INSTANCE) + ); } } diff --git a/x-pack/plugin/esql/compute/build.gradle b/x-pack/plugin/esql/compute/build.gradle index 6058770f25d1b..cd4b131a5b0fe 100644 --- a/x-pack/plugin/esql/compute/build.gradle +++ b/x-pack/plugin/esql/compute/build.gradle @@ -50,7 +50,7 @@ tasks.named('stringTemplates').configure { var longProperties = prop("Long", "long", "LONG", "Long.BYTES") var doubleProperties = prop("Double", "double", "DOUBLE", "Double.BYTES") var bytesRefProperties = prop("BytesRef", "BytesRef", "BYTES_REF", "org.apache.lucene.util.RamUsageEstimator.NUM_BYTES_OBJECT_REF") - var booleanProperties = prop("Boolean", "boolean", "BOOLEAN", "Boolean.BYTES") + var booleanProperties = prop("Boolean", "boolean", "BOOLEAN", "Byte.BYTES") // primitive vectors File vectorInputFile = new File("${projectDir}/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st") template { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java index 53986bf693122..fafe1ce5a0416 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java @@ -24,7 +24,18 @@ public final class BooleanArrayBlock extends AbstractArrayBlock implements Boole private final boolean[] values; public BooleanArrayBlock(boolean[] values, int positionCount, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { - super(positionCount, firstValueIndexes, nulls, mvOrdering); + this(values, positionCount, firstValueIndexes, nulls, mvOrdering, BlockFactory.getNonBreakingInstance()); + } + + public BooleanArrayBlock( + boolean[] values, + int positionCount, + int[] firstValueIndexes, + BitSet nulls, + MvOrdering mvOrdering, + BlockFactory blockFactory + ) { + super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); this.values = values; } @@ -58,7 +69,7 @@ public BooleanBlock expand() { return new BooleanArrayVector(values, end).asBlock(); } int[] firstValues = IntStream.range(0, end + 1).toArray(); - return new BooleanArrayBlock(values, end, firstValues, shiftNullsToExpandedPositions(), MvOrdering.UNORDERED); + return new BooleanArrayBlock(values, end, firstValues, shiftNullsToExpandedPositions(), MvOrdering.UNORDERED, blockFactory); } public static long ramBytesEstimated(boolean[] values, int[] firstValueIndexes, BitSet nullsMask) { @@ -98,6 +109,6 @@ public String toString() { @Override public void close() { - // no-op + blockFactory.adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java index be89563e1faf3..840f965ff6806 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java @@ -22,7 +22,11 @@ public final class BooleanArrayVector extends AbstractVector implements BooleanV private final boolean[] values; public BooleanArrayVector(boolean[] values, int positionCount) { - super(positionCount); + this(values, positionCount, BlockFactory.getNonBreakingInstance()); + } + + public 
BooleanArrayVector(boolean[] values, int positionCount, BlockFactory blockFactory) { + super(positionCount, blockFactory); this.values = values; } @@ -78,8 +82,4 @@ public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + Arrays.toString(values) + ']'; } - @Override - public void close() { - // no-op - } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java index 25a34b383a4b4..777914f51aef4 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java @@ -22,7 +22,11 @@ public final class BooleanBigArrayVector extends AbstractVector implements Boole private final BitArray values; public BooleanBigArrayVector(BitArray values, int positionCount) { - super(positionCount); + this(values, positionCount, BlockFactory.getNonBreakingInstance()); + } + + public BooleanBigArrayVector(BitArray values, int positionCount, BlockFactory blockFactory) { + super(positionCount, blockFactory); this.values = values; } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java index 03f82d7b952cb..f2501d54a4ae3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java @@ -157,12 +157,24 @@ static int hash(BooleanBlock block) { return result; } + /** Returns a builder using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + // Eventually, we want to remove this entirely, always passing an explicit BlockFactory static Builder newBlockBuilder(int estimatedSize) { - return new BooleanBlockBuilder(estimatedSize); + return newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); } + static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { + return blockFactory.newBooleanBlockBuilder(estimatedSize); + } + + /** Returns a block using the {@link BlockFactory#getNonBreakingInstance block factory}. 
*/ + // Eventually, we want to remove this entirely, always passing an explicit BlockFactory static BooleanBlock newConstantBlockWith(boolean value, int positions) { - return new ConstantBooleanVector(value, positions).asBlock(); + return newConstantBlockWith(value, positions, BlockFactory.getNonBreakingInstance()); + } + + static BooleanBlock newConstantBlockWith(boolean value, int positions, BlockFactory blockFactory) { + return blockFactory.newConstantBooleanBlockWith(value, positions); } sealed interface Builder extends Block.Builder permits BooleanBlockBuilder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java index 101998edbd3bd..a7d397fcfb98e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java @@ -17,8 +17,11 @@ final class BooleanBlockBuilder extends AbstractBlockBuilder implements BooleanB private boolean[] values; - BooleanBlockBuilder(int estimatedSize) { - values = new boolean[Math.max(estimatedSize, 2)]; + BooleanBlockBuilder(int estimatedSize, BlockFactory blockFactory) { + super(blockFactory); + int initialSize = Math.max(estimatedSize, 2); + adjustBreaker(initialSize); + values = new boolean[initialSize]; } @Override @@ -31,6 +34,11 @@ public BooleanBlockBuilder appendBoolean(boolean value) { return this; } + @Override + protected int elementSize() { + return Byte.BYTES; + } + @Override protected int valuesLength() { return values.length; @@ -171,17 +179,21 @@ public BooleanBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { @Override public BooleanBlock build() { finish(); + BooleanBlock block; if (hasNonNullValue && positionCount == 1 && valueCount == 1) { - return new ConstantBooleanVector(values[0], 1).asBlock(); + block = new ConstantBooleanVector(values[0], 1, blockFactory).asBlock(); } else { if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { values = Arrays.copyOf(values, valueCount); } if (isDense() && singleValued()) { - return new BooleanArrayVector(values, positionCount).asBlock(); + block = new BooleanArrayVector(values, positionCount, blockFactory).asBlock(); } else { - return new BooleanArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering); + block = new BooleanArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); } } + // update the breaker with the actual bytes used. + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, true); + return block; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java index 1a5687050392e..477c8310e9708 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java @@ -100,8 +100,14 @@ default void writeTo(StreamOutput out) throws IOException { } } + /** Returns a builder using the {@link BlockFactory#getNonBreakingInstance block factory}. 
*/ + // Eventually, we want to remove this entirely, always passing an explicit BlockFactory static Builder newVectorBuilder(int estimatedSize) { - return new BooleanVectorBuilder(estimatedSize); + return newVectorBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); + } + + static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { + return blockFactory.newBooleanVectorBuilder(estimatedSize); } sealed interface Builder extends Vector.Builder permits BooleanVectorBuilder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java index 6c138b8f56d11..1727b83360cab 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.data; -import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.core.Releasables; /** @@ -16,12 +15,10 @@ */ public final class BooleanVectorBlock extends AbstractVectorBlock implements BooleanBlock { - private static final long RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(BooleanVectorBlock.class); - private final BooleanVector vector; BooleanVectorBlock(BooleanVector vector) { - super(vector.getPositionCount()); + super(vector.getPositionCount(), vector.blockFactory()); this.vector = vector; } @@ -52,7 +49,7 @@ public BooleanBlock filter(int... positions) { @Override public long ramBytesUsed() { - return RAM_BYTES_USED + RamUsageEstimator.sizeOf(vector); + return vector.ramBytesUsed(); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBuilder.java index cd1f84e03dd14..d9926227e1c60 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBuilder.java @@ -17,7 +17,10 @@ final class BooleanVectorBuilder extends AbstractVectorBuilder implements Boolea private boolean[] values; - BooleanVectorBuilder(int estimatedSize) { + BooleanVectorBuilder(int estimatedSize, BlockFactory blockFactory) { + super(blockFactory); + int initialSize = Math.max(estimatedSize, 2); + adjustBreaker(initialSize); values = new boolean[Math.max(estimatedSize, 2)]; } @@ -29,6 +32,11 @@ public BooleanVectorBuilder appendBoolean(boolean value) { return this; } + @Override + protected int elementSize() { + return Byte.BYTES; + } + @Override protected int valuesLength() { return values.length; @@ -41,12 +49,17 @@ protected void growValuesArray(int newSize) { @Override public BooleanVector build() { + BooleanVector vector; if (valueCount == 1) { - return new ConstantBooleanVector(values[0], 1); - } - if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { - values = Arrays.copyOf(values, valueCount); + vector = new ConstantBooleanVector(values[0], 1, blockFactory); + } else { + if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { + values = Arrays.copyOf(values, valueCount); + } + vector = new BooleanArrayVector(values, valueCount, blockFactory); } - return new BooleanArrayVector(values, 
valueCount); + // update the breaker with the actual bytes used. + blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, true); + return vector; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java index 47c75862c6370..426731ac06798 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.core.Releasables; import java.util.BitSet; import java.util.stream.IntStream; @@ -25,7 +26,18 @@ public final class BytesRefArrayBlock extends AbstractArrayBlock implements Byte private final BytesRefArray values; public BytesRefArrayBlock(BytesRefArray values, int positionCount, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { - super(positionCount, firstValueIndexes, nulls, mvOrdering); + this(values, positionCount, firstValueIndexes, nulls, mvOrdering, BlockFactory.getNonBreakingInstance()); + } + + public BytesRefArrayBlock( + BytesRefArray values, + int positionCount, + int[] firstValueIndexes, + BitSet nulls, + MvOrdering mvOrdering, + BlockFactory blockFactory + ) { + super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); this.values = values; } @@ -59,7 +71,7 @@ public BytesRefBlock expand() { return new BytesRefArrayVector(values, end).asBlock(); } int[] firstValues = IntStream.range(0, end + 1).toArray(); - return new BytesRefArrayBlock(values, end, firstValues, shiftNullsToExpandedPositions(), MvOrdering.UNORDERED); + return new BytesRefArrayBlock(values, end, firstValues, shiftNullsToExpandedPositions(), MvOrdering.UNORDERED, blockFactory); } public static long ramBytesEstimated(BytesRefArray values, int[] firstValueIndexes, BitSet nullsMask) { @@ -99,6 +111,7 @@ public String toString() { @Override public void close() { - // no-op + blockFactory.adjustBreaker(-(ramBytesUsed() - values.ramBytesUsed()), true); + Releasables.closeExpectNoException(values); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java index 21422598a0bde..fc32519a6acce 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java @@ -23,7 +23,11 @@ public final class BytesRefArrayVector extends AbstractVector implements BytesRe private final BytesRefArray values; public BytesRefArrayVector(BytesRefArray values, int positionCount) { - super(positionCount); + this(values, positionCount, BlockFactory.getNonBreakingInstance()); + } + + public BytesRefArrayVector(BytesRefArray values, int positionCount, BlockFactory blockFactory) { + super(positionCount, blockFactory); this.values = values; } @@ -81,6 +85,7 @@ public String toString() { @Override public void close() { + blockFactory.adjustBreaker(-BASE_RAM_BYTES_USED, true); Releasables.closeExpectNoException(values); } } diff --git 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java index 30cce3dbf0bad..e8cf8926d3cd2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java @@ -162,12 +162,24 @@ static int hash(BytesRefBlock block) { return result; } + /** Returns a builder using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + // Eventually, we want to remove this entirely, always passing an explicit BlockFactory static Builder newBlockBuilder(int estimatedSize) { - return new BytesRefBlockBuilder(estimatedSize); + return newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); } + static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { + return blockFactory.newBytesRefBlockBuilder(estimatedSize); + } + + /** Returns a block using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + // Eventually, we want to remove this entirely, always passing an explicit BlockFactory static BytesRefBlock newConstantBlockWith(BytesRef value, int positions) { - return new ConstantBytesRefVector(value, positions).asBlock(); + return newConstantBlockWith(value, positions, BlockFactory.getNonBreakingInstance()); + } + + static BytesRefBlock newConstantBlockWith(BytesRef value, int positions, BlockFactory blockFactory) { + return blockFactory.newConstantBytesRefBlockWith(value, positions); } sealed interface Builder extends Block.Builder permits BytesRefBlockBuilder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java index ed80dcf28fb8e..23c18d2a9ca6e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.core.Releasables; /** * Block build of BytesRefBlocks. 
@@ -19,11 +20,12 @@ final class BytesRefBlockBuilder extends AbstractBlockBuilder implements BytesRe private BytesRefArray values; - BytesRefBlockBuilder(int estimatedSize) { - this(estimatedSize, BigArrays.NON_RECYCLING_INSTANCE); + BytesRefBlockBuilder(int estimatedSize, BlockFactory blockFactory) { + this(estimatedSize, BigArrays.NON_RECYCLING_INSTANCE, blockFactory); } - BytesRefBlockBuilder(int estimatedSize, BigArrays bigArrays) { + BytesRefBlockBuilder(int estimatedSize, BigArrays bigArrays, BlockFactory blockFactory) { + super(blockFactory); values = new BytesRefArray(Math.max(estimatedSize, 2), bigArrays); } @@ -37,6 +39,11 @@ public BytesRefBlockBuilder appendBytesRef(BytesRef value) { return this; } + @Override + protected int elementSize() { + return -1; + } + @Override protected int valuesLength() { return Integer.MAX_VALUE; // allow the BytesRefArray through its own append @@ -185,14 +192,20 @@ public BytesRefBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { @Override public BytesRefBlock build() { finish(); + BytesRefBlock block; if (hasNonNullValue && positionCount == 1 && valueCount == 1) { - return new ConstantBytesRefVector(values.get(0, new BytesRef()), 1).asBlock(); + block = new ConstantBytesRefVector(BytesRef.deepCopyOf(values.get(0, new BytesRef())), 1, blockFactory).asBlock(); + Releasables.closeExpectNoException(values); } else { + estimatedBytes += values.ramBytesUsed(); if (isDense() && singleValued()) { - return new BytesRefArrayVector(values, positionCount).asBlock(); + block = new BytesRefArrayVector(values, positionCount, blockFactory).asBlock(); } else { - return new BytesRefArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering); + block = new BytesRefArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); } } + // update the breaker with the actual bytes used. + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, true); + return block; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java index 6201ab4a3728a..ffe0b06d1f430 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java @@ -100,8 +100,14 @@ default void writeTo(StreamOutput out) throws IOException { } } + /** Returns a builder using the {@link BlockFactory#getNonBreakingInstance block factory}. 
*/ + // Eventually, we want to remove this entirely, always passing an explicit BlockFactory static Builder newVectorBuilder(int estimatedSize) { - return new BytesRefVectorBuilder(estimatedSize); + return newVectorBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); + } + + static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { + return blockFactory.newBytesRefVectorBuilder(estimatedSize); } sealed interface Builder extends Vector.Builder permits BytesRefVectorBuilder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java index 791ea6809de63..3799e9c5b7ef7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java @@ -8,7 +8,6 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.core.Releasables; /** @@ -17,12 +16,10 @@ */ public final class BytesRefVectorBlock extends AbstractVectorBlock implements BytesRefBlock { - private static final long RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(BytesRefVectorBlock.class); - private final BytesRefVector vector; BytesRefVectorBlock(BytesRefVector vector) { - super(vector.getPositionCount()); + super(vector.getPositionCount(), vector.blockFactory()); this.vector = vector; } @@ -53,7 +50,7 @@ public BytesRefBlock filter(int... positions) { @Override public long ramBytesUsed() { - return RAM_BYTES_USED + RamUsageEstimator.sizeOf(vector); + return vector.ramBytesUsed(); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBuilder.java index 6035cdf5774e4..be753771ac961 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBuilder.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.core.Releasables; /** * Block build of BytesRefBlocks. 
@@ -19,11 +20,12 @@ final class BytesRefVectorBuilder extends AbstractVectorBuilder implements Bytes private BytesRefArray values; - BytesRefVectorBuilder(int estimatedSize) { - this(estimatedSize, BigArrays.NON_RECYCLING_INSTANCE); + BytesRefVectorBuilder(int estimatedSize, BlockFactory blockFactory) { + this(estimatedSize, BigArrays.NON_RECYCLING_INSTANCE, blockFactory); } - BytesRefVectorBuilder(int estimatedSize, BigArrays bigArrays) { + BytesRefVectorBuilder(int estimatedSize, BigArrays bigArrays, BlockFactory blockFactory) { + super(blockFactory); values = new BytesRefArray(Math.max(estimatedSize, 2), bigArrays); } @@ -35,6 +37,11 @@ public BytesRefVectorBuilder appendBytesRef(BytesRef value) { return this; } + @Override + protected int elementSize() { + return -1; + } + @Override protected int valuesLength() { return Integer.MAX_VALUE; // allow the BytesRefArray through its own append @@ -47,9 +54,16 @@ protected void growValuesArray(int newSize) { @Override public BytesRefVector build() { + BytesRefVector vector; if (valueCount == 1) { - return new ConstantBytesRefVector(values.get(0, new BytesRef()), 1); + vector = new ConstantBytesRefVector(BytesRef.deepCopyOf(values.get(0, new BytesRef())), 1, blockFactory); + Releasables.closeExpectNoException(values); + } else { + estimatedBytes = values.ramBytesUsed(); + vector = new BytesRefArrayVector(values, valueCount, blockFactory); } - return new BytesRefArrayVector(values, valueCount); + // update the breaker with the actual bytes used. + blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, true); + return vector; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java index cae795a614732..7119721811401 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java @@ -20,7 +20,11 @@ public final class ConstantBooleanVector extends AbstractVector implements Boole private final boolean value; public ConstantBooleanVector(boolean value, int positionCount) { - super(positionCount); + this(value, positionCount, BlockFactory.getNonBreakingInstance()); + } + + public ConstantBooleanVector(boolean value, int positionCount, BlockFactory blockFactory) { + super(positionCount, blockFactory); this.value = value; } @@ -73,6 +77,6 @@ public String toString() { @Override public void close() { - // no-op + blockFactory.adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java index 09b8bda0e38ce..caa30a5a2148c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java @@ -21,7 +21,11 @@ public final class ConstantBytesRefVector extends AbstractVector implements Byte private final BytesRef value; public ConstantBytesRefVector(BytesRef value, int positionCount) { - super(positionCount); + this(value, positionCount, BlockFactory.getNonBreakingInstance()); + } + + public ConstantBytesRefVector(BytesRef value, 
int positionCount, BlockFactory blockFactory) { + super(positionCount, blockFactory); this.value = value; } @@ -74,6 +78,6 @@ public String toString() { @Override public void close() { - // no-op + blockFactory.adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java index b4aa5012ce2a0..be41df1188ea0 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java @@ -20,7 +20,11 @@ public final class ConstantDoubleVector extends AbstractVector implements Double private final double value; public ConstantDoubleVector(double value, int positionCount) { - super(positionCount); + this(value, positionCount, BlockFactory.getNonBreakingInstance()); + } + + public ConstantDoubleVector(double value, int positionCount, BlockFactory blockFactory) { + super(positionCount, blockFactory); this.value = value; } @@ -73,6 +77,6 @@ public String toString() { @Override public void close() { - // no-op + blockFactory.adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java index a1ccf781b18f3..4854db91fe567 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java @@ -20,7 +20,11 @@ public final class ConstantIntVector extends AbstractVector implements IntVector private final int value; public ConstantIntVector(int value, int positionCount) { - super(positionCount); + this(value, positionCount, BlockFactory.getNonBreakingInstance()); + } + + public ConstantIntVector(int value, int positionCount, BlockFactory blockFactory) { + super(positionCount, blockFactory); this.value = value; } @@ -73,6 +77,6 @@ public String toString() { @Override public void close() { - // no-op + blockFactory.adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java index 38672e5df9b04..1f33d97e9c39d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java @@ -20,7 +20,11 @@ public final class ConstantLongVector extends AbstractVector implements LongVect private final long value; public ConstantLongVector(long value, int positionCount) { - super(positionCount); + this(value, positionCount, BlockFactory.getNonBreakingInstance()); + } + + public ConstantLongVector(long value, int positionCount, BlockFactory blockFactory) { + super(positionCount, blockFactory); this.value = value; } @@ -73,6 +77,6 @@ public String toString() { @Override public void close() { - // no-op + blockFactory.adjustBreaker(-ramBytesUsed(), true); } } diff --git 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java index 6a81b10a3b107..c8d7035b31d3b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java @@ -24,7 +24,18 @@ public final class DoubleArrayBlock extends AbstractArrayBlock implements Double private final double[] values; public DoubleArrayBlock(double[] values, int positionCount, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { - super(positionCount, firstValueIndexes, nulls, mvOrdering); + this(values, positionCount, firstValueIndexes, nulls, mvOrdering, BlockFactory.getNonBreakingInstance()); + } + + public DoubleArrayBlock( + double[] values, + int positionCount, + int[] firstValueIndexes, + BitSet nulls, + MvOrdering mvOrdering, + BlockFactory blockFactory + ) { + super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); this.values = values; } @@ -58,7 +69,7 @@ public DoubleBlock expand() { return new DoubleArrayVector(values, end).asBlock(); } int[] firstValues = IntStream.range(0, end + 1).toArray(); - return new DoubleArrayBlock(values, end, firstValues, shiftNullsToExpandedPositions(), MvOrdering.UNORDERED); + return new DoubleArrayBlock(values, end, firstValues, shiftNullsToExpandedPositions(), MvOrdering.UNORDERED, blockFactory); } public static long ramBytesEstimated(double[] values, int[] firstValueIndexes, BitSet nullsMask) { @@ -98,6 +109,6 @@ public String toString() { @Override public void close() { - // no-op + blockFactory.adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java index 6f82d60ae1421..44bf852f628ca 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java @@ -22,7 +22,11 @@ public final class DoubleArrayVector extends AbstractVector implements DoubleVec private final double[] values; public DoubleArrayVector(double[] values, int positionCount) { - super(positionCount); + this(values, positionCount, BlockFactory.getNonBreakingInstance()); + } + + public DoubleArrayVector(double[] values, int positionCount, BlockFactory blockFactory) { + super(positionCount, blockFactory); this.values = values; } @@ -78,8 +82,4 @@ public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + Arrays.toString(values) + ']'; } - @Override - public void close() { - // no-op - } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java index 129d4b3c31d93..f97384bc0a4b8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java @@ -22,7 +22,11 @@ public final class DoubleBigArrayVector extends AbstractVector 
implements Double private final DoubleArray values; public DoubleBigArrayVector(DoubleArray values, int positionCount) { - super(positionCount); + this(values, positionCount, BlockFactory.getNonBreakingInstance()); + } + + public DoubleBigArrayVector(DoubleArray values, int positionCount, BlockFactory blockFactory) { + super(positionCount, blockFactory); this.values = values; } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java index cf749f20de9b2..9edd887448938 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java @@ -158,12 +158,24 @@ static int hash(DoubleBlock block) { return result; } + /** Returns a builder using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + // Eventually, we want to remove this entirely, always passing an explicit BlockFactory static Builder newBlockBuilder(int estimatedSize) { - return new DoubleBlockBuilder(estimatedSize); + return newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); } + static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { + return blockFactory.newDoubleBlockBuilder(estimatedSize); + } + + /** Returns a block using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + // Eventually, we want to remove this entirely, always passing an explicit BlockFactory static DoubleBlock newConstantBlockWith(double value, int positions) { - return new ConstantDoubleVector(value, positions).asBlock(); + return newConstantBlockWith(value, positions, BlockFactory.getNonBreakingInstance()); + } + + static DoubleBlock newConstantBlockWith(double value, int positions, BlockFactory blockFactory) { + return blockFactory.newConstantDoubleBlockWith(value, positions); } sealed interface Builder extends Block.Builder permits DoubleBlockBuilder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java index ff5b1ddf6e1d2..a97f58f3924b1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java @@ -17,8 +17,11 @@ final class DoubleBlockBuilder extends AbstractBlockBuilder implements DoubleBlo private double[] values; - DoubleBlockBuilder(int estimatedSize) { - values = new double[Math.max(estimatedSize, 2)]; + DoubleBlockBuilder(int estimatedSize, BlockFactory blockFactory) { + super(blockFactory); + int initialSize = Math.max(estimatedSize, 2); + adjustBreaker(initialSize); + values = new double[initialSize]; } @Override @@ -31,6 +34,11 @@ public DoubleBlockBuilder appendDouble(double value) { return this; } + @Override + protected int elementSize() { + return Double.BYTES; + } + @Override protected int valuesLength() { return values.length; @@ -171,17 +179,21 @@ public DoubleBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { @Override public DoubleBlock build() { finish(); + DoubleBlock block; if (hasNonNullValue && positionCount == 1 && valueCount == 1) { - return new ConstantDoubleVector(values[0], 1).asBlock(); + block = new 
ConstantDoubleVector(values[0], 1, blockFactory).asBlock(); } else { if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { values = Arrays.copyOf(values, valueCount); } if (isDense() && singleValued()) { - return new DoubleArrayVector(values, positionCount).asBlock(); + block = new DoubleArrayVector(values, positionCount, blockFactory).asBlock(); } else { - return new DoubleArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering); + block = new DoubleArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); } } + // update the breaker with the actual bytes used. + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, true); + return block; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java index 09bdcafffbfe5..8461f36fb9e7a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java @@ -101,8 +101,14 @@ default void writeTo(StreamOutput out) throws IOException { } } + /** Returns a builder using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + // Eventually, we want to remove this entirely, always passing an explicit BlockFactory static Builder newVectorBuilder(int estimatedSize) { - return new DoubleVectorBuilder(estimatedSize); + return newVectorBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); + } + + static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { + return blockFactory.newDoubleVectorBuilder(estimatedSize); } sealed interface Builder extends Vector.Builder permits DoubleVectorBuilder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java index 92243b6a53b70..dba00f6b393a9 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.data; -import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.core.Releasables; /** @@ -16,12 +15,10 @@ */ public final class DoubleVectorBlock extends AbstractVectorBlock implements DoubleBlock { - private static final long RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DoubleVectorBlock.class); - private final DoubleVector vector; DoubleVectorBlock(DoubleVector vector) { - super(vector.getPositionCount()); + super(vector.getPositionCount(), vector.blockFactory()); this.vector = vector; } @@ -52,7 +49,7 @@ public DoubleBlock filter(int... 
positions) { @Override public long ramBytesUsed() { - return RAM_BYTES_USED + RamUsageEstimator.sizeOf(vector); + return vector.ramBytesUsed(); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java index 782b43c1bd9e2..8112c5458280f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBuilder.java @@ -17,7 +17,10 @@ final class DoubleVectorBuilder extends AbstractVectorBuilder implements DoubleV private double[] values; - DoubleVectorBuilder(int estimatedSize) { + DoubleVectorBuilder(int estimatedSize, BlockFactory blockFactory) { + super(blockFactory); + int initialSize = Math.max(estimatedSize, 2); + adjustBreaker(initialSize); values = new double[Math.max(estimatedSize, 2)]; } @@ -29,6 +32,11 @@ public DoubleVectorBuilder appendDouble(double value) { return this; } + @Override + protected int elementSize() { + return Double.BYTES; + } + @Override protected int valuesLength() { return values.length; @@ -41,12 +49,17 @@ protected void growValuesArray(int newSize) { @Override public DoubleVector build() { + DoubleVector vector; if (valueCount == 1) { - return new ConstantDoubleVector(values[0], 1); - } - if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { - values = Arrays.copyOf(values, valueCount); + vector = new ConstantDoubleVector(values[0], 1, blockFactory); + } else { + if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { + values = Arrays.copyOf(values, valueCount); + } + vector = new DoubleArrayVector(values, valueCount, blockFactory); } - return new DoubleArrayVector(values, valueCount); + // update the breaker with the actual bytes used. + blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, true); + return vector; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterBooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterBooleanVector.java index 2ff1dcd58dea5..4bf1a3b986eb3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterBooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterBooleanVector.java @@ -21,7 +21,7 @@ public final class FilterBooleanVector extends AbstractFilterVector implements B private final BooleanVector vector; FilterBooleanVector(BooleanVector vector, int... 
positions) { - super(positions); + super(positions, vector.blockFactory()); this.vector = vector; } @@ -90,6 +90,11 @@ private void appendValues(StringBuilder sb) { } } + @Override + public BlockFactory blockFactory() { + return vector.blockFactory(); + } + @Override public void close() { Releasables.closeExpectNoException(vector); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterBytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterBytesRefVector.java index 0d51121d3c0ec..0491a4453617d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterBytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterBytesRefVector.java @@ -22,7 +22,7 @@ public final class FilterBytesRefVector extends AbstractFilterVector implements private final BytesRefVector vector; FilterBytesRefVector(BytesRefVector vector, int... positions) { - super(positions); + super(positions, vector.blockFactory()); this.vector = vector; } @@ -91,6 +91,11 @@ private void appendValues(StringBuilder sb) { } } + @Override + public BlockFactory blockFactory() { + return vector.blockFactory(); + } + @Override public void close() { Releasables.closeExpectNoException(vector); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterDoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterDoubleVector.java index 1c1c6d1c3db02..50784f09c2b27 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterDoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterDoubleVector.java @@ -21,7 +21,7 @@ public final class FilterDoubleVector extends AbstractFilterVector implements Do private final DoubleVector vector; FilterDoubleVector(DoubleVector vector, int... positions) { - super(positions); + super(positions, vector.blockFactory()); this.vector = vector; } @@ -90,6 +90,11 @@ private void appendValues(StringBuilder sb) { } } + @Override + public BlockFactory blockFactory() { + return vector.blockFactory(); + } + @Override public void close() { Releasables.closeExpectNoException(vector); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterIntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterIntVector.java index f0833c1878b61..2d0f8551d9ccc 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterIntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterIntVector.java @@ -21,7 +21,7 @@ public final class FilterIntVector extends AbstractFilterVector implements IntVe private final IntVector vector; FilterIntVector(IntVector vector, int... 
positions) { - super(positions); + super(positions, vector.blockFactory()); this.vector = vector; } @@ -90,6 +90,11 @@ private void appendValues(StringBuilder sb) { } } + @Override + public BlockFactory blockFactory() { + return vector.blockFactory(); + } + @Override public void close() { Releasables.closeExpectNoException(vector); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterLongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterLongVector.java index 5eb987863aa80..d88357deaadfb 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterLongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FilterLongVector.java @@ -21,7 +21,7 @@ public final class FilterLongVector extends AbstractFilterVector implements Long private final LongVector vector; FilterLongVector(LongVector vector, int... positions) { - super(positions); + super(positions, vector.blockFactory()); this.vector = vector; } @@ -90,6 +90,11 @@ private void appendValues(StringBuilder sb) { } } + @Override + public BlockFactory blockFactory() { + return vector.blockFactory(); + } + @Override public void close() { Releasables.closeExpectNoException(vector); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java index 8df36ff95117b..782e45a6df463 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java @@ -24,7 +24,18 @@ public final class IntArrayBlock extends AbstractArrayBlock implements IntBlock private final int[] values; public IntArrayBlock(int[] values, int positionCount, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { - super(positionCount, firstValueIndexes, nulls, mvOrdering); + this(values, positionCount, firstValueIndexes, nulls, mvOrdering, BlockFactory.getNonBreakingInstance()); + } + + public IntArrayBlock( + int[] values, + int positionCount, + int[] firstValueIndexes, + BitSet nulls, + MvOrdering mvOrdering, + BlockFactory blockFactory + ) { + super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); this.values = values; } @@ -58,7 +69,7 @@ public IntBlock expand() { return new IntArrayVector(values, end).asBlock(); } int[] firstValues = IntStream.range(0, end + 1).toArray(); - return new IntArrayBlock(values, end, firstValues, shiftNullsToExpandedPositions(), MvOrdering.UNORDERED); + return new IntArrayBlock(values, end, firstValues, shiftNullsToExpandedPositions(), MvOrdering.UNORDERED, blockFactory); } public static long ramBytesEstimated(int[] values, int[] firstValueIndexes, BitSet nullsMask) { @@ -98,6 +109,6 @@ public String toString() { @Override public void close() { - // no-op + blockFactory.adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java index d8e15408d4492..9f39c74c2e2a3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java +++ 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java @@ -22,7 +22,11 @@ public final class IntArrayVector extends AbstractVector implements IntVector { private final int[] values; public IntArrayVector(int[] values, int positionCount) { - super(positionCount); + this(values, positionCount, BlockFactory.getNonBreakingInstance()); + } + + public IntArrayVector(int[] values, int positionCount, BlockFactory blockFactory) { + super(positionCount, blockFactory); this.values = values; } @@ -78,8 +82,4 @@ public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + Arrays.toString(values) + ']'; } - @Override - public void close() { - // no-op - } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java index 2058006eb45bb..f3bcc1ef9bb01 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java @@ -22,7 +22,11 @@ public final class IntBigArrayVector extends AbstractVector implements IntVector private final IntArray values; public IntBigArrayVector(IntArray values, int positionCount) { - super(positionCount); + this(values, positionCount, BlockFactory.getNonBreakingInstance()); + } + + public IntBigArrayVector(IntArray values, int positionCount, BlockFactory blockFactory) { + super(positionCount, blockFactory); this.values = values; } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java index 0fbcbe6c56362..d6f39de6fc938 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java @@ -157,12 +157,24 @@ static int hash(IntBlock block) { return result; } + /** Returns a builder using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + // Eventually, we want to remove this entirely, always passing an explicit BlockFactory static Builder newBlockBuilder(int estimatedSize) { - return new IntBlockBuilder(estimatedSize); + return newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); } + static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { + return blockFactory.newIntBlockBuilder(estimatedSize); + } + + /** Returns a block using the {@link BlockFactory#getNonBreakingInstance block factory}. 
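The pattern above repeats for every element type: the existing one-argument entry points keep working by delegating to the non-breaking factory, while the new overloads accept an explicit BlockFactory so the builder can charge that factory's breaker as it allocates. A minimal caller-side sketch, with a no-op breaker standing in for a real breaker wired from the circuit breaker service:

    import org.elasticsearch.common.breaker.NoopCircuitBreaker;
    import org.elasticsearch.common.util.BigArrays;
    import org.elasticsearch.compute.data.BlockFactory;
    import org.elasticsearch.compute.data.IntBlock;

    class FactoryAwareBuilderExample {
        static IntBlock buildThroughFactory() {
            // Breaker-backed factory; a real caller would pass a breaker from the
            // circuit breaker service instead of this no-op placeholder.
            BlockFactory factory = BlockFactory.getInstance(
                new NoopCircuitBreaker("example-breaker"),
                BigArrays.NON_RECYCLING_INSTANCE
            );

            // New overload: the builder charges this factory's breaker as it allocates.
            IntBlock.Builder builder = IntBlock.newBlockBuilder(3, factory);
            builder.appendInt(1);
            builder.appendNull();
            builder.appendInt(2);
            IntBlock block = builder.build();

            // Old overload, unchanged behaviour: equivalent to passing
            // BlockFactory.getNonBreakingInstance() explicitly.
            IntBlock untracked = IntBlock.newBlockBuilder(3).appendInt(7).build();
            untracked.close();

            return block; // caller is responsible for closing
        }
    }

The same pair of overloads exists for the Long, Double, Boolean and BytesRef blocks and for the vector builders.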
*/ + // Eventually, we want to remove this entirely, always passing an explicit BlockFactory static IntBlock newConstantBlockWith(int value, int positions) { - return new ConstantIntVector(value, positions).asBlock(); + return newConstantBlockWith(value, positions, BlockFactory.getNonBreakingInstance()); + } + + static IntBlock newConstantBlockWith(int value, int positions, BlockFactory blockFactory) { + return blockFactory.newConstantIntBlockWith(value, positions); } sealed interface Builder extends Block.Builder permits IntBlockBuilder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java index ba45611a7bdc7..53d379d715c9b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java @@ -17,8 +17,11 @@ final class IntBlockBuilder extends AbstractBlockBuilder implements IntBlock.Bui private int[] values; - IntBlockBuilder(int estimatedSize) { - values = new int[Math.max(estimatedSize, 2)]; + IntBlockBuilder(int estimatedSize, BlockFactory blockFactory) { + super(blockFactory); + int initialSize = Math.max(estimatedSize, 2); + adjustBreaker(initialSize); + values = new int[initialSize]; } @Override @@ -31,6 +34,11 @@ public IntBlockBuilder appendInt(int value) { return this; } + @Override + protected int elementSize() { + return Integer.BYTES; + } + @Override protected int valuesLength() { return values.length; @@ -171,17 +179,21 @@ public IntBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { @Override public IntBlock build() { finish(); + IntBlock block; if (hasNonNullValue && positionCount == 1 && valueCount == 1) { - return new ConstantIntVector(values[0], 1).asBlock(); + block = new ConstantIntVector(values[0], 1, blockFactory).asBlock(); } else { if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { values = Arrays.copyOf(values, valueCount); } if (isDense() && singleValued()) { - return new IntArrayVector(values, positionCount).asBlock(); + block = new IntArrayVector(values, positionCount, blockFactory).asBlock(); } else { - return new IntArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering); + block = new IntArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); } } + // update the breaker with the actual bytes used. + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, true); + return block; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java index 35bab4278d2fd..a6347cb5da70f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java @@ -100,8 +100,14 @@ default void writeTo(StreamOutput out) throws IOException { } } + /** Returns a builder using the {@link BlockFactory#getNonBreakingInstance block factory}. 
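When a block would hold the same value at every position, the constant path sidesteps the builder entirely: the factory stores a single value, charges the breaker once for that small block, and releases the bytes again on close(). A short sketch, assuming the caller already holds a breaker-backed factory:

    import org.elasticsearch.compute.data.BlockFactory;
    import org.elasticsearch.compute.data.IntBlock;

    class ConstantBlockExample {
        // 'factory' is whatever breaker-backed BlockFactory the caller was handed.
        static IntBlock thousandFortyTwos(BlockFactory factory) {
            // One stored value, 1000 positions; ramBytesUsed() is charged against the
            // factory's breaker on creation and released again on close().
            IntBlock block = IntBlock.newConstantBlockWith(42, 1000, factory);
            assert block.getPositionCount() == 1000;
            return block;
        }
    }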
*/ + // Eventually, we want to remove this entirely, always passing an explicit BlockFactory static Builder newVectorBuilder(int estimatedSize) { - return new IntVectorBuilder(estimatedSize); + return newVectorBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); + } + + static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { + return blockFactory.newIntVectorBuilder(estimatedSize); } /** Create a vector for a range of ints. */ diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java index 20499fe900558..b91c4c8dbeefa 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.data; -import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.core.Releasables; /** @@ -16,12 +15,10 @@ */ public final class IntVectorBlock extends AbstractVectorBlock implements IntBlock { - private static final long RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(IntVectorBlock.class); - private final IntVector vector; IntVectorBlock(IntVector vector) { - super(vector.getPositionCount()); + super(vector.getPositionCount(), vector.blockFactory()); this.vector = vector; } @@ -52,7 +49,7 @@ public IntBlock filter(int... positions) { @Override public long ramBytesUsed() { - return RAM_BYTES_USED + RamUsageEstimator.sizeOf(vector); + return vector.ramBytesUsed(); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBuilder.java index 9ae625152ce8e..8bf4a4a96c5cb 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBuilder.java @@ -17,7 +17,10 @@ final class IntVectorBuilder extends AbstractVectorBuilder implements IntVector. private int[] values; - IntVectorBuilder(int estimatedSize) { + IntVectorBuilder(int estimatedSize, BlockFactory blockFactory) { + super(blockFactory); + int initialSize = Math.max(estimatedSize, 2); + adjustBreaker(initialSize); values = new int[Math.max(estimatedSize, 2)]; } @@ -29,6 +32,11 @@ public IntVectorBuilder appendInt(int value) { return this; } + @Override + protected int elementSize() { + return Integer.BYTES; + } + @Override protected int valuesLength() { return values.length; @@ -41,12 +49,17 @@ protected void growValuesArray(int newSize) { @Override public IntVector build() { + IntVector vector; if (valueCount == 1) { - return new ConstantIntVector(values[0], 1); - } - if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { - values = Arrays.copyOf(values, valueCount); + vector = new ConstantIntVector(values[0], 1, blockFactory); + } else { + if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { + values = Arrays.copyOf(values, valueCount); + } + vector = new IntArrayVector(values, valueCount, blockFactory); } - return new IntArrayVector(values, valueCount); + // update the breaker with the actual bytes used. 
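The reconciliation that follows is the third and last accounting step in a builder's life: the constructor charges a rough estimate, ensureCapacity() charges each growth delta, and build() adds ramBytesUsed() minus the running estimate so the breaker ends up reflecting exactly the finished vector, which close() later hands back. A caller-side sketch of that lifecycle, assuming a breaker-backed factory is passed in:

    import org.elasticsearch.compute.data.BlockFactory;
    import org.elasticsearch.compute.data.IntVector;

    class VectorBuilderLifecycleExample {
        static void buildAndRelease(BlockFactory factory) {
            IntVector.Builder builder = IntVector.newVectorBuilder(10, factory);
            for (int i = 0; i < 100; i++) {
                builder.appendInt(i);           // growth, if needed, charges the delta first
            }
            IntVector vector = builder.build(); // reconciles the estimate with ramBytesUsed()
            try {
                // ... use the vector ...
            } finally {
                vector.close();                 // returns ramBytesUsed() to the breaker
            }
        }
    }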
+ blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, true); + return vector; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java index 9a1681a97a27c..5d6c3d2931a85 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java @@ -24,7 +24,18 @@ public final class LongArrayBlock extends AbstractArrayBlock implements LongBloc private final long[] values; public LongArrayBlock(long[] values, int positionCount, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { - super(positionCount, firstValueIndexes, nulls, mvOrdering); + this(values, positionCount, firstValueIndexes, nulls, mvOrdering, BlockFactory.getNonBreakingInstance()); + } + + public LongArrayBlock( + long[] values, + int positionCount, + int[] firstValueIndexes, + BitSet nulls, + MvOrdering mvOrdering, + BlockFactory blockFactory + ) { + super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); this.values = values; } @@ -58,7 +69,7 @@ public LongBlock expand() { return new LongArrayVector(values, end).asBlock(); } int[] firstValues = IntStream.range(0, end + 1).toArray(); - return new LongArrayBlock(values, end, firstValues, shiftNullsToExpandedPositions(), MvOrdering.UNORDERED); + return new LongArrayBlock(values, end, firstValues, shiftNullsToExpandedPositions(), MvOrdering.UNORDERED, blockFactory); } public static long ramBytesEstimated(long[] values, int[] firstValueIndexes, BitSet nullsMask) { @@ -98,6 +109,6 @@ public String toString() { @Override public void close() { - // no-op + blockFactory.adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java index a9d7cdfb40bf8..b4d467d44af3e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java @@ -22,7 +22,11 @@ public final class LongArrayVector extends AbstractVector implements LongVector private final long[] values; public LongArrayVector(long[] values, int positionCount) { - super(positionCount); + this(values, positionCount, BlockFactory.getNonBreakingInstance()); + } + + public LongArrayVector(long[] values, int positionCount, BlockFactory blockFactory) { + super(positionCount, blockFactory); this.values = values; } @@ -78,8 +82,4 @@ public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + Arrays.toString(values) + ']'; } - @Override - public void close() { - // no-op - } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java index 6db8675a8d69f..3d3ac9ea09e32 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java @@ -22,7 +22,11 @@ public final 
class LongBigArrayVector extends AbstractVector implements LongVect private final LongArray values; public LongBigArrayVector(LongArray values, int positionCount) { - super(positionCount); + this(values, positionCount, BlockFactory.getNonBreakingInstance()); + } + + public LongBigArrayVector(LongArray values, int positionCount, BlockFactory blockFactory) { + super(positionCount, blockFactory); this.values = values; } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java index 965c59e3b7f73..d3dc5928cb543 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java @@ -158,12 +158,24 @@ static int hash(LongBlock block) { return result; } + /** Returns a builder using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + // Eventually, we want to remove this entirely, always passing an explicit BlockFactory static Builder newBlockBuilder(int estimatedSize) { - return new LongBlockBuilder(estimatedSize); + return newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); } + static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { + return blockFactory.newLongBlockBuilder(estimatedSize); + } + + /** Returns a block using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + // Eventually, we want to remove this entirely, always passing an explicit BlockFactory static LongBlock newConstantBlockWith(long value, int positions) { - return new ConstantLongVector(value, positions).asBlock(); + return newConstantBlockWith(value, positions, BlockFactory.getNonBreakingInstance()); + } + + static LongBlock newConstantBlockWith(long value, int positions, BlockFactory blockFactory) { + return blockFactory.newConstantLongBlockWith(value, positions); } sealed interface Builder extends Block.Builder permits LongBlockBuilder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java index 9834de886d904..a378b382ce31e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java @@ -17,8 +17,11 @@ final class LongBlockBuilder extends AbstractBlockBuilder implements LongBlock.B private long[] values; - LongBlockBuilder(int estimatedSize) { - values = new long[Math.max(estimatedSize, 2)]; + LongBlockBuilder(int estimatedSize, BlockFactory blockFactory) { + super(blockFactory); + int initialSize = Math.max(estimatedSize, 2); + adjustBreaker(initialSize); + values = new long[initialSize]; } @Override @@ -31,6 +34,11 @@ public LongBlockBuilder appendLong(long value) { return this; } + @Override + protected int elementSize() { + return Long.BYTES; + } + @Override protected int valuesLength() { return values.length; @@ -171,17 +179,21 @@ public LongBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { @Override public LongBlock build() { finish(); + LongBlock block; if (hasNonNullValue && positionCount == 1 && valueCount == 1) { - return new ConstantLongVector(values[0], 1).asBlock(); + block = new ConstantLongVector(values[0], 1, 
blockFactory).asBlock(); } else { if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { values = Arrays.copyOf(values, valueCount); } if (isDense() && singleValued()) { - return new LongArrayVector(values, positionCount).asBlock(); + block = new LongArrayVector(values, positionCount, blockFactory).asBlock(); } else { - return new LongArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering); + block = new LongArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); } } + // update the breaker with the actual bytes used. + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, true); + return block; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java index c931613f9e40c..6e447dde251a4 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java @@ -101,8 +101,14 @@ default void writeTo(StreamOutput out) throws IOException { } } + /** Returns a builder using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + // Eventually, we want to remove this entirely, always passing an explicit BlockFactory static Builder newVectorBuilder(int estimatedSize) { - return new LongVectorBuilder(estimatedSize); + return newVectorBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); + } + + static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { + return blockFactory.newLongVectorBuilder(estimatedSize); } sealed interface Builder extends Vector.Builder permits LongVectorBuilder { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java index cf8fc931f1351..3b3d13bf9c36a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.data; -import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.core.Releasables; /** @@ -16,12 +15,10 @@ */ public final class LongVectorBlock extends AbstractVectorBlock implements LongBlock { - private static final long RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LongVectorBlock.class); - private final LongVector vector; LongVectorBlock(LongVector vector) { - super(vector.getPositionCount()); + super(vector.getPositionCount(), vector.blockFactory()); this.vector = vector; } @@ -52,7 +49,7 @@ public LongBlock filter(int... 
positions) { @Override public long ramBytesUsed() { - return RAM_BYTES_USED + RamUsageEstimator.sizeOf(vector); + return vector.ramBytesUsed(); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBuilder.java index ba4864bdde812..10daed94a966e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBuilder.java @@ -17,7 +17,10 @@ final class LongVectorBuilder extends AbstractVectorBuilder implements LongVecto private long[] values; - LongVectorBuilder(int estimatedSize) { + LongVectorBuilder(int estimatedSize, BlockFactory blockFactory) { + super(blockFactory); + int initialSize = Math.max(estimatedSize, 2); + adjustBreaker(initialSize); values = new long[Math.max(estimatedSize, 2)]; } @@ -29,6 +32,11 @@ public LongVectorBuilder appendLong(long value) { return this; } + @Override + protected int elementSize() { + return Long.BYTES; + } + @Override protected int valuesLength() { return values.length; @@ -41,12 +49,17 @@ protected void growValuesArray(int newSize) { @Override public LongVector build() { + LongVector vector; if (valueCount == 1) { - return new ConstantLongVector(values[0], 1); - } - if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { - values = Arrays.copyOf(values, valueCount); + vector = new ConstantLongVector(values[0], 1, blockFactory); + } else { + if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { + values = Arrays.copyOf(values, valueCount); + } + vector = new LongArrayVector(values, valueCount, blockFactory); } - return new LongArrayVector(values, valueCount); + // update the breaker with the actual bytes used. 
+ blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, true); + return vector; } } diff --git a/x-pack/plugin/esql/compute/src/main/java/module-info.java b/x-pack/plugin/esql/compute/src/main/java/module-info.java index 69aa6f5bb217a..3104fb05280eb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/module-info.java +++ b/x-pack/plugin/esql/compute/src/main/java/module-info.java @@ -6,6 +6,8 @@ */ module org.elasticsearch.compute { + uses org.elasticsearch.compute.data.BlockFactoryParameters; + requires org.apache.lucene.core; requires org.elasticsearch.base; requires org.elasticsearch.server; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefLongBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefLongBlockHash.java index 50fd1bb7b0943..0c5b60f471f8c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefLongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefLongBlockHash.java @@ -74,6 +74,7 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { } else { new AddBlock(block1, block2, addInput).add(); } + Releasables.closeExpectNoException(block1, block2); } public IntVector add(BytesRefVector vector1, LongVector vector2) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java index 4fcd9735f6158..7e5f3c94b91cb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java @@ -20,6 +20,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.MultivalueDedupe; import org.elasticsearch.compute.operator.MultivalueDedupeInt; +import org.elasticsearch.core.Releasables; import java.util.BitSet; @@ -52,6 +53,7 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { } else { addInput.add(0, add(vector)); } + Releasables.closeExpectNoException(block); } private IntVector add(IntVector vector) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java index 5e5b46ae6eda1..b8b66e2197b63 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java @@ -13,7 +13,6 @@ import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.SeenGroupIds; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongArrayBlock; @@ -63,7 +62,7 @@ private IntVector add(LongVector vector) { for (int i = 0; i < vector.getPositionCount(); i++) { groups[i] = Math.toIntExact(hashOrdToGroupNullReserved(longHash.add(vector.getLong(i)))); } - return new IntArrayVector(groups, 
groups.length); + return vector.blockFactory().newIntArrayVector(groups, groups.length); } private IntBlock add(LongBlock block) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractArrayBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractArrayBlock.java index 8fb91e4a07a5c..fe1ecbec92e5b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractArrayBlock.java @@ -18,16 +18,22 @@ abstract class AbstractArrayBlock extends AbstractBlock { /** * @param positionCount the number of values in this block */ - protected AbstractArrayBlock(int positionCount, MvOrdering mvOrdering) { - super(positionCount); + protected AbstractArrayBlock(int positionCount, MvOrdering mvOrdering, BlockFactory blockFactory) { + super(positionCount, blockFactory); this.mvOrdering = mvOrdering; } /** * @param positionCount the number of values in this block */ - protected AbstractArrayBlock(int positionCount, @Nullable int[] firstValueIndexes, @Nullable BitSet nullsMask, MvOrdering mvOrdering) { - super(positionCount, firstValueIndexes, nullsMask); + protected AbstractArrayBlock( + int positionCount, + @Nullable int[] firstValueIndexes, + @Nullable BitSet nullsMask, + MvOrdering mvOrdering, + BlockFactory blockFactory + ) { + super(positionCount, firstValueIndexes, nullsMask, blockFactory); this.mvOrdering = mvOrdering; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlock.java index 2d8d75f6c3972..cf8a0d9a833ec 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlock.java @@ -21,12 +21,15 @@ abstract class AbstractBlock implements Block { @Nullable protected final BitSet nullsMask; + protected final BlockFactory blockFactory; + /** * @param positionCount the number of values in this block */ - protected AbstractBlock(int positionCount) { + protected AbstractBlock(int positionCount, BlockFactory blockFactory) { assert positionCount >= 0; this.positionCount = positionCount; + this.blockFactory = blockFactory; this.firstValueIndexes = null; this.nullsMask = null; } @@ -34,9 +37,10 @@ protected AbstractBlock(int positionCount) { /** * @param positionCount the number of values in this block */ - protected AbstractBlock(int positionCount, @Nullable int[] firstValueIndexes, @Nullable BitSet nullsMask) { + protected AbstractBlock(int positionCount, @Nullable int[] firstValueIndexes, @Nullable BitSet nullsMask, BlockFactory blockFactory) { assert positionCount >= 0; this.positionCount = positionCount; + this.blockFactory = blockFactory; this.firstValueIndexes = firstValueIndexes; this.nullsMask = nullsMask == null || nullsMask.isEmpty() ? 
null : nullsMask; assert (firstValueIndexes == null && this.nullsMask == null) == false; @@ -85,4 +89,9 @@ public int nullValuesCount() { public boolean areAllValuesNull() { return nullValuesCount() == getPositionCount(); } + + @Override + public BlockFactory blockFactory() { + return blockFactory; + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java index 88e7b27adf915..a6ad5d1299543 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java @@ -13,6 +13,8 @@ abstract class AbstractBlockBuilder implements Block.Builder { + protected final BlockFactory blockFactory; + protected int[] firstValueIndexes; // lazily initialized, if multi-values protected BitSet nullsMask; // lazily initialized, if sparse @@ -28,7 +30,12 @@ abstract class AbstractBlockBuilder implements Block.Builder { protected Block.MvOrdering mvOrdering = Block.MvOrdering.UNORDERED; - protected AbstractBlockBuilder() {} + /** The number of bytes currently estimated with the breaker. */ + protected long estimatedBytes; + + protected AbstractBlockBuilder(BlockFactory blockFactory) { + this.blockFactory = blockFactory; + } @Override public AbstractBlockBuilder appendNull() { @@ -105,12 +112,16 @@ protected final void finish() { protected abstract void growValuesArray(int newSize); + /** The number of bytes used to represent each value element. */ + protected abstract int elementSize(); + protected final void ensureCapacity() { int valuesLength = valuesLength(); if (valueCount < valuesLength) { return; } int newSize = calculateNewArraySize(valuesLength); + adjustBreaker((long) (newSize - valuesLength) * elementSize()); growValuesArray(newSize); } @@ -119,8 +130,15 @@ static int calculateNewArraySize(int currentSize) { return currentSize + (currentSize >> 1); } + protected void adjustBreaker(long deltaBytes) { + blockFactory.adjustBreaker(deltaBytes, false); + estimatedBytes += deltaBytes; + } + private void setFirstValue(int position, int value) { if (position >= firstValueIndexes.length) { + final int currentSize = firstValueIndexes.length; + adjustBreaker((long) (position + 1 - currentSize) * Integer.BYTES); firstValueIndexes = Arrays.copyOf(firstValueIndexes, position + 1); } firstValueIndexes[position] = value; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractFilterBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractFilterBlock.java index 6ab1ea2063722..b7ed14f955244 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractFilterBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractFilterBlock.java @@ -100,6 +100,11 @@ public MvOrdering mvOrdering() { return block.mvOrdering(); } + @Override + public BlockFactory blockFactory() { + return block.blockFactory(); + } + private int mapPosition(int position) { assert assertPosition(position); return positions[position]; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractFilterVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractFilterVector.java index a74ff44511602..c4f9498670ae9 100644 --- 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractFilterVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractFilterVector.java @@ -17,8 +17,8 @@ abstract class AbstractFilterVector extends AbstractVector { protected final int[] positions; - protected AbstractFilterVector(int[] positions) { - super(positions.length); + protected AbstractFilterVector(int[] positions, BlockFactory blockFactory) { + super(positions.length, blockFactory); this.positions = positions; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java index 6b7ef080ae5a3..a0335bb5c24e7 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java @@ -13,9 +13,11 @@ abstract class AbstractVector implements Vector { private final int positionCount; + protected final BlockFactory blockFactory; - protected AbstractVector(int positionCount) { + protected AbstractVector(int positionCount, BlockFactory blockFactory) { this.positionCount = positionCount; + this.blockFactory = blockFactory; } public final int getPositionCount() { @@ -26,4 +28,15 @@ public final int getPositionCount() { public final Vector getRow(int position) { return filter(position); } + + @Override + public BlockFactory blockFactory() { + return blockFactory; + } + + @Override + public void close() { + blockFactory.adjustBreaker(-ramBytesUsed(), true); + } + } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java index c95a4cfa52757..d83d26cf33831 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java @@ -12,8 +12,8 @@ */ abstract class AbstractVectorBlock extends AbstractBlock { - AbstractVectorBlock(int positionCount) { - super(positionCount); + AbstractVectorBlock(int positionCount, BlockFactory blockFactory) { + super(positionCount, blockFactory); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBuilder.java index 08b7e0d5dc10f..49ce276074735 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBuilder.java @@ -10,17 +10,30 @@ abstract class AbstractVectorBuilder { protected int valueCount; + protected final BlockFactory blockFactory; + + /** The number of bytes currently estimated with the breaker. */ + protected long estimatedBytes; + + protected AbstractVectorBuilder(BlockFactory blockFactory) { + this.blockFactory = blockFactory; + } + /** The length of the internal values array. */ protected abstract int valuesLength(); protected abstract void growValuesArray(int newSize); + /** The number of bytes used to represent each value element. 
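Both abstract builders share the same grow-before-allocate discipline: the delta between the new and old array size, times elementSize(), is pushed through the breaker before the larger array is created, and the running total is kept in estimatedBytes for the final reconciliation at build(). A standalone paraphrase of that discipline; the plain long counter below only stands in for the CircuitBreaker and is not Elasticsearch code:

    // Illustration of the builders' grow-before-allocate accounting.
    class GrowthAccountingSketch {
        private long used;            // stand-in for the circuit breaker's tally
        private long estimatedBytes;  // mirrors the builders' estimatedBytes field
        private int[] values = new int[2];
        private int valueCount;

        void append(int v) {
            if (valueCount == values.length) {
                int newSize = values.length + (values.length >> 1);       // grow by ~50%
                adjust((long) (newSize - values.length) * Integer.BYTES); // charge the delta first
                values = java.util.Arrays.copyOf(values, newSize);        // then allocate
            }
            values[valueCount++] = v;
        }

        private void adjust(long delta) {
            used += delta;            // a breaker-backed factory may reject this and throw instead
            estimatedBytes += delta;
        }
    }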
*/ + protected abstract int elementSize(); + protected final void ensureCapacity() { int valuesLength = valuesLength(); if (valueCount < valuesLength) { return; } int newSize = calculateNewArraySize(valuesLength); + adjustBreaker((long) (newSize - valuesLength) * elementSize()); growValuesArray(newSize); } @@ -28,4 +41,9 @@ static int calculateNewArraySize(int currentSize) { // trivially, grows array by 50% return currentSize + (currentSize >> 1); } + + protected void adjustBreaker(long deltaBytes) { + blockFactory.adjustBreaker(deltaBytes, false); + estimatedBytes += deltaBytes; + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index d80119ad57fae..1982c937f2a17 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -58,6 +58,9 @@ public interface Block extends Accountable, NamedWriteable, Releasable { */ ElementType elementType(); + /** The block factory associated with this block. */ + BlockFactory blockFactory(); + /** * Returns true if the value stored at the given position is null, false otherwise. * @@ -116,10 +119,15 @@ enum MvOrdering { Block expand(); /** - * {@return a constant null block with the given number of positions}. + * {@return a constant null block with the given number of positions, using the non-breaking block factory}. */ + // Eventually, this should use the GLOBAL breaking instance static Block constantNullBlock(int positions) { - return new ConstantNullBlock(positions); + return constantNullBlock(positions, BlockFactory.getNonBreakingInstance()); + } + + static Block constantNullBlock(int positions, BlockFactory blockFactory) { + return blockFactory.newConstantNullBlock(positions); } interface Builder { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java new file mode 100644 index 0000000000000..ca2c4e7c453d0 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java @@ -0,0 +1,317 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
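Even all-null padding is routed through a factory now, so it is charged on creation and released on close() like any other block, and every block can report which factory owns it. A small sketch with an arbitrary position count:

    import org.elasticsearch.compute.data.Block;
    import org.elasticsearch.compute.data.BlockFactory;

    class ConstantNullExample {
        static Block nullPage(BlockFactory factory, int positions) {
            Block nulls = Block.constantNullBlock(positions, factory); // charged on creation
            assert nulls.areAllValuesNull();
            assert nulls.blockFactory() == factory;  // blocks remember their factory
            return nulls;                            // caller closes, releasing the bytes
        }
    }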
+ */ + +package org.elasticsearch.compute.data; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.compute.data.Block.MvOrdering; + +import java.util.BitSet; +import java.util.List; +import java.util.ServiceLoader; + +public class BlockFactory { + + private static final BlockFactory NON_BREAKING = BlockFactory.getInstance( + new NoopCircuitBreaker("noop-esql-breaker"), + BigArrays.NON_RECYCLING_INSTANCE + ); + + private static final BlockFactory GLOBAL = loadGlobalFactory(); + // new BlockFactory(new NoopCircuitBreaker("esql_noop_breaker"), BigArrays.NON_RECYCLING_INSTANCE); + + private static BlockFactory loadGlobalFactory() { + ServiceLoader loader = ServiceLoader.load( + BlockFactoryParameters.class, + BlockFactory.class.getClassLoader() + ); + List> impls = loader.stream().toList(); + if (impls.size() != 1) { + throw new AssertionError("expected exactly one impl, but got:" + impls); + } + BlockFactoryParameters params = impls.get(0).get(); + return new BlockFactory(params.breaker(), params.bigArrays()); + } + + private final CircuitBreaker breaker; + + private final BigArrays bigArrays; + + private BlockFactory(CircuitBreaker breaker, BigArrays bigArrays) { + this.breaker = breaker; + this.bigArrays = bigArrays; + } + + /** + * Returns the global ESQL block factory. + */ + public static BlockFactory getGlobalInstance() { + return GLOBAL; + } + + /** + * Returns the Non-Breaking block factory. + */ + public static BlockFactory getNonBreakingInstance() { + return NON_BREAKING; + } + + public static BlockFactory getInstance(CircuitBreaker breaker, BigArrays bigArrays) { + return new BlockFactory(breaker, bigArrays); + } + + // For testing + public CircuitBreaker breaker() { + return breaker; + } + + // For testing + public BigArrays bigArrays() { + return bigArrays; + } + + /** + * Adjust the circuit breaker with the given delta, if the delta is negative, the breaker will + * be adjusted without tripping. If the data was already created before calling this method, + * and the breaker trips, we add the delta without breaking to account for the created data. + * If the data has not been created yet, we do not add the delta to the breaker if it trips. + */ + void adjustBreaker(final long delta, final boolean isDataAlreadyCreated) { + // checking breaker means potentially tripping, but it doesn't + // have to if the delta is negative + if (delta > 0) { + try { + breaker.addEstimateBytesAndMaybeBreak(delta, ""); + } catch (CircuitBreakingException e) { + if (isDataAlreadyCreated) { + // since we've already created the data, we need to + // add it so closing the stream re-adjusts properly + breaker.addWithoutBreaking(delta); + } + // re-throw the original exception + throw e; + } + } else { + breaker.addWithoutBreaking(delta); + } + } + + /** Pre-adjusts the breaker for the given position count and element type. Returns the pre-adjusted amount. 
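The isDataAlreadyCreated flag carries the subtle part of the contract: when the bytes being charged belong to data that already exists, the factory records them even if the breaker trips, because the eventual close() will subtract the same amount and the ledger must stay balanced. A standalone paraphrase of that branch; the toy breaker below is illustrative and not the Elasticsearch CircuitBreaker:

    class AdjustBreakerSketch {
        static class TooMuchMemory extends RuntimeException {}

        static class ToyBreaker {
            long used;
            final long limit = 1024;

            void addAndMaybeBreak(long bytes) {
                if (used + bytes > limit) throw new TooMuchMemory();
                used += bytes;
            }

            void addWithoutBreaking(long bytes) {
                used += bytes; // also used for negative deltas, i.e. releases
            }
        }

        static void adjust(ToyBreaker breaker, long delta, boolean isDataAlreadyCreated) {
            if (delta > 0) {
                try {
                    breaker.addAndMaybeBreak(delta);
                } catch (TooMuchMemory e) {
                    if (isDataAlreadyCreated) {
                        // the allocation already happened; record it so the later
                        // close()/negative adjustment brings the ledger back to zero
                        breaker.addWithoutBreaking(delta);
                    }
                    throw e;
                }
            } else {
                breaker.addWithoutBreaking(delta); // releases never trip the breaker
            }
        }
    }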
*/ + public long preAdjustBreakerForBoolean(int positionCount) { + long bytes = (long) positionCount * Byte.BYTES; + adjustBreaker(bytes, false); + return bytes; + } + + public long preAdjustBreakerForInt(int positionCount) { + long bytes = (long) positionCount * Integer.BYTES; + adjustBreaker(bytes, false); + return bytes; + } + + public long preAdjustBreakerForLong(int positionCount) { + long bytes = (long) positionCount * Long.BYTES; + adjustBreaker(bytes, false); + return bytes; + } + + public long preAdjustBreakerForDouble(int positionCount) { + long bytes = (long) positionCount * Double.BYTES; + adjustBreaker(bytes, false); + return bytes; + } + + public BooleanBlock.Builder newBooleanBlockBuilder(int estimatedSize) { + return new BooleanBlockBuilder(estimatedSize, this); + } + + public BooleanBlock newBooleanArrayBlock( + boolean[] values, + int positionCount, + int[] firstValueIndexes, + BitSet nulls, + MvOrdering mvOrdering + ) { + var b = new BooleanArrayBlock(values, positionCount, firstValueIndexes, nulls, mvOrdering, this); + adjustBreaker(b.ramBytesUsed(), true); + return b; + } + + public BooleanVector.Builder newBooleanVectorBuilder(int estimatedSize) { + return new BooleanVectorBuilder(estimatedSize, this); + } + + public BooleanVector newBooleanArrayVector(boolean[] values, int positionCount) { + return newBooleanArrayVector(values, positionCount, 0L); + } + + public BooleanVector newBooleanArrayVector(boolean[] values, int positionCount, long preAdjustedBytes) { + var b = new BooleanArrayVector(values, positionCount, this); + adjustBreaker(b.ramBytesUsed() - preAdjustedBytes, true); + return b; + } + + public BooleanBlock newConstantBooleanBlockWith(boolean value, int positions) { + var b = new ConstantBooleanVector(value, positions, this).asBlock(); + adjustBreaker(b.ramBytesUsed(), true); + return b; + } + + public IntBlock.Builder newIntBlockBuilder(int estimatedSize) { + return new IntBlockBuilder(estimatedSize, this); + } + + public IntBlock newIntArrayBlock(int[] values, int positionCount, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { + var b = new IntArrayBlock(values, positionCount, firstValueIndexes, nulls, mvOrdering, this); + adjustBreaker(b.ramBytesUsed(), true); + return b; + } + + public IntVector.Builder newIntVectorBuilder(int estimatedSize) { + return new IntVectorBuilder(estimatedSize, this); + } + + /** + * Creates a new Vector with the given values and positionCount. Equivalent to: + * newIntArrayVector(values, positionCount, 0L); // with zero pre-adjusted bytes + */ + public IntVector newIntArrayVector(int[] values, int positionCount) { + return newIntArrayVector(values, positionCount, 0L); + } + + /** + * Creates a new Vector with the given values and positionCount, where the caller has already + * pre-adjusted a number of bytes with the factory's breaker. 
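These helpers let a caller reserve the array's worth of bytes before allocating and filling it, then pass the reservation on so the factory only charges the difference against ramBytesUsed(); the javadoc sketch just below shows the same idea inline. A compilable variant, with a placeholder fill loop:

    import org.elasticsearch.compute.data.BlockFactory;
    import org.elasticsearch.compute.data.IntVector;

    class PreAdjustExample {
        static IntVector filledVector(BlockFactory factory, int positionCount) {
            // Reserve the int array's worth of bytes up front; this may trip the
            // breaker before any allocation happens.
            long preAdjustedBytes = factory.preAdjustBreakerForInt(positionCount);
            int[] values = new int[positionCount];
            for (int i = 0; i < positionCount; i++) {
                values[i] = i; // placeholder for real work
            }
            // The factory charges ramBytesUsed() minus what was already reserved.
            return factory.newIntArrayVector(values, positionCount, preAdjustedBytes);
        }
    }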
+ * + * long preAdjustedBytes = blockFactory.preAdjustBreakerForInt(positionCount); + * int[] values = new int[positionCount]; + * for (int i = 0; i < positionCount; i++) { + * values[i] = doWhateverStuff + * } + * var vector = blockFactory.newIntArrayVector(values, positionCount, preAdjustedBytes); + */ + public IntVector newIntArrayVector(int[] values, int positionCount, long preAdjustedBytes) { + var b = new IntArrayVector(values, positionCount, this); + adjustBreaker(b.ramBytesUsed() - preAdjustedBytes, true); + return b; + } + + public IntBlock newConstantIntBlockWith(int value, int positions) { + var b = new ConstantIntVector(value, positions, this).asBlock(); + adjustBreaker(b.ramBytesUsed(), true); + return b; + } + + public LongBlock.Builder newLongBlockBuilder(int estimatedSize) { + return new LongBlockBuilder(estimatedSize, this); + } + + public LongBlock newLongArrayBlock(long[] values, int positionCount, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { + var b = new LongArrayBlock(values, positionCount, firstValueIndexes, nulls, mvOrdering, this); + adjustBreaker(b.ramBytesUsed(), true); + return b; + } + + public LongVector.Builder newLongVectorBuilder(int estimatedSize) { + return new LongVectorBuilder(estimatedSize, this); + } + + public LongVector newLongArrayVector(long[] values, int positionCount) { + return newLongArrayVector(values, positionCount, 0L); + } + + public LongVector newLongArrayVector(long[] values, int positionCount, long preAdjustedBytes) { + var b = new LongArrayVector(values, positionCount, this); + adjustBreaker(b.ramBytesUsed() - preAdjustedBytes, true); + return b; + } + + public LongBlock newConstantLongBlockWith(long value, int positions) { + var b = new ConstantLongVector(value, positions, this).asBlock(); + adjustBreaker(b.ramBytesUsed(), true); + return b; + } + + public DoubleBlock.Builder newDoubleBlockBuilder(int estimatedSize) { + return new DoubleBlockBuilder(estimatedSize, this); + } + + public DoubleBlock newDoubleArrayBlock( + double[] values, + int positionCount, + int[] firstValueIndexes, + BitSet nulls, + MvOrdering mvOrdering + ) { + var b = new DoubleArrayBlock(values, positionCount, firstValueIndexes, nulls, mvOrdering, this); + adjustBreaker(b.ramBytesUsed(), true); + return b; + } + + public DoubleVector.Builder newDoubleVectorBuilder(int estimatedSize) { + return new DoubleVectorBuilder(estimatedSize, this); + } + + public DoubleVector newDoubleArrayVector(double[] values, int positionCount) { + return newDoubleArrayVector(values, positionCount, 0L); + } + + public DoubleVector newDoubleArrayVector(double[] values, int positionCount, long preAdjustedBytes) { + var b = new DoubleArrayVector(values, positionCount, this); + adjustBreaker(b.ramBytesUsed() - preAdjustedBytes, true); + return b; + } + + public DoubleBlock newConstantDoubleBlockWith(double value, int positions) { + var b = new ConstantDoubleVector(value, positions, this).asBlock(); + adjustBreaker(b.ramBytesUsed(), true); + return b; + } + + public BytesRefBlock.Builder newBytesRefBlockBuilder(int estimatedSize) { + return new BytesRefBlockBuilder(estimatedSize, bigArrays, this); + } + + public BytesRefBlock newBytesRefArrayBlock( + BytesRefArray values, + int positionCount, + int[] firstValueIndexes, + BitSet nulls, + MvOrdering mvOrdering + ) { + var b = new BytesRefArrayBlock(values, positionCount, firstValueIndexes, nulls, mvOrdering, this); + adjustBreaker(b.ramBytesUsed() - values.ramBytesUsed(), true); + return b; + } + + public 
BytesRefVector.Builder newBytesRefVectorBuilder(int estimatedSize) { + return new BytesRefVectorBuilder(estimatedSize, bigArrays, this); + } + + public BytesRefVector newBytesRefArrayVector(BytesRefArray values, int positionCount) { + var b = new BytesRefArrayVector(values, positionCount, this); + adjustBreaker(b.ramBytesUsed() - values.ramBytesUsed(), true); + return b; + } + + public BytesRefBlock newConstantBytesRefBlockWith(BytesRef value, int positions) { + var b = new ConstantBytesRefVector(value, positions, this).asBlock(); + adjustBreaker(b.ramBytesUsed(), true); + return b; + } + + public Block newConstantNullBlock(int positions) { + var b = new ConstantNullBlock(positions, this); + adjustBreaker(b.ramBytesUsed(), true); + return b; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactoryParameters.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactoryParameters.java new file mode 100644 index 0000000000000..a9dc11635f8c0 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactoryParameters.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.data; + +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.util.BigArrays; + +/** + * Allows to inject instances of a breaker and bigArrays into the Global block factory. + * The Global factory is somewhat temporary, therefore this interface and its ServiceLoader + * machinery can be removed once the Global factory is removed. 
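Since loadGlobalFactory() resolves its parameters through ServiceLoader and asserts that exactly one implementation is present, wiring the global factory means shipping a single BlockFactoryParameters provider, registered the usual ServiceLoader way (a META-INF/services entry or a provides clause, matching the uses clause added to module-info above). A hypothetical provider with placeholder choices; a real one would return the node's breaker and BigArrays:

    import org.elasticsearch.common.breaker.CircuitBreaker;
    import org.elasticsearch.common.breaker.NoopCircuitBreaker;
    import org.elasticsearch.common.util.BigArrays;
    import org.elasticsearch.compute.data.BlockFactoryParameters;

    // Hypothetical provider class, named here only for illustration.
    public class ExampleBlockFactoryParameters implements BlockFactoryParameters {
        @Override
        public CircuitBreaker breaker() {
            return new NoopCircuitBreaker("example-global-breaker"); // placeholder
        }

        @Override
        public BigArrays bigArrays() {
            return BigArrays.NON_RECYCLING_INSTANCE; // placeholder
        }
    }

Only the global instance goes through this lookup; getNonBreakingInstance() and getInstance(breaker, bigArrays) construct their factories directly.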
+ */ +public interface BlockFactoryParameters { + + CircuitBreaker breaker(); + + BigArrays bigArrays(); +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java index 5a5ed16738810..7ad60d89ed72d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java @@ -22,8 +22,13 @@ public final class ConstantNullBlock extends AbstractBlock { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ConstantNullBlock.class); + // Eventually, this should use the GLOBAL breaking instance ConstantNullBlock(int positionCount) { - super(positionCount); + this(positionCount, BlockFactory.getNonBreakingInstance()); + } + + ConstantNullBlock(int positionCount, BlockFactory blockFactory) { + super(positionCount, blockFactory); } @Override @@ -121,7 +126,7 @@ public String toString() { @Override public void close() { - // no-op + blockFactory.adjustBreaker(-ramBytesUsed(), true); } static class Builder implements Block.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java index 7a2ea0ddd69c5..433591be5b2bb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java @@ -23,7 +23,7 @@ public class DocBlock extends AbstractVectorBlock implements Block { private final DocVector vector; DocBlock(DocVector vector) { - super(vector.getPositionCount()); + super(vector.getPositionCount(), vector.blockFactory()); this.vector = vector; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java index eb67d89c3a869..c334e48d74610 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java @@ -47,7 +47,7 @@ public class DocVector extends AbstractVector implements Vector { private int[] shardSegmentDocMapBackwards; public DocVector(IntVector shards, IntVector segments, IntVector docs, Boolean singleSegmentNonDecreasing) { - super(shards.getPositionCount()); + super(shards.getPositionCount(), null); this.shards = shards; this.segments = segments; this.docs = docs; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java index e8636e2a39970..171bdbd62f4d0 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java @@ -47,6 +47,9 @@ public interface Vector extends Accountable, Releasable { */ boolean isConstant(); + /** The block factory associated with this vector. */ + BlockFactory blockFactory(); + interface Builder { /** * Builds the block. This method can be called multiple times. 
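Exposing blockFactory() on Vector is what lets derived data stay on the same ledger as its input, as in the LongBlockHash change above where the group ids are built through vector.blockFactory(). A sketch of the same idea for an arbitrary transformation:

    import org.elasticsearch.compute.data.LongVector;

    class DerivedVectorExample {
        // Builds its output through the input's own factory, so the result is charged
        // against the same breaker that accounted for the input.
        static LongVector doubled(LongVector input) {
            long[] out = new long[input.getPositionCount()];
            for (int i = 0; i < out.length; i++) {
                out[i] = 2 * input.getLong(i);
            }
            return input.blockFactory().newLongArrayVector(out, out.length);
        }
    }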
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st index b0fbea3feccd4..dd3a914eae9f7 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st @@ -11,6 +11,7 @@ $if(BytesRef)$ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.core.Releasables; $else$ import org.apache.lucene.util.RamUsageEstimator; @@ -35,12 +36,19 @@ $else$ private final $type$[] values; $endif$ -$if(BytesRef)$ - public $Type$ArrayBlock(BytesRefArray values, int positionCount, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { -$else$ - public $Type$ArrayBlock($type$[] values, int positionCount, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { -$endif$ - super(positionCount, firstValueIndexes, nulls, mvOrdering); + public $Type$ArrayBlock($if(BytesRef)$BytesRefArray$else$$type$[]$endif$ values, int positionCount, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { + this(values, positionCount, firstValueIndexes, nulls, mvOrdering, BlockFactory.getNonBreakingInstance()); + } + + public $Type$ArrayBlock( + $if(BytesRef)$BytesRefArray$else$$type$[]$endif$ values, + int positionCount, + int[] firstValueIndexes, + BitSet nulls, + MvOrdering mvOrdering, + BlockFactory blockFactory + ) { + super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); this.values = values; } @@ -79,7 +87,7 @@ $endif$ return new $Type$ArrayVector(values, end).asBlock(); } int[] firstValues = IntStream.range(0, end + 1).toArray(); - return new $Type$ArrayBlock(values, end, firstValues, shiftNullsToExpandedPositions(), MvOrdering.UNORDERED); + return new $Type$ArrayBlock(values, end, firstValues, shiftNullsToExpandedPositions(), MvOrdering.UNORDERED, blockFactory); } public static long ramBytesEstimated($if(BytesRef)$BytesRefArray$else$$type$[]$endif$ values, int[] firstValueIndexes, BitSet nullsMask) { @@ -124,6 +132,11 @@ $endif$ @Override public void close() { - // no-op + $if(BytesRef)$ + blockFactory.adjustBreaker(-(ramBytesUsed() - values.ramBytesUsed()), true); + Releasables.closeExpectNoException(values); + $else$ + blockFactory.adjustBreaker(-ramBytesUsed(), true); + $endif$ } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st index 103e8bc22d9ed..6065e95daaae9 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st @@ -34,12 +34,12 @@ $else$ private final $type$[] values; $endif$ -$if(BytesRef)$ - public $Type$ArrayVector(BytesRefArray values, int positionCount) { -$else$ - public $Type$ArrayVector($type$[] values, int positionCount) { -$endif$ - super(positionCount); + public $Type$ArrayVector($if(BytesRef)$BytesRefArray$else$$type$[]$endif$ values, int positionCount) { + this(values, positionCount, BlockFactory.getNonBreakingInstance()); + } + + public $Type$ArrayVector($if(BytesRef)$BytesRefArray$else$$type$[]$endif$ values, int positionCount, BlockFactory blockFactory) { + 
super(positionCount, blockFactory); this.values = values; } @@ -107,12 +107,11 @@ $else$ $endif$ } +$if(BytesRef)$ @Override public void close() { -$if(BytesRef)$ + blockFactory.adjustBreaker(-BASE_RAM_BYTES_USED, true); Releasables.closeExpectNoException(values); -$else$ - // no-op -$endif$ } +$endif$ } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st index 153cf3c039145..82a2aae80c9de 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st @@ -22,7 +22,11 @@ public final class $Type$BigArrayVector extends AbstractVector implements $Type$ private final $if(boolean)$Bit$else$$Type$$endif$Array values; public $Type$BigArrayVector($if(boolean)$Bit$else$$Type$$endif$Array values, int positionCount) { - super(positionCount); + this(values, positionCount, BlockFactory.getNonBreakingInstance()); + } + + public $Type$BigArrayVector($if(boolean)$Bit$else$$Type$$endif$Array values, int positionCount, BlockFactory blockFactory) { + super(positionCount, blockFactory); this.values = values; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st index 9588cdbe9c353..3f626e463f428 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st @@ -194,12 +194,24 @@ $endif$ return result; } + /** Returns a builder using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + // Eventually, we want to remove this entirely, always passing an explicit BlockFactory static Builder newBlockBuilder(int estimatedSize) { - return new $Type$BlockBuilder(estimatedSize); + return newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); } + static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { + return blockFactory.new$Type$BlockBuilder(estimatedSize); + } + + /** Returns a block using the {@link BlockFactory#getNonBreakingInstance block factory}. 
*/ + // Eventually, we want to remove this entirely, always passing an explicit BlockFactory static $Type$Block newConstantBlockWith($type$ value, int positions) { - return new Constant$Type$Vector(value, positions).asBlock(); + return newConstantBlockWith(value, positions, BlockFactory.getNonBreakingInstance()); + } + + static $Type$Block newConstantBlockWith($type$ value, int positions, BlockFactory blockFactory) { + return blockFactory.newConstant$Type$BlockWith(value, positions); } sealed interface Builder extends Block.Builder permits $Type$BlockBuilder { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st index fc407361c04ba..4d43f25577cc5 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st @@ -11,6 +11,7 @@ $if(BytesRef)$ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.core.Releasables; $else$ import java.util.Arrays; @@ -25,19 +26,23 @@ final class $Type$BlockBuilder extends AbstractBlockBuilder implements $Type$Blo $if(BytesRef)$ private BytesRefArray values; - BytesRefBlockBuilder(int estimatedSize) { - this(estimatedSize, BigArrays.NON_RECYCLING_INSTANCE); + BytesRefBlockBuilder(int estimatedSize, BlockFactory blockFactory) { + this(estimatedSize, BigArrays.NON_RECYCLING_INSTANCE, blockFactory); } - BytesRefBlockBuilder(int estimatedSize, BigArrays bigArrays) { + BytesRefBlockBuilder(int estimatedSize, BigArrays bigArrays, BlockFactory blockFactory) { + super(blockFactory); values = new BytesRefArray(Math.max(estimatedSize, 2), bigArrays); } $else$ private $type$[] values; - $Type$BlockBuilder(int estimatedSize) { - values = new $type$[Math.max(estimatedSize, 2)]; + $Type$BlockBuilder(int estimatedSize, BlockFactory blockFactory) { + super(blockFactory); + int initialSize = Math.max(estimatedSize, 2); + adjustBreaker(initialSize); + values = new $type$[initialSize]; } $endif$ @@ -55,6 +60,11 @@ $endif$ return this; } + @Override + protected int elementSize() { + return $if(BytesRef)$-1$else$$BYTES$$endif$; + } + @Override protected int valuesLength() { $if(BytesRef)$ @@ -235,22 +245,28 @@ $endif$ @Override public $Type$Block build() { finish(); + $Type$Block block; if (hasNonNullValue && positionCount == 1 && valueCount == 1) { $if(BytesRef)$ - return new ConstantBytesRefVector(values.get(0, new BytesRef()), 1).asBlock(); + block = new ConstantBytesRefVector(BytesRef.deepCopyOf(values.get(0, new BytesRef())), 1, blockFactory).asBlock(); + Releasables.closeExpectNoException(values); } else { + estimatedBytes += values.ramBytesUsed(); $else$ - return new Constant$Type$Vector(values[0], 1).asBlock(); + block = new Constant$Type$Vector(values[0], 1, blockFactory).asBlock(); } else { if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { values = Arrays.copyOf(values, valueCount); } $endif$ if (isDense() && singleValued()) { - return new $Type$ArrayVector(values, positionCount).asBlock(); + block = new $Type$ArrayVector(values, positionCount, blockFactory).asBlock(); } else { - return new $Type$ArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering); + block = new $Type$ArrayBlock(values, positionCount, firstValueIndexes, 
nullsMask, mvOrdering, blockFactory); } } + // update the breaker with the actual bytes used. + blockFactory.adjustBreaker(block.ramBytesUsed() - estimatedBytes, true); + return block; } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st index 8f6c911dc4ebb..d1b724b37cd83 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st @@ -23,7 +23,11 @@ public final class Constant$Type$Vector extends AbstractVector implements $Type$ private final $type$ value; public Constant$Type$Vector($type$ value, int positionCount) { - super(positionCount); + this(value, positionCount, BlockFactory.getNonBreakingInstance()); + } + + public Constant$Type$Vector($type$ value, int positionCount, BlockFactory blockFactory) { + super(positionCount, blockFactory); this.value = value; } @@ -80,6 +84,6 @@ $endif$ @Override public void close() { - // no-op + blockFactory.adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-FilterVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-FilterVector.java.st index e68fc6838d3f4..9d67e8182fc17 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-FilterVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-FilterVector.java.st @@ -24,7 +24,7 @@ public final class Filter$Type$Vector extends AbstractFilterVector implements $T private final $Type$Vector vector; Filter$Type$Vector($Type$Vector vector, int... positions) { - super(positions); + super(positions, vector.blockFactory()); this.vector = vector; } @@ -102,6 +102,11 @@ $endif$ } } + @Override + public BlockFactory blockFactory() { + return vector.blockFactory(); + } + @Override public void close() { Releasables.closeExpectNoException(vector); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st index f48ad43faefc8..4acb4243c131a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st @@ -142,8 +142,14 @@ $endif$ } } + /** Returns a builder using the {@link BlockFactory#getNonBreakingInstance block factory}. 
*/ + // Eventually, we want to remove this entirely, always passing an explicit BlockFactory static Builder newVectorBuilder(int estimatedSize) { - return new $Type$VectorBuilder(estimatedSize); + return newVectorBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); + } + + static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { + return blockFactory.new$Type$VectorBuilder(estimatedSize); } $if(int)$ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st index 3abc702839118..0f97119d971a4 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st @@ -10,7 +10,6 @@ package org.elasticsearch.compute.data; $if(BytesRef)$ import org.apache.lucene.util.BytesRef; $endif$ -import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.core.Releasables; /** @@ -19,12 +18,10 @@ import org.elasticsearch.core.Releasables; */ public final class $Type$VectorBlock extends AbstractVectorBlock implements $Type$Block { - private static final long RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance($Type$VectorBlock.class); - private final $Type$Vector vector; $Type$VectorBlock($Type$Vector vector) { - super(vector.getPositionCount()); + super(vector.getPositionCount(), vector.blockFactory()); this.vector = vector; } @@ -60,7 +57,7 @@ $endif$ @Override public long ramBytesUsed() { - return RAM_BYTES_USED + RamUsageEstimator.sizeOf(vector); + return vector.ramBytesUsed(); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBuilder.java.st index 4c4747e949bff..09e95e16c303d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBuilder.java.st @@ -11,6 +11,7 @@ $if(BytesRef)$ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.core.Releasables; $else$ import java.util.Arrays; @@ -25,18 +26,22 @@ final class $Type$VectorBuilder extends AbstractVectorBuilder implements $Type$V $if(BytesRef)$ private BytesRefArray values; - BytesRefVectorBuilder(int estimatedSize) { - this(estimatedSize, BigArrays.NON_RECYCLING_INSTANCE); + BytesRefVectorBuilder(int estimatedSize, BlockFactory blockFactory) { + this(estimatedSize, BigArrays.NON_RECYCLING_INSTANCE, blockFactory); } - BytesRefVectorBuilder(int estimatedSize, BigArrays bigArrays) { + BytesRefVectorBuilder(int estimatedSize, BigArrays bigArrays, BlockFactory blockFactory) { + super(blockFactory); values = new BytesRefArray(Math.max(estimatedSize, 2), bigArrays); } $else$ private $type$[] values; - $Type$VectorBuilder(int estimatedSize) { + $Type$VectorBuilder(int estimatedSize, BlockFactory blockFactory) { + super(blockFactory); + int initialSize = Math.max(estimatedSize, 2); + adjustBreaker(initialSize); values = new $type$[Math.max(estimatedSize, 2)]; } $endif$ @@ -53,6 +58,11 @@ $endif$ return this; } + @Override + protected int elementSize() { + return $if(BytesRef)$-1$else$$BYTES$$endif$; + } + @Override protected int 
valuesLength() { $if(BytesRef)$ @@ -73,17 +83,24 @@ $endif$ @Override public $Type$Vector build() { + $Type$Vector vector; if (valueCount == 1) { $if(BytesRef)$ - return new ConstantBytesRefVector(values.get(0, new BytesRef()), 1); - } + vector = new ConstantBytesRefVector(BytesRef.deepCopyOf(values.get(0, new BytesRef())), 1, blockFactory); + Releasables.closeExpectNoException(values); + } else { + estimatedBytes = values.ramBytesUsed(); $else$ - return new Constant$Type$Vector(values[0], 1); - } - if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { - values = Arrays.copyOf(values, valueCount); - } + vector = new Constant$Type$Vector(values[0], 1, blockFactory); + } else { + if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { + values = Arrays.copyOf(values, valueCount); + } $endif$ - return new $Type$ArrayVector(values, valueCount); + vector = new $Type$ArrayVector(values, valueCount, blockFactory); + } + // update the breaker with the actual bytes used. + blockFactory.adjustBreaker(vector.ramBytesUsed() - estimatedBytes, true); + return vector; } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java index db60b45f4516c..8743c64db472a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverContext.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.operator; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.core.Releasable; import java.util.Collections; @@ -42,15 +43,23 @@ public class DriverContext { private final BigArrays bigArrays; - public DriverContext(BigArrays bigArrays) { + private final BlockFactory blockFactory; + + public DriverContext(BigArrays bigArrays, BlockFactory blockFactory) { Objects.requireNonNull(bigArrays); + Objects.requireNonNull(blockFactory); this.bigArrays = bigArrays; + this.blockFactory = blockFactory; } public BigArrays bigArrays() { return bigArrays; } + public BlockFactory blockFactory() { + return blockFactory; + } + /** A snapshot of the driver context. 
*/ public record Snapshot(Set releasables) {} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ThrowingDriverContext.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ThrowingDriverContext.java index d985d7649ee38..313ec0b682602 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ThrowingDriverContext.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ThrowingDriverContext.java @@ -13,11 +13,12 @@ import org.elasticsearch.common.util.FloatArray; import org.elasticsearch.common.util.IntArray; import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.core.Releasable; public class ThrowingDriverContext extends DriverContext { public ThrowingDriverContext() { - super(new ThrowingBigArrays()); + super(new ThrowingBigArrays(), BlockFactory.getNonBreakingInstance()); } @Override @@ -25,6 +26,11 @@ public BigArrays bigArrays() { throw new AssertionError("should not reach here"); } + @Override + public BlockFactory blockFactory() { + throw new AssertionError("should not reach here"); + } + @Override public boolean addReleasable(Releasable releasable) { throw new AssertionError("should not reach here"); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForDoc.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForDoc.java index a825b7d160551..166d5be83b474 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForDoc.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ResultBuilderForDoc.java @@ -9,8 +9,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DocVector; -import org.elasticsearch.compute.data.IntArrayVector; class ResultBuilderForDoc implements ResultBuilder { private final int[] shards; @@ -40,9 +40,9 @@ public void decodeValue(BytesRef values) { @Override public Block build() { return new DocVector( - new IntArrayVector(shards, position), - new IntArrayVector(segments, position), - new IntArrayVector(docs, position), + BlockFactory.getNonBreakingInstance().newIntArrayVector(shards, position), + BlockFactory.getNonBreakingInstance().newIntArrayVector(segments, position), + BlockFactory.getNonBreakingInstance().newIntArrayVector(docs, position), null ).asBlock(); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java index d7ec9bcaf99ee..04a966b399870 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java @@ -29,6 +29,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.MockPageCacheRecycler; @@ -36,11 +37,11 @@ import org.elasticsearch.compute.aggregation.CountAggregatorFunction; import 
org.elasticsearch.compute.aggregation.blockhash.BlockHash; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DocBlock; import org.elasticsearch.compute.data.DocVector; import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; @@ -147,6 +148,10 @@ public void testQueryOperator() throws IOException { // @Repeat(iterations = 1) public void testGroupingWithOrdinals() throws Exception { + DriverContext driverContext = driverContext(); + BlockFactory blockFactory = driverContext.blockFactory(); + BigArrays bigArrays = driverContext.bigArrays(); + final String gField = "g"; final int numDocs = 2856; // between(100, 10000); final Map expectedCounts = new HashMap<>(); @@ -162,7 +167,6 @@ public void testGroupingWithOrdinals() throws Exception { } writer.commit(); Map actualCounts = new HashMap<>(); - BigArrays bigArrays = bigArrays(); boolean shuffleDocs = randomBoolean(); Operator shuffleDocsOperator = new AbstractPageMappingOperator() { @Override @@ -195,7 +199,7 @@ protected Page process(Page page) { ids.add(docs.getInt(i)); } Collections.shuffle(ids, random()); - docs = new IntArrayVector(ids.stream().mapToInt(n -> n).toArray(), positionCount); + docs = blockFactory.newIntArrayVector(ids.stream().mapToInt(n -> n).toArray(), positionCount); } Block[] blocks = new Block[page.getBlockCount()]; blocks[0] = new DocVector(shards, segments, docs, false).asBlock(); @@ -212,8 +216,6 @@ public String toString() { }; try (DirectoryReader reader = writer.getReader()) { - DriverContext driverContext = driverContext(); - Driver driver = new Driver( driverContext, luceneOperatorFactory(reader, new MatchAllDocsQuery(), LuceneOperator.NO_LIMIT).get(driverContext), @@ -263,7 +265,6 @@ public String toString() { keys.getBytesRef(i, spare); actualCounts.put(BytesRef.deepCopyOf(spare), counts.getLong(i)); } - // System.out.println("HEGO: keys.getPositionCount=" + keys.getPositionCount()); // Releasables.close(keys); }), () -> {} @@ -398,7 +399,8 @@ private BigArrays bigArrays() { * A {@link DriverContext} that won't throw {@link CircuitBreakingException}. */ protected final DriverContext driverContext() { - return new DriverContext(bigArrays()); + var breaker = new MockBigArrays.LimitedBreaker("esql-test-breaker", ByteSizeValue.ofGb(1)); + return new DriverContext(bigArrays(), BlockFactory.getInstance(breaker, bigArrays())); } public static void assertDriverContext(DriverContext driverContext) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/TestBlockFactoryParameters.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/TestBlockFactoryParameters.java new file mode 100644 index 0000000000000..8fa38b6864674 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/TestBlockFactoryParameters.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute; + +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.compute.data.BlockFactoryParameters; +import org.elasticsearch.indices.breaker.CircuitBreakerService; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class TestBlockFactoryParameters implements BlockFactoryParameters { + + final CircuitBreaker breaker; + final BigArrays bigArrays; + + public TestBlockFactoryParameters() { + breaker = new MockBigArrays.LimitedBreaker("esql-test-breaker", ByteSizeValue.ofGb(1)); + var breakerService = mock(CircuitBreakerService.class); + when(breakerService.getBreaker(CircuitBreaker.REQUEST)).thenReturn(breaker); + bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, breakerService); + } + + @Override + public CircuitBreaker breaker() { + return breaker; + } + + @Override + public BigArrays bigArrays() { + return bigArrays; + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java index 974046469e518..3699a87431937 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java @@ -11,7 +11,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.DoubleArrayVector; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.CannedSourceOperator; @@ -63,10 +63,11 @@ protected void assertOutputFromEmpty(Block b) { public void testRejectsDouble() { DriverContext driverContext = driverContext(); + BlockFactory blockFactory = driverContext.blockFactory(); try ( Driver d = new Driver( driverContext, - new CannedSourceOperator(Iterators.single(new Page(new DoubleArrayVector(new double[] { 1.0 }, 1).asBlock()))), + new CannedSourceOperator(Iterators.single(new Page(blockFactory.newDoubleArrayVector(new double[] { 1.0 }, 1).asBlock()))), List.of(simple(nonBreakingBigArrays()).get(driverContext)), new PageConsumerOperator(page -> fail("shouldn't have made it this far")), () -> {} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java index 04cbe0ed53236..556f9d0ccc462 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java @@ -11,7 +11,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; 
-import org.elasticsearch.compute.data.DoubleArrayVector; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.CannedSourceOperator; @@ -64,10 +64,11 @@ protected void assertOutputFromEmpty(Block b) { public void testRejectsDouble() { DriverContext driverContext = driverContext(); + BlockFactory blockFactory = driverContext.blockFactory(); try ( Driver d = new Driver( driverContext, - new CannedSourceOperator(Iterators.single(new Page(new DoubleArrayVector(new double[] { 1.0 }, 1).asBlock()))), + new CannedSourceOperator(Iterators.single(new Page(blockFactory.newDoubleArrayVector(new double[] { 1.0 }, 1).asBlock()))), List.of(simple(nonBreakingBigArrays()).get(driverContext)), new PageConsumerOperator(page -> fail("shouldn't have made it this far")), () -> {} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java index 23015d066810a..eab6eb30261bd 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java @@ -12,11 +12,11 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; @@ -513,7 +513,8 @@ public void add(int positionOffset, IntVector groupIds) { seenGroupIds.set(group); chunk[count++] = group; } - delegateAddInput.add(positionOffset + offset, new IntArrayVector(chunk, count)); + BlockFactory blockFactory = driverContext().blockFactory(); // TODO: just for compile + delegateAddInput.add(positionOffset + offset, blockFactory.newIntArrayVector(chunk, count)); } } }; @@ -527,7 +528,8 @@ public void addIntermediateInput(int positionOffset, IntVector groupIds, Page pa for (int i = offset; i < Math.min(groupIds.getPositionCount(), offset + emitChunkSize); i++) { chunk[count++] = groupIds.getInt(i); } - delegate.addIntermediateInput(positionOffset + offset, new IntArrayVector(chunk, count), page); + BlockFactory blockFactory = driverContext().blockFactory(); // TODO: just for compile + delegate.addIntermediateInput(positionOffset + offset, blockFactory.newIntArrayVector(chunk, count), page); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java index 552b0d2d8836f..e6fccf2d46f61 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java @@ -10,7 +10,7 @@ 
import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.DoubleArrayVector; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.CannedSourceOperator; @@ -50,10 +50,11 @@ protected void assertSimpleOutput(List input, Block result) { public void testRejectsDouble() { DriverContext driverContext = driverContext(); + BlockFactory blockFactory = driverContext.blockFactory(); try ( Driver d = new Driver( driverContext, - new CannedSourceOperator(Iterators.single(new Page(new DoubleArrayVector(new double[] { 1.0 }, 1).asBlock()))), + new CannedSourceOperator(Iterators.single(new Page(blockFactory.newDoubleArrayVector(new double[] { 1.0 }, 1).asBlock()))), List.of(simple(nonBreakingBigArrays()).get(driverContext)), new PageConsumerOperator(page -> fail("shouldn't have made it this far")), () -> {} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java index 21880eb6b1a3e..ae5aaa5b21965 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java @@ -10,7 +10,7 @@ import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.DoubleArrayVector; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.CannedSourceOperator; @@ -66,10 +66,11 @@ public void testOverflowFails() { public void testRejectsDouble() { DriverContext driverContext = driverContext(); + BlockFactory blockFactory = driverContext.blockFactory(); try ( Driver d = new Driver( driverContext, - new CannedSourceOperator(Iterators.single(new Page(new DoubleArrayVector(new double[] { 1.0 }, 1).asBlock()))), + new CannedSourceOperator(Iterators.single(new Page(blockFactory.newDoubleArrayVector(new double[] { 1.0 }, 1).asBlock()))), List.of(simple(nonBreakingBigArrays()).get(driverContext)), new PageConsumerOperator(page -> fail("shouldn't have made it this far")), () -> {} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java index f00c8d6d3cb1f..5775a983a2373 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java @@ -10,6 +10,7 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.BasicBlockTests; 
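The aggregator-function tests above all switch to the same pattern: instead of invoking the array-vector constructors directly, they obtain the BlockFactory from the DriverContext, so blocks produced inside a driver are charged against that driver's breaker. A sketch of that wiring, with the understanding that the helper names here (BreakerAwareDriverContextSketch, buildTestDriverContext, onePageOfDoubles) are illustrative:

import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.MockBigArrays;
import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.operator.DriverContext;

class BreakerAwareDriverContextSketch {
    // Builds a DriverContext whose BlockFactory charges a limited test breaker,
    // the same shape as the driverContext() helper in OperatorTests above.
    static DriverContext buildTestDriverContext() {
        CircuitBreaker breaker = new MockBigArrays.LimitedBreaker("esql-test-breaker", ByteSizeValue.ofGb(1));
        BigArrays bigArrays = BigArrays.NON_RECYCLING_INSTANCE;
        return new DriverContext(bigArrays, BlockFactory.getInstance(breaker, bigArrays));
    }

    // Operators and tests ask the context for its factory instead of newing up blocks themselves.
    static Page onePageOfDoubles(DriverContext driverContext) {
        BlockFactory blockFactory = driverContext.blockFactory();
        return new Page(blockFactory.newDoubleArrayVector(new double[] { 1.0 }, 1).asBlock());
    }
}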
@@ -35,6 +36,7 @@ //@TestLogging(value = "org.elasticsearch.compute:TRACE", reason = "debug") public class BlockHashRandomizedTests extends ESTestCase { + @ParametersFactory public static List params() { List params = new ArrayList<>(); @@ -165,7 +167,7 @@ private BlockHash newBlockHash(int emitBatchSize, List types) { for (int c = 0; c < types.size(); c++) { specs.add(new HashAggregationOperator.GroupSpec(c, types.get(c))); } - MockBigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()); return forcePackedHash ? new PackedValuesBlockHash(specs, bigArrays, emitBatchSize) : BlockHash.build(specs, bigArrays, emitBatchSize, true); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java index 553ce83d8002c..620bb5ab5319a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java @@ -10,25 +10,27 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.inject.name.Named; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanArrayVector; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.DoubleArrayVector; import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.HashAggregationOperator; +import org.elasticsearch.core.Releasables; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; +import org.junit.After; import java.util.ArrayList; import java.util.Arrays; @@ -46,6 +48,10 @@ import static org.hamcrest.Matchers.startsWith; public class BlockHashTests extends ESTestCase { + + static final CircuitBreaker breaker = new MockBigArrays.LimitedBreaker("esql-test-breaker", ByteSizeValue.ofGb(1)); + static final BlockFactory blockFactory = BlockFactory.getInstance(breaker, BigArrays.NON_RECYCLING_INSTANCE); + @ParametersFactory public static List params() { List params = new ArrayList<>(); @@ -54,6 +60,11 @@ public static List params() { return params; } + @After + public void checkBreaker() { + // assertThat(breaker.getUsed(), is(0L)); // TODO: enable once all blocks are released + } + private final boolean forcePackedHash; public BlockHashTests(@Named("forcePackedHash") boolean 
forcePackedHash) { @@ -62,7 +73,7 @@ public BlockHashTests(@Named("forcePackedHash") boolean forcePackedHash) { public void testIntHash() { int[] values = new int[] { 1, 2, 3, 1, 2, 3, 1, 2, 3 }; - IntBlock block = new IntArrayVector(values, values.length).asBlock(); + IntBlock block = blockFactory.newIntArrayVector(values, values.length).asBlock(); OrdsAndKeys ordsAndKeys = hash(block); if (forcePackedHash) { @@ -75,6 +86,7 @@ public void testIntHash() { assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(1, 4))); } assertKeys(ordsAndKeys.keys, 1, 2, 3); + Releasables.closeExpectNoException(block); } public void testIntHashWithNulls() { @@ -150,7 +162,7 @@ public void testIntHashWithMultiValuedFields() { public void testLongHash() { long[] values = new long[] { 2, 1, 4, 2, 4, 1, 3, 4 }; - LongBlock block = new LongArrayVector(values, values.length).asBlock(); + LongBlock block = blockFactory.newLongArrayVector(values, values.length).asBlock(); OrdsAndKeys ordsAndKeys = hash(block); if (forcePackedHash) { @@ -163,6 +175,7 @@ public void testLongHash() { assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(1, 5))); } assertKeys(ordsAndKeys.keys, 2L, 1L, 4L, 3L); + Releasables.closeExpectNoException(block); } public void testLongHashWithNulls() { @@ -207,7 +220,8 @@ public void testLongHashWithMultiValuedFields() { builder.appendLong(1); builder.endPositionEntry(); - OrdsAndKeys ordsAndKeys = hash(builder.build()); + Block block = builder.build(); + OrdsAndKeys ordsAndKeys = hash(block); if (forcePackedHash) { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG], entries=4, size=")); assertOrds( @@ -234,11 +248,12 @@ public void testLongHashWithMultiValuedFields() { assertKeys(ordsAndKeys.keys, null, 1L, 2L, 3L); } assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 4))); + Releasables.closeExpectNoException(block); } public void testDoubleHash() { double[] values = new double[] { 2.0, 1.0, 4.0, 2.0, 4.0, 1.0, 3.0, 4.0 }; - DoubleBlock block = new DoubleArrayVector(values, values.length).asBlock(); + DoubleBlock block = blockFactory.newDoubleArrayVector(values, values.length).asBlock(); OrdsAndKeys ordsAndKeys = hash(block); if (forcePackedHash) { @@ -251,6 +266,7 @@ public void testDoubleHash() { assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(1, 5))); } assertKeys(ordsAndKeys.keys, 2.0, 1.0, 4.0, 3.0); + Releasables.closeExpectNoException(block); } public void testDoubleHashWithNulls() { @@ -260,7 +276,8 @@ public void testDoubleHashWithNulls() { builder.appendDouble(2); builder.appendNull(); - OrdsAndKeys ordsAndKeys = hash(builder.build()); + Block block = builder.build(); + OrdsAndKeys ordsAndKeys = hash(block); if (forcePackedHash) { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:DOUBLE], entries=3, size=")); assertOrds(ordsAndKeys.ords, 0, 1, 2, 1); @@ -271,6 +288,7 @@ public void testDoubleHashWithNulls() { assertKeys(ordsAndKeys.keys, null, 0.0, 2.0); } assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 3))); + Releasables.closeExpectNoException(block); } public void testDoubleHashWithMultiValuedFields() { @@ -294,7 +312,8 @@ public void testDoubleHashWithMultiValuedFields() { builder.appendDouble(2); builder.endPositionEntry(); - OrdsAndKeys ordsAndKeys = hash(builder.build()); + Block block = builder.build(); + OrdsAndKeys ordsAndKeys = hash(block); if (forcePackedHash) { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:DOUBLE], entries=4, size=")); 
assertOrds( @@ -321,10 +340,11 @@ public void testDoubleHashWithMultiValuedFields() { assertKeys(ordsAndKeys.keys, null, 1.0, 2.0, 3.0); } assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 4))); + Releasables.closeExpectNoException(block); } public void testBasicBytesRefHash() { - var builder = BytesRefBlock.newBlockBuilder(8); + var builder = blockFactory.newBytesRefBlockBuilder(8); builder.appendBytesRef(new BytesRef("item-2")); builder.appendBytesRef(new BytesRef("item-1")); builder.appendBytesRef(new BytesRef("item-4")); @@ -334,7 +354,8 @@ public void testBasicBytesRefHash() { builder.appendBytesRef(new BytesRef("item-3")); builder.appendBytesRef(new BytesRef("item-4")); - OrdsAndKeys ordsAndKeys = hash(builder.build()); + Block block = builder.build(); + OrdsAndKeys ordsAndKeys = hash(block); if (forcePackedHash) { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BYTES_REF], entries=4, size=")); assertThat(ordsAndKeys.description, endsWith("b}")); @@ -347,6 +368,7 @@ public void testBasicBytesRefHash() { assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(1, 5))); } assertKeys(ordsAndKeys.keys, "item-2", "item-1", "item-4", "item-3"); + Releasables.closeExpectNoException(block); } public void testBytesRefHashWithNulls() { @@ -356,7 +378,8 @@ public void testBytesRefHashWithNulls() { builder.appendBytesRef(new BytesRef("dog")); builder.appendNull(); - OrdsAndKeys ordsAndKeys = hash(builder.build()); + Block block = builder.build(); + OrdsAndKeys ordsAndKeys = hash(block); if (forcePackedHash) { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BYTES_REF], entries=3, size=")); assertThat(ordsAndKeys.description, endsWith("b}")); @@ -369,6 +392,7 @@ public void testBytesRefHashWithNulls() { assertKeys(ordsAndKeys.keys, null, "cat", "dog"); } assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 3))); + Releasables.closeExpectNoException(block); } public void testBytesRefHashWithMultiValuedFields() { @@ -393,7 +417,8 @@ public void testBytesRefHashWithMultiValuedFields() { builder.appendBytesRef(new BytesRef("bar")); builder.endPositionEntry(); - OrdsAndKeys ordsAndKeys = hash(builder.build()); + Block block = builder.build(); + OrdsAndKeys ordsAndKeys = hash(block); if (forcePackedHash) { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BYTES_REF], entries=4, size=")); assertThat(ordsAndKeys.description, endsWith("b}")); @@ -422,11 +447,12 @@ public void testBytesRefHashWithMultiValuedFields() { assertKeys(ordsAndKeys.keys, null, "foo", "bar", "bort"); } assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 4))); + Releasables.closeExpectNoException(block); } public void testBooleanHashFalseFirst() { boolean[] values = new boolean[] { false, true, true, true, true }; - BooleanBlock block = new BooleanArrayVector(values, values.length).asBlock(); + BooleanBlock block = blockFactory.newBooleanArrayVector(values, values.length).asBlock(); OrdsAndKeys ordsAndKeys = hash(block); if (forcePackedHash) { @@ -439,11 +465,12 @@ public void testBooleanHashFalseFirst() { assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(1, 3))); } assertKeys(ordsAndKeys.keys, false, true); + Releasables.closeExpectNoException(block); } public void testBooleanHashTrueFirst() { boolean[] values = new boolean[] { true, false, false, true, true }; - BooleanBlock block = new BooleanArrayVector(values, values.length).asBlock(); + BooleanBlock block = blockFactory.newBooleanArrayVector(values, 
values.length).asBlock(); OrdsAndKeys ordsAndKeys = hash(block); if (forcePackedHash) { @@ -457,11 +484,12 @@ public void testBooleanHashTrueFirst() { assertKeys(ordsAndKeys.keys, false, true); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(1, 3))); } + Releasables.closeExpectNoException(block); } public void testBooleanHashTrueOnly() { boolean[] values = new boolean[] { true, true, true, true }; - BooleanBlock block = new BooleanArrayVector(values, values.length).asBlock(); + BooleanBlock block = blockFactory.newBooleanArrayVector(values, values.length).asBlock(); OrdsAndKeys ordsAndKeys = hash(block); if (forcePackedHash) { @@ -475,11 +503,12 @@ public void testBooleanHashTrueOnly() { assertKeys(ordsAndKeys.keys, true); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.newVectorBuilder(1).appendInt(2).build())); } + Releasables.closeExpectNoException(block); } public void testBooleanHashFalseOnly() { boolean[] values = new boolean[] { false, false, false, false }; - BooleanBlock block = new BooleanArrayVector(values, values.length).asBlock(); + BooleanBlock block = blockFactory.newBooleanArrayVector(values, values.length).asBlock(); OrdsAndKeys ordsAndKeys = hash(block); if (forcePackedHash) { @@ -492,6 +521,7 @@ public void testBooleanHashFalseOnly() { assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.newVectorBuilder(1).appendInt(1).build())); } assertKeys(ordsAndKeys.keys, false); + Releasables.closeExpectNoException(block); } public void testBooleanHashWithNulls() { @@ -501,7 +531,8 @@ public void testBooleanHashWithNulls() { builder.appendBoolean(true); builder.appendNull(); - OrdsAndKeys ordsAndKeys = hash(builder.build()); + Block block = builder.build(); + OrdsAndKeys ordsAndKeys = hash(block); if (forcePackedHash) { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BOOLEAN], entries=3, size=")); assertOrds(ordsAndKeys.ords, 0, 1, 2, 1); @@ -512,6 +543,7 @@ public void testBooleanHashWithNulls() { assertKeys(ordsAndKeys.keys, null, false, true); } assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 3))); + Releasables.closeExpectNoException(block); } public void testBooleanHashWithMultiValuedFields() { @@ -535,7 +567,8 @@ public void testBooleanHashWithMultiValuedFields() { builder.appendBoolean(false); builder.endPositionEntry(); - OrdsAndKeys ordsAndKeys = hash(builder.build()); + Block block = builder.build(); + OrdsAndKeys ordsAndKeys = hash(block); if (forcePackedHash) { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BOOLEAN], entries=3, size=")); assertOrds( @@ -562,13 +595,14 @@ public void testBooleanHashWithMultiValuedFields() { assertKeys(ordsAndKeys.keys, null, false, true); } assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 3))); + Releasables.closeExpectNoException(block); } public void testLongLongHash() { long[] values1 = new long[] { 0, 1, 0, 1, 0, 1 }; - LongBlock block1 = new LongArrayVector(values1, values1.length).asBlock(); + LongBlock block1 = blockFactory.newLongArrayVector(values1, values1.length).asBlock(); long[] values2 = new long[] { 0, 0, 0, 1, 1, 1 }; - LongBlock block2 = new LongArrayVector(values2, values2.length).asBlock(); + LongBlock block2 = blockFactory.newLongArrayVector(values2, values2.length).asBlock(); Object[][] expectedKeys = { new Object[] { 0L, 0L }, new Object[] { 1L, 0L }, new Object[] { 1L, 1L }, new Object[] { 0L, 1L } }; OrdsAndKeys ordsAndKeys = hash(block1, block2); @@ -581,6 +615,7 @@ public void testLongLongHash() { 
assertOrds(ordsAndKeys.ords, 0, 1, 0, 2, 3, 2); assertKeys(ordsAndKeys.keys, expectedKeys); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 4))); + Releasables.closeExpectNoException(block1, block2); } private void append(LongBlock.Builder b1, LongBlock.Builder b2, long[] v1, long[] v2) { @@ -621,7 +656,9 @@ public void testLongLongHashWithMultiValuedFields() { append(b1, b2, new long[] { 1, 1, 2, 2 }, new long[] { 10, 20, 20 }); append(b1, b2, new long[] { 1, 2, 3 }, new long[] { 30, 30, 10 }); - OrdsAndKeys ordsAndKeys = hash(b1.build(), b2.build()); + Block block1 = b1.build(); + Block block2 = b2.build(); + OrdsAndKeys ordsAndKeys = hash(block1, block2); if (forcePackedHash) { assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:LONG], entries=10, size=")); assertOrds( @@ -678,6 +715,7 @@ public void testLongLongHashWithMultiValuedFields() { new Object[] { 3L, 10L }, } ); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 8))); + Releasables.closeExpectNoException(block1, block2); } } @@ -715,9 +753,9 @@ public void testLongLongHashHugeCombinatorialExplosion() { public void testIntLongHash() { int[] values1 = new int[] { 0, 1, 0, 1, 0, 1 }; - IntBlock block1 = new IntArrayVector(values1, values1.length).asBlock(); + IntBlock block1 = blockFactory.newIntArrayVector(values1, values1.length).asBlock(); long[] values2 = new long[] { 0, 0, 0, 1, 1, 1 }; - LongBlock block2 = new LongArrayVector(values2, values2.length).asBlock(); + LongBlock block2 = blockFactory.newLongArrayVector(values2, values2.length).asBlock(); Object[][] expectedKeys = { new Object[] { 0, 0L }, new Object[] { 1, 0L }, new Object[] { 1, 1L }, new Object[] { 0, 1L } }; OrdsAndKeys ordsAndKeys = hash(block1, block2); @@ -725,13 +763,14 @@ public void testIntLongHash() { assertThat(ordsAndKeys.description, endsWith("b}")); assertOrds(ordsAndKeys.ords, 0, 1, 0, 2, 3, 2); assertKeys(ordsAndKeys.keys, expectedKeys); + Releasables.closeExpectNoException(block1, block2); } public void testLongDoubleHash() { long[] values1 = new long[] { 0, 1, 0, 1, 0, 1 }; - LongBlock block1 = new LongArrayVector(values1, values1.length).asBlock(); + LongBlock block1 = blockFactory.newLongArrayVector(values1, values1.length).asBlock(); double[] values2 = new double[] { 0, 0, 0, 1, 1, 1 }; - DoubleBlock block2 = new DoubleArrayVector(values2, values2.length).asBlock(); + DoubleBlock block2 = blockFactory.newDoubleArrayVector(values2, values2.length).asBlock(); Object[][] expectedKeys = { new Object[] { 0L, 0d }, new Object[] { 1L, 0d }, new Object[] { 1L, 1d }, new Object[] { 0L, 1d } }; OrdsAndKeys ordsAndKeys = hash(block1, block2); assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:DOUBLE], entries=4, size=")); @@ -742,9 +781,9 @@ public void testLongDoubleHash() { public void testIntBooleanHash() { int[] values1 = new int[] { 0, 1, 0, 1, 0, 1 }; - IntBlock block1 = new IntArrayVector(values1, values1.length).asBlock(); + IntBlock block1 = blockFactory.newIntArrayVector(values1, values1.length).asBlock(); boolean[] values2 = new boolean[] { false, false, false, true, true, true }; - BooleanBlock block2 = new BooleanArrayVector(values2, values2.length).asBlock(); + BooleanBlock block2 = blockFactory.newBooleanArrayVector(values2, values2.length).asBlock(); Object[][] expectedKeys = { new Object[] { 0, false }, new Object[] { 1, false }, @@ -796,8 +835,8 @@ public void testLongLongHashWithNull() { public void testLongBytesRefHash() { long[] values1 = 
new long[] { 0, 1, 0, 1, 0, 1 }; - LongBlock block1 = new LongArrayVector(values1, values1.length).asBlock(); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(8); + LongBlock block1 = blockFactory.newLongArrayVector(values1, values1.length).asBlock(); + BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(8); builder.appendBytesRef(new BytesRef("cat")); builder.appendBytesRef(new BytesRef("cat")); builder.appendBytesRef(new BytesRef("cat")); @@ -824,6 +863,7 @@ public void testLongBytesRefHash() { assertOrds(ordsAndKeys.ords, 0, 1, 0, 2, 3, 2); assertKeys(ordsAndKeys.keys, expectedKeys); assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.range(0, 4))); + Releasables.closeExpectNoException(block1, block2); } public void testLongBytesRefHashWithNull() { @@ -1028,7 +1068,7 @@ private void hash(Consumer callback, int emitBatchSize, Block... va for (int c = 0; c < values.length; c++) { specs.add(new HashAggregationOperator.GroupSpec(c, values[c].elementType())); } - MockBigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()); try ( BlockHash blockHash = forcePackedHash ? new PackedValuesBlockHash(specs, bigArrays, emitBatchSize) @@ -1068,6 +1108,7 @@ public void add(int positionOffset, IntBlock groupIds) { } } callback.accept(result); + // Releasables.closeExpectNoException(result.keys()); // TODO: who should release the keys? } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java index b3c0624496bde..df16f0036c767 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java @@ -7,10 +7,20 @@ package org.elasticsearch.compute.data; +import org.apache.lucene.util.Accountable; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.test.ESTestCase; +import org.junit.After; +import org.junit.Before; import java.util.ArrayList; import java.util.Arrays; @@ -26,51 +36,48 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class BasicBlockTests extends ESTestCase { - public void testEmpty() { - assertThat( - new IntArrayBlock(new int[] {}, 0, new int[] {}, new BitSet(), randomFrom(Block.MvOrdering.values())).getPositionCount(), - is(0) - ); - assertThat(IntBlock.newBlockBuilder(0).build().getPositionCount(), is(0)); - assertThat(new IntArrayVector(new int[] {}, 0).getPositionCount(), is(0)); - assertThat(IntVector.newVectorBuilder(0).build().getPositionCount(), is(0)); + final CircuitBreaker breaker = new MockBigArrays.LimitedBreaker("esql-test-breaker", 
ByteSizeValue.ofGb(1)); + final BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, mockBreakerService(breaker)); + final BlockFactory blockFactory = BlockFactory.getInstance(breaker, bigArrays); - assertThat( - new LongArrayBlock(new long[] {}, 0, new int[] {}, new BitSet(), randomFrom(Block.MvOrdering.values())).getPositionCount(), - is(0) - ); - assertThat(LongBlock.newBlockBuilder(0).build().getPositionCount(), is(0)); - assertThat(new LongArrayVector(new long[] {}, 0).getPositionCount(), is(0)); - assertThat(LongVector.newVectorBuilder(0).build().getPositionCount(), is(0)); + @Before + @After + public void checkBreaker() { + assertThat(breaker.getUsed(), is(0L)); + } - assertThat( - new DoubleArrayBlock(new double[] {}, 0, new int[] {}, new BitSet(), randomFrom(Block.MvOrdering.values())).getPositionCount(), - is(0) - ); - assertThat(DoubleBlock.newBlockBuilder(0).build().getPositionCount(), is(0)); - assertThat(new DoubleArrayVector(new double[] {}, 0).getPositionCount(), is(0)); - assertThat(DoubleVector.newVectorBuilder(0).build().getPositionCount(), is(0)); - - var emptyArray = new BytesRefArray(0, BigArrays.NON_RECYCLING_INSTANCE); - assertThat( - new BytesRefArrayBlock(emptyArray, 0, new int[] {}, new BitSet(), randomFrom(Block.MvOrdering.values())).getPositionCount(), - is(0) - ); - assertThat(BytesRefBlock.newBlockBuilder(0).build().getPositionCount(), is(0)); - assertThat(new BytesRefArrayVector(emptyArray, 0).getPositionCount(), is(0)); - assertThat(BytesRefVector.newVectorBuilder(0).build().getPositionCount(), is(0)); - - assertThat( - new BooleanArrayBlock(new boolean[] {}, 0, new int[] {}, new BitSet(), randomFrom(Block.MvOrdering.values())) - .getPositionCount(), - is(0) + public void testEmpty() { + testEmpty(blockFactory); + } + + void testEmpty(BlockFactory bf) { + assertZeroPositionsAndRelease(bf.newIntArrayBlock(new int[] {}, 0, new int[] {}, new BitSet(), randomOrdering())); + assertZeroPositionsAndRelease(IntBlock.newBlockBuilder(0, bf).build()); + assertZeroPositionsAndRelease(bf.newIntArrayVector(new int[] {}, 0)); + assertZeroPositionsAndRelease(IntVector.newVectorBuilder(0, bf).build()); + assertZeroPositionsAndRelease(bf.newLongArrayBlock(new long[] {}, 0, new int[] {}, new BitSet(), randomOrdering())); + assertZeroPositionsAndRelease(LongBlock.newBlockBuilder(0, bf).build()); + assertZeroPositionsAndRelease(bf.newLongArrayVector(new long[] {}, 0)); + assertZeroPositionsAndRelease(LongVector.newVectorBuilder(0, bf).build()); + assertZeroPositionsAndRelease(bf.newDoubleArrayBlock(new double[] {}, 0, new int[] {}, new BitSet(), randomOrdering())); + assertZeroPositionsAndRelease(DoubleBlock.newBlockBuilder(0, bf).build()); + assertZeroPositionsAndRelease(bf.newDoubleArrayVector(new double[] {}, 0)); + assertZeroPositionsAndRelease(DoubleVector.newVectorBuilder(0, bf).build()); + assertZeroPositionsAndRelease( + bf.newBytesRefArrayBlock(new BytesRefArray(0, bf.bigArrays()), 0, new int[] {}, new BitSet(), randomOrdering()) ); - assertThat(BooleanBlock.newBlockBuilder(0).build().getPositionCount(), is(0)); - assertThat(new BooleanArrayVector(new boolean[] {}, 0).getPositionCount(), is(0)); - assertThat(BooleanVector.newVectorBuilder(0).build().getPositionCount(), is(0)); + assertZeroPositionsAndRelease(BytesRefBlock.newBlockBuilder(0, bf).build()); + assertZeroPositionsAndRelease(bf.newBytesRefArrayVector(new BytesRefArray(0, bf.bigArrays()), 0)); + assertZeroPositionsAndRelease(BytesRefVector.newVectorBuilder(0, bf).build()); + 
assertZeroPositionsAndRelease(bf.newBooleanArrayBlock(new boolean[] {}, 0, new int[] {}, new BitSet(), randomOrdering())); + assertZeroPositionsAndRelease(BooleanBlock.newBlockBuilder(0, bf).build()); + assertZeroPositionsAndRelease(bf.newBooleanArrayVector(new boolean[] {}, 0)); + assertZeroPositionsAndRelease(BooleanVector.newVectorBuilder(0, bf).build()); } public void testSmallSingleValueDenseGrowthInt() { @@ -141,15 +148,16 @@ static void assertSingleValueDenseBlock(Block initialBlock) { public void testIntBlock() { for (int i = 0; i < 1000; i++) { + assertThat(breaker.getUsed(), is(0L)); int positionCount = randomIntBetween(1, 16 * 1024); IntBlock block; if (randomBoolean()) { final int builderEstimateSize = randomBoolean() ? randomIntBetween(1, positionCount) : positionCount; - IntBlock.Builder blockBuilder = IntBlock.newBlockBuilder(builderEstimateSize); + IntBlock.Builder blockBuilder = IntBlock.newBlockBuilder(builderEstimateSize, blockFactory); IntStream.range(0, positionCount).forEach(blockBuilder::appendInt); block = blockBuilder.build(); } else { - block = new IntArrayVector(IntStream.range(0, positionCount).toArray(), positionCount).asBlock(); + block = blockFactory.newIntArrayVector(IntStream.range(0, positionCount).toArray(), positionCount).asBlock(); } assertThat(block.getPositionCount(), equalTo(positionCount)); @@ -158,11 +166,12 @@ public void testIntBlock() { int pos = block.getInt(randomPosition(positionCount)); assertThat(pos, is(block.getInt(pos))); assertSingleValueDenseBlock(block); + releaseAndAssertBreaker(block); if (positionCount > 1) { assertNullValues( positionCount, - size -> IntBlock.newBlockBuilder(size), + size -> IntBlock.newBlockBuilder(size, blockFactory), (bb, value) -> bb.appendInt(value), position -> position, IntBlock.Builder::build, @@ -172,28 +181,32 @@ public void testIntBlock() { ); } - IntBlock.Builder blockBuilder = IntBlock.newBlockBuilder(1); + IntBlock.Builder blockBuilder = IntBlock.newBlockBuilder(1, blockFactory); IntBlock copy = blockBuilder.copyFrom(block, 0, block.getPositionCount()).build(); assertThat(copy, equalTo(block)); + releaseAndAssertBreaker(copy); IntVector.Builder vectorBuilder = IntVector.newVectorBuilder( - randomBoolean() ? randomIntBetween(1, positionCount) : positionCount + randomBoolean() ? 
randomIntBetween(1, positionCount) : positionCount, + blockFactory ); IntStream.range(0, positionCount).forEach(vectorBuilder::appendInt); IntVector vector = vectorBuilder.build(); assertSingleValueDenseBlock(vector.asBlock()); + releaseAndAssertBreaker(vector); } } public void testConstantIntBlock() { for (int i = 0; i < 1000; i++) { + assertThat(breaker.getUsed(), is(0L)); int positionCount = randomIntBetween(1, 16 * 1024); int value = randomInt(); IntBlock block; if (randomBoolean()) { - block = IntBlock.newConstantBlockWith(value, positionCount); + block = IntBlock.newConstantBlockWith(value, positionCount, blockFactory); } else { - block = new ConstantIntVector(value, positionCount).asBlock(); + block = blockFactory.newConstantIntBlockWith(value, positionCount); } assertThat(positionCount, is(block.getPositionCount())); assertThat(value, is(block.getInt(0))); @@ -201,20 +214,22 @@ public void testConstantIntBlock() { assertThat(value, is(block.getInt(randomPosition(positionCount)))); assertThat(block.isNull(randomPosition(positionCount)), is(false)); assertSingleValueDenseBlock(block); + releaseAndAssertBreaker(block); } } public void testLongBlock() { for (int i = 0; i < 1000; i++) { + assertThat(breaker.getUsed(), is(0L)); int positionCount = randomIntBetween(1, 16 * 1024); LongBlock block; if (randomBoolean()) { final int builderEstimateSize = randomBoolean() ? randomIntBetween(1, positionCount) : positionCount; - LongBlock.Builder blockBuilder = LongBlock.newBlockBuilder(builderEstimateSize); + LongBlock.Builder blockBuilder = blockFactory.newLongBlockBuilder(builderEstimateSize); LongStream.range(0, positionCount).forEach(blockBuilder::appendLong); block = blockBuilder.build(); } else { - block = new LongArrayVector(LongStream.range(0, positionCount).toArray(), positionCount).asBlock(); + block = blockFactory.newLongArrayVector(LongStream.range(0, positionCount).toArray(), positionCount).asBlock(); } assertThat(positionCount, is(block.getPositionCount())); @@ -223,11 +238,12 @@ public void testLongBlock() { int pos = (int) block.getLong(randomPosition(positionCount)); assertThat((long) pos, is(block.getLong(pos))); assertSingleValueDenseBlock(block); + releaseAndAssertBreaker(block); if (positionCount > 1) { assertNullValues( positionCount, - size -> LongBlock.newBlockBuilder(size), + size -> LongBlock.newBlockBuilder(size, blockFactory), (bb, value) -> bb.appendLong(value), position -> (long) position, LongBlock.Builder::build, @@ -252,13 +268,14 @@ public void testLongBlock() { public void testConstantLongBlock() { for (int i = 0; i < 1000; i++) { + assertThat(breaker.getUsed(), is(0L)); int positionCount = randomIntBetween(1, 16 * 1024); long value = randomLong(); LongBlock block; if (randomBoolean()) { - block = LongBlock.newConstantBlockWith(value, positionCount); + block = LongBlock.newConstantBlockWith(value, positionCount, blockFactory); } else { - block = new ConstantLongVector(value, positionCount).asBlock(); + block = blockFactory.newConstantLongBlockWith(value, positionCount); } assertThat(positionCount, is(block.getPositionCount())); assertThat(value, is(block.getLong(0))); @@ -266,9 +283,14 @@ public void testConstantLongBlock() { assertThat(value, is(block.getLong(randomPosition(positionCount)))); assertThat(block.isNull(randomPosition(positionCount)), is(false)); assertSingleValueDenseBlock(block); + releaseAndAssertBreaker(block); } } + // TODO: continue to update the test, as above. 
+ // Try to not complicate the "basic" test any more than necessary, but it already has great coverage + // for building all types of blocks!! + public void testDoubleBlock() { for (int i = 0; i < 1000; i++) { int positionCount = randomIntBetween(1, 16 * 1024); @@ -292,7 +314,7 @@ public void testDoubleBlock() { if (positionCount > 1) { assertNullValues( positionCount, - size -> DoubleBlock.newBlockBuilder(size), + size -> DoubleBlock.newBlockBuilder(size, blockFactory), (bb, value) -> bb.appendDouble(value), position -> (double) position, DoubleBlock.Builder::build, @@ -369,7 +391,7 @@ public void testBytesRefBlock() { if (positionCount > 1) { assertNullValues( positionCount, - size -> BytesRefBlock.newBlockBuilder(size), + size -> BytesRefBlock.newBlockBuilder(size, blockFactory), (bb, value) -> bb.appendBytesRef(value), position -> values[position], BytesRefBlock.Builder::build, @@ -479,7 +501,7 @@ public void testBooleanBlock() { if (positionCount > 1) { assertNullValues( positionCount, - BooleanBlock::newBlockBuilder, + size -> BooleanBlock.newBlockBuilder(size, blockFactory), (bb, value) -> bb.appendBoolean(value), position -> position % 10 == 0, BooleanBlock.Builder::build, @@ -520,6 +542,18 @@ public void testConstantBooleanBlock() { } } + public void testConstantNullBlock() { + for (int i = 0; i < 100; i++) { + assertThat(breaker.getUsed(), is(0L)); + int positionCount = randomIntBetween(1, 16 * 1024); + Block block = Block.constantNullBlock(positionCount, blockFactory); + assertThat(positionCount, is(block.getPositionCount())); + assertThat(block.getPositionCount(), is(positionCount)); + assertThat(block.isNull(randomPosition(positionCount)), is(true)); + releaseAndAssertBreaker(block); + } + } + public void testSingleValueSparseInt() { int positionCount = randomIntBetween(2, 16 * 1024); final int builderEstimateSize = randomBoolean() ? randomIntBetween(1, positionCount) : positionCount; @@ -851,9 +885,41 @@ private static void assertNullVal asserter.accept(randomNonNullPosition, block); assertTrue(block.isNull(randomNullPosition)); assertFalse(block.isNull(randomNonNullPosition)); + releaseAndAssertBreaker(block, block.blockFactory().breaker()); + } + + void assertZeroPositionsAndRelease(Block block) { + assertThat(block.getPositionCount(), is(0)); + releaseAndAssertBreaker(block); + } + + void assertZeroPositionsAndRelease(Vector vector) { + assertThat(vector.getPositionCount(), is(0)); + releaseAndAssertBreaker(vector); + } + + void releaseAndAssertBreaker(T data) { + releaseAndAssertBreaker(data, breaker); + } + + static void releaseAndAssertBreaker(T data, CircuitBreaker breaker) { + assertThat(breaker.getUsed(), greaterThan(0L)); + Releasables.closeExpectNoException(data); + assertThat(breaker.getUsed(), is(0L)); } static int randomPosition(int positionCount) { return positionCount == 1 ? 
0 : randomIntBetween(0, positionCount - 1); } + + static Block.MvOrdering randomOrdering() { + return randomFrom(Block.MvOrdering.values()); + } + + // A breaker service that always returns the given breaker for getBreaker(CircuitBreaker.REQUEST) + static CircuitBreakerService mockBreakerService(CircuitBreaker breaker) { + CircuitBreakerService breakerService = mock(CircuitBreakerService.class); + when(breakerService.getBreaker(CircuitBreaker.REQUEST)).thenReturn(breaker); + return breakerService; + } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java index 5503c02be9794..c93b07e4d40f3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.data; import org.apache.lucene.tests.util.RamUsageTester; +import org.apache.lucene.tests.util.RamUsageTester.Accumulator; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.util.BigArray; import org.elasticsearch.common.util.BigArrays; @@ -28,13 +29,15 @@ public class BlockAccountingTests extends ESTestCase { + static final Accumulator RAM_USAGE_ACCUMULATOR = new TestRamUsageAccumulator(); + // A large(ish) upperbound simply so that effective greaterThan assertions are not unbounded static final long UPPER_BOUND = 10_000; // Array Vectors public void testBooleanVector() { Vector empty = new BooleanArrayVector(new boolean[] {}, 0); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty); + long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Vector emptyPlusOne = new BooleanArrayVector(new boolean[] { randomBoolean() }, 1); @@ -51,7 +54,7 @@ public void testBooleanVector() { public void testIntVector() { Vector empty = new IntArrayVector(new int[] {}, 0); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty); + long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Vector emptyPlusOne = new IntArrayVector(new int[] { randomInt() }, 1); @@ -68,7 +71,7 @@ public void testIntVector() { public void testLongVector() { Vector empty = new LongArrayVector(new long[] {}, 0); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty); + long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Vector emptyPlusOne = new LongArrayVector(new long[] { randomLong() }, 1); @@ -85,7 +88,7 @@ public void testLongVector() { public void testDoubleVector() { Vector empty = new DoubleArrayVector(new double[] {}, 0); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty); + long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Vector emptyPlusOne = new DoubleArrayVector(new double[] { randomDouble() }, 1); @@ -105,23 +108,8 @@ public void testBytesRefVector() { var emptyArray = new BytesRefArray(0, BigArrays.NON_RECYCLING_INSTANCE); var arrayWithOne = new BytesRefArray(0, BigArrays.NON_RECYCLING_INSTANCE) ) { - var acc = new RamUsageTester.Accumulator() { - @Override - public long accumulateObject(Object o, long shallowSize, Map fieldValues, Collection queue) { - for 
(var entry : fieldValues.entrySet()) { - if (entry.getKey().getType().equals(BigArrays.class)) { - // skip BigArrays, as it is (correctly) not part of the ramBytesUsed for BytesRefArray - } else if (o instanceof BigArray bigArray) { - return bigArray.ramBytesUsed(); - } else { - queue.add(entry.getValue()); - } - } - return shallowSize; - } - }; Vector emptyVector = new BytesRefArrayVector(emptyArray, 0); - long expectedEmptyVectorUsed = RamUsageTester.ramUsed(emptyVector, acc); + long expectedEmptyVectorUsed = RamUsageTester.ramUsed(emptyVector, RAM_USAGE_ACCUMULATOR); assertThat(emptyVector.ramBytesUsed(), is(expectedEmptyVectorUsed)); var bytesRef = new BytesRef(randomAlphaOfLengthBetween(1, 16)); @@ -138,7 +126,7 @@ public long accumulateObject(Object o, long shallowSize, Map fiel // Array Blocks public void testBooleanBlock() { Block empty = new BooleanArrayBlock(new boolean[] {}, 0, new int[] {}, null, Block.MvOrdering.UNORDERED); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty); + long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Block emptyPlusOne = new BooleanArrayBlock(new boolean[] { randomBoolean() }, 1, new int[] {}, null, Block.MvOrdering.UNORDERED); @@ -154,13 +142,13 @@ public void testBooleanBlock() { public void testBooleanBlockWithNullFirstValues() { Block empty = new BooleanArrayBlock(new boolean[] {}, 0, null, BitSet.valueOf(new byte[] { 1 }), Block.MvOrdering.UNORDERED); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty); + long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR); assertThat(empty.ramBytesUsed(), lessThanOrEqualTo(expectedEmptyUsed)); } public void testIntBlock() { Block empty = new IntArrayBlock(new int[] {}, 0, new int[] {}, null, Block.MvOrdering.UNORDERED); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty); + long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Block emptyPlusOne = new IntArrayBlock(new int[] { randomInt() }, 1, new int[] {}, null, Block.MvOrdering.UNORDERED); @@ -176,13 +164,13 @@ public void testIntBlock() { public void testIntBlockWithNullFirstValues() { Block empty = new IntArrayBlock(new int[] {}, 0, null, BitSet.valueOf(new byte[] { 1 }), Block.MvOrdering.UNORDERED); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty); + long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); } public void testLongBlock() { Block empty = new LongArrayBlock(new long[] {}, 0, new int[] {}, null, Block.MvOrdering.UNORDERED); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty); + long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Block emptyPlusOne = new LongArrayBlock(new long[] { randomInt() }, 1, new int[] {}, null, Block.MvOrdering.UNORDERED); @@ -198,13 +186,13 @@ public void testLongBlock() { public void testLongBlockWithNullFirstValues() { Block empty = new LongArrayBlock(new long[] {}, 0, null, BitSet.valueOf(new byte[] { 1 }), Block.MvOrdering.UNORDERED); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty); + long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); } public void testDoubleBlock() { Block empty = new DoubleArrayBlock(new double[] {}, 0, new int[] {}, null, 
Block.MvOrdering.UNORDERED); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty); + long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Block emptyPlusOne = new DoubleArrayBlock(new double[] { randomInt() }, 1, new int[] {}, null, Block.MvOrdering.UNORDERED); @@ -220,11 +208,28 @@ public void testDoubleBlock() { public void testDoubleBlockWithNullFirstValues() { Block empty = new DoubleArrayBlock(new double[] {}, 0, null, BitSet.valueOf(new byte[] { 1 }), Block.MvOrdering.UNORDERED); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty); + long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); } static Matcher between(long minInclusive, long maxInclusive) { return allOf(greaterThanOrEqualTo(minInclusive), lessThanOrEqualTo(maxInclusive)); } + + /** An accumulator that stops at BigArrays or BlockFactory. And calls ramBytesUsed on BigArray instances. */ + static class TestRamUsageAccumulator extends Accumulator { + @Override + public long accumulateObject(Object o, long shallowSize, Map fieldValues, Collection queue) { + for (var entry : fieldValues.entrySet()) { + if (entry.getKey().getType().equals(BigArrays.class) || entry.getKey().getType().equals(BlockFactory.class)) { + // skip BigArrays, as it is (correctly) not part of the ramBytesUsed for BytesRefArray + } else if (o instanceof BigArray bigArray) { + return bigArray.ramBytesUsed(); + } else { + queue.add(entry.getValue()); + } + } + return shallowSize; + } + }; } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java index 1feabec374170..a9f08eee02d70 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java @@ -17,7 +17,7 @@ public class BlockBuilderAppendBlockTests extends ESTestCase { public void testBasic() { - IntBlock src = new IntBlockBuilder(10).appendInt(1) + IntBlock src = new IntBlockBuilder(10, BlockFactory.getNonBreakingInstance()).appendInt(1) .appendNull() .beginPositionEntry() .appendInt(4) diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java new file mode 100644 index 0000000000000..831be03cb0c81 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java @@ -0,0 +1,564 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.data; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.test.ESTestCase; +import org.junit.After; +import org.junit.Before; + +import java.util.BitSet; +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +// BlockFactory is used and effectively tested in many other places, but this class contains tests +// more specific to the factory implementation itself (and not necessarily tested elsewhere). +public class BlockFactoryTests extends ESTestCase { + + final CircuitBreaker breaker; + final BigArrays bigArrays; + final BlockFactory blockFactory; + + @ParametersFactory + public static List params() { + List> l = List.of(() -> { + CircuitBreaker breaker = new MockBigArrays.LimitedBreaker("esql-test-breaker", ByteSizeValue.ofGb(1)); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, mockBreakerService(breaker)); + return BlockFactory.getInstance(breaker, bigArrays); + }, BlockFactory::getGlobalInstance); + return l.stream().map(s -> new Object[] { s }).toList(); + } + + public BlockFactoryTests(@Name("blockFactorySupplier") Supplier blockFactorySupplier) { + this.blockFactory = blockFactorySupplier.get(); + this.breaker = blockFactory.breaker(); + this.bigArrays = blockFactory.bigArrays(); + } + + @Before + @After + public void checkBreaker() { + assertThat(breaker.getUsed(), is(0L)); + } + + public void testPreAdjusters() { + for (int i = 0; i < 1000; i++) { + int positions = randomIntBetween(1, 16384); + long preAdjustBytes = blockFactory.preAdjustBreakerForBoolean(positions); + assertThat(preAdjustBytes, is((long) positions)); + blockFactory.adjustBreaker(-preAdjustBytes, true); + + preAdjustBytes = blockFactory.preAdjustBreakerForInt(positions); + assertThat(preAdjustBytes, is((long) positions * 4)); + blockFactory.adjustBreaker(-preAdjustBytes, true); + + preAdjustBytes = blockFactory.preAdjustBreakerForLong(positions); + assertThat(preAdjustBytes, is((long) positions * 8)); + blockFactory.adjustBreaker(-preAdjustBytes, true); + + preAdjustBytes = blockFactory.preAdjustBreakerForDouble(positions); + assertThat(preAdjustBytes, is((long) positions * 8)); + blockFactory.adjustBreaker(-preAdjustBytes, true); + } + } + + public void testIntBlockBuilderWithPossiblyLargeEstimateEmpty() { + var builder = blockFactory.newIntBlockBuilder(randomIntBetween(0, 2048)); + assertThat(breaker.getUsed(), greaterThan(0L)); + var block = builder.build(); + releaseAndAssertBreaker(block); + + block = blockFactory.newIntArrayBlock(new int[] {}, 0, new int[] {}, new BitSet(), randomOrdering()); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(block); + } + + public 
void testIntBlockBuilderWithPossiblyLargeEstimateSingle() { + var builder = blockFactory.newIntBlockBuilder(randomIntBetween(0, 2048)); + builder.appendInt(randomInt()); + assertThat(breaker.getUsed(), greaterThan(0L)); + var block = builder.build(); + releaseAndAssertBreaker(block); + + block = blockFactory.newIntArrayBlock(new int[] { randomInt() }, 1, new int[] {}, new BitSet(), randomOrdering()); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(block); + + block = blockFactory.newConstantIntBlockWith(randomInt(), randomIntBetween(1, 2048)); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(block); + } + + public void testIntBlockBuilderWithPossiblyLargeEstimateRandom() { + for (int i = 0; i < 1000; i++) { + assertThat(breaker.getUsed(), is(0L)); + var builder = blockFactory.newIntBlockBuilder(randomIntBetween(0, 2048)); + + builder.appendInt(randomInt()); + if (randomBoolean()) { // null-ness + builder.appendNull(); + } + if (randomBoolean()) { // mv-ness + builder.beginPositionEntry(); + builder.appendInt(randomInt()); + builder.appendInt(randomInt()); + builder.endPositionEntry(); + } + builder.appendInt(randomInt()); + assertThat(breaker.getUsed(), greaterThan(0L)); + var block = builder.build(); + releaseAndAssertBreaker(block); + } + } + + public void testIntVectorBuilderWithPossiblyLargeEstimateEmpty() { + var builder = blockFactory.newIntVectorBuilder(randomIntBetween(0, 2048)); + assertThat(breaker.getUsed(), greaterThan(0L)); + var vector = builder.build(); + releaseAndAssertBreaker(vector); + + vector = blockFactory.newIntArrayVector(new int[] {}, 0); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(vector); + } + + public void testIntVectorBuilderWithPossiblyLargeEstimateSingle() { + var builder = blockFactory.newIntVectorBuilder(randomIntBetween(0, 2048)); + builder.appendInt(randomInt()); + assertThat(breaker.getUsed(), greaterThan(0L)); + var vector = builder.build(); + releaseAndAssertBreaker(vector); + + vector = blockFactory.newIntArrayVector(new int[] { randomInt() }, 1); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(vector); + + vector = blockFactory.newConstantIntBlockWith(randomInt(), randomIntBetween(1, 2048)).asVector(); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(vector); + } + + public void testIntVectorBuilderWithPossiblyLargeEstimateRandom() { + for (int i = 0; i < 1000; i++) { + assertThat(breaker.getUsed(), is(0L)); + var builder = blockFactory.newIntVectorBuilder(randomIntBetween(0, 2048)); + builder.appendInt(randomInt()); + if (randomBoolean()) { // constant-ness or not + builder.appendInt(randomInt()); + } + assertThat(breaker.getUsed(), greaterThan(0L)); + var vector = builder.build(); + releaseAndAssertBreaker(vector); + } + } + + public void testLongBlockBuilderWithPossiblyLargeEstimateEmpty() { + var builder = blockFactory.newLongBlockBuilder(randomIntBetween(0, 2048)); + assertThat(breaker.getUsed(), greaterThan(0L)); + var block = builder.build(); + releaseAndAssertBreaker(block); + + block = blockFactory.newLongArrayBlock(new long[] {}, 0, new int[] {}, new BitSet(), randomOrdering()); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(block); + } + + public void testLongBlockBuilderWithPossiblyLargeEstimateSingle() { + var builder = blockFactory.newLongBlockBuilder(randomIntBetween(0, 2048)); + builder.appendLong(randomLong()); + assertThat(breaker.getUsed(), greaterThan(0L)); + 
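The builder here charges the breaker incrementally as values are appended; testPreAdjusters, earlier in this file, covers the complementary path where a caller reserves the estimated bytes up front and hands the reservation back. A short sketch of that reservation pattern against the blockFactory field, assuming the per-position sizes asserted in that test (8 bytes per long):

    // Sketch: reserve breaker space before producing long values, then return the reservation.
    void examplePreAdjust(int positions) {
        long reserved = blockFactory.preAdjustBreakerForLong(positions);  // charges positions * 8 bytes up front
        try {
            // ... fill a builder or copy values, knowing the estimate is already accounted for ...
        } finally {
            blockFactory.adjustBreaker(-reserved, true);                  // give the reservation back
        }
    }
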
var block = builder.build(); + releaseAndAssertBreaker(block); + + block = blockFactory.newLongArrayBlock(new long[] { randomLong() }, 1, new int[] {}, new BitSet(), randomOrdering()); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(block); + + block = blockFactory.newConstantLongBlockWith(randomLong(), randomIntBetween(1, 2048)); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(block); + } + + public void testLongBlockBuilderWithPossiblyLargeEstimateRandom() { + for (int i = 0; i < 1000; i++) { + assertThat(breaker.getUsed(), is(0L)); + var builder = blockFactory.newLongBlockBuilder(randomIntBetween(0, 2048)); + + builder.appendLong(randomLong()); + if (randomBoolean()) { // null-ness + builder.appendNull(); + } + if (randomBoolean()) { // mv-ness + builder.beginPositionEntry(); + builder.appendLong(randomInt()); + builder.appendLong(randomInt()); + builder.endPositionEntry(); + } + builder.appendLong(randomLong()); + assertThat(breaker.getUsed(), greaterThan(0L)); + var block = builder.build(); + releaseAndAssertBreaker(block); + } + } + + public void testLongVectorBuilderWithPossiblyLargeEstimateEmpty() { + var builder = blockFactory.newLongVectorBuilder(randomIntBetween(0, 2048)); + assertThat(breaker.getUsed(), greaterThan(0L)); + var vector = builder.build(); + releaseAndAssertBreaker(vector); + + vector = blockFactory.newLongArrayVector(new long[] {}, 0); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(vector); + } + + public void testLongVectorBuilderWithPossiblyLargeEstimateSingle() { + var builder = blockFactory.newLongVectorBuilder(randomIntBetween(0, 2048)); + builder.appendLong(randomLong()); + assertThat(breaker.getUsed(), greaterThan(0L)); + var vector = builder.build(); + releaseAndAssertBreaker(vector); + + vector = blockFactory.newLongArrayVector(new long[] { randomLong() }, 1); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(vector); + + vector = blockFactory.newConstantLongBlockWith(randomLong(), randomIntBetween(1, 2048)).asVector(); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(vector); + } + + public void testLongVectorBuilderWithPossiblyLargeEstimateRandom() { + for (int i = 0; i < 1000; i++) { + assertThat(breaker.getUsed(), is(0L)); + var builder = blockFactory.newLongVectorBuilder(randomIntBetween(0, 2048)); + builder.appendLong(randomLong()); + if (randomBoolean()) { // constant-ness or not + builder.appendLong(randomLong()); + } + assertThat(breaker.getUsed(), greaterThan(0L)); + var vector = builder.build(); + releaseAndAssertBreaker(vector); + } + } + + public void testDoubleBlockBuilderWithPossiblyLargeEstimateEmpty() { + var builder = blockFactory.newDoubleBlockBuilder(randomIntBetween(0, 2048)); + assertThat(breaker.getUsed(), greaterThan(0L)); + var block = builder.build(); + releaseAndAssertBreaker(block); + + block = blockFactory.newDoubleArrayBlock(new double[] {}, 0, new int[] {}, new BitSet(), randomOrdering()); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(block); + } + + public void testDoubleBlockBuilderWithPossiblyLargeEstimateSingle() { + var builder = blockFactory.newDoubleBlockBuilder(randomIntBetween(0, 2048)); + builder.appendDouble(randomDouble()); + assertThat(breaker.getUsed(), greaterThan(0L)); + var block = builder.build(); + releaseAndAssertBreaker(block); + + block = blockFactory.newDoubleArrayBlock(new double[] { randomDouble() }, 1, new int[] {}, new BitSet(), 
randomOrdering()); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(block); + + block = blockFactory.newConstantDoubleBlockWith(randomDouble(), randomIntBetween(1, 2048)); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(block); + } + + public void testDoubleBlockBuilderWithPossiblyLargeEstimateRandom() { + for (int i = 0; i < 1000; i++) { + assertThat(breaker.getUsed(), is(0L)); + var builder = blockFactory.newDoubleBlockBuilder(randomIntBetween(0, 2048)); + + builder.appendDouble(randomDouble()); + if (randomBoolean()) { // null-ness + builder.appendNull(); + } + if (randomBoolean()) { // mv-ness + builder.beginPositionEntry(); + builder.appendDouble(randomDouble()); + builder.appendDouble(randomDouble()); + builder.endPositionEntry(); + } + builder.appendDouble(randomDouble()); + assertThat(breaker.getUsed(), greaterThan(0L)); + var block = builder.build(); + releaseAndAssertBreaker(block); + } + } + + public void testDoubleVectorBuilderWithPossiblyLargeEstimateEmpty() { + var builder = blockFactory.newDoubleVectorBuilder(randomIntBetween(0, 2048)); + assertThat(breaker.getUsed(), greaterThan(0L)); + var vector = builder.build(); + releaseAndAssertBreaker(vector); + + vector = blockFactory.newDoubleArrayVector(new double[] {}, 0); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(vector); + } + + public void testDoubleVectorBuilderWithPossiblyLargeEstimateSingle() { + var builder = blockFactory.newDoubleVectorBuilder(randomIntBetween(0, 2048)); + builder.appendDouble(randomDouble()); + assertThat(breaker.getUsed(), greaterThan(0L)); + var vector = builder.build(); + releaseAndAssertBreaker(vector); + + vector = blockFactory.newDoubleArrayVector(new double[] { randomDouble() }, 1); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(vector); + + vector = blockFactory.newConstantDoubleBlockWith(randomDouble(), randomIntBetween(1, 2048)).asVector(); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(vector); + } + + public void testDoubleVectorBuilderWithPossiblyLargeEstimateRandom() { + for (int i = 0; i < 1000; i++) { + assertThat(breaker.getUsed(), is(0L)); + var builder = blockFactory.newDoubleVectorBuilder(randomIntBetween(0, 2048)); + builder.appendDouble(randomDouble()); + if (randomBoolean()) { // constant-ness or not + builder.appendDouble(randomDouble()); + } + assertThat(breaker.getUsed(), greaterThan(0L)); + var vector = builder.build(); + releaseAndAssertBreaker(vector); + } + } + + public void testBooleanBlockBuilderWithPossiblyLargeEstimateEmpty() { + var builder = blockFactory.newBooleanBlockBuilder(randomIntBetween(0, 2048)); + assertThat(breaker.getUsed(), greaterThan(0L)); + var block = builder.build(); + releaseAndAssertBreaker(block); + + block = blockFactory.newBooleanArrayBlock(new boolean[] {}, 0, new int[] {}, new BitSet(), randomOrdering()); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(block); + } + + public void testBooleanBlockBuilderWithPossiblyLargeEstimateSingle() { + var builder = blockFactory.newBooleanBlockBuilder(randomIntBetween(0, 2048)); + builder.appendBoolean(randomBoolean()); + assertThat(breaker.getUsed(), greaterThan(0L)); + var block = builder.build(); + releaseAndAssertBreaker(block); + + block = blockFactory.newBooleanArrayBlock(new boolean[] { randomBoolean() }, 1, new int[] {}, new BitSet(), randomOrdering()); + assertThat(breaker.getUsed(), greaterThan(0L)); + 
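The ...Random variants of these tests mix nulls and multi-value positions into a single builder; the shape of that usage, condensed into one sketch against the blockFactory field (the values and the size estimate are illustrative):

    // Sketch: one factory-backed builder producing a null, a single value, and a multi-value position.
    void exampleMixedPositions() {
        var builder = blockFactory.newBooleanBlockBuilder(3);  // an estimate, not a hard limit
        builder.appendNull();                                  // position 0: null
        builder.appendBoolean(true);                           // position 1: single value
        builder.beginPositionEntry();                          // position 2: two values
        builder.appendBoolean(false);
        builder.appendBoolean(true);
        builder.endPositionEntry();
        var block = builder.build();
        releaseAndAssertBreaker(block);                        // breaker returns to zero once released
    }
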
releaseAndAssertBreaker(block); + + block = blockFactory.newConstantBooleanBlockWith(randomBoolean(), randomIntBetween(1, 2048)); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(block); + } + + public void testBooleanBlockBuilderWithPossiblyLargeEstimateRandom() { + for (int i = 0; i < 1000; i++) { + assertThat(breaker.getUsed(), is(0L)); + var builder = blockFactory.newBooleanBlockBuilder(randomIntBetween(0, 2048)); + + builder.appendBoolean(randomBoolean()); + if (randomBoolean()) { // null-ness + builder.appendNull(); + } + if (randomBoolean()) { // mv-ness + builder.beginPositionEntry(); + builder.appendBoolean(randomBoolean()); + builder.appendBoolean(randomBoolean()); + builder.endPositionEntry(); + } + builder.appendBoolean(randomBoolean()); + assertThat(breaker.getUsed(), greaterThan(0L)); + var block = builder.build(); + releaseAndAssertBreaker(block); + } + } + + public void testBooleanVectorBuilderWithPossiblyLargeEstimateEmpty() { + var builder = blockFactory.newBooleanVectorBuilder(randomIntBetween(0, 2048)); + assertThat(breaker.getUsed(), greaterThan(0L)); + var vector = builder.build(); + releaseAndAssertBreaker(vector); + + vector = blockFactory.newBooleanArrayVector(new boolean[] {}, 0); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(vector); + } + + public void testBooleanVectorBuilderWithPossiblyLargeEstimateSingle() { + var builder = blockFactory.newBooleanVectorBuilder(randomIntBetween(0, 2048)); + builder.appendBoolean(randomBoolean()); + assertThat(breaker.getUsed(), greaterThan(0L)); + var vector = builder.build(); + releaseAndAssertBreaker(vector); + + vector = blockFactory.newBooleanArrayVector(new boolean[] { randomBoolean() }, 1); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(vector); + + vector = blockFactory.newConstantBooleanBlockWith(randomBoolean(), randomIntBetween(1, 2048)).asVector(); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(vector); + } + + public void testBooleanVectorBuilderWithPossiblyLargeEstimateRandom() { + for (int i = 0; i < 1000; i++) { + assertThat(breaker.getUsed(), is(0L)); + var builder = blockFactory.newBooleanVectorBuilder(randomIntBetween(0, 2048)); + builder.appendBoolean(randomBoolean()); + if (randomBoolean()) { // constant-ness or not + builder.appendBoolean(randomBoolean()); + } + assertThat(breaker.getUsed(), greaterThan(0L)); + var vector = builder.build(); + releaseAndAssertBreaker(vector); + } + } + + public void testBytesRefBlockBuilderWithPossiblyLargeEstimateEmpty() { + var builder = blockFactory.newBytesRefBlockBuilder(randomIntBetween(0, 2048)); + assertThat(breaker.getUsed(), greaterThan(0L)); + var block = builder.build(); + releaseAndAssertBreaker(block); + + var emptyArray = new BytesRefArray(0, bigArrays); + block = blockFactory.newBytesRefArrayBlock(emptyArray, 0, new int[] {}, new BitSet(), randomOrdering()); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(block); + } + + public void testBytesRefBlockBuilderWithPossiblyLargeEstimateSingle() { + var builder = blockFactory.newBytesRefBlockBuilder(randomIntBetween(0, 2048)); + builder.appendBytesRef(randomBytesRef()); + assertThat(breaker.getUsed(), greaterThan(0L)); + var block = builder.build(); + releaseAndAssertBreaker(block); + + var array = new BytesRefArray(1, bigArrays); + array.append(randomBytesRef()); + block = blockFactory.newBytesRefArrayBlock(array, 1, new int[] {}, new BitSet(), randomOrdering()); + 
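Unlike the primitive variants, the BytesRef block here wraps a BytesRefArray whose storage comes from the factory's BigArrays, so with the breaker-backed factory used by these tests its bytes land on the same breaker that tracks the block itself. A condensed sketch of that wrapping, mirroring the calls in this test (the literal value is illustrative):

    // Sketch: BytesRef storage is allocated from the factory's BigArrays, then wrapped in a vector.
    void exampleBytesRefVector() {
        BytesRefArray array = new BytesRefArray(1, blockFactory.bigArrays());
        array.append(new BytesRef("cat"));
        var vector = blockFactory.newBytesRefArrayVector(array, 1);  // one position backed by the array
        releaseAndAssertBreaker(vector);
    }
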
assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(block); + + block = blockFactory.newConstantBytesRefBlockWith(randomBytesRef(), randomIntBetween(1, 2048)); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(block); + } + + public void testBytesRefBlockBuilderWithPossiblyLargeEstimateRandom() { + for (int i = 0; i < 1000; i++) { + assertThat(breaker.getUsed(), is(0L)); + var builder = blockFactory.newBytesRefBlockBuilder(randomIntBetween(0, 2048)); + + builder.appendBytesRef(randomBytesRef()); + if (randomBoolean()) { // null-ness + builder.appendNull(); + } + if (randomBoolean()) { // mv-ness + builder.beginPositionEntry(); + builder.appendBytesRef(randomBytesRef()); + builder.appendBytesRef(randomBytesRef()); + builder.endPositionEntry(); + } + builder.appendBytesRef(randomBytesRef()); + assertThat(breaker.getUsed(), greaterThan(0L)); + var block = builder.build(); + releaseAndAssertBreaker(block); + } + } + + public void testBytesRefVectorBuilderWithPossiblyLargeEstimateEmpty() { + var builder = blockFactory.newBytesRefVectorBuilder(randomIntBetween(0, 2048)); + assertThat(breaker.getUsed(), greaterThan(0L)); + var vector = builder.build(); + releaseAndAssertBreaker(vector); + + var emptyArray = new BytesRefArray(0, bigArrays); + vector = blockFactory.newBytesRefArrayVector(emptyArray, 0); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(vector); + } + + public void testBytesRefVectorBuilderWithPossiblyLargeEstimateSingle() { + var builder = blockFactory.newBytesRefVectorBuilder(randomIntBetween(0, 2048)); + builder.appendBytesRef(randomBytesRef()); + assertThat(breaker.getUsed(), greaterThan(0L)); + var vector = builder.build(); + releaseAndAssertBreaker(vector); + + var array = new BytesRefArray(1, bigArrays); + array.append(randomBytesRef()); + vector = blockFactory.newBytesRefArrayVector(array, 0); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(vector); + + vector = blockFactory.newConstantBytesRefBlockWith(randomBytesRef(), randomIntBetween(1, 2048)).asVector(); + assertThat(breaker.getUsed(), greaterThan(0L)); + releaseAndAssertBreaker(vector); + } + + public void testBytesRefVectorBuilderWithPossiblyLargeEstimateRandom() { + for (int i = 0; i < 1000; i++) { + assertThat(breaker.getUsed(), is(0L)); + var builder = blockFactory.newBytesRefVectorBuilder(randomIntBetween(0, 2048)); + builder.appendBytesRef(randomBytesRef()); + if (randomBoolean()) { // constant-ness or not + builder.appendBytesRef(randomBytesRef()); + } + assertThat(breaker.getUsed(), greaterThan(0L)); + var vector = builder.build(); + releaseAndAssertBreaker(vector); + } + } + + static BytesRef randomBytesRef() { + return new BytesRef(randomByteArrayOfLength(between(1, 20))); + } + + static Block.MvOrdering randomOrdering() { + return randomFrom(Block.MvOrdering.values()); + } + + void releaseAndAssertBreaker(T data) { + assertThat(breaker.getUsed(), greaterThan(0L)); + Releasables.closeExpectNoException(data); + assertThat(breaker.getUsed(), is(0L)); + } + + // A breaker service that always returns the given breaker for getBreaker(CircuitBreaker.REQUEST) + static CircuitBreakerService mockBreakerService(CircuitBreaker breaker) { + CircuitBreakerService breakerService = mock(CircuitBreakerService.class); + when(breakerService.getBreaker(CircuitBreaker.REQUEST)).thenReturn(breaker); + return breakerService; + } +} diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java index 7bfeb57e6999a..edbc59f9497fc 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; @@ -95,6 +96,6 @@ protected final BigArrays nonBreakingBigArrays() { * A {@link DriverContext} with a nonBreakingBigArrays. */ protected final DriverContext driverContext() { - return new DriverContext(nonBreakingBigArrays()); + return new DriverContext(nonBreakingBigArrays(), BlockFactory.getNonBreakingInstance()); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java index 7365a55778084..50d41978aa84f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -214,7 +215,8 @@ protected void doRun() { */ DriverContext driverContext() { return new DriverContext( - new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking() + new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking(), + BlockFactory.getNonBreakingInstance() ); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverContextTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverContextTests.java index dcf56c09efe05..99d7a0eb01748 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverContextTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverContextTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.core.Releasable; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; @@ -144,7 +145,10 @@ static class AssertingDriverContext extends DriverContext { volatile Thread thread; AssertingDriverContext() { - super(new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService())); + super( + new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, 
new NoneCircuitBreakerService()), + BlockFactory.getNonBreakingInstance() + ); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java index 8355a5a444bf5..6a2ace060e1e6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.CrankyCircuitBreakerService; @@ -80,9 +81,11 @@ public final void testSimpleLargeInput() { */ public final void testSimpleCircuitBreaking() { BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, smallEnoughToCircuitBreak()); + CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); + BlockFactory blockFactory = BlockFactory.getInstance(breaker, bigArrays); Exception e = expectThrows( CircuitBreakingException.class, - () -> assertSimple(new DriverContext(bigArrays), between(1_000, 10_000)) + () -> assertSimple(new DriverContext(bigArrays, blockFactory), between(1_000, 10_000)) ); assertThat(e.getMessage(), equalTo(MockBigArrays.ERROR_MESSAGE)); assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L)); @@ -97,8 +100,9 @@ public final void testSimpleCircuitBreaking() { public final void testSimpleWithCranky() { CrankyCircuitBreakerService breaker = new CrankyCircuitBreakerService(); BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, breaker).withCircuitBreaking(); + BlockFactory blockFactory = BlockFactory.getInstance(breaker.getBreaker("request"), bigArrays); try { - assertSimple(new DriverContext(bigArrays), between(1_000, 10_000)); + assertSimple(new DriverContext(bigArrays, blockFactory), between(1_000, 10_000)); // Either we get lucky and cranky doesn't throw and the test completes or we don't and it throws } catch (CircuitBreakingException e) { assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); @@ -173,7 +177,7 @@ public static void runDriver(List drivers) { drivers.add( new Driver( "dummy-session", - new DriverContext(BigArrays.NON_RECYCLING_INSTANCE), + new DriverContext(BigArrays.NON_RECYCLING_INSTANCE, BlockFactory.getNonBreakingInstance()), () -> "dummy-driver", new SequenceLongBlockSourceOperator(LongStream.range(0, between(1, 100)), between(1, 100)), List.of(), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java index bb2713e105b93..c8250eba5703a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; +import 
org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleBlock; @@ -26,7 +27,8 @@ public class RowOperatorTests extends ESTestCase { final DriverContext driverContext = new DriverContext( - new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking() + new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking(), + BlockFactory.getNonBreakingInstance() ); public void testBoolean() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceDoubleBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceDoubleBlockSourceOperator.java index 4c1590ae9b8ff..af14d0be0710c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceDoubleBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceDoubleBlockSourceOperator.java @@ -7,7 +7,7 @@ package org.elasticsearch.compute.operator; -import org.elasticsearch.compute.data.DoubleArrayVector; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; import java.util.List; @@ -48,7 +48,7 @@ protected Page createPage(int positionOffset, int length) { array[i] = values[positionOffset + i]; } currentPosition += length; - return new Page(new DoubleArrayVector(array, array.length).asBlock()); + return new Page(BlockFactory.getNonBreakingInstance().newDoubleArrayVector(array, array.length).asBlock()); } protected int remaining() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceLongBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceLongBlockSourceOperator.java index 8600237401ed0..0aa78f3ad0ab3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceLongBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceLongBlockSourceOperator.java @@ -7,7 +7,7 @@ package org.elasticsearch.compute.operator; -import org.elasticsearch.compute.data.LongArrayVector; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; import java.util.List; @@ -48,7 +48,7 @@ protected Page createPage(int positionOffset, int length) { array[i] = values[positionOffset + i]; } currentPosition += length; - return new Page(new LongArrayVector(array, array.length).asBlock()); + return new Page(BlockFactory.getNonBreakingInstance().newLongArrayVector(array, array.length).asBlock()); // TODO: just for compile } protected int remaining() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index 87fd1513aaf2d..5b6b33ea0b80a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import 
org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ConstantIntVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Page; @@ -478,7 +479,8 @@ public void sendResponse(Exception exception) throws IOException { */ DriverContext driverContext() { return new DriverContext( - new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking() + new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking(), + BlockFactory.getNonBreakingInstance() ); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java index a3e060f5693d8..7491ffde6766e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleBlock; @@ -422,12 +423,15 @@ public void testTopNTwoColumns() { } public void testCollectAllValues() { + DriverContext driverContext = driverContext(); + BlockFactory blockFactory = driverContext.blockFactory(); + int size = 10; int topCount = 3; List blocks = new ArrayList<>(); List> expectedTop = new ArrayList<>(); - IntBlock keys = new IntArrayVector(IntStream.range(0, size).toArray(), size).asBlock(); + IntBlock keys = blockFactory.newIntArrayVector(IntStream.range(0, size).toArray(), size).asBlock(); List topKeys = new ArrayList<>(IntStream.range(size - topCount, size).boxed().toList()); Collections.reverse(topKeys); expectedTop.add(topKeys); @@ -461,7 +465,6 @@ public void testCollectAllValues() { } List> actualTop = new ArrayList<>(); - DriverContext driverContext = driverContext(); try ( Driver driver = new Driver( driverContext, @@ -488,13 +491,16 @@ public void testCollectAllValues() { } public void testCollectAllValues_RandomMultiValues() { + DriverContext driverContext = driverContext(); + BlockFactory blockFactory = driverContext.blockFactory(); + int rows = 10; int topCount = 3; int blocksCount = 20; List blocks = new ArrayList<>(); List> expectedTop = new ArrayList<>(); - IntBlock keys = new IntArrayVector(IntStream.range(0, rows).toArray(), rows).asBlock(); + IntBlock keys = blockFactory.newIntArrayVector(IntStream.range(0, rows).toArray(), rows).asBlock(); List topKeys = new ArrayList<>(IntStream.range(rows - topCount, rows).boxed().toList()); Collections.reverse(topKeys); expectedTop.add(topKeys); @@ -546,7 +552,6 @@ public void testCollectAllValues_RandomMultiValues() { expectedTop.add(eTop); } - DriverContext driverContext = driverContext(); List> actualTop = new ArrayList<>(); try ( Driver driver = new Driver( diff --git a/x-pack/plugin/esql/compute/src/test/resources/META-INF/services/org.elasticsearch.compute.data.BlockFactoryParameters 
b/x-pack/plugin/esql/compute/src/test/resources/META-INF/services/org.elasticsearch.compute.data.BlockFactoryParameters new file mode 100644 index 0000000000000..2536d1e189285 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/resources/META-INF/services/org.elasticsearch.compute.data.BlockFactoryParameters @@ -0,0 +1,8 @@ +# +# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +# or more contributor license agreements. Licensed under the Elastic License +# 2.0; you may not use this file except in compliance with the Elastic License +# 2.0. +# + +org.elasticsearch.compute.TestBlockFactoryParameters diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java index 3829ed3ac3198..d6611881f8546 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.LongBlock; @@ -230,7 +231,8 @@ public void testMultipleMatches() { static DriverContext driverContext() { return new DriverContext( - new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking() + new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking(), + BlockFactory.getGlobalInstance() ); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index df7058e28fb43..be75c0d1c05d6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.ValueSources; @@ -98,18 +99,21 @@ public class EnrichLookupService { private final TransportService transportService; private final Executor executor; private final BigArrays bigArrays; + private final BlockFactory blockFactory; public EnrichLookupService( ClusterService clusterService, SearchService searchService, TransportService transportService, - BigArrays bigArrays + BigArrays bigArrays, + BlockFactory blockFactory ) { this.clusterService = clusterService; this.searchService = searchService; this.transportService = transportService; this.executor = transportService.getThreadPool().executor(EsqlPlugin.ESQL_THREAD_POOL_NAME); this.bigArrays = bigArrays; + this.blockFactory = blockFactory; transportService.registerRequestHandler(LOOKUP_ACTION_NAME, this.executor, LookupRequest::new, new TransportHandler()); } @@ -208,7 +212,7 @@ 
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java
index df7058e28fb43..be75c0d1c05d6 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java
@@ -25,6 +25,7 @@
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.compute.data.Block;
+import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.data.ElementType;
 import org.elasticsearch.compute.data.Page;
 import org.elasticsearch.compute.lucene.ValueSources;
@@ -98,18 +99,21 @@ public class EnrichLookupService {
     private final TransportService transportService;
     private final Executor executor;
     private final BigArrays bigArrays;
+    private final BlockFactory blockFactory;
 
     public EnrichLookupService(
         ClusterService clusterService,
         SearchService searchService,
         TransportService transportService,
-        BigArrays bigArrays
+        BigArrays bigArrays,
+        BlockFactory blockFactory
     ) {
         this.clusterService = clusterService;
         this.searchService = searchService;
         this.transportService = transportService;
         this.executor = transportService.getThreadPool().executor(EsqlPlugin.ESQL_THREAD_POOL_NAME);
         this.bigArrays = bigArrays;
+        this.blockFactory = blockFactory;
         transportService.registerRequestHandler(LOOKUP_ACTION_NAME, this.executor, LookupRequest::new, new TransportHandler());
     }
 
@@ -208,7 +212,7 @@ private void doLookup(
         OutputOperator outputOperator = new OutputOperator(List.of(), Function.identity(), result::set);
         Driver driver = new Driver(
             "enrich-lookup:" + sessionId,
-            new DriverContext(bigArrays),
+            new DriverContext(bigArrays, blockFactory),
             () -> lookupDescription(sessionId, shardId, matchType, matchField, extractFields, inputPage.getPositionCount()),
             queryOperator,
             intermediateOperators,
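EnrichLookupService now keeps the BlockFactory it is constructed with and hands it, together with BigArrays, to the DriverContext of each ad-hoc lookup driver it starts. A compact sketch of that wiring, using only the two-argument DriverContext constructor from this diff; the class and method names are illustrative:

    import org.elasticsearch.common.util.BigArrays;
    import org.elasticsearch.compute.data.BlockFactory;
    import org.elasticsearch.compute.operator.DriverContext;

    class LookupDriverWiringSketch {
        private final BigArrays bigArrays;
        private final BlockFactory blockFactory;

        LookupDriverWiringSketch(BigArrays bigArrays, BlockFactory blockFactory) {
            this.bigArrays = bigArrays;
            this.blockFactory = blockFactory;
        }

        // One fresh context per lookup driver; blocks built through it go via the shared factory.
        DriverContext newLookupContext() {
            return new DriverContext(bigArrays, blockFactory);
        }
    }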
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java
index d5b8b6df1db8c..700edcf5582c2 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java
@@ -11,6 +11,7 @@
 import org.elasticsearch.common.util.iterable.Iterables;
 import org.elasticsearch.compute.Describable;
 import org.elasticsearch.compute.data.Block;
+import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.data.ElementType;
 import org.elasticsearch.compute.data.Page;
 import org.elasticsearch.compute.lucene.DataPartitioning;
@@ -108,6 +109,7 @@ public class LocalExecutionPlanner {
     private final String sessionId;
     private final CancellableTask parentTask;
     private final BigArrays bigArrays;
+    private final BlockFactory blockFactory;
     private final EsqlConfiguration configuration;
     private final ExchangeSourceHandler exchangeSourceHandler;
     private final ExchangeSinkHandler exchangeSinkHandler;
@@ -118,6 +120,7 @@ public LocalExecutionPlanner(
         String sessionId,
         CancellableTask parentTask,
         BigArrays bigArrays,
+        BlockFactory blockFactory,
         EsqlConfiguration configuration,
         ExchangeSourceHandler exchangeSourceHandler,
         ExchangeSinkHandler exchangeSinkHandler,
@@ -127,6 +130,7 @@ public LocalExecutionPlanner(
         this.sessionId = sessionId;
         this.parentTask = parentTask;
         this.bigArrays = bigArrays;
+        this.blockFactory = blockFactory;
         this.exchangeSourceHandler = exchangeSourceHandler;
         this.exchangeSinkHandler = exchangeSinkHandler;
         this.enrichLookupService = enrichLookupService;
@@ -144,14 +148,15 @@ public LocalExecutionPlan plan(PhysicalPlan node) {
             configuration.pragmas().taskConcurrency(),
             configuration.pragmas().dataPartitioning(),
             configuration.pragmas().pageSize(),
-            bigArrays
+            bigArrays,
+            blockFactory
         );
 
         PhysicalOperation physicalOperation = plan(node, context);
 
         context.addDriverFactory(
             new DriverFactory(
-                new DriverSupplier(context.bigArrays, physicalOperation, configuration.pragmas().statusInterval()),
+                new DriverSupplier(context.bigArrays, context.blockFactory, physicalOperation, configuration.pragmas().statusInterval()),
                 context.driverParallelism().get()
             )
         );
@@ -659,7 +664,8 @@ public record LocalExecutionPlannerContext(
         int taskConcurrency,
         DataPartitioning dataPartitioning,
         int configuredPageSize,
-        BigArrays bigArrays
+        BigArrays bigArrays,
+        BlockFactory blockFactory
     ) {
         void addDriverFactory(DriverFactory driverFactory) {
             driverFactories.add(driverFactory);
@@ -683,7 +689,7 @@ int pageSize(Integer estimatedRowSize) {
         }
     }
 
-    record DriverSupplier(BigArrays bigArrays, PhysicalOperation physicalOperation, TimeValue statusInterval)
+    record DriverSupplier(BigArrays bigArrays, BlockFactory blockFactory, PhysicalOperation physicalOperation, TimeValue statusInterval)
         implements
             Function<String, Driver>,
             Describable {
@@ -693,7 +699,7 @@ public Driver apply(String sessionId) {
         List<Operator> operators = new ArrayList<>();
         SinkOperator sink = null;
         boolean success = false;
-        var driverContext = new DriverContext(bigArrays);
+        var driverContext = new DriverContext(bigArrays, blockFactory);
         try {
             source = physicalOperation.source(driverContext);
             physicalOperation.operators(operators, driverContext);
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java
index 5c14a19afd6f2..7d332ce28025d 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java
@@ -25,6 +25,7 @@
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.concurrent.CountDown;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.data.Page;
 import org.elasticsearch.compute.operator.Driver;
 import org.elasticsearch.compute.operator.DriverTaskRunner;
@@ -86,6 +87,8 @@ public class ComputeService {
     private static final Logger LOGGER = LogManager.getLogger(ComputeService.class);
     private final SearchService searchService;
     private final BigArrays bigArrays;
+    private final BlockFactory blockFactory;
+
     private final TransportService transportService;
     private final Executor esqlExecutor;
     private final DriverTaskRunner driverRunner;
@@ -98,11 +101,13 @@ public ComputeService(
         ExchangeService exchangeService,
         EnrichLookupService enrichLookupService,
         ThreadPool threadPool,
-        BigArrays bigArrays
+        BigArrays bigArrays,
+        BlockFactory blockFactory
     ) {
         this.searchService = searchService;
         this.transportService = transportService;
         this.bigArrays = bigArrays.withCircuitBreaking();
+        this.blockFactory = blockFactory;
         this.esqlExecutor = threadPool.executor(ESQL_THREAD_POOL_NAME);
         transportService.registerRequestHandler(DATA_ACTION_NAME, this.esqlExecutor, DataNodeRequest::new, new DataNodeRequestHandler());
         this.driverRunner = new DriverTaskRunner(transportService, this.esqlExecutor);
@@ -238,6 +243,7 @@ void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan,
             context.sessionId,
             task,
             bigArrays,
+            blockFactory,
             context.configuration,
             context.exchangeSource(),
             context.exchangeSink(),
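After these planner changes, DriverSupplier carries the BlockFactory alongside BigArrays and creates one DriverContext per driver it builds. A sketch of that per-driver pattern, modelled on the Function<String, Driver> shape shown above but returning only the context; everything except DriverContext, BigArrays and BlockFactory is illustrative:

    import java.util.function.Function;

    import org.elasticsearch.common.util.BigArrays;
    import org.elasticsearch.compute.data.BlockFactory;
    import org.elasticsearch.compute.operator.DriverContext;

    record DriverContextSupplierSketch(BigArrays bigArrays, BlockFactory blockFactory)
        implements Function<String, DriverContext> {

        @Override
        public DriverContext apply(String sessionId) {
            // A new context per driver keeps per-driver state isolated, while the factory (and the
            // breaker behind it) stays shared across all drivers of the plan.
            return new DriverContext(bigArrays, blockFactory);
        }
    }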
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlBlockFactoryParams.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlBlockFactoryParams.java
new file mode 100644
index 0000000000000..1ca1d5e217f6a
--- /dev/null
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlBlockFactoryParams.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.plugin;
+
+import org.elasticsearch.common.breaker.CircuitBreaker;
+import org.elasticsearch.common.breaker.NoopCircuitBreaker;
+import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.compute.data.BlockFactoryParameters;
+
+/** A provider for sharing the given parameters with the compute engine's block factory. */
+public class EsqlBlockFactoryParams implements BlockFactoryParameters {
+
+    static final CircuitBreaker NOOP_BREAKER = new NoopCircuitBreaker("ESQL-noop-breaker");
+
+    static CircuitBreaker ESQL_BREAKER;
+    static BigArrays ESQL_BIGARRAYS;
+
+    static void init(BigArrays bigArrays) {
+        ESQL_BREAKER = bigArrays.breakerService().getBreaker("request");
+        ESQL_BIGARRAYS = bigArrays;
+    }
+
+    final CircuitBreaker breaker;
+    final BigArrays bigArrays;
+
+    public EsqlBlockFactoryParams() {
+        this.breaker = ESQL_BREAKER;
+        this.bigArrays = ESQL_BIGARRAYS;
+    }
+
+    @Override
+    public CircuitBreaker breaker() {
+        return breaker != null ? breaker : NOOP_BREAKER;
+    }
+
+    @Override
+    public BigArrays bigArrays() {
+        return bigArrays != null ? bigArrays : BigArrays.NON_RECYCLING_INSTANCE;
+    }
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java
index fbaa812f68db7..550e42e715228 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java
@@ -16,6 +16,7 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
+import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.operator.exchange.ExchangeService;
 import org.elasticsearch.search.SearchService;
 import org.elasticsearch.tasks.CancellableTask;
@@ -66,14 +67,17 @@ public TransportEsqlQueryAction(
         this.requestExecutor = threadPool.executor(EsqlPlugin.ESQL_THREAD_POOL_NAME);
         exchangeService.registerTransportHandler(transportService);
         this.exchangeService = exchangeService;
-        this.enrichLookupService = new EnrichLookupService(clusterService, searchService, transportService, bigArrays);
+        EsqlBlockFactoryParams.init(bigArrays);
+        var blockFactory = BlockFactory.getGlobalInstance();
+        this.enrichLookupService = new EnrichLookupService(clusterService, searchService, transportService, bigArrays, blockFactory);
         this.computeService = new ComputeService(
             searchService,
             transportService,
             exchangeService,
             enrichLookupService,
             threadPool,
-            bigArrays
+            bigArrays,
+            blockFactory
         );
         this.settings = settings;
     }
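TransportEsqlQueryAction calls EsqlBlockFactoryParams.init(bigArrays) before asking for BlockFactory.getGlobalInstance(), so the SPI-provided parameters see the real "request" breaker and BigArrays rather than the NOOP_BREAKER / NON_RECYCLING_INSTANCE fallbacks. Whether the global factory reads its parameters lazily or at class load is not shown in this section, so treat the ordering comment below as an assumption based on the diff. A sketch of the bootstrap, placed in the same package because init(...) is package-private:

    package org.elasticsearch.xpack.esql.plugin;

    import org.elasticsearch.common.util.BigArrays;
    import org.elasticsearch.compute.data.BlockFactory;

    class EsqlBlockFactoryBootstrapSketch {
        static BlockFactory bootstrap(BigArrays bigArrays) {
            // Publish the request breaker and BigArrays first; otherwise EsqlBlockFactoryParams falls
            // back to its no-op breaker and BigArrays.NON_RECYCLING_INSTANCE defaults.
            EsqlBlockFactoryParams.init(bigArrays);
            return BlockFactory.getGlobalInstance();
        }
    }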
diff --git a/x-pack/plugin/esql/src/main/resources/META-INF/services/org.elasticsearch.compute.data.BlockFactoryParameters b/x-pack/plugin/esql/src/main/resources/META-INF/services/org.elasticsearch.compute.data.BlockFactoryParameters
new file mode 100644
index 0000000000000..e397954c84cbe
--- /dev/null
+++ b/x-pack/plugin/esql/src/main/resources/META-INF/services/org.elasticsearch.compute.data.BlockFactoryParameters
@@ -0,0 +1,8 @@
+#
+# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+# or more contributor license agreements. Licensed under the Elastic License
+# 2.0; you may not use this file except in compliance with the Elastic License
+# 2.0.
+#
+
+org.elasticsearch.xpack.esql.plugin.EsqlBlockFactoryParams
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java
index 8a18d7b3a26ed..caf907c94feb5 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java
@@ -20,6 +20,7 @@
 import org.elasticsearch.common.util.MockBigArrays;
 import org.elasticsearch.common.util.PageCacheRecycler;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
+import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.data.Page;
 import org.elasticsearch.compute.operator.Driver;
 import org.elasticsearch.compute.operator.DriverRunner;
@@ -334,6 +335,7 @@ private ActualResults executePlan() throws Exception {
             sessionId,
             new CancellableTask(1, "transport", "esql", null, TaskId.EMPTY_TASK_ID, Map.of()),
             bigArrays,
+            BlockFactory.getGlobalInstance(),
             configuration,
             exchangeSource,
             exchangeSink,
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
index 5efd165b53173..25e6c3672f020 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
@@ -13,6 +13,7 @@
 import org.elasticsearch.common.util.MockBigArrays;
 import org.elasticsearch.common.util.PageCacheRecycler;
 import org.elasticsearch.compute.data.Block;
+import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.data.BlockUtils;
 import org.elasticsearch.compute.data.Page;
 import org.elasticsearch.compute.operator.DriverContext;
@@ -618,7 +619,8 @@ private static void writeToTempDir(String subdir, String str, String extension)
      */
    protected DriverContext driverContext() {
         return new DriverContext(
-            new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking()
+            new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking(),
+            BlockFactory.getGlobalInstance()
         );
     }
 }
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java
index efe8e773bfdaa..aa13838b28266 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java
@@ -12,6 +12,7 @@
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.util.MockBigArrays;
 import org.elasticsearch.common.util.PageCacheRecycler;
+import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.operator.DriverContext;
 import org.elasticsearch.compute.operator.EvalOperator;
 import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
@@ -149,7 +150,8 @@ private static FieldAttribute field(String name, DataType type) {
 
     static DriverContext driverContext() {
         return new DriverContext(
-            new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking()
+            new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking(),
+            BlockFactory.getGlobalInstance()
         );
     }
 }
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java
index b1965f19e44f5..645833f01ba28 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java
@@ -19,6 +19,7 @@
 import org.apache.lucene.tests.index.RandomIndexWriter;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.lucene.LuceneSourceOperator;
 import org.elasticsearch.compute.lucene.LuceneTopNSourceOperator;
 import org.elasticsearch.compute.operator.SourceOperator;
@@ -118,6 +119,7 @@ private LocalExecutionPlanner planner() throws IOException {
             "test",
             null,
             BigArrays.NON_RECYCLING_INSTANCE,
+            BlockFactory.getGlobalInstance(),
             config(),
             null,
             null,