From 37a8fc998b16a94b1c78090414b11a265373e566 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 2 Oct 2023 15:18:32 -0400 Subject: [PATCH 1/6] ESQL: Read from the `BlockFactory` This links the `BlockFactory` into the `Block` serialization code. With this blocks that are deserialized from over the wire are tracked. --- .../test/AbstractSerializationTestCase.java | 71 +++---- .../test/AbstractWireTestCase.java | 131 ++++++++----- .../test/AbstractXContentTestCase.java | 49 +++-- .../test/EqualsHashCodeTestUtils.java | 67 +++++-- .../compute/data/BooleanBlock.java | 34 ++-- .../compute/data/BooleanVector.java | 13 +- .../compute/data/BytesRefBlock.java | 34 ++-- .../compute/data/BytesRefVector.java | 13 +- .../compute/data/ConstantBytesRefVector.java | 6 +- .../compute/data/DoubleBlock.java | 34 ++-- .../compute/data/DoubleVector.java | 13 +- .../elasticsearch/compute/data/IntBlock.java | 34 ++-- .../elasticsearch/compute/data/IntVector.java | 13 +- .../elasticsearch/compute/data/LongBlock.java | 34 ++-- .../compute/data/LongVector.java | 13 +- .../org/elasticsearch/compute/data/Block.java | 13 +- .../compute/data/BlockFactory.java | 31 ++- .../compute/data/ConstantNullBlock.java | 1 + .../org/elasticsearch/compute/data/Page.java | 16 -- .../compute/data/X-Block.java.st | 34 ++-- .../compute/data/X-ConstantVector.java.st | 14 +- .../compute/data/X-Vector.java.st | 13 +- .../compute/operator/LimitOperator.java | 6 +- .../compute/data/BasicPageTests.java | 78 ++++---- .../compute/data/BigArrayVectorTests.java | 11 +- .../compute/data/BlockSerializationTests.java | 73 ++++--- .../compute/data/MultiValueBlockTests.java | 62 +++--- .../compute/data/SerializationTestCase.java | 27 ++- .../compute/operator/LimitOperatorTests.java | 78 ++++++-- .../exchange/ExchangeServiceTests.java | 2 +- .../xpack/esql/action/EsqlActionIT.java | 1 + .../xpack/esql/lookup/EnrichLookupIT.java | 2 + .../xpack/esql/plugin/CanMatchIT.java | 185 ++++++++++++------ .../xpack/esql/plugin/EsqlPlugin.java | 17 +- .../esql/plugin/TransportEsqlQueryAction.java | 11 +- .../esql/action/EsqlQueryResponseTests.java | 78 ++++++-- 36 files changed, 853 insertions(+), 459 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractSerializationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractSerializationTestCase.java index 5abe6bc2fc640..238f523872f83 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractSerializationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractSerializationTestCase.java @@ -39,6 +39,7 @@ public final void testFromXContent() throws IOException { .randomFieldsExcludeFilter(getRandomFieldsExcludeFilter()) .assertEqualsConsumer(this::assertEqualInstances) .assertToXContentEquivalence(assertToXContentEquivalence()) + .dispose(this::dispose) .test(); } @@ -61,41 +62,45 @@ public final void testConcurrentToXContent() throws IOException, InterruptedExce () -> randomFrom(XContentType.values()) ); T testInstance = createXContextTestInstance(xContentType); - ToXContent.Params params = new ToXContent.DelegatingMapParams( - singletonMap(RestSearchAction.TYPED_KEYS_PARAM, "true"), - getToXContentParams() - ); - boolean humanReadable = randomBoolean(); - BytesRef firstTimeBytes = toXContent(asXContent(testInstance), xContentType, params, humanReadable).toBytesRef(); - - /* - * 500 rounds seems to consistently reproduce the issue on Nik's - * laptop. 
Larger numbers are going to be slower but more likely - * to reproduce the issue. - */ - int rounds = scaledRandomIntBetween(300, 5000); - concurrentTest(() -> { - try { - for (int r = 0; r < rounds; r++) { - BytesRef thisRoundBytes = toXContent(asXContent(testInstance), xContentType, params, humanReadable).toBytesRef(); - if (firstTimeBytes.bytesEquals(thisRoundBytes)) { - continue; - } - StringBuilder error = new StringBuilder("Failed to round trip over "); - if (humanReadable) { - error.append("human readable "); + try { + ToXContent.Params params = new ToXContent.DelegatingMapParams( + singletonMap(RestSearchAction.TYPED_KEYS_PARAM, "true"), + getToXContentParams() + ); + boolean humanReadable = randomBoolean(); + BytesRef firstTimeBytes = toXContent(asXContent(testInstance), xContentType, params, humanReadable).toBytesRef(); + + /* + * 500 rounds seems to consistently reproduce the issue on Nik's + * laptop. Larger numbers are going to be slower but more likely + * to reproduce the issue. + */ + int rounds = scaledRandomIntBetween(300, 5000); + concurrentTest(() -> { + try { + for (int r = 0; r < rounds; r++) { + BytesRef thisRoundBytes = toXContent(asXContent(testInstance), xContentType, params, humanReadable).toBytesRef(); + if (firstTimeBytes.bytesEquals(thisRoundBytes)) { + continue; + } + StringBuilder error = new StringBuilder("Failed to round trip over "); + if (humanReadable) { + error.append("human readable "); + } + error.append(xContentType); + error.append("\nCanonical is:\n").append(Strings.toString(asXContent(testInstance), true, true)); + boolean showBytes = xContentType.xContent() == CborXContent.cborXContent; + error.append("\nWanted : ").append(showBytes ? firstTimeBytes : firstTimeBytes.utf8ToString()); + error.append("\nBut got: ").append(showBytes ? thisRoundBytes : thisRoundBytes.utf8ToString()); + fail(error.toString()); } - error.append(xContentType); - error.append("\nCanonical is:\n").append(Strings.toString(asXContent(testInstance), true, true)); - boolean showBytes = xContentType.xContent() == CborXContent.cborXContent; - error.append("\nWanted : ").append(showBytes ? firstTimeBytes : firstTimeBytes.utf8ToString()); - error.append("\nBut got: ").append(showBytes ? 
thisRoundBytes : thisRoundBytes.utf8ToString()); - fail(error.toString()); + } catch (IOException e) { + throw new AssertionError(e); } - } catch (IOException e) { - throw new AssertionError(e); - } - }); + }); + } finally { + dispose(testInstance); + } } protected abstract ToXContent asXContent(T instance); diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractWireTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractWireTestCase.java index 0f2a64920fcbd..8d4085623d156 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractWireTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractWireTestCase.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Releasable; import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; @@ -54,10 +55,20 @@ public abstract class AbstractWireTestCase extends ESTestCase { */ public final void testEqualsAndHashcode() { for (int runs = 0; runs < NUMBER_OF_TEST_RUNS; runs++) { - EqualsHashCodeTestUtils.checkEqualsAndHashCode(createTestInstance(), this::copyInstance, this::mutateInstance); + T testInstance = createTestInstance(); + try { + EqualsHashCodeTestUtils.checkEqualsAndHashCode(testInstance, this::copyInstance, this::mutateInstance, this::dispose); + } finally { + dispose(testInstance); + } } } + /** + * Dispose of the copy, usually {@link Releasable#close} or a noop. + */ + protected void dispose(T t) {} + /** * Calls {@link Object#equals} on equal objects on many threads and verifies * they all return true. Folks tend to assume this is true about @@ -67,19 +78,27 @@ public final void testEqualsAndHashcode() { */ public final void testConcurrentEquals() throws IOException, InterruptedException, ExecutionException { T testInstance = createTestInstance(); - T copy = copyInstance(testInstance); - - /* - * 500 rounds seems to consistently reproduce the issue on Nik's - * laptop. Larger numbers are going to be slower but more likely - * to reproduce the issue. - */ - int rounds = scaledRandomIntBetween(300, 5000); - concurrentTest(() -> { - for (int r = 0; r < rounds; r++) { - assertEquals(testInstance, copy); + try { + T copy = copyInstance(testInstance); + try { + + /* + * 500 rounds seems to consistently reproduce the issue on Nik's + * laptop. Larger numbers are going to be slower but more likely + * to reproduce the issue. + */ + int rounds = scaledRandomIntBetween(300, 5000); + concurrentTest(() -> { + for (int r = 0; r < rounds; r++) { + assertEquals(testInstance, copy); + } + }); + } finally { + dispose(copy); } - }); + } finally { + dispose(testInstance); + } } /** @@ -111,25 +130,34 @@ protected void concurrentTest(Runnable r) throws InterruptedException, Execution */ public final void testConcurrentHashCode() throws InterruptedException, ExecutionException { T testInstance = createTestInstance(); - int firstHashCode = testInstance.hashCode(); - - /* - * 500 rounds seems to consistently reproduce the issue on Nik's - * laptop. Larger numbers are going to be slower but more likely - * to reproduce the issue. 
- */ - int rounds = scaledRandomIntBetween(300, 5000); - concurrentTest(() -> { - for (int r = 0; r < rounds; r++) { - assertEquals(firstHashCode, testInstance.hashCode()); - } - }); + try { + int firstHashCode = testInstance.hashCode(); + + /* + * 500 rounds seems to consistently reproduce the issue on Nik's + * laptop. Larger numbers are going to be slower but more likely + * to reproduce the issue. + */ + int rounds = scaledRandomIntBetween(300, 5000); + concurrentTest(() -> { + for (int r = 0; r < rounds; r++) { + assertEquals(firstHashCode, testInstance.hashCode()); + } + }); + } finally { + dispose(testInstance); + } } public void testToString() throws Exception { - final String toString = createTestInstance().toString(); - assertNotNull(toString); - assertThat(toString, not(emptyString())); + T testInstance = createTestInstance(); + try { + final String toString = testInstance.toString(); + assertNotNull(toString); + assertThat(toString, not(emptyString())); + } finally { + dispose(testInstance); + } } /** @@ -138,7 +166,11 @@ public void testToString() throws Exception { public final void testSerialization() throws IOException { for (int runs = 0; runs < NUMBER_OF_TEST_RUNS; runs++) { T testInstance = createTestInstance(); - assertSerialization(testInstance); + try { + assertSerialization(testInstance); + } finally { + dispose(testInstance); + } } } @@ -155,22 +187,25 @@ public final void testSerialization() throws IOException { */ public final void testConcurrentSerialization() throws InterruptedException, ExecutionException { T testInstance = createTestInstance(); - - /* - * 500 rounds seems to consistently reproduce the issue on Nik's - * laptop. Larger numbers are going to be slower but more likely - * to reproduce the issue. - */ - int rounds = scaledRandomIntBetween(300, 2000); - concurrentTest(() -> { - try { - for (int r = 0; r < rounds; r++) { - assertSerialization(testInstance); + try { + /* + * 500 rounds seems to consistently reproduce the issue on Nik's + * laptop. Larger numbers are going to be slower but more likely + * to reproduce the issue. 
+ */ + int rounds = scaledRandomIntBetween(300, 2000); + concurrentTest(() -> { + try { + for (int r = 0; r < rounds; r++) { + assertSerialization(testInstance); + } + } catch (IOException e) { + throw new AssertionError("error serializing", e); } - } catch (IOException e) { - throw new AssertionError("error serializing", e); - } - }); + }); + } finally { + dispose(testInstance); + } } /** @@ -187,7 +222,11 @@ protected final void assertSerialization(T testInstance) throws IOException { */ protected final void assertSerialization(T testInstance, TransportVersion version) throws IOException { T deserializedInstance = copyInstance(testInstance, version); - assertEqualInstances(testInstance, deserializedInstance); + try { + assertEqualInstances(testInstance, deserializedInstance); + } finally { + dispose(deserializedInstance); + } } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java index 5e9ed3e26d970..fa4d196ceaeda 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java @@ -24,6 +24,7 @@ import java.io.IOException; import java.util.function.BiConsumer; +import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; import java.util.function.Supplier; @@ -125,6 +126,7 @@ public static class XContentTester { assertEquals(expectedInstance.hashCode(), newInstance.hashCode()); }; private boolean assertToXContentEquivalence = true; + private Consumer dispose = t -> {}; private XContentTester( CheckedBiFunction createParser, @@ -142,24 +144,32 @@ public void test() throws IOException { for (int runs = 0; runs < numberOfTestRuns; runs++) { XContentType xContentType = randomFrom(XContentType.values()).canonical(); T testInstance = instanceSupplier.apply(xContentType); - BytesReference originalXContent = toXContent.apply(testInstance, xContentType); - BytesReference shuffledContent = insertRandomFieldsAndShuffle( - originalXContent, - xContentType, - supportsUnknownFields, - shuffleFieldsExceptions, - randomFieldsExcludeFilter, - createParser - ); - XContentParser parser = createParser.apply(XContentFactory.xContent(xContentType), shuffledContent); - T parsed = fromXContent.apply(parser); - assertEqualsConsumer.accept(testInstance, parsed); - if (assertToXContentEquivalence) { - assertToXContentEquivalent( - toXContent.apply(testInstance, xContentType), - toXContent.apply(parsed, xContentType), - xContentType + try { + BytesReference originalXContent = toXContent.apply(testInstance, xContentType); + BytesReference shuffledContent = insertRandomFieldsAndShuffle( + originalXContent, + xContentType, + supportsUnknownFields, + shuffleFieldsExceptions, + randomFieldsExcludeFilter, + createParser ); + XContentParser parser = createParser.apply(XContentFactory.xContent(xContentType), shuffledContent); + T parsed = fromXContent.apply(parser); + try { + assertEqualsConsumer.accept(testInstance, parsed); + if (assertToXContentEquivalence) { + assertToXContentEquivalent( + toXContent.apply(testInstance, xContentType), + toXContent.apply(parsed, xContentType), + xContentType + ); + } + } finally { + dispose.accept(parsed); + } + } finally { + dispose.accept(testInstance); } } } @@ -193,6 +203,11 @@ public XContentTester assertToXContentEquivalence(boolean assertToXContentEqu this.assertToXContentEquivalence = 
assertToXContentEquivalence; return this; } + + public XContentTester dispose(Consumer dispose) { + this.dispose = dispose; + return this; + } } public static void testFromXContent( diff --git a/test/framework/src/main/java/org/elasticsearch/test/EqualsHashCodeTestUtils.java b/test/framework/src/main/java/org/elasticsearch/test/EqualsHashCodeTestUtils.java index 143ab9011c85d..b295dad6a6b16 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/EqualsHashCodeTestUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/EqualsHashCodeTestUtils.java @@ -8,7 +8,10 @@ package org.elasticsearch.test; +import org.elasticsearch.core.Releasable; + import java.io.IOException; +import java.util.function.Consumer; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; @@ -57,6 +60,24 @@ public static void checkEqualsAndHashCode(T original, CopyFunction copyFu * to the input object */ public static void checkEqualsAndHashCode(T original, CopyFunction copyFunction, MutateFunction mutationFunction) { + checkEqualsAndHashCode(original, copyFunction, mutationFunction, unused -> {}); + } + + /** + * Perform common equality and hashCode checks on the input object + * @param original the object under test + * @param copyFunction a function that creates a deep copy of the input object + * @param mutationFunction a function that creates a copy of the input object that is different + * @param dispose dispose of the copy, usually {@link Releasable#close} or a noop + * from the input in one aspect. The output of this call is used to check that it is not equal() + * to the input object + */ + public static void checkEqualsAndHashCode( + T original, + CopyFunction copyFunction, + MutateFunction mutationFunction, + Consumer dispose + ) { try { String objectName = original.getClass().getSimpleName(); assertFalse(objectName + " is equal to null", original.equals(null)); @@ -70,25 +91,41 @@ public static void checkEqualsAndHashCode(T original, CopyFunction copyFu ); if (mutationFunction != null) { T mutation = mutationFunction.mutate(original); - assertThat(objectName + " mutation should not be equal to original", mutation, not(equalTo(original))); - // equals is symmetric: for any non-null reference values x and y, x.equals(y) should return true if and only - // if y.equals(x) returns true. Conversely, y.equals(x) should return true if and only if x.equals(y) - assertThat("original should not be equal to mutation" + objectName, original, not(equalTo(mutation))); + try { + assertThat(objectName + " mutation should not be equal to original", mutation, not(equalTo(original))); + // equals is symmetric: for any non-null reference values x and y, x.equals(y) should return true if and only + // if y.equals(x) returns true. 
Conversely, y.equals(x) should return true if and only if x.equals(y) + assertThat("original should not be equal to mutation" + objectName, original, not(equalTo(mutation))); + } finally { + dispose.accept(mutation); + } } T copy = copyFunction.copy(original); - assertTrue(objectName + " copy is not equal to self", copy.equals(copy)); - assertTrue(objectName + " is not equal to its copy", original.equals(copy)); - assertTrue("equals is not symmetric", copy.equals(original)); - assertThat(objectName + " hashcode is different from copies hashcode", copy.hashCode(), equalTo(original.hashCode())); + try { + assertTrue(objectName + " copy is not equal to self", copy.equals(copy)); + assertTrue(objectName + " is not equal to its copy", original.equals(copy)); + assertTrue("equals is not symmetric", copy.equals(original)); + assertThat(objectName + " hashcode is different from copies hashcode", copy.hashCode(), equalTo(original.hashCode())); - T secondCopy = copyFunction.copy(copy); - assertTrue("second copy is not equal to self", secondCopy.equals(secondCopy)); - assertTrue("copy is not equal to its second copy", copy.equals(secondCopy)); - assertThat("second copy's hashcode is different from original hashcode", copy.hashCode(), equalTo(secondCopy.hashCode())); - assertTrue("equals is not transitive", original.equals(secondCopy)); - assertTrue("equals is not symmetric", secondCopy.equals(copy)); - assertTrue("equals is not symmetric", secondCopy.equals(original)); + T secondCopy = copyFunction.copy(copy); + try { + assertTrue("second copy is not equal to self", secondCopy.equals(secondCopy)); + assertTrue("copy is not equal to its second copy", copy.equals(secondCopy)); + assertThat( + "second copy's hashcode is different from original hashcode", + copy.hashCode(), + equalTo(secondCopy.hashCode()) + ); + assertTrue("equals is not transitive", original.equals(secondCopy)); + assertTrue("equals is not symmetric", secondCopy.equals(copy)); + assertTrue("equals is not symmetric", secondCopy.equals(original)); + } finally { + dispose.accept(secondCopy); + } + } finally { + dispose.accept(copy); + } } catch (IOException e) { throw new RuntimeException(e); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java index f2501d54a4ae3..74d5f27d8d5f5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; +import java.util.function.Supplier; /** * Block that stores boolean values. @@ -36,33 +37,36 @@ public sealed interface BooleanBlock extends Block permits FilterBooleanBlock, B @Override BooleanBlock filter(int... 
positions); - NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Block.class, "BooleanBlock", BooleanBlock::of); - @Override default String getWriteableName() { return "BooleanBlock"; } - static BooleanBlock of(StreamInput in) throws IOException { + static NamedWriteableRegistry.Entry namedWriteableEntry(Supplier blockFactory) { + return new NamedWriteableRegistry.Entry(Block.class, "BooleanBlock", in -> readFrom(blockFactory.get(), in)); + } + + private static BooleanBlock readFrom(BlockFactory blockFactory, StreamInput in) throws IOException { final boolean isVector = in.readBoolean(); if (isVector) { - return BooleanVector.of(in).asBlock(); + return BooleanVector.readFrom(blockFactory, in).asBlock(); } final int positions = in.readVInt(); - var builder = newBlockBuilder(positions); - for (int i = 0; i < positions; i++) { - if (in.readBoolean()) { - builder.appendNull(); - } else { - final int valueCount = in.readVInt(); - builder.beginPositionEntry(); - for (int valueIndex = 0; valueIndex < valueCount; valueIndex++) { - builder.appendBoolean(in.readBoolean()); + try (BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(positions)) { + for (int i = 0; i < positions; i++) { + if (in.readBoolean()) { + builder.appendNull(); + } else { + final int valueCount = in.readVInt(); + builder.beginPositionEntry(); + for (int valueIndex = 0; valueIndex < valueCount; valueIndex++) { + builder.appendBoolean(in.readBoolean()); + } + builder.endPositionEntry(); } - builder.endPositionEntry(); } + return builder.build(); } - return builder.build(); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java index 79b5ec40b81e5..d5dc9c23d7eee 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java @@ -72,17 +72,18 @@ static int hash(BooleanVector vector) { } /** Deserializes a Vector from the given stream input. 
*/ - static BooleanVector of(StreamInput in) throws IOException { + static BooleanVector readFrom(BlockFactory blockFactory, StreamInput in) throws IOException { final int positions = in.readVInt(); final boolean constant = in.readBoolean(); if (constant && positions > 0) { - return new ConstantBooleanVector(in.readBoolean(), positions); + return blockFactory.newConstantBooleanVector(in.readBoolean(), positions); } else { - var builder = BooleanVector.newVectorBuilder(positions); - for (int i = 0; i < positions; i++) { - builder.appendBoolean(in.readBoolean()); + try (var builder = blockFactory.newBooleanVectorFixedBuilder(positions)) { + for (int i = 0; i < positions; i++) { + builder.appendBoolean(in.readBoolean()); + } + return builder.build(); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java index e8cf8926d3cd2..29f5115be3934 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; +import java.util.function.Supplier; /** * Block that stores BytesRef values. @@ -40,33 +41,36 @@ public sealed interface BytesRefBlock extends Block permits FilterBytesRefBlock, @Override BytesRefBlock filter(int... positions); - NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Block.class, "BytesRefBlock", BytesRefBlock::of); - @Override default String getWriteableName() { return "BytesRefBlock"; } - static BytesRefBlock of(StreamInput in) throws IOException { + static NamedWriteableRegistry.Entry namedWriteableEntry(Supplier blockFactory) { + return new NamedWriteableRegistry.Entry(Block.class, "BytesRefBlock", in -> readFrom(blockFactory.get(), in)); + } + + private static BytesRefBlock readFrom(BlockFactory blockFactory, StreamInput in) throws IOException { final boolean isVector = in.readBoolean(); if (isVector) { - return BytesRefVector.of(in).asBlock(); + return BytesRefVector.readFrom(blockFactory, in).asBlock(); } final int positions = in.readVInt(); - var builder = newBlockBuilder(positions); - for (int i = 0; i < positions; i++) { - if (in.readBoolean()) { - builder.appendNull(); - } else { - final int valueCount = in.readVInt(); - builder.beginPositionEntry(); - for (int valueIndex = 0; valueIndex < valueCount; valueIndex++) { - builder.appendBytesRef(in.readBytesRef()); + try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(positions)) { + for (int i = 0; i < positions; i++) { + if (in.readBoolean()) { + builder.appendNull(); + } else { + final int valueCount = in.readVInt(); + builder.beginPositionEntry(); + for (int valueIndex = 0; valueIndex < valueCount; valueIndex++) { + builder.appendBytesRef(in.readBytesRef()); + } + builder.endPositionEntry(); } - builder.endPositionEntry(); } + return builder.build(); } - return builder.build(); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java index 84cb24f955618..3dd334a9fa71d 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java @@ -72,17 +72,18 @@ static int hash(BytesRefVector vector) { } /** Deserializes a Vector from the given stream input. */ - static BytesRefVector of(StreamInput in) throws IOException { + static BytesRefVector readFrom(BlockFactory blockFactory, StreamInput in) throws IOException { final int positions = in.readVInt(); final boolean constant = in.readBoolean(); if (constant && positions > 0) { - return new ConstantBytesRefVector(in.readBytesRef(), positions); + return blockFactory.newConstantBytesRefVector(in.readBytesRef(), positions); } else { - var builder = BytesRefVector.newVectorBuilder(positions); - for (int i = 0; i < positions; i++) { - builder.appendBytesRef(in.readBytesRef()); + try (var builder = blockFactory.newBytesRefVectorBuilder(positions)) { + for (int i = 0; i < positions; i++) { + builder.appendBytesRef(in.readBytesRef()); + } + return builder.build(); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java index 3f7ae8449425e..6fc64a6891c32 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java @@ -57,9 +57,13 @@ public boolean isConstant() { return true; } + public static long ramBytesUsed(BytesRef value) { + return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(value.bytes); + } + @Override public long ramBytesUsed() { - return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(value.bytes); + return ramBytesUsed(value); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java index 9edd887448938..895d72cec3a4e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; +import java.util.function.Supplier; /** * Block that stores double values. @@ -36,33 +37,36 @@ public sealed interface DoubleBlock extends Block permits FilterDoubleBlock, Dou @Override DoubleBlock filter(int... 
positions); - NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Block.class, "DoubleBlock", DoubleBlock::of); - @Override default String getWriteableName() { return "DoubleBlock"; } - static DoubleBlock of(StreamInput in) throws IOException { + static NamedWriteableRegistry.Entry namedWriteableEntry(Supplier blockFactory) { + return new NamedWriteableRegistry.Entry(Block.class, "DoubleBlock", in -> readFrom(blockFactory.get(), in)); + } + + private static DoubleBlock readFrom(BlockFactory blockFactory, StreamInput in) throws IOException { final boolean isVector = in.readBoolean(); if (isVector) { - return DoubleVector.of(in).asBlock(); + return DoubleVector.readFrom(blockFactory, in).asBlock(); } final int positions = in.readVInt(); - var builder = newBlockBuilder(positions); - for (int i = 0; i < positions; i++) { - if (in.readBoolean()) { - builder.appendNull(); - } else { - final int valueCount = in.readVInt(); - builder.beginPositionEntry(); - for (int valueIndex = 0; valueIndex < valueCount; valueIndex++) { - builder.appendDouble(in.readDouble()); + try (DoubleBlock.Builder builder = blockFactory.newDoubleBlockBuilder(positions)) { + for (int i = 0; i < positions; i++) { + if (in.readBoolean()) { + builder.appendNull(); + } else { + final int valueCount = in.readVInt(); + builder.beginPositionEntry(); + for (int valueIndex = 0; valueIndex < valueCount; valueIndex++) { + builder.appendDouble(in.readDouble()); + } + builder.endPositionEntry(); } - builder.endPositionEntry(); } + return builder.build(); } - return builder.build(); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java index ce3e1ffa291f4..e2aaeed94ba6d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java @@ -73,17 +73,18 @@ static int hash(DoubleVector vector) { } /** Deserializes a Vector from the given stream input. 
*/ - static DoubleVector of(StreamInput in) throws IOException { + static DoubleVector readFrom(BlockFactory blockFactory, StreamInput in) throws IOException { final int positions = in.readVInt(); final boolean constant = in.readBoolean(); if (constant && positions > 0) { - return new ConstantDoubleVector(in.readDouble(), positions); + return blockFactory.newConstantDoubleVector(in.readDouble(), positions); } else { - var builder = DoubleVector.newVectorBuilder(positions); - for (int i = 0; i < positions; i++) { - builder.appendDouble(in.readDouble()); + try (var builder = blockFactory.newDoubleVectorFixedBuilder(positions)) { + for (int i = 0; i < positions; i++) { + builder.appendDouble(in.readDouble()); + } + return builder.build(); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java index d6f39de6fc938..a36411e7e3d43 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; +import java.util.function.Supplier; /** * Block that stores int values. @@ -36,33 +37,36 @@ public sealed interface IntBlock extends Block permits FilterIntBlock, IntArrayB @Override IntBlock filter(int... positions); - NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Block.class, "IntBlock", IntBlock::of); - @Override default String getWriteableName() { return "IntBlock"; } - static IntBlock of(StreamInput in) throws IOException { + static NamedWriteableRegistry.Entry namedWriteableEntry(Supplier blockFactory) { + return new NamedWriteableRegistry.Entry(Block.class, "IntBlock", in -> readFrom(blockFactory.get(), in)); + } + + private static IntBlock readFrom(BlockFactory blockFactory, StreamInput in) throws IOException { final boolean isVector = in.readBoolean(); if (isVector) { - return IntVector.of(in).asBlock(); + return IntVector.readFrom(blockFactory, in).asBlock(); } final int positions = in.readVInt(); - var builder = newBlockBuilder(positions); - for (int i = 0; i < positions; i++) { - if (in.readBoolean()) { - builder.appendNull(); - } else { - final int valueCount = in.readVInt(); - builder.beginPositionEntry(); - for (int valueIndex = 0; valueIndex < valueCount; valueIndex++) { - builder.appendInt(in.readInt()); + try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(positions)) { + for (int i = 0; i < positions; i++) { + if (in.readBoolean()) { + builder.appendNull(); + } else { + final int valueCount = in.readVInt(); + builder.beginPositionEntry(); + for (int valueIndex = 0; valueIndex < valueCount; valueIndex++) { + builder.appendInt(in.readInt()); + } + builder.endPositionEntry(); } - builder.endPositionEntry(); } + return builder.build(); } - return builder.build(); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java index 79f4d5c31845d..157f7f1406072 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java @@ -72,17 +72,18 
@@ static int hash(IntVector vector) { } /** Deserializes a Vector from the given stream input. */ - static IntVector of(StreamInput in) throws IOException { + static IntVector readFrom(BlockFactory blockFactory, StreamInput in) throws IOException { final int positions = in.readVInt(); final boolean constant = in.readBoolean(); if (constant && positions > 0) { - return new ConstantIntVector(in.readInt(), positions); + return blockFactory.newConstantIntVector(in.readInt(), positions); } else { - var builder = IntVector.newVectorBuilder(positions); - for (int i = 0; i < positions; i++) { - builder.appendInt(in.readInt()); + try (var builder = blockFactory.newIntVectorFixedBuilder(positions)) { + for (int i = 0; i < positions; i++) { + builder.appendInt(in.readInt()); + } + return builder.build(); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java index d3dc5928cb543..c14b0b7bc7d06 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; +import java.util.function.Supplier; /** * Block that stores long values. @@ -36,33 +37,36 @@ public sealed interface LongBlock extends Block permits FilterLongBlock, LongArr @Override LongBlock filter(int... positions); - NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Block.class, "LongBlock", LongBlock::of); - @Override default String getWriteableName() { return "LongBlock"; } - static LongBlock of(StreamInput in) throws IOException { + static NamedWriteableRegistry.Entry namedWriteableEntry(Supplier blockFactory) { + return new NamedWriteableRegistry.Entry(Block.class, "LongBlock", in -> readFrom(blockFactory.get(), in)); + } + + private static LongBlock readFrom(BlockFactory blockFactory, StreamInput in) throws IOException { final boolean isVector = in.readBoolean(); if (isVector) { - return LongVector.of(in).asBlock(); + return LongVector.readFrom(blockFactory, in).asBlock(); } final int positions = in.readVInt(); - var builder = newBlockBuilder(positions); - for (int i = 0; i < positions; i++) { - if (in.readBoolean()) { - builder.appendNull(); - } else { - final int valueCount = in.readVInt(); - builder.beginPositionEntry(); - for (int valueIndex = 0; valueIndex < valueCount; valueIndex++) { - builder.appendLong(in.readLong()); + try (LongBlock.Builder builder = blockFactory.newLongBlockBuilder(positions)) { + for (int i = 0; i < positions; i++) { + if (in.readBoolean()) { + builder.appendNull(); + } else { + final int valueCount = in.readVInt(); + builder.beginPositionEntry(); + for (int valueIndex = 0; valueIndex < valueCount; valueIndex++) { + builder.appendLong(in.readLong()); + } + builder.endPositionEntry(); } - builder.endPositionEntry(); } + return builder.build(); } - return builder.build(); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java index 584e9ecfa9ce0..de2e51cfda4ea 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java +++ 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java @@ -73,17 +73,18 @@ static int hash(LongVector vector) { } /** Deserializes a Vector from the given stream input. */ - static LongVector of(StreamInput in) throws IOException { + static LongVector readFrom(BlockFactory blockFactory, StreamInput in) throws IOException { final int positions = in.readVInt(); final boolean constant = in.readBoolean(); if (constant && positions > 0) { - return new ConstantLongVector(in.readLong(), positions); + return blockFactory.newConstantLongVector(in.readLong(), positions); } else { - var builder = LongVector.newVectorBuilder(positions); - for (int i = 0; i < positions; i++) { - builder.appendLong(in.readLong()); + try (var builder = blockFactory.newLongVectorFixedBuilder(positions)) { + for (int i = 0; i < positions; i++) { + builder.appendLong(in.readLong()); + } + return builder.build(); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index c5d6780e84685..ed720dc554aa3 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -14,6 +14,7 @@ import org.elasticsearch.core.Releasable; import java.util.List; +import java.util.function.Supplier; /** * A Block is a columnar representation of homogenous data. It has a position (row) count, and @@ -251,13 +252,13 @@ public void close() { } } - static List getNamedWriteables() { + static List getNamedWriteables(Supplier blockFactory) { return List.of( - IntBlock.ENTRY, - LongBlock.ENTRY, - DoubleBlock.ENTRY, - BytesRefBlock.ENTRY, - BooleanBlock.ENTRY, + IntBlock.namedWriteableEntry(blockFactory), + LongBlock.namedWriteableEntry(blockFactory), + DoubleBlock.namedWriteableEntry(blockFactory), + BytesRefBlock.namedWriteableEntry(blockFactory), + BooleanBlock.namedWriteableEntry(blockFactory), ConstantNullBlock.ENTRY ); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java index f094f8462e673..0e93bc1ee5e90 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java @@ -147,6 +147,13 @@ public BooleanBlock newConstantBooleanBlockWith(boolean value, int positions, lo return b; } + public BooleanVector newConstantBooleanVector(boolean value, int positions) { + adjustBreaker(ConstantBooleanVector.RAM_BYTES_USED, false); + var v = new ConstantBooleanVector(value, positions, this); + assert v.ramBytesUsed() == ConstantBooleanVector.RAM_BYTES_USED; + return v; + } + public IntBlock.Builder newIntBlockBuilder(int estimatedSize) { return new IntBlockBuilder(estimatedSize, this); } @@ -207,7 +214,7 @@ public IntBlock newConstantIntBlockWith(int value, int positions, long preAdjust public IntVector newConstantIntVector(int value, int positions) { adjustBreaker(ConstantIntVector.RAM_BYTES_USED, false); var v = new ConstantIntVector(value, positions, this); - adjustBreaker(v.ramBytesUsed() - ConstantIntVector.RAM_BYTES_USED, true); + assert v.ramBytesUsed() == ConstantLongVector.RAM_BYTES_USED; return v; } @@ -253,6 +260,13 @@ public LongBlock 
newConstantLongBlockWith(long value, int positions, long preAdj return b; } + public LongVector newConstantLongVector(long value, int positions) { + adjustBreaker(ConstantLongVector.RAM_BYTES_USED, false); + var v = new ConstantLongVector(value, positions, this); + assert v.ramBytesUsed() == ConstantLongVector.RAM_BYTES_USED; + return v; + } + public DoubleBlock.Builder newDoubleBlockBuilder(int estimatedSize) { return new DoubleBlockBuilder(estimatedSize, this); } @@ -296,6 +310,13 @@ public DoubleBlock newConstantDoubleBlockWith(double value, int positions, long return b; } + public DoubleVector newConstantDoubleVector(double value, int positions) { + adjustBreaker(ConstantDoubleVector.RAM_BYTES_USED, false); + var v = new ConstantDoubleVector(value, positions, this); + assert v.ramBytesUsed() == ConstantDoubleVector.RAM_BYTES_USED; + return v; + } + public BytesRefBlock.Builder newBytesRefBlockBuilder(int estimatedSize) { return new BytesRefBlockBuilder(estimatedSize, bigArrays, this); } @@ -322,6 +343,14 @@ public BytesRefBlock newConstantBytesRefBlockWith(BytesRef value, int positions) return b; } + public BytesRefVector newConstantBytesRefVector(BytesRef value, int positions) { + long preadjusted = ConstantBytesRefVector.ramBytesUsed(value); + adjustBreaker(preadjusted, false); + var v = new ConstantBytesRefVector(value, positions, this); + assert v.ramBytesUsed() == preadjusted; + return v; + } + public Block newConstantNullBlock(int positions) { var b = new ConstantNullBlock(positions, this); adjustBreaker(b.ramBytesUsed(), true); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java index e7cd6efdc0fc5..198973a619fcc 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java @@ -68,6 +68,7 @@ public ElementType elementType() { @Override public Block filter(int... 
positions) { + close(); return new ConstantNullBlock(positions.length); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Page.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Page.java index d45555790c4cd..18f3ed7ba61bf 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Page.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Page.java @@ -224,20 +224,4 @@ public void releaseBlocks() { blocksReleased = true; Releasables.closeExpectNoException(blocks); } - - public static class PageWriter implements Writeable.Writer { - - @Override - public void write(StreamOutput out, Page value) throws IOException { - value.writeTo(out); - } - } - - public static class PageReader implements Writeable.Reader { - - @Override - public Page read(StreamInput in) throws IOException { - return new Page(in); - } - } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st index 3f626e463f428..5eb43929f3280 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st @@ -16,6 +16,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; +import java.util.function.Supplier; /** * Block that stores $type$ values. @@ -52,33 +53,36 @@ $endif$ @Override $Type$Block filter(int... positions); - NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Block.class, "$Type$Block", $Type$Block::of); - @Override default String getWriteableName() { return "$Type$Block"; } - static $Type$Block of(StreamInput in) throws IOException { + static NamedWriteableRegistry.Entry namedWriteableEntry(Supplier blockFactory) { + return new NamedWriteableRegistry.Entry(Block.class, "$Type$Block", in -> readFrom(blockFactory.get(), in)); + } + + private static $Type$Block readFrom(BlockFactory blockFactory, StreamInput in) throws IOException { final boolean isVector = in.readBoolean(); if (isVector) { - return $Type$Vector.of(in).asBlock(); + return $Type$Vector.readFrom(blockFactory, in).asBlock(); } final int positions = in.readVInt(); - var builder = newBlockBuilder(positions); - for (int i = 0; i < positions; i++) { - if (in.readBoolean()) { - builder.appendNull(); - } else { - final int valueCount = in.readVInt(); - builder.beginPositionEntry(); - for (int valueIndex = 0; valueIndex < valueCount; valueIndex++) { - builder.append$Type$(in.read$Type$()); + try ($Type$Block.Builder builder = blockFactory.new$Type$BlockBuilder(positions)) { + for (int i = 0; i < positions; i++) { + if (in.readBoolean()) { + builder.appendNull(); + } else { + final int valueCount = in.readVInt(); + builder.beginPositionEntry(); + for (int valueIndex = 0; valueIndex < valueCount; valueIndex++) { + builder.append$Type$(in.read$Type$()); + } + builder.endPositionEntry(); } - builder.endPositionEntry(); } + return builder.build(); } - return builder.build(); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st index bfa33e54132a6..36384f3996f55 100644 --- 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st @@ -68,15 +68,23 @@ $endif$ return true; } - @Override - public long ramBytesUsed() { $if(BytesRef)$ + public static long ramBytesUsed(BytesRef value) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(value.bytes); + } + + @Override + public long ramBytesUsed() { + return ramBytesUsed(value); + } + $else$ + @Override + public long ramBytesUsed() { return RAM_BYTES_USED; -$endif$ } +$endif$ @Override public boolean equals(Object obj) { if (obj instanceof $Type$Vector that) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st index 9f29f3e25fe91..7e0c5b55fb2a6 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st @@ -106,17 +106,18 @@ $endif$ } /** Deserializes a Vector from the given stream input. */ - static $Type$Vector of(StreamInput in) throws IOException { + static $Type$Vector readFrom(BlockFactory blockFactory, StreamInput in) throws IOException { final int positions = in.readVInt(); final boolean constant = in.readBoolean(); if (constant && positions > 0) { - return new Constant$Type$Vector(in.read$Type$(), positions); + return blockFactory.newConstant$Type$Vector(in.read$Type$(), positions); } else { - var builder = $Type$Vector.newVectorBuilder(positions); - for (int i = 0; i < positions; i++) { - builder.append$Type$(in.read$Type$()); + try (var builder = blockFactory.new$Type$Vector$if(BytesRef)$$else$Fixed$endif$Builder(positions)) { + for (int i = 0; i < positions; i++) { + builder.append$Type$(in.read$Type$()); + } + return builder.build(); } - return builder.build(); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/LimitOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/LimitOperator.java index 99fb410122d4e..7507eb8f978c8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/LimitOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/LimitOperator.java @@ -45,7 +45,7 @@ public LimitOperator(int limit) { public record Factory(int limit) implements OperatorFactory { @Override - public Operator get(DriverContext driverContext) { + public LimitOperator get(DriverContext driverContext) { return new LimitOperator(limit); } @@ -114,7 +114,9 @@ public Status status() { @Override public void close() { - + if (lastInput != null) { + lastInput.releaseBlocks(); + } } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicPageTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicPageTests.java index ca7ef54f7f321..69d2620d6d863 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicPageTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicPageTests.java @@ -9,15 +9,13 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.ByteBufferStreamInput; import 
org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BytesRefArray; -import org.elasticsearch.common.util.MockBigArrays; -import org.elasticsearch.common.util.PageCacheRecycler; -import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.core.Releasables; import org.elasticsearch.test.EqualsHashCodeTestUtils; import java.io.IOException; @@ -120,9 +118,13 @@ public void testEqualityAndHashCode() throws IOException { }; } Page page = new Page(positions, blocks); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(page, copyPageFunction, mutatePageFunction); + try { + EqualsHashCodeTestUtils.checkEqualsAndHashCode(page, copyPageFunction, mutatePageFunction); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(page, unused -> serializeDeserializePage(page)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(page, this::serializeDeserializePage, null, Page::releaseBlocks); + } finally { + page.releaseBlocks(); + } } public void testBasic() { @@ -146,28 +148,33 @@ public void testAppend() { } public void testPageSerializationSimple() throws IOException { - try (var bytesRefArray = bytesRefArrayOf("0a", "1b", "2c", "3d", "4e", "5f", "6g", "7h", "8i", "9j")) { - final BytesStreamOutput out = new BytesStreamOutput(); - Page origPage = new Page( - new IntArrayVector(IntStream.range(0, 10).toArray(), 10).asBlock(), - new LongArrayVector(LongStream.range(10, 20).toArray(), 10).asBlock(), - new DoubleArrayVector(LongStream.range(30, 40).mapToDouble(i -> i).toArray(), 10).asBlock(), - new BytesRefArrayVector(bytesRefArray, 10).asBlock(), - IntBlock.newConstantBlockWith(randomInt(), 10), - LongBlock.newConstantBlockWith(randomInt(), 10), - DoubleBlock.newConstantBlockWith(randomInt(), 10), - BytesRefBlock.newConstantBlockWith(new BytesRef(Integer.toHexString(randomInt())), 10), - new IntArrayVector(IntStream.range(0, 20).toArray(), 20).filter(5, 6, 7, 8, 9, 10, 11, 12, 13, 14).asBlock() - ); + Page origPage = new Page( + new IntArrayVector(IntStream.range(0, 10).toArray(), 10).asBlock(), + new LongArrayVector(LongStream.range(10, 20).toArray(), 10).asBlock(), + new DoubleArrayVector(LongStream.range(30, 40).mapToDouble(i -> i).toArray(), 10).asBlock(), + new BytesRefArrayVector(bytesRefArrayOf("0a", "1b", "2c", "3d", "4e", "5f", "6g", "7h", "8i", "9j"), 10).asBlock(), + IntBlock.newConstantBlockWith(randomInt(), 10), + LongBlock.newConstantBlockWith(randomInt(), 10), + DoubleBlock.newConstantBlockWith(randomInt(), 10), + BytesRefBlock.newConstantBlockWith(new BytesRef(Integer.toHexString(randomInt())), 10), + new IntArrayVector(IntStream.range(0, 20).toArray(), 20).filter(5, 6, 7, 8, 9, 10, 11, 12, 13, 14).asBlock() + ); + try { Page deserPage = serializeDeserializePage(origPage); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(origPage, unused -> deserPage); - - for (int i = 0; i < origPage.getBlockCount(); i++) { - Vector vector = origPage.getBlock(i).asVector(); - if (vector != null) { - assertEquals(vector.isConstant(), deserPage.getBlock(i).asVector().isConstant()); + try { + EqualsHashCodeTestUtils.checkEqualsAndHashCode(origPage, unused -> deserPage); + + for (int i = 0; i < origPage.getBlockCount(); i++) { + Vector vector = origPage.getBlock(i).asVector(); + if (vector != null) { + assertEquals(vector.isConstant(), 
deserPage.getBlock(i).asVector().isConstant()); + } } + } finally { + deserPage.releaseBlocks(); } + } finally { + origPage.releaseBlocks(); } } @@ -181,16 +188,21 @@ public void testSerializationListPages() throws IOException { ), new Page(BytesRefBlock.newConstantBlockWith(new BytesRef("Hello World"), positions)) ); - final BytesStreamOutput out = new BytesStreamOutput(); - out.writeCollection(origPages); - StreamInput in = new NamedWriteableAwareStreamInput(ByteBufferStreamInput.wrap(BytesReference.toBytes(out.bytes())), registry); - - List deserPages = in.readCollectionAsList(new Page.PageReader()); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(origPages, unused -> deserPages); + try { + EqualsHashCodeTestUtils.checkEqualsAndHashCode(origPages, page -> { + BytesStreamOutput out = new BytesStreamOutput(); + out.writeCollection(origPages); + StreamInput in = new NamedWriteableAwareStreamInput( + ByteBufferStreamInput.wrap(BytesReference.toBytes(out.bytes())), + registry + ); + return in.readCollectionAsList(Page::new); + }, null, pages -> Releasables.close(() -> Iterators.map(pages.iterator(), p -> p::releaseBlocks))); + } finally { + Releasables.close(() -> Iterators.map(origPages.iterator(), p -> p::releaseBlocks)); + } } - final BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()); - BytesRefArray bytesRefArrayOf(String... values) { var array = new BytesRefArray(values.length, bigArrays); Arrays.stream(values).map(BytesRef::new).forEach(array::append); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BigArrayVectorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BigArrayVectorTests.java index 66f62a2052689..3033f672f897f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BigArrayVectorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BigArrayVectorTests.java @@ -140,10 +140,11 @@ public void testDouble() throws IOException { } void assertSerialization(Block origBlock) throws IOException { - Block deserBlock = serializeDeserializeBlock(origBlock); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(origBlock, unused -> deserBlock); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(origBlock.asVector(), unused -> deserBlock.asVector()); - assertThat(deserBlock.asVector(), is(origBlock.asVector())); - assertThat(deserBlock.asVector().isConstant(), is(origBlock.asVector().isConstant())); + try (Block deserBlock = serializeDeserializeBlock(origBlock)) { + EqualsHashCodeTestUtils.checkEqualsAndHashCode(origBlock, unused -> deserBlock); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(origBlock.asVector(), unused -> deserBlock.asVector()); + assertThat(deserBlock.asVector(), is(origBlock.asVector())); + assertThat(deserBlock.asVector().isConstant(), is(origBlock.asVector().isConstant())); + } } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java index 50a3abb1204ad..22d66921770c7 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java @@ -8,8 +8,8 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.BytesRef; -import 
org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.aggregation.SumLongAggregatorFunction; +import org.elasticsearch.core.Releasables; import org.elasticsearch.test.EqualsHashCodeTestUtils; import java.io.IOException; @@ -40,42 +40,45 @@ public void testConstantBytesRefBlock() throws IOException { private void assertConstantBlockImpl(Block origBlock) throws IOException { assertThat(origBlock.asVector().isConstant(), is(true)); - Block deserBlock = serializeDeserializeBlock(origBlock); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(origBlock, unused -> deserBlock); - assertThat(deserBlock.asVector().isConstant(), is(true)); + try (Block deserBlock = serializeDeserializeBlock(origBlock)) { + EqualsHashCodeTestUtils.checkEqualsAndHashCode(origBlock, unused -> deserBlock); + assertThat(deserBlock.asVector().isConstant(), is(true)); + } } - public void testEmptyIntBlock() { + public void testEmptyIntBlock() throws IOException { assertEmptyBlock(IntBlock.newBlockBuilder(0).build()); assertEmptyBlock(IntBlock.newBlockBuilder(0).appendNull().build().filter()); assertEmptyBlock(IntVector.newVectorBuilder(0).build().asBlock()); assertEmptyBlock(IntVector.newVectorBuilder(0).appendInt(randomInt()).build().filter().asBlock()); } - public void testEmptyLongBlock() { + public void testEmptyLongBlock() throws IOException { assertEmptyBlock(LongBlock.newBlockBuilder(0).build()); assertEmptyBlock(LongBlock.newBlockBuilder(0).appendNull().build().filter()); assertEmptyBlock(LongVector.newVectorBuilder(0).build().asBlock()); assertEmptyBlock(LongVector.newVectorBuilder(0).appendLong(randomLong()).build().filter().asBlock()); } - public void testEmptyDoubleBlock() { + public void testEmptyDoubleBlock() throws IOException { assertEmptyBlock(DoubleBlock.newBlockBuilder(0).build()); assertEmptyBlock(DoubleBlock.newBlockBuilder(0).appendNull().build().filter()); assertEmptyBlock(DoubleVector.newVectorBuilder(0).build().asBlock()); assertEmptyBlock(DoubleVector.newVectorBuilder(0).appendDouble(randomDouble()).build().filter().asBlock()); } - public void testEmptyBytesRefBlock() { + public void testEmptyBytesRefBlock() throws IOException { assertEmptyBlock(BytesRefBlock.newBlockBuilder(0).build()); assertEmptyBlock(BytesRefBlock.newBlockBuilder(0).appendNull().build().filter()); assertEmptyBlock(BytesRefVector.newVectorBuilder(0).build().asBlock()); assertEmptyBlock(BytesRefVector.newVectorBuilder(0).appendBytesRef(randomBytesRef()).build().filter().asBlock()); } - private void assertEmptyBlock(Block origBlock) { + private void assertEmptyBlock(Block origBlock) throws IOException { assertThat(origBlock.getPositionCount(), is(0)); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(origBlock, block -> serializeDeserializeBlock(block)); + try (Block deserBlock = serializeDeserializeBlock(origBlock)) { + EqualsHashCodeTestUtils.checkEqualsAndHashCode(origBlock, unused -> deserBlock); + } } public void testFilterIntBlock() throws IOException { @@ -125,37 +128,47 @@ public void testFilterBytesRefBlock() throws IOException { private void assertFilterBlock(Block origBlock) throws IOException { assertThat(origBlock.getPositionCount(), is(1)); - Block deserBlock = serializeDeserializeBlock(origBlock); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(origBlock, unused -> deserBlock); - assertThat(deserBlock.getPositionCount(), is(1)); + try (Block deserBlock = serializeDeserializeBlock(origBlock)) { + EqualsHashCodeTestUtils.checkEqualsAndHashCode(origBlock, unused -> deserBlock); + 
assertThat(deserBlock.getPositionCount(), is(1)); + } } public void testConstantNullBlock() throws IOException { Block origBlock = new ConstantNullBlock(randomIntBetween(1, 8192)); - Block deserBlock = serializeDeserializeBlock(origBlock); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(origBlock, unused -> deserBlock); + try (Block deserBlock = serializeDeserializeBlock(origBlock)) { + EqualsHashCodeTestUtils.checkEqualsAndHashCode(origBlock, unused -> deserBlock); + } } // TODO: more types, grouping, etc... - public void testAggregatorStateBlock() throws IOException { + public void testAggregatorStateBlock() { // TODO we don't have an aggregator state block any more. Do we need this? Page page = new Page(new LongArrayVector(new long[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }, 10).asBlock()); - var bigArrays = BigArrays.NON_RECYCLING_INSTANCE; - var params = new Object[] {}; var function = SumLongAggregatorFunction.create(List.of(0)); function.addRawInput(page); Block[] blocks = new Block[function.intermediateBlockCount()]; - function.evaluateIntermediate(blocks, 0); - - Block[] deserBlocks = Arrays.stream(blocks).map(this::uncheckedSerializeDeserializeBlock).toArray(Block[]::new); - IntStream.range(0, blocks.length).forEach(i -> EqualsHashCodeTestUtils.checkEqualsAndHashCode(blocks[i], unused -> deserBlocks[i])); - - var inputChannels = IntStream.range(0, SumLongAggregatorFunction.intermediateStateDesc().size()).boxed().toList(); - var finalAggregator = SumLongAggregatorFunction.create(inputChannels); - finalAggregator.addIntermediateInput(new Page(deserBlocks)); - Block[] finalBlocks = new Block[1]; - finalAggregator.evaluateFinal(finalBlocks, 0); - var finalBlock = (LongBlock) finalBlocks[0]; - assertThat(finalBlock.getLong(0), is(55L)); + try { + function.evaluateIntermediate(blocks, 0); + + Block[] deserBlocks = Arrays.stream(blocks).map(this::uncheckedSerializeDeserializeBlock).toArray(Block[]::new); + try { + IntStream.range(0, blocks.length) + .forEach(i -> EqualsHashCodeTestUtils.checkEqualsAndHashCode(blocks[i], unused -> deserBlocks[i])); + + var inputChannels = IntStream.range(0, SumLongAggregatorFunction.intermediateStateDesc().size()).boxed().toList(); + var finalAggregator = SumLongAggregatorFunction.create(inputChannels); + finalAggregator.addIntermediateInput(new Page(deserBlocks)); + Block[] finalBlocks = new Block[1]; + finalAggregator.evaluateFinal(finalBlocks, 0); + try (var finalBlock = (LongBlock) finalBlocks[0]) { + assertThat(finalBlock.getLong(0), is(55L)); + } + } finally { + Releasables.close(deserBlocks); + } + } finally { + Releasables.close(blocks); + } } static BytesRef randomBytesRef() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MultiValueBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MultiValueBlockTests.java index 482a61a329a94..f067999a04ff1 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MultiValueBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MultiValueBlockTests.java @@ -7,6 +7,8 @@ package org.elasticsearch.compute.data; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.test.EqualsHashCodeTestUtils; import java.io.IOException; @@ -51,7 +53,7 @@ public void testIntBlockTrivial1() { // cannot get a Vector view assertNull(block.asVector()); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(block, b -> serializeDeserializeBlock(b)); 
+ EqualsHashCodeTestUtils.checkEqualsAndHashCode(block, this::serializeDeserializeBlock, null, Releasable::close); } public void testIntBlockTrivial() { @@ -76,7 +78,7 @@ public void testIntBlockTrivial() { assertThat(block.getValueCount(0), is(1)); assertThat(block.getInt(block.getFirstValueIndex(0)), is(1)); assertNull(block.asVector()); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(block, b -> serializeDeserializeBlock(b)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(block, this::serializeDeserializeBlock, null, Releasable::close); } public void testEmpty() { @@ -84,22 +86,22 @@ public void testEmpty() { IntBlock intBlock = IntBlock.newBlockBuilder(initialSize).build(); assertThat(intBlock.getPositionCount(), is(0)); assertThat(intBlock.asVector(), is(notNullValue())); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, block -> serializeDeserializeBlock(block)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, this::serializeDeserializeBlock, null, Releasable::close); LongBlock longBlock = LongBlock.newBlockBuilder(initialSize).build(); assertThat(longBlock.getPositionCount(), is(0)); assertThat(longBlock.asVector(), is(notNullValue())); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(longBlock, block -> serializeDeserializeBlock(block)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(longBlock, this::serializeDeserializeBlock, null, Releasable::close); DoubleBlock doubleBlock = DoubleBlock.newBlockBuilder(initialSize).build(); assertThat(doubleBlock.getPositionCount(), is(0)); assertThat(doubleBlock.asVector(), is(notNullValue())); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(doubleBlock, block -> serializeDeserializeBlock(block)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(doubleBlock, this::serializeDeserializeBlock, null, Releasable::close); BytesRefBlock bytesRefBlock = BytesRefBlock.newBlockBuilder(initialSize).build(); assertThat(bytesRefBlock.getPositionCount(), is(0)); assertThat(bytesRefBlock.asVector(), is(notNullValue())); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(bytesRefBlock, block -> serializeDeserializeBlock(block)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(bytesRefBlock, this::serializeDeserializeBlock, null, Releasable::close); } } @@ -109,25 +111,25 @@ public void testNullOnly() throws IOException { assertThat(intBlock.getPositionCount(), is(1)); assertThat(intBlock.getValueCount(0), is(0)); assertNull(intBlock.asVector()); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, block -> serializeDeserializeBlock(block)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, this::serializeDeserializeBlock, null, Releasable::close); LongBlock longBlock = LongBlock.newBlockBuilder(initialSize).appendNull().build(); assertThat(longBlock.getPositionCount(), is(1)); assertThat(longBlock.getValueCount(0), is(0)); assertNull(longBlock.asVector()); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(longBlock, block -> serializeDeserializeBlock(block)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(longBlock, this::serializeDeserializeBlock, null, Releasable::close); DoubleBlock doubleBlock = DoubleBlock.newBlockBuilder(initialSize).appendNull().build(); assertThat(doubleBlock.getPositionCount(), is(1)); assertThat(doubleBlock.getValueCount(0), is(0)); assertNull(doubleBlock.asVector()); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(doubleBlock, block -> serializeDeserializeBlock(block)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(doubleBlock, this::serializeDeserializeBlock, null, Releasable::close); 
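+ // The extra dispose argument (Releasable::close) frees the copies produced by serializeDeserializeBlock, so the tracking BlockFactory's breaker can read zero in SerializationTestCase's @After check.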
BytesRefBlock bytesRefBlock = BytesRefBlock.newBlockBuilder(initialSize).appendNull().build(); assertThat(bytesRefBlock.getPositionCount(), is(1)); assertThat(bytesRefBlock.getValueCount(0), is(0)); assertNull(bytesRefBlock.asVector()); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(bytesRefBlock, block -> serializeDeserializeBlock(block)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(bytesRefBlock, this::serializeDeserializeBlock, null, Releasable::close); } } @@ -149,22 +151,22 @@ public void testNullsFollowedByValues() { Block intBlock = TestBlockBuilder.blockFromValues(blockValues, ElementType.INT); assertThat(intBlock.elementType(), is(equalTo(ElementType.INT))); BlockValueAsserter.assertBlockValues(intBlock, blockValues); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, block -> serializeDeserializeBlock(block)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, this::serializeDeserializeBlock, null, Releasable::close); Block longBlock = TestBlockBuilder.blockFromValues(blockValues, ElementType.LONG); assertThat(longBlock.elementType(), is(equalTo(ElementType.LONG))); BlockValueAsserter.assertBlockValues(longBlock, blockValues); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, block -> serializeDeserializeBlock(block)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, this::serializeDeserializeBlock, null, Releasable::close); Block doubleBlock = TestBlockBuilder.blockFromValues(blockValues, ElementType.DOUBLE); assertThat(doubleBlock.elementType(), is(equalTo(ElementType.DOUBLE))); BlockValueAsserter.assertBlockValues(doubleBlock, blockValues); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, block -> serializeDeserializeBlock(block)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, this::serializeDeserializeBlock, null, Releasable::close); Block bytesRefBlock = TestBlockBuilder.blockFromValues(blockValues, ElementType.BYTES_REF); assertThat(bytesRefBlock.elementType(), is(equalTo(ElementType.BYTES_REF))); BlockValueAsserter.assertBlockValues(bytesRefBlock, blockValues); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, block -> serializeDeserializeBlock(block)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, this::serializeDeserializeBlock, null, Releasable::close); } public void testMultiValuesAndNullsSmall() { @@ -181,22 +183,22 @@ public void testMultiValuesAndNullsSmall() { Block intBlock = TestBlockBuilder.blockFromValues(blockValues, ElementType.INT); assertThat(intBlock.elementType(), is(equalTo(ElementType.INT))); BlockValueAsserter.assertBlockValues(intBlock, blockValues); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, block -> serializeDeserializeBlock(block)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, this::serializeDeserializeBlock, null, Releasable::close); Block longBlock = TestBlockBuilder.blockFromValues(blockValues, ElementType.LONG); assertThat(longBlock.elementType(), is(equalTo(ElementType.LONG))); BlockValueAsserter.assertBlockValues(longBlock, blockValues); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, block -> serializeDeserializeBlock(block)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, this::serializeDeserializeBlock, null, Releasable::close); Block doubleBlock = TestBlockBuilder.blockFromValues(blockValues, ElementType.DOUBLE); assertThat(doubleBlock.elementType(), is(equalTo(ElementType.DOUBLE))); BlockValueAsserter.assertBlockValues(doubleBlock, blockValues); - 
EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, block -> serializeDeserializeBlock(block)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, this::serializeDeserializeBlock, null, Releasable::close); Block bytesRefBlock = TestBlockBuilder.blockFromValues(blockValues, ElementType.BYTES_REF); assertThat(bytesRefBlock.elementType(), is(equalTo(ElementType.BYTES_REF))); BlockValueAsserter.assertBlockValues(bytesRefBlock, blockValues); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, block -> serializeDeserializeBlock(block)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, this::serializeDeserializeBlock, null, Releasable::close); } public void testMultiValuesAndNulls() { @@ -217,27 +219,27 @@ public void testMultiValuesAndNulls() { Block intBlock = TestBlockBuilder.blockFromValues(blockValues, ElementType.INT); assertThat(intBlock.elementType(), is(equalTo(ElementType.INT))); BlockValueAsserter.assertBlockValues(intBlock, blockValues); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, block -> serializeDeserializeBlock(block)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, this::serializeDeserializeBlock, null, Releasable::close); Block longBlock = TestBlockBuilder.blockFromValues(blockValues, ElementType.LONG); assertThat(longBlock.elementType(), is(equalTo(ElementType.LONG))); BlockValueAsserter.assertBlockValues(longBlock, blockValues); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, block -> serializeDeserializeBlock(block)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, this::serializeDeserializeBlock, null, Releasable::close); Block doubleBlock = TestBlockBuilder.blockFromValues(blockValues, ElementType.DOUBLE); assertThat(doubleBlock.elementType(), is(equalTo(ElementType.DOUBLE))); BlockValueAsserter.assertBlockValues(doubleBlock, blockValues); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, block -> serializeDeserializeBlock(block)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, this::serializeDeserializeBlock, null, Releasable::close); Block bytesRefBlock = TestBlockBuilder.blockFromValues(blockValues, ElementType.BYTES_REF); assertThat(bytesRefBlock.elementType(), is(equalTo(ElementType.BYTES_REF))); BlockValueAsserter.assertBlockValues(bytesRefBlock, blockValues); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, block -> serializeDeserializeBlock(block)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, this::serializeDeserializeBlock, null, Releasable::close); } // Tests that the use of Block builder beginPositionEntry (or not) with just a single value, // and no nulls, builds a block backed by a vector. 
- public void testSingleNonNullValues() { + public void testSingleNonNullValues() throws IOException { List blockValues = new ArrayList<>(); int positions = randomInt(512); for (int i = 0; i < positions; i++) { @@ -256,10 +258,14 @@ public void testSingleNonNullValues() { TestBlockBuilder.blockFromSingleValues(blockValues, ElementType.BYTES_REF), TestBlockBuilder.blockFromValues(blockValues.stream().map(List::of).toList(), ElementType.BYTES_REF) ); - for (Block block : blocks) { - assertThat(block.asVector(), is(notNullValue())); - BlockValueAsserter.assertBlockValues(block, blockValues.stream().map(List::of).toList()); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(block, unused -> serializeDeserializeBlock(block)); + try { + for (Block block : blocks) { + assertThat(block.asVector(), is(notNullValue())); + BlockValueAsserter.assertBlockValues(block, blockValues.stream().map(List::of).toList()); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(block, this::serializeDeserializeBlock, null, Releasable::close); + } + } finally { + Releasables.close(blocks); } } @@ -302,7 +308,7 @@ public void testSingleWithNullValues() { for (Block block : blocks) { assertThat(block.asVector(), is(nullValue())); BlockValueAsserter.assertBlockValues(block, blockValues.stream().map(MultiValueBlockTests::mapToList).toList()); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(block, unused -> serializeDeserializeBlock(block)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(block, this::serializeDeserializeBlock, null, Releasable::close); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SerializationTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SerializationTestCase.java index 62b754d76fe49..a5f0421fd9dff 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SerializationTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SerializationTestCase.java @@ -7,20 +7,43 @@ package org.elasticsearch.compute.data; +import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.ByteBufferStreamInput; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.test.ESTestCase; +import org.junit.After; +import org.junit.Before; import java.io.IOException; import java.io.UncheckedIOException; -abstract class SerializationTestCase extends ESTestCase { +import static org.hamcrest.Matchers.equalTo; - final NamedWriteableRegistry registry = new NamedWriteableRegistry(Block.getNamedWriteables()); +public abstract class SerializationTestCase extends ESTestCase { + BigArrays bigArrays; + private BlockFactory blockFactory; + NamedWriteableRegistry registry = new NamedWriteableRegistry(Block.getNamedWriteables(() -> blockFactory)); + + @Before + public final void newBlockFactory() { + bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)).withCircuitBreaking(); + blockFactory = new 
BlockFactory(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST), bigArrays); + } + + @After + public final void blockFactoryEmpty() { + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + blockFactory = null; + registry = null; + } Page serializeDeserializePage(Page origPage) throws IOException { try (BytesStreamOutput out = new BytesStreamOutput()) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java index bbbfd44014ffc..1852e715bfb08 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; @@ -17,10 +16,16 @@ import java.util.stream.LongStream; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.sameInstance; public class LimitOperatorTests extends OperatorTestCase { @Override - protected Operator.OperatorFactory simple(BigArrays bigArrays) { + protected DriverContext driverContext() { + return breakingDriverContext(); + } + + @Override + protected LimitOperator.Factory simple(BigArrays bigArrays) { return new LimitOperator.Factory(100); } @@ -53,16 +58,21 @@ protected ByteSizeValue smallEnoughToCircuitBreak() { } public void testStatus() { - LimitOperator op = (LimitOperator) simple(BigArrays.NON_RECYCLING_INSTANCE).get(driverContext()); + BlockFactory blockFactory = driverContext().blockFactory(); + LimitOperator op = simple(BigArrays.NON_RECYCLING_INSTANCE).get(driverContext()); LimitOperator.Status status = op.status(); assertThat(status.limit(), equalTo(100)); assertThat(status.limitRemaining(), equalTo(100)); assertThat(status.pagesProcessed(), equalTo(0)); - Page p = new Page(Block.constantNullBlock(10)); - op.addInput(p); - assertSame(p, op.getOutput()); + Page p = new Page(blockFactory.newConstantNullBlock(10)); + try { + op.addInput(p); + assertSame(p, op.getOutput()); + } finally { + p.releaseBlocks(); + } status = op.status(); assertThat(status.limit(), equalTo(100)); assertThat(status.limitRemaining(), equalTo(90)); @@ -70,14 +80,52 @@ public void testStatus() { } public void testNeedInput() { - LimitOperator op = (LimitOperator) simple(BigArrays.NON_RECYCLING_INSTANCE).get(driverContext()); - assertTrue(op.needsInput()); - Page p = new Page(Block.constantNullBlock(10)); - op.addInput(p); - assertFalse(op.needsInput()); - op.getOutput(); - assertTrue(op.needsInput()); - op.finish(); - assertFalse(op.needsInput()); + BlockFactory blockFactory = driverContext().blockFactory(); + try (LimitOperator op = simple(BigArrays.NON_RECYCLING_INSTANCE).get(driverContext())) { + assertTrue(op.needsInput()); + Page p = new Page(blockFactory.newConstantNullBlock(10)); + op.addInput(p); + assertFalse(op.needsInput()); + op.getOutput().releaseBlocks(); + assertTrue(op.needsInput()); + op.finish(); + assertFalse(op.needsInput()); + } + } + + public void testBlockBiggerThanRemaining() { + BlockFactory blockFactory = driverContext().blockFactory(); + try (LimitOperator op = simple(BigArrays.NON_RECYCLING_INSTANCE).get(driverContext())) { + assertTrue(op.needsInput()); + 
Page p = new Page(blockFactory.newConstantNullBlock(200)); // test doesn't close because operator returns a view + op.addInput(p); + assertFalse(op.needsInput()); + Page result = op.getOutput(); + try { + assertThat(result.getPositionCount(), equalTo(100)); + } finally { + result.releaseBlocks(); + } + assertFalse(op.needsInput()); + assertTrue(op.isFinished()); + } + } + + public void testBlockPreciselyRemaining() { + BlockFactory blockFactory = driverContext().blockFactory(); + try (LimitOperator op = simple(BigArrays.NON_RECYCLING_INSTANCE).get(driverContext())) { + assertTrue(op.needsInput()); + Page p = new Page(blockFactory.newConstantNullBlock(100)); // test doesn't close because operator returns same page + op.addInput(p); + assertFalse(op.needsInput()); + Page result = op.getOutput(); + try { + assertThat(result, sameInstance(p)); + } finally { + result.releaseBlocks(); + } + assertFalse(op.needsInput()); + assertTrue(op.isFinished()); + } } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index 114d4caed8a0f..1ea9db61be8fa 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -428,7 +428,7 @@ public void sendResponse(TransportResponse response) throws IOException { private MockTransportService newTransportService() { List namedWriteables = new ArrayList<>(ClusterModule.getNamedWriteables()); - namedWriteables.addAll(Block.getNamedWriteables()); + namedWriteables.addAll(Block.getNamedWriteables(BlockFactory::getNonBreakingInstance)); NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(namedWriteables); MockTransportService service = MockTransportService.createNewService( Settings.EMPTY, diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java index a4c13af3d99ad..f10ca17d741d8 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java @@ -861,6 +861,7 @@ public void testFromStatsLimit() { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/99826") public void testFromLimit() { try (EsqlQueryResponse results = run("from test | keep data | limit 2")) { logger.info(results); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java index f9d97cbd910e0..f3e28d6d6bdb4 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.lookup; +import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; @@ -52,6 +53,7 @@ import static 
org.hamcrest.Matchers.equalTo; +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/99826") public class EnrichLookupIT extends AbstractEsqlIntegTestCase { public void testSimple() { diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/CanMatchIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/CanMatchIT.java index d4d6e8100f152..3001ee926b271 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/CanMatchIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/CanMatchIT.java @@ -78,26 +78,41 @@ public void testCanMatch() { handler.messageReceived(request, channel, task); }); } - EsqlQueryResponse resp = run("from events_*", randomPragmas(), new RangeQueryBuilder("@timestamp").gte("2023-01-01")); - assertThat(getValuesList(resp), hasSize(4)); - assertThat(queriedIndices, equalTo(Set.of("events_2023"))); - queriedIndices.clear(); - - resp = run("from events_*", randomPragmas(), new RangeQueryBuilder("@timestamp").lt("2023-01-01")); - assertThat(getValuesList(resp), hasSize(3)); - assertThat(queriedIndices, equalTo(Set.of("events_2022"))); - queriedIndices.clear(); - - resp = run("from events_*", randomPragmas(), new RangeQueryBuilder("@timestamp").gt("2022-01-01").lt("2023-12-31")); - assertThat(getValuesList(resp), hasSize(7)); - assertThat(queriedIndices, equalTo(Set.of("events_2022", "events_2023"))); - queriedIndices.clear(); - - resp = run("from events_*", randomPragmas(), new RangeQueryBuilder("@timestamp").gt("2021-01-01").lt("2021-12-31")); - assertThat(getValuesList(resp), hasSize(0)); - assertThat(queriedIndices, empty()); - queriedIndices.clear(); + try (EsqlQueryResponse resp = run("from events_*", randomPragmas(), new RangeQueryBuilder("@timestamp").gte("2023-01-01"))) { + assertThat(getValuesList(resp), hasSize(4)); + assertThat(queriedIndices, equalTo(Set.of("events_2023"))); + queriedIndices.clear(); + } + + try (EsqlQueryResponse resp = run("from events_*", randomPragmas(), new RangeQueryBuilder("@timestamp").lt("2023-01-01"))) { + assertThat(getValuesList(resp), hasSize(3)); + assertThat(queriedIndices, equalTo(Set.of("events_2022"))); + queriedIndices.clear(); + } + try ( + EsqlQueryResponse resp = run( + "from events_*", + randomPragmas(), + new RangeQueryBuilder("@timestamp").gt("2022-01-01").lt("2023-12-31") + ) + ) { + assertThat(getValuesList(resp), hasSize(7)); + assertThat(queriedIndices, equalTo(Set.of("events_2022", "events_2023"))); + queriedIndices.clear(); + } + + try ( + EsqlQueryResponse resp = run( + "from events_*", + randomPragmas(), + new RangeQueryBuilder("@timestamp").gt("2021-01-01").lt("2021-12-31") + ) + ) { + assertThat(getValuesList(resp), hasSize(0)); + assertThat(queriedIndices, empty()); + queriedIndices.clear(); + } } finally { for (TransportService ts : internalCluster().getInstances(TransportService.class)) { ((MockTransportService) ts).clearAllRules(); @@ -129,50 +144,101 @@ public void testAliasFilters() { .addAlias("employees", "engineers", new MatchQueryBuilder("dept", "engineering")) .addAlias("employees", "sales", new MatchQueryBuilder("dept", "sales")) ); - EsqlQueryResponse resp; // employees index - resp = run("from employees | stats count(emp_no)", randomPragmas()); - assertThat(getValuesList(resp).get(0), equalTo(List.of(6L))); - resp = run("from employees | stats avg(salary)", randomPragmas()); - 
assertThat(getValuesList(resp).get(0), equalTo(List.of(26.95d))); + try (EsqlQueryResponse resp = run("from employees | stats count(emp_no)", randomPragmas())) { + assertThat(getValuesList(resp).get(0), equalTo(List.of(6L))); + } + try (EsqlQueryResponse resp = run("from employees | stats avg(salary)", randomPragmas())) { + assertThat(getValuesList(resp).get(0), equalTo(List.of(26.95d))); + } - resp = run("from employees | stats count(emp_no)", randomPragmas(), new RangeQueryBuilder("hired").lt("2012-04-30")); - assertThat(getValuesList(resp).get(0), equalTo(List.of(4L))); - resp = run("from employees | stats avg(salary)", randomPragmas(), new RangeQueryBuilder("hired").lt("2012-04-30")); - assertThat(getValuesList(resp).get(0), equalTo(List.of(26.65d))); + try ( + EsqlQueryResponse resp = run( + "from employees | stats count(emp_no)", + randomPragmas(), + new RangeQueryBuilder("hired").lt("2012-04-30") + ) + ) { + assertThat(getValuesList(resp).get(0), equalTo(List.of(4L))); + } + try ( + EsqlQueryResponse resp = run( + "from employees | stats avg(salary)", + randomPragmas(), + new RangeQueryBuilder("hired").lt("2012-04-30") + ) + ) { + assertThat(getValuesList(resp).get(0), equalTo(List.of(26.65d))); + } // match both employees index and engineers alias -> employees - resp = run("from e* | stats count(emp_no)", randomPragmas()); - assertThat(getValuesList(resp).get(0), equalTo(List.of(6L))); - resp = run("from employees | stats avg(salary)", randomPragmas()); - assertThat(getValuesList(resp).get(0), equalTo(List.of(26.95d))); + try (EsqlQueryResponse resp = run("from e* | stats count(emp_no)", randomPragmas())) { + assertThat(getValuesList(resp).get(0), equalTo(List.of(6L))); + } + try (EsqlQueryResponse resp = run("from employees | stats avg(salary)", randomPragmas())) { + assertThat(getValuesList(resp).get(0), equalTo(List.of(26.95d))); + } - resp = run("from e* | stats count(emp_no)", randomPragmas(), new RangeQueryBuilder("hired").lt("2012-04-30")); - assertThat(getValuesList(resp).get(0), equalTo(List.of(4L))); - resp = run("from e* | stats avg(salary)", randomPragmas(), new RangeQueryBuilder("hired").lt("2012-04-30")); - assertThat(getValuesList(resp).get(0), equalTo(List.of(26.65d))); + try ( + EsqlQueryResponse resp = run("from e* | stats count(emp_no)", randomPragmas(), new RangeQueryBuilder("hired").lt("2012-04-30")) + ) { + assertThat(getValuesList(resp).get(0), equalTo(List.of(4L))); + } + try ( + EsqlQueryResponse resp = run("from e* | stats avg(salary)", randomPragmas(), new RangeQueryBuilder("hired").lt("2012-04-30")) + ) { + assertThat(getValuesList(resp).get(0), equalTo(List.of(26.65d))); + } // engineers alias - resp = run("from engineer* | stats count(emp_no)", randomPragmas()); - assertThat(getValuesList(resp).get(0), equalTo(List.of(4L))); - resp = run("from engineer* | stats avg(salary)", randomPragmas()); - assertThat(getValuesList(resp).get(0), equalTo(List.of(26.65d))); + try (EsqlQueryResponse resp = run("from engineer* | stats count(emp_no)", randomPragmas())) { + assertThat(getValuesList(resp).get(0), equalTo(List.of(4L))); + } + try (EsqlQueryResponse resp = run("from engineer* | stats avg(salary)", randomPragmas())) { + assertThat(getValuesList(resp).get(0), equalTo(List.of(26.65d))); + } - resp = run("from engineer* | stats count(emp_no)", randomPragmas(), new RangeQueryBuilder("hired").lt("2012-04-30")); - assertThat(getValuesList(resp).get(0), equalTo(List.of(3L))); - resp = run("from engineer* | stats avg(salary)", randomPragmas(), new 
RangeQueryBuilder("hired").lt("2012-04-30")); - assertThat(getValuesList(resp).get(0), equalTo(List.of(27.2d))); + try ( + EsqlQueryResponse resp = run( + "from engineer* | stats count(emp_no)", + randomPragmas(), + new RangeQueryBuilder("hired").lt("2012-04-30") + ) + ) { + assertThat(getValuesList(resp).get(0), equalTo(List.of(3L))); + } + try ( + EsqlQueryResponse resp = run( + "from engineer* | stats avg(salary)", + randomPragmas(), + new RangeQueryBuilder("hired").lt("2012-04-30") + ) + ) { + assertThat(getValuesList(resp).get(0), equalTo(List.of(27.2d))); + } // sales alias - resp = run("from sales | stats count(emp_no)", randomPragmas()); - assertThat(getValuesList(resp).get(0), equalTo(List.of(2L))); - resp = run("from sales | stats avg(salary)", randomPragmas()); - assertThat(getValuesList(resp).get(0), equalTo(List.of(27.55d))); - - resp = run("from sales | stats count(emp_no)", randomPragmas(), new RangeQueryBuilder("hired").lt("2012-04-30")); - assertThat(getValuesList(resp).get(0), equalTo(List.of(1L))); - resp = run("from sales | stats avg(salary)", randomPragmas(), new RangeQueryBuilder("hired").lt("2012-04-30")); - assertThat(getValuesList(resp).get(0), equalTo(List.of(25.0d))); + try (EsqlQueryResponse resp = run("from sales | stats count(emp_no)", randomPragmas())) { + assertThat(getValuesList(resp).get(0), equalTo(List.of(2L))); + } + try (EsqlQueryResponse resp = run("from sales | stats avg(salary)", randomPragmas())) { + assertThat(getValuesList(resp).get(0), equalTo(List.of(27.55d))); + } + + try ( + EsqlQueryResponse resp = run( + "from sales | stats count(emp_no)", + randomPragmas(), + new RangeQueryBuilder("hired").lt("2012-04-30") + ) + ) { + assertThat(getValuesList(resp).get(0), equalTo(List.of(1L))); + } + try ( + EsqlQueryResponse resp = run("from sales | stats avg(salary)", randomPragmas(), new RangeQueryBuilder("hired").lt("2012-04-30")) + ) { + assertThat(getValuesList(resp).get(0), equalTo(List.of(25.0d))); + } } public void testFailOnUnavailableShards() throws Exception { @@ -211,11 +277,12 @@ public void testFailOnUnavailableShards() throws Exception { .add(new IndexRequest().source("timestamp", 10, "message", "aa")) .add(new IndexRequest().source("timestamp", 11, "message", "bb")) .get(); - EsqlQueryResponse resp = run("from events,logs | KEEP timestamp,message"); - assertThat(getValuesList(resp), hasSize(5)); - internalCluster().stopNode(logsOnlyNode); - ensureClusterSizeConsistency(); - Exception error = expectThrows(Exception.class, () -> run("from events,logs | KEEP timestamp,message")); - assertThat(error.getMessage(), containsString("no shard copies found")); + try (EsqlQueryResponse resp = run("from events,logs | KEEP timestamp,message")) { + assertThat(getValuesList(resp), hasSize(5)); + internalCluster().stopNode(logsOnlyNode); + ensureClusterSizeConsistency(); + Exception error = expectThrows(Exception.class, () -> run("from events,logs | KEEP timestamp,message")); + assertThat(error.getMessage(), containsString("no shard copies found")); + } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java index 62a74e5023773..761bddfb1c2e8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.settings.SettingsFilter; import 
org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.lucene.LuceneSourceOperator; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; import org.elasticsearch.compute.operator.AbstractPageMappingOperator; @@ -83,6 +84,8 @@ public class EsqlPlugin extends Plugin implements ActionPlugin { Setting.Property.NodeScope ); + private BlockFactoryHolder blockFactoryHolder = new BlockFactoryHolder(); + @Override public Collection<Object> createComponents( Client client, @@ -102,7 +105,8 @@ public Collection<Object> createComponents( ) { return List.of( new PlanExecutor(new IndexResolver(client, clusterService.getClusterName().value(), EsqlDataTypeRegistry.INSTANCE, Set::of)), - new ExchangeService(clusterService.getSettings(), threadPool, EsqlPlugin.ESQL_THREAD_POOL_NAME) + new ExchangeService(clusterService.getSettings(), threadPool, EsqlPlugin.ESQL_THREAD_POOL_NAME), + blockFactoryHolder ); } @@ -153,7 +157,7 @@ public List<NamedWriteableRegistry.Entry> getNamedWriteables() { ValuesSourceReaderOperator.Status.ENTRY, SingleValueQuery.ENTRY ).stream(), - Block.getNamedWriteables().stream() + Block.getNamedWriteables(blockFactoryHolder).stream() ).toList(); } @@ -181,4 +185,13 @@ public List<ExecutorBuilder<?>> getExecutorBuilders(Settings settings) { ) ); } + + static class BlockFactoryHolder implements Supplier<BlockFactory> { + BlockFactory blockFactory; + + @Override + public BlockFactory get() { + return blockFactory; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java index af515d0797202..79e1183b3cbb7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java @@ -62,7 +62,8 @@ public TransportEsqlQueryAction( ExchangeService exchangeService, ClusterService clusterService, ThreadPool threadPool, - BigArrays bigArrays + BigArrays bigArrays, + EsqlPlugin.BlockFactoryHolder blockFactoryHolder ) { // TODO replace SAME when removing workaround for https://github.com/elastic/elasticsearch/issues/97916 super(EsqlQueryAction.NAME, transportService, actionFilters, EsqlQueryRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); @@ -84,6 +85,14 @@ public TransportEsqlQueryAction( blockFactory ); this.settings = settings; + /* + * This hacks the block factory into a shared place where + * it can be used for deserialization. We'd prefer a less + * strange way to do it, but Plugin doesn't give us access + * to BigArrays, which we need to build the BlockFactory + * up front.
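+ * The holder is created by EsqlPlugin, returned from createComponents so + * it can be injected into this action, and passed to Block.getNamedWriteables, + * which reads the factory from it lazily when wiring block deserialization.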
+ */ + blockFactoryHolder.blockFactory = blockFactory; } static BlockFactory createBlockFactory(BigArrays bigArrays) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index 5bf8df1c3fd0b..68240ea838dc5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -10,9 +10,16 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleBlock; @@ -29,6 +36,8 @@ import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.versionfield.Version; +import org.junit.After; +import org.junit.Before; import java.util.ArrayList; import java.util.List; @@ -36,9 +45,22 @@ import static org.hamcrest.Matchers.equalTo; public class EsqlQueryResponseTests extends AbstractChunkedSerializingTestCase { + private BlockFactory blockFactory; + + @Before + public void newBlockFactory() { + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)).withCircuitBreaking(); + blockFactory = new BlockFactory(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST), bigArrays); + } + + @After + public void blockFactoryEmpty() { + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { - return new NamedWriteableRegistry(Block.getNamedWriteables()); + return new NamedWriteableRegistry(Block.getNamedWriteables(() -> blockFactory)); } @Override @@ -72,7 +94,7 @@ private ColumnInfo randomColumnInfo() { private Page randomPage(List columns) { return new Page(columns.stream().map(c -> { - Block.Builder builder = LocalExecutionPlanner.toElementType(EsqlDataTypes.fromName(c.type())).newBlockBuilder(1); + Block.Builder builder = LocalExecutionPlanner.toElementType(EsqlDataTypes.fromName(c.type())).newBlockBuilder(1, blockFactory); switch (c.type()) { case "unsigned_long", "long" -> ((LongBlock.Builder) builder).appendLong(randomLong()); case "integer" -> ((IntBlock.Builder) builder).appendInt(randomInt()); @@ -109,9 +131,9 @@ protected EsqlQueryResponse mutateInstance(EsqlQueryResponse instance) { List cols = new ArrayList<>(instance.columns()); // keep the type the same so the values are still valid but change the name cols.set(mutCol, new ColumnInfo(cols.get(mutCol).name() + "mut", cols.get(mutCol).type())); - yield new EsqlQueryResponse(cols, instance.pages(), instance.columnar()); + yield new EsqlQueryResponse(cols, deepCopyOfPages(instance), instance.columnar()); } - 
case 1 -> new EsqlQueryResponse(instance.columns(), instance.pages(), false == instance.columnar()); + case 1 -> new EsqlQueryResponse(instance.columns(), deepCopyOfPages(instance), false == instance.columnar()); case 2 -> { int noPages = instance.pages().size(); yield new EsqlQueryResponse( @@ -124,6 +146,19 @@ yield new EsqlQueryResponse( }; } + private List deepCopyOfPages(EsqlQueryResponse response) { + List deepCopiedPages = new ArrayList<>(response.pages().size()); + for (Page p : response.pages()) { + Block[] deepCopiedBlocks = new Block[p.getBlockCount()]; + for (int b = 0; b < p.getBlockCount(); b++) { + deepCopiedBlocks[b] = BlockUtils.deepCopyOf(p.getBlock(b), blockFactory); + } + deepCopiedPages.add(new Page(deepCopiedBlocks)); + } + assertThat(deepCopiedPages, equalTo(response.pages())); + return deepCopiedPages; + } + @Override protected Writeable.Reader instanceReader() { return EsqlQueryResponse::new; @@ -135,28 +170,32 @@ protected EsqlQueryResponse doParseInstance(XContentParser parser) { } public void testChunkResponseSizeColumnar() { - EsqlQueryResponse resp = randomResponse(true); - int columnCount = resp.pages().get(0).getBlockCount(); - int bodySize = resp.pages().stream().mapToInt(p -> p.getPositionCount() * p.getBlockCount()).sum() + columnCount * 2; - assertChunkCount(resp, r -> 5 + bodySize); + try (EsqlQueryResponse resp = randomResponse(true)) { + int columnCount = resp.pages().get(0).getBlockCount(); + int bodySize = resp.pages().stream().mapToInt(p -> p.getPositionCount() * p.getBlockCount()).sum() + columnCount * 2; + assertChunkCount(resp, r -> 5 + bodySize); + } } public void testChunkResponseSizeRows() { - EsqlQueryResponse resp = randomResponse(false); - int bodySize = resp.pages().stream().mapToInt(p -> p.getPositionCount()).sum(); - assertChunkCount(resp, r -> 5 + bodySize); + try (EsqlQueryResponse resp = randomResponse(false)) { + int bodySize = resp.pages().stream().mapToInt(p -> p.getPositionCount()).sum(); + assertChunkCount(resp, r -> 5 + bodySize); + } } public void testSimpleXContentColumnar() { - EsqlQueryResponse response = simple(true); - assertThat(Strings.toString(response), equalTo(""" - {"columns":[{"name":"foo","type":"integer"}],"values":[[40,80]]}""")); + try (EsqlQueryResponse response = simple(true)) { + assertThat(Strings.toString(response), equalTo(""" + {"columns":[{"name":"foo","type":"integer"}],"values":[[40,80]]}""")); + } } public void testSimpleXContentRows() { - EsqlQueryResponse response = simple(false); - assertThat(Strings.toString(response), equalTo(""" - {"columns":[{"name":"foo","type":"integer"}],"values":[[40],[80]]}""")); + try (EsqlQueryResponse response = simple(false)) { + assertThat(Strings.toString(response), equalTo(""" + {"columns":[{"name":"foo","type":"integer"}],"values":[[40],[80]]}""")); + } } private EsqlQueryResponse simple(boolean columnar) { @@ -166,4 +205,9 @@ private EsqlQueryResponse simple(boolean columnar) { columnar ); } + + @Override + protected void dispose(EsqlQueryResponse esqlQueryResponse) { + esqlQueryResponse.close(); + } } From d3506fffb007bd67f25a0b8eccdba3b5d779a5b1 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 4 Oct 2023 10:43:39 -0400 Subject: [PATCH 2/6] Merge update --- ...ountDistinctBooleanAggregatorFunction.java | 123 ---------- ...inctBooleanAggregatorFunctionSupplier.java | 44 ---- ...inctBooleanGroupingAggregatorFunction.java | 212 ----------------- ...untDistinctBytesRefAggregatorFunction.java | 131 ----------- ...nctBytesRefAggregatorFunctionSupplier.java | 47 
---- ...nctBytesRefGroupingAggregatorFunction.java | 218 ------------------ ...CountDistinctDoubleAggregatorFunction.java | 131 ----------- ...tinctDoubleAggregatorFunctionSupplier.java | 47 ---- ...tinctDoubleGroupingAggregatorFunction.java | 216 ----------------- .../CountDistinctIntAggregatorFunction.java | 131 ----------- ...DistinctIntAggregatorFunctionSupplier.java | 47 ---- ...DistinctIntGroupingAggregatorFunction.java | 214 ----------------- .../CountDistinctLongAggregatorFunction.java | 131 ----------- ...istinctLongAggregatorFunctionSupplier.java | 47 ---- ...istinctLongGroupingAggregatorFunction.java | 216 ----------------- .../MaxDoubleAggregatorFunction.java | 134 ----------- .../MaxDoubleAggregatorFunctionSupplier.java | 42 ---- .../MaxDoubleGroupingAggregatorFunction.java | 213 ----------------- .../aggregation/MaxIntAggregatorFunction.java | 134 ----------- .../MaxIntAggregatorFunctionSupplier.java | 42 ---- .../MaxIntGroupingAggregatorFunction.java | 211 ----------------- .../MaxLongAggregatorFunction.java | 134 ----------- .../MaxLongAggregatorFunctionSupplier.java | 42 ---- .../MaxLongGroupingAggregatorFunction.java | 213 ----------------- ...luteDeviationDoubleAggregatorFunction.java | 124 ---------- ...ationDoubleAggregatorFunctionSupplier.java | 44 ---- ...ationDoubleGroupingAggregatorFunction.java | 212 ----------------- ...bsoluteDeviationIntAggregatorFunction.java | 124 ---------- ...eviationIntAggregatorFunctionSupplier.java | 44 ---- ...eviationIntGroupingAggregatorFunction.java | 210 ----------------- ...soluteDeviationLongAggregatorFunction.java | 124 ---------- ...viationLongAggregatorFunctionSupplier.java | 44 ---- ...viationLongGroupingAggregatorFunction.java | 212 ----------------- .../MinDoubleAggregatorFunction.java | 134 ----------- .../MinDoubleAggregatorFunctionSupplier.java | 42 ---- .../MinDoubleGroupingAggregatorFunction.java | 213 ----------------- .../aggregation/MinIntAggregatorFunction.java | 134 ----------- .../MinIntAggregatorFunctionSupplier.java | 42 ---- .../MinIntGroupingAggregatorFunction.java | 211 ----------------- .../MinLongAggregatorFunction.java | 134 ----------- .../MinLongAggregatorFunctionSupplier.java | 42 ---- .../MinLongGroupingAggregatorFunction.java | 213 ----------------- .../PercentileDoubleAggregatorFunction.java | 127 ---------- ...ntileDoubleAggregatorFunctionSupplier.java | 47 ---- ...ntileDoubleGroupingAggregatorFunction.java | 216 ----------------- .../PercentileIntAggregatorFunction.java | 127 ---------- ...rcentileIntAggregatorFunctionSupplier.java | 46 ---- ...rcentileIntGroupingAggregatorFunction.java | 214 ----------------- .../PercentileLongAggregatorFunction.java | 127 ---------- ...centileLongAggregatorFunctionSupplier.java | 46 ---- ...centileLongGroupingAggregatorFunction.java | 216 ----------------- .../SumDoubleAggregatorFunction.java | 133 ----------- .../SumDoubleAggregatorFunctionSupplier.java | 42 ---- .../SumDoubleGroupingAggregatorFunction.java | 216 ----------------- .../aggregation/SumIntAggregatorFunction.java | 136 ----------- .../SumIntAggregatorFunctionSupplier.java | 42 ---- .../SumIntGroupingAggregatorFunction.java | 213 ----------------- .../SumLongAggregatorFunction.java | 134 ----------- .../SumLongAggregatorFunctionSupplier.java | 42 ---- .../SumLongGroupingAggregatorFunction.java | 213 ----------------- .../compute/data/BlockSerializationTests.java | 4 +- .../xpack/esql/plugin/CanMatchIT.java | 92 -------- 62 files changed, 2 insertions(+), 7854 deletions(-) delete mode 100644 
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionSupplier.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionSupplier.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionSupplier.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionSupplier.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionSupplier.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionSupplier.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionSupplier.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionSupplier.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java delete mode 100644 
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionSupplier.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionSupplier.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionSupplier.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionSupplier.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionSupplier.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionSupplier.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionSupplier.java delete mode 100644 
 delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java
 delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java
 delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionSupplier.java
 delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java
 delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java
 delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionSupplier.java
 delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java
 delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java
 delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionSupplier.java
 delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java
 delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java
 delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionSupplier.java
 delete mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java
deleted file mode 100644
index 915f22560a0a3..0000000000000
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java
+++ /dev/null
@@ -1,123 +0,0 @@
-// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
-// or more contributor license agreements. Licensed under the Elastic License
-// 2.0; you may not use this file except in compliance with the Elastic License
-// 2.0.
-package org.elasticsearch.compute.aggregation;
-
-import java.lang.Integer;
-import java.lang.Override;
-import java.lang.String;
-import java.lang.StringBuilder;
-import java.util.List;
-import org.elasticsearch.compute.data.Block;
-import org.elasticsearch.compute.data.BooleanBlock;
-import org.elasticsearch.compute.data.BooleanVector;
-import org.elasticsearch.compute.data.ElementType;
-import org.elasticsearch.compute.data.Page;
-import org.elasticsearch.compute.operator.DriverContext;
-
-/**
- * {@link AggregatorFunction} implementation for {@link CountDistinctBooleanAggregator}.
- * This class is generated. Do not edit it.
- */
-public final class CountDistinctBooleanAggregatorFunction implements AggregatorFunction {
-  private static final List INTERMEDIATE_STATE_DESC = List.of(
-      new IntermediateStateDesc("fbit", ElementType.BOOLEAN),
-      new IntermediateStateDesc("tbit", ElementType.BOOLEAN) );
-
-  private final DriverContext driverContext;
-
-  private final CountDistinctBooleanAggregator.SingleState state;
-
-  private final List channels;
-
-  public CountDistinctBooleanAggregatorFunction(DriverContext driverContext, List channels,
-      CountDistinctBooleanAggregator.SingleState state) {
-    this.driverContext = driverContext;
-    this.channels = channels;
-    this.state = state;
-  }
-
-  public static CountDistinctBooleanAggregatorFunction create(DriverContext driverContext,
-      List channels) {
-    return new CountDistinctBooleanAggregatorFunction(driverContext, channels, CountDistinctBooleanAggregator.initSingle());
-  }
-
-  public static List intermediateStateDesc() {
-    return INTERMEDIATE_STATE_DESC;
-  }
-
-  @Override
-  public int intermediateBlockCount() {
-    return INTERMEDIATE_STATE_DESC.size();
-  }
-
-  @Override
-  public void addRawInput(Page page) {
-    Block uncastBlock = page.getBlock(channels.get(0));
-    if (uncastBlock.areAllValuesNull()) {
-      return;
-    }
-    BooleanBlock block = (BooleanBlock) uncastBlock;
-    BooleanVector vector = block.asVector();
-    if (vector != null) {
-      addRawVector(vector);
-    } else {
-      addRawBlock(block);
-    }
-  }
-
-  private void addRawVector(BooleanVector vector) {
-    for (int i = 0; i < vector.getPositionCount(); i++) {
-      CountDistinctBooleanAggregator.combine(state, vector.getBoolean(i));
-    }
-  }
-
-  private void addRawBlock(BooleanBlock block) {
-    for (int p = 0; p < block.getPositionCount(); p++) {
-      if (block.isNull(p)) {
-        continue;
-      }
-      int start = block.getFirstValueIndex(p);
-      int end = start + block.getValueCount(p);
-      for (int i = start; i < end; i++) {
-        CountDistinctBooleanAggregator.combine(state, block.getBoolean(i));
-      }
-    }
-  }
-
-  @Override
-  public void addIntermediateInput(Page page) {
-    assert channels.size() == intermediateBlockCount();
-    assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size();
-    BooleanVector fbit = page.getBlock(channels.get(0)).asVector();
-    BooleanVector tbit = page.getBlock(channels.get(1)).asVector();
-    assert fbit.getPositionCount() == 1;
-    assert fbit.getPositionCount() == tbit.getPositionCount();
-    CountDistinctBooleanAggregator.combineIntermediate(state, fbit.getBoolean(0), tbit.getBoolean(0));
-  }
-
-  @Override
-  public void evaluateIntermediate(Block[] blocks, int offset) {
-    state.toIntermediate(blocks, offset);
-  }
-
-  @Override
-  public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) {
-    blocks[offset] = CountDistinctBooleanAggregator.evaluateFinal(state, driverContext);
-  }
-
-  @Override
-  public String toString() {
-    StringBuilder sb = new StringBuilder();
-    sb.append(getClass().getSimpleName()).append("[");
-    sb.append("channels=").append(channels);
-    sb.append("]");
-    return sb.toString();
-  }
-
-  @Override
-  public void close() {
-    state.close();
-  }
-}
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionSupplier.java
deleted file mode 100644
index 87eb2b97974da..0000000000000
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionSupplier.java
+++ /dev/null
@@ -1,44 +0,0 @@
-// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
-// or more contributor license agreements. Licensed under the Elastic License
-// 2.0; you may not use this file except in compliance with the Elastic License
-// 2.0.
-package org.elasticsearch.compute.aggregation;
-
-import java.lang.Integer;
-import java.lang.Override;
-import java.lang.String;
-import java.util.List;
-import org.elasticsearch.common.util.BigArrays;
-import org.elasticsearch.compute.operator.DriverContext;
-
-/**
- * {@link AggregatorFunctionSupplier} implementation for {@link CountDistinctBooleanAggregator}.
- * This class is generated. Do not edit it.
- */
-public final class CountDistinctBooleanAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
-  private final BigArrays bigArrays;
-
-  private final List channels;
-
-  public CountDistinctBooleanAggregatorFunctionSupplier(BigArrays bigArrays,
-      List channels) {
-    this.bigArrays = bigArrays;
-    this.channels = channels;
-  }
-
-  @Override
-  public CountDistinctBooleanAggregatorFunction aggregator(DriverContext driverContext) {
-    return CountDistinctBooleanAggregatorFunction.create(driverContext, channels);
-  }
-
-  @Override
-  public CountDistinctBooleanGroupingAggregatorFunction groupingAggregator(
-      DriverContext driverContext) {
-    return CountDistinctBooleanGroupingAggregatorFunction.create(channels, driverContext, bigArrays);
-  }
-
-  @Override
-  public String describe() {
-    return "count_distinct of booleans";
-  }
-}
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java
deleted file mode 100644
index 820cb889d2ec6..0000000000000
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java
+++ /dev/null
@@ -1,212 +0,0 @@
-// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
-// or more contributor license agreements. Licensed under the Elastic License
-// 2.0; you may not use this file except in compliance with the Elastic License
-// 2.0.
-package org.elasticsearch.compute.aggregation;
-
-import java.lang.Integer;
-import java.lang.Override;
-import java.lang.String;
-import java.lang.StringBuilder;
-import java.util.List;
-import org.elasticsearch.common.util.BigArrays;
-import org.elasticsearch.compute.data.Block;
-import org.elasticsearch.compute.data.BooleanBlock;
-import org.elasticsearch.compute.data.BooleanVector;
-import org.elasticsearch.compute.data.ElementType;
-import org.elasticsearch.compute.data.IntBlock;
-import org.elasticsearch.compute.data.IntVector;
-import org.elasticsearch.compute.data.Page;
-import org.elasticsearch.compute.operator.DriverContext;
-
-/**
- * {@link GroupingAggregatorFunction} implementation for {@link CountDistinctBooleanAggregator}.
- * This class is generated. Do not edit it.
- */ -public final class CountDistinctBooleanGroupingAggregatorFunction implements GroupingAggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("fbit", ElementType.BOOLEAN), - new IntermediateStateDesc("tbit", ElementType.BOOLEAN) ); - - private final CountDistinctBooleanAggregator.GroupingState state; - - private final List channels; - - private final DriverContext driverContext; - - private final BigArrays bigArrays; - - public CountDistinctBooleanGroupingAggregatorFunction(List channels, - CountDistinctBooleanAggregator.GroupingState state, DriverContext driverContext, - BigArrays bigArrays) { - this.channels = channels; - this.state = state; - this.driverContext = driverContext; - this.bigArrays = bigArrays; - } - - public static CountDistinctBooleanGroupingAggregatorFunction create(List channels, - DriverContext driverContext, BigArrays bigArrays) { - return new CountDistinctBooleanGroupingAggregatorFunction(channels, CountDistinctBooleanAggregator.initGrouping(bigArrays), driverContext, bigArrays); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, - Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - BooleanBlock valuesBlock = (BooleanBlock) uncastValuesBlock; - BooleanVector valuesVector = valuesBlock.asVector(); - if (valuesVector == null) { - if (valuesBlock.mayHaveNulls()) { - state.enableGroupIdTracking(seenGroupIds); - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - }; - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - }; - } - - private void addRawInput(int positionOffset, IntVector groups, BooleanBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - CountDistinctBooleanAggregator.combine(state, groupId, values.getBoolean(v)); - } - } - } - - private void addRawInput(int positionOffset, IntVector groups, BooleanVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - CountDistinctBooleanAggregator.combine(state, 
groupId, values.getBoolean(groupPosition + positionOffset)); - } - } - - private void addRawInput(int positionOffset, IntBlock groups, BooleanBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - CountDistinctBooleanAggregator.combine(state, groupId, values.getBoolean(v)); - } - } - } - } - - private void addRawInput(int positionOffset, IntBlock groups, BooleanVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - CountDistinctBooleanAggregator.combine(state, groupId, values.getBoolean(groupPosition + positionOffset)); - } - } - } - - @Override - public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - assert channels.size() == intermediateBlockCount(); - BooleanVector fbit = page.getBlock(channels.get(0)).asVector(); - BooleanVector tbit = page.getBlock(channels.get(1)).asVector(); - assert fbit.getPositionCount() == tbit.getPositionCount(); - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - CountDistinctBooleanAggregator.combineIntermediate(state, groupId, fbit.getBoolean(groupPosition + positionOffset), tbit.getBoolean(groupPosition + positionOffset)); - } - } - - @Override - public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { - if (input.getClass() != getClass()) { - throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); - } - CountDistinctBooleanAggregator.GroupingState inState = ((CountDistinctBooleanGroupingAggregatorFunction) input).state; - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - CountDistinctBooleanAggregator.combineStates(state, groupId, inState, position); - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { - state.toIntermediate(blocks, offset, selected, driverContext); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, IntVector selected, - DriverContext driverContext) { - blocks[offset] = CountDistinctBooleanAggregator.evaluateFinal(state, selected, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java deleted file mode 100644 index 538fa5b35d7e5..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java +++ /dev/null @@ -1,131 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunction} implementation for {@link CountDistinctBytesRefAggregator}. - * This class is generated. Do not edit it. - */ -public final class CountDistinctBytesRefAggregatorFunction implements AggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("hll", ElementType.BYTES_REF) ); - - private final DriverContext driverContext; - - private final HllStates.SingleState state; - - private final List channels; - - private final BigArrays bigArrays; - - private final int precision; - - public CountDistinctBytesRefAggregatorFunction(DriverContext driverContext, - List channels, HllStates.SingleState state, BigArrays bigArrays, int precision) { - this.driverContext = driverContext; - this.channels = channels; - this.state = state; - this.bigArrays = bigArrays; - this.precision = precision; - } - - public static CountDistinctBytesRefAggregatorFunction create(DriverContext driverContext, - List channels, BigArrays bigArrays, int precision) { - return new CountDistinctBytesRefAggregatorFunction(driverContext, channels, CountDistinctBytesRefAggregator.initSingle(bigArrays, precision), bigArrays, precision); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - BytesRefBlock block = (BytesRefBlock) uncastBlock; - BytesRefVector vector = block.asVector(); - if (vector != null) { - addRawVector(vector); - } else { - addRawBlock(block); - } - } - - private void addRawVector(BytesRefVector vector) { - BytesRef scratch = new BytesRef(); - for (int i = 0; i < vector.getPositionCount(); i++) { - CountDistinctBytesRefAggregator.combine(state, vector.getBytesRef(i, scratch)); - } - } - - private void addRawBlock(BytesRefBlock block) { - BytesRef scratch = new BytesRef(); - for (int p = 0; p < block.getPositionCount(); p++) { - if (block.isNull(p)) { - continue; - } - int start = block.getFirstValueIndex(p); - int end = start + 
block.getValueCount(p); - for (int i = start; i < end; i++) { - CountDistinctBytesRefAggregator.combine(state, block.getBytesRef(i, scratch)); - } - } - } - - @Override - public void addIntermediateInput(Page page) { - assert channels.size() == intermediateBlockCount(); - assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); - assert hll.getPositionCount() == 1; - BytesRef scratch = new BytesRef(); - CountDistinctBytesRefAggregator.combineIntermediate(state, hll.getBytesRef(0, scratch)); - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset) { - state.toIntermediate(blocks, offset); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { - blocks[offset] = CountDistinctBytesRefAggregator.evaluateFinal(state, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionSupplier.java deleted file mode 100644 index 3f336519ac69f..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionSupplier.java +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.util.List; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunctionSupplier} implementation for {@link CountDistinctBytesRefAggregator}. - * This class is generated. Do not edit it. 
- */ -public final class CountDistinctBytesRefAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final BigArrays bigArrays; - - private final List channels; - - private final int precision; - - public CountDistinctBytesRefAggregatorFunctionSupplier(BigArrays bigArrays, - List channels, int precision) { - this.bigArrays = bigArrays; - this.channels = channels; - this.precision = precision; - } - - @Override - public CountDistinctBytesRefAggregatorFunction aggregator(DriverContext driverContext) { - return CountDistinctBytesRefAggregatorFunction.create(driverContext, channels, bigArrays, precision); - } - - @Override - public CountDistinctBytesRefGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { - return CountDistinctBytesRefGroupingAggregatorFunction.create(channels, driverContext, bigArrays, precision); - } - - @Override - public String describe() { - return "count_distinct of bytes"; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java deleted file mode 100644 index 36bcb6d145c05..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java +++ /dev/null @@ -1,218 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link GroupingAggregatorFunction} implementation for {@link CountDistinctBytesRefAggregator}. - * This class is generated. Do not edit it. 
- */ -public final class CountDistinctBytesRefGroupingAggregatorFunction implements GroupingAggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("hll", ElementType.BYTES_REF) ); - - private final HllStates.GroupingState state; - - private final List channels; - - private final DriverContext driverContext; - - private final BigArrays bigArrays; - - private final int precision; - - public CountDistinctBytesRefGroupingAggregatorFunction(List channels, - HllStates.GroupingState state, DriverContext driverContext, BigArrays bigArrays, - int precision) { - this.channels = channels; - this.state = state; - this.driverContext = driverContext; - this.bigArrays = bigArrays; - this.precision = precision; - } - - public static CountDistinctBytesRefGroupingAggregatorFunction create(List channels, - DriverContext driverContext, BigArrays bigArrays, int precision) { - return new CountDistinctBytesRefGroupingAggregatorFunction(channels, CountDistinctBytesRefAggregator.initGrouping(bigArrays, precision), driverContext, bigArrays, precision); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, - Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - BytesRefBlock valuesBlock = (BytesRefBlock) uncastValuesBlock; - BytesRefVector valuesVector = valuesBlock.asVector(); - if (valuesVector == null) { - if (valuesBlock.mayHaveNulls()) { - state.enableGroupIdTracking(seenGroupIds); - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - }; - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - }; - } - - private void addRawInput(int positionOffset, IntVector groups, BytesRefBlock values) { - BytesRef scratch = new BytesRef(); - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - CountDistinctBytesRefAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); - } - } - } - - private void addRawInput(int positionOffset, IntVector groups, BytesRefVector values) { - BytesRef scratch = new BytesRef(); - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - 
int groupId = Math.toIntExact(groups.getInt(groupPosition)); - CountDistinctBytesRefAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); - } - } - - private void addRawInput(int positionOffset, IntBlock groups, BytesRefBlock values) { - BytesRef scratch = new BytesRef(); - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - CountDistinctBytesRefAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); - } - } - } - } - - private void addRawInput(int positionOffset, IntBlock groups, BytesRefVector values) { - BytesRef scratch = new BytesRef(); - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - CountDistinctBytesRefAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); - } - } - } - - @Override - public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - assert channels.size() == intermediateBlockCount(); - BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); - BytesRef scratch = new BytesRef(); - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - CountDistinctBytesRefAggregator.combineIntermediate(state, groupId, hll.getBytesRef(groupPosition + positionOffset, scratch)); - } - } - - @Override - public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { - if (input.getClass() != getClass()) { - throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); - } - HllStates.GroupingState inState = ((CountDistinctBytesRefGroupingAggregatorFunction) input).state; - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - CountDistinctBytesRefAggregator.combineStates(state, groupId, inState, position); - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { - state.toIntermediate(blocks, offset, selected, driverContext); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, IntVector selected, - DriverContext driverContext) { - blocks[offset] = CountDistinctBytesRefAggregator.evaluateFinal(state, selected, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java deleted file mode 100644 index 341acf4048dbd..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java +++ /dev/null @@ -1,131 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunction} implementation for {@link CountDistinctDoubleAggregator}. - * This class is generated. Do not edit it. - */ -public final class CountDistinctDoubleAggregatorFunction implements AggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("hll", ElementType.BYTES_REF) ); - - private final DriverContext driverContext; - - private final HllStates.SingleState state; - - private final List channels; - - private final BigArrays bigArrays; - - private final int precision; - - public CountDistinctDoubleAggregatorFunction(DriverContext driverContext, List channels, - HllStates.SingleState state, BigArrays bigArrays, int precision) { - this.driverContext = driverContext; - this.channels = channels; - this.state = state; - this.bigArrays = bigArrays; - this.precision = precision; - } - - public static CountDistinctDoubleAggregatorFunction create(DriverContext driverContext, - List channels, BigArrays bigArrays, int precision) { - return new CountDistinctDoubleAggregatorFunction(driverContext, channels, CountDistinctDoubleAggregator.initSingle(bigArrays, precision), bigArrays, precision); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - DoubleBlock block = (DoubleBlock) uncastBlock; - DoubleVector vector = block.asVector(); - if (vector != null) { - addRawVector(vector); - } else { - addRawBlock(block); - } - } - - private void addRawVector(DoubleVector vector) { - for (int i = 0; i < vector.getPositionCount(); i++) { - CountDistinctDoubleAggregator.combine(state, vector.getDouble(i)); - } - } - - private void addRawBlock(DoubleBlock block) { - for (int p = 0; p < block.getPositionCount(); p++) { - if (block.isNull(p)) { - continue; - } - int start = block.getFirstValueIndex(p); - int end = start + 
block.getValueCount(p); - for (int i = start; i < end; i++) { - CountDistinctDoubleAggregator.combine(state, block.getDouble(i)); - } - } - } - - @Override - public void addIntermediateInput(Page page) { - assert channels.size() == intermediateBlockCount(); - assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); - assert hll.getPositionCount() == 1; - BytesRef scratch = new BytesRef(); - CountDistinctDoubleAggregator.combineIntermediate(state, hll.getBytesRef(0, scratch)); - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset) { - state.toIntermediate(blocks, offset); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { - blocks[offset] = CountDistinctDoubleAggregator.evaluateFinal(state, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionSupplier.java deleted file mode 100644 index ee33aefb5242c..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionSupplier.java +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.util.List; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunctionSupplier} implementation for {@link CountDistinctDoubleAggregator}. - * This class is generated. Do not edit it. 
- */ -public final class CountDistinctDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final BigArrays bigArrays; - - private final List channels; - - private final int precision; - - public CountDistinctDoubleAggregatorFunctionSupplier(BigArrays bigArrays, List channels, - int precision) { - this.bigArrays = bigArrays; - this.channels = channels; - this.precision = precision; - } - - @Override - public CountDistinctDoubleAggregatorFunction aggregator(DriverContext driverContext) { - return CountDistinctDoubleAggregatorFunction.create(driverContext, channels, bigArrays, precision); - } - - @Override - public CountDistinctDoubleGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { - return CountDistinctDoubleGroupingAggregatorFunction.create(channels, driverContext, bigArrays, precision); - } - - @Override - public String describe() { - return "count_distinct of doubles"; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java deleted file mode 100644 index 54c57c2138505..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java +++ /dev/null @@ -1,216 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link GroupingAggregatorFunction} implementation for {@link CountDistinctDoubleAggregator}. - * This class is generated. Do not edit it. 
- */ -public final class CountDistinctDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("hll", ElementType.BYTES_REF) ); - - private final HllStates.GroupingState state; - - private final List channels; - - private final DriverContext driverContext; - - private final BigArrays bigArrays; - - private final int precision; - - public CountDistinctDoubleGroupingAggregatorFunction(List channels, - HllStates.GroupingState state, DriverContext driverContext, BigArrays bigArrays, - int precision) { - this.channels = channels; - this.state = state; - this.driverContext = driverContext; - this.bigArrays = bigArrays; - this.precision = precision; - } - - public static CountDistinctDoubleGroupingAggregatorFunction create(List channels, - DriverContext driverContext, BigArrays bigArrays, int precision) { - return new CountDistinctDoubleGroupingAggregatorFunction(channels, CountDistinctDoubleAggregator.initGrouping(bigArrays, precision), driverContext, bigArrays, precision); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, - Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - DoubleBlock valuesBlock = (DoubleBlock) uncastValuesBlock; - DoubleVector valuesVector = valuesBlock.asVector(); - if (valuesVector == null) { - if (valuesBlock.mayHaveNulls()) { - state.enableGroupIdTracking(seenGroupIds); - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - }; - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - }; - } - - private void addRawInput(int positionOffset, IntVector groups, DoubleBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - CountDistinctDoubleAggregator.combine(state, groupId, values.getDouble(v)); - } - } - } - - private void addRawInput(int positionOffset, IntVector groups, DoubleVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - 
CountDistinctDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); - } - } - - private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - CountDistinctDoubleAggregator.combine(state, groupId, values.getDouble(v)); - } - } - } - } - - private void addRawInput(int positionOffset, IntBlock groups, DoubleVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - CountDistinctDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); - } - } - } - - @Override - public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - assert channels.size() == intermediateBlockCount(); - BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); - BytesRef scratch = new BytesRef(); - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - CountDistinctDoubleAggregator.combineIntermediate(state, groupId, hll.getBytesRef(groupPosition + positionOffset, scratch)); - } - } - - @Override - public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { - if (input.getClass() != getClass()) { - throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); - } - HllStates.GroupingState inState = ((CountDistinctDoubleGroupingAggregatorFunction) input).state; - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - CountDistinctDoubleAggregator.combineStates(state, groupId, inState, position); - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { - state.toIntermediate(blocks, offset, selected, driverContext); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, IntVector selected, - DriverContext driverContext) { - blocks[offset] = CountDistinctDoubleAggregator.evaluateFinal(state, selected, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java deleted file mode 100644 index a35d00e25d171..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java +++ /dev/null @@ -1,131 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunction} implementation for {@link CountDistinctIntAggregator}. - * This class is generated. Do not edit it. - */ -public final class CountDistinctIntAggregatorFunction implements AggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("hll", ElementType.BYTES_REF) ); - - private final DriverContext driverContext; - - private final HllStates.SingleState state; - - private final List channels; - - private final BigArrays bigArrays; - - private final int precision; - - public CountDistinctIntAggregatorFunction(DriverContext driverContext, List channels, - HllStates.SingleState state, BigArrays bigArrays, int precision) { - this.driverContext = driverContext; - this.channels = channels; - this.state = state; - this.bigArrays = bigArrays; - this.precision = precision; - } - - public static CountDistinctIntAggregatorFunction create(DriverContext driverContext, - List channels, BigArrays bigArrays, int precision) { - return new CountDistinctIntAggregatorFunction(driverContext, channels, CountDistinctIntAggregator.initSingle(bigArrays, precision), bigArrays, precision); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - IntBlock block = (IntBlock) uncastBlock; - IntVector vector = block.asVector(); - if (vector != null) { - addRawVector(vector); - } else { - addRawBlock(block); - } - } - - private void addRawVector(IntVector vector) { - for (int i = 0; i < vector.getPositionCount(); i++) { - CountDistinctIntAggregator.combine(state, vector.getInt(i)); - } - } - - private void addRawBlock(IntBlock block) { - for (int p = 0; p < block.getPositionCount(); p++) { - if (block.isNull(p)) { - continue; - } - int start = block.getFirstValueIndex(p); - int end = start + block.getValueCount(p); - for (int i = start; i < end; i++) { - CountDistinctIntAggregator.combine(state, block.getInt(i)); - } - } - } - - @Override - public void addIntermediateInput(Page page) { - 
assert channels.size() == intermediateBlockCount(); - assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); - assert hll.getPositionCount() == 1; - BytesRef scratch = new BytesRef(); - CountDistinctIntAggregator.combineIntermediate(state, hll.getBytesRef(0, scratch)); - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset) { - state.toIntermediate(blocks, offset); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { - blocks[offset] = CountDistinctIntAggregator.evaluateFinal(state, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionSupplier.java deleted file mode 100644 index 315df36fcaa1b..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionSupplier.java +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.util.List; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunctionSupplier} implementation for {@link CountDistinctIntAggregator}. - * This class is generated. Do not edit it. 
- */ -public final class CountDistinctIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final BigArrays bigArrays; - - private final List channels; - - private final int precision; - - public CountDistinctIntAggregatorFunctionSupplier(BigArrays bigArrays, List channels, - int precision) { - this.bigArrays = bigArrays; - this.channels = channels; - this.precision = precision; - } - - @Override - public CountDistinctIntAggregatorFunction aggregator(DriverContext driverContext) { - return CountDistinctIntAggregatorFunction.create(driverContext, channels, bigArrays, precision); - } - - @Override - public CountDistinctIntGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { - return CountDistinctIntGroupingAggregatorFunction.create(channels, driverContext, bigArrays, precision); - } - - @Override - public String describe() { - return "count_distinct of ints"; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java deleted file mode 100644 index acaff7b8e96d7..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java +++ /dev/null @@ -1,214 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link GroupingAggregatorFunction} implementation for {@link CountDistinctIntAggregator}. - * This class is generated. Do not edit it. 
- */ -public final class CountDistinctIntGroupingAggregatorFunction implements GroupingAggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("hll", ElementType.BYTES_REF) ); - - private final HllStates.GroupingState state; - - private final List channels; - - private final DriverContext driverContext; - - private final BigArrays bigArrays; - - private final int precision; - - public CountDistinctIntGroupingAggregatorFunction(List channels, - HllStates.GroupingState state, DriverContext driverContext, BigArrays bigArrays, - int precision) { - this.channels = channels; - this.state = state; - this.driverContext = driverContext; - this.bigArrays = bigArrays; - this.precision = precision; - } - - public static CountDistinctIntGroupingAggregatorFunction create(List channels, - DriverContext driverContext, BigArrays bigArrays, int precision) { - return new CountDistinctIntGroupingAggregatorFunction(channels, CountDistinctIntAggregator.initGrouping(bigArrays, precision), driverContext, bigArrays, precision); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, - Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - IntBlock valuesBlock = (IntBlock) uncastValuesBlock; - IntVector valuesVector = valuesBlock.asVector(); - if (valuesVector == null) { - if (valuesBlock.mayHaveNulls()) { - state.enableGroupIdTracking(seenGroupIds); - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - }; - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - }; - } - - private void addRawInput(int positionOffset, IntVector groups, IntBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - CountDistinctIntAggregator.combine(state, groupId, values.getInt(v)); - } - } - } - - private void addRawInput(int positionOffset, IntVector groups, IntVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - CountDistinctIntAggregator.combine(state, groupId, values.getInt(groupPosition 
+ positionOffset)); - } - } - - private void addRawInput(int positionOffset, IntBlock groups, IntBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - CountDistinctIntAggregator.combine(state, groupId, values.getInt(v)); - } - } - } - } - - private void addRawInput(int positionOffset, IntBlock groups, IntVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - CountDistinctIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); - } - } - } - - @Override - public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - assert channels.size() == intermediateBlockCount(); - BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); - BytesRef scratch = new BytesRef(); - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - CountDistinctIntAggregator.combineIntermediate(state, groupId, hll.getBytesRef(groupPosition + positionOffset, scratch)); - } - } - - @Override - public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { - if (input.getClass() != getClass()) { - throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); - } - HllStates.GroupingState inState = ((CountDistinctIntGroupingAggregatorFunction) input).state; - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - CountDistinctIntAggregator.combineStates(state, groupId, inState, position); - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { - state.toIntermediate(blocks, offset, selected, driverContext); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, IntVector selected, - DriverContext driverContext) { - blocks[offset] = CountDistinctIntAggregator.evaluateFinal(state, selected, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java deleted file mode 100644 index b8243724e2f4a..0000000000000 --- 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java +++ /dev/null @@ -1,131 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunction} implementation for {@link CountDistinctLongAggregator}. - * This class is generated. Do not edit it. - */ -public final class CountDistinctLongAggregatorFunction implements AggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("hll", ElementType.BYTES_REF) ); - - private final DriverContext driverContext; - - private final HllStates.SingleState state; - - private final List channels; - - private final BigArrays bigArrays; - - private final int precision; - - public CountDistinctLongAggregatorFunction(DriverContext driverContext, List channels, - HllStates.SingleState state, BigArrays bigArrays, int precision) { - this.driverContext = driverContext; - this.channels = channels; - this.state = state; - this.bigArrays = bigArrays; - this.precision = precision; - } - - public static CountDistinctLongAggregatorFunction create(DriverContext driverContext, - List channels, BigArrays bigArrays, int precision) { - return new CountDistinctLongAggregatorFunction(driverContext, channels, CountDistinctLongAggregator.initSingle(bigArrays, precision), bigArrays, precision); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - LongBlock block = (LongBlock) uncastBlock; - LongVector vector = block.asVector(); - if (vector != null) { - addRawVector(vector); - } else { - addRawBlock(block); - } - } - - private void addRawVector(LongVector vector) { - for (int i = 0; i < vector.getPositionCount(); i++) { - CountDistinctLongAggregator.combine(state, vector.getLong(i)); - } - } - - private void addRawBlock(LongBlock block) { - for (int p = 0; p < block.getPositionCount(); p++) { - if (block.isNull(p)) { - continue; - } - int start = block.getFirstValueIndex(p); - int end = start + block.getValueCount(p); - for (int i = start; i < end; i++) { - CountDistinctLongAggregator.combine(state, block.getLong(i)); - } - } - } - - @Override - public void addIntermediateInput(Page page) { - assert channels.size() == intermediateBlockCount(); - assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - BytesRefVector hll = 
page.getBlock(channels.get(0)).asVector(); - assert hll.getPositionCount() == 1; - BytesRef scratch = new BytesRef(); - CountDistinctLongAggregator.combineIntermediate(state, hll.getBytesRef(0, scratch)); - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset) { - state.toIntermediate(blocks, offset); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { - blocks[offset] = CountDistinctLongAggregator.evaluateFinal(state, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionSupplier.java deleted file mode 100644 index 6069b5744b31b..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionSupplier.java +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.util.List; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunctionSupplier} implementation for {@link CountDistinctLongAggregator}. - * This class is generated. Do not edit it. - */ -public final class CountDistinctLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final BigArrays bigArrays; - - private final List channels; - - private final int precision; - - public CountDistinctLongAggregatorFunctionSupplier(BigArrays bigArrays, List channels, - int precision) { - this.bigArrays = bigArrays; - this.channels = channels; - this.precision = precision; - } - - @Override - public CountDistinctLongAggregatorFunction aggregator(DriverContext driverContext) { - return CountDistinctLongAggregatorFunction.create(driverContext, channels, bigArrays, precision); - } - - @Override - public CountDistinctLongGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { - return CountDistinctLongGroupingAggregatorFunction.create(channels, driverContext, bigArrays, precision); - } - - @Override - public String describe() { - return "count_distinct of longs"; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java deleted file mode 100644 index 06165026a5d16..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java +++ /dev/null @@ -1,216 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. 
Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link GroupingAggregatorFunction} implementation for {@link CountDistinctLongAggregator}. - * This class is generated. Do not edit it. - */ -public final class CountDistinctLongGroupingAggregatorFunction implements GroupingAggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("hll", ElementType.BYTES_REF) ); - - private final HllStates.GroupingState state; - - private final List channels; - - private final DriverContext driverContext; - - private final BigArrays bigArrays; - - private final int precision; - - public CountDistinctLongGroupingAggregatorFunction(List channels, - HllStates.GroupingState state, DriverContext driverContext, BigArrays bigArrays, - int precision) { - this.channels = channels; - this.state = state; - this.driverContext = driverContext; - this.bigArrays = bigArrays; - this.precision = precision; - } - - public static CountDistinctLongGroupingAggregatorFunction create(List channels, - DriverContext driverContext, BigArrays bigArrays, int precision) { - return new CountDistinctLongGroupingAggregatorFunction(channels, CountDistinctLongAggregator.initGrouping(bigArrays, precision), driverContext, bigArrays, precision); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, - Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - LongBlock valuesBlock = (LongBlock) uncastValuesBlock; - LongVector valuesVector = valuesBlock.asVector(); - if (valuesVector == null) { - if (valuesBlock.mayHaveNulls()) { - state.enableGroupIdTracking(seenGroupIds); - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - }; - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - - 
@Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - }; - } - - private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - CountDistinctLongAggregator.combine(state, groupId, values.getLong(v)); - } - } - } - - private void addRawInput(int positionOffset, IntVector groups, LongVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - CountDistinctLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); - } - } - - private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - CountDistinctLongAggregator.combine(state, groupId, values.getLong(v)); - } - } - } - } - - private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - CountDistinctLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); - } - } - } - - @Override - public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - assert channels.size() == intermediateBlockCount(); - BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); - BytesRef scratch = new BytesRef(); - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - CountDistinctLongAggregator.combineIntermediate(state, groupId, hll.getBytesRef(groupPosition + positionOffset, scratch)); - } - } - - @Override - public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { - if (input.getClass() != getClass()) { - throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); - } - HllStates.GroupingState inState = ((CountDistinctLongGroupingAggregatorFunction) input).state; - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - 
CountDistinctLongAggregator.combineStates(state, groupId, inState, position);
-    }
-
-    @Override
-    public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) {
-        state.toIntermediate(blocks, offset, selected, driverContext);
-    }
-
-    @Override
-    public void evaluateFinal(Block[] blocks, int offset, IntVector selected,
-            DriverContext driverContext) {
-        blocks[offset] = CountDistinctLongAggregator.evaluateFinal(state, selected, driverContext);
-    }
-
-    @Override
-    public String toString() {
-        StringBuilder sb = new StringBuilder();
-        sb.append(getClass().getSimpleName()).append("[");
-        sb.append("channels=").append(channels);
-        sb.append("]");
-        return sb.toString();
-    }
-
-    @Override
-    public void close() {
-        state.close();
-    }
-}
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java
deleted file mode 100644
index 7078f98c62466..0000000000000
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java
+++ /dev/null
@@ -1,134 +0,0 @@
-// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
-// or more contributor license agreements. Licensed under the Elastic License
-// 2.0; you may not use this file except in compliance with the Elastic License
-// 2.0.
-package org.elasticsearch.compute.aggregation;
-
-import java.lang.Integer;
-import java.lang.Override;
-import java.lang.String;
-import java.lang.StringBuilder;
-import java.util.List;
-import org.elasticsearch.compute.data.Block;
-import org.elasticsearch.compute.data.BooleanBlock;
-import org.elasticsearch.compute.data.BooleanVector;
-import org.elasticsearch.compute.data.DoubleBlock;
-import org.elasticsearch.compute.data.DoubleVector;
-import org.elasticsearch.compute.data.ElementType;
-import org.elasticsearch.compute.data.Page;
-import org.elasticsearch.compute.operator.DriverContext;
-
-/**
- * {@link AggregatorFunction} implementation for {@link MaxDoubleAggregator}.
- * This class is generated. Do not edit it.
- */ -public final class MaxDoubleAggregatorFunction implements AggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("max", ElementType.DOUBLE), - new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); - - private final DriverContext driverContext; - - private final DoubleState state; - - private final List channels; - - public MaxDoubleAggregatorFunction(DriverContext driverContext, List channels, - DoubleState state) { - this.driverContext = driverContext; - this.channels = channels; - this.state = state; - } - - public static MaxDoubleAggregatorFunction create(DriverContext driverContext, - List channels) { - return new MaxDoubleAggregatorFunction(driverContext, channels, new DoubleState(MaxDoubleAggregator.init())); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - DoubleBlock block = (DoubleBlock) uncastBlock; - DoubleVector vector = block.asVector(); - if (vector != null) { - addRawVector(vector); - } else { - addRawBlock(block); - } - } - - private void addRawVector(DoubleVector vector) { - state.seen(true); - for (int i = 0; i < vector.getPositionCount(); i++) { - state.doubleValue(MaxDoubleAggregator.combine(state.doubleValue(), vector.getDouble(i))); - } - } - - private void addRawBlock(DoubleBlock block) { - for (int p = 0; p < block.getPositionCount(); p++) { - if (block.isNull(p)) { - continue; - } - state.seen(true); - int start = block.getFirstValueIndex(p); - int end = start + block.getValueCount(p); - for (int i = start; i < end; i++) { - state.doubleValue(MaxDoubleAggregator.combine(state.doubleValue(), block.getDouble(i))); - } - } - } - - @Override - public void addIntermediateInput(Page page) { - assert channels.size() == intermediateBlockCount(); - assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - DoubleVector max = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); - assert max.getPositionCount() == 1; - assert max.getPositionCount() == seen.getPositionCount(); - if (seen.getBoolean(0)) { - state.doubleValue(MaxDoubleAggregator.combine(state.doubleValue(), max.getDouble(0))); - state.seen(true); - } - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset) { - state.toIntermediate(blocks, offset); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { - if (state.seen() == false) { - blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); - return; - } - blocks[offset] = DoubleBlock.newConstantBlockWith(state.doubleValue(), 1, driverContext.blockFactory()); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionSupplier.java deleted file mode 100644 
index 850fff9a946ba..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionSupplier.java +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.util.List; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunctionSupplier} implementation for {@link MaxDoubleAggregator}. - * This class is generated. Do not edit it. - */ -public final class MaxDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final BigArrays bigArrays; - - private final List channels; - - public MaxDoubleAggregatorFunctionSupplier(BigArrays bigArrays, List channels) { - this.bigArrays = bigArrays; - this.channels = channels; - } - - @Override - public MaxDoubleAggregatorFunction aggregator(DriverContext driverContext) { - return MaxDoubleAggregatorFunction.create(driverContext, channels); - } - - @Override - public MaxDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { - return MaxDoubleGroupingAggregatorFunction.create(channels, driverContext); - } - - @Override - public String describe() { - return "max of doubles"; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java deleted file mode 100644 index b761bb66b8edb..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java +++ /dev/null @@ -1,213 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link GroupingAggregatorFunction} implementation for {@link MaxDoubleAggregator}. - * This class is generated. Do not edit it. 
- */ -public final class MaxDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("max", ElementType.DOUBLE), - new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); - - private final DoubleArrayState state; - - private final List channels; - - private final DriverContext driverContext; - - public MaxDoubleGroupingAggregatorFunction(List channels, DoubleArrayState state, - DriverContext driverContext) { - this.channels = channels; - this.state = state; - this.driverContext = driverContext; - } - - public static MaxDoubleGroupingAggregatorFunction create(List channels, - DriverContext driverContext) { - return new MaxDoubleGroupingAggregatorFunction(channels, new DoubleArrayState(driverContext.bigArrays(), MaxDoubleAggregator.init()), driverContext); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, - Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - DoubleBlock valuesBlock = (DoubleBlock) uncastValuesBlock; - DoubleVector valuesVector = valuesBlock.asVector(); - if (valuesVector == null) { - if (valuesBlock.mayHaveNulls()) { - state.enableGroupIdTracking(seenGroupIds); - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - }; - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - }; - } - - private void addRawInput(int positionOffset, IntVector groups, DoubleBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - state.set(groupId, MaxDoubleAggregator.combine(state.getOrDefault(groupId), values.getDouble(v))); - } - } - } - - private void addRawInput(int positionOffset, IntVector groups, DoubleVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - state.set(groupId, MaxDoubleAggregator.combine(state.getOrDefault(groupId), values.getDouble(groupPosition + positionOffset))); - } - } - - private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values) { - for (int 
groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - state.set(groupId, MaxDoubleAggregator.combine(state.getOrDefault(groupId), values.getDouble(v))); - } - } - } - } - - private void addRawInput(int positionOffset, IntBlock groups, DoubleVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - state.set(groupId, MaxDoubleAggregator.combine(state.getOrDefault(groupId), values.getDouble(groupPosition + positionOffset))); - } - } - } - - @Override - public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - assert channels.size() == intermediateBlockCount(); - DoubleVector max = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); - assert max.getPositionCount() == seen.getPositionCount(); - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (seen.getBoolean(groupPosition + positionOffset)) { - state.set(groupId, MaxDoubleAggregator.combine(state.getOrDefault(groupId), max.getDouble(groupPosition + positionOffset))); - } - } - } - - @Override - public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { - if (input.getClass() != getClass()) { - throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); - } - DoubleArrayState inState = ((MaxDoubleGroupingAggregatorFunction) input).state; - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - if (inState.hasValue(position)) { - state.set(groupId, MaxDoubleAggregator.combine(state.getOrDefault(groupId), inState.get(position))); - } - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { - state.toIntermediate(blocks, offset, selected, driverContext); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, IntVector selected, - DriverContext driverContext) { - blocks[offset] = state.toValuesBlock(selected, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java 
deleted file mode 100644 index 371793fe1f7e4..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java +++ /dev/null @@ -1,134 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunction} implementation for {@link MaxIntAggregator}. - * This class is generated. Do not edit it. - */ -public final class MaxIntAggregatorFunction implements AggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("max", ElementType.INT), - new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); - - private final DriverContext driverContext; - - private final IntState state; - - private final List channels; - - public MaxIntAggregatorFunction(DriverContext driverContext, List channels, - IntState state) { - this.driverContext = driverContext; - this.channels = channels; - this.state = state; - } - - public static MaxIntAggregatorFunction create(DriverContext driverContext, - List channels) { - return new MaxIntAggregatorFunction(driverContext, channels, new IntState(MaxIntAggregator.init())); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - IntBlock block = (IntBlock) uncastBlock; - IntVector vector = block.asVector(); - if (vector != null) { - addRawVector(vector); - } else { - addRawBlock(block); - } - } - - private void addRawVector(IntVector vector) { - state.seen(true); - for (int i = 0; i < vector.getPositionCount(); i++) { - state.intValue(MaxIntAggregator.combine(state.intValue(), vector.getInt(i))); - } - } - - private void addRawBlock(IntBlock block) { - for (int p = 0; p < block.getPositionCount(); p++) { - if (block.isNull(p)) { - continue; - } - state.seen(true); - int start = block.getFirstValueIndex(p); - int end = start + block.getValueCount(p); - for (int i = start; i < end; i++) { - state.intValue(MaxIntAggregator.combine(state.intValue(), block.getInt(i))); - } - } - } - - @Override - public void addIntermediateInput(Page page) { - assert channels.size() == intermediateBlockCount(); - assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - IntVector max = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); - assert max.getPositionCount() == 1; - assert max.getPositionCount() == seen.getPositionCount(); - if (seen.getBoolean(0)) { - 
state.intValue(MaxIntAggregator.combine(state.intValue(), max.getInt(0))); - state.seen(true); - } - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset) { - state.toIntermediate(blocks, offset); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { - if (state.seen() == false) { - blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); - return; - } - blocks[offset] = IntBlock.newConstantBlockWith(state.intValue(), 1, driverContext.blockFactory()); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionSupplier.java deleted file mode 100644 index cfac1c68fc065..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionSupplier.java +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.util.List; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunctionSupplier} implementation for {@link MaxIntAggregator}. - * This class is generated. Do not edit it. - */ -public final class MaxIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final BigArrays bigArrays; - - private final List channels; - - public MaxIntAggregatorFunctionSupplier(BigArrays bigArrays, List channels) { - this.bigArrays = bigArrays; - this.channels = channels; - } - - @Override - public MaxIntAggregatorFunction aggregator(DriverContext driverContext) { - return MaxIntAggregatorFunction.create(driverContext, channels); - } - - @Override - public MaxIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { - return MaxIntGroupingAggregatorFunction.create(channels, driverContext); - } - - @Override - public String describe() { - return "max of ints"; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java deleted file mode 100644 index a7ef8d5573fdc..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java +++ /dev/null @@ -1,211 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. 
-package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link GroupingAggregatorFunction} implementation for {@link MaxIntAggregator}. - * This class is generated. Do not edit it. - */ -public final class MaxIntGroupingAggregatorFunction implements GroupingAggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("max", ElementType.INT), - new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); - - private final IntArrayState state; - - private final List channels; - - private final DriverContext driverContext; - - public MaxIntGroupingAggregatorFunction(List channels, IntArrayState state, - DriverContext driverContext) { - this.channels = channels; - this.state = state; - this.driverContext = driverContext; - } - - public static MaxIntGroupingAggregatorFunction create(List channels, - DriverContext driverContext) { - return new MaxIntGroupingAggregatorFunction(channels, new IntArrayState(driverContext.bigArrays(), MaxIntAggregator.init()), driverContext); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, - Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - IntBlock valuesBlock = (IntBlock) uncastValuesBlock; - IntVector valuesVector = valuesBlock.asVector(); - if (valuesVector == null) { - if (valuesBlock.mayHaveNulls()) { - state.enableGroupIdTracking(seenGroupIds); - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - }; - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - }; - } - - private void addRawInput(int positionOffset, IntVector groups, IntBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + 
values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - state.set(groupId, MaxIntAggregator.combine(state.getOrDefault(groupId), values.getInt(v))); - } - } - } - - private void addRawInput(int positionOffset, IntVector groups, IntVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - state.set(groupId, MaxIntAggregator.combine(state.getOrDefault(groupId), values.getInt(groupPosition + positionOffset))); - } - } - - private void addRawInput(int positionOffset, IntBlock groups, IntBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - state.set(groupId, MaxIntAggregator.combine(state.getOrDefault(groupId), values.getInt(v))); - } - } - } - } - - private void addRawInput(int positionOffset, IntBlock groups, IntVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - state.set(groupId, MaxIntAggregator.combine(state.getOrDefault(groupId), values.getInt(groupPosition + positionOffset))); - } - } - } - - @Override - public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - assert channels.size() == intermediateBlockCount(); - IntVector max = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); - assert max.getPositionCount() == seen.getPositionCount(); - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (seen.getBoolean(groupPosition + positionOffset)) { - state.set(groupId, MaxIntAggregator.combine(state.getOrDefault(groupId), max.getInt(groupPosition + positionOffset))); - } - } - } - - @Override - public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { - if (input.getClass() != getClass()) { - throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); - } - IntArrayState inState = ((MaxIntGroupingAggregatorFunction) input).state; - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - if (inState.hasValue(position)) { - state.set(groupId, MaxIntAggregator.combine(state.getOrDefault(groupId), inState.get(position))); - } - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { - state.toIntermediate(blocks, offset, selected, driverContext); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, 
IntVector selected,
-            DriverContext driverContext) {
-        blocks[offset] = state.toValuesBlock(selected, driverContext);
-    }
-
-    @Override
-    public String toString() {
-        StringBuilder sb = new StringBuilder();
-        sb.append(getClass().getSimpleName()).append("[");
-        sb.append("channels=").append(channels);
-        sb.append("]");
-        return sb.toString();
-    }
-
-    @Override
-    public void close() {
-        state.close();
-    }
-}
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java
deleted file mode 100644
index e48353b4e6a72..0000000000000
--- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java
+++ /dev/null
@@ -1,134 +0,0 @@
-// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
-// or more contributor license agreements. Licensed under the Elastic License
-// 2.0; you may not use this file except in compliance with the Elastic License
-// 2.0.
-package org.elasticsearch.compute.aggregation;
-
-import java.lang.Integer;
-import java.lang.Override;
-import java.lang.String;
-import java.lang.StringBuilder;
-import java.util.List;
-import org.elasticsearch.compute.data.Block;
-import org.elasticsearch.compute.data.BooleanBlock;
-import org.elasticsearch.compute.data.BooleanVector;
-import org.elasticsearch.compute.data.ElementType;
-import org.elasticsearch.compute.data.LongBlock;
-import org.elasticsearch.compute.data.LongVector;
-import org.elasticsearch.compute.data.Page;
-import org.elasticsearch.compute.operator.DriverContext;
-
-/**
- * {@link AggregatorFunction} implementation for {@link MaxLongAggregator}.
- * This class is generated. Do not edit it.
- */ -public final class MaxLongAggregatorFunction implements AggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("max", ElementType.LONG), - new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); - - private final DriverContext driverContext; - - private final LongState state; - - private final List channels; - - public MaxLongAggregatorFunction(DriverContext driverContext, List channels, - LongState state) { - this.driverContext = driverContext; - this.channels = channels; - this.state = state; - } - - public static MaxLongAggregatorFunction create(DriverContext driverContext, - List channels) { - return new MaxLongAggregatorFunction(driverContext, channels, new LongState(MaxLongAggregator.init())); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - LongBlock block = (LongBlock) uncastBlock; - LongVector vector = block.asVector(); - if (vector != null) { - addRawVector(vector); - } else { - addRawBlock(block); - } - } - - private void addRawVector(LongVector vector) { - state.seen(true); - for (int i = 0; i < vector.getPositionCount(); i++) { - state.longValue(MaxLongAggregator.combine(state.longValue(), vector.getLong(i))); - } - } - - private void addRawBlock(LongBlock block) { - for (int p = 0; p < block.getPositionCount(); p++) { - if (block.isNull(p)) { - continue; - } - state.seen(true); - int start = block.getFirstValueIndex(p); - int end = start + block.getValueCount(p); - for (int i = start; i < end; i++) { - state.longValue(MaxLongAggregator.combine(state.longValue(), block.getLong(i))); - } - } - } - - @Override - public void addIntermediateInput(Page page) { - assert channels.size() == intermediateBlockCount(); - assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - LongVector max = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); - assert max.getPositionCount() == 1; - assert max.getPositionCount() == seen.getPositionCount(); - if (seen.getBoolean(0)) { - state.longValue(MaxLongAggregator.combine(state.longValue(), max.getLong(0))); - state.seen(true); - } - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset) { - state.toIntermediate(blocks, offset); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { - if (state.seen() == false) { - blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); - return; - } - blocks[offset] = LongBlock.newConstantBlockWith(state.longValue(), 1, driverContext.blockFactory()); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionSupplier.java deleted file mode 100644 index 5f1b1d1e7dc82..0000000000000 --- 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionSupplier.java +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.util.List; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunctionSupplier} implementation for {@link MaxLongAggregator}. - * This class is generated. Do not edit it. - */ -public final class MaxLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final BigArrays bigArrays; - - private final List channels; - - public MaxLongAggregatorFunctionSupplier(BigArrays bigArrays, List channels) { - this.bigArrays = bigArrays; - this.channels = channels; - } - - @Override - public MaxLongAggregatorFunction aggregator(DriverContext driverContext) { - return MaxLongAggregatorFunction.create(driverContext, channels); - } - - @Override - public MaxLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { - return MaxLongGroupingAggregatorFunction.create(channels, driverContext); - } - - @Override - public String describe() { - return "max of longs"; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java deleted file mode 100644 index d224ecd2d293e..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java +++ /dev/null @@ -1,213 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link GroupingAggregatorFunction} implementation for {@link MaxLongAggregator}. - * This class is generated. Do not edit it. 
- */ -public final class MaxLongGroupingAggregatorFunction implements GroupingAggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("max", ElementType.LONG), - new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); - - private final LongArrayState state; - - private final List channels; - - private final DriverContext driverContext; - - public MaxLongGroupingAggregatorFunction(List channels, LongArrayState state, - DriverContext driverContext) { - this.channels = channels; - this.state = state; - this.driverContext = driverContext; - } - - public static MaxLongGroupingAggregatorFunction create(List channels, - DriverContext driverContext) { - return new MaxLongGroupingAggregatorFunction(channels, new LongArrayState(driverContext.bigArrays(), MaxLongAggregator.init()), driverContext); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, - Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - LongBlock valuesBlock = (LongBlock) uncastValuesBlock; - LongVector valuesVector = valuesBlock.asVector(); - if (valuesVector == null) { - if (valuesBlock.mayHaveNulls()) { - state.enableGroupIdTracking(seenGroupIds); - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - }; - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - }; - } - - private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - state.set(groupId, MaxLongAggregator.combine(state.getOrDefault(groupId), values.getLong(v))); - } - } - } - - private void addRawInput(int positionOffset, IntVector groups, LongVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - state.set(groupId, MaxLongAggregator.combine(state.getOrDefault(groupId), values.getLong(groupPosition + positionOffset))); - } - } - - private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { - for (int groupPosition = 0; groupPosition < 
groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - state.set(groupId, MaxLongAggregator.combine(state.getOrDefault(groupId), values.getLong(v))); - } - } - } - } - - private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - state.set(groupId, MaxLongAggregator.combine(state.getOrDefault(groupId), values.getLong(groupPosition + positionOffset))); - } - } - } - - @Override - public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - assert channels.size() == intermediateBlockCount(); - LongVector max = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); - assert max.getPositionCount() == seen.getPositionCount(); - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (seen.getBoolean(groupPosition + positionOffset)) { - state.set(groupId, MaxLongAggregator.combine(state.getOrDefault(groupId), max.getLong(groupPosition + positionOffset))); - } - } - } - - @Override - public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { - if (input.getClass() != getClass()) { - throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); - } - LongArrayState inState = ((MaxLongGroupingAggregatorFunction) input).state; - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - if (inState.hasValue(position)) { - state.set(groupId, MaxLongAggregator.combine(state.getOrDefault(groupId), inState.get(position))); - } - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { - state.toIntermediate(blocks, offset, selected, driverContext); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, IntVector selected, - DriverContext driverContext) { - blocks[offset] = state.toValuesBlock(selected, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java deleted file 
mode 100644 index 13db172741152..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java +++ /dev/null @@ -1,124 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunction} implementation for {@link MedianAbsoluteDeviationDoubleAggregator}. - * This class is generated. Do not edit it. - */ -public final class MedianAbsoluteDeviationDoubleAggregatorFunction implements AggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("quart", ElementType.BYTES_REF) ); - - private final DriverContext driverContext; - - private final QuantileStates.SingleState state; - - private final List channels; - - public MedianAbsoluteDeviationDoubleAggregatorFunction(DriverContext driverContext, - List channels, QuantileStates.SingleState state) { - this.driverContext = driverContext; - this.channels = channels; - this.state = state; - } - - public static MedianAbsoluteDeviationDoubleAggregatorFunction create(DriverContext driverContext, - List channels) { - return new MedianAbsoluteDeviationDoubleAggregatorFunction(driverContext, channels, MedianAbsoluteDeviationDoubleAggregator.initSingle()); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - DoubleBlock block = (DoubleBlock) uncastBlock; - DoubleVector vector = block.asVector(); - if (vector != null) { - addRawVector(vector); - } else { - addRawBlock(block); - } - } - - private void addRawVector(DoubleVector vector) { - for (int i = 0; i < vector.getPositionCount(); i++) { - MedianAbsoluteDeviationDoubleAggregator.combine(state, vector.getDouble(i)); - } - } - - private void addRawBlock(DoubleBlock block) { - for (int p = 0; p < block.getPositionCount(); p++) { - if (block.isNull(p)) { - continue; - } - int start = block.getFirstValueIndex(p); - int end = start + block.getValueCount(p); - for (int i = start; i < end; i++) { - MedianAbsoluteDeviationDoubleAggregator.combine(state, block.getDouble(i)); - } - } - } - - @Override - public void addIntermediateInput(Page page) { - assert channels.size() == intermediateBlockCount(); - assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); - assert quart.getPositionCount() == 1; - BytesRef scratch = new 
BytesRef(); - MedianAbsoluteDeviationDoubleAggregator.combineIntermediate(state, quart.getBytesRef(0, scratch)); - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset) { - state.toIntermediate(blocks, offset); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { - blocks[offset] = MedianAbsoluteDeviationDoubleAggregator.evaluateFinal(state, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier.java deleted file mode 100644 index 44cea6eab23bb..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier.java +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.util.List; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunctionSupplier} implementation for {@link MedianAbsoluteDeviationDoubleAggregator}. - * This class is generated. Do not edit it. - */ -public final class MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final BigArrays bigArrays; - - private final List channels; - - public MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier(BigArrays bigArrays, - List channels) { - this.bigArrays = bigArrays; - this.channels = channels; - } - - @Override - public MedianAbsoluteDeviationDoubleAggregatorFunction aggregator(DriverContext driverContext) { - return MedianAbsoluteDeviationDoubleAggregatorFunction.create(driverContext, channels); - } - - @Override - public MedianAbsoluteDeviationDoubleGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { - return MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.create(channels, driverContext, bigArrays); - } - - @Override - public String describe() { - return "median_absolute_deviation of doubles"; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java deleted file mode 100644 index 5707f9942d0f8..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java +++ /dev/null @@ -1,212 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. 
Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link GroupingAggregatorFunction} implementation for {@link MedianAbsoluteDeviationDoubleAggregator}. - * This class is generated. Do not edit it. - */ -public final class MedianAbsoluteDeviationDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("quart", ElementType.BYTES_REF) ); - - private final QuantileStates.GroupingState state; - - private final List channels; - - private final DriverContext driverContext; - - private final BigArrays bigArrays; - - public MedianAbsoluteDeviationDoubleGroupingAggregatorFunction(List channels, - QuantileStates.GroupingState state, DriverContext driverContext, BigArrays bigArrays) { - this.channels = channels; - this.state = state; - this.driverContext = driverContext; - this.bigArrays = bigArrays; - } - - public static MedianAbsoluteDeviationDoubleGroupingAggregatorFunction create( - List channels, DriverContext driverContext, BigArrays bigArrays) { - return new MedianAbsoluteDeviationDoubleGroupingAggregatorFunction(channels, MedianAbsoluteDeviationDoubleAggregator.initGrouping(bigArrays), driverContext, bigArrays); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, - Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - DoubleBlock valuesBlock = (DoubleBlock) uncastValuesBlock; - DoubleVector valuesVector = valuesBlock.asVector(); - if (valuesVector == null) { - if (valuesBlock.mayHaveNulls()) { - state.enableGroupIdTracking(seenGroupIds); - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - }; - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - - @Override - public void 
add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - }; - } - - private void addRawInput(int positionOffset, IntVector groups, DoubleBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - MedianAbsoluteDeviationDoubleAggregator.combine(state, groupId, values.getDouble(v)); - } - } - } - - private void addRawInput(int positionOffset, IntVector groups, DoubleVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - MedianAbsoluteDeviationDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); - } - } - - private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - MedianAbsoluteDeviationDoubleAggregator.combine(state, groupId, values.getDouble(v)); - } - } - } - } - - private void addRawInput(int positionOffset, IntBlock groups, DoubleVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - MedianAbsoluteDeviationDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); - } - } - } - - @Override - public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - assert channels.size() == intermediateBlockCount(); - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); - BytesRef scratch = new BytesRef(); - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - MedianAbsoluteDeviationDoubleAggregator.combineIntermediate(state, groupId, quart.getBytesRef(groupPosition + positionOffset, scratch)); - } - } - - @Override - public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { - if (input.getClass() != getClass()) { - throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); - } - QuantileStates.GroupingState inState = ((MedianAbsoluteDeviationDoubleGroupingAggregatorFunction) input).state; - 
state.enableGroupIdTracking(new SeenGroupIds.Empty()); - MedianAbsoluteDeviationDoubleAggregator.combineStates(state, groupId, inState, position); - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { - state.toIntermediate(blocks, offset, selected, driverContext); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, IntVector selected, - DriverContext driverContext) { - blocks[offset] = MedianAbsoluteDeviationDoubleAggregator.evaluateFinal(state, selected, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java deleted file mode 100644 index eb74abde9cd3f..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java +++ /dev/null @@ -1,124 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunction} implementation for {@link MedianAbsoluteDeviationIntAggregator}. - * This class is generated. Do not edit it. 
- */ -public final class MedianAbsoluteDeviationIntAggregatorFunction implements AggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("quart", ElementType.BYTES_REF) ); - - private final DriverContext driverContext; - - private final QuantileStates.SingleState state; - - private final List channels; - - public MedianAbsoluteDeviationIntAggregatorFunction(DriverContext driverContext, - List channels, QuantileStates.SingleState state) { - this.driverContext = driverContext; - this.channels = channels; - this.state = state; - } - - public static MedianAbsoluteDeviationIntAggregatorFunction create(DriverContext driverContext, - List channels) { - return new MedianAbsoluteDeviationIntAggregatorFunction(driverContext, channels, MedianAbsoluteDeviationIntAggregator.initSingle()); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - IntBlock block = (IntBlock) uncastBlock; - IntVector vector = block.asVector(); - if (vector != null) { - addRawVector(vector); - } else { - addRawBlock(block); - } - } - - private void addRawVector(IntVector vector) { - for (int i = 0; i < vector.getPositionCount(); i++) { - MedianAbsoluteDeviationIntAggregator.combine(state, vector.getInt(i)); - } - } - - private void addRawBlock(IntBlock block) { - for (int p = 0; p < block.getPositionCount(); p++) { - if (block.isNull(p)) { - continue; - } - int start = block.getFirstValueIndex(p); - int end = start + block.getValueCount(p); - for (int i = start; i < end; i++) { - MedianAbsoluteDeviationIntAggregator.combine(state, block.getInt(i)); - } - } - } - - @Override - public void addIntermediateInput(Page page) { - assert channels.size() == intermediateBlockCount(); - assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); - assert quart.getPositionCount() == 1; - BytesRef scratch = new BytesRef(); - MedianAbsoluteDeviationIntAggregator.combineIntermediate(state, quart.getBytesRef(0, scratch)); - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset) { - state.toIntermediate(blocks, offset); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { - blocks[offset] = MedianAbsoluteDeviationIntAggregator.evaluateFinal(state, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionSupplier.java deleted file mode 100644 index c00fb4b0c7b5e..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionSupplier.java +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.util.List; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunctionSupplier} implementation for {@link MedianAbsoluteDeviationIntAggregator}. - * This class is generated. Do not edit it. - */ -public final class MedianAbsoluteDeviationIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final BigArrays bigArrays; - - private final List channels; - - public MedianAbsoluteDeviationIntAggregatorFunctionSupplier(BigArrays bigArrays, - List channels) { - this.bigArrays = bigArrays; - this.channels = channels; - } - - @Override - public MedianAbsoluteDeviationIntAggregatorFunction aggregator(DriverContext driverContext) { - return MedianAbsoluteDeviationIntAggregatorFunction.create(driverContext, channels); - } - - @Override - public MedianAbsoluteDeviationIntGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { - return MedianAbsoluteDeviationIntGroupingAggregatorFunction.create(channels, driverContext, bigArrays); - } - - @Override - public String describe() { - return "median_absolute_deviation of ints"; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java deleted file mode 100644 index b8f08efffd7d1..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java +++ /dev/null @@ -1,210 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link GroupingAggregatorFunction} implementation for {@link MedianAbsoluteDeviationIntAggregator}. - * This class is generated. Do not edit it. 
- */ -public final class MedianAbsoluteDeviationIntGroupingAggregatorFunction implements GroupingAggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("quart", ElementType.BYTES_REF) ); - - private final QuantileStates.GroupingState state; - - private final List channels; - - private final DriverContext driverContext; - - private final BigArrays bigArrays; - - public MedianAbsoluteDeviationIntGroupingAggregatorFunction(List channels, - QuantileStates.GroupingState state, DriverContext driverContext, BigArrays bigArrays) { - this.channels = channels; - this.state = state; - this.driverContext = driverContext; - this.bigArrays = bigArrays; - } - - public static MedianAbsoluteDeviationIntGroupingAggregatorFunction create(List channels, - DriverContext driverContext, BigArrays bigArrays) { - return new MedianAbsoluteDeviationIntGroupingAggregatorFunction(channels, MedianAbsoluteDeviationIntAggregator.initGrouping(bigArrays), driverContext, bigArrays); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, - Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - IntBlock valuesBlock = (IntBlock) uncastValuesBlock; - IntVector valuesVector = valuesBlock.asVector(); - if (valuesVector == null) { - if (valuesBlock.mayHaveNulls()) { - state.enableGroupIdTracking(seenGroupIds); - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - }; - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - }; - } - - private void addRawInput(int positionOffset, IntVector groups, IntBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - MedianAbsoluteDeviationIntAggregator.combine(state, groupId, values.getInt(v)); - } - } - } - - private void addRawInput(int positionOffset, IntVector groups, IntVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - MedianAbsoluteDeviationIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); - } - } - - 
private void addRawInput(int positionOffset, IntBlock groups, IntBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - MedianAbsoluteDeviationIntAggregator.combine(state, groupId, values.getInt(v)); - } - } - } - } - - private void addRawInput(int positionOffset, IntBlock groups, IntVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - MedianAbsoluteDeviationIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); - } - } - } - - @Override - public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - assert channels.size() == intermediateBlockCount(); - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); - BytesRef scratch = new BytesRef(); - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - MedianAbsoluteDeviationIntAggregator.combineIntermediate(state, groupId, quart.getBytesRef(groupPosition + positionOffset, scratch)); - } - } - - @Override - public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { - if (input.getClass() != getClass()) { - throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); - } - QuantileStates.GroupingState inState = ((MedianAbsoluteDeviationIntGroupingAggregatorFunction) input).state; - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - MedianAbsoluteDeviationIntAggregator.combineStates(state, groupId, inState, position); - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { - state.toIntermediate(blocks, offset, selected, driverContext); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, IntVector selected, - DriverContext driverContext) { - blocks[offset] = MedianAbsoluteDeviationIntAggregator.evaluateFinal(state, selected, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java deleted file mode 100644 index 
1fdec90f8a242..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java +++ /dev/null @@ -1,124 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunction} implementation for {@link MedianAbsoluteDeviationLongAggregator}. - * This class is generated. Do not edit it. - */ -public final class MedianAbsoluteDeviationLongAggregatorFunction implements AggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("quart", ElementType.BYTES_REF) ); - - private final DriverContext driverContext; - - private final QuantileStates.SingleState state; - - private final List channels; - - public MedianAbsoluteDeviationLongAggregatorFunction(DriverContext driverContext, - List channels, QuantileStates.SingleState state) { - this.driverContext = driverContext; - this.channels = channels; - this.state = state; - } - - public static MedianAbsoluteDeviationLongAggregatorFunction create(DriverContext driverContext, - List channels) { - return new MedianAbsoluteDeviationLongAggregatorFunction(driverContext, channels, MedianAbsoluteDeviationLongAggregator.initSingle()); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - LongBlock block = (LongBlock) uncastBlock; - LongVector vector = block.asVector(); - if (vector != null) { - addRawVector(vector); - } else { - addRawBlock(block); - } - } - - private void addRawVector(LongVector vector) { - for (int i = 0; i < vector.getPositionCount(); i++) { - MedianAbsoluteDeviationLongAggregator.combine(state, vector.getLong(i)); - } - } - - private void addRawBlock(LongBlock block) { - for (int p = 0; p < block.getPositionCount(); p++) { - if (block.isNull(p)) { - continue; - } - int start = block.getFirstValueIndex(p); - int end = start + block.getValueCount(p); - for (int i = start; i < end; i++) { - MedianAbsoluteDeviationLongAggregator.combine(state, block.getLong(i)); - } - } - } - - @Override - public void addIntermediateInput(Page page) { - assert channels.size() == intermediateBlockCount(); - assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); - assert quart.getPositionCount() == 1; - BytesRef scratch = new BytesRef(); - 
MedianAbsoluteDeviationLongAggregator.combineIntermediate(state, quart.getBytesRef(0, scratch)); - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset) { - state.toIntermediate(blocks, offset); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { - blocks[offset] = MedianAbsoluteDeviationLongAggregator.evaluateFinal(state, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionSupplier.java deleted file mode 100644 index 71b0488488227..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionSupplier.java +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.util.List; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunctionSupplier} implementation for {@link MedianAbsoluteDeviationLongAggregator}. - * This class is generated. Do not edit it. - */ -public final class MedianAbsoluteDeviationLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final BigArrays bigArrays; - - private final List channels; - - public MedianAbsoluteDeviationLongAggregatorFunctionSupplier(BigArrays bigArrays, - List channels) { - this.bigArrays = bigArrays; - this.channels = channels; - } - - @Override - public MedianAbsoluteDeviationLongAggregatorFunction aggregator(DriverContext driverContext) { - return MedianAbsoluteDeviationLongAggregatorFunction.create(driverContext, channels); - } - - @Override - public MedianAbsoluteDeviationLongGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { - return MedianAbsoluteDeviationLongGroupingAggregatorFunction.create(channels, driverContext, bigArrays); - } - - @Override - public String describe() { - return "median_absolute_deviation of longs"; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java deleted file mode 100644 index 74ee25c27c86a..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java +++ /dev/null @@ -1,212 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. 
Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link GroupingAggregatorFunction} implementation for {@link MedianAbsoluteDeviationLongAggregator}. - * This class is generated. Do not edit it. - */ -public final class MedianAbsoluteDeviationLongGroupingAggregatorFunction implements GroupingAggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("quart", ElementType.BYTES_REF) ); - - private final QuantileStates.GroupingState state; - - private final List channels; - - private final DriverContext driverContext; - - private final BigArrays bigArrays; - - public MedianAbsoluteDeviationLongGroupingAggregatorFunction(List channels, - QuantileStates.GroupingState state, DriverContext driverContext, BigArrays bigArrays) { - this.channels = channels; - this.state = state; - this.driverContext = driverContext; - this.bigArrays = bigArrays; - } - - public static MedianAbsoluteDeviationLongGroupingAggregatorFunction create(List channels, - DriverContext driverContext, BigArrays bigArrays) { - return new MedianAbsoluteDeviationLongGroupingAggregatorFunction(channels, MedianAbsoluteDeviationLongAggregator.initGrouping(bigArrays), driverContext, bigArrays); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, - Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - LongBlock valuesBlock = (LongBlock) uncastValuesBlock; - LongVector valuesVector = valuesBlock.asVector(); - if (valuesVector == null) { - if (valuesBlock.mayHaveNulls()) { - state.enableGroupIdTracking(seenGroupIds); - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - }; - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - - @Override - public void add(int positionOffset, 
IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - }; - } - - private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - MedianAbsoluteDeviationLongAggregator.combine(state, groupId, values.getLong(v)); - } - } - } - - private void addRawInput(int positionOffset, IntVector groups, LongVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - MedianAbsoluteDeviationLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); - } - } - - private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - MedianAbsoluteDeviationLongAggregator.combine(state, groupId, values.getLong(v)); - } - } - } - } - - private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - MedianAbsoluteDeviationLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); - } - } - } - - @Override - public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - assert channels.size() == intermediateBlockCount(); - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); - BytesRef scratch = new BytesRef(); - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - MedianAbsoluteDeviationLongAggregator.combineIntermediate(state, groupId, quart.getBytesRef(groupPosition + positionOffset, scratch)); - } - } - - @Override - public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { - if (input.getClass() != getClass()) { - throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); - } - QuantileStates.GroupingState inState = ((MedianAbsoluteDeviationLongGroupingAggregatorFunction) input).state; - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - 
MedianAbsoluteDeviationLongAggregator.combineStates(state, groupId, inState, position); - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { - state.toIntermediate(blocks, offset, selected, driverContext); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, IntVector selected, - DriverContext driverContext) { - blocks[offset] = MedianAbsoluteDeviationLongAggregator.evaluateFinal(state, selected, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java deleted file mode 100644 index 83fa4ac1d9a90..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java +++ /dev/null @@ -1,134 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunction} implementation for {@link MinDoubleAggregator}. - * This class is generated. Do not edit it. 
- */ -public final class MinDoubleAggregatorFunction implements AggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("min", ElementType.DOUBLE), - new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); - - private final DriverContext driverContext; - - private final DoubleState state; - - private final List channels; - - public MinDoubleAggregatorFunction(DriverContext driverContext, List channels, - DoubleState state) { - this.driverContext = driverContext; - this.channels = channels; - this.state = state; - } - - public static MinDoubleAggregatorFunction create(DriverContext driverContext, - List channels) { - return new MinDoubleAggregatorFunction(driverContext, channels, new DoubleState(MinDoubleAggregator.init())); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - DoubleBlock block = (DoubleBlock) uncastBlock; - DoubleVector vector = block.asVector(); - if (vector != null) { - addRawVector(vector); - } else { - addRawBlock(block); - } - } - - private void addRawVector(DoubleVector vector) { - state.seen(true); - for (int i = 0; i < vector.getPositionCount(); i++) { - state.doubleValue(MinDoubleAggregator.combine(state.doubleValue(), vector.getDouble(i))); - } - } - - private void addRawBlock(DoubleBlock block) { - for (int p = 0; p < block.getPositionCount(); p++) { - if (block.isNull(p)) { - continue; - } - state.seen(true); - int start = block.getFirstValueIndex(p); - int end = start + block.getValueCount(p); - for (int i = start; i < end; i++) { - state.doubleValue(MinDoubleAggregator.combine(state.doubleValue(), block.getDouble(i))); - } - } - } - - @Override - public void addIntermediateInput(Page page) { - assert channels.size() == intermediateBlockCount(); - assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - DoubleVector min = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); - assert min.getPositionCount() == 1; - assert min.getPositionCount() == seen.getPositionCount(); - if (seen.getBoolean(0)) { - state.doubleValue(MinDoubleAggregator.combine(state.doubleValue(), min.getDouble(0))); - state.seen(true); - } - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset) { - state.toIntermediate(blocks, offset); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { - if (state.seen() == false) { - blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); - return; - } - blocks[offset] = DoubleBlock.newConstantBlockWith(state.doubleValue(), 1, driverContext.blockFactory()); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionSupplier.java deleted file mode 100644 
index 1dcc4126dc508..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionSupplier.java +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.util.List; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunctionSupplier} implementation for {@link MinDoubleAggregator}. - * This class is generated. Do not edit it. - */ -public final class MinDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final BigArrays bigArrays; - - private final List channels; - - public MinDoubleAggregatorFunctionSupplier(BigArrays bigArrays, List channels) { - this.bigArrays = bigArrays; - this.channels = channels; - } - - @Override - public MinDoubleAggregatorFunction aggregator(DriverContext driverContext) { - return MinDoubleAggregatorFunction.create(driverContext, channels); - } - - @Override - public MinDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { - return MinDoubleGroupingAggregatorFunction.create(channels, driverContext); - } - - @Override - public String describe() { - return "min of doubles"; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java deleted file mode 100644 index d7c2c59eaed2c..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java +++ /dev/null @@ -1,213 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link GroupingAggregatorFunction} implementation for {@link MinDoubleAggregator}. - * This class is generated. Do not edit it. 
- */ -public final class MinDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("min", ElementType.DOUBLE), - new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); - - private final DoubleArrayState state; - - private final List channels; - - private final DriverContext driverContext; - - public MinDoubleGroupingAggregatorFunction(List channels, DoubleArrayState state, - DriverContext driverContext) { - this.channels = channels; - this.state = state; - this.driverContext = driverContext; - } - - public static MinDoubleGroupingAggregatorFunction create(List channels, - DriverContext driverContext) { - return new MinDoubleGroupingAggregatorFunction(channels, new DoubleArrayState(driverContext.bigArrays(), MinDoubleAggregator.init()), driverContext); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, - Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - DoubleBlock valuesBlock = (DoubleBlock) uncastValuesBlock; - DoubleVector valuesVector = valuesBlock.asVector(); - if (valuesVector == null) { - if (valuesBlock.mayHaveNulls()) { - state.enableGroupIdTracking(seenGroupIds); - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - }; - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - }; - } - - private void addRawInput(int positionOffset, IntVector groups, DoubleBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - state.set(groupId, MinDoubleAggregator.combine(state.getOrDefault(groupId), values.getDouble(v))); - } - } - } - - private void addRawInput(int positionOffset, IntVector groups, DoubleVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - state.set(groupId, MinDoubleAggregator.combine(state.getOrDefault(groupId), values.getDouble(groupPosition + positionOffset))); - } - } - - private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values) { - for (int 
groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - state.set(groupId, MinDoubleAggregator.combine(state.getOrDefault(groupId), values.getDouble(v))); - } - } - } - } - - private void addRawInput(int positionOffset, IntBlock groups, DoubleVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - state.set(groupId, MinDoubleAggregator.combine(state.getOrDefault(groupId), values.getDouble(groupPosition + positionOffset))); - } - } - } - - @Override - public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - assert channels.size() == intermediateBlockCount(); - DoubleVector min = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); - assert min.getPositionCount() == seen.getPositionCount(); - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (seen.getBoolean(groupPosition + positionOffset)) { - state.set(groupId, MinDoubleAggregator.combine(state.getOrDefault(groupId), min.getDouble(groupPosition + positionOffset))); - } - } - } - - @Override - public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { - if (input.getClass() != getClass()) { - throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); - } - DoubleArrayState inState = ((MinDoubleGroupingAggregatorFunction) input).state; - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - if (inState.hasValue(position)) { - state.set(groupId, MinDoubleAggregator.combine(state.getOrDefault(groupId), inState.get(position))); - } - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { - state.toIntermediate(blocks, offset, selected, driverContext); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, IntVector selected, - DriverContext driverContext) { - blocks[offset] = state.toValuesBlock(selected, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java 
deleted file mode 100644 index 7e30dcd5a0561..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java +++ /dev/null @@ -1,134 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunction} implementation for {@link MinIntAggregator}. - * This class is generated. Do not edit it. - */ -public final class MinIntAggregatorFunction implements AggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("min", ElementType.INT), - new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); - - private final DriverContext driverContext; - - private final IntState state; - - private final List channels; - - public MinIntAggregatorFunction(DriverContext driverContext, List channels, - IntState state) { - this.driverContext = driverContext; - this.channels = channels; - this.state = state; - } - - public static MinIntAggregatorFunction create(DriverContext driverContext, - List channels) { - return new MinIntAggregatorFunction(driverContext, channels, new IntState(MinIntAggregator.init())); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - IntBlock block = (IntBlock) uncastBlock; - IntVector vector = block.asVector(); - if (vector != null) { - addRawVector(vector); - } else { - addRawBlock(block); - } - } - - private void addRawVector(IntVector vector) { - state.seen(true); - for (int i = 0; i < vector.getPositionCount(); i++) { - state.intValue(MinIntAggregator.combine(state.intValue(), vector.getInt(i))); - } - } - - private void addRawBlock(IntBlock block) { - for (int p = 0; p < block.getPositionCount(); p++) { - if (block.isNull(p)) { - continue; - } - state.seen(true); - int start = block.getFirstValueIndex(p); - int end = start + block.getValueCount(p); - for (int i = start; i < end; i++) { - state.intValue(MinIntAggregator.combine(state.intValue(), block.getInt(i))); - } - } - } - - @Override - public void addIntermediateInput(Page page) { - assert channels.size() == intermediateBlockCount(); - assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - IntVector min = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); - assert min.getPositionCount() == 1; - assert min.getPositionCount() == seen.getPositionCount(); - if (seen.getBoolean(0)) { - 
state.intValue(MinIntAggregator.combine(state.intValue(), min.getInt(0))); - state.seen(true); - } - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset) { - state.toIntermediate(blocks, offset); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { - if (state.seen() == false) { - blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); - return; - } - blocks[offset] = IntBlock.newConstantBlockWith(state.intValue(), 1, driverContext.blockFactory()); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionSupplier.java deleted file mode 100644 index d1a6411c2cf2c..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionSupplier.java +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.util.List; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunctionSupplier} implementation for {@link MinIntAggregator}. - * This class is generated. Do not edit it. - */ -public final class MinIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final BigArrays bigArrays; - - private final List channels; - - public MinIntAggregatorFunctionSupplier(BigArrays bigArrays, List channels) { - this.bigArrays = bigArrays; - this.channels = channels; - } - - @Override - public MinIntAggregatorFunction aggregator(DriverContext driverContext) { - return MinIntAggregatorFunction.create(driverContext, channels); - } - - @Override - public MinIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { - return MinIntGroupingAggregatorFunction.create(channels, driverContext); - } - - @Override - public String describe() { - return "min of ints"; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java deleted file mode 100644 index bf0c4c5a5e02c..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java +++ /dev/null @@ -1,211 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. 
-package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link GroupingAggregatorFunction} implementation for {@link MinIntAggregator}. - * This class is generated. Do not edit it. - */ -public final class MinIntGroupingAggregatorFunction implements GroupingAggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("min", ElementType.INT), - new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); - - private final IntArrayState state; - - private final List channels; - - private final DriverContext driverContext; - - public MinIntGroupingAggregatorFunction(List channels, IntArrayState state, - DriverContext driverContext) { - this.channels = channels; - this.state = state; - this.driverContext = driverContext; - } - - public static MinIntGroupingAggregatorFunction create(List channels, - DriverContext driverContext) { - return new MinIntGroupingAggregatorFunction(channels, new IntArrayState(driverContext.bigArrays(), MinIntAggregator.init()), driverContext); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, - Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - IntBlock valuesBlock = (IntBlock) uncastValuesBlock; - IntVector valuesVector = valuesBlock.asVector(); - if (valuesVector == null) { - if (valuesBlock.mayHaveNulls()) { - state.enableGroupIdTracking(seenGroupIds); - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - }; - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - }; - } - - private void addRawInput(int positionOffset, IntVector groups, IntBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + 
values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - state.set(groupId, MinIntAggregator.combine(state.getOrDefault(groupId), values.getInt(v))); - } - } - } - - private void addRawInput(int positionOffset, IntVector groups, IntVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - state.set(groupId, MinIntAggregator.combine(state.getOrDefault(groupId), values.getInt(groupPosition + positionOffset))); - } - } - - private void addRawInput(int positionOffset, IntBlock groups, IntBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - state.set(groupId, MinIntAggregator.combine(state.getOrDefault(groupId), values.getInt(v))); - } - } - } - } - - private void addRawInput(int positionOffset, IntBlock groups, IntVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - state.set(groupId, MinIntAggregator.combine(state.getOrDefault(groupId), values.getInt(groupPosition + positionOffset))); - } - } - } - - @Override - public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - assert channels.size() == intermediateBlockCount(); - IntVector min = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); - assert min.getPositionCount() == seen.getPositionCount(); - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (seen.getBoolean(groupPosition + positionOffset)) { - state.set(groupId, MinIntAggregator.combine(state.getOrDefault(groupId), min.getInt(groupPosition + positionOffset))); - } - } - } - - @Override - public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { - if (input.getClass() != getClass()) { - throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); - } - IntArrayState inState = ((MinIntGroupingAggregatorFunction) input).state; - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - if (inState.hasValue(position)) { - state.set(groupId, MinIntAggregator.combine(state.getOrDefault(groupId), inState.get(position))); - } - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { - state.toIntermediate(blocks, offset, selected, driverContext); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, 
IntVector selected, - DriverContext driverContext) { - blocks[offset] = state.toValuesBlock(selected, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java deleted file mode 100644 index f6987443ebfaa..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java +++ /dev/null @@ -1,134 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunction} implementation for {@link MinLongAggregator}. - * This class is generated. Do not edit it. 
- */ -public final class MinLongAggregatorFunction implements AggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("min", ElementType.LONG), - new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); - - private final DriverContext driverContext; - - private final LongState state; - - private final List channels; - - public MinLongAggregatorFunction(DriverContext driverContext, List channels, - LongState state) { - this.driverContext = driverContext; - this.channels = channels; - this.state = state; - } - - public static MinLongAggregatorFunction create(DriverContext driverContext, - List channels) { - return new MinLongAggregatorFunction(driverContext, channels, new LongState(MinLongAggregator.init())); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - LongBlock block = (LongBlock) uncastBlock; - LongVector vector = block.asVector(); - if (vector != null) { - addRawVector(vector); - } else { - addRawBlock(block); - } - } - - private void addRawVector(LongVector vector) { - state.seen(true); - for (int i = 0; i < vector.getPositionCount(); i++) { - state.longValue(MinLongAggregator.combine(state.longValue(), vector.getLong(i))); - } - } - - private void addRawBlock(LongBlock block) { - for (int p = 0; p < block.getPositionCount(); p++) { - if (block.isNull(p)) { - continue; - } - state.seen(true); - int start = block.getFirstValueIndex(p); - int end = start + block.getValueCount(p); - for (int i = start; i < end; i++) { - state.longValue(MinLongAggregator.combine(state.longValue(), block.getLong(i))); - } - } - } - - @Override - public void addIntermediateInput(Page page) { - assert channels.size() == intermediateBlockCount(); - assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - LongVector min = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); - assert min.getPositionCount() == 1; - assert min.getPositionCount() == seen.getPositionCount(); - if (seen.getBoolean(0)) { - state.longValue(MinLongAggregator.combine(state.longValue(), min.getLong(0))); - state.seen(true); - } - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset) { - state.toIntermediate(blocks, offset); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { - if (state.seen() == false) { - blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); - return; - } - blocks[offset] = LongBlock.newConstantBlockWith(state.longValue(), 1, driverContext.blockFactory()); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionSupplier.java deleted file mode 100644 index 4015e8de18e7b..0000000000000 --- 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionSupplier.java +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.util.List; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunctionSupplier} implementation for {@link MinLongAggregator}. - * This class is generated. Do not edit it. - */ -public final class MinLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final BigArrays bigArrays; - - private final List channels; - - public MinLongAggregatorFunctionSupplier(BigArrays bigArrays, List channels) { - this.bigArrays = bigArrays; - this.channels = channels; - } - - @Override - public MinLongAggregatorFunction aggregator(DriverContext driverContext) { - return MinLongAggregatorFunction.create(driverContext, channels); - } - - @Override - public MinLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { - return MinLongGroupingAggregatorFunction.create(channels, driverContext); - } - - @Override - public String describe() { - return "min of longs"; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java deleted file mode 100644 index d7baa88d6da26..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java +++ /dev/null @@ -1,213 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link GroupingAggregatorFunction} implementation for {@link MinLongAggregator}. - * This class is generated. Do not edit it. 
- */ -public final class MinLongGroupingAggregatorFunction implements GroupingAggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("min", ElementType.LONG), - new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); - - private final LongArrayState state; - - private final List channels; - - private final DriverContext driverContext; - - public MinLongGroupingAggregatorFunction(List channels, LongArrayState state, - DriverContext driverContext) { - this.channels = channels; - this.state = state; - this.driverContext = driverContext; - } - - public static MinLongGroupingAggregatorFunction create(List channels, - DriverContext driverContext) { - return new MinLongGroupingAggregatorFunction(channels, new LongArrayState(driverContext.bigArrays(), MinLongAggregator.init()), driverContext); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, - Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - LongBlock valuesBlock = (LongBlock) uncastValuesBlock; - LongVector valuesVector = valuesBlock.asVector(); - if (valuesVector == null) { - if (valuesBlock.mayHaveNulls()) { - state.enableGroupIdTracking(seenGroupIds); - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - }; - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - }; - } - - private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - state.set(groupId, MinLongAggregator.combine(state.getOrDefault(groupId), values.getLong(v))); - } - } - } - - private void addRawInput(int positionOffset, IntVector groups, LongVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - state.set(groupId, MinLongAggregator.combine(state.getOrDefault(groupId), values.getLong(groupPosition + positionOffset))); - } - } - - private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { - for (int groupPosition = 0; groupPosition < 
groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - state.set(groupId, MinLongAggregator.combine(state.getOrDefault(groupId), values.getLong(v))); - } - } - } - } - - private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - state.set(groupId, MinLongAggregator.combine(state.getOrDefault(groupId), values.getLong(groupPosition + positionOffset))); - } - } - } - - @Override - public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - assert channels.size() == intermediateBlockCount(); - LongVector min = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); - assert min.getPositionCount() == seen.getPositionCount(); - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (seen.getBoolean(groupPosition + positionOffset)) { - state.set(groupId, MinLongAggregator.combine(state.getOrDefault(groupId), min.getLong(groupPosition + positionOffset))); - } - } - } - - @Override - public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { - if (input.getClass() != getClass()) { - throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); - } - LongArrayState inState = ((MinLongGroupingAggregatorFunction) input).state; - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - if (inState.hasValue(position)) { - state.set(groupId, MinLongAggregator.combine(state.getOrDefault(groupId), inState.get(position))); - } - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { - state.toIntermediate(blocks, offset, selected, driverContext); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, IntVector selected, - DriverContext driverContext) { - blocks[offset] = state.toValuesBlock(selected, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java deleted file mode 100644 index 
cf2fc63584bec..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java +++ /dev/null @@ -1,127 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunction} implementation for {@link PercentileDoubleAggregator}. - * This class is generated. Do not edit it. - */ -public final class PercentileDoubleAggregatorFunction implements AggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("quart", ElementType.BYTES_REF) ); - - private final DriverContext driverContext; - - private final QuantileStates.SingleState state; - - private final List channels; - - private final double percentile; - - public PercentileDoubleAggregatorFunction(DriverContext driverContext, List channels, - QuantileStates.SingleState state, double percentile) { - this.driverContext = driverContext; - this.channels = channels; - this.state = state; - this.percentile = percentile; - } - - public static PercentileDoubleAggregatorFunction create(DriverContext driverContext, - List channels, double percentile) { - return new PercentileDoubleAggregatorFunction(driverContext, channels, PercentileDoubleAggregator.initSingle(percentile), percentile); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - DoubleBlock block = (DoubleBlock) uncastBlock; - DoubleVector vector = block.asVector(); - if (vector != null) { - addRawVector(vector); - } else { - addRawBlock(block); - } - } - - private void addRawVector(DoubleVector vector) { - for (int i = 0; i < vector.getPositionCount(); i++) { - PercentileDoubleAggregator.combine(state, vector.getDouble(i)); - } - } - - private void addRawBlock(DoubleBlock block) { - for (int p = 0; p < block.getPositionCount(); p++) { - if (block.isNull(p)) { - continue; - } - int start = block.getFirstValueIndex(p); - int end = start + block.getValueCount(p); - for (int i = start; i < end; i++) { - PercentileDoubleAggregator.combine(state, block.getDouble(i)); - } - } - } - - @Override - public void addIntermediateInput(Page page) { - assert channels.size() == intermediateBlockCount(); - assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); - assert quart.getPositionCount() == 1; - BytesRef scratch = new BytesRef(); - 
PercentileDoubleAggregator.combineIntermediate(state, quart.getBytesRef(0, scratch)); - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset) { - state.toIntermediate(blocks, offset); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { - blocks[offset] = PercentileDoubleAggregator.evaluateFinal(state, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionSupplier.java deleted file mode 100644 index b32c28ef133ec..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionSupplier.java +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.util.List; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunctionSupplier} implementation for {@link PercentileDoubleAggregator}. - * This class is generated. Do not edit it. - */ -public final class PercentileDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final BigArrays bigArrays; - - private final List channels; - - private final double percentile; - - public PercentileDoubleAggregatorFunctionSupplier(BigArrays bigArrays, List channels, - double percentile) { - this.bigArrays = bigArrays; - this.channels = channels; - this.percentile = percentile; - } - - @Override - public PercentileDoubleAggregatorFunction aggregator(DriverContext driverContext) { - return PercentileDoubleAggregatorFunction.create(driverContext, channels, percentile); - } - - @Override - public PercentileDoubleGroupingAggregatorFunction groupingAggregator( - DriverContext driverContext) { - return PercentileDoubleGroupingAggregatorFunction.create(channels, driverContext, bigArrays, percentile); - } - - @Override - public String describe() { - return "percentile of doubles"; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java deleted file mode 100644 index 63613daa47ad1..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java +++ /dev/null @@ -1,216 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. 
-package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link GroupingAggregatorFunction} implementation for {@link PercentileDoubleAggregator}. - * This class is generated. Do not edit it. - */ -public final class PercentileDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("quart", ElementType.BYTES_REF) ); - - private final QuantileStates.GroupingState state; - - private final List channels; - - private final DriverContext driverContext; - - private final BigArrays bigArrays; - - private final double percentile; - - public PercentileDoubleGroupingAggregatorFunction(List channels, - QuantileStates.GroupingState state, DriverContext driverContext, BigArrays bigArrays, - double percentile) { - this.channels = channels; - this.state = state; - this.driverContext = driverContext; - this.bigArrays = bigArrays; - this.percentile = percentile; - } - - public static PercentileDoubleGroupingAggregatorFunction create(List channels, - DriverContext driverContext, BigArrays bigArrays, double percentile) { - return new PercentileDoubleGroupingAggregatorFunction(channels, PercentileDoubleAggregator.initGrouping(bigArrays, percentile), driverContext, bigArrays, percentile); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, - Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - DoubleBlock valuesBlock = (DoubleBlock) uncastValuesBlock; - DoubleVector valuesVector = valuesBlock.asVector(); - if (valuesVector == null) { - if (valuesBlock.mayHaveNulls()) { - state.enableGroupIdTracking(seenGroupIds); - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - }; - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - 
addRawInput(positionOffset, groupIds, valuesVector); - } - }; - } - - private void addRawInput(int positionOffset, IntVector groups, DoubleBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - PercentileDoubleAggregator.combine(state, groupId, values.getDouble(v)); - } - } - } - - private void addRawInput(int positionOffset, IntVector groups, DoubleVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - PercentileDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); - } - } - - private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - PercentileDoubleAggregator.combine(state, groupId, values.getDouble(v)); - } - } - } - } - - private void addRawInput(int positionOffset, IntBlock groups, DoubleVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - PercentileDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); - } - } - } - - @Override - public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - assert channels.size() == intermediateBlockCount(); - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); - BytesRef scratch = new BytesRef(); - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - PercentileDoubleAggregator.combineIntermediate(state, groupId, quart.getBytesRef(groupPosition + positionOffset, scratch)); - } - } - - @Override - public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { - if (input.getClass() != getClass()) { - throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); - } - QuantileStates.GroupingState inState = ((PercentileDoubleGroupingAggregatorFunction) input).state; - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - PercentileDoubleAggregator.combineStates(state, groupId, inState, position); - } 
- - @Override - public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { - state.toIntermediate(blocks, offset, selected, driverContext); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, IntVector selected, - DriverContext driverContext) { - blocks[offset] = PercentileDoubleAggregator.evaluateFinal(state, selected, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java deleted file mode 100644 index ab69c5f27c8f2..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java +++ /dev/null @@ -1,127 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunction} implementation for {@link PercentileIntAggregator}. - * This class is generated. Do not edit it. 
- */ -public final class PercentileIntAggregatorFunction implements AggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("quart", ElementType.BYTES_REF) ); - - private final DriverContext driverContext; - - private final QuantileStates.SingleState state; - - private final List channels; - - private final double percentile; - - public PercentileIntAggregatorFunction(DriverContext driverContext, List channels, - QuantileStates.SingleState state, double percentile) { - this.driverContext = driverContext; - this.channels = channels; - this.state = state; - this.percentile = percentile; - } - - public static PercentileIntAggregatorFunction create(DriverContext driverContext, - List channels, double percentile) { - return new PercentileIntAggregatorFunction(driverContext, channels, PercentileIntAggregator.initSingle(percentile), percentile); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - IntBlock block = (IntBlock) uncastBlock; - IntVector vector = block.asVector(); - if (vector != null) { - addRawVector(vector); - } else { - addRawBlock(block); - } - } - - private void addRawVector(IntVector vector) { - for (int i = 0; i < vector.getPositionCount(); i++) { - PercentileIntAggregator.combine(state, vector.getInt(i)); - } - } - - private void addRawBlock(IntBlock block) { - for (int p = 0; p < block.getPositionCount(); p++) { - if (block.isNull(p)) { - continue; - } - int start = block.getFirstValueIndex(p); - int end = start + block.getValueCount(p); - for (int i = start; i < end; i++) { - PercentileIntAggregator.combine(state, block.getInt(i)); - } - } - } - - @Override - public void addIntermediateInput(Page page) { - assert channels.size() == intermediateBlockCount(); - assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); - assert quart.getPositionCount() == 1; - BytesRef scratch = new BytesRef(); - PercentileIntAggregator.combineIntermediate(state, quart.getBytesRef(0, scratch)); - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset) { - state.toIntermediate(blocks, offset); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { - blocks[offset] = PercentileIntAggregator.evaluateFinal(state, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionSupplier.java deleted file mode 100644 index 72893a1dd95b3..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionSupplier.java +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.util.List; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunctionSupplier} implementation for {@link PercentileIntAggregator}. - * This class is generated. Do not edit it. - */ -public final class PercentileIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final BigArrays bigArrays; - - private final List channels; - - private final double percentile; - - public PercentileIntAggregatorFunctionSupplier(BigArrays bigArrays, List channels, - double percentile) { - this.bigArrays = bigArrays; - this.channels = channels; - this.percentile = percentile; - } - - @Override - public PercentileIntAggregatorFunction aggregator(DriverContext driverContext) { - return PercentileIntAggregatorFunction.create(driverContext, channels, percentile); - } - - @Override - public PercentileIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { - return PercentileIntGroupingAggregatorFunction.create(channels, driverContext, bigArrays, percentile); - } - - @Override - public String describe() { - return "percentile of ints"; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java deleted file mode 100644 index 38911d7a02f05..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java +++ /dev/null @@ -1,214 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link GroupingAggregatorFunction} implementation for {@link PercentileIntAggregator}. - * This class is generated. Do not edit it. 
- */ -public final class PercentileIntGroupingAggregatorFunction implements GroupingAggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("quart", ElementType.BYTES_REF) ); - - private final QuantileStates.GroupingState state; - - private final List channels; - - private final DriverContext driverContext; - - private final BigArrays bigArrays; - - private final double percentile; - - public PercentileIntGroupingAggregatorFunction(List channels, - QuantileStates.GroupingState state, DriverContext driverContext, BigArrays bigArrays, - double percentile) { - this.channels = channels; - this.state = state; - this.driverContext = driverContext; - this.bigArrays = bigArrays; - this.percentile = percentile; - } - - public static PercentileIntGroupingAggregatorFunction create(List channels, - DriverContext driverContext, BigArrays bigArrays, double percentile) { - return new PercentileIntGroupingAggregatorFunction(channels, PercentileIntAggregator.initGrouping(bigArrays, percentile), driverContext, bigArrays, percentile); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, - Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - IntBlock valuesBlock = (IntBlock) uncastValuesBlock; - IntVector valuesVector = valuesBlock.asVector(); - if (valuesVector == null) { - if (valuesBlock.mayHaveNulls()) { - state.enableGroupIdTracking(seenGroupIds); - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - }; - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - }; - } - - private void addRawInput(int positionOffset, IntVector groups, IntBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - PercentileIntAggregator.combine(state, groupId, values.getInt(v)); - } - } - } - - private void addRawInput(int positionOffset, IntVector groups, IntVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - PercentileIntAggregator.combine(state, groupId, 
values.getInt(groupPosition + positionOffset)); - } - } - - private void addRawInput(int positionOffset, IntBlock groups, IntBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - PercentileIntAggregator.combine(state, groupId, values.getInt(v)); - } - } - } - } - - private void addRawInput(int positionOffset, IntBlock groups, IntVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - PercentileIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); - } - } - } - - @Override - public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - assert channels.size() == intermediateBlockCount(); - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); - BytesRef scratch = new BytesRef(); - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - PercentileIntAggregator.combineIntermediate(state, groupId, quart.getBytesRef(groupPosition + positionOffset, scratch)); - } - } - - @Override - public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { - if (input.getClass() != getClass()) { - throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); - } - QuantileStates.GroupingState inState = ((PercentileIntGroupingAggregatorFunction) input).state; - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - PercentileIntAggregator.combineStates(state, groupId, inState, position); - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { - state.toIntermediate(blocks, offset, selected, driverContext); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, IntVector selected, - DriverContext driverContext) { - blocks[offset] = PercentileIntAggregator.evaluateFinal(state, selected, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java deleted file mode 100644 index 1629ea7de8f67..0000000000000 --- 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java +++ /dev/null @@ -1,127 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunction} implementation for {@link PercentileLongAggregator}. - * This class is generated. Do not edit it. - */ -public final class PercentileLongAggregatorFunction implements AggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("quart", ElementType.BYTES_REF) ); - - private final DriverContext driverContext; - - private final QuantileStates.SingleState state; - - private final List channels; - - private final double percentile; - - public PercentileLongAggregatorFunction(DriverContext driverContext, List channels, - QuantileStates.SingleState state, double percentile) { - this.driverContext = driverContext; - this.channels = channels; - this.state = state; - this.percentile = percentile; - } - - public static PercentileLongAggregatorFunction create(DriverContext driverContext, - List channels, double percentile) { - return new PercentileLongAggregatorFunction(driverContext, channels, PercentileLongAggregator.initSingle(percentile), percentile); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - LongBlock block = (LongBlock) uncastBlock; - LongVector vector = block.asVector(); - if (vector != null) { - addRawVector(vector); - } else { - addRawBlock(block); - } - } - - private void addRawVector(LongVector vector) { - for (int i = 0; i < vector.getPositionCount(); i++) { - PercentileLongAggregator.combine(state, vector.getLong(i)); - } - } - - private void addRawBlock(LongBlock block) { - for (int p = 0; p < block.getPositionCount(); p++) { - if (block.isNull(p)) { - continue; - } - int start = block.getFirstValueIndex(p); - int end = start + block.getValueCount(p); - for (int i = start; i < end; i++) { - PercentileLongAggregator.combine(state, block.getLong(i)); - } - } - } - - @Override - public void addIntermediateInput(Page page) { - assert channels.size() == intermediateBlockCount(); - assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); - assert quart.getPositionCount() == 1; - BytesRef scratch = new BytesRef(); - PercentileLongAggregator.combineIntermediate(state, 
quart.getBytesRef(0, scratch)); - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset) { - state.toIntermediate(blocks, offset); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { - blocks[offset] = PercentileLongAggregator.evaluateFinal(state, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionSupplier.java deleted file mode 100644 index a71de850814ff..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionSupplier.java +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.util.List; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunctionSupplier} implementation for {@link PercentileLongAggregator}. - * This class is generated. Do not edit it. - */ -public final class PercentileLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final BigArrays bigArrays; - - private final List channels; - - private final double percentile; - - public PercentileLongAggregatorFunctionSupplier(BigArrays bigArrays, List channels, - double percentile) { - this.bigArrays = bigArrays; - this.channels = channels; - this.percentile = percentile; - } - - @Override - public PercentileLongAggregatorFunction aggregator(DriverContext driverContext) { - return PercentileLongAggregatorFunction.create(driverContext, channels, percentile); - } - - @Override - public PercentileLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { - return PercentileLongGroupingAggregatorFunction.create(channels, driverContext, bigArrays, percentile); - } - - @Override - public String describe() { - return "percentile of longs"; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java deleted file mode 100644 index efb512e84f1f5..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java +++ /dev/null @@ -1,216 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. 
-package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link GroupingAggregatorFunction} implementation for {@link PercentileLongAggregator}. - * This class is generated. Do not edit it. - */ -public final class PercentileLongGroupingAggregatorFunction implements GroupingAggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("quart", ElementType.BYTES_REF) ); - - private final QuantileStates.GroupingState state; - - private final List channels; - - private final DriverContext driverContext; - - private final BigArrays bigArrays; - - private final double percentile; - - public PercentileLongGroupingAggregatorFunction(List channels, - QuantileStates.GroupingState state, DriverContext driverContext, BigArrays bigArrays, - double percentile) { - this.channels = channels; - this.state = state; - this.driverContext = driverContext; - this.bigArrays = bigArrays; - this.percentile = percentile; - } - - public static PercentileLongGroupingAggregatorFunction create(List channels, - DriverContext driverContext, BigArrays bigArrays, double percentile) { - return new PercentileLongGroupingAggregatorFunction(channels, PercentileLongAggregator.initGrouping(bigArrays, percentile), driverContext, bigArrays, percentile); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, - Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - LongBlock valuesBlock = (LongBlock) uncastValuesBlock; - LongVector valuesVector = valuesBlock.asVector(); - if (valuesVector == null) { - if (valuesBlock.mayHaveNulls()) { - state.enableGroupIdTracking(seenGroupIds); - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - }; - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, 
valuesVector); - } - }; - } - - private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - PercentileLongAggregator.combine(state, groupId, values.getLong(v)); - } - } - } - - private void addRawInput(int positionOffset, IntVector groups, LongVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - PercentileLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); - } - } - - private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - PercentileLongAggregator.combine(state, groupId, values.getLong(v)); - } - } - } - } - - private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - PercentileLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); - } - } - } - - @Override - public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - assert channels.size() == intermediateBlockCount(); - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); - BytesRef scratch = new BytesRef(); - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - PercentileLongAggregator.combineIntermediate(state, groupId, quart.getBytesRef(groupPosition + positionOffset, scratch)); - } - } - - @Override - public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { - if (input.getClass() != getClass()) { - throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); - } - QuantileStates.GroupingState inState = ((PercentileLongGroupingAggregatorFunction) input).state; - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - PercentileLongAggregator.combineStates(state, groupId, inState, position); - } - - @Override - public void evaluateIntermediate(Block[] blocks, 
int offset, IntVector selected) { - state.toIntermediate(blocks, offset, selected, driverContext); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, IntVector selected, - DriverContext driverContext) { - blocks[offset] = PercentileLongAggregator.evaluateFinal(state, selected, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java deleted file mode 100644 index d40c4812975a3..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java +++ /dev/null @@ -1,133 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunction} implementation for {@link SumDoubleAggregator}. - * This class is generated. Do not edit it. 
- */ -public final class SumDoubleAggregatorFunction implements AggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("value", ElementType.DOUBLE), - new IntermediateStateDesc("delta", ElementType.DOUBLE), - new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); - - private final DriverContext driverContext; - - private final SumDoubleAggregator.SumState state; - - private final List channels; - - public SumDoubleAggregatorFunction(DriverContext driverContext, List channels, - SumDoubleAggregator.SumState state) { - this.driverContext = driverContext; - this.channels = channels; - this.state = state; - } - - public static SumDoubleAggregatorFunction create(DriverContext driverContext, - List channels) { - return new SumDoubleAggregatorFunction(driverContext, channels, SumDoubleAggregator.initSingle()); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - DoubleBlock block = (DoubleBlock) uncastBlock; - DoubleVector vector = block.asVector(); - if (vector != null) { - addRawVector(vector); - } else { - addRawBlock(block); - } - } - - private void addRawVector(DoubleVector vector) { - state.seen(true); - for (int i = 0; i < vector.getPositionCount(); i++) { - SumDoubleAggregator.combine(state, vector.getDouble(i)); - } - } - - private void addRawBlock(DoubleBlock block) { - for (int p = 0; p < block.getPositionCount(); p++) { - if (block.isNull(p)) { - continue; - } - state.seen(true); - int start = block.getFirstValueIndex(p); - int end = start + block.getValueCount(p); - for (int i = start; i < end; i++) { - SumDoubleAggregator.combine(state, block.getDouble(i)); - } - } - } - - @Override - public void addIntermediateInput(Page page) { - assert channels.size() == intermediateBlockCount(); - assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - DoubleVector value = page.getBlock(channels.get(0)).asVector(); - DoubleVector delta = page.getBlock(channels.get(1)).asVector(); - BooleanVector seen = page.getBlock(channels.get(2)).asVector(); - assert value.getPositionCount() == 1; - assert value.getPositionCount() == delta.getPositionCount() && value.getPositionCount() == seen.getPositionCount(); - SumDoubleAggregator.combineIntermediate(state, value.getDouble(0), delta.getDouble(0), seen.getBoolean(0)); - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset) { - state.toIntermediate(blocks, offset); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { - if (state.seen() == false) { - blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); - return; - } - blocks[offset] = SumDoubleAggregator.evaluateFinal(state, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionSupplier.java 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionSupplier.java deleted file mode 100644 index d6898669ab339..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionSupplier.java +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.util.List; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunctionSupplier} implementation for {@link SumDoubleAggregator}. - * This class is generated. Do not edit it. - */ -public final class SumDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final BigArrays bigArrays; - - private final List channels; - - public SumDoubleAggregatorFunctionSupplier(BigArrays bigArrays, List channels) { - this.bigArrays = bigArrays; - this.channels = channels; - } - - @Override - public SumDoubleAggregatorFunction aggregator(DriverContext driverContext) { - return SumDoubleAggregatorFunction.create(driverContext, channels); - } - - @Override - public SumDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { - return SumDoubleGroupingAggregatorFunction.create(channels, driverContext, bigArrays); - } - - @Override - public String describe() { - return "sum of doubles"; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java deleted file mode 100644 index 6094c7115159f..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java +++ /dev/null @@ -1,216 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link GroupingAggregatorFunction} implementation for {@link SumDoubleAggregator}. - * This class is generated. Do not edit it. 
- */ -public final class SumDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("value", ElementType.DOUBLE), - new IntermediateStateDesc("delta", ElementType.DOUBLE), - new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); - - private final SumDoubleAggregator.GroupingSumState state; - - private final List channels; - - private final DriverContext driverContext; - - private final BigArrays bigArrays; - - public SumDoubleGroupingAggregatorFunction(List channels, - SumDoubleAggregator.GroupingSumState state, DriverContext driverContext, - BigArrays bigArrays) { - this.channels = channels; - this.state = state; - this.driverContext = driverContext; - this.bigArrays = bigArrays; - } - - public static SumDoubleGroupingAggregatorFunction create(List channels, - DriverContext driverContext, BigArrays bigArrays) { - return new SumDoubleGroupingAggregatorFunction(channels, SumDoubleAggregator.initGrouping(bigArrays), driverContext, bigArrays); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, - Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - DoubleBlock valuesBlock = (DoubleBlock) uncastValuesBlock; - DoubleVector valuesVector = valuesBlock.asVector(); - if (valuesVector == null) { - if (valuesBlock.mayHaveNulls()) { - state.enableGroupIdTracking(seenGroupIds); - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - }; - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - }; - } - - private void addRawInput(int positionOffset, IntVector groups, DoubleBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - SumDoubleAggregator.combine(state, groupId, values.getDouble(v)); - } - } - } - - private void addRawInput(int positionOffset, IntVector groups, DoubleVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - SumDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + 
positionOffset)); - } - } - - private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - SumDoubleAggregator.combine(state, groupId, values.getDouble(v)); - } - } - } - } - - private void addRawInput(int positionOffset, IntBlock groups, DoubleVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - SumDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); - } - } - } - - @Override - public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - assert channels.size() == intermediateBlockCount(); - DoubleVector value = page.getBlock(channels.get(0)).asVector(); - DoubleVector delta = page.getBlock(channels.get(1)).asVector(); - BooleanVector seen = page.getBlock(channels.get(2)).asVector(); - assert value.getPositionCount() == delta.getPositionCount() && value.getPositionCount() == seen.getPositionCount(); - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - SumDoubleAggregator.combineIntermediate(state, groupId, value.getDouble(groupPosition + positionOffset), delta.getDouble(groupPosition + positionOffset), seen.getBoolean(groupPosition + positionOffset)); - } - } - - @Override - public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { - if (input.getClass() != getClass()) { - throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); - } - SumDoubleAggregator.GroupingSumState inState = ((SumDoubleGroupingAggregatorFunction) input).state; - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - SumDoubleAggregator.combineStates(state, groupId, inState, position); - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { - state.toIntermediate(blocks, offset, selected, driverContext); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, IntVector selected, - DriverContext driverContext) { - blocks[offset] = SumDoubleAggregator.evaluateFinal(state, selected, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java deleted file mode 100644 index 2cc6c85c72857..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java +++ /dev/null @@ -1,136 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunction} implementation for {@link SumIntAggregator}. - * This class is generated. Do not edit it. - */ -public final class SumIntAggregatorFunction implements AggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("sum", ElementType.LONG), - new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); - - private final DriverContext driverContext; - - private final LongState state; - - private final List channels; - - public SumIntAggregatorFunction(DriverContext driverContext, List channels, - LongState state) { - this.driverContext = driverContext; - this.channels = channels; - this.state = state; - } - - public static SumIntAggregatorFunction create(DriverContext driverContext, - List channels) { - return new SumIntAggregatorFunction(driverContext, channels, new LongState(SumIntAggregator.init())); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - IntBlock block = (IntBlock) uncastBlock; - IntVector vector = block.asVector(); - if (vector != null) { - addRawVector(vector); - } else { - addRawBlock(block); - } - } - - private void addRawVector(IntVector vector) { - state.seen(true); - for (int i = 0; i < vector.getPositionCount(); i++) { - state.longValue(SumIntAggregator.combine(state.longValue(), vector.getInt(i))); - } - } - - private void addRawBlock(IntBlock block) { - for (int p = 0; p < block.getPositionCount(); p++) { - if (block.isNull(p)) { - continue; - } - state.seen(true); - int start = block.getFirstValueIndex(p); - int end = start + block.getValueCount(p); - for (int i = start; i < end; i++) { - state.longValue(SumIntAggregator.combine(state.longValue(), block.getInt(i))); - } - } - } - - @Override - public void addIntermediateInput(Page page) { - assert channels.size() == intermediateBlockCount(); - assert page.getBlockCount() >= 
channels.get(0) + intermediateStateDesc().size(); - LongVector sum = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); - assert sum.getPositionCount() == 1; - assert sum.getPositionCount() == seen.getPositionCount(); - if (seen.getBoolean(0)) { - state.longValue(SumIntAggregator.combine(state.longValue(), sum.getLong(0))); - state.seen(true); - } - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset) { - state.toIntermediate(blocks, offset); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { - if (state.seen() == false) { - blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); - return; - } - blocks[offset] = LongBlock.newConstantBlockWith(state.longValue(), 1, driverContext.blockFactory()); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionSupplier.java deleted file mode 100644 index 01294de12de45..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionSupplier.java +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.util.List; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunctionSupplier} implementation for {@link SumIntAggregator}. - * This class is generated. Do not edit it. - */ -public final class SumIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final BigArrays bigArrays; - - private final List channels; - - public SumIntAggregatorFunctionSupplier(BigArrays bigArrays, List channels) { - this.bigArrays = bigArrays; - this.channels = channels; - } - - @Override - public SumIntAggregatorFunction aggregator(DriverContext driverContext) { - return SumIntAggregatorFunction.create(driverContext, channels); - } - - @Override - public SumIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { - return SumIntGroupingAggregatorFunction.create(channels, driverContext); - } - - @Override - public String describe() { - return "sum of ints"; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java deleted file mode 100644 index d0c9ecb6ac84a..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java +++ /dev/null @@ -1,213 +0,0 @@ -// Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link GroupingAggregatorFunction} implementation for {@link SumIntAggregator}. - * This class is generated. Do not edit it. - */ -public final class SumIntGroupingAggregatorFunction implements GroupingAggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("sum", ElementType.LONG), - new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); - - private final LongArrayState state; - - private final List channels; - - private final DriverContext driverContext; - - public SumIntGroupingAggregatorFunction(List channels, LongArrayState state, - DriverContext driverContext) { - this.channels = channels; - this.state = state; - this.driverContext = driverContext; - } - - public static SumIntGroupingAggregatorFunction create(List channels, - DriverContext driverContext) { - return new SumIntGroupingAggregatorFunction(channels, new LongArrayState(driverContext.bigArrays(), SumIntAggregator.init()), driverContext); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, - Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - IntBlock valuesBlock = (IntBlock) uncastValuesBlock; - IntVector valuesVector = valuesBlock.asVector(); - if (valuesVector == null) { - if (valuesBlock.mayHaveNulls()) { - state.enableGroupIdTracking(seenGroupIds); - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - }; - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - }; - } - - private void addRawInput(int positionOffset, IntVector groups, IntBlock values) { - for (int 
groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - state.set(groupId, SumIntAggregator.combine(state.getOrDefault(groupId), values.getInt(v))); - } - } - } - - private void addRawInput(int positionOffset, IntVector groups, IntVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - state.set(groupId, SumIntAggregator.combine(state.getOrDefault(groupId), values.getInt(groupPosition + positionOffset))); - } - } - - private void addRawInput(int positionOffset, IntBlock groups, IntBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - state.set(groupId, SumIntAggregator.combine(state.getOrDefault(groupId), values.getInt(v))); - } - } - } - } - - private void addRawInput(int positionOffset, IntBlock groups, IntVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - state.set(groupId, SumIntAggregator.combine(state.getOrDefault(groupId), values.getInt(groupPosition + positionOffset))); - } - } - } - - @Override - public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - assert channels.size() == intermediateBlockCount(); - LongVector sum = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); - assert sum.getPositionCount() == seen.getPositionCount(); - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (seen.getBoolean(groupPosition + positionOffset)) { - state.set(groupId, SumIntAggregator.combine(state.getOrDefault(groupId), sum.getLong(groupPosition + positionOffset))); - } - } - } - - @Override - public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { - if (input.getClass() != getClass()) { - throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); - } - LongArrayState inState = ((SumIntGroupingAggregatorFunction) input).state; - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - if (inState.hasValue(position)) { - state.set(groupId, 
SumIntAggregator.combine(state.getOrDefault(groupId), inState.get(position))); - } - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { - state.toIntermediate(blocks, offset, selected, driverContext); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, IntVector selected, - DriverContext driverContext) { - blocks[offset] = state.toValuesBlock(selected, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java deleted file mode 100644 index e61c95ca622df..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java +++ /dev/null @@ -1,134 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunction} implementation for {@link SumLongAggregator}. - * This class is generated. Do not edit it. 
- */ -public final class SumLongAggregatorFunction implements AggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("sum", ElementType.LONG), - new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); - - private final DriverContext driverContext; - - private final LongState state; - - private final List channels; - - public SumLongAggregatorFunction(DriverContext driverContext, List channels, - LongState state) { - this.driverContext = driverContext; - this.channels = channels; - this.state = state; - } - - public static SumLongAggregatorFunction create(DriverContext driverContext, - List channels) { - return new SumLongAggregatorFunction(driverContext, channels, new LongState(SumLongAggregator.init())); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - LongBlock block = (LongBlock) uncastBlock; - LongVector vector = block.asVector(); - if (vector != null) { - addRawVector(vector); - } else { - addRawBlock(block); - } - } - - private void addRawVector(LongVector vector) { - state.seen(true); - for (int i = 0; i < vector.getPositionCount(); i++) { - state.longValue(SumLongAggregator.combine(state.longValue(), vector.getLong(i))); - } - } - - private void addRawBlock(LongBlock block) { - for (int p = 0; p < block.getPositionCount(); p++) { - if (block.isNull(p)) { - continue; - } - state.seen(true); - int start = block.getFirstValueIndex(p); - int end = start + block.getValueCount(p); - for (int i = start; i < end; i++) { - state.longValue(SumLongAggregator.combine(state.longValue(), block.getLong(i))); - } - } - } - - @Override - public void addIntermediateInput(Page page) { - assert channels.size() == intermediateBlockCount(); - assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - LongVector sum = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); - assert sum.getPositionCount() == 1; - assert sum.getPositionCount() == seen.getPositionCount(); - if (seen.getBoolean(0)) { - state.longValue(SumLongAggregator.combine(state.longValue(), sum.getLong(0))); - state.seen(true); - } - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset) { - state.toIntermediate(blocks, offset); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { - if (state.seen() == false) { - blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); - return; - } - blocks[offset] = LongBlock.newConstantBlockWith(state.longValue(), 1, driverContext.blockFactory()); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionSupplier.java deleted file mode 100644 index d72927d181f12..0000000000000 --- 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionSupplier.java +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.util.List; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link AggregatorFunctionSupplier} implementation for {@link SumLongAggregator}. - * This class is generated. Do not edit it. - */ -public final class SumLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { - private final BigArrays bigArrays; - - private final List channels; - - public SumLongAggregatorFunctionSupplier(BigArrays bigArrays, List channels) { - this.bigArrays = bigArrays; - this.channels = channels; - } - - @Override - public SumLongAggregatorFunction aggregator(DriverContext driverContext) { - return SumLongAggregatorFunction.create(driverContext, channels); - } - - @Override - public SumLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { - return SumLongGroupingAggregatorFunction.create(channels, driverContext); - } - - @Override - public String describe() { - return "sum of longs"; - } -} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java deleted file mode 100644 index 9df3cd2d76cae..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java +++ /dev/null @@ -1,213 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.compute.aggregation; - -import java.lang.Integer; -import java.lang.Override; -import java.lang.String; -import java.lang.StringBuilder; -import java.util.List; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; - -/** - * {@link GroupingAggregatorFunction} implementation for {@link SumLongAggregator}. - * This class is generated. Do not edit it. 
- */ -public final class SumLongGroupingAggregatorFunction implements GroupingAggregatorFunction { - private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("sum", ElementType.LONG), - new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); - - private final LongArrayState state; - - private final List channels; - - private final DriverContext driverContext; - - public SumLongGroupingAggregatorFunction(List channels, LongArrayState state, - DriverContext driverContext) { - this.channels = channels; - this.state = state; - this.driverContext = driverContext; - } - - public static SumLongGroupingAggregatorFunction create(List channels, - DriverContext driverContext) { - return new SumLongGroupingAggregatorFunction(channels, new LongArrayState(driverContext.bigArrays(), SumLongAggregator.init()), driverContext); - } - - public static List intermediateStateDesc() { - return INTERMEDIATE_STATE_DESC; - } - - @Override - public int intermediateBlockCount() { - return INTERMEDIATE_STATE_DESC.size(); - } - - @Override - public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, - Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - LongBlock valuesBlock = (LongBlock) uncastValuesBlock; - LongVector valuesVector = valuesBlock.asVector(); - if (valuesVector == null) { - if (valuesBlock.mayHaveNulls()) { - state.enableGroupIdTracking(seenGroupIds); - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesBlock); - } - }; - } - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - addRawInput(positionOffset, groupIds, valuesVector); - } - }; - } - - private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - state.set(groupId, SumLongAggregator.combine(state.getOrDefault(groupId), values.getLong(v))); - } - } - } - - private void addRawInput(int positionOffset, IntVector groups, LongVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - state.set(groupId, SumLongAggregator.combine(state.getOrDefault(groupId), values.getLong(groupPosition + positionOffset))); - } - } - - private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { - for (int groupPosition = 0; groupPosition < 
groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - if (values.isNull(groupPosition + positionOffset)) { - continue; - } - int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); - int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); - for (int v = valuesStart; v < valuesEnd; v++) { - state.set(groupId, SumLongAggregator.combine(state.getOrDefault(groupId), values.getLong(v))); - } - } - } - } - - private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - if (groups.isNull(groupPosition)) { - continue; - } - int groupStart = groups.getFirstValueIndex(groupPosition); - int groupEnd = groupStart + groups.getValueCount(groupPosition); - for (int g = groupStart; g < groupEnd; g++) { - int groupId = Math.toIntExact(groups.getInt(g)); - state.set(groupId, SumLongAggregator.combine(state.getOrDefault(groupId), values.getLong(groupPosition + positionOffset))); - } - } - } - - @Override - public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - assert channels.size() == intermediateBlockCount(); - LongVector sum = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); - assert sum.getPositionCount() == seen.getPositionCount(); - for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { - int groupId = Math.toIntExact(groups.getInt(groupPosition)); - if (seen.getBoolean(groupPosition + positionOffset)) { - state.set(groupId, SumLongAggregator.combine(state.getOrDefault(groupId), sum.getLong(groupPosition + positionOffset))); - } - } - } - - @Override - public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { - if (input.getClass() != getClass()) { - throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); - } - LongArrayState inState = ((SumLongGroupingAggregatorFunction) input).state; - state.enableGroupIdTracking(new SeenGroupIds.Empty()); - if (inState.hasValue(position)) { - state.set(groupId, SumLongAggregator.combine(state.getOrDefault(groupId), inState.get(position))); - } - } - - @Override - public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { - state.toIntermediate(blocks, offset, selected, driverContext); - } - - @Override - public void evaluateFinal(Block[] blocks, int offset, IntVector selected, - DriverContext driverContext) { - blocks[offset] = state.toValuesBlock(selected, driverContext); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()).append("["); - sb.append("channels=").append(channels); - sb.append("]"); - return sb.toString(); - } - - @Override - public void close() { - state.close(); - } -} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java index 412e45d74a892..4012416ad4fef 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java @@ -167,10 +167,10 @@ public void testSimulateAggs() { .forEach(i -> EqualsHashCodeTestUtils.checkEqualsAndHashCode(blocks[i], unused -> deserBlocks[i])); var inputChannels = IntStream.range(0, SumLongAggregatorFunction.intermediateStateDesc().size()).boxed().toList(); - var finalAggregator = SumLongAggregatorFunction.create(inputChannels); + var finalAggregator = SumLongAggregatorFunction.create(driverCtx, inputChannels); finalAggregator.addIntermediateInput(new Page(deserBlocks)); Block[] finalBlocks = new Block[1]; - finalAggregator.evaluateFinal(finalBlocks, 0); + finalAggregator.evaluateFinal(finalBlocks, 0, driverCtx); try (var finalBlock = (LongBlock) finalBlocks[0]) { assertThat(finalBlock.getLong(0), is(55L)); } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/CanMatchIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/CanMatchIT.java index 35c7d4f67e8cb..3969190630fd3 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/CanMatchIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/CanMatchIT.java @@ -145,31 +145,6 @@ public void testAliasFilters() { .addAlias("employees", "sales", new MatchQueryBuilder("dept", "sales")) ); // employees index -<<<<<<< HEAD - try (EsqlQueryResponse resp = run("from employees | stats count(emp_no)", randomPragmas())) { - assertThat(getValuesList(resp).get(0), equalTo(List.of(6L))); - } - try (EsqlQueryResponse resp = run("from employees | stats avg(salary)", randomPragmas())) { - assertThat(getValuesList(resp).get(0), equalTo(List.of(26.95d))); - } - - try ( - EsqlQueryResponse resp = run( - "from employees | stats count(emp_no)", - randomPragmas(), - new RangeQueryBuilder("hired").lt("2012-04-30") - ) - ) { - assertThat(getValuesList(resp).get(0), equalTo(List.of(4L))); - } - try ( - EsqlQueryResponse resp = run( - "from employees | stats avg(salary)", - randomPragmas(), - new RangeQueryBuilder("hired").lt("2012-04-30") - ) - ) { -======= try (var resp = run("from employees | stats count(emp_no)", randomPragmas())) { assertThat(getValuesList(resp).get(0), equalTo(List.of(6L))); } @@ -181,28 +156,10 @@ public void testAliasFilters() { assertThat(getValuesList(resp).get(0), equalTo(List.of(4L))); } try (var resp = run("from employees | stats avg(salary)", randomPragmas(), new RangeQueryBuilder("hired").lt("2012-04-30"))) { ->>>>>>> main assertThat(getValuesList(resp).get(0), equalTo(List.of(26.65d))); } // match both employees index and engineers alias -> employees -<<<<<<< HEAD - try (EsqlQueryResponse resp = run("from e* | stats count(emp_no)", randomPragmas())) { - assertThat(getValuesList(resp).get(0), equalTo(List.of(6L))); - } - try (EsqlQueryResponse resp = run("from employees | stats avg(salary)", randomPragmas())) { - assertThat(getValuesList(resp).get(0), equalTo(List.of(26.95d))); - } - - try ( - EsqlQueryResponse resp = run("from e* | stats count(emp_no)", randomPragmas(), new RangeQueryBuilder("hired").lt("2012-04-30")) - ) { - assertThat(getValuesList(resp).get(0), equalTo(List.of(4L))); - } - try ( - EsqlQueryResponse resp = run("from e* | stats avg(salary)", randomPragmas(), new RangeQueryBuilder("hired").lt("2012-04-30")) - ) { -======= try (var resp = 
run("from e* | stats count(emp_no)", randomPragmas())) { assertThat(getValuesList(resp).get(0), equalTo(List.of(6L))); } @@ -214,36 +171,10 @@ public void testAliasFilters() { assertThat(getValuesList(resp).get(0), equalTo(List.of(4L))); } try (var resp = run("from e* | stats avg(salary)", randomPragmas(), new RangeQueryBuilder("hired").lt("2012-04-30"))) { ->>>>>>> main assertThat(getValuesList(resp).get(0), equalTo(List.of(26.65d))); } // engineers alias -<<<<<<< HEAD - try (EsqlQueryResponse resp = run("from engineer* | stats count(emp_no)", randomPragmas())) { - assertThat(getValuesList(resp).get(0), equalTo(List.of(4L))); - } - try (EsqlQueryResponse resp = run("from engineer* | stats avg(salary)", randomPragmas())) { - assertThat(getValuesList(resp).get(0), equalTo(List.of(26.65d))); - } - - try ( - EsqlQueryResponse resp = run( - "from engineer* | stats count(emp_no)", - randomPragmas(), - new RangeQueryBuilder("hired").lt("2012-04-30") - ) - ) { - assertThat(getValuesList(resp).get(0), equalTo(List.of(3L))); - } - try ( - EsqlQueryResponse resp = run( - "from engineer* | stats avg(salary)", - randomPragmas(), - new RangeQueryBuilder("hired").lt("2012-04-30") - ) - ) { -======= try (var resp = run("from engineer* | stats count(emp_no)", randomPragmas())) { assertThat(getValuesList(resp).get(0), equalTo(List.of(4L))); } @@ -255,32 +186,10 @@ public void testAliasFilters() { assertThat(getValuesList(resp).get(0), equalTo(List.of(3L))); } try (var resp = run("from engineer* | stats avg(salary)", randomPragmas(), new RangeQueryBuilder("hired").lt("2012-04-30"))) { ->>>>>>> main assertThat(getValuesList(resp).get(0), equalTo(List.of(27.2d))); } // sales alias -<<<<<<< HEAD - try (EsqlQueryResponse resp = run("from sales | stats count(emp_no)", randomPragmas())) { - assertThat(getValuesList(resp).get(0), equalTo(List.of(2L))); - } - try (EsqlQueryResponse resp = run("from sales | stats avg(salary)", randomPragmas())) { - assertThat(getValuesList(resp).get(0), equalTo(List.of(27.55d))); - } - - try ( - EsqlQueryResponse resp = run( - "from sales | stats count(emp_no)", - randomPragmas(), - new RangeQueryBuilder("hired").lt("2012-04-30") - ) - ) { - assertThat(getValuesList(resp).get(0), equalTo(List.of(1L))); - } - try ( - EsqlQueryResponse resp = run("from sales | stats avg(salary)", randomPragmas(), new RangeQueryBuilder("hired").lt("2012-04-30")) - ) { -======= try (var resp = run("from sales | stats count(emp_no)", randomPragmas())) { assertThat(getValuesList(resp).get(0), equalTo(List.of(2L))); } @@ -292,7 +201,6 @@ public void testAliasFilters() { assertThat(getValuesList(resp).get(0), equalTo(List.of(1L))); } try (var resp = run("from sales | stats avg(salary)", randomPragmas(), new RangeQueryBuilder("hired").lt("2012-04-30"))) { ->>>>>>> main assertThat(getValuesList(resp).get(0), equalTo(List.of(25.0d))); } } From e9d662870396731b80b1ca2df78db3dee7fbab71 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 4 Oct 2023 10:47:45 -0400 Subject: [PATCH 3/6] Oh --- ...ountDistinctBooleanAggregatorFunction.java | 123 ++++++++++ ...inctBooleanAggregatorFunctionSupplier.java | 44 ++++ ...inctBooleanGroupingAggregatorFunction.java | 212 +++++++++++++++++ ...untDistinctBytesRefAggregatorFunction.java | 131 +++++++++++ ...nctBytesRefAggregatorFunctionSupplier.java | 47 ++++ ...nctBytesRefGroupingAggregatorFunction.java | 218 ++++++++++++++++++ ...CountDistinctDoubleAggregatorFunction.java | 131 +++++++++++ ...tinctDoubleAggregatorFunctionSupplier.java | 47 ++++ 
...tinctDoubleGroupingAggregatorFunction.java | 216 +++++++++++++++++ .../CountDistinctIntAggregatorFunction.java | 131 +++++++++++ ...DistinctIntAggregatorFunctionSupplier.java | 47 ++++ ...DistinctIntGroupingAggregatorFunction.java | 214 +++++++++++++++++ .../CountDistinctLongAggregatorFunction.java | 131 +++++++++++ ...istinctLongAggregatorFunctionSupplier.java | 47 ++++ ...istinctLongGroupingAggregatorFunction.java | 216 +++++++++++++++++ .../MaxDoubleAggregatorFunction.java | 134 +++++++++++ .../MaxDoubleAggregatorFunctionSupplier.java | 42 ++++ .../MaxDoubleGroupingAggregatorFunction.java | 213 +++++++++++++++++ .../aggregation/MaxIntAggregatorFunction.java | 134 +++++++++++ .../MaxIntAggregatorFunctionSupplier.java | 42 ++++ .../MaxIntGroupingAggregatorFunction.java | 211 +++++++++++++++++ .../MaxLongAggregatorFunction.java | 134 +++++++++++ .../MaxLongAggregatorFunctionSupplier.java | 42 ++++ .../MaxLongGroupingAggregatorFunction.java | 213 +++++++++++++++++ ...luteDeviationDoubleAggregatorFunction.java | 124 ++++++++++ ...ationDoubleAggregatorFunctionSupplier.java | 44 ++++ ...ationDoubleGroupingAggregatorFunction.java | 212 +++++++++++++++++ ...bsoluteDeviationIntAggregatorFunction.java | 124 ++++++++++ ...eviationIntAggregatorFunctionSupplier.java | 44 ++++ ...eviationIntGroupingAggregatorFunction.java | 210 +++++++++++++++++ ...soluteDeviationLongAggregatorFunction.java | 124 ++++++++++ ...viationLongAggregatorFunctionSupplier.java | 44 ++++ ...viationLongGroupingAggregatorFunction.java | 212 +++++++++++++++++ .../MinDoubleAggregatorFunction.java | 134 +++++++++++ .../MinDoubleAggregatorFunctionSupplier.java | 42 ++++ .../MinDoubleGroupingAggregatorFunction.java | 213 +++++++++++++++++ .../aggregation/MinIntAggregatorFunction.java | 134 +++++++++++ .../MinIntAggregatorFunctionSupplier.java | 42 ++++ .../MinIntGroupingAggregatorFunction.java | 211 +++++++++++++++++ .../MinLongAggregatorFunction.java | 134 +++++++++++ .../MinLongAggregatorFunctionSupplier.java | 42 ++++ .../MinLongGroupingAggregatorFunction.java | 213 +++++++++++++++++ .../PercentileDoubleAggregatorFunction.java | 127 ++++++++++ ...ntileDoubleAggregatorFunctionSupplier.java | 47 ++++ ...ntileDoubleGroupingAggregatorFunction.java | 216 +++++++++++++++++ .../PercentileIntAggregatorFunction.java | 127 ++++++++++ ...rcentileIntAggregatorFunctionSupplier.java | 46 ++++ ...rcentileIntGroupingAggregatorFunction.java | 214 +++++++++++++++++ .../PercentileLongAggregatorFunction.java | 127 ++++++++++ ...centileLongAggregatorFunctionSupplier.java | 46 ++++ ...centileLongGroupingAggregatorFunction.java | 216 +++++++++++++++++ .../SumDoubleAggregatorFunction.java | 133 +++++++++++ .../SumDoubleAggregatorFunctionSupplier.java | 42 ++++ .../SumDoubleGroupingAggregatorFunction.java | 216 +++++++++++++++++ .../aggregation/SumIntAggregatorFunction.java | 136 +++++++++++ .../SumIntAggregatorFunctionSupplier.java | 42 ++++ .../SumIntGroupingAggregatorFunction.java | 213 +++++++++++++++++ .../SumLongAggregatorFunction.java | 134 +++++++++++ .../SumLongAggregatorFunctionSupplier.java | 42 ++++ .../SumLongGroupingAggregatorFunction.java | 213 +++++++++++++++++ 60 files changed, 7760 insertions(+) create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionSupplier.java create mode 100644 
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier.java create mode 100644 
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionSupplier.java create mode 100644 
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java new file mode 100644 index 0000000000000..915f22560a0a3 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java @@ -0,0 +1,123 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link CountDistinctBooleanAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class CountDistinctBooleanAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("fbit", ElementType.BOOLEAN), + new IntermediateStateDesc("tbit", ElementType.BOOLEAN) ); + + private final DriverContext driverContext; + + private final CountDistinctBooleanAggregator.SingleState state; + + private final List channels; + + public CountDistinctBooleanAggregatorFunction(DriverContext driverContext, List channels, + CountDistinctBooleanAggregator.SingleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static CountDistinctBooleanAggregatorFunction create(DriverContext driverContext, + List channels) { + return new CountDistinctBooleanAggregatorFunction(driverContext, channels, CountDistinctBooleanAggregator.initSingle()); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + Block uncastBlock = page.getBlock(channels.get(0)); + if (uncastBlock.areAllValuesNull()) { + return; + } + BooleanBlock block = (BooleanBlock) uncastBlock; + BooleanVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(BooleanVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + CountDistinctBooleanAggregator.combine(state, vector.getBoolean(i)); + } + } + + private void addRawBlock(BooleanBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + CountDistinctBooleanAggregator.combine(state, block.getBoolean(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + BooleanVector fbit = page.getBlock(channels.get(0)).asVector(); + BooleanVector tbit = page.getBlock(channels.get(1)).asVector(); + assert fbit.getPositionCount() == 1; + assert fbit.getPositionCount() == tbit.getPositionCount(); + CountDistinctBooleanAggregator.combineIntermediate(state, fbit.getBoolean(0), tbit.getBoolean(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset) { + state.toIntermediate(blocks, offset); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = CountDistinctBooleanAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..87eb2b97974da --- /dev/null +++ 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionSupplier.java @@ -0,0 +1,44 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link CountDistinctBooleanAggregator}. + * This class is generated. Do not edit it. + */ +public final class CountDistinctBooleanAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final BigArrays bigArrays; + + private final List channels; + + public CountDistinctBooleanAggregatorFunctionSupplier(BigArrays bigArrays, + List channels) { + this.bigArrays = bigArrays; + this.channels = channels; + } + + @Override + public CountDistinctBooleanAggregatorFunction aggregator(DriverContext driverContext) { + return CountDistinctBooleanAggregatorFunction.create(driverContext, channels); + } + + @Override + public CountDistinctBooleanGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return CountDistinctBooleanGroupingAggregatorFunction.create(channels, driverContext, bigArrays); + } + + @Override + public String describe() { + return "count_distinct of booleans"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..820cb889d2ec6 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java @@ -0,0 +1,212 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link CountDistinctBooleanAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class CountDistinctBooleanGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("fbit", ElementType.BOOLEAN), + new IntermediateStateDesc("tbit", ElementType.BOOLEAN) ); + + private final CountDistinctBooleanAggregator.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + private final BigArrays bigArrays; + + public CountDistinctBooleanGroupingAggregatorFunction(List channels, + CountDistinctBooleanAggregator.GroupingState state, DriverContext driverContext, + BigArrays bigArrays) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + this.bigArrays = bigArrays; + } + + public static CountDistinctBooleanGroupingAggregatorFunction create(List channels, + DriverContext driverContext, BigArrays bigArrays) { + return new CountDistinctBooleanGroupingAggregatorFunction(channels, CountDistinctBooleanAggregator.initGrouping(bigArrays), driverContext, bigArrays); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + Block uncastValuesBlock = page.getBlock(channels.get(0)); + if (uncastValuesBlock.areAllValuesNull()) { + state.enableGroupIdTracking(seenGroupIds); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + } + }; + } + BooleanBlock valuesBlock = (BooleanBlock) uncastValuesBlock; + BooleanVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, BooleanBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + CountDistinctBooleanAggregator.combine(state, groupId, values.getBoolean(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, BooleanVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + CountDistinctBooleanAggregator.combine(state, 
groupId, values.getBoolean(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BooleanBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + CountDistinctBooleanAggregator.combine(state, groupId, values.getBoolean(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BooleanVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + CountDistinctBooleanAggregator.combine(state, groupId, values.getBoolean(groupPosition + positionOffset)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + BooleanVector fbit = page.getBlock(channels.get(0)).asVector(); + BooleanVector tbit = page.getBlock(channels.get(1)).asVector(); + assert fbit.getPositionCount() == tbit.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + CountDistinctBooleanAggregator.combineIntermediate(state, groupId, fbit.getBoolean(groupPosition + positionOffset), tbit.getBoolean(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + CountDistinctBooleanAggregator.GroupingState inState = ((CountDistinctBooleanGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + CountDistinctBooleanAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = CountDistinctBooleanAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java new file mode 100644 index 0000000000000..538fa5b35d7e5 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java @@ -0,0 +1,131 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link CountDistinctBytesRefAggregator}. + * This class is generated. Do not edit it. + */ +public final class CountDistinctBytesRefAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("hll", ElementType.BYTES_REF) ); + + private final DriverContext driverContext; + + private final HllStates.SingleState state; + + private final List channels; + + private final BigArrays bigArrays; + + private final int precision; + + public CountDistinctBytesRefAggregatorFunction(DriverContext driverContext, + List channels, HllStates.SingleState state, BigArrays bigArrays, int precision) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + this.bigArrays = bigArrays; + this.precision = precision; + } + + public static CountDistinctBytesRefAggregatorFunction create(DriverContext driverContext, + List channels, BigArrays bigArrays, int precision) { + return new CountDistinctBytesRefAggregatorFunction(driverContext, channels, CountDistinctBytesRefAggregator.initSingle(bigArrays, precision), bigArrays, precision); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + Block uncastBlock = page.getBlock(channels.get(0)); + if (uncastBlock.areAllValuesNull()) { + return; + } + BytesRefBlock block = (BytesRefBlock) uncastBlock; + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(BytesRefVector vector) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + CountDistinctBytesRefAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + + private void addRawBlock(BytesRefBlock block) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + 
block.getValueCount(p); + for (int i = start; i < end; i++) { + CountDistinctBytesRefAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); + assert hll.getPositionCount() == 1; + BytesRef scratch = new BytesRef(); + CountDistinctBytesRefAggregator.combineIntermediate(state, hll.getBytesRef(0, scratch)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset) { + state.toIntermediate(blocks, offset); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = CountDistinctBytesRefAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..3f336519ac69f --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionSupplier.java @@ -0,0 +1,47 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link CountDistinctBytesRefAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class CountDistinctBytesRefAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final BigArrays bigArrays; + + private final List channels; + + private final int precision; + + public CountDistinctBytesRefAggregatorFunctionSupplier(BigArrays bigArrays, + List channels, int precision) { + this.bigArrays = bigArrays; + this.channels = channels; + this.precision = precision; + } + + @Override + public CountDistinctBytesRefAggregatorFunction aggregator(DriverContext driverContext) { + return CountDistinctBytesRefAggregatorFunction.create(driverContext, channels, bigArrays, precision); + } + + @Override + public CountDistinctBytesRefGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return CountDistinctBytesRefGroupingAggregatorFunction.create(channels, driverContext, bigArrays, precision); + } + + @Override + public String describe() { + return "count_distinct of bytes"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..36bcb6d145c05 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java @@ -0,0 +1,218 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link CountDistinctBytesRefAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class CountDistinctBytesRefGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("hll", ElementType.BYTES_REF) ); + + private final HllStates.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + private final BigArrays bigArrays; + + private final int precision; + + public CountDistinctBytesRefGroupingAggregatorFunction(List channels, + HllStates.GroupingState state, DriverContext driverContext, BigArrays bigArrays, + int precision) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + this.bigArrays = bigArrays; + this.precision = precision; + } + + public static CountDistinctBytesRefGroupingAggregatorFunction create(List channels, + DriverContext driverContext, BigArrays bigArrays, int precision) { + return new CountDistinctBytesRefGroupingAggregatorFunction(channels, CountDistinctBytesRefAggregator.initGrouping(bigArrays, precision), driverContext, bigArrays, precision); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + Block uncastValuesBlock = page.getBlock(channels.get(0)); + if (uncastValuesBlock.areAllValuesNull()) { + state.enableGroupIdTracking(seenGroupIds); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + } + }; + } + BytesRefBlock valuesBlock = (BytesRefBlock) uncastValuesBlock; + BytesRefVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + CountDistinctBytesRefAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + 
int groupId = Math.toIntExact(groups.getInt(groupPosition)); + CountDistinctBytesRefAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + CountDistinctBytesRefAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + CountDistinctBytesRefAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + CountDistinctBytesRefAggregator.combineIntermediate(state, groupId, hll.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + HllStates.GroupingState inState = ((CountDistinctBytesRefGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + CountDistinctBytesRefAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = CountDistinctBytesRefAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java new file mode 100644 index 0000000000000..341acf4048dbd --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java @@ -0,0 +1,131 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link CountDistinctDoubleAggregator}. + * This class is generated. Do not edit it. + */ +public final class CountDistinctDoubleAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("hll", ElementType.BYTES_REF) ); + + private final DriverContext driverContext; + + private final HllStates.SingleState state; + + private final List channels; + + private final BigArrays bigArrays; + + private final int precision; + + public CountDistinctDoubleAggregatorFunction(DriverContext driverContext, List channels, + HllStates.SingleState state, BigArrays bigArrays, int precision) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + this.bigArrays = bigArrays; + this.precision = precision; + } + + public static CountDistinctDoubleAggregatorFunction create(DriverContext driverContext, + List channels, BigArrays bigArrays, int precision) { + return new CountDistinctDoubleAggregatorFunction(driverContext, channels, CountDistinctDoubleAggregator.initSingle(bigArrays, precision), bigArrays, precision); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + Block uncastBlock = page.getBlock(channels.get(0)); + if (uncastBlock.areAllValuesNull()) { + return; + } + DoubleBlock block = (DoubleBlock) uncastBlock; + DoubleVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(DoubleVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + CountDistinctDoubleAggregator.combine(state, vector.getDouble(i)); + } + } + + private void addRawBlock(DoubleBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); 
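+ // Multi-valued positions: fold every value in [start, end) into the HLL state.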
+ for (int i = start; i < end; i++) { + CountDistinctDoubleAggregator.combine(state, block.getDouble(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); + assert hll.getPositionCount() == 1; + BytesRef scratch = new BytesRef(); + CountDistinctDoubleAggregator.combineIntermediate(state, hll.getBytesRef(0, scratch)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset) { + state.toIntermediate(blocks, offset); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = CountDistinctDoubleAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..ee33aefb5242c --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionSupplier.java @@ -0,0 +1,47 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link CountDistinctDoubleAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class CountDistinctDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final BigArrays bigArrays; + + private final List channels; + + private final int precision; + + public CountDistinctDoubleAggregatorFunctionSupplier(BigArrays bigArrays, List channels, + int precision) { + this.bigArrays = bigArrays; + this.channels = channels; + this.precision = precision; + } + + @Override + public CountDistinctDoubleAggregatorFunction aggregator(DriverContext driverContext) { + return CountDistinctDoubleAggregatorFunction.create(driverContext, channels, bigArrays, precision); + } + + @Override + public CountDistinctDoubleGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return CountDistinctDoubleGroupingAggregatorFunction.create(channels, driverContext, bigArrays, precision); + } + + @Override + public String describe() { + return "count_distinct of doubles"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..54c57c2138505 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java @@ -0,0 +1,216 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link CountDistinctDoubleAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class CountDistinctDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("hll", ElementType.BYTES_REF) ); + + private final HllStates.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + private final BigArrays bigArrays; + + private final int precision; + + public CountDistinctDoubleGroupingAggregatorFunction(List channels, + HllStates.GroupingState state, DriverContext driverContext, BigArrays bigArrays, + int precision) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + this.bigArrays = bigArrays; + this.precision = precision; + } + + public static CountDistinctDoubleGroupingAggregatorFunction create(List channels, + DriverContext driverContext, BigArrays bigArrays, int precision) { + return new CountDistinctDoubleGroupingAggregatorFunction(channels, CountDistinctDoubleAggregator.initGrouping(bigArrays, precision), driverContext, bigArrays, precision); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + Block uncastValuesBlock = page.getBlock(channels.get(0)); + if (uncastValuesBlock.areAllValuesNull()) { + state.enableGroupIdTracking(seenGroupIds); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + } + }; + } + DoubleBlock valuesBlock = (DoubleBlock) uncastValuesBlock; + DoubleVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, DoubleBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + CountDistinctDoubleAggregator.combine(state, groupId, values.getDouble(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, DoubleVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + 
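+ // Both groups and values are dense vectors here, so each group position pairs with the single value at groupPosition + positionOffset.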
CountDistinctDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + CountDistinctDoubleAggregator.combine(state, groupId, values.getDouble(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, DoubleVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + CountDistinctDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + CountDistinctDoubleAggregator.combineIntermediate(state, groupId, hll.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + HllStates.GroupingState inState = ((CountDistinctDoubleGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + CountDistinctDoubleAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = CountDistinctDoubleAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java new file mode 100644 index 0000000000000..a35d00e25d171 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java @@ -0,0 +1,131 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link CountDistinctIntAggregator}. + * This class is generated. Do not edit it. + */ +public final class CountDistinctIntAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("hll", ElementType.BYTES_REF) ); + + private final DriverContext driverContext; + + private final HllStates.SingleState state; + + private final List channels; + + private final BigArrays bigArrays; + + private final int precision; + + public CountDistinctIntAggregatorFunction(DriverContext driverContext, List channels, + HllStates.SingleState state, BigArrays bigArrays, int precision) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + this.bigArrays = bigArrays; + this.precision = precision; + } + + public static CountDistinctIntAggregatorFunction create(DriverContext driverContext, + List channels, BigArrays bigArrays, int precision) { + return new CountDistinctIntAggregatorFunction(driverContext, channels, CountDistinctIntAggregator.initSingle(bigArrays, precision), bigArrays, precision); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + Block uncastBlock = page.getBlock(channels.get(0)); + if (uncastBlock.areAllValuesNull()) { + return; + } + IntBlock block = (IntBlock) uncastBlock; + IntVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(IntVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + CountDistinctIntAggregator.combine(state, vector.getInt(i)); + } + } + + private void addRawBlock(IntBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + CountDistinctIntAggregator.combine(state, block.getInt(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + 
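+ // The intermediate state arrives as a single-position block holding one serialized HLL sketch.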
assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); + assert hll.getPositionCount() == 1; + BytesRef scratch = new BytesRef(); + CountDistinctIntAggregator.combineIntermediate(state, hll.getBytesRef(0, scratch)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset) { + state.toIntermediate(blocks, offset); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = CountDistinctIntAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..315df36fcaa1b --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionSupplier.java @@ -0,0 +1,47 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link CountDistinctIntAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class CountDistinctIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final BigArrays bigArrays; + + private final List channels; + + private final int precision; + + public CountDistinctIntAggregatorFunctionSupplier(BigArrays bigArrays, List channels, + int precision) { + this.bigArrays = bigArrays; + this.channels = channels; + this.precision = precision; + } + + @Override + public CountDistinctIntAggregatorFunction aggregator(DriverContext driverContext) { + return CountDistinctIntAggregatorFunction.create(driverContext, channels, bigArrays, precision); + } + + @Override + public CountDistinctIntGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return CountDistinctIntGroupingAggregatorFunction.create(channels, driverContext, bigArrays, precision); + } + + @Override + public String describe() { + return "count_distinct of ints"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..acaff7b8e96d7 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java @@ -0,0 +1,214 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link CountDistinctIntAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class CountDistinctIntGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("hll", ElementType.BYTES_REF) ); + + private final HllStates.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + private final BigArrays bigArrays; + + private final int precision; + + public CountDistinctIntGroupingAggregatorFunction(List channels, + HllStates.GroupingState state, DriverContext driverContext, BigArrays bigArrays, + int precision) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + this.bigArrays = bigArrays; + this.precision = precision; + } + + public static CountDistinctIntGroupingAggregatorFunction create(List channels, + DriverContext driverContext, BigArrays bigArrays, int precision) { + return new CountDistinctIntGroupingAggregatorFunction(channels, CountDistinctIntAggregator.initGrouping(bigArrays, precision), driverContext, bigArrays, precision); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + Block uncastValuesBlock = page.getBlock(channels.get(0)); + if (uncastValuesBlock.areAllValuesNull()) { + state.enableGroupIdTracking(seenGroupIds); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + } + }; + } + IntBlock valuesBlock = (IntBlock) uncastValuesBlock; + IntVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, IntBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + CountDistinctIntAggregator.combine(state, groupId, values.getInt(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, IntVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + CountDistinctIntAggregator.combine(state, groupId, values.getInt(groupPosition 
+ positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, IntBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + CountDistinctIntAggregator.combine(state, groupId, values.getInt(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, IntVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + CountDistinctIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + CountDistinctIntAggregator.combineIntermediate(state, groupId, hll.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + HllStates.GroupingState inState = ((CountDistinctIntGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + CountDistinctIntAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = CountDistinctIntAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java new file mode 100644 index 0000000000000..b8243724e2f4a --- /dev/null +++ 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java @@ -0,0 +1,131 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link CountDistinctLongAggregator}. + * This class is generated. Do not edit it. + */ +public final class CountDistinctLongAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("hll", ElementType.BYTES_REF) ); + + private final DriverContext driverContext; + + private final HllStates.SingleState state; + + private final List channels; + + private final BigArrays bigArrays; + + private final int precision; + + public CountDistinctLongAggregatorFunction(DriverContext driverContext, List channels, + HllStates.SingleState state, BigArrays bigArrays, int precision) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + this.bigArrays = bigArrays; + this.precision = precision; + } + + public static CountDistinctLongAggregatorFunction create(DriverContext driverContext, + List channels, BigArrays bigArrays, int precision) { + return new CountDistinctLongAggregatorFunction(driverContext, channels, CountDistinctLongAggregator.initSingle(bigArrays, precision), bigArrays, precision); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + Block uncastBlock = page.getBlock(channels.get(0)); + if (uncastBlock.areAllValuesNull()) { + return; + } + LongBlock block = (LongBlock) uncastBlock; + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(LongVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + CountDistinctLongAggregator.combine(state, vector.getLong(i)); + } + } + + private void addRawBlock(LongBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + CountDistinctLongAggregator.combine(state, block.getLong(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + BytesRefVector hll = 
page.getBlock(channels.get(0)).asVector(); + assert hll.getPositionCount() == 1; + BytesRef scratch = new BytesRef(); + CountDistinctLongAggregator.combineIntermediate(state, hll.getBytesRef(0, scratch)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset) { + state.toIntermediate(blocks, offset); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = CountDistinctLongAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..6069b5744b31b --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionSupplier.java @@ -0,0 +1,47 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link CountDistinctLongAggregator}. + * This class is generated. Do not edit it. + */ +public final class CountDistinctLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final BigArrays bigArrays; + + private final List channels; + + private final int precision; + + public CountDistinctLongAggregatorFunctionSupplier(BigArrays bigArrays, List channels, + int precision) { + this.bigArrays = bigArrays; + this.channels = channels; + this.precision = precision; + } + + @Override + public CountDistinctLongAggregatorFunction aggregator(DriverContext driverContext) { + return CountDistinctLongAggregatorFunction.create(driverContext, channels, bigArrays, precision); + } + + @Override + public CountDistinctLongGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return CountDistinctLongGroupingAggregatorFunction.create(channels, driverContext, bigArrays, precision); + } + + @Override + public String describe() { + return "count_distinct of longs"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..06165026a5d16 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java @@ -0,0 +1,216 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. 
Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link CountDistinctLongAggregator}. + * This class is generated. Do not edit it. + */ +public final class CountDistinctLongGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("hll", ElementType.BYTES_REF) ); + + private final HllStates.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + private final BigArrays bigArrays; + + private final int precision; + + public CountDistinctLongGroupingAggregatorFunction(List channels, + HllStates.GroupingState state, DriverContext driverContext, BigArrays bigArrays, + int precision) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + this.bigArrays = bigArrays; + this.precision = precision; + } + + public static CountDistinctLongGroupingAggregatorFunction create(List channels, + DriverContext driverContext, BigArrays bigArrays, int precision) { + return new CountDistinctLongGroupingAggregatorFunction(channels, CountDistinctLongAggregator.initGrouping(bigArrays, precision), driverContext, bigArrays, precision); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + Block uncastValuesBlock = page.getBlock(channels.get(0)); + if (uncastValuesBlock.areAllValuesNull()) { + state.enableGroupIdTracking(seenGroupIds); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + } + }; + } + LongBlock valuesBlock = (LongBlock) uncastValuesBlock; + LongVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + 
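+ // Vector group ids below take the same dense fast path over valuesVector.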
@Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + CountDistinctLongAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + CountDistinctLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + CountDistinctLongAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + CountDistinctLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + CountDistinctLongAggregator.combineIntermediate(state, groupId, hll.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + HllStates.GroupingState inState = ((CountDistinctLongGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + 
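+ // Merge the incoming function's per-position HLL sketch into this function's sketch for groupId.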
CountDistinctLongAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = CountDistinctLongAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java new file mode 100644 index 0000000000000..7078f98c62466 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java @@ -0,0 +1,134 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link MaxDoubleAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class MaxDoubleAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("max", ElementType.DOUBLE), + new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); + + private final DriverContext driverContext; + + private final DoubleState state; + + private final List channels; + + public MaxDoubleAggregatorFunction(DriverContext driverContext, List channels, + DoubleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static MaxDoubleAggregatorFunction create(DriverContext driverContext, + List channels) { + return new MaxDoubleAggregatorFunction(driverContext, channels, new DoubleState(MaxDoubleAggregator.init())); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + Block uncastBlock = page.getBlock(channels.get(0)); + if (uncastBlock.areAllValuesNull()) { + return; + } + DoubleBlock block = (DoubleBlock) uncastBlock; + DoubleVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(DoubleVector vector) { + state.seen(true); + for (int i = 0; i < vector.getPositionCount(); i++) { + state.doubleValue(MaxDoubleAggregator.combine(state.doubleValue(), vector.getDouble(i))); + } + } + + private void addRawBlock(DoubleBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + state.seen(true); + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + state.doubleValue(MaxDoubleAggregator.combine(state.doubleValue(), block.getDouble(i))); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + DoubleVector max = page.getBlock(channels.get(0)).asVector(); + BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + assert max.getPositionCount() == 1; + assert max.getPositionCount() == seen.getPositionCount(); + if (seen.getBoolean(0)) { + state.doubleValue(MaxDoubleAggregator.combine(state.doubleValue(), max.getDouble(0))); + state.seen(true); + } + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset) { + state.toIntermediate(blocks, offset); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + if (state.seen() == false) { + blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); + return; + } + blocks[offset] = DoubleBlock.newConstantBlockWith(state.doubleValue(), 1, driverContext.blockFactory()); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionSupplier.java new file mode 100644 
index 0000000000000..850fff9a946ba --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunctionSupplier.java @@ -0,0 +1,42 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link MaxDoubleAggregator}. + * This class is generated. Do not edit it. + */ +public final class MaxDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final BigArrays bigArrays; + + private final List channels; + + public MaxDoubleAggregatorFunctionSupplier(BigArrays bigArrays, List channels) { + this.bigArrays = bigArrays; + this.channels = channels; + } + + @Override + public MaxDoubleAggregatorFunction aggregator(DriverContext driverContext) { + return MaxDoubleAggregatorFunction.create(driverContext, channels); + } + + @Override + public MaxDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return MaxDoubleGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "max of doubles"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..b761bb66b8edb --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java @@ -0,0 +1,213 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link MaxDoubleAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class MaxDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("max", ElementType.DOUBLE), + new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); + + private final DoubleArrayState state; + + private final List channels; + + private final DriverContext driverContext; + + public MaxDoubleGroupingAggregatorFunction(List channels, DoubleArrayState state, + DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static MaxDoubleGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new MaxDoubleGroupingAggregatorFunction(channels, new DoubleArrayState(driverContext.bigArrays(), MaxDoubleAggregator.init()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + Block uncastValuesBlock = page.getBlock(channels.get(0)); + if (uncastValuesBlock.areAllValuesNull()) { + state.enableGroupIdTracking(seenGroupIds); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + } + }; + } + DoubleBlock valuesBlock = (DoubleBlock) uncastValuesBlock; + DoubleVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, DoubleBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + state.set(groupId, MaxDoubleAggregator.combine(state.getOrDefault(groupId), values.getDouble(v))); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, DoubleVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + state.set(groupId, MaxDoubleAggregator.combine(state.getOrDefault(groupId), values.getDouble(groupPosition + positionOffset))); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values) { + for (int 
groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + state.set(groupId, MaxDoubleAggregator.combine(state.getOrDefault(groupId), values.getDouble(v))); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, DoubleVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + state.set(groupId, MaxDoubleAggregator.combine(state.getOrDefault(groupId), values.getDouble(groupPosition + positionOffset))); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + DoubleVector max = page.getBlock(channels.get(0)).asVector(); + BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + assert max.getPositionCount() == seen.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (seen.getBoolean(groupPosition + positionOffset)) { + state.set(groupId, MaxDoubleAggregator.combine(state.getOrDefault(groupId), max.getDouble(groupPosition + positionOffset))); + } + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + DoubleArrayState inState = ((MaxDoubleGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + if (inState.hasValue(position)) { + state.set(groupId, MaxDoubleAggregator.combine(state.getOrDefault(groupId), inState.get(position))); + } + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = state.toValuesBlock(selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java 
new file mode 100644 index 0000000000000..371793fe1f7e4 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java @@ -0,0 +1,134 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link MaxIntAggregator}. + * This class is generated. Do not edit it. + */ +public final class MaxIntAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("max", ElementType.INT), + new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); + + private final DriverContext driverContext; + + private final IntState state; + + private final List channels; + + public MaxIntAggregatorFunction(DriverContext driverContext, List channels, + IntState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static MaxIntAggregatorFunction create(DriverContext driverContext, + List channels) { + return new MaxIntAggregatorFunction(driverContext, channels, new IntState(MaxIntAggregator.init())); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + Block uncastBlock = page.getBlock(channels.get(0)); + if (uncastBlock.areAllValuesNull()) { + return; + } + IntBlock block = (IntBlock) uncastBlock; + IntVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(IntVector vector) { + state.seen(true); + for (int i = 0; i < vector.getPositionCount(); i++) { + state.intValue(MaxIntAggregator.combine(state.intValue(), vector.getInt(i))); + } + } + + private void addRawBlock(IntBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + state.seen(true); + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + state.intValue(MaxIntAggregator.combine(state.intValue(), block.getInt(i))); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + IntVector max = page.getBlock(channels.get(0)).asVector(); + BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + assert max.getPositionCount() == 1; + assert max.getPositionCount() == seen.getPositionCount(); + if (seen.getBoolean(0)) { + 
state.intValue(MaxIntAggregator.combine(state.intValue(), max.getInt(0))); + state.seen(true); + } + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset) { + state.toIntermediate(blocks, offset); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + if (state.seen() == false) { + blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); + return; + } + blocks[offset] = IntBlock.newConstantBlockWith(state.intValue(), 1, driverContext.blockFactory()); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..cfac1c68fc065 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunctionSupplier.java @@ -0,0 +1,42 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link MaxIntAggregator}. + * This class is generated. Do not edit it. + */ +public final class MaxIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final BigArrays bigArrays; + + private final List channels; + + public MaxIntAggregatorFunctionSupplier(BigArrays bigArrays, List channels) { + this.bigArrays = bigArrays; + this.channels = channels; + } + + @Override + public MaxIntAggregatorFunction aggregator(DriverContext driverContext) { + return MaxIntAggregatorFunction.create(driverContext, channels); + } + + @Override + public MaxIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return MaxIntGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "max of ints"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..a7ef8d5573fdc --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java @@ -0,0 +1,211 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link MaxIntAggregator}. + * This class is generated. Do not edit it. + */ +public final class MaxIntGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("max", ElementType.INT), + new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); + + private final IntArrayState state; + + private final List channels; + + private final DriverContext driverContext; + + public MaxIntGroupingAggregatorFunction(List channels, IntArrayState state, + DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static MaxIntGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new MaxIntGroupingAggregatorFunction(channels, new IntArrayState(driverContext.bigArrays(), MaxIntAggregator.init()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + Block uncastValuesBlock = page.getBlock(channels.get(0)); + if (uncastValuesBlock.areAllValuesNull()) { + state.enableGroupIdTracking(seenGroupIds); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + } + }; + } + IntBlock valuesBlock = (IntBlock) uncastValuesBlock; + IntVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, IntBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + 
values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + state.set(groupId, MaxIntAggregator.combine(state.getOrDefault(groupId), values.getInt(v))); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, IntVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + state.set(groupId, MaxIntAggregator.combine(state.getOrDefault(groupId), values.getInt(groupPosition + positionOffset))); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, IntBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + state.set(groupId, MaxIntAggregator.combine(state.getOrDefault(groupId), values.getInt(v))); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, IntVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + state.set(groupId, MaxIntAggregator.combine(state.getOrDefault(groupId), values.getInt(groupPosition + positionOffset))); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + IntVector max = page.getBlock(channels.get(0)).asVector(); + BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + assert max.getPositionCount() == seen.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (seen.getBoolean(groupPosition + positionOffset)) { + state.set(groupId, MaxIntAggregator.combine(state.getOrDefault(groupId), max.getInt(groupPosition + positionOffset))); + } + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + IntArrayState inState = ((MaxIntGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + if (inState.hasValue(position)) { + state.set(groupId, MaxIntAggregator.combine(state.getOrDefault(groupId), inState.get(position))); + } + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, 
IntVector selected, + DriverContext driverContext) { + blocks[offset] = state.toValuesBlock(selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java new file mode 100644 index 0000000000000..e48353b4e6a72 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java @@ -0,0 +1,134 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link MaxLongAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class MaxLongAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("max", ElementType.LONG), + new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); + + private final DriverContext driverContext; + + private final LongState state; + + private final List channels; + + public MaxLongAggregatorFunction(DriverContext driverContext, List channels, + LongState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static MaxLongAggregatorFunction create(DriverContext driverContext, + List channels) { + return new MaxLongAggregatorFunction(driverContext, channels, new LongState(MaxLongAggregator.init())); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + Block uncastBlock = page.getBlock(channels.get(0)); + if (uncastBlock.areAllValuesNull()) { + return; + } + LongBlock block = (LongBlock) uncastBlock; + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(LongVector vector) { + state.seen(true); + for (int i = 0; i < vector.getPositionCount(); i++) { + state.longValue(MaxLongAggregator.combine(state.longValue(), vector.getLong(i))); + } + } + + private void addRawBlock(LongBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + state.seen(true); + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + state.longValue(MaxLongAggregator.combine(state.longValue(), block.getLong(i))); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + LongVector max = page.getBlock(channels.get(0)).asVector(); + BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + assert max.getPositionCount() == 1; + assert max.getPositionCount() == seen.getPositionCount(); + if (seen.getBoolean(0)) { + state.longValue(MaxLongAggregator.combine(state.longValue(), max.getLong(0))); + state.seen(true); + } + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset) { + state.toIntermediate(blocks, offset); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + if (state.seen() == false) { + blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); + return; + } + blocks[offset] = LongBlock.newConstantBlockWith(state.longValue(), 1, driverContext.blockFactory()); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..5f1b1d1e7dc82 --- /dev/null +++ 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionSupplier.java @@ -0,0 +1,42 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link MaxLongAggregator}. + * This class is generated. Do not edit it. + */ +public final class MaxLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final BigArrays bigArrays; + + private final List channels; + + public MaxLongAggregatorFunctionSupplier(BigArrays bigArrays, List channels) { + this.bigArrays = bigArrays; + this.channels = channels; + } + + @Override + public MaxLongAggregatorFunction aggregator(DriverContext driverContext) { + return MaxLongAggregatorFunction.create(driverContext, channels); + } + + @Override + public MaxLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return MaxLongGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "max of longs"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..d224ecd2d293e --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java @@ -0,0 +1,213 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link MaxLongAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class MaxLongGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("max", ElementType.LONG), + new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); + + private final LongArrayState state; + + private final List channels; + + private final DriverContext driverContext; + + public MaxLongGroupingAggregatorFunction(List channels, LongArrayState state, + DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static MaxLongGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new MaxLongGroupingAggregatorFunction(channels, new LongArrayState(driverContext.bigArrays(), MaxLongAggregator.init()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + Block uncastValuesBlock = page.getBlock(channels.get(0)); + if (uncastValuesBlock.areAllValuesNull()) { + state.enableGroupIdTracking(seenGroupIds); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + } + }; + } + LongBlock valuesBlock = (LongBlock) uncastValuesBlock; + LongVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + state.set(groupId, MaxLongAggregator.combine(state.getOrDefault(groupId), values.getLong(v))); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + state.set(groupId, MaxLongAggregator.combine(state.getOrDefault(groupId), values.getLong(groupPosition + positionOffset))); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < 
groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + state.set(groupId, MaxLongAggregator.combine(state.getOrDefault(groupId), values.getLong(v))); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + state.set(groupId, MaxLongAggregator.combine(state.getOrDefault(groupId), values.getLong(groupPosition + positionOffset))); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + LongVector max = page.getBlock(channels.get(0)).asVector(); + BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + assert max.getPositionCount() == seen.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (seen.getBoolean(groupPosition + positionOffset)) { + state.set(groupId, MaxLongAggregator.combine(state.getOrDefault(groupId), max.getLong(groupPosition + positionOffset))); + } + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + LongArrayState inState = ((MaxLongGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + if (inState.hasValue(position)) { + state.set(groupId, MaxLongAggregator.combine(state.getOrDefault(groupId), inState.get(position))); + } + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = state.toValuesBlock(selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java new file mode 
100644 index 0000000000000..13db172741152 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java @@ -0,0 +1,124 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link MedianAbsoluteDeviationDoubleAggregator}. + * This class is generated. Do not edit it. + */ +public final class MedianAbsoluteDeviationDoubleAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("quart", ElementType.BYTES_REF) ); + + private final DriverContext driverContext; + + private final QuantileStates.SingleState state; + + private final List channels; + + public MedianAbsoluteDeviationDoubleAggregatorFunction(DriverContext driverContext, + List channels, QuantileStates.SingleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static MedianAbsoluteDeviationDoubleAggregatorFunction create(DriverContext driverContext, + List channels) { + return new MedianAbsoluteDeviationDoubleAggregatorFunction(driverContext, channels, MedianAbsoluteDeviationDoubleAggregator.initSingle()); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + Block uncastBlock = page.getBlock(channels.get(0)); + if (uncastBlock.areAllValuesNull()) { + return; + } + DoubleBlock block = (DoubleBlock) uncastBlock; + DoubleVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(DoubleVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + MedianAbsoluteDeviationDoubleAggregator.combine(state, vector.getDouble(i)); + } + } + + private void addRawBlock(DoubleBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + MedianAbsoluteDeviationDoubleAggregator.combine(state, block.getDouble(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + assert quart.getPositionCount() == 1; + BytesRef scratch = new BytesRef(); + 
MedianAbsoluteDeviationDoubleAggregator.combineIntermediate(state, quart.getBytesRef(0, scratch)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset) { + state.toIntermediate(blocks, offset); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = MedianAbsoluteDeviationDoubleAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..44cea6eab23bb --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier.java @@ -0,0 +1,44 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link MedianAbsoluteDeviationDoubleAggregator}. + * This class is generated. Do not edit it. + */ +public final class MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final BigArrays bigArrays; + + private final List channels; + + public MedianAbsoluteDeviationDoubleAggregatorFunctionSupplier(BigArrays bigArrays, + List channels) { + this.bigArrays = bigArrays; + this.channels = channels; + } + + @Override + public MedianAbsoluteDeviationDoubleAggregatorFunction aggregator(DriverContext driverContext) { + return MedianAbsoluteDeviationDoubleAggregatorFunction.create(driverContext, channels); + } + + @Override + public MedianAbsoluteDeviationDoubleGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.create(channels, driverContext, bigArrays); + } + + @Override + public String describe() { + return "median_absolute_deviation of doubles"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..5707f9942d0f8 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java @@ -0,0 +1,212 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. 
Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link MedianAbsoluteDeviationDoubleAggregator}. + * This class is generated. Do not edit it. + */ +public final class MedianAbsoluteDeviationDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("quart", ElementType.BYTES_REF) ); + + private final QuantileStates.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + private final BigArrays bigArrays; + + public MedianAbsoluteDeviationDoubleGroupingAggregatorFunction(List channels, + QuantileStates.GroupingState state, DriverContext driverContext, BigArrays bigArrays) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + this.bigArrays = bigArrays; + } + + public static MedianAbsoluteDeviationDoubleGroupingAggregatorFunction create( + List channels, DriverContext driverContext, BigArrays bigArrays) { + return new MedianAbsoluteDeviationDoubleGroupingAggregatorFunction(channels, MedianAbsoluteDeviationDoubleAggregator.initGrouping(bigArrays), driverContext, bigArrays); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + Block uncastValuesBlock = page.getBlock(channels.get(0)); + if (uncastValuesBlock.areAllValuesNull()) { + state.enableGroupIdTracking(seenGroupIds); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + } + }; + } + DoubleBlock valuesBlock = (DoubleBlock) uncastValuesBlock; + DoubleVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void 
add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, DoubleBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + MedianAbsoluteDeviationDoubleAggregator.combine(state, groupId, values.getDouble(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, DoubleVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + MedianAbsoluteDeviationDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + MedianAbsoluteDeviationDoubleAggregator.combine(state, groupId, values.getDouble(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, DoubleVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + MedianAbsoluteDeviationDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + MedianAbsoluteDeviationDoubleAggregator.combineIntermediate(state, groupId, quart.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + QuantileStates.GroupingState inState = ((MedianAbsoluteDeviationDoubleGroupingAggregatorFunction) input).state; + 
state.enableGroupIdTracking(new SeenGroupIds.Empty()); + MedianAbsoluteDeviationDoubleAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = MedianAbsoluteDeviationDoubleAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java new file mode 100644 index 0000000000000..eb74abde9cd3f --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java @@ -0,0 +1,124 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link MedianAbsoluteDeviationIntAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class MedianAbsoluteDeviationIntAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("quart", ElementType.BYTES_REF) ); + + private final DriverContext driverContext; + + private final QuantileStates.SingleState state; + + private final List channels; + + public MedianAbsoluteDeviationIntAggregatorFunction(DriverContext driverContext, + List channels, QuantileStates.SingleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static MedianAbsoluteDeviationIntAggregatorFunction create(DriverContext driverContext, + List channels) { + return new MedianAbsoluteDeviationIntAggregatorFunction(driverContext, channels, MedianAbsoluteDeviationIntAggregator.initSingle()); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + Block uncastBlock = page.getBlock(channels.get(0)); + if (uncastBlock.areAllValuesNull()) { + return; + } + IntBlock block = (IntBlock) uncastBlock; + IntVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(IntVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + MedianAbsoluteDeviationIntAggregator.combine(state, vector.getInt(i)); + } + } + + private void addRawBlock(IntBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + MedianAbsoluteDeviationIntAggregator.combine(state, block.getInt(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + assert quart.getPositionCount() == 1; + BytesRef scratch = new BytesRef(); + MedianAbsoluteDeviationIntAggregator.combineIntermediate(state, quart.getBytesRef(0, scratch)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset) { + state.toIntermediate(blocks, offset); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = MedianAbsoluteDeviationIntAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..c00fb4b0c7b5e --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunctionSupplier.java @@ -0,0 +1,44 @@ +// Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link MedianAbsoluteDeviationIntAggregator}. + * This class is generated. Do not edit it. + */ +public final class MedianAbsoluteDeviationIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final BigArrays bigArrays; + + private final List channels; + + public MedianAbsoluteDeviationIntAggregatorFunctionSupplier(BigArrays bigArrays, + List channels) { + this.bigArrays = bigArrays; + this.channels = channels; + } + + @Override + public MedianAbsoluteDeviationIntAggregatorFunction aggregator(DriverContext driverContext) { + return MedianAbsoluteDeviationIntAggregatorFunction.create(driverContext, channels); + } + + @Override + public MedianAbsoluteDeviationIntGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return MedianAbsoluteDeviationIntGroupingAggregatorFunction.create(channels, driverContext, bigArrays); + } + + @Override + public String describe() { + return "median_absolute_deviation of ints"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..b8f08efffd7d1 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java @@ -0,0 +1,210 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link MedianAbsoluteDeviationIntAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class MedianAbsoluteDeviationIntGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("quart", ElementType.BYTES_REF) ); + + private final QuantileStates.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + private final BigArrays bigArrays; + + public MedianAbsoluteDeviationIntGroupingAggregatorFunction(List channels, + QuantileStates.GroupingState state, DriverContext driverContext, BigArrays bigArrays) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + this.bigArrays = bigArrays; + } + + public static MedianAbsoluteDeviationIntGroupingAggregatorFunction create(List channels, + DriverContext driverContext, BigArrays bigArrays) { + return new MedianAbsoluteDeviationIntGroupingAggregatorFunction(channels, MedianAbsoluteDeviationIntAggregator.initGrouping(bigArrays), driverContext, bigArrays); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + Block uncastValuesBlock = page.getBlock(channels.get(0)); + if (uncastValuesBlock.areAllValuesNull()) { + state.enableGroupIdTracking(seenGroupIds); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + } + }; + } + IntBlock valuesBlock = (IntBlock) uncastValuesBlock; + IntVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, IntBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + MedianAbsoluteDeviationIntAggregator.combine(state, groupId, values.getInt(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, IntVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + MedianAbsoluteDeviationIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); + } + } + + 
private void addRawInput(int positionOffset, IntBlock groups, IntBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + MedianAbsoluteDeviationIntAggregator.combine(state, groupId, values.getInt(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, IntVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + MedianAbsoluteDeviationIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + MedianAbsoluteDeviationIntAggregator.combineIntermediate(state, groupId, quart.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + QuantileStates.GroupingState inState = ((MedianAbsoluteDeviationIntGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + MedianAbsoluteDeviationIntAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = MedianAbsoluteDeviationIntAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java new file mode 100644 index 
0000000000000..1fdec90f8a242 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java @@ -0,0 +1,124 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link MedianAbsoluteDeviationLongAggregator}. + * This class is generated. Do not edit it. + */ +public final class MedianAbsoluteDeviationLongAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("quart", ElementType.BYTES_REF) ); + + private final DriverContext driverContext; + + private final QuantileStates.SingleState state; + + private final List channels; + + public MedianAbsoluteDeviationLongAggregatorFunction(DriverContext driverContext, + List channels, QuantileStates.SingleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static MedianAbsoluteDeviationLongAggregatorFunction create(DriverContext driverContext, + List channels) { + return new MedianAbsoluteDeviationLongAggregatorFunction(driverContext, channels, MedianAbsoluteDeviationLongAggregator.initSingle()); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + Block uncastBlock = page.getBlock(channels.get(0)); + if (uncastBlock.areAllValuesNull()) { + return; + } + LongBlock block = (LongBlock) uncastBlock; + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(LongVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + MedianAbsoluteDeviationLongAggregator.combine(state, vector.getLong(i)); + } + } + + private void addRawBlock(LongBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + MedianAbsoluteDeviationLongAggregator.combine(state, block.getLong(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + assert quart.getPositionCount() == 1; + BytesRef scratch = new BytesRef(); + 
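    // This non-grouping aggregator has a single intermediate column ("quart") holding one serialized
    // quantile sketch, hence the positionCount == 1 assertion; the combineIntermediate call that follows
    // merges that value into `state`.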
MedianAbsoluteDeviationLongAggregator.combineIntermediate(state, quart.getBytesRef(0, scratch)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset) { + state.toIntermediate(blocks, offset); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = MedianAbsoluteDeviationLongAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..71b0488488227 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionSupplier.java @@ -0,0 +1,44 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link MedianAbsoluteDeviationLongAggregator}. + * This class is generated. Do not edit it. + */ +public final class MedianAbsoluteDeviationLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final BigArrays bigArrays; + + private final List channels; + + public MedianAbsoluteDeviationLongAggregatorFunctionSupplier(BigArrays bigArrays, + List channels) { + this.bigArrays = bigArrays; + this.channels = channels; + } + + @Override + public MedianAbsoluteDeviationLongAggregatorFunction aggregator(DriverContext driverContext) { + return MedianAbsoluteDeviationLongAggregatorFunction.create(driverContext, channels); + } + + @Override + public MedianAbsoluteDeviationLongGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return MedianAbsoluteDeviationLongGroupingAggregatorFunction.create(channels, driverContext, bigArrays); + } + + @Override + public String describe() { + return "median_absolute_deviation of longs"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..74ee25c27c86a --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java @@ -0,0 +1,212 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. 
Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link MedianAbsoluteDeviationLongAggregator}. + * This class is generated. Do not edit it. + */ +public final class MedianAbsoluteDeviationLongGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("quart", ElementType.BYTES_REF) ); + + private final QuantileStates.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + private final BigArrays bigArrays; + + public MedianAbsoluteDeviationLongGroupingAggregatorFunction(List channels, + QuantileStates.GroupingState state, DriverContext driverContext, BigArrays bigArrays) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + this.bigArrays = bigArrays; + } + + public static MedianAbsoluteDeviationLongGroupingAggregatorFunction create(List channels, + DriverContext driverContext, BigArrays bigArrays) { + return new MedianAbsoluteDeviationLongGroupingAggregatorFunction(channels, MedianAbsoluteDeviationLongAggregator.initGrouping(bigArrays), driverContext, bigArrays); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + Block uncastValuesBlock = page.getBlock(channels.get(0)); + if (uncastValuesBlock.areAllValuesNull()) { + state.enableGroupIdTracking(seenGroupIds); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + } + }; + } + LongBlock valuesBlock = (LongBlock) uncastValuesBlock; + LongVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, 
IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + MedianAbsoluteDeviationLongAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + MedianAbsoluteDeviationLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + MedianAbsoluteDeviationLongAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + MedianAbsoluteDeviationLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + MedianAbsoluteDeviationLongAggregator.combineIntermediate(state, groupId, quart.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + QuantileStates.GroupingState inState = ((MedianAbsoluteDeviationLongGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + 
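    // Row-level merge path: position `position` of the other function's grouping state (`inState`) is
    // combined into this state's bucket for `groupId` by the combineStates call below.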
MedianAbsoluteDeviationLongAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = MedianAbsoluteDeviationLongAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java new file mode 100644 index 0000000000000..83fa4ac1d9a90 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java @@ -0,0 +1,134 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link MinDoubleAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class MinDoubleAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("min", ElementType.DOUBLE), + new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); + + private final DriverContext driverContext; + + private final DoubleState state; + + private final List channels; + + public MinDoubleAggregatorFunction(DriverContext driverContext, List channels, + DoubleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static MinDoubleAggregatorFunction create(DriverContext driverContext, + List channels) { + return new MinDoubleAggregatorFunction(driverContext, channels, new DoubleState(MinDoubleAggregator.init())); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + Block uncastBlock = page.getBlock(channels.get(0)); + if (uncastBlock.areAllValuesNull()) { + return; + } + DoubleBlock block = (DoubleBlock) uncastBlock; + DoubleVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(DoubleVector vector) { + state.seen(true); + for (int i = 0; i < vector.getPositionCount(); i++) { + state.doubleValue(MinDoubleAggregator.combine(state.doubleValue(), vector.getDouble(i))); + } + } + + private void addRawBlock(DoubleBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + state.seen(true); + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + state.doubleValue(MinDoubleAggregator.combine(state.doubleValue(), block.getDouble(i))); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + DoubleVector min = page.getBlock(channels.get(0)).asVector(); + BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + assert min.getPositionCount() == 1; + assert min.getPositionCount() == seen.getPositionCount(); + if (seen.getBoolean(0)) { + state.doubleValue(MinDoubleAggregator.combine(state.doubleValue(), min.getDouble(0))); + state.seen(true); + } + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset) { + state.toIntermediate(blocks, offset); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + if (state.seen() == false) { + blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); + return; + } + blocks[offset] = DoubleBlock.newConstantBlockWith(state.doubleValue(), 1, driverContext.blockFactory()); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionSupplier.java new file mode 100644 
index 0000000000000..1dcc4126dc508 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionSupplier.java @@ -0,0 +1,42 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link MinDoubleAggregator}. + * This class is generated. Do not edit it. + */ +public final class MinDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final BigArrays bigArrays; + + private final List channels; + + public MinDoubleAggregatorFunctionSupplier(BigArrays bigArrays, List channels) { + this.bigArrays = bigArrays; + this.channels = channels; + } + + @Override + public MinDoubleAggregatorFunction aggregator(DriverContext driverContext) { + return MinDoubleAggregatorFunction.create(driverContext, channels); + } + + @Override + public MinDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return MinDoubleGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "min of doubles"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..d7c2c59eaed2c --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java @@ -0,0 +1,213 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link MinDoubleAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class MinDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("min", ElementType.DOUBLE), + new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); + + private final DoubleArrayState state; + + private final List channels; + + private final DriverContext driverContext; + + public MinDoubleGroupingAggregatorFunction(List channels, DoubleArrayState state, + DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static MinDoubleGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new MinDoubleGroupingAggregatorFunction(channels, new DoubleArrayState(driverContext.bigArrays(), MinDoubleAggregator.init()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + Block uncastValuesBlock = page.getBlock(channels.get(0)); + if (uncastValuesBlock.areAllValuesNull()) { + state.enableGroupIdTracking(seenGroupIds); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + } + }; + } + DoubleBlock valuesBlock = (DoubleBlock) uncastValuesBlock; + DoubleVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, DoubleBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + state.set(groupId, MinDoubleAggregator.combine(state.getOrDefault(groupId), values.getDouble(v))); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, DoubleVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + state.set(groupId, MinDoubleAggregator.combine(state.getOrDefault(groupId), values.getDouble(groupPosition + positionOffset))); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values) { + for (int 
groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + state.set(groupId, MinDoubleAggregator.combine(state.getOrDefault(groupId), values.getDouble(v))); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, DoubleVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + state.set(groupId, MinDoubleAggregator.combine(state.getOrDefault(groupId), values.getDouble(groupPosition + positionOffset))); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + DoubleVector min = page.getBlock(channels.get(0)).asVector(); + BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + assert min.getPositionCount() == seen.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (seen.getBoolean(groupPosition + positionOffset)) { + state.set(groupId, MinDoubleAggregator.combine(state.getOrDefault(groupId), min.getDouble(groupPosition + positionOffset))); + } + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + DoubleArrayState inState = ((MinDoubleGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + if (inState.hasValue(position)) { + state.set(groupId, MinDoubleAggregator.combine(state.getOrDefault(groupId), inState.get(position))); + } + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = state.toValuesBlock(selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java 
new file mode 100644 index 0000000000000..7e30dcd5a0561 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java @@ -0,0 +1,134 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link MinIntAggregator}. + * This class is generated. Do not edit it. + */ +public final class MinIntAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("min", ElementType.INT), + new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); + + private final DriverContext driverContext; + + private final IntState state; + + private final List channels; + + public MinIntAggregatorFunction(DriverContext driverContext, List channels, + IntState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static MinIntAggregatorFunction create(DriverContext driverContext, + List channels) { + return new MinIntAggregatorFunction(driverContext, channels, new IntState(MinIntAggregator.init())); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + Block uncastBlock = page.getBlock(channels.get(0)); + if (uncastBlock.areAllValuesNull()) { + return; + } + IntBlock block = (IntBlock) uncastBlock; + IntVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(IntVector vector) { + state.seen(true); + for (int i = 0; i < vector.getPositionCount(); i++) { + state.intValue(MinIntAggregator.combine(state.intValue(), vector.getInt(i))); + } + } + + private void addRawBlock(IntBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + state.seen(true); + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + state.intValue(MinIntAggregator.combine(state.intValue(), block.getInt(i))); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + IntVector min = page.getBlock(channels.get(0)).asVector(); + BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + assert min.getPositionCount() == 1; + assert min.getPositionCount() == seen.getPositionCount(); + if (seen.getBoolean(0)) { + 
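      // "min" at position 0 is only meaningful when the paired "seen" flag is true; if no upstream value
      // was ever observed, the running state is left untouched.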
state.intValue(MinIntAggregator.combine(state.intValue(), min.getInt(0))); + state.seen(true); + } + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset) { + state.toIntermediate(blocks, offset); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + if (state.seen() == false) { + blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); + return; + } + blocks[offset] = IntBlock.newConstantBlockWith(state.intValue(), 1, driverContext.blockFactory()); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..d1a6411c2cf2c --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunctionSupplier.java @@ -0,0 +1,42 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link MinIntAggregator}. + * This class is generated. Do not edit it. + */ +public final class MinIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final BigArrays bigArrays; + + private final List channels; + + public MinIntAggregatorFunctionSupplier(BigArrays bigArrays, List channels) { + this.bigArrays = bigArrays; + this.channels = channels; + } + + @Override + public MinIntAggregatorFunction aggregator(DriverContext driverContext) { + return MinIntAggregatorFunction.create(driverContext, channels); + } + + @Override + public MinIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return MinIntGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "min of ints"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..bf0c4c5a5e02c --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java @@ -0,0 +1,211 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
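// Not part of the patch: the grouping variant that follows keeps one running minimum per group in an
// IntArrayState keyed by group id, conceptually
//   state.set(groupId, MinIntAggregator.combine(state.getOrDefault(groupId), value));
// and evaluateFinal later emits values only for the group ids listed in the `selected` vector.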
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link MinIntAggregator}. + * This class is generated. Do not edit it. + */ +public final class MinIntGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("min", ElementType.INT), + new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); + + private final IntArrayState state; + + private final List channels; + + private final DriverContext driverContext; + + public MinIntGroupingAggregatorFunction(List channels, IntArrayState state, + DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static MinIntGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new MinIntGroupingAggregatorFunction(channels, new IntArrayState(driverContext.bigArrays(), MinIntAggregator.init()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + Block uncastValuesBlock = page.getBlock(channels.get(0)); + if (uncastValuesBlock.areAllValuesNull()) { + state.enableGroupIdTracking(seenGroupIds); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + } + }; + } + IntBlock valuesBlock = (IntBlock) uncastValuesBlock; + IntVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, IntBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + 
values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + state.set(groupId, MinIntAggregator.combine(state.getOrDefault(groupId), values.getInt(v))); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, IntVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + state.set(groupId, MinIntAggregator.combine(state.getOrDefault(groupId), values.getInt(groupPosition + positionOffset))); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, IntBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + state.set(groupId, MinIntAggregator.combine(state.getOrDefault(groupId), values.getInt(v))); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, IntVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + state.set(groupId, MinIntAggregator.combine(state.getOrDefault(groupId), values.getInt(groupPosition + positionOffset))); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + IntVector min = page.getBlock(channels.get(0)).asVector(); + BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + assert min.getPositionCount() == seen.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (seen.getBoolean(groupPosition + positionOffset)) { + state.set(groupId, MinIntAggregator.combine(state.getOrDefault(groupId), min.getInt(groupPosition + positionOffset))); + } + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + IntArrayState inState = ((MinIntGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + if (inState.hasValue(position)) { + state.set(groupId, MinIntAggregator.combine(state.getOrDefault(groupId), inState.get(position))); + } + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, 
IntVector selected, + DriverContext driverContext) { + blocks[offset] = state.toValuesBlock(selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java new file mode 100644 index 0000000000000..f6987443ebfaa --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java @@ -0,0 +1,134 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link MinLongAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class MinLongAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("min", ElementType.LONG), + new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); + + private final DriverContext driverContext; + + private final LongState state; + + private final List channels; + + public MinLongAggregatorFunction(DriverContext driverContext, List channels, + LongState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static MinLongAggregatorFunction create(DriverContext driverContext, + List channels) { + return new MinLongAggregatorFunction(driverContext, channels, new LongState(MinLongAggregator.init())); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + Block uncastBlock = page.getBlock(channels.get(0)); + if (uncastBlock.areAllValuesNull()) { + return; + } + LongBlock block = (LongBlock) uncastBlock; + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(LongVector vector) { + state.seen(true); + for (int i = 0; i < vector.getPositionCount(); i++) { + state.longValue(MinLongAggregator.combine(state.longValue(), vector.getLong(i))); + } + } + + private void addRawBlock(LongBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + state.seen(true); + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + state.longValue(MinLongAggregator.combine(state.longValue(), block.getLong(i))); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + LongVector min = page.getBlock(channels.get(0)).asVector(); + BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + assert min.getPositionCount() == 1; + assert min.getPositionCount() == seen.getPositionCount(); + if (seen.getBoolean(0)) { + state.longValue(MinLongAggregator.combine(state.longValue(), min.getLong(0))); + state.seen(true); + } + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset) { + state.toIntermediate(blocks, offset); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + if (state.seen() == false) { + blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); + return; + } + blocks[offset] = LongBlock.newConstantBlockWith(state.longValue(), 1, driverContext.blockFactory()); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..4015e8de18e7b --- /dev/null +++ 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionSupplier.java
@@ -0,0 +1,42 @@
+// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+// or more contributor license agreements. Licensed under the Elastic License
+// 2.0; you may not use this file except in compliance with the Elastic License
+// 2.0.
+package org.elasticsearch.compute.aggregation;
+
+import java.lang.Integer;
+import java.lang.Override;
+import java.lang.String;
+import java.util.List;
+import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.compute.operator.DriverContext;
+
+/**
+ * {@link AggregatorFunctionSupplier} implementation for {@link MinLongAggregator}.
+ * This class is generated. Do not edit it.
+ */
+public final class MinLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier {
+  private final BigArrays bigArrays;
+
+  private final List<Integer> channels;
+
+  public MinLongAggregatorFunctionSupplier(BigArrays bigArrays, List<Integer> channels) {
+    this.bigArrays = bigArrays;
+    this.channels = channels;
+  }
+
+  @Override
+  public MinLongAggregatorFunction aggregator(DriverContext driverContext) {
+    return MinLongAggregatorFunction.create(driverContext, channels);
+  }
+
+  @Override
+  public MinLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) {
+    return MinLongGroupingAggregatorFunction.create(channels, driverContext);
+  }
+
+  @Override
+  public String describe() {
+    return "min of longs";
+  }
+}
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java
new file mode 100644
index 0000000000000..d7baa88d6da26
--- /dev/null
+++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java
@@ -0,0 +1,213 @@
+// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+// or more contributor license agreements. Licensed under the Elastic License
+// 2.0; you may not use this file except in compliance with the Elastic License
+// 2.0.
+package org.elasticsearch.compute.aggregation;
+
+import java.lang.Integer;
+import java.lang.Override;
+import java.lang.String;
+import java.lang.StringBuilder;
+import java.util.List;
+import org.elasticsearch.compute.data.Block;
+import org.elasticsearch.compute.data.BooleanBlock;
+import org.elasticsearch.compute.data.BooleanVector;
+import org.elasticsearch.compute.data.ElementType;
+import org.elasticsearch.compute.data.IntBlock;
+import org.elasticsearch.compute.data.IntVector;
+import org.elasticsearch.compute.data.LongBlock;
+import org.elasticsearch.compute.data.LongVector;
+import org.elasticsearch.compute.data.Page;
+import org.elasticsearch.compute.operator.DriverContext;
+
+/**
+ * {@link GroupingAggregatorFunction} implementation for {@link MinLongAggregator}.
+ * This class is generated. Do not edit it.
+ */
+public final class MinLongGroupingAggregatorFunction implements GroupingAggregatorFunction {
+  private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
+      new IntermediateStateDesc("min", ElementType.LONG),
+      new IntermediateStateDesc("seen", ElementType.BOOLEAN) );
+
+  private final LongArrayState state;
+
+  private final List<Integer> channels;
+
+  private final DriverContext driverContext;
+
+  public MinLongGroupingAggregatorFunction(List<Integer> channels, LongArrayState state,
+      DriverContext driverContext) {
+    this.channels = channels;
+    this.state = state;
+    this.driverContext = driverContext;
+  }
+
+  public static MinLongGroupingAggregatorFunction create(List<Integer> channels,
+      DriverContext driverContext) {
+    return new MinLongGroupingAggregatorFunction(channels, new LongArrayState(driverContext.bigArrays(), MinLongAggregator.init()), driverContext);
+  }
+
+  public static List<IntermediateStateDesc> intermediateStateDesc() {
+    return INTERMEDIATE_STATE_DESC;
+  }
+
+  @Override
+  public int intermediateBlockCount() {
+    return INTERMEDIATE_STATE_DESC.size();
+  }
+
+  @Override
+  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
+      Page page) {
+    Block uncastValuesBlock = page.getBlock(channels.get(0));
+    if (uncastValuesBlock.areAllValuesNull()) {
+      state.enableGroupIdTracking(seenGroupIds);
+      return new GroupingAggregatorFunction.AddInput() {
+        @Override
+        public void add(int positionOffset, IntBlock groupIds) {
+        }
+
+        @Override
+        public void add(int positionOffset, IntVector groupIds) {
+        }
+      };
+    }
+    LongBlock valuesBlock = (LongBlock) uncastValuesBlock;
+    LongVector valuesVector = valuesBlock.asVector();
+    if (valuesVector == null) {
+      if (valuesBlock.mayHaveNulls()) {
+        state.enableGroupIdTracking(seenGroupIds);
+      }
+      return new GroupingAggregatorFunction.AddInput() {
+        @Override
+        public void add(int positionOffset, IntBlock groupIds) {
+          addRawInput(positionOffset, groupIds, valuesBlock);
+        }
+
+        @Override
+        public void add(int positionOffset, IntVector groupIds) {
+          addRawInput(positionOffset, groupIds, valuesBlock);
+        }
+      };
+    }
+    return new GroupingAggregatorFunction.AddInput() {
+      @Override
+      public void add(int positionOffset, IntBlock groupIds) {
+        addRawInput(positionOffset, groupIds, valuesVector);
+      }
+
+      @Override
+      public void add(int positionOffset, IntVector groupIds) {
+        addRawInput(positionOffset, groupIds, valuesVector);
+      }
+    };
+  }
+
+  private void addRawInput(int positionOffset, IntVector groups, LongBlock values) {
+    for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
+      int groupId = Math.toIntExact(groups.getInt(groupPosition));
+      if (values.isNull(groupPosition + positionOffset)) {
+        continue;
+      }
+      int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset);
+      int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset);
+      for (int v = valuesStart; v < valuesEnd; v++) {
+        state.set(groupId, MinLongAggregator.combine(state.getOrDefault(groupId), values.getLong(v)));
+      }
+    }
+  }
+
+  private void addRawInput(int positionOffset, IntVector groups, LongVector values) {
+    for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
+      int groupId = Math.toIntExact(groups.getInt(groupPosition));
+      state.set(groupId, MinLongAggregator.combine(state.getOrDefault(groupId), values.getLong(groupPosition + positionOffset)));
+    }
+  }
+
+  private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) {
+    for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
+      if (groups.isNull(groupPosition)) {
+        continue;
+      }
+      int groupStart = groups.getFirstValueIndex(groupPosition);
+      int groupEnd = groupStart + groups.getValueCount(groupPosition);
+      for (int g = groupStart; g < groupEnd; g++) {
+        int groupId = Math.toIntExact(groups.getInt(g));
+        if (values.isNull(groupPosition + positionOffset)) {
+          continue;
+        }
+        int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset);
+        int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset);
+        for (int v = valuesStart; v < valuesEnd; v++) {
+          state.set(groupId, MinLongAggregator.combine(state.getOrDefault(groupId), values.getLong(v)));
+        }
+      }
+    }
+  }
+
+  private void addRawInput(int positionOffset, IntBlock groups, LongVector values) {
+    for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
+      if (groups.isNull(groupPosition)) {
+        continue;
+      }
+      int groupStart = groups.getFirstValueIndex(groupPosition);
+      int groupEnd = groupStart + groups.getValueCount(groupPosition);
+      for (int g = groupStart; g < groupEnd; g++) {
+        int groupId = Math.toIntExact(groups.getInt(g));
+        state.set(groupId, MinLongAggregator.combine(state.getOrDefault(groupId), values.getLong(groupPosition + positionOffset)));
+      }
+    }
+  }
+
+  @Override
+  public void addIntermediateInput(int positionOffset, IntVector groups, Page page) {
+    state.enableGroupIdTracking(new SeenGroupIds.Empty());
+    assert channels.size() == intermediateBlockCount();
+    LongVector min = page.<LongBlock>getBlock(channels.get(0)).asVector();
+    BooleanVector seen = page.<BooleanBlock>getBlock(channels.get(1)).asVector();
+    assert min.getPositionCount() == seen.getPositionCount();
+    for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
+      int groupId = Math.toIntExact(groups.getInt(groupPosition));
+      if (seen.getBoolean(groupPosition + positionOffset)) {
+        state.set(groupId, MinLongAggregator.combine(state.getOrDefault(groupId), min.getLong(groupPosition + positionOffset)));
+      }
+    }
+  }
+
+  @Override
+  public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) {
+    if (input.getClass() != getClass()) {
+      throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass());
+    }
+    LongArrayState inState = ((MinLongGroupingAggregatorFunction) input).state;
+    state.enableGroupIdTracking(new SeenGroupIds.Empty());
+    if (inState.hasValue(position)) {
+      state.set(groupId, MinLongAggregator.combine(state.getOrDefault(groupId), inState.get(position)));
+    }
+  }
+
+  @Override
+  public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) {
+    state.toIntermediate(blocks, offset, selected, driverContext);
+  }
+
+  @Override
+  public void evaluateFinal(Block[] blocks, int offset, IntVector selected,
+      DriverContext driverContext) {
+    blocks[offset] = state.toValuesBlock(selected, driverContext);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append(getClass().getSimpleName()).append("[");
+    sb.append("channels=").append(channels);
+    sb.append("]");
+    return sb.toString();
+  }
+
+  @Override
+  public void close() {
+    state.close();
+  }
+}
diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java
new file mode 100644
index
0000000000000..cf2fc63584bec --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java @@ -0,0 +1,127 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link PercentileDoubleAggregator}. + * This class is generated. Do not edit it. + */ +public final class PercentileDoubleAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("quart", ElementType.BYTES_REF) ); + + private final DriverContext driverContext; + + private final QuantileStates.SingleState state; + + private final List channels; + + private final double percentile; + + public PercentileDoubleAggregatorFunction(DriverContext driverContext, List channels, + QuantileStates.SingleState state, double percentile) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + this.percentile = percentile; + } + + public static PercentileDoubleAggregatorFunction create(DriverContext driverContext, + List channels, double percentile) { + return new PercentileDoubleAggregatorFunction(driverContext, channels, PercentileDoubleAggregator.initSingle(percentile), percentile); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + Block uncastBlock = page.getBlock(channels.get(0)); + if (uncastBlock.areAllValuesNull()) { + return; + } + DoubleBlock block = (DoubleBlock) uncastBlock; + DoubleVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(DoubleVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + PercentileDoubleAggregator.combine(state, vector.getDouble(i)); + } + } + + private void addRawBlock(DoubleBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + PercentileDoubleAggregator.combine(state, block.getDouble(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + assert quart.getPositionCount() == 1; + BytesRef scratch = new BytesRef(); + 
PercentileDoubleAggregator.combineIntermediate(state, quart.getBytesRef(0, scratch)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset) { + state.toIntermediate(blocks, offset); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = PercentileDoubleAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..b32c28ef133ec --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunctionSupplier.java @@ -0,0 +1,47 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link PercentileDoubleAggregator}. + * This class is generated. Do not edit it. + */ +public final class PercentileDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final BigArrays bigArrays; + + private final List channels; + + private final double percentile; + + public PercentileDoubleAggregatorFunctionSupplier(BigArrays bigArrays, List channels, + double percentile) { + this.bigArrays = bigArrays; + this.channels = channels; + this.percentile = percentile; + } + + @Override + public PercentileDoubleAggregatorFunction aggregator(DriverContext driverContext) { + return PercentileDoubleAggregatorFunction.create(driverContext, channels, percentile); + } + + @Override + public PercentileDoubleGroupingAggregatorFunction groupingAggregator( + DriverContext driverContext) { + return PercentileDoubleGroupingAggregatorFunction.create(channels, driverContext, bigArrays, percentile); + } + + @Override + public String describe() { + return "percentile of doubles"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..63613daa47ad1 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java @@ -0,0 +1,216 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link PercentileDoubleAggregator}. + * This class is generated. Do not edit it. + */ +public final class PercentileDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("quart", ElementType.BYTES_REF) ); + + private final QuantileStates.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + private final BigArrays bigArrays; + + private final double percentile; + + public PercentileDoubleGroupingAggregatorFunction(List channels, + QuantileStates.GroupingState state, DriverContext driverContext, BigArrays bigArrays, + double percentile) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + this.bigArrays = bigArrays; + this.percentile = percentile; + } + + public static PercentileDoubleGroupingAggregatorFunction create(List channels, + DriverContext driverContext, BigArrays bigArrays, double percentile) { + return new PercentileDoubleGroupingAggregatorFunction(channels, PercentileDoubleAggregator.initGrouping(bigArrays, percentile), driverContext, bigArrays, percentile); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + Block uncastValuesBlock = page.getBlock(channels.get(0)); + if (uncastValuesBlock.areAllValuesNull()) { + state.enableGroupIdTracking(seenGroupIds); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + } + }; + } + DoubleBlock valuesBlock = (DoubleBlock) uncastValuesBlock; + DoubleVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + 
addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, DoubleBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + PercentileDoubleAggregator.combine(state, groupId, values.getDouble(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, DoubleVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + PercentileDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + PercentileDoubleAggregator.combine(state, groupId, values.getDouble(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, DoubleVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + PercentileDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + PercentileDoubleAggregator.combineIntermediate(state, groupId, quart.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + QuantileStates.GroupingState inState = ((PercentileDoubleGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + PercentileDoubleAggregator.combineStates(state, groupId, inState, position); + } 
+ + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = PercentileDoubleAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java new file mode 100644 index 0000000000000..ab69c5f27c8f2 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java @@ -0,0 +1,127 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link PercentileIntAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class PercentileIntAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("quart", ElementType.BYTES_REF) ); + + private final DriverContext driverContext; + + private final QuantileStates.SingleState state; + + private final List channels; + + private final double percentile; + + public PercentileIntAggregatorFunction(DriverContext driverContext, List channels, + QuantileStates.SingleState state, double percentile) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + this.percentile = percentile; + } + + public static PercentileIntAggregatorFunction create(DriverContext driverContext, + List channels, double percentile) { + return new PercentileIntAggregatorFunction(driverContext, channels, PercentileIntAggregator.initSingle(percentile), percentile); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + Block uncastBlock = page.getBlock(channels.get(0)); + if (uncastBlock.areAllValuesNull()) { + return; + } + IntBlock block = (IntBlock) uncastBlock; + IntVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(IntVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + PercentileIntAggregator.combine(state, vector.getInt(i)); + } + } + + private void addRawBlock(IntBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + PercentileIntAggregator.combine(state, block.getInt(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + assert quart.getPositionCount() == 1; + BytesRef scratch = new BytesRef(); + PercentileIntAggregator.combineIntermediate(state, quart.getBytesRef(0, scratch)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset) { + state.toIntermediate(blocks, offset); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = PercentileIntAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..72893a1dd95b3 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunctionSupplier.java @@ -0,0 +1,46 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link PercentileIntAggregator}. + * This class is generated. Do not edit it. + */ +public final class PercentileIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final BigArrays bigArrays; + + private final List channels; + + private final double percentile; + + public PercentileIntAggregatorFunctionSupplier(BigArrays bigArrays, List channels, + double percentile) { + this.bigArrays = bigArrays; + this.channels = channels; + this.percentile = percentile; + } + + @Override + public PercentileIntAggregatorFunction aggregator(DriverContext driverContext) { + return PercentileIntAggregatorFunction.create(driverContext, channels, percentile); + } + + @Override + public PercentileIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return PercentileIntGroupingAggregatorFunction.create(channels, driverContext, bigArrays, percentile); + } + + @Override + public String describe() { + return "percentile of ints"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..38911d7a02f05 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java @@ -0,0 +1,214 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link PercentileIntAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class PercentileIntGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("quart", ElementType.BYTES_REF) ); + + private final QuantileStates.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + private final BigArrays bigArrays; + + private final double percentile; + + public PercentileIntGroupingAggregatorFunction(List channels, + QuantileStates.GroupingState state, DriverContext driverContext, BigArrays bigArrays, + double percentile) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + this.bigArrays = bigArrays; + this.percentile = percentile; + } + + public static PercentileIntGroupingAggregatorFunction create(List channels, + DriverContext driverContext, BigArrays bigArrays, double percentile) { + return new PercentileIntGroupingAggregatorFunction(channels, PercentileIntAggregator.initGrouping(bigArrays, percentile), driverContext, bigArrays, percentile); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + Block uncastValuesBlock = page.getBlock(channels.get(0)); + if (uncastValuesBlock.areAllValuesNull()) { + state.enableGroupIdTracking(seenGroupIds); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + } + }; + } + IntBlock valuesBlock = (IntBlock) uncastValuesBlock; + IntVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, IntBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + PercentileIntAggregator.combine(state, groupId, values.getInt(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, IntVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + PercentileIntAggregator.combine(state, groupId, 
values.getInt(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, IntBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + PercentileIntAggregator.combine(state, groupId, values.getInt(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, IntVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + PercentileIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + PercentileIntAggregator.combineIntermediate(state, groupId, quart.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + QuantileStates.GroupingState inState = ((PercentileIntGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + PercentileIntAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = PercentileIntAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java new file mode 100644 index 0000000000000..1629ea7de8f67 --- /dev/null +++ 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java @@ -0,0 +1,127 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link PercentileLongAggregator}. + * This class is generated. Do not edit it. + */ +public final class PercentileLongAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("quart", ElementType.BYTES_REF) ); + + private final DriverContext driverContext; + + private final QuantileStates.SingleState state; + + private final List channels; + + private final double percentile; + + public PercentileLongAggregatorFunction(DriverContext driverContext, List channels, + QuantileStates.SingleState state, double percentile) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + this.percentile = percentile; + } + + public static PercentileLongAggregatorFunction create(DriverContext driverContext, + List channels, double percentile) { + return new PercentileLongAggregatorFunction(driverContext, channels, PercentileLongAggregator.initSingle(percentile), percentile); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + Block uncastBlock = page.getBlock(channels.get(0)); + if (uncastBlock.areAllValuesNull()) { + return; + } + LongBlock block = (LongBlock) uncastBlock; + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(LongVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + PercentileLongAggregator.combine(state, vector.getLong(i)); + } + } + + private void addRawBlock(LongBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + PercentileLongAggregator.combine(state, block.getLong(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + assert quart.getPositionCount() == 1; + BytesRef scratch = new BytesRef(); + PercentileLongAggregator.combineIntermediate(state, quart.getBytesRef(0, scratch)); 
+ } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset) { + state.toIntermediate(blocks, offset); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = PercentileLongAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..a71de850814ff --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionSupplier.java @@ -0,0 +1,46 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link PercentileLongAggregator}. + * This class is generated. Do not edit it. + */ +public final class PercentileLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final BigArrays bigArrays; + + private final List channels; + + private final double percentile; + + public PercentileLongAggregatorFunctionSupplier(BigArrays bigArrays, List channels, + double percentile) { + this.bigArrays = bigArrays; + this.channels = channels; + this.percentile = percentile; + } + + @Override + public PercentileLongAggregatorFunction aggregator(DriverContext driverContext) { + return PercentileLongAggregatorFunction.create(driverContext, channels, percentile); + } + + @Override + public PercentileLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return PercentileLongGroupingAggregatorFunction.create(channels, driverContext, bigArrays, percentile); + } + + @Override + public String describe() { + return "percentile of longs"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..efb512e84f1f5 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java @@ -0,0 +1,216 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link PercentileLongAggregator}. + * This class is generated. Do not edit it. + */ +public final class PercentileLongGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("quart", ElementType.BYTES_REF) ); + + private final QuantileStates.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + private final BigArrays bigArrays; + + private final double percentile; + + public PercentileLongGroupingAggregatorFunction(List channels, + QuantileStates.GroupingState state, DriverContext driverContext, BigArrays bigArrays, + double percentile) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + this.bigArrays = bigArrays; + this.percentile = percentile; + } + + public static PercentileLongGroupingAggregatorFunction create(List channels, + DriverContext driverContext, BigArrays bigArrays, double percentile) { + return new PercentileLongGroupingAggregatorFunction(channels, PercentileLongAggregator.initGrouping(bigArrays, percentile), driverContext, bigArrays, percentile); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + Block uncastValuesBlock = page.getBlock(channels.get(0)); + if (uncastValuesBlock.areAllValuesNull()) { + state.enableGroupIdTracking(seenGroupIds); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + } + }; + } + LongBlock valuesBlock = (LongBlock) uncastValuesBlock; + LongVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, 
valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + PercentileLongAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + PercentileLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + PercentileLongAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + PercentileLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + PercentileLongAggregator.combineIntermediate(state, groupId, quart.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + QuantileStates.GroupingState inState = ((PercentileLongGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + PercentileLongAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, 
int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = PercentileLongAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java new file mode 100644 index 0000000000000..d40c4812975a3 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java @@ -0,0 +1,133 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link SumDoubleAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SumDoubleAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("value", ElementType.DOUBLE), + new IntermediateStateDesc("delta", ElementType.DOUBLE), + new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); + + private final DriverContext driverContext; + + private final SumDoubleAggregator.SumState state; + + private final List channels; + + public SumDoubleAggregatorFunction(DriverContext driverContext, List channels, + SumDoubleAggregator.SumState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static SumDoubleAggregatorFunction create(DriverContext driverContext, + List channels) { + return new SumDoubleAggregatorFunction(driverContext, channels, SumDoubleAggregator.initSingle()); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + Block uncastBlock = page.getBlock(channels.get(0)); + if (uncastBlock.areAllValuesNull()) { + return; + } + DoubleBlock block = (DoubleBlock) uncastBlock; + DoubleVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(DoubleVector vector) { + state.seen(true); + for (int i = 0; i < vector.getPositionCount(); i++) { + SumDoubleAggregator.combine(state, vector.getDouble(i)); + } + } + + private void addRawBlock(DoubleBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + state.seen(true); + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SumDoubleAggregator.combine(state, block.getDouble(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + DoubleVector value = page.getBlock(channels.get(0)).asVector(); + DoubleVector delta = page.getBlock(channels.get(1)).asVector(); + BooleanVector seen = page.getBlock(channels.get(2)).asVector(); + assert value.getPositionCount() == 1; + assert value.getPositionCount() == delta.getPositionCount() && value.getPositionCount() == seen.getPositionCount(); + SumDoubleAggregator.combineIntermediate(state, value.getDouble(0), delta.getDouble(0), seen.getBoolean(0)); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset) { + state.toIntermediate(blocks, offset); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + if (state.seen() == false) { + blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); + return; + } + blocks[offset] = SumDoubleAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionSupplier.java 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..d6898669ab339 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionSupplier.java @@ -0,0 +1,42 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link SumDoubleAggregator}. + * This class is generated. Do not edit it. + */ +public final class SumDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final BigArrays bigArrays; + + private final List channels; + + public SumDoubleAggregatorFunctionSupplier(BigArrays bigArrays, List channels) { + this.bigArrays = bigArrays; + this.channels = channels; + } + + @Override + public SumDoubleAggregatorFunction aggregator(DriverContext driverContext) { + return SumDoubleAggregatorFunction.create(driverContext, channels); + } + + @Override + public SumDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return SumDoubleGroupingAggregatorFunction.create(channels, driverContext, bigArrays); + } + + @Override + public String describe() { + return "sum of doubles"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..6094c7115159f --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java @@ -0,0 +1,216 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link SumDoubleAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SumDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("value", ElementType.DOUBLE), + new IntermediateStateDesc("delta", ElementType.DOUBLE), + new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); + + private final SumDoubleAggregator.GroupingSumState state; + + private final List channels; + + private final DriverContext driverContext; + + private final BigArrays bigArrays; + + public SumDoubleGroupingAggregatorFunction(List channels, + SumDoubleAggregator.GroupingSumState state, DriverContext driverContext, + BigArrays bigArrays) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + this.bigArrays = bigArrays; + } + + public static SumDoubleGroupingAggregatorFunction create(List channels, + DriverContext driverContext, BigArrays bigArrays) { + return new SumDoubleGroupingAggregatorFunction(channels, SumDoubleAggregator.initGrouping(bigArrays), driverContext, bigArrays); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + Block uncastValuesBlock = page.getBlock(channels.get(0)); + if (uncastValuesBlock.areAllValuesNull()) { + state.enableGroupIdTracking(seenGroupIds); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + } + }; + } + DoubleBlock valuesBlock = (DoubleBlock) uncastValuesBlock; + DoubleVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, DoubleBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SumDoubleAggregator.combine(state, groupId, values.getDouble(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, DoubleVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + SumDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + 
positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + SumDoubleAggregator.combine(state, groupId, values.getDouble(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, DoubleVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + SumDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + DoubleVector value = page.getBlock(channels.get(0)).asVector(); + DoubleVector delta = page.getBlock(channels.get(1)).asVector(); + BooleanVector seen = page.getBlock(channels.get(2)).asVector(); + assert value.getPositionCount() == delta.getPositionCount() && value.getPositionCount() == seen.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + SumDoubleAggregator.combineIntermediate(state, groupId, value.getDouble(groupPosition + positionOffset), delta.getDouble(groupPosition + positionOffset), seen.getBoolean(groupPosition + positionOffset)); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + SumDoubleAggregator.GroupingSumState inState = ((SumDoubleGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + SumDoubleAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = SumDoubleAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java new file mode 100644 index 0000000000000..2cc6c85c72857 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java @@ -0,0 +1,136 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link SumIntAggregator}. + * This class is generated. Do not edit it. + */ +public final class SumIntAggregatorFunction implements AggregatorFunction { + private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("sum", ElementType.LONG), + new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); + + private final DriverContext driverContext; + + private final LongState state; + + private final List<Integer> channels; + + public SumIntAggregatorFunction(DriverContext driverContext, List<Integer> channels, + LongState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static SumIntAggregatorFunction create(DriverContext driverContext, + List<Integer> channels) { + return new SumIntAggregatorFunction(driverContext, channels, new LongState(SumIntAggregator.init())); + } + + public static List<IntermediateStateDesc> intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + Block uncastBlock = page.getBlock(channels.get(0)); + if (uncastBlock.areAllValuesNull()) { + return; + } + IntBlock block = (IntBlock) uncastBlock; + IntVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(IntVector vector) { + state.seen(true); + for (int i = 0; i < vector.getPositionCount(); i++) { + state.longValue(SumIntAggregator.combine(state.longValue(), vector.getInt(i))); + } + } + + private void addRawBlock(IntBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + state.seen(true); + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + state.longValue(SumIntAggregator.combine(state.longValue(), block.getInt(i))); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= 
channels.get(0) + intermediateStateDesc().size(); + LongVector sum = page.<LongBlock>getBlock(channels.get(0)).asVector(); + BooleanVector seen = page.<BooleanBlock>getBlock(channels.get(1)).asVector(); + assert sum.getPositionCount() == 1; + assert sum.getPositionCount() == seen.getPositionCount(); + if (seen.getBoolean(0)) { + state.longValue(SumIntAggregator.combine(state.longValue(), sum.getLong(0))); + state.seen(true); + } + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset) { + state.toIntermediate(blocks, offset); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + if (state.seen() == false) { + blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); + return; + } + blocks[offset] = LongBlock.newConstantBlockWith(state.longValue(), 1, driverContext.blockFactory()); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..01294de12de45 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionSupplier.java @@ -0,0 +1,42 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link SumIntAggregator}. + * This class is generated. Do not edit it. + */ +public final class SumIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final BigArrays bigArrays; + + private final List<Integer> channels; + + public SumIntAggregatorFunctionSupplier(BigArrays bigArrays, List<Integer> channels) { + this.bigArrays = bigArrays; + this.channels = channels; + } + + @Override + public SumIntAggregatorFunction aggregator(DriverContext driverContext) { + return SumIntAggregatorFunction.create(driverContext, channels); + } + + @Override + public SumIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return SumIntGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "sum of ints"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..d0c9ecb6ac84a --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java @@ -0,0 +1,213 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link SumIntAggregator}. + * This class is generated. Do not edit it. + */ +public final class SumIntGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("sum", ElementType.LONG), + new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); + + private final LongArrayState state; + + private final List channels; + + private final DriverContext driverContext; + + public SumIntGroupingAggregatorFunction(List channels, LongArrayState state, + DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static SumIntGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new SumIntGroupingAggregatorFunction(channels, new LongArrayState(driverContext.bigArrays(), SumIntAggregator.init()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + Block uncastValuesBlock = page.getBlock(channels.get(0)); + if (uncastValuesBlock.areAllValuesNull()) { + state.enableGroupIdTracking(seenGroupIds); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + } + }; + } + IntBlock valuesBlock = (IntBlock) uncastValuesBlock; + IntVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, IntBlock values) { + for (int groupPosition = 0; groupPosition < 
groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + state.set(groupId, SumIntAggregator.combine(state.getOrDefault(groupId), values.getInt(v))); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, IntVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + state.set(groupId, SumIntAggregator.combine(state.getOrDefault(groupId), values.getInt(groupPosition + positionOffset))); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, IntBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + state.set(groupId, SumIntAggregator.combine(state.getOrDefault(groupId), values.getInt(v))); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, IntVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + state.set(groupId, SumIntAggregator.combine(state.getOrDefault(groupId), values.getInt(groupPosition + positionOffset))); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + LongVector sum = page.getBlock(channels.get(0)).asVector(); + BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + assert sum.getPositionCount() == seen.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (seen.getBoolean(groupPosition + positionOffset)) { + state.set(groupId, SumIntAggregator.combine(state.getOrDefault(groupId), sum.getLong(groupPosition + positionOffset))); + } + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + LongArrayState inState = ((SumIntGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + if (inState.hasValue(position)) { + state.set(groupId, 
SumIntAggregator.combine(state.getOrDefault(groupId), inState.get(position))); + } + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = state.toValuesBlock(selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java new file mode 100644 index 0000000000000..e61c95ca622df --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java @@ -0,0 +1,134 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link SumLongAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SumLongAggregatorFunction implements AggregatorFunction { + private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("sum", ElementType.LONG), + new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); + + private final DriverContext driverContext; + + private final LongState state; + + private final List<Integer> channels; + + public SumLongAggregatorFunction(DriverContext driverContext, List<Integer> channels, + LongState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static SumLongAggregatorFunction create(DriverContext driverContext, + List<Integer> channels) { + return new SumLongAggregatorFunction(driverContext, channels, new LongState(SumLongAggregator.init())); + } + + public static List<IntermediateStateDesc> intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + Block uncastBlock = page.getBlock(channels.get(0)); + if (uncastBlock.areAllValuesNull()) { + return; + } + LongBlock block = (LongBlock) uncastBlock; + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(LongVector vector) { + state.seen(true); + for (int i = 0; i < vector.getPositionCount(); i++) { + state.longValue(SumLongAggregator.combine(state.longValue(), vector.getLong(i))); + } + } + + private void addRawBlock(LongBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + state.seen(true); + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + state.longValue(SumLongAggregator.combine(state.longValue(), block.getLong(i))); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + LongVector sum = page.<LongBlock>getBlock(channels.get(0)).asVector(); + BooleanVector seen = page.<BooleanBlock>getBlock(channels.get(1)).asVector(); + assert sum.getPositionCount() == 1; + assert sum.getPositionCount() == seen.getPositionCount(); + if (seen.getBoolean(0)) { + state.longValue(SumLongAggregator.combine(state.longValue(), sum.getLong(0))); + state.seen(true); + } + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset) { + state.toIntermediate(blocks, offset); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + if (state.seen() == false) { + blocks[offset] = Block.constantNullBlock(1, driverContext.blockFactory()); + return; + } + blocks[offset] = LongBlock.newConstantBlockWith(state.longValue(), 1, driverContext.blockFactory()); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..d72927d181f12 --- /dev/null +++ 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionSupplier.java @@ -0,0 +1,42 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link SumLongAggregator}. + * This class is generated. Do not edit it. + */ +public final class SumLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final BigArrays bigArrays; + + private final List channels; + + public SumLongAggregatorFunctionSupplier(BigArrays bigArrays, List channels) { + this.bigArrays = bigArrays; + this.channels = channels; + } + + @Override + public SumLongAggregatorFunction aggregator(DriverContext driverContext) { + return SumLongAggregatorFunction.create(driverContext, channels); + } + + @Override + public SumLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return SumLongGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "sum of longs"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..9df3cd2d76cae --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java @@ -0,0 +1,213 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link SumLongAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class SumLongGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("sum", ElementType.LONG), + new IntermediateStateDesc("seen", ElementType.BOOLEAN) ); + + private final LongArrayState state; + + private final List channels; + + private final DriverContext driverContext; + + public SumLongGroupingAggregatorFunction(List channels, LongArrayState state, + DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static SumLongGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new SumLongGroupingAggregatorFunction(channels, new LongArrayState(driverContext.bigArrays(), SumLongAggregator.init()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + Block uncastValuesBlock = page.getBlock(channels.get(0)); + if (uncastValuesBlock.areAllValuesNull()) { + state.enableGroupIdTracking(seenGroupIds); + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + } + }; + } + LongBlock valuesBlock = (LongBlock) uncastValuesBlock; + LongVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + state.set(groupId, SumLongAggregator.combine(state.getOrDefault(groupId), values.getLong(v))); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + state.set(groupId, SumLongAggregator.combine(state.getOrDefault(groupId), values.getLong(groupPosition + positionOffset))); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < 
groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + state.set(groupId, SumLongAggregator.combine(state.getOrDefault(groupId), values.getLong(v))); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + state.set(groupId, SumLongAggregator.combine(state.getOrDefault(groupId), values.getLong(groupPosition + positionOffset))); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + LongVector sum = page.getBlock(channels.get(0)).asVector(); + BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + assert sum.getPositionCount() == seen.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (seen.getBoolean(groupPosition + positionOffset)) { + state.set(groupId, SumLongAggregator.combine(state.getOrDefault(groupId), sum.getLong(groupPosition + positionOffset))); + } + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + LongArrayState inState = ((SumLongGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + if (inState.hasValue(position)) { + state.set(groupId, SumLongAggregator.combine(state.getOrDefault(groupId), inState.get(position))); + } + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = state.toValuesBlock(selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} From f7fce9d9b8c6ca881a5206cab9cf0ea66d490ebf Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 4 Oct 2023 11:24:11 -0400 Subject: [PATCH 4/6] Spotless --- .../org/elasticsearch/compute/data/ConstantNullBlock.java | 2 +- .../elasticsearch/compute/data/BlockSerializationTests.java | 4 ++-- 2 files 
changed, 3 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java index 8acbfffaa004c..9c00213a33997 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java @@ -69,7 +69,7 @@ public ElementType elementType() { @Override public Block filter(int... positions) { - close(); + Releasables.closeExpectNoException(this); return new ConstantNullBlock(positions.length); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java index 4012416ad4fef..8b958f7bafb8f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java @@ -8,15 +8,15 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.aggregation.SumLongAggregatorFunction; -import org.elasticsearch.core.Releasables; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.compute.aggregation.SumLongAggregatorFunction; import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasables; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.EqualsHashCodeTestUtils; From 326b7eb5214d5db90789b58a276e1b376fd60d53 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 4 Oct 2023 15:21:34 -0400 Subject: [PATCH 5/6] Move BlockFactory --- .../elasticsearch/indices/IndicesService.java | 5 ++ .../compute/data/BooleanBlock.java | 11 ++-- .../compute/data/BytesRefBlock.java | 11 ++-- .../compute/data/DoubleBlock.java | 11 ++-- .../elasticsearch/compute/data/IntBlock.java | 11 ++-- .../elasticsearch/compute/data/LongBlock.java | 11 ++-- .../org/elasticsearch/compute/data/Block.java | 13 ++-- .../compute/data/BlockStreamInput.java | 24 +++++++ .../compute/data/X-Block.java.st | 11 ++-- .../operator/exchange/ExchangeResponse.java | 4 +- .../operator/exchange/ExchangeService.java | 15 ++++- .../compute/data/BasicPageTests.java | 15 ++--- .../compute/data/SerializationTestCase.java | 16 +++-- .../exchange/ExchangeServiceTests.java | 64 ++++++++++++------- .../xpack/esql/lookup/EnrichLookupIT.java | 2 - .../xpack/esql/action/EsqlQueryAction.java | 2 +- .../xpack/esql/action/EsqlQueryResponse.java | 13 +++- .../esql/enrich/EnrichLookupService.java | 7 +- .../xpack/esql/plugin/EsqlPlugin.java | 22 +++---- .../esql/plugin/TransportEsqlQueryAction.java | 19 +----- .../esql/action/EsqlQueryResponseTests.java | 4 +- 21 files changed, 155 insertions(+), 136 deletions(-) create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockStreamInput.java diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java 
b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 36ebaa96b0bb2..fd0e19295e2e0 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -1844,4 +1844,9 @@ public DateFieldMapper.DateFieldType getTimestampFieldType(Index index) { public IndexScopedSettings getIndexScopedSettings() { return indexScopedSettings; } + + // TODO move this? + public BigArrays getBigArrays() { + return bigArrays; + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java index 74d5f27d8d5f5..80f396695fc2f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; -import java.util.function.Supplier; /** * Block that stores boolean values. @@ -42,17 +41,15 @@ default String getWriteableName() { return "BooleanBlock"; } - static NamedWriteableRegistry.Entry namedWriteableEntry(Supplier blockFactory) { - return new NamedWriteableRegistry.Entry(Block.class, "BooleanBlock", in -> readFrom(blockFactory.get(), in)); - } + NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Block.class, "BooleanBlock", BooleanBlock::readFrom); - private static BooleanBlock readFrom(BlockFactory blockFactory, StreamInput in) throws IOException { + private static BooleanBlock readFrom(StreamInput in) throws IOException { final boolean isVector = in.readBoolean(); if (isVector) { - return BooleanVector.readFrom(blockFactory, in).asBlock(); + return BooleanVector.readFrom(((BlockStreamInput) in).blockFactory(), in).asBlock(); } final int positions = in.readVInt(); - try (BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(positions)) { + try (BooleanBlock.Builder builder = ((BlockStreamInput) in).blockFactory().newBooleanBlockBuilder(positions)) { for (int i = 0; i < positions; i++) { if (in.readBoolean()) { builder.appendNull(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java index 29f5115be3934..9409212a9c998 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; -import java.util.function.Supplier; /** * Block that stores BytesRef values. 
@@ -46,17 +45,15 @@ default String getWriteableName() { return "BytesRefBlock"; } - static NamedWriteableRegistry.Entry namedWriteableEntry(Supplier blockFactory) { - return new NamedWriteableRegistry.Entry(Block.class, "BytesRefBlock", in -> readFrom(blockFactory.get(), in)); - } + NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Block.class, "BytesRefBlock", BytesRefBlock::readFrom); - private static BytesRefBlock readFrom(BlockFactory blockFactory, StreamInput in) throws IOException { + private static BytesRefBlock readFrom(StreamInput in) throws IOException { final boolean isVector = in.readBoolean(); if (isVector) { - return BytesRefVector.readFrom(blockFactory, in).asBlock(); + return BytesRefVector.readFrom(((BlockStreamInput) in).blockFactory(), in).asBlock(); } final int positions = in.readVInt(); - try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(positions)) { + try (BytesRefBlock.Builder builder = ((BlockStreamInput) in).blockFactory().newBytesRefBlockBuilder(positions)) { for (int i = 0; i < positions; i++) { if (in.readBoolean()) { builder.appendNull(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java index 895d72cec3a4e..806ee6d3680bc 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; -import java.util.function.Supplier; /** * Block that stores double values. @@ -42,17 +41,15 @@ default String getWriteableName() { return "DoubleBlock"; } - static NamedWriteableRegistry.Entry namedWriteableEntry(Supplier blockFactory) { - return new NamedWriteableRegistry.Entry(Block.class, "DoubleBlock", in -> readFrom(blockFactory.get(), in)); - } + NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Block.class, "DoubleBlock", DoubleBlock::readFrom); - private static DoubleBlock readFrom(BlockFactory blockFactory, StreamInput in) throws IOException { + private static DoubleBlock readFrom(StreamInput in) throws IOException { final boolean isVector = in.readBoolean(); if (isVector) { - return DoubleVector.readFrom(blockFactory, in).asBlock(); + return DoubleVector.readFrom(((BlockStreamInput) in).blockFactory(), in).asBlock(); } final int positions = in.readVInt(); - try (DoubleBlock.Builder builder = blockFactory.newDoubleBlockBuilder(positions)) { + try (DoubleBlock.Builder builder = ((BlockStreamInput) in).blockFactory().newDoubleBlockBuilder(positions)) { for (int i = 0; i < positions; i++) { if (in.readBoolean()) { builder.appendNull(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java index a36411e7e3d43..580da5e5a7415 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; -import java.util.function.Supplier; /** * Block that stores int values. 
@@ -42,17 +41,15 @@ default String getWriteableName() { return "IntBlock"; } - static NamedWriteableRegistry.Entry namedWriteableEntry(Supplier blockFactory) { - return new NamedWriteableRegistry.Entry(Block.class, "IntBlock", in -> readFrom(blockFactory.get(), in)); - } + NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Block.class, "IntBlock", IntBlock::readFrom); - private static IntBlock readFrom(BlockFactory blockFactory, StreamInput in) throws IOException { + private static IntBlock readFrom(StreamInput in) throws IOException { final boolean isVector = in.readBoolean(); if (isVector) { - return IntVector.readFrom(blockFactory, in).asBlock(); + return IntVector.readFrom(((BlockStreamInput) in).blockFactory(), in).asBlock(); } final int positions = in.readVInt(); - try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(positions)) { + try (IntBlock.Builder builder = ((BlockStreamInput) in).blockFactory().newIntBlockBuilder(positions)) { for (int i = 0; i < positions; i++) { if (in.readBoolean()) { builder.appendNull(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java index c14b0b7bc7d06..2db757efd7091 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; -import java.util.function.Supplier; /** * Block that stores long values. @@ -42,17 +41,15 @@ default String getWriteableName() { return "LongBlock"; } - static NamedWriteableRegistry.Entry namedWriteableEntry(Supplier blockFactory) { - return new NamedWriteableRegistry.Entry(Block.class, "LongBlock", in -> readFrom(blockFactory.get(), in)); - } + NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Block.class, "LongBlock", LongBlock::readFrom); - private static LongBlock readFrom(BlockFactory blockFactory, StreamInput in) throws IOException { + private static LongBlock readFrom(StreamInput in) throws IOException { final boolean isVector = in.readBoolean(); if (isVector) { - return LongVector.readFrom(blockFactory, in).asBlock(); + return LongVector.readFrom(((BlockStreamInput) in).blockFactory(), in).asBlock(); } final int positions = in.readVInt(); - try (LongBlock.Builder builder = blockFactory.newLongBlockBuilder(positions)) { + try (LongBlock.Builder builder = ((BlockStreamInput) in).blockFactory().newLongBlockBuilder(positions)) { for (int i = 0; i < positions; i++) { if (in.readBoolean()) { builder.appendNull(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index ed720dc554aa3..c5d6780e84685 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -14,7 +14,6 @@ import org.elasticsearch.core.Releasable; import java.util.List; -import java.util.function.Supplier; /** * A Block is a columnar representation of homogenous data. 
It has a position (row) count, and @@ -252,13 +251,13 @@ public void close() { } } - static List getNamedWriteables(Supplier blockFactory) { + static List getNamedWriteables() { return List.of( - IntBlock.namedWriteableEntry(blockFactory), - LongBlock.namedWriteableEntry(blockFactory), - DoubleBlock.namedWriteableEntry(blockFactory), - BytesRefBlock.namedWriteableEntry(blockFactory), - BooleanBlock.namedWriteableEntry(blockFactory), + IntBlock.ENTRY, + LongBlock.ENTRY, + DoubleBlock.ENTRY, + BytesRefBlock.ENTRY, + BooleanBlock.ENTRY, ConstantNullBlock.ENTRY ); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockStreamInput.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockStreamInput.java new file mode 100644 index 0000000000000..a5604935acb23 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockStreamInput.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.data; + +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.StreamInput; + +public class BlockStreamInput extends NamedWriteableAwareStreamInput { + private final BlockFactory blockFactory; + + public BlockStreamInput(StreamInput delegate, BlockFactory blockFactory) { + super(delegate, delegate.namedWriteableRegistry()); + this.blockFactory = blockFactory; + } + + BlockFactory blockFactory() { + return blockFactory; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st index 5eb43929f3280..81a0d3de7f8f7 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st @@ -16,7 +16,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; -import java.util.function.Supplier; /** * Block that stores $type$ values. 
@@ -58,17 +57,15 @@ $endif$ return "$Type$Block"; } - static NamedWriteableRegistry.Entry namedWriteableEntry(Supplier blockFactory) { - return new NamedWriteableRegistry.Entry(Block.class, "$Type$Block", in -> readFrom(blockFactory.get(), in)); - } + NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Block.class, "$Type$Block", $Type$Block::readFrom); - private static $Type$Block readFrom(BlockFactory blockFactory, StreamInput in) throws IOException { + private static $Type$Block readFrom(StreamInput in) throws IOException { final boolean isVector = in.readBoolean(); if (isVector) { - return $Type$Vector.readFrom(blockFactory, in).asBlock(); + return $Type$Vector.readFrom(((BlockStreamInput) in).blockFactory(), in).asBlock(); } final int positions = in.readVInt(); - try ($Type$Block.Builder builder = blockFactory.new$Type$BlockBuilder(positions)) { + try ($Type$Block.Builder builder = ((BlockStreamInput) in).blockFactory().new$Type$BlockBuilder(positions)) { for (int i = 0; i < positions; i++) { if (in.readBoolean()) { builder.appendNull(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeResponse.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeResponse.java index 9b3da39fe5c74..5904c03a01e44 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeResponse.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeResponse.java @@ -7,8 +7,8 @@ package org.elasticsearch.compute.operator.exchange; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.compute.data.BlockStreamInput; import org.elasticsearch.compute.data.Page; import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.Nullable; @@ -30,7 +30,7 @@ public ExchangeResponse(Page page, boolean finished) { this.finished = finished; } - public ExchangeResponse(StreamInput in) throws IOException { + public ExchangeResponse(BlockStreamInput in) throws IOException { super(in); this.page = in.readOptionalWriteable(Page::new); this.finished = in.readBoolean(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java index f6762e33bbc18..ab9582b20d4aa 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java @@ -22,6 +22,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractAsyncTask; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockStreamInput; import org.elasticsearch.core.TimeValue; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; @@ -61,15 +63,17 @@ public final class ExchangeService extends AbstractLifecycleComponent { private final ThreadPool threadPool; private final Executor executor; + private final BlockFactory blockFactory; private final Map sinks = ConcurrentCollections.newConcurrentMap(); private final Map sources = ConcurrentCollections.newConcurrentMap(); 
private final InactiveSinksReaper inactiveSinksReaper; - public ExchangeService(Settings settings, ThreadPool threadPool, String executorName) { + public ExchangeService(Settings settings, ThreadPool threadPool, String executorName, BlockFactory blockFactory) { this.threadPool = threadPool; this.executor = threadPool.executor(executorName); + this.blockFactory = blockFactory; final var inactiveInterval = settings.getAsTime(INACTIVE_SINKS_INTERVAL_SETTING, TimeValue.timeValueMinutes(5)); this.inactiveSinksReaper = new InactiveSinksReaper(LOGGER, threadPool, this.executor, inactiveInterval); } @@ -250,11 +254,12 @@ protected void runInternal() { * @param remoteNode the node where the remote exchange sink is located */ public RemoteSink newRemoteSink(Task parentTask, String exchangeId, TransportService transportService, DiscoveryNode remoteNode) { - return new TransportRemoteSink(transportService, remoteNode, parentTask, exchangeId, executor); + return new TransportRemoteSink(transportService, blockFactory, remoteNode, parentTask, exchangeId, executor); } record TransportRemoteSink( TransportService transportService, + BlockFactory blockFactory, DiscoveryNode node, Task parentTask, String exchangeId, @@ -269,7 +274,11 @@ public void fetchPageAsync(boolean allSourcesFinished, ActionListener(listener, ExchangeResponse::new, responseExecutor) + new ActionListenerResponseHandler<>( + listener, + in -> new ExchangeResponse(new BlockStreamInput(in, blockFactory)), + responseExecutor + ) ); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicPageTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicPageTests.java index 69d2620d6d863..25aa957e90cff 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicPageTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicPageTests.java @@ -8,12 +8,8 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Iterators; -import org.elasticsearch.common.io.stream.ByteBufferStreamInput; import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.core.Releasables; import org.elasticsearch.test.EqualsHashCodeTestUtils; @@ -190,13 +186,10 @@ public void testSerializationListPages() throws IOException { ); try { EqualsHashCodeTestUtils.checkEqualsAndHashCode(origPages, page -> { - BytesStreamOutput out = new BytesStreamOutput(); - out.writeCollection(origPages); - StreamInput in = new NamedWriteableAwareStreamInput( - ByteBufferStreamInput.wrap(BytesReference.toBytes(out.bytes())), - registry - ); - return in.readCollectionAsList(Page::new); + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeCollection(origPages); + return blockStreamInput(out).readCollectionAsList(Page::new); + } }, null, pages -> Releasables.close(() -> Iterators.map(pages.iterator(), p -> p::releaseBlocks))); } finally { Releasables.close(() -> Iterators.map(origPages.iterator(), p -> p::releaseBlocks)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SerializationTestCase.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SerializationTestCase.java index a5f0421fd9dff..b0666e89cf79e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SerializationTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SerializationTestCase.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; @@ -30,7 +29,7 @@ public abstract class SerializationTestCase extends ESTestCase { BigArrays bigArrays; private BlockFactory blockFactory; - NamedWriteableRegistry registry = new NamedWriteableRegistry(Block.getNamedWriteables(() -> blockFactory)); + NamedWriteableRegistry registry = new NamedWriteableRegistry(Block.getNamedWriteables()); @Before public final void newBlockFactory() { @@ -48,17 +47,22 @@ public final void blockFactoryEmpty() { Page serializeDeserializePage(Page origPage) throws IOException { try (BytesStreamOutput out = new BytesStreamOutput()) { origPage.writeTo(out); - StreamInput in = new NamedWriteableAwareStreamInput(ByteBufferStreamInput.wrap(BytesReference.toBytes(out.bytes())), registry); - return new Page(in); + return new Page(blockStreamInput(out)); } } + BlockStreamInput blockStreamInput(BytesStreamOutput out) { + return new BlockStreamInput( + new NamedWriteableAwareStreamInput(ByteBufferStreamInput.wrap(BytesReference.toBytes(out.bytes())), registry), + blockFactory + ); + } + @SuppressWarnings("unchecked") T serializeDeserializeBlock(T origBlock) throws IOException { try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeNamedWriteable(origBlock); - StreamInput in = new NamedWriteableAwareStreamInput(ByteBufferStreamInput.wrap(BytesReference.toBytes(out.bytes())), registry); - return (T) in.readNamedWriteable(Block.class); + return (T) blockStreamInput(out).readNamedWriteable(Block.class); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index 1ea9db61be8fa..d063883876dfc 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -14,8 +14,10 @@ import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.node.VersionInformation; +import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -31,7 +33,6 @@ import org.elasticsearch.compute.operator.SinkOperator; import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.core.TimeValue; -import 
org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskCancellationService; import org.elasticsearch.test.ESTestCase; @@ -225,17 +226,21 @@ public boolean needsInput() { @Override public void addInput(Page page) { - assertFalse("already finished", finished); - IntBlock block = page.getBlock(0); - for (int i = 0; i < block.getPositionCount(); i++) { - int v = block.getInt(i); - if (v < maxOutputSeqNo) { - assertTrue(receivedSeqNos.add(v)); - // Early termination - if (receivedSeqNos.size() >= maxOutputSeqNo && randomBoolean()) { - finished = true; + try { + assertFalse("already finished", finished); + IntBlock block = page.getBlock(0); + for (int i = 0; i < block.getPositionCount(); i++) { + int v = block.getInt(i); + if (v < maxOutputSeqNo) { + assertTrue(receivedSeqNos.add(v)); + // Early termination + if (receivedSeqNos.size() >= maxOutputSeqNo && randomBoolean()) { + finished = true; + } } } + } finally { + page.releaseBlocks(); } } @@ -353,10 +358,10 @@ public void testEarlyTerminate() { public void testConcurrentWithTransportActions() throws Exception { MockTransportService node0 = newTransportService(); - ExchangeService exchange0 = new ExchangeService(Settings.EMPTY, threadPool, ESQL_TEST_EXECUTOR); + ExchangeService exchange0 = new ExchangeService(Settings.EMPTY, threadPool, ESQL_TEST_EXECUTOR, blockFactory()); exchange0.registerTransportHandler(node0); MockTransportService node1 = newTransportService(); - ExchangeService exchange1 = new ExchangeService(Settings.EMPTY, threadPool, ESQL_TEST_EXECUTOR); + ExchangeService exchange1 = new ExchangeService(Settings.EMPTY, threadPool, ESQL_TEST_EXECUTOR, blockFactory()); exchange1.registerTransportHandler(node1); AbstractSimpleTransportTestCase.connectToNode(node0, node1.getLocalNode()); @@ -372,13 +377,13 @@ public void testConcurrentWithTransportActions() throws Exception { } } - public void testFailToRespondPage() throws Exception { + public void testFailToRespondPage() { Settings settings = Settings.builder().build(); MockTransportService node0 = newTransportService(); - ExchangeService exchange0 = new ExchangeService(settings, threadPool, ESQL_TEST_EXECUTOR); + ExchangeService exchange0 = new ExchangeService(settings, threadPool, ESQL_TEST_EXECUTOR, blockFactory()); exchange0.registerTransportHandler(node0); MockTransportService node1 = newTransportService(); - ExchangeService exchange1 = new ExchangeService(settings, threadPool, ESQL_TEST_EXECUTOR); + ExchangeService exchange1 = new ExchangeService(settings, threadPool, ESQL_TEST_EXECUTOR, blockFactory()); exchange1.registerTransportHandler(node1); AbstractSimpleTransportTestCase.connectToNode(node0, node1.getLocalNode()); final int maxSeqNo = randomIntBetween(1000, 5000); @@ -428,7 +433,7 @@ public void sendResponse(TransportResponse response) throws IOException { private MockTransportService newTransportService() { List namedWriteables = new ArrayList<>(ClusterModule.getNamedWriteables()); - namedWriteables.addAll(Block.getNamedWriteables(BlockFactory::getNonBreakingInstance)); + namedWriteables.addAll(Block.getNamedWriteables()); NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(namedWriteables); MockTransportService service = MockTransportService.createNewService( Settings.EMPTY, @@ -476,13 +481,24 @@ public void sendResponse(Exception exception) throws IOException { } } - /** - * A {@link DriverContext} with a BigArrays that does not circuit break. 
- */ - DriverContext driverContext() { - return new DriverContext( - new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking(), - BlockFactory.getNonBreakingInstance() - ); + private final List breakers = Collections.synchronizedList(new ArrayList<>()); + + private DriverContext driverContext() { + BlockFactory blockFactory = blockFactory(); + return new DriverContext(blockFactory.bigArrays(), blockFactory); + } + + private BlockFactory blockFactory() { + MockBigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)); + CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); + breakers.add(breaker); + return new BlockFactory(breaker, bigArrays); + } + + @After + public void allMemoryReleased() { + for (CircuitBreaker breaker : breakers) { + assertThat(breaker.getUsed(), equalTo(0L)); + } } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java index f3e28d6d6bdb4..f9d97cbd910e0 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.lookup; -import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; @@ -53,7 +52,6 @@ import static org.hamcrest.Matchers.equalTo; -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/99826") public class EnrichLookupIT extends AbstractEsqlIntegTestCase { public void testSimple() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryAction.java index b16b7b78f2eb0..13b5b067f5cc9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryAction.java @@ -15,6 +15,6 @@ public class EsqlQueryAction extends ActionType { public static final String NAME = "indices:data/read/esql"; private EsqlQueryAction() { - super(NAME, EsqlQueryResponse::new); + super(NAME, in -> { throw new IllegalArgumentException("can't transport EsqlQuery"); }); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java index 87d98b0bc61e9..a5194b1695c2c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java @@ -11,11 +11,13 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import 
org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockStreamInput; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleBlock; @@ -80,7 +82,14 @@ public EsqlQueryResponse(List columns, List> values) { this.columnar = false; } - public EsqlQueryResponse(StreamInput in) throws IOException { + /** + * Build a reader for the response. + */ + public static Writeable.Reader reader(BlockFactory blockFactory) { + return in -> new EsqlQueryResponse(new BlockStreamInput(in, blockFactory)); + } + + public EsqlQueryResponse(BlockStreamInput in) throws IOException { super(in); this.columns = in.readCollectionAsList(ColumnInfo::new); this.pages = in.readCollectionAsList(Page::new); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index ca37b498f05ac..06b7874a8708f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockStreamInput; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.ValueSources; @@ -294,7 +295,8 @@ private static class LookupRequest extends TransportRequest implements IndicesRe this.shardId = new ShardId(in); this.matchType = in.readString(); this.matchField = in.readString(); - this.inputPage = new Page(in); + // TODO real BlockFactory + this.inputPage = new Page(new BlockStreamInput(in, BlockFactory.getNonBreakingInstance())); PlanStreamInput planIn = new PlanStreamInput(in, PlanNameRegistry.INSTANCE, in.namedWriteableRegistry(), null); this.extractFields = planIn.readCollectionAsList(readerFromPlanReader(PlanStreamInput::readNamedExpression)); } @@ -364,7 +366,8 @@ private static class LookupResponse extends TransportResponse { } LookupResponse(StreamInput in) throws IOException { - this.page = new Page(in); + // TODO real BlockFactory + this.page = new Page(new BlockStreamInput(in, BlockFactory.getNonBreakingInstance())); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java index 761bddfb1c2e8..2608d4525b153 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java @@ -13,6 +13,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; @@ -59,6 +60,7 @@ import java.util.Collection; import java.util.List; +import java.util.Objects; import java.util.Set; import java.util.function.Supplier; 
import java.util.stream.Stream; @@ -84,8 +86,6 @@ public class EsqlPlugin extends Plugin implements ActionPlugin { Setting.Property.NodeScope ); - private BlockFactoryHolder blockFactoryHolder = new BlockFactoryHolder(); - @Override public Collection createComponents( Client client, @@ -103,10 +103,13 @@ public Collection createComponents( AllocationService allocationService, IndicesService indicesService ) { + CircuitBreaker circuitBreaker = indicesService.getBigArrays().breakerService().getBreaker("request"); + Objects.requireNonNull(circuitBreaker, "request circuit breaker wasn't set"); + BlockFactory blockFactory = new BlockFactory(circuitBreaker, indicesService.getBigArrays()); return List.of( new PlanExecutor(new IndexResolver(client, clusterService.getClusterName().value(), EsqlDataTypeRegistry.INSTANCE, Set::of)), - new ExchangeService(clusterService.getSettings(), threadPool, EsqlPlugin.ESQL_THREAD_POOL_NAME), - blockFactoryHolder + new ExchangeService(clusterService.getSettings(), threadPool, EsqlPlugin.ESQL_THREAD_POOL_NAME, blockFactory), + blockFactory ); } @@ -157,7 +160,7 @@ public List getNamedWriteables() { ValuesSourceReaderOperator.Status.ENTRY, SingleValueQuery.ENTRY ).stream(), - Block.getNamedWriteables(blockFactoryHolder).stream() + Block.getNamedWriteables().stream() ).toList(); } @@ -185,13 +188,4 @@ public List> getExecutorBuilders(Settings settings) { ) ); } - - static class BlockFactoryHolder implements Supplier { - BlockFactory blockFactory; - - @Override - public BlockFactory get() { - return blockFactory; - } - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java index 79e1183b3cbb7..2e51ae27f4851 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; @@ -38,7 +37,6 @@ import java.time.ZoneOffset; import java.util.List; import java.util.Locale; -import java.util.Objects; import java.util.concurrent.Executor; public class TransportEsqlQueryAction extends HandledTransportAction { @@ -63,7 +61,7 @@ public TransportEsqlQueryAction( ClusterService clusterService, ThreadPool threadPool, BigArrays bigArrays, - EsqlPlugin.BlockFactoryHolder blockFactoryHolder + BlockFactory blockFactory ) { // TODO replace SAME when removing workaround for https://github.com/elastic/elasticsearch/issues/97916 super(EsqlQueryAction.NAME, transportService, actionFilters, EsqlQueryRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); @@ -72,7 +70,6 @@ public TransportEsqlQueryAction( this.requestExecutor = threadPool.executor(EsqlPlugin.ESQL_THREAD_POOL_NAME); exchangeService.registerTransportHandler(transportService); this.exchangeService = exchangeService; - var blockFactory = createBlockFactory(bigArrays); this.enrichPolicyResolver = new EnrichPolicyResolver(clusterService, transportService, planExecutor.indexResolver()); this.enrichLookupService = new 
EnrichLookupService(clusterService, searchService, transportService, bigArrays, blockFactory); this.computeService = new ComputeService( @@ -85,20 +82,6 @@ public TransportEsqlQueryAction( blockFactory ); this.settings = settings; - /* - * This hacks the block factory into a shared place where - * it can be used for deserialization. We'd prefer a less - * strange way to do it, but Plugin doesn't give us access - * to BigArrays, which we need to build the BlockFactory - * up front. - */ - blockFactoryHolder.blockFactory = blockFactory; - } - - static BlockFactory createBlockFactory(BigArrays bigArrays) { - CircuitBreaker circuitBreaker = bigArrays.breakerService().getBreaker("request"); - Objects.requireNonNull(circuitBreaker, "request circuit breaker wasn't set"); - return new BlockFactory(circuitBreaker, bigArrays); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index 68240ea838dc5..7920e0575fd89 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -60,7 +60,7 @@ public void blockFactoryEmpty() { @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { - return new NamedWriteableRegistry(Block.getNamedWriteables(() -> blockFactory)); + return new NamedWriteableRegistry(Block.getNamedWriteables()); } @Override @@ -161,7 +161,7 @@ private List deepCopyOfPages(EsqlQueryResponse response) { @Override protected Writeable.Reader instanceReader() { - return EsqlQueryResponse::new; + return EsqlQueryResponse.reader(blockFactory); } @Override From 63e64731f3375e985f1439718a65eaaaab6538da Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 4 Oct 2023 15:58:37 -0700 Subject: [PATCH 6/6] Fix tests --- .../operator/exchange/ExchangeServiceTests.java | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index d063883876dfc..78042a8587350 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -398,18 +398,25 @@ public void messageReceived( ) throws Exception { FilterTransportChannel filterChannel = new FilterTransportChannel(channel) { @Override - public void sendResponse(TransportResponse response) throws IOException { - ExchangeResponse exchangeResponse = (ExchangeResponse) response; - Page page = exchangeResponse.takePage(); + public void sendResponse(TransportResponse transportResponse) throws IOException { + ExchangeResponse origResp = (ExchangeResponse) transportResponse; + Page page = origResp.takePage(); if (page != null) { IntBlock block = page.getBlock(0); for (int i = 0; i < block.getPositionCount(); i++) { if (block.getInt(i) == disconnectOnSeqNo) { + page.releaseBlocks(); throw new IOException("page is too large"); } } } - super.sendResponse(response); + ExchangeResponse newResp = new ExchangeResponse(page, origResp.finished()); + origResp.decRef(); + while (origResp.hasReferences()) { + newResp.incRef(); + 
origResp.decRef(); + } + super.sendResponse(newResp); } }; handler.messageReceived(request, filterChannel, task);
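
For reference, the deserialization pattern these hunks converge on is: wrap the raw StreamInput in a BlockStreamInput that carries a BlockFactory, then hand that wrapper to the Page and Block readers so the deserialized blocks are tracked. A minimal round-trip sketch, modelled on the blockStreamInput(...) helper added to SerializationTestCase above — the class and method names here are illustrative, and the registry/blockFactory arguments are assumed to be built the same way that test class builds them:

import java.io.IOException;

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.ByteBufferStreamInput;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.BlockStreamInput;
import org.elasticsearch.compute.data.Page;

class PageRoundTripSketch {
    /** Serialize a Page and read it back so its blocks are tracked by {@code blockFactory}. */
    static Page roundTrip(Page origPage, NamedWriteableRegistry registry, BlockFactory blockFactory) throws IOException {
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            origPage.writeTo(out);
            // Wrap the serialized bytes in a StreamInput that resolves named writeables...
            StreamInput raw = new NamedWriteableAwareStreamInput(
                ByteBufferStreamInput.wrap(BytesReference.toBytes(out.bytes())),
                registry
            );
            // ...and in a BlockStreamInput so the Block readers allocate through the tracked factory.
            return new Page(new BlockStreamInput(raw, blockFactory));
        }
    }
}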