diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java
index 3c5bf2c18c915..b8ea7658a8247 100644
--- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java
+++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java
@@ -8,12 +8,9 @@ package org.elasticsearch.compute.aggregation.blockhash;
 
 import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.common.io.stream.BytesStreamOutput;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.BitArray;
-import org.elasticsearch.common.util.BytesRefArray;
 import org.elasticsearch.common.util.BytesRefHash;
 import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction;
 import org.elasticsearch.compute.aggregation.SeenGroupIds;
@@ -30,8 +27,6 @@ import org.elasticsearch.compute.operator.mvdedupe.MultivalueDedupeInt;
 import org.elasticsearch.core.ReleasableIterator;
 
-import java.io.IOException;
-
 /**
  * Maps a {@link BytesRefBlock} column to group ids.
  * This class is generated. Do not edit it.
@@ -197,26 +192,21 @@ public BytesRefBlock[] getKeys() {
          * without and still read from the block.
          */
         // TODO replace with takeBytesRefsOwnership ?!
+        final BytesRef spare = new BytesRef();
         if (seenNull) {
             try (var builder = blockFactory.newBytesRefBlockBuilder(Math.toIntExact(hash.size() + 1))) {
                 builder.appendNull();
-                BytesRef spare = new BytesRef();
                 for (long i = 0; i < hash.size(); i++) {
                     builder.appendBytesRef(hash.get(i, spare));
                 }
                 return new BytesRefBlock[] { builder.build() };
             }
         }
-
-        final int size = Math.toIntExact(hash.size());
-        try (BytesStreamOutput out = new BytesStreamOutput()) {
-            hash.getBytesRefs().writeTo(out);
-            try (StreamInput in = out.bytes().streamInput()) {
-                return new BytesRefBlock[] {
-                    blockFactory.newBytesRefArrayVector(new BytesRefArray(in, BigArrays.NON_RECYCLING_INSTANCE), size).asBlock() };
+        try (var builder = blockFactory.newBytesRefBlockBuilder(Math.toIntExact(hash.size()))) {
+            for (long i = 0; i < hash.size(); i++) {
+                builder.appendBytesRef(hash.get(i, spare));
             }
-        } catch (IOException e) {
-            throw new IllegalStateException(e);
+            return new BytesRefBlock[] { builder.build() };
         }
     }
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st
index 7c21cff56d7bb..2a3d1143236ac 100644
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/X-BlockHash.java.st
@@ -9,15 +9,10 @@ package org.elasticsearch.compute.aggregation.blockhash;
 
 $if(BytesRef)$
 import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.common.io.stream.BytesStreamOutput;
-import org.elasticsearch.common.io.stream.StreamInput;
 $endif$
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.BitArray;
-$if(BytesRef)$
-import org.elasticsearch.common.util.BytesRefArray;
-$endif$
 import org.elasticsearch.common.util.$Hash$;
 import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction;
 import org.elasticsearch.compute.aggregation.SeenGroupIds;
@@ -58,8 +53,6 @@ $endif$
 import org.elasticsearch.core.ReleasableIterator;
 
 $if(BytesRef)$
-import java.io.IOException;
-
 $else$
 import java.util.BitSet;
@@ -250,26 +243,21 @@ $if(BytesRef)$
          * without and still read from the block.
          */
         // TODO replace with takeBytesRefsOwnership ?!
+        final BytesRef spare = new BytesRef();
         if (seenNull) {
             try (var builder = blockFactory.newBytesRefBlockBuilder(Math.toIntExact(hash.size() + 1))) {
                 builder.appendNull();
-                BytesRef spare = new BytesRef();
                 for (long i = 0; i < hash.size(); i++) {
                     builder.appendBytesRef(hash.get(i, spare));
                 }
                 return new BytesRefBlock[] { builder.build() };
             }
         }
-
-        final int size = Math.toIntExact(hash.size());
-        try (BytesStreamOutput out = new BytesStreamOutput()) {
-            hash.getBytesRefs().writeTo(out);
-            try (StreamInput in = out.bytes().streamInput()) {
-                return new BytesRefBlock[] {
-                    blockFactory.newBytesRefArrayVector(new BytesRefArray(in, BigArrays.NON_RECYCLING_INSTANCE), size).asBlock() };
+        try (var builder = blockFactory.newBytesRefBlockBuilder(Math.toIntExact(hash.size()))) {
+            for (long i = 0; i < hash.size(); i++) {
+                builder.appendBytesRef(hash.get(i, spare));
             }
-        } catch (IOException e) {
-            throw new IllegalStateException(e);
+            return new BytesRefBlock[] { builder.build() };
         }
 $else$
         if (seenNull) {