Skip to content

Commit

Permalink
Remove illegal overload version for hash_counts function
Browse files Browse the repository at this point in the history
The functions:

- hash_counts(setdigest):varchar
- hash_counts(setdigest):map(bigint,smallint)

The two overloads differ only in their return type.
This makes overload resolution impossible, because function binding is determined solely by the argument types, which are identical for both overloads.
  • Loading branch information
findinpath authored and martint committed Jun 11, 2021
1 parent 8d6e113 commit a06b507
Show file tree
Hide file tree
Showing 2 changed files with 22 additions and 34 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -14,11 +14,7 @@

package io.trino.type.setdigest;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.airlift.json.ObjectMapperProvider;
import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
import io.trino.spi.block.Block;
import io.trino.spi.block.BlockBuilder;
import io.trino.spi.function.ScalarFunction;
Expand All @@ -27,7 +23,6 @@
import io.trino.spi.type.StandardTypes;
import io.trino.spi.type.Type;

import java.io.UncheckedIOException;
import java.util.Map;

import static io.trino.spi.type.BigintType.BIGINT;
Expand All @@ -36,8 +31,6 @@

public final class SetDigestFunctions
{
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapperProvider().get();

private SetDigestFunctions()
{
}
Expand Down Expand Up @@ -100,18 +93,4 @@ public static Block hashCounts(@TypeParameter("map(bigint,smallint)") Type mapTy

return (Block) mapType.getObject(blockBuilder, 0);
}

// Removed overload: hash_counts(setdigest) -> varchar.
// Serializes the digest's hash-count map as a JSON string. This version is
// illegal alongside hash_counts(setdigest):map(bigint,smallint) because the
// two overloads differ only in return type, and Trino resolves functions by
// argument types alone.
@ScalarFunction
@SqlType(StandardTypes.VARCHAR)
public static Slice hashCounts(@SqlType(SetDigestType.NAME) Slice slice)
{
// Deserialize the digest from its binary (Slice) representation.
SetDigest digest = SetDigest.newInstance(slice);

try {
// getHashCounts() presumably yields a Map<Long, Short> of hash -> count
// (see the map(bigint,smallint) overload) — serialized here as JSON text.
return Slices.utf8Slice(OBJECT_MAPPER.writeValueAsString(digest.getHashCounts()));
}
catch (JsonProcessingException e) {
// Jackson's checked exception is an IOException subtype; rethrow unchecked
// since a serialization failure here is a programming error, not recoverable.
throw new UncheckedIOException(e);
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -14,20 +14,23 @@

package io.trino.type.setdigest;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableSet;
import io.airlift.json.ObjectMapperProvider;
import io.airlift.slice.Slice;
import io.trino.spi.block.Block;
import io.trino.spi.block.SingleMapBlock;
import io.trino.spi.type.MapType;
import io.trino.spi.type.TypeOperators;
import org.testng.annotations.Test;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;

import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.SmallintType.SMALLINT;
import static io.trino.type.setdigest.SetDigest.DEFAULT_MAX_HASHES;
import static io.trino.type.setdigest.SetDigest.NUMBER_OF_BUCKETS;
import static io.trino.type.setdigest.SetDigestFunctions.hashCounts;
Expand Down Expand Up @@ -89,7 +92,6 @@ private static void testIntersectionCardinality(int maxHashes1, int numBuckets1,

@Test
public void testHashCounts()
throws Exception
{
SetDigest digest1 = new SetDigest();
digest1.add(0);
Expand All @@ -102,18 +104,25 @@ public void testHashCounts()
digest2.add(2);
digest2.add(2);

ObjectMapper mapper = new ObjectMapperProvider().get();

Slice slice = hashCounts(digest1.serialize());
Map<Long, Short> counts = mapper.readValue(slice.toStringUtf8(), new TypeReference<>() {});
MapType mapType = new MapType(BIGINT, SMALLINT, new TypeOperators());
Block block = hashCounts(mapType, digest1.serialize());
assertTrue(block instanceof SingleMapBlock);
Set<Short> blockValues = new HashSet<>();
for (int i = 1; i < block.getPositionCount(); i += 2) {
blockValues.add(block.getShort(i, 0));
}
Set<Short> expected = ImmutableSet.of((short) 1, (short) 2);
assertEquals(counts.values(), expected);
assertEquals(blockValues, expected);

digest1.mergeWith(digest2);
slice = hashCounts(digest1.serialize());
counts = mapper.readValue(slice.toStringUtf8(), new TypeReference<>() {});
block = hashCounts(mapType, digest1.serialize());
assertTrue(block instanceof SingleMapBlock);
expected = ImmutableSet.of((short) 1, (short) 2, (short) 4);
assertEquals(ImmutableSet.copyOf(counts.values()), expected);
blockValues = new HashSet<>();
for (int i = 1; i < block.getPositionCount(); i += 2) {
blockValues.add(block.getShort(i, 0));
}
assertEquals(blockValues, expected);
}

@Test
Expand Down

0 comments on commit a06b507

Please sign in to comment.