Skip to content

Commit

Permalink
Replace BytesRefHash and clean up alternative implementations
Browse files Browse the repository at this point in the history
Signed-off-by: Ketan Verma <[email protected]>
  • Loading branch information
ketanv3 committed Jul 30, 2023
1 parent a8c07f9 commit 8870419
Show file tree
Hide file tree
Showing 11 changed files with 280 additions and 950 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Exclude 'benchmarks' from codecov report ([#8805](https://github.com/opensearch-project/OpenSearch/pull/8805))
- [Refactor] MediaTypeParser to MediaTypeParserRegistry ([#8636](https://github.com/opensearch-project/OpenSearch/pull/8636))
- Create separate SourceLookup instance per segment slice in SignificantTextAggregatorFactory ([#8807](https://github.com/opensearch-project/OpenSearch/pull/8807))
- Performance improvements for BytesRefHash ([#8788](https://github.com/opensearch-project/OpenSearch/pull/8788))

### Deprecated

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@

package org.opensearch.common.util;

import net.openhft.hashing.LongHashFunction;
import org.apache.lucene.util.BytesRef;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
Expand All @@ -23,7 +22,6 @@
import org.openjdk.jmh.annotations.TearDown;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
import org.opensearch.common.lease.Releasable;
import org.opensearch.common.lease.Releasables;

import java.util.HashSet;
Expand All @@ -32,7 +30,7 @@
import java.util.concurrent.TimeUnit;
import java.util.stream.Stream;

@Fork(value = 5)
@Fork(value = 3)
@Warmup(iterations = 1, time = 2)
@Measurement(iterations = 3, time = 5)
@BenchmarkMode(Mode.AverageTime)
Expand All @@ -45,17 +43,14 @@ public class BytesRefHashBenchmark {
public void add(Blackhole bh, Options opts) {
for (int hit = 0; hit < NUM_HITS; hit++) {
BytesRef key = opts.keys[hit % opts.keys.length];
for (HashTable table : opts.tables) {
for (BytesRefHash table : opts.tables) {
bh.consume(table.add(key));
}
}
}

@State(Scope.Benchmark)
public static class Options {
@Param({ "baseline", "compact", "reorganizing" })
public String type;

@Param({
"1",
"2",
Expand Down Expand Up @@ -163,14 +158,16 @@ public static class Options {
@Param({ "8", "32", "128" })
public Integer length;

private HashTable[] tables;
private BytesRefHash[] tables;

private BytesRef[] keys;

@Setup
public void setup() {
assert size <= Math.pow(26, length) : "key length too small to generate the required number of keys";
tables = Stream.generate(this::newHashTable).limit(NUM_TABLES).toArray(HashTable[]::new);
tables = Stream.generate(() -> new BytesRefHash(BigArrays.NON_RECYCLING_INSTANCE))
.limit(NUM_TABLES)
.toArray(BytesRefHash[]::new);
Random random = new Random(0);
Set<BytesRef> seen = new HashSet<>();
keys = new BytesRef[size];
Expand All @@ -193,68 +190,5 @@ public void setup() {
public void tearDown() {
Releasables.close(tables);
}

private HashTable newHashTable() {
switch (type) {
case "baseline":
return new HashTable() {
private final BytesRefHash table = new BytesRefHash(1, 0.6f, BigArrays.NON_RECYCLING_INSTANCE);

@Override
public long add(BytesRef key) {
return table.add(key);
}

@Override
public void close() {
table.close();
}
};
case "compact":
return new HashTable() {
private final CompactBytesRefHash table = new CompactBytesRefHash(
1,
0.6f,
key -> LongHashFunction.xx3().hashBytes(key.bytes, key.offset, key.length),
BigArrays.NON_RECYCLING_INSTANCE
);

@Override
public long add(BytesRef key) {
return table.add(key);
}

@Override
public void close() {
table.close();
}
};
case "reorganizing":
return new HashTable() {
private final ReorganizingBytesRefHash table = new ReorganizingBytesRefHash(
1,
0.6f,
key -> LongHashFunction.xx3().hashBytes(key.bytes, key.offset, key.length),
BigArrays.NON_RECYCLING_INSTANCE
);

@Override
public long add(BytesRef key) {
return table.add(key);
}

@Override
public void close() {
table.close();
}
};
default:
throw new IllegalArgumentException("invalid hash table type: " + type);
}
}
}

private interface HashTable extends Releasable {
long add(BytesRef key);
}
}
Loading

0 comments on commit 8870419

Please sign in to comment.