Merge #71892
71892: bench: fix colserde benchmarks r=cucaroach a=cucaroach

An optimization to aid the Go GC broke the Serialize and ArrowToBatch
benchmarks; fix them by restoring the nil'd-out arrays using a shallow
copy.

Fixes: #71886

Release note: None


Co-authored-by: Tommy Reilly <[email protected]>
craig[bot] and cucaroach committed Oct 25, 2021
2 parents 7c50de2 + 757e898 commit dbf5853
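
As an editorial illustration of the pattern this commit applies (not code from the CockroachDB repository; the names nilOutInput and BenchmarkWithShallowCopy are hypothetical), here is a minimal sketch of a benchmark whose function under test nils out its input slice to help the GC and therefore needs a fresh shallow copy on every iteration:

package example

import "testing"

// nilOutInput stands in for Serialize/ArrowToBatch: it consumes the slice and
// nils out each element so the garbage collector can reclaim the backing
// memory sooner (hypothetical helper, for illustration only).
func nilOutInput(data []*int64) int64 {
    var sum int64
    for i := range data {
        sum += *data[i]
        data[i] = nil
    }
    return sum
}

// BenchmarkWithShallowCopy restores the nil'd-out elements before every
// iteration by copying the pointers from a pristine slice, so each call to
// nilOutInput sees valid input. The copy is shallow and cheap, so it does not
// meaningfully distort the measurement.
func BenchmarkWithShallowCopy(b *testing.B) {
    data := make([]*int64, 1024)
    for i := range data {
        v := int64(i)
        data[i] = &v
    }
    dataCopy := make([]*int64, len(data))
    b.ResetTimer()
    for i := 0; i < b.N; i++ {
        copy(dataCopy, data)
        if nilOutInput(dataCopy) < 0 {
            b.Fatal("unexpected sum")
        }
    }
}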
Showing 2 changed files with 10 additions and 2 deletions.
pkg/col/colserde/arrowbatchconverter_test.go (5 additions & 1 deletion)

@@ -224,15 +224,19 @@ func BenchmarkArrowBatchConverter(b *testing.B) {
 	for _, nullFraction := range nullFractions {
 		setNullFraction(batch, nullFraction)
 		data, err := c.BatchToArrow(batch)
+		dataCopy := make([]*array.Data, len(data))
 		require.NoError(b, err)
 		testPrefix := fmt.Sprintf("%s/nullFraction=%0.2f", typ.String(), nullFraction)
 		result := testAllocator.NewMemBatchWithMaxCapacity([]*types.T{typ})
 		b.Run(testPrefix+"/ArrowToBatch", func(b *testing.B) {
 			b.SetBytes(numBytes[typIdx])
 			for i := 0; i < b.N; i++ {
+				// Since ArrowToBatch eagerly nils things out, we have to make a
+				// shallow copy each time.
+				copy(dataCopy, data)
 				// Using require.NoError here causes large enough allocations to
 				// affect the result.
-				if err := c.ArrowToBatch(data, batch.Length(), result); err != nil {
+				if err := c.ArrowToBatch(dataCopy, batch.Length(), result); err != nil {
 					b.Fatal(err)
 				}
 				if result.Width() != 1 {
pkg/col/colserde/record_batch_test.go (5 additions & 1 deletion)

@@ -369,11 +369,15 @@ func BenchmarkRecordBatchSerializerInt64(b *testing.B) {
 		// Only calculate useful bytes.
 		numBytes := int64(dataLen * 8)
 		data := []*array.Data{randomDataFromType(rng, typs[0], dataLen, 0 /* nullProbability */)}
+		dataCopy := make([]*array.Data, len(data))
 		b.Run(fmt.Sprintf("Serialize/dataLen=%d", dataLen), func(b *testing.B) {
 			b.SetBytes(numBytes)
 			for i := 0; i < b.N; i++ {
 				buf.Reset()
-				if _, _, err := s.Serialize(&buf, data, dataLen); err != nil {
+				// Since Serialize eagerly nils things out, we have to make a shallow
+				// copy each time.
+				copy(dataCopy, data)
+				if _, _, err := s.Serialize(&buf, dataCopy, dataLen); err != nil {
 					b.Fatal(err)
 				}
 			}
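
Editorial note on the shape of the fix: the copy(dataCopy, data) call sits inside the timed loop because Serialize and ArrowToBatch nil out the slice on every call, so the input has to be restored before every iteration. Only the slice of *array.Data pointers is copied, not the buffers they point to, so the per-iteration cost is small; by contrast, as the existing comment notes, using require.NoError inside the loop allocates enough to skew the results, which is why the plain if err != nil { b.Fatal(err) } form is kept.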
