internal/metamorphic: add block-property filter coverage
Add test coverage for block-property filters to the metamorphic tests.
The division of keys into blocks is nondeterministic, so block-property
filtering introduces nondeterminism into Iterator behavior. To account for
this, the metamorphic test's iterator wrapper is adjusted to skip any keys
that are eligible for filtering.

Fix cockroachdb#1725.
jbowens committed Jun 3, 2022
1 parent 0468c50 commit 5f4085a
Showing 6 changed files with 241 additions and 15 deletions.
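The iterator wrapper's skip check itself is not shown on this page; the following is a minimal sketch of how such a check might be written, reusing the testkeys suffix parsing that the suffixIntervalCollector in options.go below relies on. The testIter receiver and the shouldSkip name are illustrative assumptions, not the committed code.

// shouldSkip reports whether the iterator wrapper should step past a key
// because its suffix falls outside the configured [filterMin, filterMax)
// range. A filterMax of zero means filtering is disabled.
func (i *testIter) shouldSkip(userKey []byte) bool {
	if i.filterMax == 0 {
		return false // no filter configured for this iterator
	}
	n := testkeys.Comparer.Split(userKey)
	if n == len(userKey) {
		return false // unsuffixed keys are never filtered
	}
	ts, err := testkeys.ParseSuffix(userKey[n:])
	if err != nil {
		return false
	}
	// Keys with suffixes outside [filterMin, filterMax) may or may not be
	// surfaced by the block-property filter, so skip them explicitly to keep
	// iteration deterministic.
	return uint64(ts) < i.filterMin || uint64(ts) >= i.filterMax
}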
18 changes: 18 additions & 0 deletions internal/metamorphic/generator.go
@@ -497,12 +497,30 @@ func (g *generator) newIter() {
lower, upper = upper, lower
}
keyTypes, maskSuffix := g.randKeyTypesAndMask()

// With a low probability, enable automatic filtering of keys with suffixes
// not in the provided range. This filtering occurs both through
// block-property filtering and explicitly within the iterator operations to
// ensure determinism.
var filterMin, filterMax uint64
if g.rng.Intn(10) == 1 {
max := g.cfg.writeSuffixDist.Max()
filterMin, filterMax = g.rng.Uint64n(max)+1, g.rng.Uint64n(max)+1
if filterMin > filterMax {
filterMin, filterMax = filterMax, filterMin
} else if filterMin == filterMax {
filterMax = filterMin + 1
}
}

g.add(&newIterOp{
readerID: readerID,
iterID: iterID,
lower: lower,
upper: upper,
keyTypes: keyTypes,
filterMin: filterMin,
filterMax: filterMax,
rangeKeyMaskSuffix: maskSuffix,
})
}
48 changes: 42 additions & 6 deletions internal/metamorphic/ops.go
@@ -569,6 +569,17 @@ type newIterOp struct {
upper []byte
keyTypes uint32 // pebble.IterKeyType

// If filterMax is >0, this iterator will filter out any keys that have
// suffixes that don't fall within the range [filterMin,filterMax).
// Additionally, the iterator will be constructed with a block-property
// filter that filters out blocks accordingly. Not all OPTIONS hook up the
// corresponding block property collector, so block-filtering may still be
	// effectively disabled in some runs. The iterator operations themselves,
	// however, will always skip past any points that should be filtered to
// ensure determinism.
filterMin uint64
filterMax uint64

// rangeKeyMaskSuffix may be set if keyTypes is IterKeyTypePointsAndRanges
// to configure IterOptions.RangeKeyMasking.Suffix.
rangeKeyMaskSuffix []byte
@@ -591,6 +602,12 @@ func (o *newIterOp) run(t *test, h *history) {
Suffix: o.rangeKeyMaskSuffix,
},
}
if o.filterMax > 0 {
opts.PointKeyFilters = []pebble.BlockPropertyFilter{
newBlockPropertyFilter(o.filterMin, o.filterMax),
}
}

var i *pebble.Iterator
for {
i = r.NewIter(opts)
@@ -600,7 +617,7 @@
// close this iter and retry NewIter
_ = i.Close()
}
t.setIter(o.iterID, i)
t.setIter(o.iterID, i, o.filterMin, o.filterMax)

// Trash the bounds to ensure that Pebble doesn't rely on the stability of
// the user-provided bounds.
@@ -611,8 +628,8 @@ func (o *newIterOp) run(t *test, h *history) {
}

func (o *newIterOp) String() string {
return fmt.Sprintf("%s = %s.NewIter(%q, %q, %d /* key types */, %q /* masking suffix */)",
o.iterID, o.readerID, o.lower, o.upper, o.keyTypes, o.rangeKeyMaskSuffix)
return fmt.Sprintf("%s = %s.NewIter(%q, %q, %d /* key types */, %d, %d, %q /* masking suffix */)",
o.iterID, o.readerID, o.lower, o.upper, o.keyTypes, o.filterMin, o.filterMax, o.rangeKeyMaskSuffix)
}

// newIterUsingCloneOp models an Iterator.Clone operation.
@@ -627,7 +644,7 @@ func (o *newIterUsingCloneOp) run(t *test, h *history) {
if err != nil {
panic(err)
}
t.setIter(o.iterID, i)
t.setIter(o.iterID, i, iter.filterMin, iter.filterMax)
h.Recordf("%s // %v", o, i.Error())
}

@@ -672,6 +689,17 @@ type iterSetOptionsOp struct {
upper []byte
keyTypes uint32 // pebble.IterKeyType

// If filterMax is >0, this iterator will filter out any keys that have
// suffixes that don't fall within the range [filterMin,filterMax).
// Additionally, the iterator will be constructed with a block-property
// filter that filters out blocks accordingly. Not all OPTIONS hook up the
// corresponding block property collector, so block-filtering may still be
	// effectively disabled in some runs. The iterator operations themselves,
	// however, will always skip past any points that should be filtered to
// ensure determinism.
filterMin uint64
filterMax uint64

// rangeKeyMaskSuffix may be set if keyTypes is IterKeyTypePointsAndRanges
// to configure IterOptions.RangeKeyMasking.Suffix.
rangeKeyMaskSuffix []byte
@@ -695,6 +723,11 @@ func (o *iterSetOptionsOp) run(t *test, h *history) {
Suffix: o.rangeKeyMaskSuffix,
},
}
if o.filterMax > 0 {
opts.PointKeyFilters = []pebble.BlockPropertyFilter{
newBlockPropertyFilter(o.filterMin, o.filterMax),
}
}

i.SetOptions(opts)

@@ -703,12 +736,15 @@ func (o *iterSetOptionsOp) run(t *test, h *history) {
rand.Read(lower[:])
rand.Read(upper[:])

// Adjust the iterator's filters.
i.filterMin, i.filterMax = o.filterMin, o.filterMax

h.Recordf("%s // %v", o, i.Error())
}

func (o *iterSetOptionsOp) String() string {
return fmt.Sprintf("%s.SetOptions(%q, %q, %d /* key types */, %q /* masking suffix */)",
o.iterID, o.lower, o.upper, o.keyTypes, o.rangeKeyMaskSuffix)
return fmt.Sprintf("%s.SetOptions(%q, %q, %d /* key types */, %d, %d, %q /* masking suffix */)",
o.iterID, o.lower, o.upper, o.keyTypes, o.filterMin, o.filterMax, o.rangeKeyMaskSuffix)
}

// iterSeekGEOp models an Iterator.SeekGE[WithLimit] operation.
94 changes: 94 additions & 0 deletions internal/metamorphic/options.go
@@ -7,13 +7,16 @@ package metamorphic
import (
"bytes"
"fmt"
"math"
"os"
"path/filepath"

"github.com/cockroachdb/pebble"
"github.com/cockroachdb/pebble/bloom"
"github.com/cockroachdb/pebble/internal/base"
"github.com/cockroachdb/pebble/internal/cache"
"github.com/cockroachdb/pebble/internal/testkeys"
"github.com/cockroachdb/pebble/sstable"
"github.com/cockroachdb/pebble/vfs"
"golang.org/x/exp/rand"
)
@@ -49,6 +52,10 @@ func parseOptions(opts *testOptions, data string) error {
case "TestOptions.initial_state_path":
opts.initialStatePath = value
return true
case "TestOptions.use_block_property_filter":
opts.useBlockPropertyCollector = true
opts.opts.BlockPropertyCollectors = blockPropertyCollectorConstructors
return true
default:
return false
}
@@ -78,6 +85,9 @@ func optionsToString(opts *testOptions) string {
if opts.initialStateDesc != "" {
fmt.Fprintf(&buf, " initial_state_desc=%s\n", opts.initialStateDesc)
}
if opts.useBlockPropertyCollector {
fmt.Fprintf(&buf, " use_block_property_filter=%t\n", opts.useBlockPropertyCollector)
}

s := opts.opts.String()
if buf.Len() == 0 {
@@ -113,6 +123,9 @@ type testOptions struct {
// A human-readable string describing the initial state of the database.
// Empty if the test run begins from an empty database state.
initialStateDesc string
// Use a block property collector, which may be used by block property
// filters.
useBlockPropertyCollector bool
}

func standardOptions() []*testOptions {
@@ -212,6 +225,10 @@ func standardOptions() []*testOptions {
[Options]
max_writer_concurrency=2
force_writer_parallelism=true
`,
23: `
[TestOptions]
use_block_property_filter=true
`,
}

@@ -282,6 +299,10 @@ func randomOptions(rng *rand.Rand) *testOptions {
}
testOpts.ingestUsingApply = rng.Intn(2) != 0
testOpts.replaceSingleDelete = rng.Intn(2) != 0
testOpts.useBlockPropertyCollector = rng.Intn(2) != 0
if testOpts.useBlockPropertyCollector {
testOpts.opts.BlockPropertyCollectors = blockPropertyCollectorConstructors
}
return testOpts
}

@@ -340,3 +361,76 @@ func moveLogs(fs vfs.FS, srcDir, dstDir string) error {
}
return nil
}

const blockPropertyCollectorName = `pebble.internal.metamorphic.suffixes`

var blockPropertyCollectorConstructors = []func() pebble.BlockPropertyCollector{
func() pebble.BlockPropertyCollector {
return sstable.NewBlockIntervalCollector(
blockPropertyCollectorName,
&suffixIntervalCollector{},
nil)
},
}

// newBlockPropertyFilter constructs a new block-property filter that ignores
// blocks containing exclusively suffixed keys where all the suffixes fall
// outside of the range [filterMin, filterMax).
//
// The filter only filters based on data derived from the key. The iteration
// results of this block property filter are deterministic for unsuffixed keys
// and keys with suffixes within the range [filterMin, filterMax). For keys with
// suffixes outside the range, iteration is nondeterministic. To accommodate
// this, the metamorphic test iterators automatically skip keys outside the
// configured filter span.
func newBlockPropertyFilter(filterMin, filterMax uint64) pebble.BlockPropertyFilter {
return sstable.NewBlockIntervalFilter(blockPropertyCollectorName, filterMin, filterMax)
}

var _ sstable.DataBlockIntervalCollector = (*suffixIntervalCollector)(nil)

// suffixIntervalCollector maintains an interval over the timestamps in
// MVCC-like suffixes for keys (e.g. foo@123).
type suffixIntervalCollector struct {
initialized bool
lower, upper uint64
}

// Add implements DataBlockIntervalCollector by adding the timestamp(s) in the
// suffix(es) of this record to the current interval.
//
// Note that range sets and unsets may have multiple suffixes. Range key deletes
// do not have a suffix. All other point keys have a single suffix.
func (c *suffixIntervalCollector) Add(key base.InternalKey, value []byte) error {
i := testkeys.Comparer.Split(key.UserKey)
if i == len(key.UserKey) {
c.initialized = true
c.lower, c.upper = 0, math.MaxUint64
return nil
}
ts, err := testkeys.ParseSuffix(key.UserKey[i:])
if err != nil {
return err
}
uts := uint64(ts)
if !c.initialized {
c.lower, c.upper = uts, uts+1
c.initialized = true
return nil
}
if uts < c.lower {
c.lower = uts
}
if uts >= c.upper {
c.upper = uts + 1
}
return nil
}

// FinishDataBlock implements DataBlockIntervalCollector.
func (c *suffixIntervalCollector) FinishDataBlock() (lower, upper uint64, err error) {
l, u := c.lower, c.upper
c.lower, c.upper = 0, 0
c.initialized = false
return l, u, nil
}
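To make the interval semantics concrete, here is a small illustrative helper that is not part of the commit; it assumes it lives in the metamorphic package (suffixIntervalCollector is unexported) and uses the existing base.MakeInternalKey helper to construct keys. Feeding the suffixed keys bar@2, baz@9, and foo@5 through the collector yields the half-open interval [2, 10).

// exampleSuffixInterval exercises the collector the way a data block would,
// then finishes the block to read back the interval it reports.
func exampleSuffixInterval() (lower, upper uint64) {
	c := &suffixIntervalCollector{}
	for _, k := range []string{"bar@2", "baz@9", "foo@5"} {
		ik := base.MakeInternalKey([]byte(k), 0, base.InternalKeyKindSet)
		if err := c.Add(ik, nil); err != nil {
			panic(err)
		}
	}
	lower, upper, _ = c.FinishDataBlock()
	return lower, upper // 2, 10
}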
12 changes: 10 additions & 2 deletions internal/metamorphic/parser.go
@@ -77,7 +77,7 @@ func opArgs(op op) (receiverID *objID, targetID *objID, args []interface{}) {
case *newIndexedBatchOp:
return nil, &t.batchID, nil
case *newIterOp:
return &t.readerID, &t.iterID, []interface{}{&t.lower, &t.upper, &t.keyTypes, &t.rangeKeyMaskSuffix}
return &t.readerID, &t.iterID, []interface{}{&t.lower, &t.upper, &t.keyTypes, &t.filterMin, &t.filterMax, &t.rangeKeyMaskSuffix}
case *newIterUsingCloneOp:
return &t.existingIterID, &t.iterID, nil
case *newSnapshotOp:
@@ -97,7 +97,7 @@ func opArgs(op op) (receiverID *objID, targetID *objID, args []interface{}) {
case *iterSetBoundsOp:
return &t.iterID, nil, []interface{}{&t.lower, &t.upper}
case *iterSetOptionsOp:
return &t.iterID, nil, []interface{}{&t.lower, &t.upper, &t.keyTypes, &t.rangeKeyMaskSuffix}
return &t.iterID, nil, []interface{}{&t.lower, &t.upper, &t.keyTypes, &t.filterMin, &t.filterMax, &t.rangeKeyMaskSuffix}
case *singleDeleteOp:
return &t.writerID, nil, []interface{}{&t.key, &t.maybeReplaceDelete}
case *rangeKeyDeleteOp:
@@ -262,6 +262,14 @@ func (p *parser) parseArgs(op op, methodName string, args []interface{}) {
}
*t = uint32(val)

case *uint64:
_, lit := p.scanToken(token.INT)
val, err := strconv.ParseUint(lit, 0, 64)
if err != nil {
panic(err)
}
*t = uint64(val)

case *[]byte:
_, lit := p.scanToken(token.STRING)
s, err := strconv.Unquote(lit)