Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

metamorphic: fix format major version ratcheting and parsing #2691

Merged
merged 5 commits into from
Jul 1, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 9 additions & 12 deletions metamorphic/generator.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,8 @@ import (
"golang.org/x/exp/rand"
)

const maxValueSize = 20

type iterOpts struct {
lower []byte
upper []byte
Expand Down Expand Up @@ -516,15 +518,10 @@ func (g *generator) dbRatchetFormatMajorVersion() {
// version may be behind the database's format major version, in which case
// RatchetFormatMajorVersion should deterministically error.

// TODO(jackson): When FormatDeleteSized is stabilized, return this to just
// using `FormatNewest`.
newestTODO := pebble.FormatNewest
if newestTODO < pebble.ExperimentalFormatDeleteSizedAndObsolete {
newestTODO = pebble.ExperimentalFormatDeleteSizedAndObsolete
}

n := int(newestTODO - minimumFormatMajorVersion)
vers := pebble.FormatMajorVersion(g.rng.Intn(n + 1))
// TODO(jackson): When the latest format major versions ares stabilized,
// return this to just using `FormatNewest`.
n := int(newestFormatMajorVersionTODO - minimumFormatMajorVersion)
vers := pebble.FormatMajorVersion(g.rng.Intn(n+1)) + minimumFormatMajorVersion
g.add(&dbRatchetFormatMajorVersionOp{vers: vers})
}

Expand Down Expand Up @@ -1129,7 +1126,7 @@ func (g *generator) writerRangeKeySet() {
start: start,
end: end,
suffix: suffix,
value: g.randValue(0, 20),
value: g.randValue(0, maxValueSize),
})
}

Expand Down Expand Up @@ -1206,7 +1203,7 @@ func (g *generator) writerMerge() {
writerID: writerID,
// 20% new keys.
key: g.randKeyToWrite(0.2),
value: g.randValue(0, 20),
value: g.randValue(0, maxValueSize),
})
}

Expand All @@ -1220,7 +1217,7 @@ func (g *generator) writerSet() {
writerID: writerID,
// 50% new keys.
key: g.randKeyToWrite(0.5),
value: g.randValue(0, 20),
value: g.randValue(0, maxValueSize),
})
}

Expand Down
3 changes: 1 addition & 2 deletions metamorphic/ops.go
Original file line number Diff line number Diff line change
Expand Up @@ -202,9 +202,8 @@ func (o *deleteOp) run(t *test, h historyRecorder) {
}

func hashSize(index int) uint32 {
const maxSize = 16 << 10 /* 16 KB */
// Fibonacci hash https://probablydance.com/2018/06/16/fibonacci-hashing-the-optimization-that-the-world-forgot-or-a-better-alternative-to-integer-modulo/
return uint32((11400714819323198485 * uint64(index)) % maxSize)
return uint32((11400714819323198485 * uint64(index)) % maxValueSize)
}

func (o *deleteOp) String() string {
Expand Down
11 changes: 10 additions & 1 deletion metamorphic/options.go
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,11 @@ const (
// that are less than defaultFormatMajorVersion but are at least
// minimumFormatMajorVersion.
defaultFormatMajorVersion = pebble.FormatPrePebblev1Marked
// newestFormatMajorVersionTODO is the most recent format major version the
// metamorphic tests should use. This may be greater than
// pebble.FormatNewest when some format major versions are marked as
// experimental.
newestFormatMajorVersionTODO = pebble.ExperimentalFormatVirtualSSTables
)

func parseOptions(
Expand Down Expand Up @@ -340,6 +345,10 @@ func standardOptions() []*TestOptions {
[TestOptions]
enable_value_blocks=true
`,
26: fmt.Sprintf(`
[Options]
format_major_version=%s
`, newestFormatMajorVersionTODO),
}

opts := make([]*TestOptions, len(stdOpts))
Expand Down Expand Up @@ -388,7 +397,7 @@ func randomOptions(
opts.FlushDelayRangeKey = time.Millisecond * time.Duration(5*rng.Intn(245)) // 5-250ms
opts.FlushSplitBytes = 1 << rng.Intn(20) // 1B - 1MB
opts.FormatMajorVersion = minimumFormatMajorVersion
n := int(pebble.FormatNewest - opts.FormatMajorVersion)
n := int(newestFormatMajorVersionTODO - opts.FormatMajorVersion)
opts.FormatMajorVersion += pebble.FormatMajorVersion(rng.Intn(n + 1))
opts.Experimental.L0CompactionConcurrency = 1 + rng.Intn(4) // 1-4
opts.Experimental.LevelMultiplier = 5 << rng.Intn(7) // 5 - 320
Expand Down
6 changes: 3 additions & 3 deletions metamorphic/parser.go
Original file line number Diff line number Diff line change
Expand Up @@ -264,15 +264,15 @@ func (p *parser) parseArgs(op op, methodName string, args []interface{}) {
switch t := args[i].(type) {
case *uint32:
_, lit := p.scanToken(token.INT)
val, err := strconv.ParseUint(lit, 0, 32)
val, err := strconv.ParseUint(lit, 10, 32)
if err != nil {
panic(err)
}
*t = uint32(val)

case *uint64:
_, lit := p.scanToken(token.INT)
val, err := strconv.ParseUint(lit, 0, 64)
val, err := strconv.ParseUint(lit, 10, 64)
if err != nil {
panic(err)
}
Expand Down Expand Up @@ -378,7 +378,7 @@ func (p *parser) parseArgs(op op, methodName string, args []interface{}) {

case *pebble.FormatMajorVersion:
_, lit := p.scanToken(token.INT)
val, err := strconv.ParseUint(lit, 0, 64)
val, err := strconv.ParseUint(lit, 10, 64)
if err != nil {
panic(err)
}
Expand Down
2 changes: 2 additions & 0 deletions sstable/reader_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -492,6 +492,8 @@ func TestReader(t *testing.T) {
}

func TestReaderHideObsolete(t *testing.T) {
t.Skip("#2705")

blockSizes := map[string]int{
"1bytes": 1,
"5bytes": 5,
Expand Down
130 changes: 65 additions & 65 deletions sstable/testdata/writer_value_blocks
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,7 @@ layout
72 record (21 = 3 [0] + 14 + 4) [restart]
blue@8#16,1:value handle {valueLen:6 blockNum:0 offsetInBlock:5}
93 [restart 72]
101 [trailer compression=none checksum=0x4e65b9b6]
101 [trailer compression=none checksum=0xdc74261]
106 data (29)
106 record (21 = 3 [0] + 14 + 4) [restart]
blue@6#16,1:value handle {valueLen:15 blockNum:1 offsetInBlock:0}
Expand All @@ -146,71 +146,71 @@ layout
173 block:38/29 [restart]
192 [restart 173]
200 [trailer compression=none checksum=0x21d27815]
205 index (30)
205 index (27)
205 block:72/29 [restart]
227 [restart 205]
235 [trailer compression=none checksum=0xba0b26fe]
240 index (22)
240 block:106/29 [restart]
254 [restart 240]
262 [trailer compression=none checksum=0x802be702]
267 top-index (85)
267 block:140/28 [restart]
288 block:173/27 [restart]
308 block:205/30 [restart]
331 block:240/22 [restart]
346 [restart 267]
350 [restart 288]
354 [restart 308]
358 [restart 331]
352 [trailer compression=snappy checksum=0x8bd0d63a]
357 value-block (11)
373 value-block (15)
393 value-index (8)
406 properties (676)
406 obsolete-key (16) [restart]
422 pebble.num.value-blocks (27)
449 pebble.num.values.in.value-blocks (21)
470 pebble.value-blocks.size (21)
491 rocksdb.block.based.table.index.type (43)
534 rocksdb.block.based.table.prefix.filtering (20)
554 rocksdb.block.based.table.whole.key.filtering (23)
577 rocksdb.comparator (37)
614 rocksdb.compression (16)
630 rocksdb.compression_options (106)
736 rocksdb.data.size (14)
750 rocksdb.deleted.keys (15)
765 rocksdb.external_sst_file.global_seqno (41)
806 rocksdb.external_sst_file.version (14)
820 rocksdb.filter.size (15)
835 rocksdb.index.partitions (20)
855 rocksdb.index.size (9)
864 rocksdb.merge.operands (18)
882 rocksdb.merge.operator (24)
906 rocksdb.num.data.blocks (19)
925 rocksdb.num.entries (11)
936 rocksdb.num.range-deletions (19)
955 rocksdb.prefix.extractor.name (31)
986 rocksdb.property.collectors (34)
1020 rocksdb.raw.key.size (16)
1036 rocksdb.raw.value.size (14)
1050 rocksdb.top-level.index.size (24)
1074 [restart 406]
1082 [trailer compression=none checksum=0xbf6fe705]
1087 meta-index (64)
1087 pebble.value_index block:393/8 value-blocks-index-lengths: 1(num), 2(offset), 1(length) [restart]
1114 rocksdb.properties block:406/676 [restart]
1139 [restart 1087]
1143 [restart 1114]
1151 [trailer compression=none checksum=0x5a8a2a98]
1156 footer (53)
1156 checksum type: crc32c
1157 meta: offset=1087, length=64
1160 index: offset=267, length=85
1163 [padding]
1197 version: 4
1201 magic number: 0xf09faab3f09faab3
1209 EOF
224 [restart 205]
232 [trailer compression=none checksum=0xbae26eb3]
237 index (22)
237 block:106/29 [restart]
251 [restart 237]
259 [trailer compression=none checksum=0x802be702]
264 top-index (77)
264 block:140/28 [restart]
285 block:173/27 [restart]
305 block:205/27 [restart]
325 block:237/22 [restart]
340 [restart 264]
344 [restart 285]
348 [restart 305]
352 [restart 325]
341 [trailer compression=snappy checksum=0x6b2d79b]
346 value-block (11)
362 value-block (15)
382 value-index (8)
395 properties (676)
395 obsolete-key (16) [restart]
411 pebble.num.value-blocks (27)
438 pebble.num.values.in.value-blocks (21)
459 pebble.value-blocks.size (21)
480 rocksdb.block.based.table.index.type (43)
523 rocksdb.block.based.table.prefix.filtering (20)
543 rocksdb.block.based.table.whole.key.filtering (23)
566 rocksdb.comparator (37)
603 rocksdb.compression (16)
619 rocksdb.compression_options (106)
725 rocksdb.data.size (14)
739 rocksdb.deleted.keys (15)
754 rocksdb.external_sst_file.global_seqno (41)
795 rocksdb.external_sst_file.version (14)
809 rocksdb.filter.size (15)
824 rocksdb.index.partitions (20)
844 rocksdb.index.size (9)
853 rocksdb.merge.operands (18)
871 rocksdb.merge.operator (24)
895 rocksdb.num.data.blocks (19)
914 rocksdb.num.entries (11)
925 rocksdb.num.range-deletions (19)
944 rocksdb.prefix.extractor.name (31)
975 rocksdb.property.collectors (34)
1009 rocksdb.raw.key.size (16)
1025 rocksdb.raw.value.size (14)
1039 rocksdb.top-level.index.size (24)
1063 [restart 395]
1071 [trailer compression=none checksum=0x8f5517f7]
1076 meta-index (64)
1076 pebble.value_index block:382/8 value-blocks-index-lengths: 1(num), 2(offset), 1(length) [restart]
1103 rocksdb.properties block:395/676 [restart]
1128 [restart 1076]
1132 [restart 1103]
1140 [trailer compression=none checksum=0xb465a0c2]
1145 footer (53)
1145 checksum type: crc32c
1146 meta: offset=1076, length=64
1149 index: offset=264, length=77
1152 [padding]
1186 version: 4
1190 magic number: 0xf09faab3f09faab3
1198 EOF

# Require that [c,e) must be in-place.
build in-place-bound=(c,e)
Expand Down
3 changes: 2 additions & 1 deletion sstable/writer.go
Original file line number Diff line number Diff line change
Expand Up @@ -942,7 +942,8 @@ func (w *Writer) addPoint(key InternalKey, value []byte, forceObsolete bool) err
if err != nil {
return err
}
isObsolete = w.tableFormat >= TableFormatPebblev4 && (isObsolete || forceObsolete)
// Temporarily disable `isObsolete`.
isObsolete = false && w.tableFormat >= TableFormatPebblev4 && (isObsolete || forceObsolete)
w.lastPointKeyInfo.isObsolete = isObsolete
var valueStoredWithKey []byte
var prefix valuePrefix
Expand Down
46 changes: 46 additions & 0 deletions testdata/compaction_iter_delete_sized
Original file line number Diff line number Diff line change
Expand Up @@ -1532,3 +1532,49 @@ next
a#9,0:
.
missized-dels=0

# Test various DELSIZEDs beneath live keys. SETS should be converted to
# SETWITHDELs when they meet a DELSIZED.

define
a.SET.7:foo
a.DELSIZED.5:varint(5)
b.SET.4:bar
b.DELSIZED.2:varint(4)
b.SET.1:bax
c.SET.9:coconut
c.DEL.8:del
c.DELSIZED.5:varint(2)
d.SET.8:dragonfruit
----

iter print-missized-dels
first
next
next
next
next
----
a#7,18:foo
b#4,18:bar
c#9,18:coconut
d#8,1:dragonfruit
.
missized-dels=0

# Test a DELSIZED meeting a MERGE. This counts as a missized DEL—The user can't
# know the value of the most recent MERGE since it's dependent on LSM state.

define
a.DELSIZED.9:varint(4)
a.MERGE.8:fo
a.MERGE.7:o
----

iter print-missized-dels
first
next
----
a#9,0:
.
missized-dels=1