From 92b5eafcbaa0febf353c8302fcbc76b38127d9c5 Mon Sep 17 00:00:00 2001 From: Erik Grinaker Date: Fri, 5 Aug 2022 13:36:41 +0000 Subject: [PATCH] storage: don't synthesize MVCC point tombstones below point keys MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This patch changes `pointSynthesizingIter` (and by extension MVCC scans and gets) to not synthesize MVCC point tombstones below existing point keys, only above them. Point tombstones are still synthesized at the start bound of all MVCC range tombstones regardless. This patch only focuses on the behavioral change, and is not concerned with performance. A later patch will address performance optimizations. Even so, this can significantly improve `MVCCScan` performance with many range keys: ``` MVCCScan_Pebble/rows=10000/versions=1/valueSize=64/numRangeKeys=0-24 2.76ms ± 1% 2.78ms ± 2% ~ (p=0.274 n=8+10) MVCCScan_Pebble/rows=10000/versions=1/valueSize=64/numRangeKeys=1-24 6.34ms ± 1% 5.72ms ± 1% -9.80% (p=0.000 n=10+10) MVCCScan_Pebble/rows=10000/versions=1/valueSize=64/numRangeKeys=100-24 60.1ms ± 7% 23.6ms ± 7% -60.72% (p=0.000 n=10+10) MVCCGet_Pebble/batch=true/versions=1/valueSize=8/numRangeKeys=0-24 2.73µs ± 1% 2.72µs ± 1% ~ (p=0.268 n=9+10) MVCCGet_Pebble/batch=true/versions=1/valueSize=8/numRangeKeys=1-24 5.40µs ± 1% 5.46µs ± 1% +1.18% (p=0.001 n=10+10) MVCCGet_Pebble/batch=true/versions=1/valueSize=8/numRangeKeys=100-24 171µs ± 1% 170µs ± 1% ~ (p=0.247 n=10+10) MVCCGet_Pebble/batch=true/versions=10/valueSize=8/numRangeKeys=0-24 3.87µs ± 1% 3.85µs ± 0% -0.58% (p=0.030 n=10+9) MVCCGet_Pebble/batch=true/versions=10/valueSize=8/numRangeKeys=1-24 7.11µs ± 1% 7.24µs ± 1% +1.83% (p=0.000 n=9+10) MVCCGet_Pebble/batch=true/versions=10/valueSize=8/numRangeKeys=100-24 179µs ± 1% 178µs ± 1% ~ (p=0.063 n=10+10) MVCCGet_Pebble/batch=true/versions=100/valueSize=8/numRangeKeys=0-24 10.4µs ± 5% 10.0µs ± 3% -3.96% (p=0.013 n=10+9) MVCCGet_Pebble/batch=true/versions=100/valueSize=8/numRangeKeys=1-24 15.9µs ± 3% 16.2µs ± 3% +2.11% (p=0.007 n=10+10) MVCCGet_Pebble/batch=true/versions=100/valueSize=8/numRangeKeys=100-24 222µs ± 1% 220µs ± 2% ~ (p=0.063 n=10+10) ``` Release note: None --- pkg/storage/bench_pebble_test.go | 8 +- pkg/storage/point_synthesizing_iter.go | 173 ++++++++++-------- .../mvcc_histories/range_key_point_synthesis | 158 ++++++++++++---- .../mvcc_histories/range_tombstone_scans | 2 - 4 files changed, 225 insertions(+), 116 deletions(-) diff --git a/pkg/storage/bench_pebble_test.go b/pkg/storage/bench_pebble_test.go index 760a583bf452..b76cd64b69bd 100644 --- a/pkg/storage/bench_pebble_test.go +++ b/pkg/storage/bench_pebble_test.go @@ -65,7 +65,7 @@ func BenchmarkMVCCScan_Pebble(b *testing.B) { b.Run(fmt.Sprintf("versions=%d", numVersions), func(b *testing.B) { for _, valueSize := range []int{8, 64, 512} { b.Run(fmt.Sprintf("valueSize=%d", valueSize), func(b *testing.B) { - for _, numRangeKeys := range []int{0, 1} { // TODO(erikgrinaker): 100 + for _, numRangeKeys := range []int{0, 1, 100} { b.Run(fmt.Sprintf("numRangeKeys=%d", numRangeKeys), func(b *testing.B) { runMVCCScan(ctx, b, setupMVCCPebble, benchScanOptions{ benchDataOptions: benchDataOptions{ @@ -130,7 +130,7 @@ func BenchmarkMVCCReverseScan_Pebble(b *testing.B) { b.Run(fmt.Sprintf("versions=%d", numVersions), func(b *testing.B) { for _, valueSize := range []int{8, 64, 512} { b.Run(fmt.Sprintf("valueSize=%d", valueSize), func(b *testing.B) { - for _, numRangeKeys := range []int{0, 1} { // TODO(erikgrinaker): 100 + for _, numRangeKeys 
:= range []int{0, 1, 100} { b.Run(fmt.Sprintf("numRangeKeys=%d", numRangeKeys), func(b *testing.B) { runMVCCScan(ctx, b, setupMVCCPebble, benchScanOptions{ benchDataOptions: benchDataOptions{ @@ -172,7 +172,7 @@ func BenchmarkMVCCGet_Pebble(b *testing.B) { b.Run(fmt.Sprintf("versions=%d", numVersions), func(b *testing.B) { for _, valueSize := range []int{8} { b.Run(fmt.Sprintf("valueSize=%d", valueSize), func(b *testing.B) { - for _, numRangeKeys := range []int{0, 1} { // TODO(erikgrinaker): 100 + for _, numRangeKeys := range []int{0, 1, 100} { b.Run(fmt.Sprintf("numRangeKeys=%d", numRangeKeys), func(b *testing.B) { runMVCCGet(ctx, b, setupMVCCPebble, benchDataOptions{ numVersions: numVersions, @@ -194,7 +194,7 @@ func BenchmarkMVCCComputeStats_Pebble(b *testing.B) { ctx := context.Background() for _, valueSize := range []int{8, 32, 256} { b.Run(fmt.Sprintf("valueSize=%d", valueSize), func(b *testing.B) { - for _, numRangeKeys := range []int{0, 1} { // TODO(erikgrinaker): 100 + for _, numRangeKeys := range []int{0, 1, 100} { b.Run(fmt.Sprintf("numRangeKeys=%d", numRangeKeys), func(b *testing.B) { runMVCCComputeStats(ctx, b, setupMVCCPebble, valueSize, numRangeKeys) }) diff --git a/pkg/storage/point_synthesizing_iter.go b/pkg/storage/point_synthesizing_iter.go index 2f4ecd26aab3..04a4388a4d92 100644 --- a/pkg/storage/point_synthesizing_iter.go +++ b/pkg/storage/point_synthesizing_iter.go @@ -31,10 +31,10 @@ var pointSynthesizingIterPool = sync.Pool{ } // pointSynthesizingIter wraps an MVCCIterator, and synthesizes MVCC point keys -// for MVCC range keys above/below existing point keys, and at the start of -// range keys (truncated to iterator bounds). If emitOnSeekGE is set, it will -// also unconditionally synthesize point keys around a SeekGE seek key if it -// overlaps an MVCC range key. +// for MVCC range keys above existing point keys (not below), and at the start +// of range keys (truncated to iterator bounds). If emitOnSeekGE is set, it will +// also unconditionally synthesize point keys for all MVCC range keys that +// overlap the seek key. // // It does not emit MVCC range keys at all, since these would appear to conflict // with the synthesized point keys. @@ -44,11 +44,14 @@ var pointSynthesizingIterPool = sync.Pool{ // real point key in the underlying iterator. Otherwise, it is positioned on a // synthetic point key given by rangeKeysPos and rangeKeys[rangeKeysIdx]. // +// rangeKeysEnd specifies where to end point synthesis at the current position, +// e.g. the first range key below an existing point key. +// // The relative positioning of pointSynthesizingIter and the underlying iterator // is as follows in the forward direction: // // - atPoint=true: rangeKeysIdx points to a range key following the point key, -// or beyond the slice bounds when there are no further range keys at this +// or beyond rangeKeysEnd when there are no further range keys at this // key position. // // - atPoint=false: the underlying iterator is on a following key or exhausted. @@ -56,7 +59,7 @@ var pointSynthesizingIterPool = sync.Pool{ // point/range key. // // This positioning is mirrored in the reverse direction. For example, when -// atPoint=true and rangeKeys are exhausted, rangeKeysIdx will be len(rangeKeys) +// atPoint=true and rangeKeys are exhausted, rangeKeysIdx will be rangeKeysEnd // in the forward direction and -1 in the reverse direction. Similarly, the // underlying iterator is always >= rangeKeysPos in the forward direction and <= // in reverse. 
@@ -78,6 +81,10 @@ type pointSynthesizingIter struct { // to synthesize a point for. See struct comment for details. rangeKeysIdx int + // rangeKeysEnd contains the exclusive index at which to stop synthesizing + // point keys, since points are not synthesized below existing point keys. + rangeKeysEnd int + // rangeKeysStart contains the start key of the current rangeKeys stack. It is // only used to memoize rangeKeys for adjacent keys. rangeKeysStart roachpb.Key @@ -90,15 +97,22 @@ type pointSynthesizingIter struct { // following a SeekLT or Prev call. reverse bool - // emitOnSeekGE will synthesize point keys for the SeekGE seek key if it - // overlaps with a range key even if no point key exists. The primary use-case - // is to synthesize point keys for e.g. an MVCCGet that does not match a point - // key but overlaps a range key, which is necessary for conflict checks. + // emitOnSeekGE will synthesize point keys for all range keys that overlap the + // SeekGE seek key, regardless of whether a point key exists there. The + // primary use-case is to synthesize point keys for e.g. an MVCCGet that does + // not match a point key but overlaps a range key, which is necessary for + // conflict checks. // // This is optional, because e.g. pebbleMVCCScanner often uses seeks as an // optimization to skip over old versions of a key, and we don't want to keep // synthesizing point keys every time it skips ahead. // + // Note that these synthesized points are not stable: if the iterator leaves + // the seek key prefix and then reverses direction, points will be synthesized + // according to the normal policy: above existing point keys and at the start + // key of range keys. This parameter is primarily for use with prefix + // iterators where this is not an issue. + // // TODO(erikgrinaker): This could instead check for prefix iterators, or a // separate SeekPrefixGE() method, but we don't currently have APIs for it. emitOnSeekGE bool @@ -152,7 +166,7 @@ func (i *pointSynthesizingIter) iterNext() (bool, error) { return i.updateValid() } -// iterNext is a convenience function that calls iter.Prev() +// iterPrev is a convenience function that calls iter.Prev() // and returns the value of updateValid(). func (i *pointSynthesizingIter) iterPrev() (bool, error) { i.iter.Prev() @@ -178,16 +192,38 @@ func (i *pointSynthesizingIter) updateRangeKeys() { i.rangeKeysStart = append(i.rangeKeysStart[:0], rangeStart...) i.rangeKeys = i.iter.RangeKeys().Versions.Clone() } + if i.rangeKeysPos.Equal(i.rangeKeysStart) { + i.rangeKeysEnd = len(i.rangeKeys) + } else { + i.rangeKeysEnd = 0 + i.extendRangeKeysEnd() + } if !i.reverse { i.rangeKeysIdx = 0 } else { - i.rangeKeysIdx = len(i.rangeKeys) - 1 // NB: -1 is correct with no range keys + i.rangeKeysIdx = i.rangeKeysEnd - 1 // NB: -1 is correct with no range keys } } else { i.clearRangeKeys() } } +// extendRangeKeysEnd extends i.rangeKeysEnd to the current point key's +// timestamp in the underlying iterator. It never reduces i.rangeKeysEnd. 
+func (i *pointSynthesizingIter) extendRangeKeysEnd() { + if i.iterValid { + if hasPoint, _ := i.iter.HasPointAndRange(); hasPoint { + if p := i.iter.UnsafeKey(); p.Key.Equal(i.rangeKeysPos) && !p.Timestamp.IsEmpty() { + if end := sort.Search(len(i.rangeKeys), func(idx int) bool { + return i.rangeKeys[idx].Timestamp.Less(p.Timestamp) + }); end > i.rangeKeysEnd { + i.rangeKeysEnd = end + } + } + } + } +} + // updateAtPoint updates i.atPoint according to whether the synthesizing // iterator is positioned on the real point key in the underlying iterator. // Requires i.rangeKeys to have been positioned first. @@ -201,8 +237,7 @@ func (i *pointSynthesizingIter) updateAtPoint() { } else if point := i.iter.UnsafeKey(); !point.Key.Equal(i.rangeKeysPos) { i.atPoint = false } else if !i.reverse { - i.atPoint = i.rangeKeysIdx >= len(i.rangeKeys) || - !point.Timestamp.IsSet() || + i.atPoint = i.rangeKeysIdx >= i.rangeKeysEnd || !point.Timestamp.IsSet() || i.rangeKeys[i.rangeKeysIdx].Timestamp.LessEq(point.Timestamp) } else { i.atPoint = i.rangeKeysIdx < 0 || (point.Timestamp.IsSet() && @@ -233,6 +268,7 @@ func (i *pointSynthesizingIter) updatePosition() { if _, err := i.iterNext(); err != nil { return } + i.extendRangeKeysEnd() } i.updateAtPoint() @@ -258,6 +294,7 @@ func (i *pointSynthesizingIter) clearRangeKeys() { i.rangeKeysPos = i.rangeKeysPos[:0] i.rangeKeysStart = i.rangeKeysStart[:0] } + i.rangeKeysEnd = 0 if !i.reverse { i.rangeKeysIdx = 0 } else { @@ -282,62 +319,38 @@ func (i *pointSynthesizingIter) SeekGE(seekKey MVCCKey) { return } - // If we land in the middle of a bare range key and emitOnSeekGE is disabled, - // then skip over it to the next point/range key -- we're only supposed to - // synthesize at the range key start bound and at existing points. - // - // However, if we're seeking to a specific version and don't find an older - // point key at the seek key, then we also need to peek backwards for an - // existing point key above us, which would mandate that we synthesize point - // keys here after all. - // - // TODO(erikgrinaker): It might be faster to first do an unversioned seek to - // look for previous points and then a versioned seek. We can also omit this - // if there are no range keys below the seek timestamp. - // - // TODO(erikgrinaker): We could avoid this in the SeekGE case if we only - // synthesize points above existing points, except in the emitOnSeeGE case - // where no existing point exists. That could also result in fewer synthetic - // points overall. Do we need to synthesize older points? - var positioned bool + // If we land in the middle of a bare range key then skip over it to the next + // point/range key unless emitOnSeekGE is enabled. if !i.emitOnSeekGE && hasRange && !hasPoint && !i.iter.RangeBounds().Key.Equal(i.iter.UnsafeKey().Key) { - if ok, err := i.iterNext(); err != nil { + if ok, _ := i.iterNext(); !ok { + i.updatePosition() return - } else if seekKey.Timestamp.IsSet() && (!ok || !seekKey.Key.Equal(i.iter.UnsafeKey().Key)) { - if ok, err = i.iterPrev(); err != nil { - return - } else if ok { - if hasP, _ := i.iter.HasPointAndRange(); hasP && seekKey.Key.Equal(i.iter.UnsafeKey().Key) { - i.updateRangeKeys() - positioned = true - } - } - if ok, _ = i.iterNext(); !ok { - i.updatePosition() - return - } } hasPoint, hasRange = i.iter.HasPointAndRange() } - if !positioned { - i.updateRangeKeys() + i.updateRangeKeys() - // If we're now at a bare range key, we must either be at the start of it, - // or in the middle with emitOnSeekGE enabled. 
In either case, we want to - // move the iterator ahead to look for a point key with the same key as the - // start/seek key in order to interleave it. - if hasRange && !hasPoint { - if _, err := i.iterNext(); err != nil { - return - } + // If we're now at a bare range key, we must either be at the start of it, + // or in the middle with emitOnSeekGE enabled. In either case, we want to + // move the iterator ahead to look for a point key with the same key as the + // start/seek key in order to interleave it. + if hasRange && !hasPoint { + if _, err := i.iterNext(); err != nil { + return } + i.extendRangeKeysEnd() + } + + // If emitOnSeekGE, always expose all range keys at the current position. + if hasRange && i.emitOnSeekGE { + i.rangeKeysEnd = len(i.rangeKeys) } // If we're seeking to a specific version, skip newer range keys. if len(i.rangeKeys) > 0 && seekKey.Timestamp.IsSet() && seekKey.Key.Equal(i.rangeKeysPos) { - i.rangeKeysIdx = sort.Search(len(i.rangeKeys), func(idx int) bool { + i.rangeKeysIdx = sort.Search(i.rangeKeysEnd, func(idx int) bool { return i.rangeKeys[idx].Timestamp.LessEq(seekKey.Timestamp) }) } @@ -346,7 +359,7 @@ func (i *pointSynthesizingIter) SeekGE(seekKey MVCCKey) { // It's possible that we seeked past all of the range key versions. In this // case, we have to reposition on the next key (current iter key). - if !i.atPoint && i.rangeKeysIdx >= len(i.rangeKeys) { + if !i.atPoint && i.rangeKeysIdx >= i.rangeKeysEnd { i.updatePosition() } } @@ -378,6 +391,11 @@ func (i *pointSynthesizingIter) SeekIntentGE(seekKey roachpb.Key, txnUUID uuid.U } i.updatePosition() + + // If emitOnSeekGE, always expose all range keys at the current position. + if hasRange && i.emitOnSeekGE { + i.rangeKeysEnd = len(i.rangeKeys) + } } // Next implements MVCCIterator. @@ -403,6 +421,7 @@ func (i *pointSynthesizingIter) Next() { if _, err := i.iterNext(); err != nil { return } + i.extendRangeKeysEnd() } else { i.rangeKeysIdx++ } @@ -410,7 +429,7 @@ func (i *pointSynthesizingIter) Next() { // If we've exhausted the current range keys, update with the underlying // iterator position (which must now be at a later key). - if !i.atPoint && i.rangeKeysIdx >= len(i.rangeKeys) { + if !i.atPoint && i.rangeKeysIdx >= i.rangeKeysEnd { i.updatePosition() } } @@ -465,9 +484,7 @@ func (i *pointSynthesizingIter) SeekLT(seekKey MVCCKey) { // TODO(erikgrinaker): It might be faster to do an unversioned seek from the // next key first to look for points. var positioned bool - if seekKey.Timestamp.IsSet() && hasRange && - (!hasPoint || !i.iter.UnsafeKey().Key.Equal(seekKey.Key)) && - seekKey.Key.Compare(i.iter.RangeBounds().EndKey) < 0 { + if seekKey.Timestamp.IsSet() && hasRange && seekKey.Key.Compare(i.iter.RangeBounds().EndKey) < 0 { if ok, err := i.iterNext(); err != nil { return } else if ok { @@ -488,7 +505,7 @@ func (i *pointSynthesizingIter) SeekLT(seekKey MVCCKey) { // If we're seeking to a specific version, skip over older range keys. 
if seekKey.Timestamp.IsSet() && seekKey.Key.Equal(i.rangeKeysPos) { - i.rangeKeysIdx = sort.Search(len(i.rangeKeys), func(idx int) bool { + i.rangeKeysIdx = sort.Search(i.rangeKeysEnd, func(idx int) bool { return i.rangeKeys[idx].Timestamp.LessEq(seekKey.Timestamp) }) - 1 } @@ -545,7 +562,7 @@ func (i *pointSynthesizingIter) Valid() (bool, error) { panic(err) } } - if i.iterErr == nil && !i.atPoint && i.rangeKeysIdx >= 0 && i.rangeKeysIdx < len(i.rangeKeys) { + if i.iterErr == nil && !i.atPoint && i.rangeKeysIdx >= 0 && i.rangeKeysIdx < i.rangeKeysEnd { return true, nil // on synthetic point key } return i.iterValid, i.iterErr @@ -561,7 +578,7 @@ func (i *pointSynthesizingIter) UnsafeKey() MVCCKey { if i.atPoint { return i.iter.UnsafeKey() } - if i.rangeKeysIdx >= len(i.rangeKeys) || i.rangeKeysIdx < 0 { + if i.rangeKeysIdx >= i.rangeKeysEnd || i.rangeKeysIdx < 0 { return MVCCKey{} } return MVCCKey{ @@ -671,13 +688,19 @@ func (i *pointSynthesizingIter) assertInvariants() error { } } - // rangeKeysIdx is never more than 1 outside of the slice bounds, and the - // excess depends on the direction: len(rangeKeys) in the forward direction, - // -1 in the reverse. - if i.rangeKeysIdx < 0 || i.rangeKeysIdx >= len(i.rangeKeys) { - if (!i.reverse && i.rangeKeysIdx != len(i.rangeKeys)) || (i.reverse && i.rangeKeysIdx != -1) { - return errors.AssertionFailedf("invalid rangeKeysIdx %d with length %d and reverse=%t", - i.rangeKeysIdx, len(i.rangeKeys), i.reverse) + // rangeKeysEnd is never negative, and never greater than len(i.rangeKeys). + if i.rangeKeysEnd < 0 || i.rangeKeysEnd > len(i.rangeKeys) { + return errors.AssertionFailedf("invalid rangeKeysEnd %d with length %d", + i.rangeKeysEnd, len(i.rangeKeys)) + } + + // rangeKeysIdx is never more than 1 outside of the permitted slice interval + // (0 to rangeKeysEnd), and the excess depends on the direction: rangeKeysEnd + // in the forward direction, -1 in the reverse. + if i.rangeKeysIdx < 0 || i.rangeKeysIdx >= i.rangeKeysEnd { + if (!i.reverse && i.rangeKeysIdx != i.rangeKeysEnd) || (i.reverse && i.rangeKeysIdx != -1) { + return errors.AssertionFailedf("invalid rangeKeysIdx %d with rangeKeysEnd %d and reverse=%t", + i.rangeKeysIdx, i.rangeKeysEnd, i.reverse) } } @@ -707,7 +730,7 @@ func (i *pointSynthesizingIter) assertInvariants() error { } // rangeKeysIdx must be valid if we're not on a point. 
- if !i.atPoint && (i.rangeKeysIdx < 0 || i.rangeKeysIdx >= len(i.rangeKeys)) { + if !i.atPoint && (i.rangeKeysIdx < 0 || i.rangeKeysIdx >= i.rangeKeysEnd) { return errors.AssertionFailedf("not atPoint with invalid rangeKeysIdx %d at %s", i.rangeKeysIdx, i.rangeKeysPos) } @@ -748,10 +771,10 @@ func (i *pointSynthesizingIter) assertInvariants() error { minIdx = i.rangeKeysIdx maxIdx = i.rangeKeysIdx + 1 } - if minIdx >= 0 && minIdx < len(i.rangeKeys) { + if minIdx >= 0 && minIdx < i.rangeKeysEnd { minKey = MVCCKey{Key: i.rangeKeysPos, Timestamp: i.rangeKeys[minIdx].Timestamp} } - if maxIdx >= 0 && maxIdx < len(i.rangeKeys) { + if maxIdx >= 0 && maxIdx < i.rangeKeysEnd { maxKey = MVCCKey{Key: i.rangeKeysPos, Timestamp: i.rangeKeys[maxIdx].Timestamp} } diff --git a/pkg/storage/testdata/mvcc_histories/range_key_point_synthesis b/pkg/storage/testdata/mvcc_histories/range_key_point_synthesis index 1cbfc7ee2cea..3f46ca4e91af 100644 --- a/pkg/storage/testdata/mvcc_histories/range_key_point_synthesis +++ b/pkg/storage/testdata/mvcc_histories/range_key_point_synthesis @@ -5,10 +5,10 @@ # T # 7 [d7] [j7] # 6 f6 -# 5 o---------------o k5 o-----------o +# 5 o-------------------o k5 o-----------o # 4 x x d4 f4 g4 # 3 o-------o e3 o-------oh3 o---o -# 2 a2 f2 g2 +# 2 a2 d2 f2 g2 # 1 o-------------------o o-----------o # a b c d e f g h i j k l m n o p # @@ -21,6 +21,7 @@ put_rangekey k=l end=o ts=5 put k=a ts=2 v=a2 del k=a ts=4 del k=b ts=4 +put k=d ts=2 v=d2 put k=d ts=4 v=d4 put k=e ts=3 v=e3 put k=f ts=2 v=f2 @@ -29,7 +30,7 @@ put_rangekey k=f end=h ts=3 localTs=4 put k=f ts=4 v=f4 put k=f ts=6 v=f6 put k=g ts=4 v=g4 -put_rangekey k=c end=g ts=5 +put_rangekey k=c end=h ts=5 put k=h ts=3 v=h3 put k=k ts=5 v=k5 with t=A @@ -43,8 +44,7 @@ rangekey: {a-b}/[1.000000000,0=/] rangekey: {b-c}/[3.000000000,0=/ 1.000000000,0=/] rangekey: {c-d}/[5.000000000,0=/ 3.000000000,0=/ 1.000000000,0=/] rangekey: {d-f}/[5.000000000,0=/ 1.000000000,0=/] -rangekey: {f-g}/[5.000000000,0=/ 3.000000000,0={localTs=4.000000000,0}/] -rangekey: {g-h}/[3.000000000,0={localTs=4.000000000,0}/] +rangekey: {f-h}/[5.000000000,0=/ 3.000000000,0={localTs=4.000000000,0}/] rangekey: {h-k}/[1.000000000,0=/] rangekey: {l-n}/[5.000000000,0=/] rangekey: {n-o}/[5.000000000,0=/ 3.000000000,0=/] @@ -54,6 +54,7 @@ data: "b"/4.000000000,0 -> / meta: "d"/0,0 -> txn={id=00000000 key=/Min pri=0.00000000 epo=0 ts=7.000000000,0 min=0,0 seq=0} ts=7.000000000,0 del=false klen=12 vlen=7 mergeTs= txnDidNotUpdateMeta=true data: "d"/7.000000000,0 -> /BYTES/d7 data: "d"/4.000000000,0 -> /BYTES/d4 +data: "d"/2.000000000,0 -> /BYTES/d2 data: "e"/3.000000000,0 -> /BYTES/e3 data: "f"/6.000000000,0 -> /BYTES/f6 data: "f"/4.000000000,0 -> /BYTES/f4 @@ -85,15 +86,16 @@ iter_scan: "d"/0,0=txn={id=00000000 key=/Min pri=0.00000000 epo=0 ts=7.000000000 iter_scan: "d"/7.000000000,0=/BYTES/d7 iter_scan: "d"/5.000000000,0=/ iter_scan: "d"/4.000000000,0=/BYTES/d4 +iter_scan: "d"/2.000000000,0=/BYTES/d2 iter_scan: "d"/1.000000000,0=/ iter_scan: "e"/5.000000000,0=/ iter_scan: "e"/3.000000000,0=/BYTES/e3 -iter_scan: "e"/1.000000000,0=/ iter_scan: "f"/6.000000000,0=/BYTES/f6 iter_scan: "f"/5.000000000,0=/ iter_scan: "f"/4.000000000,0=/BYTES/f4 iter_scan: "f"/3.000000000,0={localTs=4.000000000,0}/ iter_scan: "f"/2.000000000,0=/BYTES/f2 +iter_scan: "g"/5.000000000,0=/ iter_scan: "g"/4.000000000,0=/BYTES/g4 iter_scan: "g"/3.000000000,0={localTs=4.000000000,0}/ iter_scan: "g"/2.000000000,0=/BYTES/g2 @@ -101,7 +103,6 @@ iter_scan: "h"/3.000000000,0=/BYTES/h3 iter_scan: 
"h"/1.000000000,0=/ iter_scan: "j"/0,0=txn={id=00000000 key=/Min pri=0.00000000 epo=0 ts=7.000000000,0 min=0,0 seq=0} ts=7.000000000,0 del=false klen=12 vlen=7 mergeTs= txnDidNotUpdateMeta=true iter_scan: "j"/7.000000000,0=/BYTES/j7 -iter_scan: "j"/1.000000000,0=/ iter_scan: "k"/5.000000000,0=/BYTES/k5 iter_scan: "l"/5.000000000,0=/ iter_scan: "n"/5.000000000,0=/ @@ -118,7 +119,6 @@ iter_scan: "n"/3.000000000,0=/ iter_scan: "n"/5.000000000,0=/ iter_scan: "l"/5.000000000,0=/ iter_scan: "k"/5.000000000,0=/BYTES/k5 -iter_scan: "j"/1.000000000,0=/ iter_scan: "j"/7.000000000,0=/BYTES/j7 iter_scan: "j"/0,0=txn={id=00000000 key=/Min pri=0.00000000 epo=0 ts=7.000000000,0 min=0,0 seq=0} ts=7.000000000,0 del=false klen=12 vlen=7 mergeTs= txnDidNotUpdateMeta=true iter_scan: "h"/1.000000000,0=/ @@ -126,15 +126,16 @@ iter_scan: "h"/3.000000000,0=/BYTES/h3 iter_scan: "g"/2.000000000,0=/BYTES/g2 iter_scan: "g"/3.000000000,0={localTs=4.000000000,0}/ iter_scan: "g"/4.000000000,0=/BYTES/g4 +iter_scan: "g"/5.000000000,0=/ iter_scan: "f"/2.000000000,0=/BYTES/f2 iter_scan: "f"/3.000000000,0={localTs=4.000000000,0}/ iter_scan: "f"/4.000000000,0=/BYTES/f4 iter_scan: "f"/5.000000000,0=/ iter_scan: "f"/6.000000000,0=/BYTES/f6 -iter_scan: "e"/1.000000000,0=/ iter_scan: "e"/3.000000000,0=/BYTES/e3 iter_scan: "e"/5.000000000,0=/ iter_scan: "d"/1.000000000,0=/ +iter_scan: "d"/2.000000000,0=/BYTES/d2 iter_scan: "d"/4.000000000,0=/BYTES/d4 iter_scan: "d"/5.000000000,0=/ iter_scan: "d"/7.000000000,0=/BYTES/d7 @@ -173,7 +174,7 @@ iter_next_key: "c"/5.000000000,0=/ iter_next_key: "d"/0,0=txn={id=00000000 key=/Min pri=0.00000000 epo=0 ts=7.000000000,0 min=0,0 seq=0} ts=7.000000000,0 del=false klen=12 vlen=7 mergeTs= txnDidNotUpdateMeta=true iter_next_key: "e"/5.000000000,0=/ iter_next_key: "f"/6.000000000,0=/BYTES/f6 -iter_next_key: "g"/4.000000000,0=/BYTES/g4 +iter_next_key: "g"/5.000000000,0=/ iter_next_key: "h"/3.000000000,0=/BYTES/h3 iter_next_key: "j"/0,0=txn={id=00000000 key=/Min pri=0.00000000 epo=0 ts=7.000000000,0 min=0,0 seq=0} ts=7.000000000,0 del=false klen=12 vlen=7 mergeTs= txnDidNotUpdateMeta=true iter_next_key: "k"/5.000000000,0=/BYTES/k5 @@ -206,7 +207,7 @@ iter_seek_ge: "c"/5.000000000,0=/ iter_seek_ge: "d"/0,0=txn={id=00000000 key=/Min pri=0.00000000 epo=0 ts=7.000000000,0 min=0,0 seq=0} ts=7.000000000,0 del=false klen=12 vlen=7 mergeTs= txnDidNotUpdateMeta=true iter_seek_ge: "e"/5.000000000,0=/ iter_seek_ge: "f"/6.000000000,0=/BYTES/f6 -iter_seek_ge: "g"/4.000000000,0=/BYTES/g4 +iter_seek_ge: "g"/5.000000000,0=/ iter_seek_ge: "h"/3.000000000,0=/BYTES/h3 iter_seek_ge: "j"/0,0=txn={id=00000000 key=/Min pri=0.00000000 epo=0 ts=7.000000000,0 min=0,0 seq=0} ts=7.000000000,0 del=false klen=12 vlen=7 mergeTs= txnDidNotUpdateMeta=true iter_seek_ge: "j"/0,0=txn={id=00000000 key=/Min pri=0.00000000 epo=0 ts=7.000000000,0 min=0,0 seq=0} ts=7.000000000,0 del=false klen=12 vlen=7 mergeTs= txnDidNotUpdateMeta=true @@ -240,7 +241,7 @@ iter_seek_ge: "c"/5.000000000,0=/ iter_seek_ge: "d"/0,0=txn={id=00000000 key=/Min pri=0.00000000 epo=0 ts=7.000000000,0 min=0,0 seq=0} ts=7.000000000,0 del=false klen=12 vlen=7 mergeTs= txnDidNotUpdateMeta=true iter_seek_ge: "e"/5.000000000,0=/ iter_seek_ge: "f"/6.000000000,0=/BYTES/f6 -iter_seek_ge: "g"/4.000000000,0=/BYTES/g4 +iter_seek_ge: "g"/5.000000000,0=/ iter_seek_ge: "h"/3.000000000,0=/BYTES/h3 iter_seek_ge: "i"/1.000000000,0=/ iter_seek_ge: "j"/0,0=txn={id=00000000 key=/Min pri=0.00000000 epo=0 ts=7.000000000,0 min=0,0 seq=0} ts=7.000000000,0 del=false klen=12 vlen=7 mergeTs= 
txnDidNotUpdateMeta=true @@ -272,12 +273,12 @@ iter_seek_lt: "n"/3.000000000,0=/ iter_seek_lt: "l"/5.000000000,0=/ iter_seek_lt: "l"/5.000000000,0=/ iter_seek_lt: "k"/5.000000000,0=/BYTES/k5 -iter_seek_lt: "j"/1.000000000,0=/ +iter_seek_lt: "j"/7.000000000,0=/BYTES/j7 iter_seek_lt: "h"/1.000000000,0=/ iter_seek_lt: "h"/1.000000000,0=/ iter_seek_lt: "g"/2.000000000,0=/BYTES/g2 iter_seek_lt: "f"/2.000000000,0=/BYTES/f2 -iter_seek_lt: "e"/1.000000000,0=/ +iter_seek_lt: "e"/3.000000000,0=/BYTES/e3 iter_seek_lt: "d"/1.000000000,0=/ iter_seek_lt: "c"/1.000000000,0=/ iter_seek_lt: "b"/1.000000000,0=/ @@ -308,7 +309,7 @@ iter_seek_intent_ge: "c"/5.000000000,0=/ iter_seek_intent_ge: "d"/0,0=txn={id=00000000 key=/Min pri=0.00000000 epo=0 ts=7.000000000,0 min=0,0 seq=0} ts=7.000000000,0 del=false klen=12 vlen=7 mergeTs= txnDidNotUpdateMeta=true iter_seek_intent_ge: "e"/5.000000000,0=/ iter_seek_intent_ge: "f"/6.000000000,0=/BYTES/f6 -iter_seek_intent_ge: "g"/4.000000000,0=/BYTES/g4 +iter_seek_intent_ge: "g"/5.000000000,0=/ iter_seek_intent_ge: "h"/3.000000000,0=/BYTES/h3 iter_seek_intent_ge: "j"/0,0=txn={id=00000000 key=/Min pri=0.00000000 epo=0 ts=7.000000000,0 min=0,0 seq=0} ts=7.000000000,0 del=false klen=12 vlen=7 mergeTs= txnDidNotUpdateMeta=true iter_seek_intent_ge: "j"/0,0=txn={id=00000000 key=/Min pri=0.00000000 epo=0 ts=7.000000000,0 min=0,0 seq=0} ts=7.000000000,0 del=false klen=12 vlen=7 mergeTs= txnDidNotUpdateMeta=true @@ -342,7 +343,7 @@ iter_seek_intent_ge: "c"/5.000000000,0=/ iter_seek_intent_ge: "d"/0,0=txn={id=00000000 key=/Min pri=0.00000000 epo=0 ts=7.000000000,0 min=0,0 seq=0} ts=7.000000000,0 del=false klen=12 vlen=7 mergeTs= txnDidNotUpdateMeta=true iter_seek_intent_ge: "e"/5.000000000,0=/ iter_seek_intent_ge: "f"/6.000000000,0=/BYTES/f6 -iter_seek_intent_ge: "g"/4.000000000,0=/BYTES/g4 +iter_seek_intent_ge: "g"/5.000000000,0=/ iter_seek_intent_ge: "h"/3.000000000,0=/BYTES/h3 iter_seek_intent_ge: "i"/1.000000000,0=/ iter_seek_intent_ge: "j"/0,0=txn={id=00000000 key=/Min pri=0.00000000 epo=0 ts=7.000000000,0 min=0,0 seq=0} ts=7.000000000,0 del=false klen=12 vlen=7 mergeTs= txnDidNotUpdateMeta=true @@ -415,8 +416,8 @@ iter_seek_ge: "d"/7.000000000,0=/BYTES/d7 iter_seek_ge: "d"/5.000000000,0=/ iter_seek_ge: "d"/5.000000000,0=/ iter_seek_ge: "d"/4.000000000,0=/BYTES/d4 -iter_seek_ge: "d"/1.000000000,0=/ -iter_seek_ge: "d"/1.000000000,0=/ +iter_seek_ge: "d"/2.000000000,0=/BYTES/d2 +iter_seek_ge: "d"/2.000000000,0=/BYTES/d2 iter_seek_ge: "d"/1.000000000,0=/ run ok @@ -432,8 +433,8 @@ iter_seek_ge: "e"/5.000000000,0=/ iter_seek_ge: "e"/5.000000000,0=/ iter_seek_ge: "e"/3.000000000,0=/BYTES/e3 iter_seek_ge: "e"/3.000000000,0=/BYTES/e3 -iter_seek_ge: "e"/1.000000000,0=/ -iter_seek_ge: "e"/1.000000000,0=/ +iter_seek_ge: "f"/6.000000000,0=/BYTES/f6 +iter_seek_ge: "f"/6.000000000,0=/BYTES/f6 run ok iter_new types=pointsAndRanges pointSynthesis @@ -451,7 +452,7 @@ iter_seek_ge: "f"/5.000000000,0=/ iter_seek_ge: "f"/4.000000000,0=/BYTES/f4 iter_seek_ge: "f"/3.000000000,0={localTs=4.000000000,0}/ iter_seek_ge: "f"/2.000000000,0=/BYTES/f2 -iter_seek_ge: "g"/4.000000000,0=/BYTES/g4 +iter_seek_ge: "g"/5.000000000,0=/ run ok iter_new types=pointsAndRanges pointSynthesis @@ -462,8 +463,8 @@ iter_seek_ge k=g ts=3 iter_seek_ge k=g ts=2 iter_seek_ge k=g ts=1 ---- -iter_seek_ge: "g"/4.000000000,0=/BYTES/g4 -iter_seek_ge: "g"/4.000000000,0=/BYTES/g4 +iter_seek_ge: "g"/5.000000000,0=/ +iter_seek_ge: "g"/5.000000000,0=/ iter_seek_ge: "g"/4.000000000,0=/BYTES/g4 iter_seek_ge: 
"g"/3.000000000,0={localTs=4.000000000,0}/ iter_seek_ge: "g"/2.000000000,0=/BYTES/g2 @@ -498,8 +499,8 @@ iter_seek_ge k=j ts=1 ---- iter_seek_ge: "j"/7.000000000,0=/BYTES/j7 iter_seek_ge: "j"/7.000000000,0=/BYTES/j7 -iter_seek_ge: "j"/1.000000000,0=/ -iter_seek_ge: "j"/1.000000000,0=/ +iter_seek_ge: "k"/5.000000000,0=/BYTES/k5 +iter_seek_ge: "k"/5.000000000,0=/BYTES/k5 run ok iter_new types=pointsAndRanges pointSynthesis @@ -558,6 +559,34 @@ iter_seek_ge: . iter_seek_ge: . # Versioned seeks with emitOnSeekGE. +run ok +iter_new types=pointsAndRanges pointSynthesis emitOnSeekGE +iter_seek_ge k=e ts=6 +iter_seek_ge k=e ts=5 +iter_seek_ge k=e ts=4 +iter_seek_ge k=e ts=3 +iter_seek_ge k=e ts=2 +iter_seek_ge k=e ts=1 +---- +iter_seek_ge: "e"/5.000000000,0=/ +iter_seek_ge: "e"/5.000000000,0=/ +iter_seek_ge: "e"/3.000000000,0=/BYTES/e3 +iter_seek_ge: "e"/3.000000000,0=/BYTES/e3 +iter_seek_ge: "e"/1.000000000,0=/ +iter_seek_ge: "e"/1.000000000,0=/ + +run ok +iter_new types=pointsAndRanges pointSynthesis emitOnSeekGE +iter_seek_ge k=j ts=8 +iter_seek_ge k=j ts=7 +iter_seek_ge k=j ts=6 +iter_seek_ge k=j ts=1 +---- +iter_seek_ge: "j"/7.000000000,0=/BYTES/j7 +iter_seek_ge: "j"/7.000000000,0=/BYTES/j7 +iter_seek_ge: "j"/1.000000000,0=/ +iter_seek_ge: "j"/1.000000000,0=/ + run ok iter_new types=pointsAndRanges pointSynthesis emitOnSeekGE iter_seek_ge k=l ts=6 @@ -600,6 +629,66 @@ iter_seek_ge: . iter_seek_ge: . iter_seek_ge: . +# Next after emitOnSeekGE also emits tombstones below points, but these are not +# stable following a reversal from a different key prefix. +run ok +iter_new types=pointsAndRanges pointSynthesis emitOnSeekGE +iter_seek_ge k=e +iter_next +iter_next +iter_next +iter_prev +iter_prev +iter_next +iter_next +---- +iter_seek_ge: "e"/5.000000000,0=/ +iter_next: "e"/3.000000000,0=/BYTES/e3 +iter_next: "e"/1.000000000,0=/ +iter_next: "f"/6.000000000,0=/BYTES/f6 +iter_prev: "e"/3.000000000,0=/BYTES/e3 +iter_prev: "e"/5.000000000,0=/ +iter_next: "e"/3.000000000,0=/BYTES/e3 +iter_next: "f"/6.000000000,0=/BYTES/f6 + +run ok +iter_new types=pointsAndRanges pointSynthesis emitOnSeekGE +iter_seek_ge k=j +iter_next +iter_next +iter_next +iter_prev +iter_prev +iter_next +iter_next +---- +iter_seek_ge: "j"/0,0=txn={id=00000000 key=/Min pri=0.00000000 epo=0 ts=7.000000000,0 min=0,0 seq=0} ts=7.000000000,0 del=false klen=12 vlen=7 mergeTs= txnDidNotUpdateMeta=true +iter_next: "j"/7.000000000,0=/BYTES/j7 +iter_next: "j"/1.000000000,0=/ +iter_next: "k"/5.000000000,0=/BYTES/k5 +iter_prev: "j"/7.000000000,0=/BYTES/j7 +iter_prev: "j"/0,0=txn={id=00000000 key=/Min pri=0.00000000 epo=0 ts=7.000000000,0 min=0,0 seq=0} ts=7.000000000,0 del=false klen=12 vlen=7 mergeTs= txnDidNotUpdateMeta=true +iter_next: "j"/7.000000000,0=/BYTES/j7 +iter_next: "k"/5.000000000,0=/BYTES/k5 + +run ok +iter_new types=pointsAndRanges pointSynthesis emitOnSeekGE +iter_seek_ge k=nnn +iter_next +iter_next +iter_prev +iter_prev +iter_next +iter_next +---- +iter_seek_ge: "nnn"/5.000000000,0=/ +iter_next: "nnn"/3.000000000,0=/ +iter_next: . +iter_prev: "n"/3.000000000,0=/ +iter_prev: "n"/5.000000000,0=/ +iter_next: "n"/3.000000000,0=/ +iter_next: . + # Versioned reverse seeks. 
run ok iter_new types=pointsAndRanges pointSynthesis @@ -656,7 +745,7 @@ iter_seek_lt k=d ts=6 iter_seek_lt k=d ts=7 iter_seek_lt k=d ts=8 ---- -iter_seek_lt: "d"/4.000000000,0=/BYTES/d4 +iter_seek_lt: "d"/2.000000000,0=/BYTES/d2 iter_seek_lt: "d"/4.000000000,0=/BYTES/d4 iter_seek_lt: "d"/4.000000000,0=/BYTES/d4 iter_seek_lt: "d"/5.000000000,0=/ @@ -696,8 +785,8 @@ iter_seek_lt: "f"/3.000000000,0={localTs=4.000000000,0}/ iter_seek_lt: "f"/4.000000000,0=/BYTES/f4 iter_seek_lt: "f"/5.000000000,0=/ iter_seek_lt: "f"/6.000000000,0=/BYTES/f6 -iter_seek_lt: "e"/1.000000000,0=/ -iter_seek_lt: "e"/1.000000000,0=/ +iter_seek_lt: "e"/3.000000000,0=/BYTES/e3 +iter_seek_lt: "e"/3.000000000,0=/BYTES/e3 run ok iter_new types=pointsAndRanges pointSynthesis @@ -711,7 +800,7 @@ iter_seek_lt k=g ts=6 iter_seek_lt: "g"/2.000000000,0=/BYTES/g2 iter_seek_lt: "g"/3.000000000,0={localTs=4.000000000,0}/ iter_seek_lt: "g"/4.000000000,0=/BYTES/g4 -iter_seek_lt: "f"/2.000000000,0=/BYTES/f2 +iter_seek_lt: "g"/5.000000000,0=/ iter_seek_lt: "f"/2.000000000,0=/BYTES/f2 iter_seek_lt: "f"/2.000000000,0=/BYTES/f2 @@ -756,8 +845,8 @@ iter_seek_lt k=k ts=6 ---- iter_seek_lt: "k"/5.000000000,0=/BYTES/k5 iter_seek_lt: "k"/5.000000000,0=/BYTES/k5 -iter_seek_lt: "j"/1.000000000,0=/ -iter_seek_lt: "j"/1.000000000,0=/ +iter_seek_lt: "j"/7.000000000,0=/BYTES/j7 +iter_seek_lt: "j"/7.000000000,0=/BYTES/j7 run ok iter_new types=pointsAndRanges pointSynthesis @@ -855,6 +944,7 @@ iter_scan ---- iter_seek_lt: "f"/2.000000000,0=/BYTES/f2 iter_scan: "f"/2.000000000,0=/BYTES/f2 +iter_scan: "g"/5.000000000,0=/ iter_scan: "g"/4.000000000,0=/BYTES/g4 iter_scan: "g"/3.000000000,0={localTs=4.000000000,0}/ iter_scan: "g"/2.000000000,0=/BYTES/g2 @@ -862,7 +952,6 @@ iter_scan: "h"/3.000000000,0=/BYTES/h3 iter_scan: "h"/1.000000000,0=/ iter_scan: "j"/0,0=txn={id=00000000 key=/Min pri=0.00000000 epo=0 ts=7.000000000,0 min=0,0 seq=0} ts=7.000000000,0 del=false klen=12 vlen=7 mergeTs= txnDidNotUpdateMeta=true iter_scan: "j"/7.000000000,0=/BYTES/j7 -iter_scan: "j"/1.000000000,0=/ iter_scan: "k"/5.000000000,0=/BYTES/k5 iter_scan: "l"/5.000000000,0=/ iter_scan: "n"/5.000000000,0=/ @@ -881,7 +970,6 @@ iter_scan: "h"/3.000000000,0=/BYTES/h3 iter_scan: "h"/1.000000000,0=/ iter_scan: "j"/0,0=txn={id=00000000 key=/Min pri=0.00000000 epo=0 ts=7.000000000,0 min=0,0 seq=0} ts=7.000000000,0 del=false klen=12 vlen=7 mergeTs= txnDidNotUpdateMeta=true iter_scan: "j"/7.000000000,0=/BYTES/j7 -iter_scan: "j"/1.000000000,0=/ iter_scan: "k"/5.000000000,0=/BYTES/k5 iter_scan: "l"/5.000000000,0=/ iter_scan: "n"/5.000000000,0=/ @@ -900,7 +988,7 @@ iter_prev iter_seek_ge: "e"/3.000000000,0=/BYTES/e3 iter_prev: "e"/5.000000000,0=/ iter_next: "e"/3.000000000,0=/BYTES/e3 -iter_next: "e"/1.000000000,0=/ +iter_next: "f"/6.000000000,0=/BYTES/f6 iter_prev: "e"/3.000000000,0=/BYTES/e3 run ok @@ -936,8 +1024,8 @@ iter_prev: "d"/1.000000000,0=/ iter_next_key: "e"/5.000000000,0=/ iter_next: "e"/3.000000000,0=/BYTES/e3 iter_next_key: "f"/6.000000000,0=/BYTES/f6 -iter_prev: "e"/1.000000000,0=/ iter_prev: "e"/3.000000000,0=/BYTES/e3 +iter_prev: "e"/5.000000000,0=/ iter_next_key: "f"/6.000000000,0=/BYTES/f6 iter_next: "f"/5.000000000,0=/ diff --git a/pkg/storage/testdata/mvcc_histories/range_tombstone_scans b/pkg/storage/testdata/mvcc_histories/range_tombstone_scans index d9b95a621909..0a8473900a4d 100644 --- a/pkg/storage/testdata/mvcc_histories/range_tombstone_scans +++ b/pkg/storage/testdata/mvcc_histories/range_tombstone_scans @@ -98,7 +98,6 @@ scan k=a end=z ts=2 tombstones scan: "a" 
-> / @2.000000000,0 scan: "b" -> / @2.000000000,0 scan: "c" -> / @2.000000000,0 -scan: "d" -> / @2.000000000,0 scan: "f" -> /BYTES/f1 @1.000000000,0 scan: "h" -> /BYTES/h2 @2.000000000,0 @@ -256,7 +255,6 @@ scan k=a end=z ts=2 tombstones reverse ---- scan: "h" -> /BYTES/h2 @2.000000000,0 scan: "f" -> /BYTES/f1 @1.000000000,0 -scan: "d" -> / @2.000000000,0 scan: "c" -> / @2.000000000,0 scan: "b" -> / @2.000000000,0 scan: "a" -> / @2.000000000,0
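
For readers tracing the testdata above, here is a minimal standalone sketch of the new synthesis policy, in plain Go. It is not CockroachDB code and the names (`rangeKeyTS`, `pointTS`, `end`) are hypothetical; it only models the binary search that `extendRangeKeysEnd` performs at a position inside a range tombstone that also has real point keys. At such a position, only range-key versions at or above the newest existing point key get a synthesized point tombstone; at a range key's start bound, all versions are still synthesized regardless.

```go
// Sketch only: models the interior-position case for key "e" in the testdata,
// where range tombstones exist at timestamps 5 and 1 and a real point key
// exists at timestamp 3. Expected: synthesize e@5, suppress e@1.
package main

import (
	"fmt"
	"sort"
)

func main() {
	rangeKeyTS := []int{5, 1} // range tombstone versions at this key, newest first
	pointTS := 3              // newest existing point key at this position

	// Exclusive end index: the first range-key version strictly below the
	// existing point key. Versions before this index are at or above the point
	// and get synthesized point tombstones; the rest are suppressed.
	// (At a range key's start bound, end would instead be len(rangeKeyTS).)
	end := sort.Search(len(rangeKeyTS), func(i int) bool {
		return rangeKeyTS[i] < pointTS
	})

	fmt.Println("synthesized point tombstones at:", rangeKeyTS[:end]) // [5]
	fmt.Println("suppressed below existing point:", rangeKeyTS[end:]) // [1]
}
```

This mirrors the `iter_scan` output for "e" above (e@5 synthesized, e@1 no longer emitted) and, under the same assumption, the start-bound case explains why "d" still emits d@1.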