139020: sql/schemachanger: Add support for storing roles in policies r=spilchen a=spilchen

A previous commit introduced basic support for CREATE/DROP POLICY. This commit extends that functionality by storing roles in the policy descriptor as strings.

The roles are modelled as separate DSC (declarative schema changer) elements, which will make it easier to modify a policy's roles once ALTER POLICY is supported.

Epic: CRDB-11724
Informs: #136696
Release note: None

139108: refactor: replace filepath.Walk with filepath.WalkDir r=RaduBerinde a=Gofastasf

Replace filepath.Walk with filepath.WalkDir.

filepath.WalkDir, introduced in Go 1.16, is more performant because it avoids creating unnecessary intermediate os.FileInfo objects. While filepath.Walk calls os.Lstat for every file or directory to build an os.FileInfo, filepath.WalkDir provides an fs.DirEntry, which exposes file type information without requiring a stat call.

This change reduces unnecessary system calls and aligns with modern Go practices for directory traversal.
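
As a side-by-side sketch of the migration pattern applied throughout this commit (illustrative code, not taken from the repository), both traversals below visit the same tree; only the callback signature and the per-entry stat behavior differ:

```go
package main

import (
	"fmt"
	"io/fs"
	"path/filepath"
)

func main() {
	root := "." // any directory tree

	// Before: filepath.Walk stats every entry to build an fs.FileInfo.
	_ = filepath.Walk(root, func(path string, info fs.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if !info.IsDir() {
			fmt.Println("walk:", path)
		}
		return nil
	})

	// After: filepath.WalkDir reads directory entries directly; call
	// d.Info() only if full metadata (size, mtime) is actually needed.
	_ = filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if !d.IsDir() {
			fmt.Println("walkdir:", path)
		}
		return nil
	})
}
```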

139203: scexec: treat BatchTimestampBeforeGCError as permanent r=rafiss a=rafiss

### sql: fix ordering of arguments for IndexBackfillPlanner

The "now" argument was being passed as the "writeAsOf" parameter.
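
A generic illustration of this class of bug (hypothetical names, not the actual IndexBackfillPlanner signature): when two parameters share a type, the compiler cannot catch a swap, so the fix lands at the call site.

```go
package main

import (
	"fmt"
	"time"
)

// newBackfillPlan takes two timestamps of the same type, so swapping them at
// the call site compiles cleanly; the names here are hypothetical.
func newBackfillPlan(now, writeAsOf time.Time) string {
	return fmt.Sprintf("now=%v writeAsOf=%v", now, writeAsOf)
}

func main() {
	now := time.Now()
	writeAsOf := now.Add(-time.Minute)

	fmt.Println(newBackfillPlan(writeAsOf, now)) // bug: arguments swapped
	fmt.Println(newBackfillPlan(now, writeAsOf)) // fix: correct order
}
```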

### jobsprotectedts: fix calculation in TryToProtectBeforeGC

This function was attempting to compute 80% of the time remaining until the GC
threshold is reached. However, it did not take into account that the read was
happening at a historical timestamp. The historical read timestamp is now part
of the calculation.
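
A hedged sketch of the corrected arithmetic; the names and signature below are illustrative, not the actual jobsprotectedts API:

```go
package main

import (
	"fmt"
	"time"
)

// protectionDeadline returns 80% of the time remaining before data read at
// readAsOf falls below the GC threshold. Measuring the window from the
// historical read timestamp, rather than assuming the full gcTTL is still
// available, is the correction described above.
func protectionDeadline(now, readAsOf time.Time, gcTTL time.Duration) time.Duration {
	// Data read at readAsOf becomes eligible for GC at roughly readAsOf + gcTTL.
	remaining := readAsOf.Add(gcTTL).Sub(now)
	if remaining <= 0 {
		return 0 // already past the GC threshold; protection cannot help
	}
	return remaining * 8 / 10
}

func main() {
	now := time.Now()
	readAsOf := now.Add(-6 * time.Hour) // historical read, 6h in the past
	gcTTL := 25 * time.Hour
	// Ignoring the historical timestamp would yield 80% of 25h = 20h;
	// the corrected window is 80% of (25h - 6h) = 15.2h.
	fmt.Println(protectionDeadline(now, readAsOf, gcTTL))
}
```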

### scexec: treat BatchTimestampBeforeGCError as permanent

This uses the same check and testing that was added in
a7cce24 for the materialized view
backfill.
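
The sketch below shows the general "classify as permanent" pattern with stand-in types; it deliberately avoids CockroachDB's real kvpb and jobs APIs, which this diff does not show:

```go
package main

import (
	"errors"
	"fmt"
)

// batchTimestampBeforeGCError stands in for the real KV error type.
type batchTimestampBeforeGCError struct{ msg string }

func (e *batchTimestampBeforeGCError) Error() string { return e.msg }

// permanentError wraps an error to signal the job system not to retry.
type permanentError struct{ err error }

func (e *permanentError) Error() string { return "permanent: " + e.err.Error() }
func (e *permanentError) Unwrap() error { return e.err }

// classifyBackfillError marks GC-threshold errors as permanent so the schema
// change job fails instead of retrying forever; other errors stay retryable.
func classifyBackfillError(err error) error {
	var gcErr *batchTimestampBeforeGCError
	if errors.As(err, &gcErr) {
		return &permanentError{err: err}
	}
	return err
}

func main() {
	err := classifyBackfillError(&batchTimestampBeforeGCError{
		msg: "batch timestamp must be after replica GC threshold",
	})
	fmt.Println(err) // permanent: batch timestamp must be after replica GC threshold
}
```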

fixes #126260

Release note (bug fix): Fixed a bug where the error
"batch timestamp T must be after replica GC threshold" could occur
during a schema change backfill operation, and cause the schema change
job to retry infinitely. Now this error is treated as permanent, and
will cause the job to enter the failed state.


Co-authored-by: Matt Spilchen <[email protected]>
Co-authored-by: Gofastasf <[email protected]>
Co-authored-by: Rafi Shamim <[email protected]>
4 people committed Jan 16, 2025
4 parents 470e512 + 4a487d8 + f47addb + 3afe619 commit 0e13eaa
Showing 53 changed files with 886 additions and 241 deletions.
3 changes: 2 additions & 1 deletion pkg/acceptance/util_docker.go
@@ -9,6 +9,7 @@ import (
"context"
"crypto/rand"
"fmt"
"io/fs"
"math/big"
"os"
"path/filepath"
@@ -170,7 +171,7 @@ func testDocker(
// so the files can be used inside a docker container. The caller function is responsible for cleaning up.
// This function doesn't copy the original file permissions and uses 755 for directories and files.
func copyRunfiles(source, destination string) error {
return filepath.WalkDir(source, func(path string, dirEntry os.DirEntry, walkErr error) error {
return filepath.WalkDir(source, func(path string, dirEntry fs.DirEntry, walkErr error) error {
if walkErr != nil {
return walkErr
}
9 changes: 5 additions & 4 deletions pkg/backup/full_cluster_backup_restore_test.go
@@ -9,6 +9,7 @@ import (
"context"
gosql "database/sql"
"fmt"
"io/fs"
"os"
"path/filepath"
"strconv"
@@ -661,14 +662,14 @@ func TestClusterRestoreFailCleanup(t *testing.T) {

// Bugger the backup by removing the SST files. (Note this messes up all of
// the backups, but there is only one at this point.)
if err := filepath.Walk(tempDir, func(path string, info os.FileInfo, err error) error {
if err := filepath.WalkDir(tempDir, func(path string, d fs.DirEntry, err error) error {
if err != nil {
t.Fatal(err)
}
if info.Name() == backupbase.BackupManifestName ||
if d.Name() == backupbase.BackupManifestName ||
!strings.HasSuffix(path, ".sst") ||
info.Name() == backupinfo.BackupMetadataDescriptorsListPath ||
info.Name() == backupinfo.BackupMetadataFilesListPath {
d.Name() == backupinfo.BackupMetadataDescriptorsListPath ||
d.Name() == backupinfo.BackupMetadataFilesListPath {
return nil
}
return os.Remove(path)
5 changes: 3 additions & 2 deletions pkg/blobs/local_storage.go
@@ -8,6 +8,7 @@ package blobs
import (
"context"
"io"
"io/fs"
"os"
"path/filepath"
"strings"
@@ -186,11 +187,11 @@ func (l *LocalStorage) List(pattern string) ([]string, error) {
}
}

if err := filepath.Walk(walkRoot, func(p string, f os.FileInfo, err error) error {
if err := filepath.WalkDir(walkRoot, func(p string, d fs.DirEntry, err error) error {
if err != nil {
return err
}
if f.IsDir() {
if d.IsDir() {
return nil
}
if listingParent && !strings.HasPrefix(p, fullPath) {
13 changes: 7 additions & 6 deletions pkg/ccl/changefeedccl/sink_cloudstorage_test.go
@@ -10,6 +10,7 @@ import (
"context"
"fmt"
"io"
"io/fs"
"math"
"net/url"
"os"
@@ -111,33 +112,33 @@ func TestCloudStorageSink(t *testing.T) {
return false
}

walkFn := func(path string, info os.FileInfo, err error) error {
walkDirFn := func(path string, d fs.DirEntry, err error) error {
if err != nil {
return err
}
if path == absRoot {
return nil
}
if info.IsDir() && !hasChildDirs(path) {
if d.IsDir() && !hasChildDirs(path) {
relPath, _ := filepath.Rel(absRoot, path)
folders = append(folders, relPath)
}
return nil
}

require.NoError(t, filepath.Walk(absRoot, walkFn))
require.NoError(t, filepath.WalkDir(absRoot, walkDirFn))
return folders
}

// slurpDir returns the contents of every file under root (relative to the
// temp dir created above), sorted by the name of the file.
slurpDir := func(t *testing.T) []string {
var files []string
walkFn := func(path string, info os.FileInfo, err error) error {
walkDirFn := func(path string, d fs.DirEntry, err error) error {
if err != nil {
return err
}
if info.IsDir() {
if d.IsDir() {
return nil
}
file, err := os.ReadFile(path)
@@ -152,7 +153,7 @@ func TestCloudStorageSink(t *testing.T) {
}
absRoot := filepath.Join(externalIODir, testDir(t))
require.NoError(t, os.MkdirAll(absRoot, 0755))
require.NoError(t, filepath.Walk(absRoot, walkFn))
require.NoError(t, filepath.WalkDir(absRoot, walkDirFn))
return files
}

7 changes: 4 additions & 3 deletions pkg/ccl/changefeedccl/testfeed_test.go
@@ -13,6 +13,7 @@ import (
"encoding/base64"
gojson "encoding/json"
"fmt"
"io/fs"
"math/rand"
"net/url"
"os"
@@ -1493,13 +1494,13 @@ func (c *cloudFeed) Next() (*cdctest.TestFeedMessage, error) {
return nil, err
}

if err := filepath.Walk(c.dir, c.walkDir); err != nil {
if err := filepath.WalkDir(c.dir, c.walkDir); err != nil {
return nil, err
}
}
}

func (c *cloudFeed) walkDir(path string, info os.FileInfo, err error) error {
func (c *cloudFeed) walkDir(path string, d fs.DirEntry, err error) error {
if strings.HasSuffix(path, `.tmp`) {
// File in the process of being written by ExternalStorage. Ignore.
return nil
@@ -1520,7 +1521,7 @@ func (c *cloudFeed) walkDir(path string, info os.FileInfo, err error) error {
return err
}

if info.IsDir() {
if d.IsDir() {
// Nothing to do for directories.
return nil
}
6 changes: 3 additions & 3 deletions pkg/cli/debug_merge_logs.go
@@ -11,6 +11,7 @@ import (
"container/heap"
"context"
"io"
"io/fs"
"os"
"path/filepath"
"regexp"
@@ -349,12 +350,11 @@ func findLogFiles(
}
var files []fileInfo
for _, p := range paths {
// NB: come go1.16, we should use WalkDir here as it is more efficient.
if err := filepath.Walk(p, func(p string, info os.FileInfo, err error) error {
if err := filepath.WalkDir(p, func(p string, d fs.DirEntry, err error) error {
if err != nil {
return err
}
if info.IsDir() {
if d.IsDir() {
// Don't act on the directory itself, Walk will visit it for us.
return nil
}
5 changes: 3 additions & 2 deletions pkg/cli/gen_test.go
@@ -6,6 +6,7 @@
package cli

import (
"io/fs"
"os"
"path/filepath"
"strings"
@@ -28,8 +29,8 @@ func TestGenMan(t *testing.T) {

// Ensure we have a sane number of man pages.
count := 0
err := filepath.Walk(manpath, func(path string, info os.FileInfo, err error) error {
if strings.HasSuffix(path, ".1") && !info.IsDir() {
err := filepath.WalkDir(manpath, func(path string, d fs.DirEntry, err error) error {
if strings.HasSuffix(path, ".1") && !d.IsDir() {
count++
}
return nil
7 changes: 4 additions & 3 deletions pkg/cli/userfiletable_test.go
@@ -10,6 +10,7 @@ import (
"context"
"fmt"
"io"
"io/fs"
"net/url"
"os"
"path/filepath"
@@ -446,12 +447,12 @@ func TestUserFileUploadRecursive(t *testing.T) {
dstDir = tc.destination + "/" + filepath.Base(testDir)
}

err = filepath.Walk(testDir,
func(path string, info os.FileInfo, err error) error {
err = filepath.WalkDir(testDir,
func(path string, d fs.DirEntry, err error) error {
if err != nil {
return err
}
if info.IsDir() {
if d.IsDir() {
return nil
}
relPath := strings.TrimPrefix(path, testDir+"/")
4 changes: 2 additions & 2 deletions pkg/cmd/dev/io/os/os.go
@@ -459,10 +459,10 @@ func (o *OS) ListFilesWithSuffix(root, suffix string) ([]string, error) {

output, err := o.Next(command, func() (output string, err error) {
var ret []string
if err := filepath.Walk(root, func(path string, info fs.FileInfo, err error) error {
if err := filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error {
// If there's an error walking the tree, throw it away -- there's
// nothing interesting we can do with it.
if err != nil || info.IsDir() {
if err != nil || d.IsDir() {
//nolint:returnerrcheck
return nil
}
4 changes: 2 additions & 2 deletions pkg/cmd/roachtest/cluster.go
@@ -2315,11 +2315,11 @@ func (c *clusterImpl) RefetchCertsFromNode(ctx context.Context, node int) error
return errors.Wrap(err, "cluster.StartE")
}
// Need to prevent world readable files or lib/pq will complain.
return filepath.Walk(c.localCertsDir, func(path string, info fs.FileInfo, err error) error {
return filepath.WalkDir(c.localCertsDir, func(path string, d fs.DirEntry, err error) error {
if err != nil {
return errors.Wrap(err, "walking localCertsDir failed")
}
if info.IsDir() {
if d.IsDir() {
return nil
}
return os.Chmod(path, 0600)
3 changes: 2 additions & 1 deletion pkg/cmd/roachtest/tests/network.go
@@ -8,6 +8,7 @@ package tests
import (
"context"
"fmt"
"io/fs"
"os"
"path/filepath"
"strconv"
@@ -94,7 +95,7 @@ func runNetworkAuthentication(ctx context.Context, t test.Test, c cluster.Cluste
require.NoError(t, err)
require.NoError(t, os.RemoveAll(localCertsDir))
require.NoError(t, c.Get(ctx, t.L(), certsDir, localCertsDir, c.Node(1)))
require.NoError(t, filepath.Walk(localCertsDir, func(path string, info os.FileInfo, err error) error {
require.NoError(t, filepath.WalkDir(localCertsDir, func(path string, d fs.DirEntry, err error) error {
// Don't change permissions for the certs directory.
if path == localCertsDir {
return nil
5 changes: 3 additions & 2 deletions pkg/cmd/roachtest/zip_util.go
@@ -8,6 +8,7 @@ package main
import (
"archive/zip"
"io"
"io/fs"
"os"
"path/filepath"
"sort"
@@ -26,11 +27,11 @@ func moveToZipArchive(archiveName string, rootPath string, relPaths ...string) e
z := zip.NewWriter(f)
for _, relPath := range relPaths {
// Walk the given path.
if err := filepath.Walk(filepath.Join(rootPath, relPath), func(path string, info os.FileInfo, err error) error {
if err := filepath.WalkDir(filepath.Join(rootPath, relPath), func(path string, d fs.DirEntry, err error) error {
if err != nil {
return err
}
if info.IsDir() {
if d.IsDir() {
// Let Walk recurse inside.
return nil
}
5 changes: 3 additions & 2 deletions pkg/cmd/roachtest/zip_util_test.go
@@ -7,6 +7,7 @@ package main

import (
"archive/zip"
"io/fs"
"os"
"path/filepath"
"strings"
@@ -31,9 +32,9 @@ func TestMoveToZipArchive(t *testing.T) {
expectLs := func(expected ...string) {
t.Helper()
var actual []string
require.NoError(t, filepath.Walk(baseDir, func(path string, info os.FileInfo, err error) error {
require.NoError(t, filepath.WalkDir(baseDir, func(path string, d fs.DirEntry, err error) error {
require.NoError(t, err)
if !info.IsDir() {
if !d.IsDir() {
rel, err := filepath.Rel(baseDir, path)
require.NoError(t, err)
actual = append(actual, rel)
5 changes: 3 additions & 2 deletions pkg/cmd/whoownsit/whoownsit.go
@@ -12,6 +12,7 @@ package main
import (
"flag"
"fmt"
"io/fs"
"log"
"os"
"path/filepath"
@@ -41,11 +42,11 @@ func main() {
os.Exit(1)
}
}
if err := filepath.Walk(path, func(path string, info os.FileInfo, err error) error {
if err := filepath.WalkDir(path, func(path string, d fs.DirEntry, err error) error {
if err != nil {
return err
}
if !*dirsOnly || info.IsDir() {
if !*dirsOnly || d.IsDir() {
matches := codeOwners.Match(path)
var aliases []string
for _, match := range matches {