sql/*: fix improperly wrapped errors #72350

Merged: 1 commit, merged on Nov 3, 2021
6 changes: 3 additions & 3 deletions pkg/geo/geos/geos.go
@@ -194,10 +194,10 @@ func initGEOS(dirs []string) (*C.CR_GEOS, string, error) {
}
err = errors.CombineErrors(
err,
errors.Newf(
"geos: cannot load GEOS from dir %q: %s",
dir,
errors.Wrapf(
newErr,
"geos: cannot load GEOS from dir %q",
dir,
),
)
}
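Note on the pattern applied throughout this PR: building a new error with the cause rendered via %s (errors.Newf, fmt.Errorf) flattens the cause into plain text, so errors.Is/errors.As and any structured details attached to it stop working; errors.Wrapf keeps the cause in the chain while producing the same rendered message. A minimal standalone sketch of the difference using github.com/cockroachdb/errors (illustrative only, not code from this PR):

package main

import (
    "fmt"
    "io/fs"

    "github.com/cockroachdb/errors"
)

func main() {
    cause := fs.ErrNotExist

    // Old pattern: the cause is formatted into the message and dropped from the chain.
    flattened := errors.Newf("geos: cannot load GEOS from dir %q: %s", "/usr/lib", cause)
    // New pattern: the cause stays wrapped underneath the same message.
    wrapped := errors.Wrapf(cause, "geos: cannot load GEOS from dir %q", "/usr/lib")

    fmt.Println(flattened)                            // geos: cannot load GEOS from dir "/usr/lib": file does not exist
    fmt.Println(wrapped)                              // same rendered text
    fmt.Println(errors.Is(flattened, fs.ErrNotExist)) // false: the chain was broken
    fmt.Println(errors.Is(wrapped, fs.ErrNotExist))   // true: the chain is preserved
}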
10 changes: 5 additions & 5 deletions pkg/sql/colflow/colrpc/inbox.go
@@ -12,7 +12,6 @@ package colrpc

import (
"context"
"fmt"
"io"
"math"
"sync/atomic"
@@ -33,6 +32,7 @@ import (
"github.com/cockroachdb/cockroach/pkg/util/log"
"github.com/cockroachdb/cockroach/pkg/util/log/logcrash"
"github.com/cockroachdb/cockroach/pkg/util/timeutil"
"github.com/cockroachdb/errors"
"github.com/cockroachdb/logtags"
)

@@ -208,7 +208,7 @@ func (i *Inbox) RunWithStream(
case readerCtx = <-i.contextCh:
log.VEvent(streamCtx, 2, "Inbox reader arrived")
case <-streamCtx.Done():
return fmt.Errorf("%s: streamCtx while waiting for reader (remote client canceled)", streamCtx.Err())
return errors.Wrap(streamCtx.Err(), "streamCtx error while waiting for reader (remote client canceled)")
case <-flowCtxDone:
// The flow context of the inbox host has been canceled. This can occur
// e.g. when the query is canceled, or when another stream encountered
@@ -233,7 +233,7 @@ func (i *Inbox) RunWithStream(
return nil
case <-streamCtx.Done():
// The client canceled the stream.
return fmt.Errorf("%s: streamCtx in Inbox stream handler (remote client canceled)", streamCtx.Err())
return errors.Wrap(streamCtx.Err(), "streamCtx error in Inbox stream handler (remote client canceled)")
}
}

@@ -258,7 +258,7 @@ func (i *Inbox) Init(ctx context.Context) {
select {
case i.stream = <-i.streamCh:
case err := <-i.timeoutCh:
i.errCh <- fmt.Errorf("%s: remote stream arrived too late", err)
i.errCh <- errors.Wrap(err, "remote stream arrived too late")
return err
case <-i.Ctx.Done():
// Our reader canceled the context meaning that it no longer needs
@@ -325,7 +325,7 @@ func (i *Inbox) Next() coldata.Batch {
// Regardless of the cause we want to propagate such an error as
// expected one in all cases so that the caller could decide on how
// to handle it.
err = pgerror.Newf(pgcode.InternalConnectionFailure, "inbox communication error: %s", err)
err = pgerror.Wrap(err, pgcode.InternalConnectionFailure, "inbox communication error")
i.errCh <- err
colexecerror.ExpectedError(err)
}
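The pgerror changes are the same idea with a SQLSTATE attached: pgerror.Wrap tags the error with a pgcode while keeping the underlying gRPC/stream error as its cause, whereas pgerror.Newf(..., "%s", err) keeps only the text. A rough sketch of the intended behavior, assuming the usual pgerror.GetPGCode helper (illustrative, not code from this PR):

package main

import (
    "fmt"

    "github.com/cockroachdb/cockroach/pkg/sql/pgwire/pgcode"
    "github.com/cockroachdb/cockroach/pkg/sql/pgwire/pgerror"
    "github.com/cockroachdb/errors"
)

func main() {
    cause := errors.New("rpc error: connection reset by peer") // stand-in for a stream error

    err := pgerror.Wrap(cause, pgcode.InternalConnectionFailure, "inbox communication error")

    fmt.Println(err)                    // inbox communication error: rpc error: connection reset by peer
    fmt.Println(pgerror.GetPGCode(err)) // the attached code survives wrapping
    fmt.Println(errors.Is(err, cause))  // true: the original cause is still reachable
}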
2 changes: 1 addition & 1 deletion pkg/sql/exec_util.go
@@ -1798,7 +1798,7 @@ type registrySession interface {
func (r *SessionRegistry) CancelQuery(queryIDStr string) (bool, error) {
queryID, err := StringToClusterWideID(queryIDStr)
if err != nil {
return false, fmt.Errorf("query ID %s malformed: %s", queryID, err)
return false, errors.Wrapf(err, "query ID %s malformed", queryID)
}

r.Lock()
2 changes: 1 addition & 1 deletion pkg/sql/flowinfra/inbound.go
@@ -137,7 +137,7 @@ func processInboundStreamHelper(
if err != nil {
if err != io.EOF {
// Communication error.
err = pgerror.Newf(pgcode.InternalConnectionFailure, "inbox communication error: %s", err)
err = pgerror.Wrap(err, pgcode.InternalConnectionFailure, "inbox communication error")
sendErrToConsumer(err)
errChan <- err
return
2 changes: 1 addition & 1 deletion pkg/sql/logictest/testdata/logic_test/drop_index
@@ -327,7 +327,7 @@ DROP INDEX t_secondary CASCADE;
ALTER TABLE t DROP COLUMN b;
INSERT INTO t SELECT a + 1 FROM t;

statement error pgcode 23505 duplicate key value: decoding err=column-id "2" does not exist
statement error pgcode 23505 duplicate key value got decoding error: column-id "2" does not exist
UPSERT INTO t SELECT a + 1 FROM t;

statement ok
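The expected message in this logic test changes because the underlying error (built in pkg/sql/row/errors.go, further down in this diff) is now produced with pgerror.Wrap, and wrapped errors render as "<wrap message>: <cause>" rather than embedding the cause mid-sentence. A small sketch of the new rendering, with an illustrative cause string rather than one taken from a real test run:

package main

import (
    "fmt"

    "github.com/cockroachdb/cockroach/pkg/sql/pgwire/pgcode"
    "github.com/cockroachdb/cockroach/pkg/sql/pgwire/pgerror"
    "github.com/cockroachdb/errors"
)

func main() {
    cause := errors.New(`column-id "2" does not exist`)

    // Before: pgerror.Newf(..., "duplicate key value: decoding err=%s", err).
    // After: the wrap message comes first and the cause follows after ": ".
    err := pgerror.Wrap(cause, pgcode.UniqueViolation, "duplicate key value got decoding error")
    fmt.Println(err) // duplicate key value got decoding error: column-id "2" does not exist
}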
2 changes: 1 addition & 1 deletion pkg/sql/opt/optgen/cmd/langgen/main.go
@@ -130,7 +130,7 @@ func generate(compiled *lang.CompiledExpr, out string, genFunc genFunc) error {
if err != nil {
// Write out incorrect source for easier debugging.
b = buf.Bytes()
err = fmt.Errorf("code formatting failed with Go parse error\n%s:%s", out, err)
err = errors.Wrapf(err, "code formatting failed with Go parse error\n%s", out)
}
} else {
b = buf.Bytes()
2 changes: 1 addition & 1 deletion pkg/sql/opt/optgen/cmd/optgen/main.go
@@ -218,7 +218,7 @@ func (g *optgen) generate(compiled *lang.CompiledExpr, genFunc genFunc) error {
// Write out incorrect source for easier debugging.
b = buf.Bytes()
out := g.cmdLine.Lookup("out").Value.String()
err = fmt.Errorf("code formatting failed with Go parse error\n%s:%s", out, err)
err = errors.Wrapf(err, "code formatting failed with Go parse error\n%s", out)
}
} else {
b = buf.Bytes()
2 changes: 1 addition & 1 deletion pkg/sql/opt/optgen/lang/compiler.go
@@ -200,7 +200,7 @@ func (c *Compiler) compileRules(rules RuleSetExpr) bool {

func (c *Compiler) addErr(src *SourceLoc, err error) {
if src != nil {
err = fmt.Errorf("%s: %s", src, err.Error())
err = errors.Wrapf(err, "%s", src)
}
c.errors = append(c.errors, err)
}
6 changes: 3 additions & 3 deletions pkg/sql/opt/props/func_dep_rand_test.go
@@ -432,7 +432,7 @@ func (tc *testConfig) checkAPIs(fd *FuncDepSet, tr testRelation) error {
to: closure,
strict: true,
}); err != nil {
return fmt.Errorf("ComputeClosure%s incorrectly returns %s: %s", cols, closure, err)
return errors.Wrapf(err, "ComputeClosure%s incorrectly returns %s", cols, closure)
}

reduced := fd.ReduceCols(cols)
@@ -441,15 +441,15 @@
to: cols,
strict: true,
}); err != nil {
return fmt.Errorf("ReduceCols%s incorrectly returns %s: %s", cols, reduced, err)
return errors.Wrapf(err, "ReduceCols%s incorrectly returns %s", cols, reduced)
}

var proj FuncDepSet
proj.CopyFrom(fd)
proj.ProjectCols(cols)
// The FDs after projection should still hold on the table.
if err := tr.checkFDs(&proj); err != nil {
return fmt.Errorf("ProjectCols%s incorrectly returns %s: %s", cols, proj.String(), err)
return errors.Wrapf(err, "ProjectCols%s incorrectly returns %s", cols, proj.String())
}
}

11 changes: 6 additions & 5 deletions pkg/sql/pg_metadata_test.go
@@ -123,6 +123,7 @@ import (
"github.com/cockroachdb/cockroach/pkg/testutils/sqlutils"
"github.com/cockroachdb/cockroach/pkg/util/leaktest"
"github.com/cockroachdb/cockroach/pkg/util/log"
"github.com/cockroachdb/errors"
"github.com/cockroachdb/errors/oserror"
"github.com/lib/pq/oid"
)
@@ -678,7 +679,7 @@ type outputFile struct {
// appendString calls WriteString and panics on error.
func (o outputFile) appendString(s string) {
if _, err := o.f.WriteString(s); err != nil {
panic(fmt.Errorf("error while writing string: %s: %v", s, err))
panic(errors.Wrapf(err, "error while writing string: %s", s))
}
}

@@ -703,21 +704,21 @@ func rewriteFile(fileName string, f func(*os.File, outputFile)) {

updateFile(tmpName, fileName, func(input *os.File, output outputFile) {
if _, err := io.Copy(output.f, input); err != nil {
panic(fmt.Errorf("problem at rewriting file %s into %s: %v", tmpName, fileName, err))
panic(errors.Wrapf(err, "problem at rewriting file %s into %s", tmpName, fileName))
}
})
}

func updateFile(inputFileName, outputFileName string, f func(input *os.File, output outputFile)) {
input, err := os.Open(inputFileName)
if err != nil {
panic(fmt.Errorf("error opening file %s: %v", inputFileName, err))
panic(errors.Wrapf(err, "error opening file %s", inputFileName))
}
defer dClose(input)

output, err := os.OpenFile(outputFileName, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0644)
if err != nil {
panic(fmt.Errorf("error opening file %s: %v", outputFileName, err))
panic(errors.Wrapf(err, "error opening file %s", outputFileName))
}
defer dClose(output)

@@ -895,7 +896,7 @@ func (scf schemaCodeFixer) getTableDefinitionsText(unimplementedTables PGMetadat
maxLength := 0
f, err := os.Open(fileName)
if err != nil {
panic(fmt.Errorf("could not open file %s: %v", fileName, err))
panic(errors.Wrapf(err, "could not open file %s", fileName))
}
defer dClose(f)
reader := bufio.NewScanner(f)
24 changes: 16 additions & 8 deletions pkg/sql/pgwire/server.go
@@ -739,17 +739,21 @@ func parseClientProvidedSessionParameters(
// Read a key-value pair from the client.
key, err := buf.GetString()
if err != nil {
return sql.SessionArgs{}, pgerror.Newf(pgcode.ProtocolViolation,
"error reading option key: %s", err)
return sql.SessionArgs{}, pgerror.Wrap(
err, pgcode.ProtocolViolation,
"error reading option key",
)
}
if len(key) == 0 {
// End of parameter list.
break
}
value, err := buf.GetString()
if err != nil {
return sql.SessionArgs{}, pgerror.Newf(pgcode.ProtocolViolation,
"error reading option value: %s", err)
return sql.SessionArgs{}, pgerror.Wrapf(
err, pgcode.ProtocolViolation,
"error reading option value for key %q", key,
)
}

// Case-fold for the key for easier comparison.
@@ -788,13 +792,17 @@

hostS, portS, err := net.SplitHostPort(value)
if err != nil {
return sql.SessionArgs{}, pgerror.Newf(pgcode.ProtocolViolation,
"invalid address format: %v", err)
return sql.SessionArgs{}, pgerror.Wrap(
err, pgcode.ProtocolViolation,
"invalid address format",
)
}
port, err := strconv.Atoi(portS)
if err != nil {
return sql.SessionArgs{}, pgerror.Newf(pgcode.ProtocolViolation,
"remote port is not numeric: %v", err)
return sql.SessionArgs{}, pgerror.Wrap(
err, pgcode.ProtocolViolation,
"remote port is not numeric",
)
}
ip := net.ParseIP(hostS)
if ip == nil {
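In the option-value path the rewrite also enriches the message: pgerror.Wrapf can add context (here the option key being read) at the same time as it preserves the cause. A hedged sketch with made-up values — the key name and cause text are illustrative, not taken from the pgwire code:

package main

import (
    "fmt"

    "github.com/cockroachdb/cockroach/pkg/sql/pgwire/pgcode"
    "github.com/cockroachdb/cockroach/pkg/sql/pgwire/pgerror"
    "github.com/cockroachdb/errors"
)

func main() {
    cause := errors.New("insufficient data") // stand-in for a buffer decoding error

    err := pgerror.Wrapf(cause, pgcode.ProtocolViolation,
        "error reading option value for key %q", "database")

    fmt.Println(err)                   // error reading option value for key "database": insufficient data
    fmt.Println(errors.Is(err, cause)) // true: wrapping keeps the decoding error reachable
}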
12 changes: 6 additions & 6 deletions pkg/sql/region_util.go
@@ -663,17 +663,17 @@ func prepareZoneConfigForMultiRegionTable(
return nil, nil
}
if err := newZoneConfig.Validate(); err != nil {
return nil, pgerror.Newf(
pgcode.CheckViolation,
"could not validate zone config: %v",
return nil, pgerror.Wrap(
err,
pgcode.CheckViolation,
"could not validate zone config",
)
}
if err := newZoneConfig.ValidateTandemFields(); err != nil {
return nil, pgerror.Newf(
pgcode.CheckViolation,
"could not validate zone config: %v",
return nil, pgerror.Wrap(
err,
pgcode.CheckViolation,
"could not validate zone config",
)
}
return prepareZoneConfigWrites(
8 changes: 4 additions & 4 deletions pkg/sql/row/errors.go
@@ -115,8 +115,8 @@ func NewUniquenessConstraintViolationError(
) error {
index, names, values, err := DecodeRowInfo(ctx, tableDesc, key, value, false)
if err != nil {
return pgerror.Newf(pgcode.UniqueViolation,
"duplicate key value: decoding err=%s", err)
return pgerror.Wrap(err, pgcode.UniqueViolation,
"duplicate key value got decoding error")
}

// Exclude implicit partitioning columns and hash sharded index columns from
@@ -156,8 +156,8 @@ func NewLockNotAvailableError(

index, colNames, values, err := DecodeRowInfo(ctx, tableDesc, key, nil, false)
if err != nil {
return pgerror.Newf(pgcode.LockNotAvailable,
"%s: decoding err=%s", baseMsg, err)
return pgerror.Wrapf(err, pgcode.LockNotAvailable,
"%s: got decoding error", baseMsg)
}

return pgerror.Newf(pgcode.LockNotAvailable,
4 changes: 2 additions & 2 deletions pkg/sql/sem/builtins/builtins.go
@@ -4895,7 +4895,7 @@ value if you rely on the HLC for accuracy.`,
},
})
if err := ctx.Txn.Run(ctx.Context, b); err != nil {
return nil, pgerror.Newf(pgcode.InvalidParameterValue, "message: %s", err)
return nil, pgerror.Wrap(err, pgcode.InvalidParameterValue, "error fetching leaseholder")
}
resp := b.RawResponse().Responses[0].GetInner().(*roachpb.LeaseInfoResponse)

@@ -4990,7 +4990,7 @@ value if you rely on the HLC for accuracy.`,
},
})
if err := ctx.Txn.Run(ctx.Context, b); err != nil {
return nil, pgerror.Newf(pgcode.InvalidParameterValue, "message: %s", err)
return nil, pgerror.Wrap(err, pgcode.InvalidParameterValue, "error fetching range stats")
}
resp := b.RawResponse().Responses[0].GetInner().(*roachpb.RangeStatsResponse).MVCCStats
jsonStr, err := gojson.Marshal(&resp)
12 changes: 6 additions & 6 deletions pkg/sql/sem/tree/eval.go
@@ -2983,8 +2983,8 @@ func MatchLikeEscape(

like, err := optimizedLikeFunc(pattern, caseInsensitive, escapeRune)
if err != nil {
return DBoolFalse, pgerror.Newf(
pgcode.InvalidRegularExpression, "LIKE regexp compilation failed: %v", err)
return DBoolFalse, pgerror.Wrap(
err, pgcode.InvalidRegularExpression, "LIKE regexp compilation failed")
}

if like == nil {
@@ -3008,8 +3008,8 @@ func ConvertLikeToRegexp(
key := likeKey{s: pattern, caseInsensitive: caseInsensitive, escape: escape}
re, err := ctx.ReCache.GetRegexp(key)
if err != nil {
return nil, pgerror.Newf(
pgcode.InvalidRegularExpression, "LIKE regexp compilation failed: %v", err)
return nil, pgerror.Wrap(
err, pgcode.InvalidRegularExpression, "LIKE regexp compilation failed")
}
return re, nil
}
@@ -3033,8 +3033,8 @@ func matchLike(ctx *EvalContext, left, right Datum, caseInsensitive bool) (Datum

like, err := optimizedLikeFunc(pattern, caseInsensitive, '\\')
if err != nil {
return DBoolFalse, pgerror.Newf(
pgcode.InvalidRegularExpression, "LIKE regexp compilation failed: %v", err)
return DBoolFalse, pgerror.Wrap(
err, pgcode.InvalidRegularExpression, "LIKE regexp compilation failed")
}

if like == nil {
8 changes: 4 additions & 4 deletions pkg/sql/sem/tree/interval.go
@@ -67,8 +67,8 @@ func (l *intervalLexer) consumeNum() (int64, bool, float64) {
// Try to convert.
value, err := strconv.ParseFloat(l.str[start:l.offset], 64)
if err != nil {
l.err = pgerror.Newf(
pgcode.InvalidDatetimeFormat, "interval: %v", err)
l.err = pgerror.Wrap(
err, pgcode.InvalidDatetimeFormat, "interval")
return 0, false, 0
}
decPart = value
@@ -108,8 +108,8 @@ func (l *intervalLexer) consumeInt() int64 {

x, err := strconv.ParseInt(l.str[start:l.offset], 10, 64)
if err != nil {
l.err = pgerror.Newf(
pgcode.InvalidDatetimeFormat, "interval: %v", err)
l.err = pgerror.Wrap(
err, pgcode.InvalidDatetimeFormat, "interval")
return 0
}
if start == l.offset {