Commit

54383: sqlmigrations: fail all pre-20.1 non-terminal schema change jobs r=lucy-zhang a=lucy-zhang

This PR adds a sqlmigration to mark all non-terminal, non-migrated
schema change jobs started prior to 20.1 (as indicated by their format
version) as failed.

Closes #51181.

Release note (general change): This change affects schema change jobs
originally initiated on clusters running v19.2 or earlier which have not
reached a terminal state (i.e., `succeeded`, `failed`, or `canceled`),
and which have not finished undergoing an automatic internal migration
to allow them to run in 20.1 clusters. These jobs will now be marked as
`failed` upon upgrading to 20.2. Users who have ongoing schema changes
initiated in 19.2 are advised to wait for them to finish running on 20.1
before upgrading to 20.2. (At a minimum, they must wait until the 20.1
internal migration for the job has completed, which is indicated in the
logs.)

This may also affect users who have schema change jobs from prior to
20.1 that are stuck in a non-terminal state due to bugs and are making
no progress. In such cases, marking the job as failed has no real effect.
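
Before upgrading, users can check whether any such jobs remain with a
query along the following lines (a sketch only; it assumes the `SHOW JOBS`
columns available in 20.1, and the exact output may vary by version):

    -- Sketch: list schema change jobs that have not reached a terminal state.
    SELECT job_id, status, description
      FROM [SHOW JOBS]
     WHERE job_type = 'SCHEMA CHANGE'
       AND status NOT IN ('succeeded', 'failed', 'canceled');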

54709: sql: allow implicit casting of string to more array types r=otan,arulajmani a=rafiss

Release note (sql change): A string literal like '{X, Y, Z}'
is now automatically cast to an array when appropriate. Support is
added for UUID, Date, Bool, Time, TimeTZ, Timestamp, TimestampTZ, and
Interval arrays. (Int and Decimal were already supported.)

fixes #54672 
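
As a brief illustration (a sketch mirroring the logic test added in this
commit; the table and column names below are hypothetical):

    -- Sketch: both string literals are implicitly cast to the column's array type.
    CREATE TABLE example (u UUID[], ts TIMESTAMPTZ[]);
    INSERT INTO example VALUES (
      '{18e7b17e-4ead-4e27-bfd5-bb6d11261bb6}',
      '{2010-09-28 12:00:00.1, 2010-09-29 12:00:00.1}');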


54870: roachtest: switch zones used by interleavedpartitioned r=nvanbenschoten a=nvanbenschoten

This commit switches the zones used by the `interleavedpartitioned`
roachtest from `us-west1-b,us-east4-b,us-central1-a` to
`us-east1-b,us-west1-b,europe-west2-b`. This accomplishes two goals:
1. it avoids the use of `us-east4-b`, which has been causing issues
   in nightly tests over the past two weeks.
2. it allows the same cluster to be shared between this roachtest and
   `tpccbench/nodes=9/cpu=4/multi-region`.

Co-authored-by: Lucy Zhang <[email protected]>
Co-authored-by: Rafi Shamim <[email protected]>
Co-authored-by: Nathan VanBenschoten <[email protected]>
4 people committed Sep 29, 2020
4 parents f769bca + 7084beb + 802b8a2 + 48cbc41 commit 0c12d8b
Showing 7 changed files with 437 additions and 31 deletions.
6 changes: 3 additions & 3 deletions pkg/cmd/roachtest/interleavedpartitioned.go
@@ -120,13 +120,13 @@ func registerInterleaved(r *testRegistry) {
r.Add(testSpec{
Name: "interleavedpartitioned",
Owner: OwnerPartitioning,
Cluster: makeClusterSpec(12, geo(), zones("us-west1-b,us-east4-b,us-central1-a")),
Cluster: makeClusterSpec(12, geo(), zones("us-east1-b,us-west1-b,europe-west2-b")),
Run: func(ctx context.Context, t *test, c *cluster) {
runInterleaved(ctx, t, c,
config{
eastName: `us-east4-b`,
eastName: `europe-west2-b`,
westName: `us-west1-b`,
centralName: `us-central1-a`,
centralName: `us-east1-b`, // us-east is central between us-west and eu-west
initSessions: 1000,
insertPercent: 80,
retrievePercent: 10,
26 changes: 26 additions & 0 deletions pkg/sql/logictest/testdata/logic_test/array
@@ -1750,3 +1750,29 @@ SELECT CASE
END
----
(2,{-23791})

# Test that string literals can be implicitly casted to array types
statement ok
DROP TABLE t;
CREATE TABLE t (
a INT[],
b DECIMAL[],
c UUID[],
d STRING[],
e TIMESTAMPTZ[],
f DATE[],
g INTERVAL[],
h INET[],
i VARBIT[],
j FLOAT[]);
INSERT INTO t VALUES (
'{1, 2}',
'{1.1, 2.2}',
'{18e7b17e-4ead-4e27-bfd5-bb6d11261bb6, 18e7b17e-4ead-4e27-bfd5-bb6d11261bb7}',
'{cat, dog}',
'{2010-09-28 12:00:00.1, 2010-09-29 12:00:00.1}',
'{2010-09-28, 2010-09-29}',
'{PT12H2M, -23:00:00}',
'{192.168.100.128, ::ffff:10.4.3.2}',
'{0101, 11}',
'{12.34, 45.67}');
13 changes: 12 additions & 1 deletion pkg/sql/sem/tree/constant.go
@@ -469,20 +469,31 @@
types.Date,
types.StringArray,
types.IntArray,
types.FloatArray,
types.DecimalArray,
types.BoolArray,
types.Box2D,
types.Geography,
types.Geometry,
types.DecimalArray,
types.Time,
types.TimeTZ,
types.Timestamp,
types.TimestampTZ,
types.Interval,
types.Uuid,
types.DateArray,
types.TimeArray,
types.TimeTZArray,
types.TimestampArray,
types.TimestampTZArray,
types.IntervalArray,
types.UUIDArray,
types.INet,
types.Jsonb,
types.VarBit,
types.AnyEnum,
types.INetArray,
types.VarBitArray,
}
// StrValAvailBytes is the set of types convertible to byte array.
StrValAvailBytes = []*types.T{types.Bytes, types.Uuid, types.String, types.AnyEnum}
223 changes: 196 additions & 27 deletions pkg/sql/sem/tree/constant_test.go
@@ -230,6 +230,27 @@ func TestStringConstantVerifyAvailableTypes(t *testing.T) {
}
}

func mustParseDInt(t *testing.T, s string) tree.Datum {
d, err := tree.ParseDInt(s)
if err != nil {
t.Fatal(err)
}
return d
}
func mustParseDFloat(t *testing.T, s string) tree.Datum {
d, err := tree.ParseDFloat(s)
if err != nil {
t.Fatal(err)
}
return d
}
func mustParseDDecimal(t *testing.T, s string) tree.Datum {
d, err := tree.ParseDDecimal(s)
if err != nil {
t.Fatal(err)
}
return d
}
func mustParseDBool(t *testing.T, s string) tree.Datum {
d, err := tree.ParseDBool(s)
if err != nil {
@@ -286,45 +307,93 @@ func mustParseDJSON(t *testing.T, s string) tree.Datum {
}
return d
}
func mustParseDStringArray(t *testing.T, s string) tree.Datum {
evalContext := tree.MakeTestingEvalContext(cluster.MakeTestingClusterSettings())
d, _, err := tree.ParseDArrayFromString(&evalContext, s, types.String)
func mustParseDUuid(t *testing.T, s string) tree.Datum {
d, err := tree.ParseDUuidFromString(s)
if err != nil {
t.Fatal(err)
}
return d
}
func mustParseDBox2D(t *testing.T, s string) tree.Datum {
d, err := tree.ParseDBox2D(s)
if err != nil {
t.Fatal(err)
}
return d
}
func mustParseDGeography(t *testing.T, s string) tree.Datum {
d, err := tree.ParseDGeography(s)
if err != nil {
t.Fatal(err)
}
return d
}
func mustParseDGeometry(t *testing.T, s string) tree.Datum {
d, err := tree.ParseDGeometry(s)
if err != nil {
t.Fatal(err)
}
return d
}
func mustParseDIntArray(t *testing.T, s string) tree.Datum {
evalContext := tree.MakeTestingEvalContext(cluster.MakeTestingClusterSettings())
d, _, err := tree.ParseDArrayFromString(&evalContext, s, types.Int)
func mustParseDINet(t *testing.T, s string) tree.Datum {
d, err := tree.ParseDIPAddrFromINetString(s)
if err != nil {
t.Fatal(err)
}
return d
}
func mustParseDDecimalArray(t *testing.T, s string) tree.Datum {
evalContext := tree.MakeTestingEvalContext(cluster.MakeTestingClusterSettings())
d, _, err := tree.ParseDArrayFromString(&evalContext, s, types.Decimal)
func mustParseDVarBit(t *testing.T, s string) tree.Datum {
d, err := tree.ParseDBitArray(s)
if err != nil {
t.Fatal(err)
}
return d
}
func mustParseDArrayOfType(typ *types.T) func(t *testing.T, s string) tree.Datum {
return func(t *testing.T, s string) tree.Datum {
evalContext := tree.MakeTestingEvalContext(cluster.MakeTestingClusterSettings())
d, _, err := tree.ParseDArrayFromString(&evalContext, s, typ)
if err != nil {
t.Fatal(err)
}
return d
}
}

var parseFuncs = map[*types.T]func(*testing.T, string) tree.Datum{
types.String: func(t *testing.T, s string) tree.Datum { return tree.NewDString(s) },
types.Bytes: func(t *testing.T, s string) tree.Datum { return tree.NewDBytes(tree.DBytes(s)) },
types.Bool: mustParseDBool,
types.Date: mustParseDDate,
types.Time: mustParseDTime,
types.TimeTZ: mustParseDTimeTZ,
types.Timestamp: mustParseDTimestamp,
types.TimestampTZ: mustParseDTimestampTZ,
types.Interval: mustParseDInterval,
types.Jsonb: mustParseDJSON,
types.DecimalArray: mustParseDDecimalArray,
types.IntArray: mustParseDIntArray,
types.StringArray: mustParseDStringArray,
types.String: func(t *testing.T, s string) tree.Datum { return tree.NewDString(s) },
types.Bytes: func(t *testing.T, s string) tree.Datum { return tree.NewDBytes(tree.DBytes(s)) },
types.Int: mustParseDInt,
types.Float: mustParseDFloat,
types.Decimal: mustParseDDecimal,
types.Bool: mustParseDBool,
types.Date: mustParseDDate,
types.Time: mustParseDTime,
types.TimeTZ: mustParseDTimeTZ,
types.Timestamp: mustParseDTimestamp,
types.TimestampTZ: mustParseDTimestampTZ,
types.Interval: mustParseDInterval,
types.Jsonb: mustParseDJSON,
types.Uuid: mustParseDUuid,
types.Box2D: mustParseDBox2D,
types.Geography: mustParseDGeography,
types.Geometry: mustParseDGeometry,
types.INet: mustParseDINet,
types.VarBit: mustParseDVarBit,
types.DecimalArray: mustParseDArrayOfType(types.Decimal),
types.FloatArray: mustParseDArrayOfType(types.Float),
types.IntArray: mustParseDArrayOfType(types.Int),
types.StringArray: mustParseDArrayOfType(types.String),
types.BoolArray: mustParseDArrayOfType(types.Bool),
types.UUIDArray: mustParseDArrayOfType(types.Uuid),
types.DateArray: mustParseDArrayOfType(types.Date),
types.TimeArray: mustParseDArrayOfType(types.Time),
types.TimeTZArray: mustParseDArrayOfType(types.TimeTZ),
types.TimestampArray: mustParseDArrayOfType(types.Timestamp),
types.TimestampTZArray: mustParseDArrayOfType(types.TimestampTZ),
types.IntervalArray: mustParseDArrayOfType(types.Interval),
types.INetArray: mustParseDArrayOfType(types.INet),
types.VarBitArray: mustParseDArrayOfType(types.VarBit),
}

func typeSet(tys ...*types.T) map[*types.T]struct{} {
@@ -391,17 +460,54 @@ func TestStringConstantResolveAvailableTypes(t *testing.T) {
c: tree.NewBytesStrVal("PT12H2M"),
parseOptions: typeSet(types.String, types.Bytes),
},
{
c: tree.NewStrVal("box(0 0, 1 1)"),
parseOptions: typeSet(types.String, types.Bytes, types.Box2D),
},
{
c: tree.NewStrVal("POINT(-100.59 42.94)"),
parseOptions: typeSet(types.String, types.Bytes, types.Geography, types.Geometry),
},
{
c: tree.NewStrVal("192.168.100.128/25"),
parseOptions: typeSet(types.String, types.Bytes, types.INet),
},
{
c: tree.NewStrVal("111000110101"),
parseOptions: typeSet(
types.String,
types.Bytes,
types.VarBit,
types.Int,
types.Float,
types.Decimal,
types.Interval,
types.Jsonb),
},
{
c: tree.NewStrVal(`{"a": 1}`),
parseOptions: typeSet(types.String, types.Bytes, types.Jsonb),
},
{
c: tree.NewStrVal(`{1,2}`),
parseOptions: typeSet(types.String, types.Bytes, types.StringArray, types.IntArray, types.DecimalArray),
c: tree.NewStrVal(`{1,2}`),
parseOptions: typeSet(
types.String,
types.Bytes,
types.StringArray,
types.IntArray,
types.FloatArray,
types.DecimalArray,
types.IntervalArray),
},
{
c: tree.NewStrVal(`{1.5,2.0}`),
parseOptions: typeSet(types.String, types.Bytes, types.StringArray, types.DecimalArray),
c: tree.NewStrVal(`{1.5,2.0}`),
parseOptions: typeSet(
types.String,
types.Bytes,
types.StringArray,
types.FloatArray,
types.DecimalArray,
types.IntervalArray),
},
{
c: tree.NewStrVal(`{a,b}`),
@@ -411,6 +517,66 @@ func TestStringConstantResolveAvailableTypes(t *testing.T) {
c: tree.NewBytesStrVal(string([]byte{0xff, 0xfe, 0xfd})),
parseOptions: typeSet(types.String, types.Bytes),
},
{
c: tree.NewStrVal(`18e7b17e-4ead-4e27-bfd5-bb6d11261bb6`),
parseOptions: typeSet(types.String, types.Bytes, types.Uuid),
},
{
c: tree.NewStrVal(`{18e7b17e-4ead-4e27-bfd5-bb6d11261bb6, 18e7b17e-4ead-4e27-bfd5-bb6d11261bb7}`),
parseOptions: typeSet(types.String, types.Bytes, types.StringArray, types.UUIDArray),
},
{
c: tree.NewStrVal("{true, false}"),
parseOptions: typeSet(types.String, types.Bytes, types.StringArray, types.BoolArray),
},
{
c: tree.NewStrVal("{2010-09-28, 2010-09-29}"),
parseOptions: typeSet(types.String, types.Bytes, types.StringArray, types.DateArray, types.TimestampArray, types.TimestampTZArray),
},
{
c: tree.NewStrVal("{2010-09-28 12:00:00.1, 2010-09-29 12:00:00.1}"),
parseOptions: typeSet(
types.String,
types.Bytes,
types.StringArray,
types.TimeArray,
types.TimeTZArray,
types.TimestampArray,
types.TimestampTZArray,
types.DateArray),
},
{
c: tree.NewStrVal("{2006-07-08T00:00:00.000000123Z, 2006-07-10T00:00:00.000000123Z}"),
parseOptions: typeSet(
types.String,
types.Bytes,
types.StringArray,
types.TimeArray,
types.TimeTZArray,
types.TimestampArray,
types.TimestampTZArray,
types.DateArray),
},
{
c: tree.NewStrVal("{PT12H2M, -23:00:00}"),
parseOptions: typeSet(types.String, types.Bytes, types.StringArray, types.IntervalArray),
},
{
c: tree.NewStrVal("{192.168.100.128, ::ffff:10.4.3.2}"),
parseOptions: typeSet(types.String, types.Bytes, types.StringArray, types.INetArray),
},
{
c: tree.NewStrVal("{0101, 11}"),
parseOptions: typeSet(
types.String,
types.Bytes,
types.StringArray,
types.IntArray,
types.FloatArray,
types.DecimalArray,
types.IntervalArray,
types.VarBitArray),
},
}

evalCtx := tree.NewTestingEvalContext(cluster.MakeTestingClusterSettings())
@@ -435,7 +601,10 @@ func TestStringConstantResolveAvailableTypes(t *testing.T) {
res, err = typedExpr.Eval(evalCtx)
}
if err != nil {
if !strings.Contains(err.Error(), "could not parse") && !strings.Contains(err.Error(), "parsing") {
if !strings.Contains(err.Error(), "could not parse") &&
!strings.Contains(err.Error(), "parsing") &&
!strings.Contains(err.Error(), "out of range") &&
!strings.Contains(err.Error(), "exceeds supported") {
// Parsing errors are permitted for this test, but the number of correctly
// parseable types will be verified. Any other error should throw a failure.
t.Errorf("%d: expected resolving %v as available type %s would either succeed"+