diff --git a/docs/generated/sql/aggregates.md b/docs/generated/sql/aggregates.md
index 3488d2641593..d21ca6911d1a 100644
--- a/docs/generated/sql/aggregates.md
+++ b/docs/generated/sql/aggregates.md
@@ -39,6 +39,8 @@
nth_value(val: varbit, n: int) → varbit | Returns val evaluated at the row that is the n th row of the window frame (counting from 1); null if no such row.
diff --git a/pkg/ccl/changefeedccl/avro.go b/pkg/ccl/changefeedccl/avro.go
index e96bc5baf39e..c7e969611efd 100644
--- a/pkg/ccl/changefeedccl/avro.go
+++ b/pkg/ccl/changefeedccl/avro.go
@@ -328,6 +328,16 @@ func typeToAvroSchema(typ *types.T) (*avroSchemaField, error) {
return tree.NewDFloat(tree.DFloat(x.(float64))), nil
},
)
+ case types.PGLSNFamily:
+ setNullable(
+ avroSchemaString,
+ func(d tree.Datum, _ interface{}) (interface{}, error) {
+ return d.(*tree.DPGLSN).LSN.String(), nil
+ },
+ func(x interface{}) (tree.Datum, error) {
+ return tree.ParseDPGLSN(x.(string))
+ },
+ )
case types.Box2DFamily:
setNullable(
avroSchemaString,
diff --git a/pkg/ccl/changefeedccl/avro_test.go b/pkg/ccl/changefeedccl/avro_test.go
index 4ae6d9ea1df8..a883d3bc4fa9 100644
--- a/pkg/ccl/changefeedccl/avro_test.go
+++ b/pkg/ccl/changefeedccl/avro_test.go
@@ -463,6 +463,7 @@ func TestAvroSchema(t *testing.T) {
`INT8`: `["null","long"]`,
`INTERVAL`: `["null","string"]`,
`JSONB`: `["null","string"]`,
+ `PG_LSN`: `["null","string"]`,
`STRING`: `["null","string"]`,
`STRING COLLATE fr`: `["null","string"]`,
`TIME`: `["null",{"type":"long","logicalType":"time-micros"}]`,
@@ -485,15 +486,18 @@ func TestAvroSchema(t *testing.T) {
}
colType := typ.SQLString()
- tableDesc, err := parseTableDesc(`CREATE TABLE foo (pk INT PRIMARY KEY, a ` + colType + `)`)
- require.NoError(t, err)
- field, err := columnToAvroSchema(
- cdcevent.ResultColumn{ResultColumn: colinfo.ResultColumn{Typ: tableDesc.PublicColumns()[1].GetType()}},
- )
- require.NoError(t, err)
- schema, err := json.Marshal(field.SchemaType)
- require.NoError(t, err)
- require.Equal(t, goldens[colType], string(schema), `SQL type %s`, colType)
+
+ t.Run(typ.String(), func(t *testing.T) {
+ tableDesc, err := parseTableDesc(`CREATE TABLE foo (pk INT PRIMARY KEY, a ` + colType + `)`)
+ require.NoError(t, err)
+ field, err := columnToAvroSchema(
+ cdcevent.ResultColumn{ResultColumn: colinfo.ResultColumn{Typ: tableDesc.PublicColumns()[1].GetType()}},
+ )
+ require.NoError(t, err)
+ schema, err := json.Marshal(field.SchemaType)
+ require.NoError(t, err)
+ require.Equal(t, goldens[colType], string(schema), `SQL type %s`, colType)
+ })
// Delete from goldens for the following assertion that we don't have any
// unexpectedly unused goldens.
@@ -652,6 +656,12 @@ func TestAvroSchema(t *testing.T) {
sql: `''`,
avro: `{"bytes":""}`,
numRawBytes: 0},
+
+ {
+ sqlType: `PG_LSN`,
+ sql: `'A/0'`,
+ avro: `{"string":"A\/0"}`,
+ },
}
for _, test := range goldens {
diff --git a/pkg/ccl/changefeedccl/encoder_test.go b/pkg/ccl/changefeedccl/encoder_test.go
index e92ffbbafacc..d19ed2fc18fb 100644
--- a/pkg/ccl/changefeedccl/encoder_test.go
+++ b/pkg/ccl/changefeedccl/encoder_test.go
@@ -1151,7 +1151,7 @@ func TestJsonRountrip(t *testing.T) {
switch typ {
case types.Jsonb:
// Unsupported by sql/catalog/colinfo
- case types.TSQuery, types.TSVector:
+ case types.TSQuery, types.TSVector, types.PGLSN:
// Unsupported by pkg/sql/parser
default:
if arrayTyp.InternalType.ArrayContents == typ {
diff --git a/pkg/ccl/logictestccl/tests/3node-tenant/generated_test.go b/pkg/ccl/logictestccl/tests/3node-tenant/generated_test.go
index c84b4abed8a7..816d5b0df5f9 100644
--- a/pkg/ccl/logictestccl/tests/3node-tenant/generated_test.go
+++ b/pkg/ccl/logictestccl/tests/3node-tenant/generated_test.go
@@ -1333,6 +1333,13 @@ func TestTenantLogic_pg_extension(
runLogicTest(t, "pg_extension")
}
+func TestTenantLogic_pg_lsn(
+ t *testing.T,
+) {
+ defer leaktest.AfterTest(t)()
+ runLogicTest(t, "pg_lsn")
+}
+
func TestTenantLogic_pgcrypto_builtins(
t *testing.T,
) {
diff --git a/pkg/sql/catalog/colinfo/col_type_info.go b/pkg/sql/catalog/colinfo/col_type_info.go
index ec400e70b3fd..8c3629f06d95 100644
--- a/pkg/sql/catalog/colinfo/col_type_info.go
+++ b/pkg/sql/catalog/colinfo/col_type_info.go
@@ -105,7 +105,7 @@ func ValidateColumnDefType(ctx context.Context, version clusterversion.Handle, t
return ValidateColumnDefType(ctx, version, t.ArrayContents())
case types.BitFamily, types.IntFamily, types.FloatFamily, types.BoolFamily, types.BytesFamily, types.DateFamily,
- types.INetFamily, types.IntervalFamily, types.JsonFamily, types.OidFamily, types.TimeFamily,
+ types.INetFamily, types.IntervalFamily, types.JsonFamily, types.OidFamily, types.PGLSNFamily, types.TimeFamily,
types.TimestampFamily, types.TimestampTZFamily, types.UuidFamily, types.TimeTZFamily,
types.GeographyFamily, types.GeometryFamily, types.EnumFamily, types.Box2DFamily:
// These types are OK.
diff --git a/pkg/sql/catalog/colinfo/column_type_properties.go b/pkg/sql/catalog/colinfo/column_type_properties.go
index 96ed30d5a633..81a90f8d5410 100644
--- a/pkg/sql/catalog/colinfo/column_type_properties.go
+++ b/pkg/sql/catalog/colinfo/column_type_properties.go
@@ -82,6 +82,7 @@ func CanHaveCompositeKeyEncoding(typ *types.T) bool {
types.GeographyFamily,
types.EnumFamily,
types.Box2DFamily,
+ types.PGLSNFamily,
types.VoidFamily,
types.EncodedKeyFamily,
types.TSQueryFamily,
diff --git a/pkg/sql/exec_util.go b/pkg/sql/exec_util.go
index 1db363cff978..b826db84329e 100644
--- a/pkg/sql/exec_util.go
+++ b/pkg/sql/exec_util.go
@@ -1975,6 +1975,7 @@ func checkResultType(typ *types.T) error {
case types.UuidFamily:
case types.INetFamily:
case types.OidFamily:
+ case types.PGLSNFamily:
case types.TupleFamily:
case types.EnumFamily:
case types.VoidFamily:
diff --git a/pkg/sql/importer/exportparquet.go b/pkg/sql/importer/exportparquet.go
index 21efff7e6e2b..054047d2c1d5 100644
--- a/pkg/sql/importer/exportparquet.go
+++ b/pkg/sql/importer/exportparquet.go
@@ -321,6 +321,14 @@ func NewParquetColumn(typ *types.T, name string, nullable bool) (ParquetColumn,
}
return tree.NewDEnum(e), nil
}
+ case types.PGLSNFamily:
+ populateLogicalStringCol(schemaEl)
+ col.encodeFn = func(d tree.Datum) (interface{}, error) {
+ return []byte(d.(*tree.DPGLSN).LSN.String()), nil
+ }
+ col.DecodeFn = func(x interface{}) (tree.Datum, error) {
+ return tree.ParseDPGLSN(string(x.([]byte)))
+ }
case types.Box2DFamily:
populateLogicalStringCol(schemaEl)
col.encodeFn = func(d tree.Datum) (interface{}, error) {
diff --git a/pkg/sql/logictest/testdata/logic_test/grant_table b/pkg/sql/logictest/testdata/logic_test/grant_table
index 00141e855575..96cc362c79a6 100644
--- a/pkg/sql/logictest/testdata/logic_test/grant_table
+++ b/pkg/sql/logictest/testdata/logic_test/grant_table
@@ -387,6 +387,12 @@ test pg_catalog pg_language publi
test pg_catalog pg_largeobject public SELECT false
test pg_catalog pg_largeobject_metadata public SELECT false
test pg_catalog pg_locks public SELECT false
+test pg_catalog pg_lsn admin ALL false
+test pg_catalog pg_lsn public USAGE false
+test pg_catalog pg_lsn root ALL false
+test pg_catalog pg_lsn[] admin ALL false
+test pg_catalog pg_lsn[] public USAGE false
+test pg_catalog pg_lsn[] root ALL false
test pg_catalog pg_matviews public SELECT false
test pg_catalog pg_namespace public SELECT false
test pg_catalog pg_opclass public SELECT false
@@ -692,6 +698,10 @@ test pg_catalog oidvector admin ALL false
test pg_catalog oidvector root ALL false
test pg_catalog oidvector[] admin ALL false
test pg_catalog oidvector[] root ALL false
+test pg_catalog pg_lsn admin ALL false
+test pg_catalog pg_lsn root ALL false
+test pg_catalog pg_lsn[] admin ALL false
+test pg_catalog pg_lsn[] root ALL false
test pg_catalog record admin ALL false
test pg_catalog record root ALL false
test pg_catalog record[] admin ALL false
@@ -1256,6 +1266,10 @@ a pg_catalog oidvector admin ALL
a pg_catalog oidvector root ALL false
a pg_catalog oidvector[] admin ALL false
a pg_catalog oidvector[] root ALL false
+a pg_catalog pg_lsn admin ALL false
+a pg_catalog pg_lsn root ALL false
+a pg_catalog pg_lsn[] admin ALL false
+a pg_catalog pg_lsn[] root ALL false
a pg_catalog record admin ALL false
a pg_catalog record root ALL false
a pg_catalog record[] admin ALL false
@@ -1424,6 +1438,10 @@ defaultdb pg_catalog oidvector admin ALL
defaultdb pg_catalog oidvector root ALL false
defaultdb pg_catalog oidvector[] admin ALL false
defaultdb pg_catalog oidvector[] root ALL false
+defaultdb pg_catalog pg_lsn admin ALL false
+defaultdb pg_catalog pg_lsn root ALL false
+defaultdb pg_catalog pg_lsn[] admin ALL false
+defaultdb pg_catalog pg_lsn[] root ALL false
defaultdb pg_catalog record admin ALL false
defaultdb pg_catalog record root ALL false
defaultdb pg_catalog record[] admin ALL false
@@ -1592,6 +1610,10 @@ postgres pg_catalog oidvector admin ALL
postgres pg_catalog oidvector root ALL false
postgres pg_catalog oidvector[] admin ALL false
postgres pg_catalog oidvector[] root ALL false
+postgres pg_catalog pg_lsn admin ALL false
+postgres pg_catalog pg_lsn root ALL false
+postgres pg_catalog pg_lsn[] admin ALL false
+postgres pg_catalog pg_lsn[] root ALL false
postgres pg_catalog record admin ALL false
postgres pg_catalog record root ALL false
postgres pg_catalog record[] admin ALL false
@@ -1760,6 +1782,10 @@ system pg_catalog oidvector admin ALL
system pg_catalog oidvector root ALL false
system pg_catalog oidvector[] admin ALL false
system pg_catalog oidvector[] root ALL false
+system pg_catalog pg_lsn admin ALL false
+system pg_catalog pg_lsn root ALL false
+system pg_catalog pg_lsn[] admin ALL false
+system pg_catalog pg_lsn[] root ALL false
system pg_catalog record admin ALL false
system pg_catalog record root ALL false
system pg_catalog record[] admin ALL false
@@ -2268,6 +2294,10 @@ test pg_catalog oidvector admin ALL
test pg_catalog oidvector root ALL false
test pg_catalog oidvector[] admin ALL false
test pg_catalog oidvector[] root ALL false
+test pg_catalog pg_lsn admin ALL false
+test pg_catalog pg_lsn root ALL false
+test pg_catalog pg_lsn[] admin ALL false
+test pg_catalog pg_lsn[] root ALL false
test pg_catalog record admin ALL false
test pg_catalog record root ALL false
test pg_catalog record[] admin ALL false
diff --git a/pkg/sql/logictest/testdata/logic_test/pg_catalog b/pkg/sql/logictest/testdata/logic_test/pg_catalog
index 9cc66a8902b5..9c2ed932e337 100644
--- a/pkg/sql/logictest/testdata/logic_test/pg_catalog
+++ b/pkg/sql/logictest/testdata/logic_test/pg_catalog
@@ -1824,6 +1824,8 @@ oid typname typnamespace typowner typlen typbyval typty
2287 _record 4294967112 NULL -1 false b
2950 uuid 4294967112 NULL 16 true b
2951 _uuid 4294967112 NULL -1 false b
+3220 pg_lsn 4294967112 NULL 8 true b
+3221 _pg_lsn 4294967112 NULL -1 false b
3614 tsvector 4294967112 NULL -1 false b
3615 tsquery 4294967112 NULL -1 false b
3643 _tsvector 4294967112 NULL -1 false b
@@ -1933,6 +1935,8 @@ oid typname typcategory typispreferred typisdefined typdel
2287 _record A false true , 0 2249 0
2950 uuid U false true , 0 0 2951
2951 _uuid A false true , 0 2950 0
+3220 pg_lsn U false true , 0 0 3221
+3221 _pg_lsn A false true , 0 3220 0
3614 tsvector U false true , 0 0 3643
3615 tsquery U false true , 0 0 3645
3643 _tsvector A false true , 0 3614 0
@@ -2042,6 +2046,8 @@ oid typname typinput typoutput typreceive
2287 _record array_in array_out array_recv array_send 0 0 0
2950 uuid uuid_in uuid_out uuid_recv uuid_send 0 0 0
2951 _uuid array_in array_out array_recv array_send 0 0 0
+3220 pg_lsn pg_lsnin pg_lsnout pg_lsnrecv pg_lsnsend 0 0 0
+3221 _pg_lsn array_in array_out array_recv array_send 0 0 0
3614 tsvector tsvectorin tsvectorout tsvectorrecv tsvectorsend 0 0 0
3615 tsquery tsqueryin tsqueryout tsqueryrecv tsquerysend 0 0 0
3643 _tsvector array_in array_out array_recv array_send 0 0 0
@@ -2151,6 +2157,8 @@ oid typname typalign typstorage typnotnull typbasetype ty
2287 _record NULL NULL false 0 -1
2950 uuid NULL NULL false 0 -1
2951 _uuid NULL NULL false 0 -1
+3220 pg_lsn NULL NULL false 0 -1
+3221 _pg_lsn NULL NULL false 0 -1
3614 tsvector NULL NULL false 0 -1
3615 tsquery NULL NULL false 0 -1
3643 _tsvector NULL NULL false 0 -1
@@ -2260,6 +2268,8 @@ oid typname typndims typcollation typdefaultbin typdefault
2287 _record 0 0 NULL NULL NULL
2950 uuid 0 0 NULL NULL NULL
2951 _uuid 0 0 NULL NULL NULL
+3220 pg_lsn 0 0 NULL NULL NULL
+3221 _pg_lsn 0 0 NULL NULL NULL
3614 tsvector 0 0 NULL NULL NULL
3615 tsquery 0 0 NULL NULL NULL
3643 _tsvector 0 0 NULL NULL NULL
@@ -3413,6 +3423,7 @@ oid oprname aggsortop
3802002898 > 3802002898
3457382662 > 3457382662
3421685890 > 3421685890
+3626766962 > 3626766962
1064453514 > 1064453514
1778355034 > 1778355034
3944320082 > 3944320082
@@ -3448,6 +3459,7 @@ oid oprname aggsortop
3842027408 < 3842027408
2897050084 < 2897050084
4132205728 < 4132205728
+1634400784 < 1634400784
2300570720 < 2300570720
3675947880 < 3675947880
3575809104 < 3575809104
diff --git a/pkg/sql/logictest/testdata/logic_test/pg_lsn b/pkg/sql/logictest/testdata/logic_test/pg_lsn
new file mode 100644
index 000000000000..a7075cd5e2c0
--- /dev/null
+++ b/pkg/sql/logictest/testdata/logic_test/pg_lsn
@@ -0,0 +1,34 @@
+query T
+SELECT 'A01F0/1AAA'::pg_lsn
+----
+A01F0/1AAA
+
+statement error could not parse pg_lsn
+SELECT 'A/G'::pg_lsn
+
+statement error could not parse pg_lsn
+SELECT '0G'::pg_lsn
+
+statement ok
+CREATE TABLE pg_lsn_table(id pg_lsn PRIMARY KEY, val pg_lsn)
+
+statement ok
+INSERT INTO pg_lsn_table VALUES ('10/10', 'A01/A100'), ('100/100', 'A01/A1000'), ('FFFFF100/100', 'A001/A100')
+
+query TT
+SELECT * FROM pg_lsn_table ORDER BY id
+----
+10/10 A01/A100
+100/100 A01/A1000
+FFFFF100/100 A001/A100
+
+query TT
+SELECT * FROM pg_lsn_table WHERE id = '10/10' ORDER BY id
+----
+10/10 A01/A100
+
+
+query TT
+SELECT * FROM pg_lsn_table WHERE val = 'A01/A1000' ORDER BY id
+----
+100/100 A01/A1000
diff --git a/pkg/sql/logictest/tests/fakedist-disk/generated_test.go b/pkg/sql/logictest/tests/fakedist-disk/generated_test.go
index cfa85f74dfb3..4efc374c8465 100644
--- a/pkg/sql/logictest/tests/fakedist-disk/generated_test.go
+++ b/pkg/sql/logictest/tests/fakedist-disk/generated_test.go
@@ -1311,6 +1311,13 @@ func TestLogic_pg_extension(
runLogicTest(t, "pg_extension")
}
+func TestLogic_pg_lsn(
+ t *testing.T,
+) {
+ defer leaktest.AfterTest(t)()
+ runLogicTest(t, "pg_lsn")
+}
+
func TestLogic_pgcrypto_builtins(
t *testing.T,
) {
diff --git a/pkg/sql/logictest/tests/fakedist-vec-off/generated_test.go b/pkg/sql/logictest/tests/fakedist-vec-off/generated_test.go
index f488a6d9c37e..5c71acf2f337 100644
--- a/pkg/sql/logictest/tests/fakedist-vec-off/generated_test.go
+++ b/pkg/sql/logictest/tests/fakedist-vec-off/generated_test.go
@@ -1311,6 +1311,13 @@ func TestLogic_pg_extension(
runLogicTest(t, "pg_extension")
}
+func TestLogic_pg_lsn(
+ t *testing.T,
+) {
+ defer leaktest.AfterTest(t)()
+ runLogicTest(t, "pg_lsn")
+}
+
func TestLogic_pgcrypto_builtins(
t *testing.T,
) {
diff --git a/pkg/sql/logictest/tests/fakedist/generated_test.go b/pkg/sql/logictest/tests/fakedist/generated_test.go
index 8975cef885f8..a80d691fff98 100644
--- a/pkg/sql/logictest/tests/fakedist/generated_test.go
+++ b/pkg/sql/logictest/tests/fakedist/generated_test.go
@@ -1325,6 +1325,13 @@ func TestLogic_pg_extension(
runLogicTest(t, "pg_extension")
}
+func TestLogic_pg_lsn(
+ t *testing.T,
+) {
+ defer leaktest.AfterTest(t)()
+ runLogicTest(t, "pg_lsn")
+}
+
func TestLogic_pgcrypto_builtins(
t *testing.T,
) {
diff --git a/pkg/sql/logictest/tests/local-legacy-schema-changer/generated_test.go b/pkg/sql/logictest/tests/local-legacy-schema-changer/generated_test.go
index 54a9fb9df046..32dadfab5f99 100644
--- a/pkg/sql/logictest/tests/local-legacy-schema-changer/generated_test.go
+++ b/pkg/sql/logictest/tests/local-legacy-schema-changer/generated_test.go
@@ -1290,6 +1290,13 @@ func TestLogic_pg_extension(
runLogicTest(t, "pg_extension")
}
+func TestLogic_pg_lsn(
+ t *testing.T,
+) {
+ defer leaktest.AfterTest(t)()
+ runLogicTest(t, "pg_lsn")
+}
+
func TestLogic_pgcrypto_builtins(
t *testing.T,
) {
diff --git a/pkg/sql/logictest/tests/local-mixed-22.2-23.1/generated_test.go b/pkg/sql/logictest/tests/local-mixed-22.2-23.1/generated_test.go
index ab8177fa56fc..68c67f8d9e7b 100644
--- a/pkg/sql/logictest/tests/local-mixed-22.2-23.1/generated_test.go
+++ b/pkg/sql/logictest/tests/local-mixed-22.2-23.1/generated_test.go
@@ -1290,6 +1290,13 @@ func TestLogic_pg_extension(
runLogicTest(t, "pg_extension")
}
+func TestLogic_pg_lsn(
+ t *testing.T,
+) {
+ defer leaktest.AfterTest(t)()
+ runLogicTest(t, "pg_lsn")
+}
+
func TestLogic_pgcrypto_builtins(
t *testing.T,
) {
diff --git a/pkg/sql/logictest/tests/local-vec-off/generated_test.go b/pkg/sql/logictest/tests/local-vec-off/generated_test.go
index 521ac1440511..3ecd2dfa1c2f 100644
--- a/pkg/sql/logictest/tests/local-vec-off/generated_test.go
+++ b/pkg/sql/logictest/tests/local-vec-off/generated_test.go
@@ -1318,6 +1318,13 @@ func TestLogic_pg_extension(
runLogicTest(t, "pg_extension")
}
+func TestLogic_pg_lsn(
+ t *testing.T,
+) {
+ defer leaktest.AfterTest(t)()
+ runLogicTest(t, "pg_lsn")
+}
+
func TestLogic_pgcrypto_builtins(
t *testing.T,
) {
diff --git a/pkg/sql/logictest/tests/local/generated_test.go b/pkg/sql/logictest/tests/local/generated_test.go
index f0e86dde678a..7cfb9aca8fc3 100644
--- a/pkg/sql/logictest/tests/local/generated_test.go
+++ b/pkg/sql/logictest/tests/local/generated_test.go
@@ -1444,6 +1444,13 @@ func TestLogic_pg_extension(
runLogicTest(t, "pg_extension")
}
+func TestLogic_pg_lsn(
+ t *testing.T,
+) {
+ defer leaktest.AfterTest(t)()
+ runLogicTest(t, "pg_lsn")
+}
+
func TestLogic_pgcrypto_builtins(
t *testing.T,
) {
diff --git a/pkg/sql/opt/optbuilder/testdata/aggregate b/pkg/sql/opt/optbuilder/testdata/aggregate
index bf27a55a7789..b1e30cff2e94 100644
--- a/pkg/sql/opt/optbuilder/testdata/aggregate
+++ b/pkg/sql/opt/optbuilder/testdata/aggregate
@@ -223,6 +223,7 @@ array_agg(timestamptz) -> timestamptz[]
array_agg(oid) -> oid[]
array_agg(uuid) -> uuid[]
array_agg(inet) -> inet[]
+array_agg(pg_lsn) -> pg_lsn[]
array_agg(time) -> time[]
array_agg(timetz) -> timetz[]
array_agg(jsonb) -> jsonb[]
diff --git a/pkg/sql/parser/parse_test.go b/pkg/sql/parser/parse_test.go
index 7fca102eb15f..39b91194853f 100644
--- a/pkg/sql/parser/parse_test.go
+++ b/pkg/sql/parser/parse_test.go
@@ -558,7 +558,6 @@ func TestUnimplementedSyntax(t *testing.T) {
{`CREATE TABLE a(b MACADDR8)`, 45813, `macaddr8`, ``},
{`CREATE TABLE a(b MONEY)`, 41578, `money`, ``},
{`CREATE TABLE a(b PATH)`, 21286, `path`, ``},
- {`CREATE TABLE a(b PG_LSN)`, 0, `pg_lsn`, ``},
{`CREATE TABLE a(b POINT)`, 21286, `point`, ``},
{`CREATE TABLE a(b POLYGON)`, 21286, `polygon`, ``},
{`CREATE TABLE a(b TXID_SNAPSHOT)`, 0, `txid_snapshot`, ``},
diff --git a/pkg/sql/pg_catalog.go b/pkg/sql/pg_catalog.go
index 55b03477bc7f..695278cb3d80 100644
--- a/pkg/sql/pg_catalog.go
+++ b/pkg/sql/pg_catalog.go
@@ -4377,6 +4377,7 @@ var datumToTypeCategory = map[types.Family]*tree.DString{
types.ArrayFamily: typCategoryArray,
types.TupleFamily: typCategoryPseudo,
types.OidFamily: typCategoryNumeric,
+ types.PGLSNFamily: typCategoryUserDefined,
types.UuidFamily: typCategoryUserDefined,
types.INetFamily: typCategoryNetworkAddr,
types.UnknownFamily: typCategoryUnknown,
diff --git a/pkg/sql/pgrepl/lsn/lsn.go b/pkg/sql/pgrepl/lsn/lsn.go
index 460b6007d83b..fe10169ef03f 100644
--- a/pkg/sql/pgrepl/lsn/lsn.go
+++ b/pkg/sql/pgrepl/lsn/lsn.go
@@ -26,3 +26,21 @@ func ParseLSN(str string) (LSN, error) {
}
return (LSN(hi) << 32) | LSN(lo), nil
}
+
+func (lsn LSN) Compare(other LSN) int {
+ if lsn > other {
+ return 1
+ }
+ if lsn < other {
+ return -1
+ }
+ return 0
+}
+
+func (lsn LSN) Add(val LSN) LSN {
+ return lsn + val
+}
+
+func (lsn LSN) Sub(val LSN) LSN {
+ return lsn - val
+}
diff --git a/pkg/sql/pgwire/pgwirebase/BUILD.bazel b/pkg/sql/pgwire/pgwirebase/BUILD.bazel
index c26721d3662d..75d0ca7ae59f 100644
--- a/pkg/sql/pgwire/pgwirebase/BUILD.bazel
+++ b/pkg/sql/pgwire/pgwirebase/BUILD.bazel
@@ -24,6 +24,7 @@ go_library(
"//pkg/sql/catalog/colinfo",
"//pkg/sql/lex",
"//pkg/sql/oidext",
+ "//pkg/sql/pgrepl/lsn",
"//pkg/sql/pgwire/pgcode",
"//pkg/sql/pgwire/pgerror",
"//pkg/sql/sem/eval",
diff --git a/pkg/sql/pgwire/pgwirebase/encoding.go b/pkg/sql/pgwire/pgwirebase/encoding.go
index d8d56d19834c..dd251dd72457 100644
--- a/pkg/sql/pgwire/pgwirebase/encoding.go
+++ b/pkg/sql/pgwire/pgwirebase/encoding.go
@@ -27,6 +27,7 @@ import (
"github.com/cockroachdb/cockroach/pkg/settings"
"github.com/cockroachdb/cockroach/pkg/sql/lex"
"github.com/cockroachdb/cockroach/pkg/sql/oidext"
+ "github.com/cockroachdb/cockroach/pkg/sql/pgrepl/lsn"
"github.com/cockroachdb/cockroach/pkg/sql/pgwire/pgcode"
"github.com/cockroachdb/cockroach/pkg/sql/pgwire/pgerror"
"github.com/cockroachdb/cockroach/pkg/sql/sem/eval"
@@ -353,6 +354,8 @@ func DecodeDatum(
return nil, tree.MakeParseError(bs, typ, err)
}
return tree.NewDInt(tree.DInt(i)), nil
+ case oid.T_pg_lsn:
+ return tree.ParseDPGLSN(bs)
case oid.T_oid,
oid.T_regoper,
oid.T_regproc,
@@ -511,6 +514,12 @@ func DecodeDatum(
}
i := int64(binary.BigEndian.Uint64(b))
return tree.NewDInt(tree.DInt(i)), nil
+ case oid.T_pg_lsn:
+ if len(b) < 8 {
+ return nil, pgerror.Newf(pgcode.Syntax, "lsn requires 8 bytes for binary format")
+ }
+ i := int64(binary.BigEndian.Uint64(b))
+ return tree.NewDPGLSN(lsn.LSN(i)), nil
case oid.T_float4:
if len(b) < 4 {
return nil, pgerror.Newf(pgcode.Syntax, "float4 requires 4 bytes for binary format")
diff --git a/pkg/sql/pgwire/testdata/encodings.json b/pkg/sql/pgwire/testdata/encodings.json
index a785529907f6..cf0993a26363 100644
--- a/pkg/sql/pgwire/testdata/encodings.json
+++ b/pkg/sql/pgwire/testdata/encodings.json
@@ -1868,6 +1868,13 @@
"TextAsBinary": [91, 34, 92, 117, 48, 48, 48, 49, 34, 44, 32, 34, 65, 34, 44, 32, 34, 226, 154, 163, 34, 44, 32, 34, 240, 159, 164, 183, 34, 93],
"Binary": [1, 91, 34, 92, 117, 48, 48, 48, 49, 34, 44, 32, 34, 65, 34, 44, 32, 34, 226, 154, 163, 34, 44, 32, 34, 240, 159, 164, 183, 34, 93]
},
+ {
+ "SQL": "'010011F/1ABCDEF0'::pg_lsn",
+ "Oid": 3220,
+ "Text": "10011F/1ABCDEF0",
+ "TextAsBinary": [49, 48, 48, 49, 49, 70, 47, 49, 65, 66, 67, 68, 69, 70, 48],
+ "Binary": [0, 16, 1, 31, 26, 188, 222, 240]
+ },
{
"SQL": "'00:00:00'::time",
"Oid": 1083,
diff --git a/pkg/sql/pgwire/types.go b/pkg/sql/pgwire/types.go
index 464c4baf60a6..4fe6de430072 100644
--- a/pkg/sql/pgwire/types.go
+++ b/pkg/sql/pgwire/types.go
@@ -213,6 +213,11 @@ func writeTextDatumNotNull(
case *tree.DVoid:
b.putInt32(0)
+ case *tree.DPGLSN:
+ s := v.LSN.String()
+ b.putInt32(int32(len(s)))
+ b.write([]byte(s))
+
case *tree.DBox2D:
s := v.Repr()
b.putInt32(int32(len(s)))
@@ -723,6 +728,10 @@ func writeBinaryDatumNotNull(
case *tree.DVoid:
b.putInt32(0)
+ case *tree.DPGLSN:
+ b.putInt32(8)
+ b.putInt64(int64(v.LSN))
+
case *tree.DBox2D:
b.putInt32(32)
b.putInt64(int64(math.Float64bits(v.LoX)))
diff --git a/pkg/sql/randgen/BUILD.bazel b/pkg/sql/randgen/BUILD.bazel
index 4afee3784e34..bf6acfe60d19 100644
--- a/pkg/sql/randgen/BUILD.bazel
+++ b/pkg/sql/randgen/BUILD.bazel
@@ -26,6 +26,7 @@ go_library(
"//pkg/sql/catalog/colinfo",
"//pkg/sql/catalog/descpb",
"//pkg/sql/parser",
+ "//pkg/sql/pgrepl/lsn",
"//pkg/sql/rowenc",
"//pkg/sql/rowenc/keyside",
"//pkg/sql/rowenc/valueside",
diff --git a/pkg/sql/randgen/datum.go b/pkg/sql/randgen/datum.go
index 6af1160a6f56..325d1d0ed213 100644
--- a/pkg/sql/randgen/datum.go
+++ b/pkg/sql/randgen/datum.go
@@ -22,6 +22,7 @@ import (
"github.com/cockroachdb/cockroach/pkg/geo"
"github.com/cockroachdb/cockroach/pkg/geo/geogen"
"github.com/cockroachdb/cockroach/pkg/geo/geopb"
+ "github.com/cockroachdb/cockroach/pkg/sql/pgrepl/lsn"
"github.com/cockroachdb/cockroach/pkg/sql/sem/tree"
"github.com/cockroachdb/cockroach/pkg/sql/types"
"github.com/cockroachdb/cockroach/pkg/util/bitarray"
@@ -125,6 +126,8 @@ func RandDatumWithNullChance(
case types.Box2DFamily:
b := geo.NewCartesianBoundingBox().AddPoint(rng.NormFloat64(), rng.NormFloat64()).AddPoint(rng.NormFloat64(), rng.NormFloat64())
return tree.NewDBox2D(*b)
+ case types.PGLSNFamily:
+ return tree.NewDPGLSN(lsn.LSN(rng.Uint64()))
case types.GeographyFamily:
gm, err := typ.GeoMetadata()
if err != nil {
@@ -710,6 +713,12 @@ var (
tree.DMinIPAddr,
tree.DMaxIPAddr,
},
+ types.PGLSNFamily: {
+ tree.NewDPGLSN(0),
+ tree.NewDPGLSN(math.MaxInt64),
+ tree.NewDPGLSN(math.MaxInt64 + 1),
+ tree.NewDPGLSN(math.MaxUint64),
+ },
types.JsonFamily: func() []tree.Datum {
var res []tree.Datum
for _, s := range []string{
@@ -876,6 +885,9 @@ var (
}
return res
}(),
+ types.PGLSNFamily: {
+ tree.NewDPGLSN(0x1000),
+ },
types.IntervalFamily: func() []tree.Datum {
var res []tree.Datum
for _, nanos := range []int64{
diff --git a/pkg/sql/rowenc/keyside/BUILD.bazel b/pkg/sql/rowenc/keyside/BUILD.bazel
index 7a489f49436e..c1a4fb680c59 100644
--- a/pkg/sql/rowenc/keyside/BUILD.bazel
+++ b/pkg/sql/rowenc/keyside/BUILD.bazel
@@ -14,6 +14,7 @@ go_library(
deps = [
"//pkg/geo",
"//pkg/geo/geopb",
+ "//pkg/sql/pgrepl/lsn",
"//pkg/sql/sem/tree",
"//pkg/sql/types",
"//pkg/util/bitarray",
diff --git a/pkg/sql/rowenc/keyside/decode.go b/pkg/sql/rowenc/keyside/decode.go
index fbcfa28d3901..0e89f260b2e7 100644
--- a/pkg/sql/rowenc/keyside/decode.go
+++ b/pkg/sql/rowenc/keyside/decode.go
@@ -16,6 +16,7 @@ import (
"github.com/cockroachdb/apd/v3"
"github.com/cockroachdb/cockroach/pkg/geo"
"github.com/cockroachdb/cockroach/pkg/geo/geopb"
+ "github.com/cockroachdb/cockroach/pkg/sql/pgrepl/lsn"
"github.com/cockroachdb/cockroach/pkg/sql/sem/tree"
"github.com/cockroachdb/cockroach/pkg/sql/types"
"github.com/cockroachdb/cockroach/pkg/util/bitarray"
@@ -73,6 +74,14 @@ func Decode(
rkey, i, err = encoding.DecodeVarintDescending(key)
}
return a.NewDInt(tree.DInt(i)), rkey, err
+ case types.PGLSNFamily:
+ var i uint64
+ if dir == encoding.Ascending {
+ rkey, i, err = encoding.DecodeUvarintAscending(key)
+ } else {
+ rkey, i, err = encoding.DecodeUvarintDescending(key)
+ }
+ return a.NewDPGLSN(tree.DPGLSN{LSN: lsn.LSN(i)}), rkey, err
case types.FloatFamily:
var f float64
if dir == encoding.Ascending {
diff --git a/pkg/sql/rowenc/keyside/encode.go b/pkg/sql/rowenc/keyside/encode.go
index 19eaee195c94..632f257c9d31 100644
--- a/pkg/sql/rowenc/keyside/encode.go
+++ b/pkg/sql/rowenc/keyside/encode.go
@@ -76,6 +76,11 @@ func Encode(b []byte, val tree.Datum, dir encoding.Direction) ([]byte, error) {
return encoding.EncodeStringDescending(b, string(*t)), nil
case *tree.DVoid:
return encoding.EncodeVoidAscendingOrDescending(b), nil
+ case *tree.DPGLSN:
+ if dir == encoding.Ascending {
+ return encoding.EncodeUvarintAscending(b, uint64(t.LSN)), nil
+ }
+ return encoding.EncodeUvarintDescending(b, uint64(t.LSN)), nil
case *tree.DBox2D:
if dir == encoding.Ascending {
return encoding.EncodeBox2DAscending(b, t.CartesianBoundingBox.BoundingBox)
diff --git a/pkg/sql/rowenc/valueside/BUILD.bazel b/pkg/sql/rowenc/valueside/BUILD.bazel
index c7e756caa392..0d90c58ea369 100644
--- a/pkg/sql/rowenc/valueside/BUILD.bazel
+++ b/pkg/sql/rowenc/valueside/BUILD.bazel
@@ -18,6 +18,7 @@ go_library(
"//pkg/sql/catalog",
"//pkg/sql/catalog/descpb",
"//pkg/sql/lex",
+ "//pkg/sql/pgrepl/lsn",
"//pkg/sql/sem/tree",
"//pkg/sql/types",
"//pkg/util/encoding",
diff --git a/pkg/sql/rowenc/valueside/array.go b/pkg/sql/rowenc/valueside/array.go
index 3033eb647bed..76e6ff230161 100644
--- a/pkg/sql/rowenc/valueside/array.go
+++ b/pkg/sql/rowenc/valueside/array.go
@@ -227,6 +227,8 @@ func DatumTypeToArrayElementEncodingType(t *types.T) (encoding.Type, error) {
return encoding.True, nil
case types.BitFamily:
return encoding.BitArray, nil
+ case types.PGLSNFamily:
+ return encoding.Int, nil
case types.UuidFamily:
return encoding.UUID, nil
case types.INetFamily:
@@ -279,6 +281,8 @@ func encodeArrayElement(b []byte, d tree.Datum) ([]byte, error) {
return encoding.EncodeUntaggedDecimalValue(b, &t.Decimal), nil
case *tree.DDate:
return encoding.EncodeUntaggedIntValue(b, t.UnixEpochDaysWithOrig()), nil
+ case *tree.DPGLSN:
+ return encoding.EncodeUntaggedIntValue(b, int64(t.LSN)), nil
case *tree.DBox2D:
return encoding.EncodeUntaggedBox2DValue(b, t.CartesianBoundingBox.BoundingBox)
case *tree.DGeography:
diff --git a/pkg/sql/rowenc/valueside/decode.go b/pkg/sql/rowenc/valueside/decode.go
index 1ec07581c4b3..224e1042a831 100644
--- a/pkg/sql/rowenc/valueside/decode.go
+++ b/pkg/sql/rowenc/valueside/decode.go
@@ -14,6 +14,7 @@ import (
"github.com/cockroachdb/cockroach/pkg/geo"
"github.com/cockroachdb/cockroach/pkg/sql/catalog"
"github.com/cockroachdb/cockroach/pkg/sql/catalog/descpb"
+ "github.com/cockroachdb/cockroach/pkg/sql/pgrepl/lsn"
"github.com/cockroachdb/cockroach/pkg/sql/sem/tree"
"github.com/cockroachdb/cockroach/pkg/sql/types"
"github.com/cockroachdb/cockroach/pkg/util/encoding"
@@ -115,6 +116,12 @@ func DecodeUntaggedDatum(
return nil, b, err
}
return a.NewDDate(tree.MakeDDate(pgdate.MakeCompatibleDateFromDisk(data))), b, nil
+ case types.PGLSNFamily:
+ b, data, err := encoding.DecodeUntaggedIntValue(buf)
+ if err != nil {
+ return nil, b, err
+ }
+ return a.NewDPGLSN(tree.DPGLSN{LSN: lsn.LSN(data)}), b, nil
case types.Box2DFamily:
b, data, err := encoding.DecodeUntaggedBox2DValue(buf)
if err != nil {
diff --git a/pkg/sql/rowenc/valueside/encode.go b/pkg/sql/rowenc/valueside/encode.go
index 506cd0911efc..2631bc9a6269 100644
--- a/pkg/sql/rowenc/valueside/encode.go
+++ b/pkg/sql/rowenc/valueside/encode.go
@@ -55,6 +55,8 @@ func Encode(appendTo []byte, colID ColumnIDDelta, val tree.Datum, scratch []byte
return encoding.EncodeBytesValue(appendTo, uint32(colID), t.UnsafeBytes()), nil
case *tree.DDate:
return encoding.EncodeIntValue(appendTo, uint32(colID), t.UnixEpochDaysWithOrig()), nil
+ case *tree.DPGLSN:
+ return encoding.EncodeIntValue(appendTo, uint32(colID), int64(t.LSN)), nil
case *tree.DBox2D:
return encoding.EncodeBox2DValue(appendTo, uint32(colID), t.CartesianBoundingBox.BoundingBox)
case *tree.DGeography:
diff --git a/pkg/sql/rowenc/valueside/legacy.go b/pkg/sql/rowenc/valueside/legacy.go
index 74d6628cc472..935988d83f7a 100644
--- a/pkg/sql/rowenc/valueside/legacy.go
+++ b/pkg/sql/rowenc/valueside/legacy.go
@@ -14,6 +14,7 @@ import (
"github.com/cockroachdb/cockroach/pkg/geo"
"github.com/cockroachdb/cockroach/pkg/roachpb"
"github.com/cockroachdb/cockroach/pkg/sql/lex"
+ "github.com/cockroachdb/cockroach/pkg/sql/pgrepl/lsn"
"github.com/cockroachdb/cockroach/pkg/sql/sem/tree"
"github.com/cockroachdb/cockroach/pkg/sql/types"
"github.com/cockroachdb/cockroach/pkg/util/ipaddr"
@@ -87,6 +88,11 @@ func MarshalLegacy(colType *types.T, val tree.Datum) (roachpb.Value, error) {
r.SetBox2D(v.CartesianBoundingBox.BoundingBox)
return r, nil
}
+ case types.PGLSNFamily:
+ if v, ok := val.(*tree.DPGLSN); ok {
+ r.SetInt(int64(v.LSN))
+ return r, nil
+ }
case types.GeographyFamily:
if v, ok := val.(*tree.DGeography); ok {
err := r.SetGeo(v.SpatialObject())
@@ -237,6 +243,12 @@ func UnmarshalLegacy(a *tree.DatumAlloc, typ *types.T, value roachpb.Value) (tre
return nil, err
}
return a.NewDInt(tree.DInt(v)), nil
+ case types.PGLSNFamily:
+ v, err := value.GetInt()
+ if err != nil {
+ return nil, err
+ }
+ return a.NewDPGLSN(tree.DPGLSN{LSN: lsn.LSN(v)}), nil
case types.FloatFamily:
v, err := value.GetFloat()
if err != nil {
diff --git a/pkg/sql/sem/builtins/builtins.go b/pkg/sql/sem/builtins/builtins.go
index c3568c3a9a81..eb242de4d8a2 100644
--- a/pkg/sql/sem/builtins/builtins.go
+++ b/pkg/sql/sem/builtins/builtins.go
@@ -10251,7 +10251,7 @@ func asJSONBuildObjectKey(
case *tree.DBitArray, *tree.DBool, *tree.DBox2D, *tree.DBytes, *tree.DDate,
*tree.DDecimal, *tree.DEnum, *tree.DFloat, *tree.DGeography,
*tree.DGeometry, *tree.DIPAddr, *tree.DInt, *tree.DInterval, *tree.DOid,
- *tree.DOidWrapper, *tree.DTime, *tree.DTimeTZ, *tree.DTimestamp,
+ *tree.DOidWrapper, *tree.DPGLSN, *tree.DTime, *tree.DTimeTZ, *tree.DTimestamp,
*tree.DTSQuery, *tree.DTSVector, *tree.DUuid, *tree.DVoid:
return tree.AsStringWithFlags(d, tree.FmtBareStrings), nil
default:
diff --git a/pkg/sql/sem/builtins/fixed_oids.go b/pkg/sql/sem/builtins/fixed_oids.go
index 27ca04b7fa0a..e346c5543bb3 100644
--- a/pkg/sql/sem/builtins/fixed_oids.go
+++ b/pkg/sql/sem/builtins/fixed_oids.go
@@ -2385,6 +2385,39 @@ var builtinOidsArray = []string{
2412: `crdb_internal.unsafe_lock_replica(range_id: int, lock: bool) -> bool`,
2413: `crdb_internal.fingerprint(span: bytes[], start_time: decimal, all_revisions: bool) -> int`,
2414: `crdb_internal.tenant_span_stats(spans: tuple[]) -> tuple{bytes AS start_key, bytes AS end_key, jsonb AS stats}`,
+ 2415: `pg_lsnrecv(input: anyelement) -> pg_lsn`,
+ 2416: `pg_lsnout(pg_lsn: pg_lsn) -> bytes`,
+ 2417: `pg_lsnin(input: anyelement) -> pg_lsn`,
+ 2418: `pg_lsn(string: string) -> pg_lsn`,
+ 2419: `pg_lsn(pg_lsn: pg_lsn) -> pg_lsn`,
+ 2420: `varchar(pg_lsn: pg_lsn) -> varchar`,
+ 2421: `text(pg_lsn: pg_lsn) -> string`,
+ 2422: `bpchar(pg_lsn: pg_lsn) -> char`,
+ 2423: `name(pg_lsn: pg_lsn) -> name`,
+ 2424: `char(pg_lsn: pg_lsn) -> "char"`,
+ 2425: `max(arg1: pg_lsn) -> anyelement`,
+ 2426: `percentile_disc_impl(arg1: float, arg2: pg_lsn) -> pg_lsn`,
+ 2427: `percentile_disc_impl(arg1: float[], arg2: pg_lsn) -> pg_lsn[]`,
+ 2428: `min(arg1: pg_lsn) -> anyelement`,
+ 2429: `array_cat_agg(arg1: pg_lsn[]) -> pg_lsn[]`,
+ 2430: `array_agg(arg1: pg_lsn) -> pg_lsn[]`,
+ 2431: `array_prepend(elem: pg_lsn, array: pg_lsn[]) -> pg_lsn[]`,
+ 2432: `array_remove(array: pg_lsn[], elem: pg_lsn) -> anyelement`,
+ 2433: `array_positions(array: pg_lsn[], elem: pg_lsn) -> int[]`,
+ 2434: `array_cat(left: pg_lsn[], right: pg_lsn[]) -> pg_lsn[]`,
+ 2435: `array_position(array: pg_lsn[], elem: pg_lsn) -> int`,
+ 2436: `array_replace(array: pg_lsn[], toreplace: pg_lsn, replacewith: pg_lsn) -> anyelement`,
+ 2437: `array_append(array: pg_lsn[], elem: pg_lsn) -> pg_lsn[]`,
+ 2438: `first_value(val: pg_lsn) -> pg_lsn`,
+ 2439: `nth_value(val: pg_lsn, n: int) -> pg_lsn`,
+ 2440: `lag(val: pg_lsn) -> pg_lsn`,
+ 2441: `lag(val: pg_lsn, n: int) -> pg_lsn`,
+ 2442: `lag(val: pg_lsn, n: int, default: pg_lsn) -> pg_lsn`,
+ 2443: `lead(val: pg_lsn) -> pg_lsn`,
+ 2444: `lead(val: pg_lsn, n: int) -> pg_lsn`,
+ 2445: `lead(val: pg_lsn, n: int, default: pg_lsn) -> pg_lsn`,
+ 2446: `last_value(val: pg_lsn) -> pg_lsn`,
+ 2447: `pg_lsnsend(pg_lsn: pg_lsn) -> bytes`,
}
var builtinOidsBySignature map[string]oid.Oid
diff --git a/pkg/sql/sem/cast/cast_map.go b/pkg/sql/sem/cast/cast_map.go
index f49989aef5fc..859b8e916b85 100644
--- a/pkg/sql/sem/cast/cast_map.go
+++ b/pkg/sql/sem/cast/cast_map.go
@@ -65,6 +65,13 @@ var castMap = map[oid.Oid]map[oid.Oid]Cast{
oid.T_varchar: {MaxContext: ContextAssignment, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
oid.T_text: {MaxContext: ContextAssignment, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
},
+ oid.T_pg_lsn: {
+ oid.T_bpchar: {MaxContext: ContextAssignment, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
+ oid.T_char: {MaxContext: ContextAssignment, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
+ oid.T_name: {MaxContext: ContextAssignment, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
+ oid.T_varchar: {MaxContext: ContextAssignment, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
+ oid.T_text: {MaxContext: ContextAssignment, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
+ },
oid.T_bpchar: {
oid.T_bpchar: {MaxContext: ContextImplicit, origin: ContextOriginPgCast, Volatility: volatility.Immutable},
oid.T_char: {MaxContext: ContextAssignment, origin: ContextOriginPgCast, Volatility: volatility.Immutable},
@@ -75,6 +82,7 @@ var castMap = map[oid.Oid]map[oid.Oid]Cast{
oid.T_bit: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
oid.T_bool: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
oidext.T_box2d: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
+ oid.T_pg_lsn: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
oid.T_bytea: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
oid.T_date: {
MaxContext: ContextExplicit,
@@ -160,6 +168,7 @@ var castMap = map[oid.Oid]map[oid.Oid]Cast{
oid.T_bit: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
oid.T_bool: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
oidext.T_box2d: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
+ oid.T_pg_lsn: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
oid.T_bytea: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
oid.T_date: {
MaxContext: ContextExplicit,
@@ -478,6 +487,7 @@ var castMap = map[oid.Oid]map[oid.Oid]Cast{
oid.T_bit: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
oid.T_bool: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
oidext.T_box2d: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
+ oid.T_pg_lsn: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
oid.T_bytea: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
oid.T_date: {
MaxContext: ContextExplicit,
@@ -712,6 +722,7 @@ var castMap = map[oid.Oid]map[oid.Oid]Cast{
oid.T_bit: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
oid.T_bool: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
oidext.T_box2d: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
+ oid.T_pg_lsn: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
oid.T_bytea: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
oid.T_date: {
MaxContext: ContextExplicit,
@@ -943,6 +954,7 @@ var castMap = map[oid.Oid]map[oid.Oid]Cast{
oid.T_bit: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
oid.T_bool: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
oidext.T_box2d: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
+ oid.T_pg_lsn: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
oid.T_bytea: {MaxContext: ContextExplicit, origin: ContextOriginAutomaticIOConversion, Volatility: volatility.Immutable},
oid.T_date: {
MaxContext: ContextExplicit,
diff --git a/pkg/sql/sem/eval/cast.go b/pkg/sql/sem/eval/cast.go
index ec9de9fa5549..adaf4dbf3b78 100644
--- a/pkg/sql/sem/eval/cast.go
+++ b/pkg/sql/sem/eval/cast.go
@@ -432,7 +432,7 @@ func performCastWithoutPrecisionTruncation(
}
case *tree.DBool, *tree.DDecimal:
s = d.String()
- case *tree.DTimestamp, *tree.DDate, *tree.DTime, *tree.DTimeTZ, *tree.DGeography, *tree.DGeometry, *tree.DBox2D:
+ case *tree.DTimestamp, *tree.DDate, *tree.DTime, *tree.DTimeTZ, *tree.DGeography, *tree.DGeometry, *tree.DBox2D, *tree.DPGLSN:
s = tree.AsStringWithFlags(d, tree.FmtBareStrings)
case *tree.DTimestampTZ:
// Convert to context timezone for correct display.
@@ -582,6 +582,16 @@ func performCastWithoutPrecisionTruncation(
return tree.NewDBox2D(*bbox), nil
}
+ case types.PGLSNFamily:
+ switch d := d.(type) {
+ case *tree.DString:
+ return tree.ParseDPGLSN(string(*d))
+ case *tree.DCollatedString:
+ return tree.ParseDPGLSN(d.Contents)
+ case *tree.DPGLSN:
+ return d, nil
+ }
+
case types.GeographyFamily:
switch d := d.(type) {
case *tree.DString:
diff --git a/pkg/sql/sem/tree/BUILD.bazel b/pkg/sql/sem/tree/BUILD.bazel
index 7042bcf0e291..c498171e860b 100644
--- a/pkg/sql/sem/tree/BUILD.bazel
+++ b/pkg/sql/sem/tree/BUILD.bazel
@@ -126,6 +126,7 @@ go_library(
"//pkg/geo/geopb",
"//pkg/sql/lex",
"//pkg/sql/lexbase",
+ "//pkg/sql/pgrepl/lsn",
"//pkg/sql/pgwire/pgcode",
"//pkg/sql/pgwire/pgerror",
"//pkg/sql/privilege",
diff --git a/pkg/sql/sem/tree/constant.go b/pkg/sql/sem/tree/constant.go
index 43b1463fc599..899e721260d7 100644
--- a/pkg/sql/sem/tree/constant.go
+++ b/pkg/sql/sem/tree/constant.go
@@ -542,6 +542,8 @@ var (
types.UUIDArray,
types.INet,
types.Jsonb,
+ types.PGLSN,
+ types.PGLSNArray,
types.TSQuery,
types.TSVector,
types.VarBit,
diff --git a/pkg/sql/sem/tree/constant_test.go b/pkg/sql/sem/tree/constant_test.go
index d153ba0fb097..89382a1ee87b 100644
--- a/pkg/sql/sem/tree/constant_test.go
+++ b/pkg/sql/sem/tree/constant_test.go
@@ -339,6 +339,13 @@ func mustParseDGeometry(t *testing.T, s string) tree.Datum {
}
return d
}
+func mustParseDPGLSN(t *testing.T, s string) tree.Datum {
+ d, err := tree.ParseDPGLSN(s)
+ if err != nil {
+ t.Fatal(err)
+ }
+ return d
+}
func mustParseDINet(t *testing.T, s string) tree.Datum {
d, err := tree.ParseDIPAddrFromINetString(s)
if err != nil {
@@ -398,6 +405,7 @@ var parseFuncs = map[*types.T]func(*testing.T, string) tree.Datum{
types.Geometry: mustParseDGeometry,
types.INet: mustParseDINet,
types.VarBit: mustParseDVarBit,
+ types.PGLSN: mustParseDPGLSN,
types.TSQuery: mustParseDTSQuery,
types.TSVector: mustParseDTSVector,
types.BytesArray: mustParseDArrayOfType(types.Bytes),
@@ -408,6 +416,7 @@ var parseFuncs = map[*types.T]func(*testing.T, string) tree.Datum{
types.BoolArray: mustParseDArrayOfType(types.Bool),
types.UUIDArray: mustParseDArrayOfType(types.Uuid),
types.DateArray: mustParseDArrayOfType(types.Date),
+ types.PGLSNArray: mustParseDArrayOfType(types.PGLSN),
types.TimeArray: mustParseDArrayOfType(types.Time),
types.TimeTZArray: mustParseDArrayOfType(types.TimeTZ),
types.TimestampArray: mustParseDArrayOfType(types.Timestamp),
@@ -508,6 +518,10 @@ func TestStringConstantResolveAvailableTypes(t *testing.T) {
types.TSQuery,
),
},
+ {
+ c: tree.NewStrVal("A/1"),
+ parseOptions: typeSet(types.String, types.PGLSN, types.Bytes, types.TSQuery, types.TSVector),
+ },
{
c: tree.NewStrVal(`{"a": 1}`),
parseOptions: typeSet(types.String, types.Bytes, types.Jsonb),
@@ -566,6 +580,11 @@ func TestStringConstantResolveAvailableTypes(t *testing.T) {
parseOptions: typeSet(types.String, types.Bytes, types.BytesArray, types.StringArray, types.DateArray,
types.TimestampArray, types.TimestampTZArray, types.TSVector),
},
+ {
+ c: tree.NewStrVal("{1A/1,2/2A}"),
+ parseOptions: typeSet(types.String, types.PGLSNArray, types.Bytes, types.BytesArray,
+ types.StringArray, types.TSQuery, types.TSVector),
+ },
{
c: tree.NewStrVal("{2010-09-28 12:00:00.1, 2010-09-29 12:00:00.1}"),
parseOptions: typeSet(
@@ -620,57 +639,58 @@ func TestStringConstantResolveAvailableTypes(t *testing.T) {
evalCtx := eval.NewTestingEvalContext(cluster.MakeTestingClusterSettings())
defer evalCtx.Stop(context.Background())
for i, test := range testCases {
- t.Log(test.c.String())
- parseableCount := 0
+ t.Run(test.c.String(), func(t *testing.T) {
+ parseableCount := 0
- // Make sure it can be resolved as each of those types or throws a parsing error.
- for _, availType := range test.c.AvailableTypes() {
+ // Make sure it can be resolved as each of those types or throws a parsing error.
+ for _, availType := range test.c.AvailableTypes() {
- // The enum value in c.AvailableTypes() is AnyEnum, so we will not be able to
- // resolve that exact type. In actual execution, the constant would be resolved
- // as a hydrated enum type instead.
- if availType.Family() == types.EnumFamily {
- continue
- }
+ // The enum value in c.AvailableTypes() is AnyEnum, so we will not be able to
+ // resolve that exact type. In actual execution, the constant would be resolved
+ // as a hydrated enum type instead.
+ if availType.Family() == types.EnumFamily {
+ continue
+ }
- semaCtx := tree.MakeSemaContext()
- typedExpr, err := test.c.ResolveAsType(context.Background(), &semaCtx, availType)
- var res tree.Datum
- if err == nil {
- res, err = eval.Expr(context.Background(), evalCtx, typedExpr)
- }
- if err != nil {
- if !strings.Contains(err.Error(), "could not parse") &&
- !strings.Contains(err.Error(), "parsing") &&
- !strings.Contains(err.Error(), "out of range") &&
- !strings.Contains(err.Error(), "exceeds supported") {
- // Parsing errors are permitted for this test, but the number of correctly
- // parseable types will be verified. Any other error should throw a failure.
- t.Errorf("%d: expected resolving %v as available type %s would either succeed"+
- " or throw a parsing error, found %v",
- i, test.c, availType, err)
+ semaCtx := tree.MakeSemaContext()
+ typedExpr, err := test.c.ResolveAsType(context.Background(), &semaCtx, availType)
+ var res tree.Datum
+ if err == nil {
+ res, err = eval.Expr(context.Background(), evalCtx, typedExpr)
}
- continue
- }
- parseableCount++
+ if err != nil {
+ if !strings.Contains(err.Error(), "could not parse") &&
+ !strings.Contains(err.Error(), "parsing") &&
+ !strings.Contains(err.Error(), "out of range") &&
+ !strings.Contains(err.Error(), "exceeds supported") {
+ // Parsing errors are permitted for this test, but the number of correctly
+ // parseable types will be verified. Any other error should throw a failure.
+ t.Errorf("%d: expected resolving %v as available type %s would either succeed"+
+ " or throw a parsing error, found %v",
+ i, test.c, availType, err)
+ }
+ continue
+ }
+ parseableCount++
- if _, isExpected := test.parseOptions[availType]; !isExpected {
- t.Errorf("%d: type %s not expected to be resolvable from the tree.StrVal %v, found %v",
- i, availType, test.c, res)
- } else {
- expectedDatum := parseFuncs[availType](t, test.c.RawString())
- if res.Compare(evalCtx, expectedDatum) != 0 {
- t.Errorf("%d: type %s expected to be resolved from the tree.StrVal %v to tree.Datum %v"+
- ", found %v",
- i, availType, test.c, expectedDatum, res)
+ if _, isExpected := test.parseOptions[availType]; !isExpected {
+ t.Errorf("%d: type %s not expected to be resolvable from the tree.StrVal %v, found %v",
+ i, availType, test.c, res)
+ } else {
+ expectedDatum := parseFuncs[availType](t, test.c.RawString())
+ if res.Compare(evalCtx, expectedDatum) != 0 {
+ t.Errorf("%d: type %s expected to be resolved from the tree.StrVal %v to tree.Datum %v"+
+ ", found %v",
+ i, availType, test.c, expectedDatum, res)
+ }
}
}
- }
- // Make sure the expected number of types can be resolved from the tree.StrVal.
- if expCount := len(test.parseOptions); parseableCount != expCount {
- t.Errorf("%d: expected %d successfully resolvable types for the tree.StrVal %v, found %d",
- i, expCount, test.c, parseableCount)
- }
+ // Make sure the expected number of types can be resolved from the tree.StrVal.
+ if expCount := len(test.parseOptions); parseableCount != expCount {
+ t.Errorf("%d: expected %d successfully resolvable types for the tree.StrVal %v, found %d",
+ i, expCount, test.c, parseableCount)
+ }
+ })
}
}
diff --git a/pkg/sql/sem/tree/datum.go b/pkg/sql/sem/tree/datum.go
index 285f4334fb60..386671d007e9 100644
--- a/pkg/sql/sem/tree/datum.go
+++ b/pkg/sql/sem/tree/datum.go
@@ -27,6 +27,7 @@ import (
"github.com/cockroachdb/cockroach/pkg/geo"
"github.com/cockroachdb/cockroach/pkg/sql/lex"
"github.com/cockroachdb/cockroach/pkg/sql/lexbase"
+ "github.com/cockroachdb/cockroach/pkg/sql/pgrepl/lsn"
"github.com/cockroachdb/cockroach/pkg/sql/pgwire/pgcode"
"github.com/cockroachdb/cockroach/pkg/sql/pgwire/pgerror"
"github.com/cockroachdb/cockroach/pkg/sql/sessiondatapb"
@@ -3580,6 +3581,132 @@ func (d *DGeometry) Size() uintptr {
return d.Geometry.SpatialObjectRef().MemSize()
}
+type DPGLSN struct {
+ lsn.LSN
+}
+
+// NewDPGLSN returns a new PGLSN Datum.
+func NewDPGLSN(l lsn.LSN) *DPGLSN {
+ return &DPGLSN{LSN: l}
+}
+
+// ParseDPGLSN attempts to parse `str` as a PGLSN type.
+func ParseDPGLSN(str string) (*DPGLSN, error) {
+ v, err := lsn.ParseLSN(str)
+ if err != nil {
+ return nil, errors.Wrapf(err, "could not parse pg_lsn")
+ }
+ return NewDPGLSN(v), nil
+}
+
+// AsDPGLSN attempts to retrieve a *DPGLSN from an Expr, returning a
+// *DPGLSN and a flag signifying whether the assertion was successful. The
+// function should be used instead of direct type assertions wherever a
+// *DPGLSN wrapped by a *DOidWrapper is possible.
+func AsDPGLSN(e Expr) (*DPGLSN, bool) {
+ switch t := e.(type) {
+ case *DPGLSN:
+ return t, true
+ case *DOidWrapper:
+ return AsDPGLSN(t.Wrapped)
+ }
+ return nil, false
+}
+
+// MustBeDPGLSN attempts to retrieve a *DPGLSN from an Expr, panicking
+// if the assertion fails.
+func MustBeDPGLSN(e Expr) *DPGLSN {
+ i, ok := AsDPGLSN(e)
+ if !ok {
+ panic(errors.AssertionFailedf("expected *DPGLSN, found %T", e))
+ }
+ return i
+}
+
+// ResolvedType implements the TypedExpr interface.
+func (*DPGLSN) ResolvedType() *types.T {
+ return types.PGLSN
+}
+
+// Compare implements the Datum interface.
+func (d *DPGLSN) Compare(ctx CompareContext, other Datum) int {
+ res, err := d.CompareError(ctx, other)
+ if err != nil {
+ panic(err)
+ }
+ return res
+}
+
+// CompareError implements the Datum interface.
+func (d *DPGLSN) CompareError(ctx CompareContext, other Datum) (int, error) {
+ if other == DNull {
+ // NULL is less than any non-NULL value.
+ return 1, nil
+ }
+ v, ok := ctx.UnwrapDatum(other).(*DPGLSN)
+ if !ok {
+ return 0, makeUnsupportedComparisonMessage(d, other)
+ }
+ return d.LSN.Compare(v.LSN), nil
+}
+
+// Prev implements the Datum interface.
+func (d *DPGLSN) Prev(ctx CompareContext) (Datum, bool) {
+ if d.IsMin(ctx) {
+ return nil, false
+ }
+ return NewDPGLSN(d.LSN.Sub(1)), true
+}
+
+// Next implements the Datum interface.
+func (d *DPGLSN) Next(ctx CompareContext) (Datum, bool) {
+ if d.IsMax(ctx) {
+ return nil, false
+ }
+ return NewDPGLSN(d.LSN.Add(1)), true
+}
+
+// IsMax implements the Datum interface.
+func (d *DPGLSN) IsMax(ctx CompareContext) bool {
+ return d.LSN == math.MaxUint64
+}
+
+// IsMin implements the Datum interface.
+func (d *DPGLSN) IsMin(ctx CompareContext) bool {
+ return d.LSN == 0
+}
+
+// Max implements the Datum interface.
+func (d *DPGLSN) Max(ctx CompareContext) (Datum, bool) {
+	return NewDPGLSN(math.MaxUint64), true
+}
+
+// Min implements the Datum interface.
+func (d *DPGLSN) Min(ctx CompareContext) (Datum, bool) {
+	return NewDPGLSN(0), true
+}
+
+// AmbiguousFormat implements the Datum interface.
+func (*DPGLSN) AmbiguousFormat() bool { return true }
+
+// Format implements the NodeFormatter interface.
+func (d *DPGLSN) Format(ctx *FmtCtx) {
+ f := ctx.flags
+ bareStrings := f.HasFlags(FmtFlags(lexbase.EncBareStrings))
+ if !bareStrings {
+ ctx.WriteByte('\'')
+ }
+ ctx.WriteString(d.LSN.String())
+ if !bareStrings {
+ ctx.WriteByte('\'')
+ }
+}
+
+// Size implements the Datum interface.
+func (d *DPGLSN) Size() uintptr {
+ return unsafe.Sizeof(*d)
+}
+
// DBox2D is the Datum representation of the Box2D type.
type DBox2D struct {
geo.CartesianBoundingBox
@@ -3863,7 +3990,7 @@ func AsJSON(
// This is RFC3339Nano, but without the TZ fields.
return json.FromString(formatTime(t.UTC(), "2006-01-02T15:04:05.999999999")), nil
case *DDate, *DUuid, *DOid, *DInterval, *DBytes, *DIPAddr, *DTime, *DTimeTZ, *DBitArray, *DBox2D,
- *DTSVector, *DTSQuery:
+ *DTSVector, *DTSQuery, *DPGLSN:
return json.FromString(
AsStringWithFlags(t, FmtBareStrings, FmtDataConversionConfig(dcc), FmtLocation(loc)),
), nil
@@ -5941,6 +6068,7 @@ var baseDatumTypeSizes = map[types.Family]struct {
types.DateFamily: {unsafe.Sizeof(DDate{}), fixedSize},
types.GeographyFamily: {unsafe.Sizeof(DGeography{}), variableSize},
types.GeometryFamily: {unsafe.Sizeof(DGeometry{}), variableSize},
+ types.PGLSNFamily: {unsafe.Sizeof(DPGLSN{}), fixedSize},
types.TimeFamily: {unsafe.Sizeof(DTime(0)), fixedSize},
types.TimeTZFamily: {unsafe.Sizeof(DTimeTZ{}), fixedSize},
types.TimestampFamily: {unsafe.Sizeof(DTimestamp{}), fixedSize},
diff --git a/pkg/sql/sem/tree/datum_alloc.go b/pkg/sql/sem/tree/datum_alloc.go
index a01a3e3ddce2..a97d5be03163 100644
--- a/pkg/sql/sem/tree/datum_alloc.go
+++ b/pkg/sql/sem/tree/datum_alloc.go
@@ -48,6 +48,7 @@ type DatumAlloc struct {
dtupleAlloc []DTuple
doidAlloc []DOid
dvoidAlloc []DVoid
+ dpglsnAlloc []DPGLSN
// stringAlloc is used by all datum types that are strings (DBytes, DString, DEncodedKey).
stringAlloc []string
env CollationEnvironment
@@ -96,6 +97,21 @@ func (a *DatumAlloc) NewDInt(v DInt) *DInt {
return r
}
+// NewDPGLSN allocates a DPGLSN.
+func (a *DatumAlloc) NewDPGLSN(v DPGLSN) *DPGLSN {
+ if a.AllocSize == 0 {
+ a.AllocSize = defaultDatumAllocSize
+ }
+ buf := &a.dpglsnAlloc
+ if len(*buf) == 0 {
+ *buf = make([]DPGLSN, a.AllocSize)
+ }
+ r := &(*buf)[0]
+ *r = v
+ *buf = (*buf)[1:]
+ return r
+}
+
// NewDFloat allocates a DFloat.
func (a *DatumAlloc) NewDFloat(v DFloat) *DFloat {
if a.AllocSize == 0 {
diff --git a/pkg/sql/sem/tree/eval.go b/pkg/sql/sem/tree/eval.go
index 752b85b34a46..c82b7f5920bb 100644
--- a/pkg/sql/sem/tree/eval.go
+++ b/pkg/sql/sem/tree/eval.go
@@ -1513,6 +1513,7 @@ var CmpOps = cmpOpFixups(map[treecmp.ComparisonOperatorSymbol]*CmpOpOverloads{
makeEqFn(types.Interval, types.Interval, volatility.Leakproof),
makeEqFn(types.Jsonb, types.Jsonb, volatility.Immutable),
makeEqFn(types.Oid, types.Oid, volatility.Leakproof),
+ makeEqFn(types.PGLSN, types.PGLSN, volatility.Leakproof),
makeEqFn(types.String, types.String, volatility.Leakproof),
makeEqFn(types.Time, types.Time, volatility.Leakproof),
makeEqFn(types.TimeTZ, types.TimeTZ, volatility.Leakproof),
@@ -1571,6 +1572,7 @@ var CmpOps = cmpOpFixups(map[treecmp.ComparisonOperatorSymbol]*CmpOpOverloads{
makeLtFn(types.Int, types.Int, volatility.Leakproof),
makeLtFn(types.Interval, types.Interval, volatility.Leakproof),
makeLtFn(types.Oid, types.Oid, volatility.Leakproof),
+ makeLtFn(types.PGLSN, types.PGLSN, volatility.Leakproof),
makeLtFn(types.String, types.String, volatility.Leakproof),
makeLtFn(types.Time, types.Time, volatility.Leakproof),
makeLtFn(types.TimeTZ, types.TimeTZ, volatility.Leakproof),
@@ -1628,6 +1630,7 @@ var CmpOps = cmpOpFixups(map[treecmp.ComparisonOperatorSymbol]*CmpOpOverloads{
makeLeFn(types.Int, types.Int, volatility.Leakproof),
makeLeFn(types.Interval, types.Interval, volatility.Leakproof),
makeLeFn(types.Oid, types.Oid, volatility.Leakproof),
+ makeLeFn(types.PGLSN, types.PGLSN, volatility.Leakproof),
makeLeFn(types.String, types.String, volatility.Leakproof),
makeLeFn(types.Time, types.Time, volatility.Leakproof),
makeLeFn(types.TimeTZ, types.TimeTZ, volatility.Leakproof),
@@ -1706,6 +1709,7 @@ var CmpOps = cmpOpFixups(map[treecmp.ComparisonOperatorSymbol]*CmpOpOverloads{
makeIsFn(types.Interval, types.Interval, volatility.Leakproof),
makeIsFn(types.Jsonb, types.Jsonb, volatility.Immutable),
makeIsFn(types.Oid, types.Oid, volatility.Leakproof),
+ makeIsFn(types.PGLSN, types.PGLSN, volatility.Leakproof),
makeIsFn(types.String, types.String, volatility.Leakproof),
makeIsFn(types.Time, types.Time, volatility.Leakproof),
makeIsFn(types.TimeTZ, types.TimeTZ, volatility.Leakproof),
@@ -1772,6 +1776,7 @@ var CmpOps = cmpOpFixups(map[treecmp.ComparisonOperatorSymbol]*CmpOpOverloads{
makeEvalTupleIn(types.Interval, volatility.Leakproof),
makeEvalTupleIn(types.Jsonb, volatility.Leakproof),
makeEvalTupleIn(types.Oid, volatility.Leakproof),
+ makeEvalTupleIn(types.PGLSN, volatility.Leakproof),
makeEvalTupleIn(types.String, volatility.Leakproof),
makeEvalTupleIn(types.Time, volatility.Leakproof),
makeEvalTupleIn(types.TimeTZ, volatility.Leakproof),
diff --git a/pkg/sql/sem/tree/eval_expr_generated.go b/pkg/sql/sem/tree/eval_expr_generated.go
index 7bc21cc7480a..481a4987379c 100644
--- a/pkg/sql/sem/tree/eval_expr_generated.go
+++ b/pkg/sql/sem/tree/eval_expr_generated.go
@@ -216,6 +216,11 @@ func (node *DOidWrapper) Eval(ctx context.Context, v ExprEvaluator) (Datum, erro
return node, nil
}
+// Eval is part of the TypedExpr interface.
+func (node *DPGLSN) Eval(ctx context.Context, v ExprEvaluator) (Datum, error) {
+ return node, nil
+}
+
// Eval is part of the TypedExpr interface.
func (node *DString) Eval(ctx context.Context, v ExprEvaluator) (Datum, error) {
return node, nil
diff --git a/pkg/sql/sem/tree/expr.go b/pkg/sql/sem/tree/expr.go
index 5c17e3d4b58c..7f34f2993082 100644
--- a/pkg/sql/sem/tree/expr.go
+++ b/pkg/sql/sem/tree/expr.go
@@ -1726,6 +1726,7 @@ func (node *DTimeTZ) String() string { return AsString(node) }
func (node *DDecimal) String() string { return AsString(node) }
func (node *DFloat) String() string { return AsString(node) }
func (node *DBox2D) String() string { return AsString(node) }
+func (node *DPGLSN) String() string { return AsString(node) }
func (node *DGeography) String() string { return AsString(node) }
func (node *DGeometry) String() string { return AsString(node) }
func (node *DInt) String() string { return AsString(node) }
diff --git a/pkg/sql/sem/tree/parse_string.go b/pkg/sql/sem/tree/parse_string.go
index a1507d0fa66c..fee776a401e6 100644
--- a/pkg/sql/sem/tree/parse_string.go
+++ b/pkg/sql/sem/tree/parse_string.go
@@ -67,6 +67,8 @@ func ParseAndRequireString(
return nil, false, typErr
}
d, err = ParseDIntervalWithTypeMetadata(intervalStyle(ctx), s, itm)
+ case types.PGLSNFamily:
+ d, err = ParseDPGLSN(s)
case types.Box2DFamily:
d, err = ParseDBox2D(s)
case types.GeographyFamily:
diff --git a/pkg/sql/sem/tree/testutils.go b/pkg/sql/sem/tree/testutils.go
index 9a20c478515c..e573d5307db2 100644
--- a/pkg/sql/sem/tree/testutils.go
+++ b/pkg/sql/sem/tree/testutils.go
@@ -81,6 +81,8 @@ func SampleDatum(t *types.T) Datum {
return j
case types.OidFamily:
return NewDOid(1009)
+ case types.PGLSNFamily:
+ return NewDPGLSN(0x1000000100)
case types.Box2DFamily:
b := geo.NewCartesianBoundingBox().AddPoint(1, 2).AddPoint(3, 4)
return NewDBox2D(*b)
diff --git a/pkg/sql/sem/tree/type_check.go b/pkg/sql/sem/tree/type_check.go
index c581b8dc1f52..d6311cee22cc 100644
--- a/pkg/sql/sem/tree/type_check.go
+++ b/pkg/sql/sem/tree/type_check.go
@@ -1864,6 +1864,12 @@ func (d *DBox2D) TypeCheck(_ context.Context, _ *SemaContext, _ *types.T) (Typed
return d, nil
}
+// TypeCheck implements the Expr interface. It is implemented as an idempotent
+// identity function for Datum.
+func (d *DPGLSN) TypeCheck(_ context.Context, _ *SemaContext, _ *types.T) (TypedExpr, error) {
+ return d, nil
+}
+
// TypeCheck implements the Expr interface. It is implemented as an idempotent
// identity function for Datum.
func (d *DGeography) TypeCheck(_ context.Context, _ *SemaContext, _ *types.T) (TypedExpr, error) {
diff --git a/pkg/sql/sem/tree/walk.go b/pkg/sql/sem/tree/walk.go
index 5b9749786c77..8c582a5f925f 100644
--- a/pkg/sql/sem/tree/walk.go
+++ b/pkg/sql/sem/tree/walk.go
@@ -759,6 +759,9 @@ func (expr *DInterval) Walk(_ Visitor) Expr { return expr }
// Walk implements the Expr interface.
func (expr *DBox2D) Walk(_ Visitor) Expr { return expr }
+// Walk implements the Expr interface.
+func (expr *DPGLSN) Walk(_ Visitor) Expr { return expr }
+
// Walk implements the Expr interface.
func (expr *DGeography) Walk(_ Visitor) Expr { return expr }
diff --git a/pkg/sql/types/oid.go b/pkg/sql/types/oid.go
index 41336ec6b15c..3a727b2c3a33 100644
--- a/pkg/sql/types/oid.go
+++ b/pkg/sql/types/oid.go
@@ -83,6 +83,7 @@ var OidToType = map[oid.Oid]*T{
oid.T_numeric: Decimal,
oid.T_oid: Oid,
oid.T_oidvector: OidVector,
+ oid.T_pg_lsn: PGLSN,
oid.T_record: AnyTuple,
oid.T_regclass: RegClass,
oid.T_regnamespace: RegNamespace,
@@ -130,6 +131,7 @@ var oidToArrayOid = map[oid.Oid]oid.Oid{
oid.T_numeric: oid.T__numeric,
oid.T_oid: oid.T__oid,
oid.T_oidvector: oid.T__oidvector,
+ oid.T_pg_lsn: oid.T__pg_lsn,
oid.T_record: oid.T__record,
oid.T_regclass: oid.T__regclass,
oid.T_regnamespace: oid.T__regnamespace,
@@ -169,6 +171,7 @@ var familyToOid = map[Family]oid.Oid{
TimestampTZFamily: oid.T_timestamptz,
CollatedStringFamily: oid.T_text,
OidFamily: oid.T_oid,
+ PGLSNFamily: oid.T_pg_lsn,
UnknownFamily: oid.T_unknown,
UuidFamily: oid.T_uuid,
ArrayFamily: oid.T_anyarray,
@@ -244,7 +247,7 @@ func CalcArrayOid(elemTyp *T) oid.Oid {
o = oidToArrayOid[o]
}
if o == 0 {
- panic(errors.AssertionFailedf("oid %d couldn't be mapped to array oid", o))
+ panic(errors.AssertionFailedf("oid %d couldn't be mapped to array oid", elemTyp.Oid()))
}
return o
}
diff --git a/pkg/sql/types/types.go b/pkg/sql/types/types.go
index 2c1376ecccde..dce5ee04ad26 100644
--- a/pkg/sql/types/types.go
+++ b/pkg/sql/types/types.go
@@ -458,6 +458,15 @@ var (
},
}
+ // PGLSN is the type representing a PostgreSQL LSN object.
+ PGLSN = &T{
+ InternalType: InternalType{
+ Family: PGLSNFamily,
+ Oid: oid.T_pg_lsn,
+ Locale: &emptyLocale,
+ },
+ }
+
// Void is the type representing void.
Void = &T{
InternalType: InternalType{
@@ -521,6 +530,7 @@ var (
Oid,
Uuid,
INet,
+ PGLSN,
Time,
TimeTZ,
Jsonb,
@@ -598,10 +608,14 @@ var (
UUIDArray = &T{InternalType: InternalType{
Family: ArrayFamily, ArrayContents: Uuid, Oid: oid.T__uuid, Locale: &emptyLocale}}
- // TimeArray is the type of an array value having Date-typed elements.
+ // DateArray is the type of an array value having Date-typed elements.
DateArray = &T{InternalType: InternalType{
Family: ArrayFamily, ArrayContents: Date, Oid: oid.T__date, Locale: &emptyLocale}}
+ // PGLSNArray is the type of an array value having PGLSN-typed elements.
+ PGLSNArray = &T{InternalType: InternalType{
+ Family: ArrayFamily, ArrayContents: PGLSN, Oid: oid.T__pg_lsn, Locale: &emptyLocale}}
+
// TimeArray is the type of an array value having Time-typed elements.
TimeArray = &T{InternalType: InternalType{
Family: ArrayFamily, ArrayContents: Time, Oid: oid.T__time, Locale: &emptyLocale}}
@@ -1451,6 +1465,7 @@ var familyNames = map[Family]string{
IntervalFamily: "interval",
JsonFamily: "jsonb",
OidFamily: "oid",
+ PGLSNFamily: "pg_lsn",
StringFamily: "string",
TimeFamily: "time",
TimestampFamily: "timestamp",
@@ -1718,6 +1733,8 @@ func (t *T) SQLStandardNameWithTypmod(haveTypmod bool, typmod int) string {
default:
panic(errors.AssertionFailedf("unexpected Oid: %v", errors.Safe(t.Oid())))
}
+ case PGLSNFamily:
+ return "pg_lsn"
case StringFamily, CollatedStringFamily:
switch t.Oid() {
case oid.T_text:
@@ -1951,7 +1968,7 @@ func (t *T) SQLStringForError() redact.RedactableString {
IntervalFamily, StringFamily, BytesFamily, TimestampTZFamily, CollatedStringFamily, OidFamily,
UnknownFamily, UuidFamily, INetFamily, TimeFamily, JsonFamily, TimeTZFamily, BitFamily,
GeometryFamily, GeographyFamily, Box2DFamily, VoidFamily, EncodedKeyFamily, TSQueryFamily,
- TSVectorFamily, AnyFamily:
+ TSVectorFamily, AnyFamily, PGLSNFamily:
// These types do not contain other types, and do not require redaction.
return redact.Sprint(redact.SafeString(t.SQLString()))
}
diff --git a/pkg/sql/types/types.proto b/pkg/sql/types/types.proto
index b08f6c8ca1d6..433b9cd37a0f 100644
--- a/pkg/sql/types/types.proto
+++ b/pkg/sql/types/types.proto
@@ -390,6 +390,12 @@ enum Family {
// Oid : T_tsvector
TSVectorFamily = 29;
+ // PGLSNFamily is a type family for the pg_lsn type, which is the type
+ // representing PG LSN objects.
+ // Canonical: types.PGLSN
+ // Oid : T_pg_lsn
+ PGLSNFamily = 30;
+
// AnyFamily is a special type family used during static analysis as a
// wildcard type that matches any other type, including scalar, array, and
// tuple types. Execution-time values should never have this type. As an
diff --git a/pkg/util/parquet/BUILD.bazel b/pkg/util/parquet/BUILD.bazel
index e99fa43466bb..dea384112f3b 100644
--- a/pkg/util/parquet/BUILD.bazel
+++ b/pkg/util/parquet/BUILD.bazel
@@ -14,6 +14,7 @@ go_library(
deps = [
"//pkg/geo",
"//pkg/geo/geopb",
+ "//pkg/sql/pgrepl/lsn",
"//pkg/sql/pgwire/pgcode",
"//pkg/sql/pgwire/pgerror",
"//pkg/sql/sem/catid",
diff --git a/pkg/util/parquet/decoders.go b/pkg/util/parquet/decoders.go
index 9f3f37cf7176..8d79319e7b13 100644
--- a/pkg/util/parquet/decoders.go
+++ b/pkg/util/parquet/decoders.go
@@ -16,6 +16,7 @@ import (
"github.com/apache/arrow/go/v11/parquet"
"github.com/cockroachdb/cockroach/pkg/geo"
"github.com/cockroachdb/cockroach/pkg/geo/geopb"
+ "github.com/cockroachdb/cockroach/pkg/sql/pgrepl/lsn"
"github.com/cockroachdb/cockroach/pkg/sql/sem/tree"
"github.com/cockroachdb/cockroach/pkg/sql/types"
"github.com/cockroachdb/cockroach/pkg/util/bitarray"
@@ -55,6 +56,12 @@ func (stringDecoder) decode(v parquet.ByteArray) (tree.Datum, error) {
return tree.NewDString(string(v)), nil
}
+type pglsnDecoder struct{}
+
+func (pglsnDecoder) decode(v int64) (tree.Datum, error) {
+ return tree.NewDPGLSN(lsn.LSN(v)), nil
+}
+
type int64Decoder struct{}
func (int64Decoder) decode(v int64) (tree.Datum, error) {
@@ -279,6 +286,8 @@ func decoderFromFamilyAndType(typOid oid.Oid, family types.Family) (decoder, err
return dateDecoder{}, nil
case types.Box2DFamily:
return box2DDecoder{}, nil
+ case types.PGLSNFamily:
+ return pglsnDecoder{}, nil
case types.GeographyFamily:
return geographyDecoder{}, nil
case types.GeometryFamily:
@@ -313,6 +322,7 @@ func init() {
var _, _ = stringDecoder{}.decode(parquet.ByteArray{})
var _, _ = int32Decoder{}.decode(0)
var _, _ = int64Decoder{}.decode(0)
+ var _, _ = pglsnDecoder{}.decode(0)
var _, _ = decimalDecoder{}.decode(parquet.ByteArray{})
var _, _ = timestampDecoder{}.decode(parquet.ByteArray{})
var _, _ = timestampTZDecoder{}.decode(parquet.ByteArray{})
diff --git a/pkg/util/parquet/schema.go b/pkg/util/parquet/schema.go
index 920550b9ee18..5e83f4224fdb 100644
--- a/pkg/util/parquet/schema.go
+++ b/pkg/util/parquet/schema.go
@@ -152,6 +152,16 @@ func makeColumn(colName string, typ *types.T, repetitions parquet.Repetition) (d
result.node = schema.NewInt32Node(colName, repetitions, defaultSchemaFieldID)
result.colWriter = scalarWriter(writeInt32)
return result, nil
+ case types.PGLSNFamily:
+ result.node, err = schema.NewPrimitiveNodeLogical(colName,
+ repetitions, schema.NewIntLogicalType(64, true),
+ parquet.Types.Int64, defaultTypeLength,
+ defaultSchemaFieldID)
+ if err != nil {
+ return datumColumn{}, err
+ }
+ result.colWriter = scalarWriter(writePGLSN)
+ return result, nil
case types.DecimalFamily:
// According to PostgresSQL docs, scale or precision of 0 implies max
// precision and scale. This code assumes that CRDB matches this behavior.
diff --git a/pkg/util/parquet/write_functions.go b/pkg/util/parquet/write_functions.go
index 7c3818470844..eaba0efd0f50 100644
--- a/pkg/util/parquet/write_functions.go
+++ b/pkg/util/parquet/write_functions.go
@@ -295,6 +295,20 @@ func writeInt64(
return writeBatch[int64](w, a.int64Batch[:], defLevels, repLevels)
}
+func writePGLSN(
+ d tree.Datum, w file.ColumnChunkWriter, a *batchAlloc, defLevels, repLevels []int16,
+) error {
+ if d == tree.DNull {
+ return writeBatch[int64](w, a.int64Batch[:], defLevels, repLevels)
+ }
+ di, ok := tree.AsDPGLSN(d)
+ if !ok {
+ return pgerror.Newf(pgcode.DatatypeMismatch, "expected DPGLSN, found %T", d)
+ }
+ a.int64Batch[0] = int64(di.LSN)
+ return writeBatch[int64](w, a.int64Batch[:], defLevels, repLevels)
+}
+
func writeBool(
d tree.Datum, w file.ColumnChunkWriter, a *batchAlloc, defLevels, repLevels []int16,
) error {
diff --git a/pkg/workload/rand/rand.go b/pkg/workload/rand/rand.go
index 21bbf780521f..3f3ae86acde0 100644
--- a/pkg/workload/rand/rand.go
+++ b/pkg/workload/rand/rand.go
@@ -429,6 +429,8 @@ func DatumToGoSQL(d tree.Datum) (interface{}, error) {
return geo.SpatialObjectToEWKT(d.Geography.SpatialObject(), 2)
case *tree.DGeometry:
return geo.SpatialObjectToEWKT(d.Geometry.SpatialObject(), 2)
+ case *tree.DPGLSN:
+ return d.LSN.String(), nil
case *tree.DTSQuery:
return d.String(), nil
case *tree.DTSVector:
|