Skip to content

Commit

Permalink
Allow DROP NOT NULL on column of compressed table
Browse files Browse the repository at this point in the history
Remove the restriction on `ALTER TABLE ... ALTER COLUMN DROP NOT NULL`
on compressed tables.

Dropping the `NOT NULL` constraint on a column of a compressed table is
safe since it only relaxes an existing constraint: no stored data becomes
invalid, so it does not require re-writing or re-validating any data.
  • Loading branch information
mkindahl committed Nov 18, 2024
1 parent 7740ae8 commit c5338ca
Show file tree
Hide file tree
Showing 5 changed files with 125 additions and 9 deletions.
1 change: 1 addition & 0 deletions src/process_utility.c
Original file line number Diff line number Diff line change
Expand Up @@ -280,6 +280,7 @@ check_alter_table_allowed_on_ht_with_compression(Hypertable *ht, AlterTableStmt
case AT_ReplicaIdentity:
case AT_ReAddStatistics:
case AT_SetCompression:
case AT_DropNotNull:
#if PG15_GE
case AT_SetAccessMethod:
#endif
Expand Down
46 changes: 45 additions & 1 deletion tsl/test/expected/compression_ddl.out
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ CREATE TABLESPACE tablespace1 OWNER :ROLE_DEFAULT_PERM_USER LOCATION :TEST_TABLE
CREATE TABLESPACE tablespace2 OWNER :ROLE_DEFAULT_PERM_USER LOCATION :TEST_TABLESPACE2_PATH;
\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER
SET timezone TO 'America/Los_Angeles';
CREATE TABLE test1 ("Time" timestamptz, i integer, b bigint, t text);
CREATE TABLE test1 ("Time" timestamptz, i integer not null, b bigint, t text);
SELECT table_name from create_hypertable('test1', 'Time', chunk_time_interval=> INTERVAL '1 day');
NOTICE: adding not-null constraint to column "Time"
table_name
Expand Down Expand Up @@ -2535,3 +2535,47 @@ ALTER TABLE compression_drop DROP COLUMN v0;
ERROR: cannot drop orderby or segmentby column from a chunk with compression enabled
\set ON_ERROR_STOP 1
DROP TABLE compression_drop;
SET client_min_messages = ERROR;
CREATE TABLE test2 (ts timestamptz, i integer not null, b bigint, t text);
SELECT table_name from create_hypertable('test2', 'ts', chunk_time_interval=> INTERVAL '1 day');
table_name
------------
test2
(1 row)

INSERT INTO test2
SELECT t,
gen_rand_minstd(),
gen_rand_minstd(),
gen_rand_minstd()::text
FROM generate_series('2018-03-02 1:00'::TIMESTAMPTZ, '2018-03-28 1:00', '1 hour') t;
ALTER TABLE test2 SET (
timescaledb.compress,
timescaledb.compress_segmentby = 'b',
timescaledb.compress_orderby = 'ts DESC'
);
\set ON_ERROR_STOP 0
INSERT INTO test2(ts,b,t) VALUES ('2024-11-18 18:04:51',99,'magic');
ERROR: null value in column "i" of relation "_hyper_44_180_chunk" violates not-null constraint
\set ON_ERROR_STOP 1
ALTER TABLE test2 ALTER COLUMN i DROP NOT NULL;
INSERT INTO test2(ts,b,t) VALUES ('2024-11-18 18:04:51',99,'magic');
SELECT count(*) FROM test2 WHERE i IS NULL;
count
-------
1
(1 row)

SELECT count(compress_chunk(ch)) FROM show_chunks('test2') ch;
count
-------
28
(1 row)

SELECT count(*) FROM test2 WHERE i IS NULL;
count
-------
1
(1 row)

SET client_min_messages = NOTICE;
36 changes: 32 additions & 4 deletions tsl/test/expected/hypercore_ddl.out
Original file line number Diff line number Diff line change
Expand Up @@ -11,15 +11,14 @@ select setseed(1);
(1 row)

create table readings(
time timestamptz unique,
location int,
device int,
time timestamptz not null unique,
location int not null,
device int not null,
temp numeric(4,1),
humidity float,
jdata jsonb
);
select create_hypertable('readings', by_range('time', '1d'::interval));
NOTICE: adding not-null constraint to column "time"
create_hypertable
-------------------
(1,t)
Expand Down Expand Up @@ -100,3 +99,32 @@ select (select count(*) from readings) tuples,
0 | 0
(1 row)

\set ON_ERROR_STOP 0
insert into readings(time,device,temp,humidity,jdata)
values ('2024-01-01 00:00:00', 1, 99.0, 99.0, '{"magic": "yes"}'::jsonb);
ERROR: null value in column "location" of relation "_hyper_1_9_chunk" violates not-null constraint
\set ON_ERROR_STOP 1
-- Test altering column definitions
alter table readings
alter column location drop not null;
-- This should now work.
insert into readings(time,device,temp,humidity,jdata)
values ('2024-01-01 00:00:00', 1, 99.0, 99.0, '{"magic": "yes"}'::jsonb);
select count(*) from readings where location is null;
count
-------
1
(1 row)

select compress_chunk(show_chunks('readings'), hypercore_use_access_method => true);
compress_chunk
-----------------------------------------
_timescaledb_internal._hyper_1_10_chunk
(1 row)

select count(*) from readings where location is null;
count
-------
1
(1 row)

29 changes: 28 additions & 1 deletion tsl/test/sql/compression_ddl.sql
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ CREATE TABLESPACE tablespace2 OWNER :ROLE_DEFAULT_PERM_USER LOCATION :TEST_TABLE

SET timezone TO 'America/Los_Angeles';

CREATE TABLE test1 ("Time" timestamptz, i integer, b bigint, t text);
CREATE TABLE test1 ("Time" timestamptz, i integer not null, b bigint, t text);
SELECT table_name from create_hypertable('test1', 'Time', chunk_time_interval=> INTERVAL '1 day');

INSERT INTO test1 SELECT t, gen_rand_minstd(), gen_rand_minstd(), gen_rand_minstd()::text FROM generate_series('2018-03-02 1:00'::TIMESTAMPTZ, '2018-03-28 1:00', '1 hour') t;
Expand Down Expand Up @@ -1040,3 +1040,30 @@ ALTER TABLE compression_drop DROP COLUMN v0;
\set ON_ERROR_STOP 1

DROP TABLE compression_drop;

SET client_min_messages = ERROR;
CREATE TABLE test2 (ts timestamptz, i integer not null, b bigint, t text);
SELECT table_name from create_hypertable('test2', 'ts', chunk_time_interval=> INTERVAL '1 day');

INSERT INTO test2
SELECT t,
gen_rand_minstd(),
gen_rand_minstd(),
gen_rand_minstd()::text
FROM generate_series('2018-03-02 1:00'::TIMESTAMPTZ, '2018-03-28 1:00', '1 hour') t;

ALTER TABLE test2 SET (
timescaledb.compress,
timescaledb.compress_segmentby = 'b',
timescaledb.compress_orderby = 'ts DESC'
);

\set ON_ERROR_STOP 0
INSERT INTO test2(ts,b,t) VALUES ('2024-11-18 18:04:51',99,'magic');
\set ON_ERROR_STOP 1
ALTER TABLE test2 ALTER COLUMN i DROP NOT NULL;
INSERT INTO test2(ts,b,t) VALUES ('2024-11-18 18:04:51',99,'magic');
SELECT count(*) FROM test2 WHERE i IS NULL;
SELECT count(compress_chunk(ch)) FROM show_chunks('test2') ch;
SELECT count(*) FROM test2 WHERE i IS NULL;
SET client_min_messages = NOTICE;
22 changes: 19 additions & 3 deletions tsl/test/sql/hypercore_ddl.sql
Original file line number Diff line number Diff line change
Expand Up @@ -28,9 +28,9 @@ set enable_incremental_sort = false;
select setseed(1);

create table readings(
time timestamptz unique,
location int,
device int,
time timestamptz not null unique,
location int not null,
device int not null,
temp numeric(4,1),
humidity float,
jdata jsonb
Expand Down Expand Up @@ -86,3 +86,19 @@ select (select count(*) from readings) tuples,
(select count(*) from show_chunks('readings')) chunks;


\set ON_ERROR_STOP 0
insert into readings(time,device,temp,humidity,jdata)
values ('2024-01-01 00:00:00', 1, 99.0, 99.0, '{"magic": "yes"}'::jsonb);
\set ON_ERROR_STOP 1

-- Test altering column definitions
alter table readings
alter column location drop not null;

-- This should now work.
insert into readings(time,device,temp,humidity,jdata)
values ('2024-01-01 00:00:00', 1, 99.0, 99.0, '{"magic": "yes"}'::jsonb);

select count(*) from readings where location is null;
select compress_chunk(show_chunks('readings'), hypercore_use_access_method => true);
select count(*) from readings where location is null;

0 comments on commit c5338ca

Please sign in to comment.