Skip to content

Commit

Permalink
Fix S3ToRedshiftOperator does not support default values on UPSERT (#32558)
Browse files Browse the repository at this point in the history

Co-authored-by: eladkal <[email protected]>
  • Loading branch information
dashton90 and eladkal authored Jul 13, 2023
1 parent bf68e10 commit 145b16c
Show file tree
Hide file tree
Showing 2 changed files with 2 additions and 2 deletions.
2 changes: 1 addition & 1 deletion airflow/providers/amazon/aws/transfers/s3_to_redshift.py
Original file line number Diff line number Diff line change
Expand Up @@ -178,7 +178,7 @@ def execute(self, context: Context) -> None:
where_statement = " AND ".join([f"{self.table}.{k} = {copy_destination}.{k}" for k in keys])

sql = [
f"CREATE TABLE {copy_destination} (LIKE {destination});",
f"CREATE TABLE {copy_destination} (LIKE {destination} INCLUDING DEFAULTS);",
copy_statement,
"BEGIN;",
f"DELETE FROM {destination} USING {copy_destination} WHERE {where_statement};",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -222,7 +222,7 @@ def test_upsert(self, mock_run, mock_session, mock_connection, mock_hook):
;
"""
transaction = f"""
CREATE TABLE #{table} (LIKE {schema}.{table});
CREATE TABLE #{table} (LIKE {schema}.{table} INCLUDING DEFAULTS);
{copy_statement}
BEGIN;
DELETE FROM {schema}.{table} USING #{table} WHERE {table}.id = #{table}.id;
Expand Down

0 comments on commit 145b16c

Please sign in to comment.