From 12316b493ca0c1e453f7b49bca99b4101e41fa2c Mon Sep 17 00:00:00 2001 From: Astha Mohta Date: Wed, 29 Jun 2022 16:17:01 +0530 Subject: [PATCH 01/18] changes for testing in postgres --- tests/system/conftest.py | 41 ++++++++++++++++++------ tests/system/test_backup_api.py | 30 +++++++++++++----- tests/system/test_database_api.py | 2 ++ tests/system/test_session_api.py | 52 ++++++++++++++++++++++++------- 4 files changed, 97 insertions(+), 28 deletions(-) diff --git a/tests/system/conftest.py b/tests/system/conftest.py index b7004fa274..8432c78db9 100644 --- a/tests/system/conftest.py +++ b/tests/system/conftest.py @@ -57,6 +57,14 @@ def not_postgres(database_dialect): ) +@pytest.fixture(scope="session") +def not_google_standard_sql(database_dialect): + if database_dialect == DatabaseDialect.GOOGLE_STANDARD_SQL: + pytest.skip( + f"{_helpers.DATABASE_DIALECT_ENVVAR} set to GOOGLE_STANDARD_SQL in environment." + ) + + @pytest.fixture(scope="session") def database_dialect(): return ( @@ -77,7 +85,9 @@ def spanner_client(): credentials=credentials, ) else: - return spanner_v1.Client() # use google.auth.default credentials + return spanner_v1.Client( + client_options={"api_endpoint": "staging-wrenchworks.sandbox.googleapis.com"} + ) # use google.auth.default credentials @pytest.fixture(scope="session") @@ -169,14 +179,27 @@ def shared_instance( def shared_database(shared_instance, database_operation_timeout, database_dialect): database_name = _helpers.unique_id("test_database") pool = spanner_v1.BurstyPool(labels={"testcase": "database_api"}) - database = shared_instance.database( - database_name, - ddl_statements=_helpers.DDL_STATEMENTS, - pool=pool, - database_dialect=database_dialect, - ) - operation = database.create() - operation.result(database_operation_timeout) # raises on failure / timeout. + if(database_dialect == DatabaseDialect.POSTGRESQL): + database = shared_instance.database( + database_name, + pool=pool, + database_dialect=database_dialect, + ) + operation = database.create() + operation.result(database_operation_timeout) # raises on failure / timeout. + + operation = database.update_ddl(ddl_statements=_helpers.DDL_STATEMENTS) + operation.result(database_operation_timeout) # raises on failure / timeout. + + else: + database = shared_instance.database( + database_name, + ddl_statements=_helpers.DDL_STATEMENTS, + pool=pool, + database_dialect=database_dialect, + ) + operation = database.create() + operation.result(database_operation_timeout) # raises on failure / timeout. yield database diff --git a/tests/system/test_backup_api.py b/tests/system/test_backup_api.py index bfcd635e8d..3a72c1a8ea 100644 --- a/tests/system/test_backup_api.py +++ b/tests/system/test_backup_api.py @@ -14,6 +14,7 @@ import datetime import time +from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect import pytest @@ -96,14 +97,27 @@ def database_version_time(shared_database): def second_database(shared_instance, database_operation_timeout, database_dialect): database_name = _helpers.unique_id("test_database2") pool = spanner_v1.BurstyPool(labels={"testcase": "database_api"}) - database = shared_instance.database( - database_name, - ddl_statements=_helpers.DDL_STATEMENTS, - pool=pool, - database_dialect=database_dialect, - ) - operation = database.create() - operation.result(database_operation_timeout) # raises on failure / timeout. 
+ if(database_dialect == DatabaseDialect.POSTGRESQL): + database = shared_instance.database( + database_name, + pool=pool, + database_dialect=database_dialect, + ) + operation = database.create() + operation.result(database_operation_timeout) # raises on failure / timeout. + + operation = database.update_ddl(ddl_statements=_helpers.DDL_STATEMENTS) + operation.result(database_operation_timeout) # raises on failure / timeout. + + else: + database = shared_instance.database( + database_name, + ddl_statements=_helpers.DDL_STATEMENTS, + pool=pool, + database_dialect=database_dialect, + ) + operation = database.create() + operation.result(database_operation_timeout) # raises on failure / timeout. yield database diff --git a/tests/system/test_database_api.py b/tests/system/test_database_api.py index 1d21a77498..e9e6c69287 100644 --- a/tests/system/test_database_api.py +++ b/tests/system/test_database_api.py @@ -129,6 +129,7 @@ def test_create_database_pitr_success( def test_create_database_with_default_leader_success( not_emulator, # Default leader setting not supported by the emulator + not_postgres, multiregion_instance, databases_to_delete, ): @@ -270,6 +271,7 @@ def test_update_ddl_w_pitr_success( def test_update_ddl_w_default_leader_success( not_emulator, + not_postgres, multiregion_instance, databases_to_delete, ): diff --git a/tests/system/test_session_api.py b/tests/system/test_session_api.py index f211577abd..6cf096c44a 100644 --- a/tests/system/test_session_api.py +++ b/tests/system/test_session_api.py @@ -200,13 +200,29 @@ def sessions_database(shared_instance, database_operation_timeout, database_dialect): database_name = _helpers.unique_id("test_sessions", separator="_") pool = spanner_v1.BurstyPool(labels={"testcase": "session_api"}) - sessions_database = shared_instance.database( + + if(database_dialect == DatabaseDialect.POSTGRESQL): + sessions_database = shared_instance.database( + database_name, + pool=pool, + database_dialect=database_dialect, + ) + + operation = sessions_database.create() + operation.result(database_operation_timeout) + + operation = sessions_database.update_ddl(ddl_statements=_helpers.DDL_STATEMENTS) + operation.result(database_operation_timeout) + + else: + sessions_database = shared_instance.database( database_name, ddl_statements=_helpers.DDL_STATEMENTS, pool=pool, - ) - operation = sessions_database.create() - operation.result(database_operation_timeout) # raises on failure / timeout. + ) + + operation = sessions_database.create() + operation.result(database_operation_timeout) _helpers.retry_has_all_dll(sessions_database.reload)() # Some tests expect there to be a session present in the pool. @@ -1322,16 +1338,30 @@ def test_read_w_index( # Create an alternate dataase w/ index. extra_ddl = ["CREATE INDEX contacts_by_last_name ON contacts(last_name)"] pool = spanner_v1.BurstyPool(labels={"testcase": "read_w_index"}) - temp_db = shared_instance.database( + + if(database_dialect == DatabaseDialect.POSTGRESQL): + temp_db = shared_instance.database( _helpers.unique_id("test_read", separator="_"), - ddl_statements=_helpers.DDL_STATEMENTS + extra_ddl, pool=pool, database_dialect=database_dialect, ) - operation = temp_db.create() - databases_to_delete.append(temp_db) - operation.result(database_operation_timeout) # raises on failure / timeout. 
+ operation = temp_db.create() + operation.result(database_operation_timeout) + operation = temp_db.update_ddl(ddl_statements=_helpers.DDL_STATEMENTS + extra_ddl,) + operation.result(database_operation_timeout) + + else: + temp_db = shared_instance.database( + _helpers.unique_id("test_read", separator="_"), + ddl_statements=_helpers.DDL_STATEMENTS + extra_ddl, + pool=pool, + database_dialect=database_dialect, + ) + operation = temp_db.create() + operation.result(database_operation_timeout) # raises on failure / timeout. + + databases_to_delete.append(temp_db) committed = _set_up_table(temp_db, row_count) with temp_db.snapshot(read_timestamp=committed) as snapshot: @@ -2040,7 +2070,7 @@ def test_execute_sql_w_date_bindings(sessions_database, not_postgres, database_d def test_execute_sql_w_numeric_bindings( - not_emulator, not_postgres, sessions_database, database_dialect + not_emulator, sessions_database, database_dialect ): if database_dialect == DatabaseDialect.POSTGRESQL: _bind_test_helper( @@ -2060,7 +2090,7 @@ def test_execute_sql_w_numeric_bindings( ) -def test_execute_sql_w_json_bindings(not_emulator, sessions_database, database_dialect): +def test_execute_sql_w_json_bindings(not_emulator, not_postgres, sessions_database, database_dialect): _bind_test_helper( sessions_database, database_dialect, From dbbe8884ebd7d18d2b5a755562a8ed3527340b8b Mon Sep 17 00:00:00 2001 From: Astha Mohta Date: Tue, 5 Jul 2022 23:28:29 +0530 Subject: [PATCH 02/18] changes for jsonb --- google/cloud/spanner_v1/param_types.py | 1 + google/cloud/spanner_v1/types/type.py | 1 + tests/system/test_session_api.py | 11 +++++++++++ tests/unit/test_param_types.py | 17 +++++++++++++++++ 4 files changed, 30 insertions(+) diff --git a/google/cloud/spanner_v1/param_types.py b/google/cloud/spanner_v1/param_types.py index 22c4782b8d..0c03f7ecc6 100644 --- a/google/cloud/spanner_v1/param_types.py +++ b/google/cloud/spanner_v1/param_types.py @@ -31,6 +31,7 @@ NUMERIC = Type(code=TypeCode.NUMERIC) JSON = Type(code=TypeCode.JSON) PG_NUMERIC = Type(code=TypeCode.NUMERIC, type_annotation=TypeAnnotationCode.PG_NUMERIC) +PG_JSONB = Type(code=TypeCode.JSON, type_annotation=TypeAnnotationCode.PG_JSONB) def Array(element_type): diff --git a/google/cloud/spanner_v1/types/type.py b/google/cloud/spanner_v1/types/type.py index cacec433d3..b9eb7376f0 100644 --- a/google/cloud/spanner_v1/types/type.py +++ b/google/cloud/spanner_v1/types/type.py @@ -61,6 +61,7 @@ class TypeAnnotationCode(proto.Enum): TYPE_ANNOTATION_CODE_UNSPECIFIED = 0 # INT32 = 1 #unsupported PG_NUMERIC = 2 + PG_JSONB = 3 class Type(proto.Message): diff --git a/tests/system/test_session_api.py b/tests/system/test_session_api.py index 6cf096c44a..ffda175e01 100644 --- a/tests/system/test_session_api.py +++ b/tests/system/test_session_api.py @@ -89,6 +89,7 @@ LIVE_ALL_TYPES_COLUMNS[:1] + LIVE_ALL_TYPES_COLUMNS[1:7:2] + LIVE_ALL_TYPES_COLUMNS[9:17:2] + + ("jsonb_value",) ) AllTypesRowData = collections.namedtuple("AllTypesRowData", LIVE_ALL_TYPES_COLUMNS) @@ -183,6 +184,7 @@ PostGresAllTypesRowData(pkey=107, timestamp_value=SOME_TIME), PostGresAllTypesRowData(pkey=108, timestamp_value=NANO_TIME), PostGresAllTypesRowData(pkey=109, numeric_value=NUMERIC_1), + PostGresAllTypesRowData(pkey=110, jsonb_value=JSON_1), ) if _helpers.USE_EMULATOR: @@ -2099,6 +2101,15 @@ def test_execute_sql_w_json_bindings(not_emulator, not_postgres, sessions_databa [JSON_1, JSON_2], ) +def test_execute_sql_w_jsonb_bindings(not_emulator, not_google_standard_sql, sessions_database, database_dialect): 
+ _bind_test_helper( + sessions_database, + database_dialect, + spanner_v1.param_types.PG_JSONB, + JSON_1, + [JSON_1, JSON_2], + ) + def test_execute_sql_w_query_param_struct(sessions_database, not_postgres): name = "Phred" diff --git a/tests/unit/test_param_types.py b/tests/unit/test_param_types.py index 0d6a17c613..02f41c1f25 100644 --- a/tests/unit/test_param_types.py +++ b/tests/unit/test_param_types.py @@ -54,3 +54,20 @@ def test_it(self): ) self.assertEqual(found, expected) + + +class Test_JsonbParamType(unittest.TestCase): + def test_it(self): + from google.cloud.spanner_v1 import Type + from google.cloud.spanner_v1 import TypeCode + from google.cloud.spanner_v1 import TypeAnnotationCode + from google.cloud.spanner_v1 import param_types + + expected = Type( + code=TypeCode.JSON, + type_annotation=TypeAnnotationCode(TypeAnnotationCode.PG_JSONB), + ) + + found = param_types.PG_JSONB + + self.assertEqual(found, expected) From ba80e5aab9da643882a2b8320737aa88b7c4c821 Mon Sep 17 00:00:00 2001 From: Astha Mohta Date: Thu, 27 Oct 2022 03:13:35 +0530 Subject: [PATCH 03/18] samples --- samples/samples/pg_snippets.py | 692 ++++++++++++++++------------ samples/samples/pg_snippets_test.py | 73 +-- 2 files changed, 427 insertions(+), 338 deletions(-) diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index 367690dbd8..f95d28afd9 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -28,6 +28,7 @@ from google.cloud import spanner, spanner_admin_database_v1 from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect from google.cloud.spanner_v1 import param_types +from google.cloud.spanner_v1.data_types import JsonObject OPERATION_TIMEOUT_SECONDS = 240 @@ -38,19 +39,19 @@ def create_instance(instance_id): spanner_client = spanner.Client() config_name = "{}/instanceConfigs/regional-us-central1".format( - spanner_client.project_name + spanner_client.project_name ) instance = spanner_client.instance( - instance_id, - configuration_name=config_name, - display_name="This is a display name.", - node_count=1, - labels={ - "cloud_spanner_samples": "true", - "sample_name": "snippets-create_instance-explicit", - "created": str(int(time.time())), - }, + instance_id, + configuration_name=config_name, + display_name="This is a display name.", + node_count=1, + labels={ + "cloud_spanner_samples": "true", + "sample_name": "snippets-create_instance-explicit", + "created": str(int(time.time())), + }, ) operation = instance.create() @@ -71,8 +72,8 @@ def create_database(instance_id, database_id): instance = spanner_client.instance(instance_id) database = instance.database( - database_id, - database_dialect=DatabaseDialect.POSTGRESQL, + database_id, + database_dialect=DatabaseDialect.POSTGRESQL, ) operation = database.create() @@ -87,22 +88,22 @@ def create_database(instance_id, database_id): def create_table_using_ddl(database_name): spanner_client = spanner.Client() request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( - database=database_name, - statements=[ - """CREATE TABLE Singers ( + database=database_name, + statements=[ + """CREATE TABLE Singers ( SingerId bigint NOT NULL, FirstName character varying(1024), LastName character varying(1024), SingerInfo bytea, PRIMARY KEY (SingerId) )""", - """CREATE TABLE Albums ( + """CREATE TABLE Albums ( SingerId bigint NOT NULL, AlbumId bigint NOT NULL, AlbumTitle character varying(1024), PRIMARY KEY (SingerId, AlbumId) ) INTERLEAVE IN PARENT Singers ON DELETE CASCADE""", - ], + ], ) 
operation = spanner_client.database_admin_api.update_database_ddl(request) operation.result(OPERATION_TIMEOUT_SECONDS) @@ -124,27 +125,27 @@ def insert_data(instance_id, database_id): with database.batch() as batch: batch.insert( - table="Singers", - columns=("SingerId", "FirstName", "LastName"), - values=[ - (1, "Marc", "Richards"), - (2, "Catalina", "Smith"), - (3, "Alice", "Trentor"), - (4, "Lea", "Martin"), - (5, "David", "Lomond"), - ], + table="Singers", + columns=("SingerId", "FirstName", "LastName"), + values=[ + (1, "Marc", "Richards"), + (2, "Catalina", "Smith"), + (3, "Alice", "Trentor"), + (4, "Lea", "Martin"), + (5, "David", "Lomond"), + ], ) batch.insert( - table="Albums", - columns=("SingerId", "AlbumId", "AlbumTitle"), - values=[ - (1, 1, "Total Junk"), - (1, 2, "Go, Go, Go"), - (2, 1, "Green"), - (2, 2, "Forever Hold Your Peace"), - (2, 3, "Terrified"), - ], + table="Albums", + columns=("SingerId", "AlbumId", "AlbumTitle"), + values=[ + (1, 1, "Total Junk"), + (1, 2, "Go, Go, Go"), + (2, 1, "Green"), + (2, 2, "Forever Hold Your Peace"), + (2, 3, "Terrified"), + ], ) print("Inserted data.") @@ -195,7 +196,7 @@ def query_data(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT SingerId, AlbumId, AlbumTitle FROM Albums" + "SELECT SingerId, AlbumId, AlbumTitle FROM Albums" ) for row in results: @@ -215,8 +216,7 @@ def read_data(instance_id, database_id): with database.snapshot() as snapshot: keyset = spanner.KeySet(all_=True) results = snapshot.read( - table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), - keyset=keyset + table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), keyset=keyset ) for row in results: @@ -234,7 +234,7 @@ def add_column(instance_id, database_id): database = instance.database(database_id) operation = database.update_ddl( - ["ALTER TABLE Albums ADD COLUMN MarketingBudget BIGINT"] + ["ALTER TABLE Albums ADD COLUMN MarketingBudget BIGINT"] ) print("Waiting for operation to complete...") @@ -263,9 +263,9 @@ def update_data(instance_id, database_id): with database.batch() as batch: batch.update( - table="Albums", - columns=("SingerId", "AlbumId", "MarketingBudget"), - values=[(1, 1, 100000), (2, 2, 500000)], + table="Albums", + columns=("SingerId", "AlbumId", "MarketingBudget"), + values=[(1, 1, 100000), (2, 2, 500000)], ) print("Updated data.") @@ -294,10 +294,10 @@ def update_albums(transaction): # Read the second album budget. second_album_keyset = spanner.KeySet(keys=[(2, 2)]) second_album_result = transaction.read( - table="Albums", - columns=("MarketingBudget",), - keyset=second_album_keyset, - limit=1, + table="Albums", + columns=("MarketingBudget",), + keyset=second_album_keyset, + limit=1, ) second_album_row = list(second_album_result)[0] second_album_budget = second_album_row[0] @@ -307,16 +307,15 @@ def update_albums(transaction): if second_album_budget < transfer_amount: # Raising an exception will automatically roll back the # transaction. - raise ValueError( - "The second album doesn't have enough funds to transfer") + raise ValueError("The second album doesn't have enough funds to transfer") # Read the first album's budget. 
first_album_keyset = spanner.KeySet(keys=[(1, 1)]) first_album_result = transaction.read( - table="Albums", - columns=("MarketingBudget",), - keyset=first_album_keyset, - limit=1, + table="Albums", + columns=("MarketingBudget",), + keyset=first_album_keyset, + limit=1, ) first_album_row = list(first_album_result)[0] first_album_budget = first_album_row[0] @@ -325,15 +324,15 @@ def update_albums(transaction): second_album_budget -= transfer_amount first_album_budget += transfer_amount print( - "Setting first album's budget to {} and the second album's " - "budget to {}.".format(first_album_budget, second_album_budget) + "Setting first album's budget to {} and the second album's " + "budget to {}.".format(first_album_budget, second_album_budget) ) # Update the rows. transaction.update( - table="Albums", - columns=("SingerId", "AlbumId", "MarketingBudget"), - values=[(1, 1, first_album_budget), (2, 2, second_album_budget)], + table="Albums", + columns=("SingerId", "AlbumId", "MarketingBudget"), + values=[(1, 1, first_album_budget), (2, 2, second_album_budget)], ) database.run_in_transaction(update_albums) @@ -360,7 +359,7 @@ def query_data_with_new_column(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT SingerId, AlbumId, MarketingBudget FROM Albums" + "SELECT SingerId, AlbumId, MarketingBudget FROM Albums" ) for row in results: @@ -378,7 +377,7 @@ def add_index(instance_id, database_id): database = instance.database(database_id) operation = database.update_ddl( - ["CREATE INDEX AlbumsByAlbumTitle ON Albums(AlbumTitle)"] + ["CREATE INDEX AlbumsByAlbumTitle ON Albums(AlbumTitle)"] ) print("Waiting for operation to complete...") @@ -407,10 +406,10 @@ def read_data_with_index(instance_id, database_id): with database.snapshot() as snapshot: keyset = spanner.KeySet(all_=True) results = snapshot.read( - table="Albums", - columns=("AlbumId", "AlbumTitle"), - keyset=keyset, - index="AlbumsByAlbumTitle", + table="Albums", + columns=("AlbumId", "AlbumTitle"), + keyset=keyset, + index="AlbumsByAlbumTitle", ) for row in results: @@ -428,10 +427,10 @@ def add_storing_index(instance_id, database_id): database = instance.database(database_id) operation = database.update_ddl( - [ - "CREATE INDEX AlbumsByAlbumTitle2 ON Albums(AlbumTitle)" - "INCLUDE (MarketingBudget)" - ] + [ + "CREATE INDEX AlbumsByAlbumTitle2 ON Albums(AlbumTitle)" + "INCLUDE (MarketingBudget)" + ] ) print("Waiting for operation to complete...") @@ -463,15 +462,14 @@ def read_data_with_storing_index(instance_id, database_id): with database.snapshot() as snapshot: keyset = spanner.KeySet(all_=True) results = snapshot.read( - table="Albums", - columns=("AlbumId", "AlbumTitle", "MarketingBudget"), - keyset=keyset, - index="AlbumsByAlbumTitle2", + table="Albums", + columns=("AlbumId", "AlbumTitle", "MarketingBudget"), + keyset=keyset, + index="AlbumsByAlbumTitle2", ) for row in results: - print("AlbumId: {}, AlbumTitle: {}, " "MarketingBudget: {}".format( - *row)) + print("AlbumId: {}, AlbumTitle: {}, " "MarketingBudget: {}".format(*row)) # [END spanner_postgresql_read_data_with_storing_index] @@ -491,7 +489,7 @@ def read_only_transaction(instance_id, database_id): with database.snapshot(multi_use=True) as snapshot: # Read using SQL. 
results = snapshot.execute_sql( - "SELECT SingerId, AlbumId, AlbumTitle FROM Albums" + "SELECT SingerId, AlbumId, AlbumTitle FROM Albums" ) print("Results from first read:") @@ -503,8 +501,7 @@ def read_only_transaction(instance_id, database_id): # return the same data. keyset = spanner.KeySet(all_=True) results = snapshot.read( - table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), - keyset=keyset + table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), keyset=keyset ) print("Results from second read:") @@ -526,11 +523,11 @@ def insert_with_dml(instance_id, database_id): def insert_singers(transaction): row_ct = transaction.execute_update( - "INSERT INTO Singers (SingerId, FirstName, LastName) VALUES " - "(12, 'Melissa', 'Garcia'), " - "(13, 'Russell', 'Morales'), " - "(14, 'Jacqueline', 'Long'), " - "(15, 'Dylan', 'Shaw')" + "INSERT INTO Singers (SingerId, FirstName, LastName) VALUES " + "(12, 'Melissa', 'Garcia'), " + "(13, 'Russell', 'Morales'), " + "(14, 'Jacqueline', 'Long'), " + "(15, 'Dylan', 'Shaw')" ) print("{} record(s) inserted.".format(row_ct)) @@ -549,9 +546,9 @@ def query_data_with_parameter(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT SingerId, FirstName, LastName FROM Singers " "WHERE LastName = $1", - params={"p1": "Garcia"}, - param_types={"p1": spanner.param_types.STRING}, + "SELECT SingerId, FirstName, LastName FROM Singers " "WHERE LastName = $1", + params={"p1": "Garcia"}, + param_types={"p1": spanner.param_types.STRING}, ) for row in results: @@ -573,7 +570,7 @@ def transfer_budget(transaction): # Transfer marketing budget from one album to another. Performed in a # single transaction to ensure that the transfer is atomic. second_album_result = transaction.execute_sql( - "SELECT MarketingBudget from Albums " "WHERE SingerId = 2 and AlbumId = 2" + "SELECT MarketingBudget from Albums " "WHERE SingerId = 2 and AlbumId = 2" ) second_album_row = list(second_album_result)[0] second_album_budget = second_album_row[0] @@ -585,8 +582,8 @@ def transfer_budget(transaction): # will be rerun by the client library if second_album_budget >= transfer_amount: first_album_result = transaction.execute_sql( - "SELECT MarketingBudget from Albums " - "WHERE SingerId = 1 and AlbumId = 1" + "SELECT MarketingBudget from Albums " + "WHERE SingerId = 1 and AlbumId = 1" ) first_album_row = list(first_album_result)[0] first_album_budget = first_album_row[0] @@ -596,26 +593,26 @@ def transfer_budget(transaction): # Update first album transaction.execute_update( - "UPDATE Albums " - "SET MarketingBudget = $1 " - "WHERE SingerId = 1 and AlbumId = 1", - params={"p1": first_album_budget}, - param_types={"p1": spanner.param_types.INT64}, + "UPDATE Albums " + "SET MarketingBudget = $1 " + "WHERE SingerId = 1 and AlbumId = 1", + params={"p1": first_album_budget}, + param_types={"p1": spanner.param_types.INT64}, ) # Update second album transaction.execute_update( - "UPDATE Albums " - "SET MarketingBudget = $1 " - "WHERE SingerId = 2 and AlbumId = 2", - params={"p1": second_album_budget}, - param_types={"p1": spanner.param_types.INT64}, + "UPDATE Albums " + "SET MarketingBudget = $1 " + "WHERE SingerId = 2 and AlbumId = 2", + params={"p1": second_album_budget}, + param_types={"p1": spanner.param_types.INT64}, ) print( - "Transferred {} from Album2's budget to Album1's".format( - transfer_amount - ) + "Transferred {} from Album2's budget to Album1's".format( + transfer_amount + ) ) database.run_in_transaction(transfer_budget) 
@@ -636,9 +633,9 @@ def read_stale_data(instance_id, database_id): with database.snapshot(exact_staleness=staleness) as snapshot: keyset = spanner.KeySet(all_=True) results = snapshot.read( - table="Albums", - columns=("SingerId", "AlbumId", "MarketingBudget"), - keyset=keyset, + table="Albums", + columns=("SingerId", "AlbumId", "MarketingBudget"), + keyset=keyset, ) for row in results: @@ -671,13 +668,12 @@ def update_data_with_timestamp(instance_id, database_id): with database.batch() as batch: batch.update( - table="Albums", - columns=( - "SingerId", "AlbumId", "MarketingBudget", "LastUpdateTime"), - values=[ - (1, 1, 1000000, spanner.COMMIT_TIMESTAMP), - (2, 2, 750000, spanner.COMMIT_TIMESTAMP), - ], + table="Albums", + columns=("SingerId", "AlbumId", "MarketingBudget", "LastUpdateTime"), + values=[ + (1, 1, 1000000, spanner.COMMIT_TIMESTAMP), + (2, 2, 750000, spanner.COMMIT_TIMESTAMP), + ], ) print("Updated data.") @@ -695,17 +691,16 @@ def add_timestamp_column(instance_id, database_id): database = instance.database(database_id) operation = database.update_ddl( - [ - "ALTER TABLE Albums ADD COLUMN LastUpdateTime SPANNER.COMMIT_TIMESTAMP"] + ["ALTER TABLE Albums ADD COLUMN LastUpdateTime SPANNER.COMMIT_TIMESTAMP"] ) print("Waiting for operation to complete...") operation.result(OPERATION_TIMEOUT_SECONDS) print( - 'Altered table "Albums" on database {} on instance {}.'.format( - database_id, instance_id - ) + 'Altered table "Albums" on database {} on instance {}.'.format( + database_id, instance_id + ) ) @@ -732,8 +727,8 @@ def query_data_with_timestamp(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT SingerId, AlbumId, MarketingBudget FROM Albums " - "ORDER BY LastUpdateTime DESC" + "SELECT SingerId, AlbumId, MarketingBudget FROM Albums " + "ORDER BY LastUpdateTime DESC" ) for row in results: @@ -752,9 +747,9 @@ def create_table_with_timestamp(instance_id, database_id): database = instance.database(database_id) request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( - database=database.name, - statements=[ - """CREATE TABLE Performances ( + database=database.name, + statements=[ + """CREATE TABLE Performances ( SingerId BIGINT NOT NULL, VenueId BIGINT NOT NULL, EventDate Date, @@ -762,7 +757,7 @@ def create_table_with_timestamp(instance_id, database_id): LastUpdateTime SPANNER.COMMIT_TIMESTAMP NOT NULL, PRIMARY KEY (SingerId, VenueId, EventDate)) INTERLEAVE IN PARENT Singers ON DELETE CASCADE""" - ], + ], ) operation = spanner_client.database_admin_api.update_database_ddl(request) @@ -770,9 +765,9 @@ def create_table_with_timestamp(instance_id, database_id): operation.result(OPERATION_TIMEOUT_SECONDS) print( - "Created Performances table on database {} on instance {}".format( - database_id, instance_id - ) + "Created Performances table on database {} on instance {}".format( + database_id, instance_id + ) ) @@ -790,14 +785,13 @@ def insert_data_with_timestamp(instance_id, database_id): with database.batch() as batch: batch.insert( - table="Performances", - columns=( - "SingerId", "VenueId", "EventDate", "Revenue", "LastUpdateTime"), - values=[ - (1, 4, "2017-10-05", 11000, spanner.COMMIT_TIMESTAMP), - (1, 19, "2017-11-02", 15000, spanner.COMMIT_TIMESTAMP), - (2, 42, "2017-12-23", 7000, spanner.COMMIT_TIMESTAMP), - ], + table="Performances", + columns=("SingerId", "VenueId", "EventDate", "Revenue", "LastUpdateTime"), + values=[ + (1, 4, "2017-10-05", 11000, spanner.COMMIT_TIMESTAMP), + (1, 19, "2017-11-02", 15000, 
spanner.COMMIT_TIMESTAMP), + (2, 42, "2017-12-23", 7000, spanner.COMMIT_TIMESTAMP), + ], ) print("Inserted data.") @@ -818,8 +812,8 @@ def insert_data_with_dml(instance_id, database_id): def insert_singers(transaction): row_ct = transaction.execute_update( - "INSERT INTO Singers (SingerId, FirstName, LastName) " - " VALUES (10, 'Virginia', 'Watson')" + "INSERT INTO Singers (SingerId, FirstName, LastName) " + " VALUES (10, 'Virginia', 'Watson')" ) print("{} record(s) inserted.".format(row_ct)) @@ -840,9 +834,9 @@ def update_data_with_dml(instance_id, database_id): def update_albums(transaction): row_ct = transaction.execute_update( - "UPDATE Albums " - "SET MarketingBudget = MarketingBudget * 2 " - "WHERE SingerId = 1 and AlbumId = 1" + "UPDATE Albums " + "SET MarketingBudget = MarketingBudget * 2 " + "WHERE SingerId = 1 and AlbumId = 1" ) print("{} record(s) updated.".format(row_ct)) @@ -863,7 +857,7 @@ def delete_data_with_dml(instance_id, database_id): def delete_singers(transaction): row_ct = transaction.execute_update( - "DELETE FROM Singers WHERE FirstName = 'Alice'" + "DELETE FROM Singers WHERE FirstName = 'Alice'" ) print("{} record(s) deleted.".format(row_ct)) @@ -885,14 +879,14 @@ def dml_write_read_transaction(instance_id, database_id): def write_then_read(transaction): # Insert record. row_ct = transaction.execute_update( - "INSERT INTO Singers (SingerId, FirstName, LastName) " - " VALUES (11, 'Timothy', 'Campbell')" + "INSERT INTO Singers (SingerId, FirstName, LastName) " + " VALUES (11, 'Timothy', 'Campbell')" ) print("{} record(s) inserted.".format(row_ct)) # Read newly inserted record. results = transaction.execute_sql( - "SELECT FirstName, LastName FROM Singers WHERE SingerId = 11" + "SELECT FirstName, LastName FROM Singers WHERE SingerId = 11" ) for result in results: print("FirstName: {}, LastName: {}".format(*result)) @@ -912,7 +906,7 @@ def update_data_with_partitioned_dml(instance_id, database_id): database = instance.database(database_id) row_ct = database.execute_partitioned_dml( - "UPDATE Albums SET MarketingBudget = 100000 WHERE SingerId > 1" + "UPDATE Albums SET MarketingBudget = 100000 WHERE SingerId > 1" ) print("{} records updated.".format(row_ct)) @@ -928,8 +922,7 @@ def delete_data_with_partitioned_dml(instance_id, database_id): instance = spanner_client.instance(instance_id) database = instance.database(database_id) - row_ct = database.execute_partitioned_dml( - "DELETE FROM Singers WHERE SingerId > 10") + row_ct = database.execute_partitioned_dml("DELETE FROM Singers WHERE SingerId > 10") print("{} record(s) deleted.".format(row_ct)) # [END spanner_postgresql_dml_partitioned_delete] @@ -948,20 +941,19 @@ def update_with_batch_dml(instance_id, database_id): database = instance.database(database_id) insert_statement = ( - "INSERT INTO Albums " - "(SingerId, AlbumId, AlbumTitle, MarketingBudget) " - "VALUES (1, 3, 'Test Album Title', 10000)" + "INSERT INTO Albums " + "(SingerId, AlbumId, AlbumTitle, MarketingBudget) " + "VALUES (1, 3, 'Test Album Title', 10000)" ) update_statement = ( - "UPDATE Albums " - "SET MarketingBudget = MarketingBudget * 2 " - "WHERE SingerId = 1 and AlbumId = 3" + "UPDATE Albums " + "SET MarketingBudget = MarketingBudget * 2 " + "WHERE SingerId = 1 and AlbumId = 3" ) def update_albums(transaction): - status, row_cts = transaction.batch_update( - [insert_statement, update_statement]) + status, row_cts = transaction.batch_update([insert_statement, update_statement]) if status.code != OK: # Do handling here. 
@@ -969,8 +961,7 @@ def update_albums(transaction): # `commit` is called by `run_in_transaction`. return - print( - "Executed {} SQL statements using Batch DML.".format(len(row_cts))) + print("Executed {} SQL statements using Batch DML.".format(len(row_cts))) database.run_in_transaction(update_albums) # [END spanner_postgresql_dml_batch_update] @@ -986,9 +977,9 @@ def create_table_with_datatypes(instance_id, database_id): database = instance.database(database_id) request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( - database=database.name, - statements=[ - """CREATE TABLE Venues ( + database=database.name, + statements=[ + """CREATE TABLE Venues ( VenueId BIGINT NOT NULL, VenueName character varying(100), VenueInfo BYTEA, @@ -998,7 +989,7 @@ def create_table_with_datatypes(instance_id, database_id): Revenue NUMERIC, LastUpdateTime SPANNER.COMMIT_TIMESTAMP NOT NULL, PRIMARY KEY (VenueId))""" - ], + ], ) operation = spanner_client.database_admin_api.update_database_ddl(request) @@ -1006,9 +997,9 @@ def create_table_with_datatypes(instance_id, database_id): operation.result(OPERATION_TIMEOUT_SECONDS) print( - "Created Venues table on database {} on instance {}".format( - database_id, instance_id - ) + "Created Venues table on database {} on instance {}".format( + database_id, instance_id + ) ) # [END spanner_postgresql_create_table_with_datatypes] @@ -1027,49 +1018,49 @@ def insert_datatypes_data(instance_id, database_id): exampleBytes3 = base64.b64encode("Hello World 3".encode()) with database.batch() as batch: batch.insert( - table="Venues", - columns=( - "VenueId", - "VenueName", - "VenueInfo", - "Capacity", - "OutdoorVenue", - "PopularityScore", - "Revenue", - "LastUpdateTime", - ), - values=[ - ( - 4, - "Venue 4", - exampleBytes1, - 1800, - False, - 0.85543, - decimal.Decimal("215100.10"), - spanner.COMMIT_TIMESTAMP, + table="Venues", + columns=( + "VenueId", + "VenueName", + "VenueInfo", + "Capacity", + "OutdoorVenue", + "PopularityScore", + "Revenue", + "LastUpdateTime", ), - ( - 19, - "Venue 19", - exampleBytes2, - 6300, - True, - 0.98716, - decimal.Decimal("1200100.00"), - spanner.COMMIT_TIMESTAMP, - ), - ( - 42, - "Venue 42", - exampleBytes3, - 3000, - False, - 0.72598, - decimal.Decimal("390650.99"), - spanner.COMMIT_TIMESTAMP, - ), - ], + values=[ + ( + 4, + "Venue 4", + exampleBytes1, + 1800, + False, + 0.85543, + decimal.Decimal("215100.10"), + spanner.COMMIT_TIMESTAMP, + ), + ( + 19, + "Venue 19", + exampleBytes2, + 6300, + True, + 0.98716, + decimal.Decimal("1200100.00"), + spanner.COMMIT_TIMESTAMP, + ), + ( + 42, + "Venue 42", + exampleBytes3, + 3000, + False, + 0.72598, + decimal.Decimal("390650.99"), + spanner.COMMIT_TIMESTAMP, + ), + ], ) print("Inserted data.") @@ -1091,10 +1082,10 @@ def query_data_with_bool(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName, OutdoorVenue FROM Venues " - "WHERE OutdoorVenue = $1", - params=param, - param_types=param_type, + "SELECT VenueId, VenueName, OutdoorVenue FROM Venues " + "WHERE OutdoorVenue = $1", + params=param, + param_types=param_type, ) for row in results: @@ -1117,9 +1108,9 @@ def query_data_with_bytes(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName FROM Venues " "WHERE VenueInfo = $1", - params=param, - param_types=param_type, + "SELECT VenueId, VenueName FROM Venues " "WHERE VenueInfo = $1", + params=param, + param_types=param_type, ) for row in results: @@ 
-1142,15 +1133,14 @@ def query_data_with_float(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName, PopularityScore FROM Venues " - "WHERE PopularityScore > $1", - params=param, - param_types=param_type, + "SELECT VenueId, VenueName, PopularityScore FROM Venues " + "WHERE PopularityScore > $1", + params=param, + param_types=param_type, ) for row in results: - print( - "VenueId: {}, VenueName: {}, PopularityScore: {}".format(*row)) + print("VenueId: {}, VenueName: {}, PopularityScore: {}".format(*row)) # [END spanner_postgresql_query_with_float_parameter] @@ -1169,9 +1159,9 @@ def query_data_with_int(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName, Capacity FROM Venues " "WHERE Capacity >= $1", - params=param, - param_types=param_type, + "SELECT VenueId, VenueName, Capacity FROM Venues " "WHERE Capacity >= $1", + params=param, + param_types=param_type, ) for row in results: @@ -1194,9 +1184,9 @@ def query_data_with_string(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName FROM Venues " "WHERE VenueName = $1", - params=param, - param_types=param_type, + "SELECT VenueId, VenueName FROM Venues " "WHERE VenueName = $1", + params=param, + param_types=param_type, ) for row in results: @@ -1217,18 +1207,19 @@ def query_data_with_timestamp_parameter(instance_id, database_id): # [END spanner_postgresql_query_with_timestamp_parameter] # Avoid time drift on the local machine. # https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4197. - example_timestamp = (datetime.datetime.utcnow() + datetime.timedelta(days=1) - ).isoformat() + "Z" + example_timestamp = ( + datetime.datetime.utcnow() + datetime.timedelta(days=1) + ).isoformat() + "Z" # [START spanner_postgresql_query_with_timestamp_parameter] param = {"p1": example_timestamp} param_type = {"p1": param_types.TIMESTAMP} with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName, LastUpdateTime FROM Venues " - "WHERE LastUpdateTime < $1", - params=param, - param_types=param_type, + "SELECT VenueId, VenueName, LastUpdateTime FROM Venues " + "WHERE LastUpdateTime < $1", + params=param, + param_types=param_type, ) for row in results: @@ -1255,13 +1246,13 @@ def update_data_with_numeric(instance_id, database_id): with database.batch() as batch: batch.update( - table="Venues", - columns=("VenueId", "Revenue"), - values=[ - (4, decimal.Decimal("35000")), - (19, decimal.Decimal("104500")), - (42, decimal.Decimal("99999999999999999999999999999.99")), - ], + table="Venues", + columns=("VenueId", "Revenue"), + values=[ + (4, decimal.Decimal("35000")), + (19, decimal.Decimal("104500")), + (42, decimal.Decimal("99999999999999999999999999999.99")), + ], ) print("Updated data.") @@ -1285,9 +1276,9 @@ def query_data_with_numeric_parameter(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, Revenue FROM Venues WHERE Revenue < $1", - params=param, - param_types=param_type, + "SELECT VenueId, Revenue FROM Venues WHERE Revenue < $1", + params=param, + param_types=param_type, ) for row in results: @@ -1301,17 +1292,17 @@ def create_client_with_query_options(instance_id, database_id): # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" spanner_client = spanner.Client( - query_options={ - "optimizer_version": 
"1", - "optimizer_statistics_package": "latest", - } + query_options={ + "optimizer_version": "1", + "optimizer_statistics_package": "latest", + } ) instance = spanner_client.instance(instance_id) database = instance.database(database_id) with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName, LastUpdateTime FROM Venues" + "SELECT VenueId, VenueName, LastUpdateTime FROM Venues" ) for row in results: @@ -1330,11 +1321,11 @@ def query_data_with_query_options(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName, LastUpdateTime FROM Venues", - query_options={ - "optimizer_version": "1", - "optimizer_statistics_package": "latest", - }, + "SELECT VenueId, VenueName, LastUpdateTime FROM Venues", + query_options={ + "optimizer_version": "1", + "optimizer_statistics_package": "latest", + }, ) for row in results: @@ -1342,15 +1333,120 @@ def query_data_with_query_options(instance_id, database_id): # [END spanner_postgresql_query_with_query_options] +# [START spanner_postgresql_jsonb_add_column] +def add_jsonb_column(instance_id, database_id): + """Adds a new JSONB column to the Venues table in the example database.""" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + + database = instance.database(database_id) + + operation = database.update_ddl( + ["ALTER TABLE Venues ADD COLUMN VenueDetails JSONB"] + ) + + print("Waiting for operation to complete...") + operation.result(OPERATION_TIMEOUT_SECONDS) + + print( + 'Altered table "Venues" on database {} on instance {}.'.format( + database_id, instance_id + ) + ) + + +# [END spanner_postgresql_jsonb_add_column] + + +# [START spanner_postgresql_jsonb_update_data] +def update_data_with_jsonb(instance_id, database_id): + """Updates Venues tables in the database with the JSONB + column. + + This updates the `VenueDetails` column which must be created before + running this sample. You can add the column by running the + `add_jsonb_column` sample or by running this DDL statement + against your database: + + ALTER TABLE Venues ADD COLUMN VenueDetails JSONB + """ + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + + database = instance.database(database_id) + """ + PG JSONB takes the last value in the case of duplicate keys. + PG JSONB sorts first by key length and then lexicographically with + equivalent key length. 
+ """ + with database.batch() as batch: + batch.update( + table="Venues", + columns=("VenueId", "VenueDetails"), + values=[ + ( + 4, + JsonObject( + [ + JsonObject({"name": None, "open": True}), + JsonObject( + {"name": "room 2", "open": False, "name": "room 3"} + ), + ] + ), + ), + (19, JsonObject(rating=9, open=True)), + ( + 42, + JsonObject( + { + "name": None, + "open": {"Monday": True, "Tuesday": False}, + "tags": ["large", "airy"], + } + ), + ), + ], + ) + + print("Updated data.") + + +# [END spanner_postgresql_jsonb_update_data] + + +def query_data_with_jsonb_parameter(instance_id, database_id): + """Queries sample data using SQL with a JSON parameter.""" + # [START spanner_postgresql_jsonb_query_parameter] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + param = {"p1": 2} + param_type = {"p1": param_types.INT64} + + with database.snapshot() as snapshot: + results = snapshot.execute_sql( + "SELECT venueid, venuedetails FROM Venues" + + " WHERE CAST(venuedetails ->> 'rating' AS INTEGER) > $1", + params=param, + param_types=param_type, + ) + + for row in results: + print("VenueId: {}, VenueDetails: {}".format(*row)) + # [END spanner_postgresql_jsonb_query_parameter] + + if __name__ == "__main__": # noqa: C901 parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter ) parser.add_argument("instance_id", help="Your Cloud Spanner instance ID.") parser.add_argument( - "--database-id", help="Your Cloud Spanner database ID.", - default="example_db" + "--database-id", help="Your Cloud Spanner database ID.", default="example_db" ) subparsers = parser.add_subparsers(dest="command") @@ -1364,92 +1460,78 @@ def query_data_with_query_options(instance_id, database_id): subparsers.add_parser("add_column", help=add_column.__doc__) subparsers.add_parser("update_data", help=update_data.__doc__) subparsers.add_parser( - "query_data_with_new_column", help=query_data_with_new_column.__doc__ + "query_data_with_new_column", help=query_data_with_new_column.__doc__ ) - subparsers.add_parser("read_write_transaction", - help=read_write_transaction.__doc__) - subparsers.add_parser("read_only_transaction", - help=read_only_transaction.__doc__) + subparsers.add_parser("read_write_transaction", help=read_write_transaction.__doc__) + subparsers.add_parser("read_only_transaction", help=read_only_transaction.__doc__) subparsers.add_parser("add_index", help=add_index.__doc__) - subparsers.add_parser("read_data_with_index", - help=read_data_with_index.__doc__) + subparsers.add_parser("read_data_with_index", help=read_data_with_index.__doc__) subparsers.add_parser("add_storing_index", help=add_storing_index.__doc__) - subparsers.add_parser("read_data_with_storing_index", - help=read_data_with_storing_index.__doc__) subparsers.add_parser( - "create_table_with_timestamp", help=create_table_with_timestamp.__doc__ + "read_data_with_storing_index", help=read_data_with_storing_index.__doc__ + ) + subparsers.add_parser( + "create_table_with_timestamp", help=create_table_with_timestamp.__doc__ ) subparsers.add_parser( - "insert_data_with_timestamp", help=insert_data_with_timestamp.__doc__ + "insert_data_with_timestamp", help=insert_data_with_timestamp.__doc__ ) - subparsers.add_parser("add_timestamp_column", - 
help=add_timestamp_column.__doc__) + subparsers.add_parser("add_timestamp_column", help=add_timestamp_column.__doc__) subparsers.add_parser( - "update_data_with_timestamp", help=update_data_with_timestamp.__doc__ + "update_data_with_timestamp", help=update_data_with_timestamp.__doc__ ) subparsers.add_parser( - "query_data_with_timestamp", help=query_data_with_timestamp.__doc__ + "query_data_with_timestamp", help=query_data_with_timestamp.__doc__ ) - subparsers.add_parser("insert_data_with_dml", - help=insert_data_with_dml.__doc__) - subparsers.add_parser("update_data_with_dml", - help=update_data_with_dml.__doc__) - subparsers.add_parser("delete_data_with_dml", - help=delete_data_with_dml.__doc__) + subparsers.add_parser("insert_data_with_dml", help=insert_data_with_dml.__doc__) + subparsers.add_parser("update_data_with_dml", help=update_data_with_dml.__doc__) + subparsers.add_parser("delete_data_with_dml", help=delete_data_with_dml.__doc__) subparsers.add_parser( - "dml_write_read_transaction", help=dml_write_read_transaction.__doc__ + "dml_write_read_transaction", help=dml_write_read_transaction.__doc__ ) subparsers.add_parser("insert_with_dml", help=insert_with_dml.__doc__) subparsers.add_parser( - "query_data_with_parameter", help=query_data_with_parameter.__doc__ + "query_data_with_parameter", help=query_data_with_parameter.__doc__ ) subparsers.add_parser( - "write_with_dml_transaction", help=write_with_dml_transaction.__doc__ + "write_with_dml_transaction", help=write_with_dml_transaction.__doc__ ) subparsers.add_parser( - "update_data_with_partitioned_dml", - help=update_data_with_partitioned_dml.__doc__, + "update_data_with_partitioned_dml", + help=update_data_with_partitioned_dml.__doc__, ) subparsers.add_parser( - "delete_data_with_partitioned_dml", - help=delete_data_with_partitioned_dml.__doc__, + "delete_data_with_partitioned_dml", + help=delete_data_with_partitioned_dml.__doc__, ) - subparsers.add_parser("update_with_batch_dml", - help=update_with_batch_dml.__doc__) + subparsers.add_parser("update_with_batch_dml", help=update_with_batch_dml.__doc__) subparsers.add_parser( - "create_table_with_datatypes", help=create_table_with_datatypes.__doc__ + "create_table_with_datatypes", help=create_table_with_datatypes.__doc__ ) - subparsers.add_parser("insert_datatypes_data", - help=insert_datatypes_data.__doc__) - subparsers.add_parser("query_data_with_bool", - help=query_data_with_bool.__doc__) - subparsers.add_parser("query_data_with_bytes", - help=query_data_with_bytes.__doc__) - subparsers.add_parser("query_data_with_float", - help=query_data_with_float.__doc__) - subparsers.add_parser("query_data_with_int", - help=query_data_with_int.__doc__) - subparsers.add_parser("query_data_with_string", - help=query_data_with_string.__doc__) + subparsers.add_parser("insert_datatypes_data", help=insert_datatypes_data.__doc__) + subparsers.add_parser("query_data_with_bool", help=query_data_with_bool.__doc__) + subparsers.add_parser("query_data_with_bytes", help=query_data_with_bytes.__doc__) + subparsers.add_parser("query_data_with_float", help=query_data_with_float.__doc__) + subparsers.add_parser("query_data_with_int", help=query_data_with_int.__doc__) + subparsers.add_parser("query_data_with_string", help=query_data_with_string.__doc__) subparsers.add_parser( - "query_data_with_timestamp_parameter", - help=query_data_with_timestamp_parameter.__doc__, + "query_data_with_timestamp_parameter", + help=query_data_with_timestamp_parameter.__doc__, ) subparsers.add_parser( - 
"update_data_with_numeric", - help=update_data_with_numeric.__doc__, + "update_data_with_numeric", + help=update_data_with_numeric.__doc__, ) subparsers.add_parser( - "query_data_with_numeric_parameter", - help=query_data_with_numeric_parameter.__doc__, + "query_data_with_numeric_parameter", + help=query_data_with_numeric_parameter.__doc__, ) subparsers.add_parser( - "query_data_with_query_options", - help=query_data_with_query_options.__doc__ + "query_data_with_query_options", help=query_data_with_query_options.__doc__ ) subparsers.add_parser( - "create_client_with_query_options", - help=create_client_with_query_options.__doc__, + "create_client_with_query_options", + help=create_client_with_query_options.__doc__, ) args = parser.parse_args() diff --git a/samples/samples/pg_snippets_test.py b/samples/samples/pg_snippets_test.py index 2716880832..5b185838ac 100644 --- a/samples/samples/pg_snippets_test.py +++ b/samples/samples/pg_snippets_test.py @@ -188,8 +188,7 @@ def test_read_write_transaction(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["add_column"]) def test_query_data_with_new_column(capsys, instance_id, sample_database): - snippets.query_data_with_new_column(instance_id, - sample_database.database_id) + snippets.query_data_with_new_column(instance_id, sample_database.database_id) out, _ = capsys.readouterr() assert "SingerId: 1, AlbumId: 1, MarketingBudget: 300000" in out assert "SingerId: 2, AlbumId: 2, MarketingBudget: 300000" in out @@ -220,8 +219,7 @@ def test_add_storing_index(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["add_storing_index"]) def test_read_data_with_storing_index(capsys, instance_id, sample_database): - snippets.read_data_with_storing_index(instance_id, - sample_database.database_id) + snippets.read_data_with_storing_index(instance_id, sample_database.database_id) out, _ = capsys.readouterr() assert "300000" in out @@ -243,8 +241,7 @@ def test_add_timestamp_column(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["add_timestamp_column"]) def test_update_data_with_timestamp(capsys, instance_id, sample_database): - snippets.update_data_with_timestamp(instance_id, - sample_database.database_id) + snippets.update_data_with_timestamp(instance_id, sample_database.database_id) out, _ = capsys.readouterr() assert "Updated data" in out @@ -259,16 +256,14 @@ def test_query_data_with_timestamp(capsys, instance_id, sample_database): @pytest.mark.dependency(name="create_table_with_timestamp") def test_create_table_with_timestamp(capsys, instance_id, sample_database): - snippets.create_table_with_timestamp(instance_id, - sample_database.database_id) + snippets.create_table_with_timestamp(instance_id, sample_database.database_id) out, _ = capsys.readouterr() assert "Created Performances table on database" in out @pytest.mark.dependency(depends=["create_table_with_timestamp"]) def test_insert_data_with_timestamp(capsys, instance_id, sample_database): - snippets.insert_data_with_timestamp(instance_id, - sample_database.database_id) + snippets.insert_data_with_timestamp(instance_id, sample_database.database_id) out, _ = capsys.readouterr() assert "Inserted data." 
in out @@ -296,8 +291,7 @@ def test_delete_data_with_dml(capsys, instance_id, sample_database): @pytest.mark.dependency(name="dml_write_read_transaction") def test_dml_write_read_transaction(capsys, instance_id, sample_database): - snippets.dml_write_read_transaction(instance_id, - sample_database.database_id) + snippets.dml_write_read_transaction(instance_id, sample_database.database_id) out, _ = capsys.readouterr() assert "1 record(s) inserted." in out assert "FirstName: Timothy, LastName: Campbell" in out @@ -319,24 +313,21 @@ def test_query_data_with_parameter(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["add_column"]) def test_write_with_dml_transaction(capsys, instance_id, sample_database): - snippets.write_with_dml_transaction(instance_id, - sample_database.database_id) + snippets.write_with_dml_transaction(instance_id, sample_database.database_id) out, _ = capsys.readouterr() assert "Transferred 200000 from Album2's budget to Album1's" in out @pytest.mark.dependency(depends=["add_column"]) def update_data_with_partitioned_dml(capsys, instance_id, sample_database): - snippets.update_data_with_partitioned_dml(instance_id, - sample_database.database_id) + snippets.update_data_with_partitioned_dml(instance_id, sample_database.database_id) out, _ = capsys.readouterr() assert "3 record(s) updated" in out @pytest.mark.dependency(depends=["insert_with_dml"]) def test_delete_data_with_partitioned_dml(capsys, instance_id, sample_database): - snippets.delete_data_with_partitioned_dml(instance_id, - sample_database.database_id) + snippets.delete_data_with_partitioned_dml(instance_id, sample_database.database_id) out, _ = capsys.readouterr() assert "5 record(s) deleted" in out @@ -350,15 +341,14 @@ def test_update_with_batch_dml(capsys, instance_id, sample_database): @pytest.mark.dependency(name="create_table_with_datatypes") def test_create_table_with_datatypes(capsys, instance_id, sample_database): - snippets.create_table_with_datatypes(instance_id, - sample_database.database_id) + snippets.create_table_with_datatypes(instance_id, sample_database.database_id) out, _ = capsys.readouterr() assert "Created Venues table on database" in out @pytest.mark.dependency( - name="insert_datatypes_data", - depends=["create_table_with_datatypes"], + name="insert_datatypes_data", + depends=["create_table_with_datatypes"], ) def test_insert_datatypes_data(capsys, instance_id, sample_database): snippets.insert_datatypes_data(instance_id, sample_database.database_id) @@ -411,19 +401,16 @@ def test_update_data_with_numeric(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["insert_datatypes_data"]) -def test_query_data_with_numeric_parameter(capsys, instance_id, - sample_database): - snippets.query_data_with_numeric_parameter(instance_id, - sample_database.database_id) +def test_query_data_with_numeric_parameter(capsys, instance_id, sample_database): + snippets.query_data_with_numeric_parameter(instance_id, sample_database.database_id) out, _ = capsys.readouterr() assert "VenueId: 4, Revenue: 35000" in out @pytest.mark.dependency(depends=["insert_datatypes_data"]) -def test_query_data_with_timestamp_parameter(capsys, instance_id, - sample_database): +def test_query_data_with_timestamp_parameter(capsys, instance_id, sample_database): snippets.query_data_with_timestamp_parameter( - instance_id, sample_database.database_id + instance_id, sample_database.database_id ) out, _ = capsys.readouterr() assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out @@ 
-433,8 +420,7 @@ def test_query_data_with_timestamp_parameter(capsys, instance_id, @pytest.mark.dependency(depends=["insert_datatypes_data"]) def test_query_data_with_query_options(capsys, instance_id, sample_database): - snippets.query_data_with_query_options(instance_id, - sample_database.database_id) + snippets.query_data_with_query_options(instance_id, sample_database.database_id) out, _ = capsys.readouterr() assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out assert "VenueId: 19, VenueName: Venue 19, LastUpdateTime:" in out @@ -443,9 +429,30 @@ def test_query_data_with_query_options(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["insert_datatypes_data"]) def test_create_client_with_query_options(capsys, instance_id, sample_database): - snippets.create_client_with_query_options(instance_id, - sample_database.database_id) + snippets.create_client_with_query_options(instance_id, sample_database.database_id) out, _ = capsys.readouterr() assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out assert "VenueId: 19, VenueName: Venue 19, LastUpdateTime:" in out assert "VenueId: 42, VenueName: Venue 42, LastUpdateTime:" in out + + +@pytest.mark.dependency(name="add_jsonb_column", depends=["insert_datatypes_data"]) +def test_add_jsonb_column(capsys, instance_id, sample_database): + snippets.add_jsonb_column(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "Waiting for operation to complete..." in out + assert 'Altered table "Venues" on database ' in out + + +@pytest.mark.dependency(name="update_data_with_jsonb", depends=["add_jsonb_column"]) +def test_update_data_with_jsonb(capsys, instance_id, sample_database): + snippets.update_data_with_jsonb(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "Updated data." in out + + +@pytest.mark.dependency(depends=["update_data_with_jsonb"]) +def test_query_data_with_jsonb_parameter(capsys, instance_id, sample_database): + snippets.query_data_with_jsonb_parameter(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "VenueId: 19, VenueDetails: {'open': True, 'rating': 9}" in out From 4910f592840332cfad72c0b416a6bd828c0ac8cd Mon Sep 17 00:00:00 2001 From: Astha Mohta Date: Thu, 27 Oct 2022 03:32:10 +0530 Subject: [PATCH 04/18] linting --- samples/samples/pg_snippets.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index f95d28afd9..678cbc4435 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -1376,9 +1376,10 @@ def update_data_with_jsonb(instance_id, database_id): database = instance.database(database_id) """ PG JSONB takes the last value in the case of duplicate keys. - PG JSONB sorts first by key length and then lexicographically with + PG JSONB sorts first by key length and then lexicographically with equivalent key length. 
""" + with database.batch() as batch: batch.update( table="Venues", @@ -1390,7 +1391,7 @@ def update_data_with_jsonb(instance_id, database_id): [ JsonObject({"name": None, "open": True}), JsonObject( - {"name": "room 2", "open": False, "name": "room 3"} + {"name": "room 2", "open": False} ), ] ), From 856381590e06ef38f8254ced047d011a2fe46f77 Mon Sep 17 00:00:00 2001 From: Astha Mohta Date: Thu, 27 Oct 2022 10:37:29 +0530 Subject: [PATCH 05/18] linting --- samples/samples/pg_snippets.py | 20 +++++++++++++++++++- tests/system/conftest.py | 4 +++- tests/system/test_session_api.py | 5 ++++- 3 files changed, 26 insertions(+), 3 deletions(-) diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index 678cbc4435..c976af5db1 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -1417,7 +1417,7 @@ def update_data_with_jsonb(instance_id, database_id): def query_data_with_jsonb_parameter(instance_id, database_id): - """Queries sample data using SQL with a JSON parameter.""" + """Queries sample data using SQL with a JSONB parameter.""" # [START spanner_postgresql_jsonb_query_parameter] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" @@ -1534,6 +1534,18 @@ def query_data_with_jsonb_parameter(instance_id, database_id): "create_client_with_query_options", help=create_client_with_query_options.__doc__, ) + subparsers.add_parser( + "add_jsonb_column", + help=add_jsonb_column.__doc__, + ) + subparsers.add_parser( + "update_data_with_jsonb", + help=update_data_with_jsonb.__doc__, + ) + subparsers.add_parser( + "query_data_with_jsonb_parameter", + help=query_data_with_jsonb_parameter.__doc__, + ) args = parser.parse_args() @@ -1623,3 +1635,9 @@ def query_data_with_jsonb_parameter(instance_id, database_id): query_data_with_query_options(args.instance_id, args.database_id) elif args.command == "create_client_with_query_options": create_client_with_query_options(args.instance_id, args.database_id) + elif args.command == "add_jsonb_column": + add_jsonb_column(args.instance_id, args.database_id) + elif args.command == "update_data_with_jsonb": + update_data_with_jsonb(args.instance_id, args.database_id) + elif args.command == "query_data_with_jsonb_parameter": + query_data_with_jsonb_parameter(args.instance_id, args.database_id) diff --git a/tests/system/conftest.py b/tests/system/conftest.py index d687837d56..26404ef346 100644 --- a/tests/system/conftest.py +++ b/tests/system/conftest.py @@ -86,7 +86,9 @@ def spanner_client(): ) else: return spanner_v1.Client( - client_options={"api_endpoint": "staging-wrenchworks.sandbox.googleapis.com"} + client_options={ + "api_endpoint": "staging-wrenchworks.sandbox.googleapis.com" + } ) # use google.auth.default credentials diff --git a/tests/system/test_session_api.py b/tests/system/test_session_api.py index 2b8c8a4819..d91bed938a 100644 --- a/tests/system/test_session_api.py +++ b/tests/system/test_session_api.py @@ -2106,7 +2106,10 @@ def test_execute_sql_w_json_bindings( [JSON_1, JSON_2], ) -def test_execute_sql_w_jsonb_bindings(not_emulator, not_google_standard_sql, sessions_database, database_dialect): + +def test_execute_sql_w_jsonb_bindings( + not_emulator, not_google_standard_sql, sessions_database, database_dialect +): _bind_test_helper( sessions_database, database_dialect, From 427e9d1b80a654381c9c4dc9355e5cd72ae1008b Mon Sep 17 00:00:00 2001 From: Astha Mohta Date: Wed, 2 Nov 2022 18:10:26 +0530 Subject: [PATCH 06/18] Revert "linting" This reverts commit 
856381590e06ef38f8254ced047d011a2fe46f77. --- samples/samples/pg_snippets.py | 20 +------------------- tests/system/conftest.py | 4 +--- tests/system/test_session_api.py | 5 +---- 3 files changed, 3 insertions(+), 26 deletions(-) diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index c976af5db1..678cbc4435 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -1417,7 +1417,7 @@ def update_data_with_jsonb(instance_id, database_id): def query_data_with_jsonb_parameter(instance_id, database_id): - """Queries sample data using SQL with a JSONB parameter.""" + """Queries sample data using SQL with a JSON parameter.""" # [START spanner_postgresql_jsonb_query_parameter] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" @@ -1534,18 +1534,6 @@ def query_data_with_jsonb_parameter(instance_id, database_id): "create_client_with_query_options", help=create_client_with_query_options.__doc__, ) - subparsers.add_parser( - "add_jsonb_column", - help=add_jsonb_column.__doc__, - ) - subparsers.add_parser( - "update_data_with_jsonb", - help=update_data_with_jsonb.__doc__, - ) - subparsers.add_parser( - "query_data_with_jsonb_parameter", - help=query_data_with_jsonb_parameter.__doc__, - ) args = parser.parse_args() @@ -1635,9 +1623,3 @@ def query_data_with_jsonb_parameter(instance_id, database_id): query_data_with_query_options(args.instance_id, args.database_id) elif args.command == "create_client_with_query_options": create_client_with_query_options(args.instance_id, args.database_id) - elif args.command == "add_jsonb_column": - add_jsonb_column(args.instance_id, args.database_id) - elif args.command == "update_data_with_jsonb": - update_data_with_jsonb(args.instance_id, args.database_id) - elif args.command == "query_data_with_jsonb_parameter": - query_data_with_jsonb_parameter(args.instance_id, args.database_id) diff --git a/tests/system/conftest.py b/tests/system/conftest.py index 26404ef346..d687837d56 100644 --- a/tests/system/conftest.py +++ b/tests/system/conftest.py @@ -86,9 +86,7 @@ def spanner_client(): ) else: return spanner_v1.Client( - client_options={ - "api_endpoint": "staging-wrenchworks.sandbox.googleapis.com" - } + client_options={"api_endpoint": "staging-wrenchworks.sandbox.googleapis.com"} ) # use google.auth.default credentials diff --git a/tests/system/test_session_api.py b/tests/system/test_session_api.py index d91bed938a..2b8c8a4819 100644 --- a/tests/system/test_session_api.py +++ b/tests/system/test_session_api.py @@ -2106,10 +2106,7 @@ def test_execute_sql_w_json_bindings( [JSON_1, JSON_2], ) - -def test_execute_sql_w_jsonb_bindings( - not_emulator, not_google_standard_sql, sessions_database, database_dialect -): +def test_execute_sql_w_jsonb_bindings(not_emulator, not_google_standard_sql, sessions_database, database_dialect): _bind_test_helper( sessions_database, database_dialect, From 1ed17c5a92cb4095cc65cd31a4a703759f3a1e27 Mon Sep 17 00:00:00 2001 From: Astha Mohta Date: Wed, 2 Nov 2022 18:10:42 +0530 Subject: [PATCH 07/18] Revert "linting" This reverts commit 4910f592840332cfad72c0b416a6bd828c0ac8cd. 
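The hunk below restores the duplicate-key form of the JSONB example. Worth keeping in mind when reading it: a Python dict literal keeps only the last occurrence of a repeated key, so the collapse happens client-side before the value is ever serialized, while PG JSONB's own last-value-wins rule is a separate, server-side behavior. A minimal illustration in plain Python:

    # Duplicate keys in a dict literal are collapsed by Python itself;
    # the last value wins before anything is sent to Spanner.
    details = {"name": "room 2", "open": False, "name": "room 3"}
    print(details)  # {'name': 'room 3', 'open': False}
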
--- samples/samples/pg_snippets.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index 678cbc4435..f95d28afd9 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -1376,10 +1376,9 @@ def update_data_with_jsonb(instance_id, database_id): database = instance.database(database_id) """ PG JSONB takes the last value in the case of duplicate keys. - PG JSONB sorts first by key length and then lexicographically with + PG JSONB sorts first by key length and then lexicographically with equivalent key length. """ - with database.batch() as batch: batch.update( table="Venues", @@ -1391,7 +1390,7 @@ def update_data_with_jsonb(instance_id, database_id): [ JsonObject({"name": None, "open": True}), JsonObject( - {"name": "room 2", "open": False} + {"name": "room 2", "open": False, "name": "room 3"} ), ] ), From 38feacb1b95c1f8fbd8d16474118c89175caf6c9 Mon Sep 17 00:00:00 2001 From: Astha Mohta Date: Wed, 2 Nov 2022 18:11:10 +0530 Subject: [PATCH 08/18] Revert "samples" This reverts commit ba80e5aab9da643882a2b8320737aa88b7c4c821. --- samples/samples/pg_snippets.py | 692 ++++++++++++---------------- samples/samples/pg_snippets_test.py | 73 ++- 2 files changed, 338 insertions(+), 427 deletions(-) diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index f95d28afd9..367690dbd8 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -28,7 +28,6 @@ from google.cloud import spanner, spanner_admin_database_v1 from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect from google.cloud.spanner_v1 import param_types -from google.cloud.spanner_v1.data_types import JsonObject OPERATION_TIMEOUT_SECONDS = 240 @@ -39,19 +38,19 @@ def create_instance(instance_id): spanner_client = spanner.Client() config_name = "{}/instanceConfigs/regional-us-central1".format( - spanner_client.project_name + spanner_client.project_name ) instance = spanner_client.instance( - instance_id, - configuration_name=config_name, - display_name="This is a display name.", - node_count=1, - labels={ - "cloud_spanner_samples": "true", - "sample_name": "snippets-create_instance-explicit", - "created": str(int(time.time())), - }, + instance_id, + configuration_name=config_name, + display_name="This is a display name.", + node_count=1, + labels={ + "cloud_spanner_samples": "true", + "sample_name": "snippets-create_instance-explicit", + "created": str(int(time.time())), + }, ) operation = instance.create() @@ -72,8 +71,8 @@ def create_database(instance_id, database_id): instance = spanner_client.instance(instance_id) database = instance.database( - database_id, - database_dialect=DatabaseDialect.POSTGRESQL, + database_id, + database_dialect=DatabaseDialect.POSTGRESQL, ) operation = database.create() @@ -88,22 +87,22 @@ def create_database(instance_id, database_id): def create_table_using_ddl(database_name): spanner_client = spanner.Client() request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( - database=database_name, - statements=[ - """CREATE TABLE Singers ( + database=database_name, + statements=[ + """CREATE TABLE Singers ( SingerId bigint NOT NULL, FirstName character varying(1024), LastName character varying(1024), SingerInfo bytea, PRIMARY KEY (SingerId) )""", - """CREATE TABLE Albums ( + """CREATE TABLE Albums ( SingerId bigint NOT NULL, AlbumId bigint NOT NULL, AlbumTitle character varying(1024), PRIMARY KEY (SingerId, AlbumId) ) INTERLEAVE IN 
PARENT Singers ON DELETE CASCADE""", - ], + ], ) operation = spanner_client.database_admin_api.update_database_ddl(request) operation.result(OPERATION_TIMEOUT_SECONDS) @@ -125,27 +124,27 @@ def insert_data(instance_id, database_id): with database.batch() as batch: batch.insert( - table="Singers", - columns=("SingerId", "FirstName", "LastName"), - values=[ - (1, "Marc", "Richards"), - (2, "Catalina", "Smith"), - (3, "Alice", "Trentor"), - (4, "Lea", "Martin"), - (5, "David", "Lomond"), - ], + table="Singers", + columns=("SingerId", "FirstName", "LastName"), + values=[ + (1, "Marc", "Richards"), + (2, "Catalina", "Smith"), + (3, "Alice", "Trentor"), + (4, "Lea", "Martin"), + (5, "David", "Lomond"), + ], ) batch.insert( - table="Albums", - columns=("SingerId", "AlbumId", "AlbumTitle"), - values=[ - (1, 1, "Total Junk"), - (1, 2, "Go, Go, Go"), - (2, 1, "Green"), - (2, 2, "Forever Hold Your Peace"), - (2, 3, "Terrified"), - ], + table="Albums", + columns=("SingerId", "AlbumId", "AlbumTitle"), + values=[ + (1, 1, "Total Junk"), + (1, 2, "Go, Go, Go"), + (2, 1, "Green"), + (2, 2, "Forever Hold Your Peace"), + (2, 3, "Terrified"), + ], ) print("Inserted data.") @@ -196,7 +195,7 @@ def query_data(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT SingerId, AlbumId, AlbumTitle FROM Albums" + "SELECT SingerId, AlbumId, AlbumTitle FROM Albums" ) for row in results: @@ -216,7 +215,8 @@ def read_data(instance_id, database_id): with database.snapshot() as snapshot: keyset = spanner.KeySet(all_=True) results = snapshot.read( - table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), keyset=keyset + table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), + keyset=keyset ) for row in results: @@ -234,7 +234,7 @@ def add_column(instance_id, database_id): database = instance.database(database_id) operation = database.update_ddl( - ["ALTER TABLE Albums ADD COLUMN MarketingBudget BIGINT"] + ["ALTER TABLE Albums ADD COLUMN MarketingBudget BIGINT"] ) print("Waiting for operation to complete...") @@ -263,9 +263,9 @@ def update_data(instance_id, database_id): with database.batch() as batch: batch.update( - table="Albums", - columns=("SingerId", "AlbumId", "MarketingBudget"), - values=[(1, 1, 100000), (2, 2, 500000)], + table="Albums", + columns=("SingerId", "AlbumId", "MarketingBudget"), + values=[(1, 1, 100000), (2, 2, 500000)], ) print("Updated data.") @@ -294,10 +294,10 @@ def update_albums(transaction): # Read the second album budget. second_album_keyset = spanner.KeySet(keys=[(2, 2)]) second_album_result = transaction.read( - table="Albums", - columns=("MarketingBudget",), - keyset=second_album_keyset, - limit=1, + table="Albums", + columns=("MarketingBudget",), + keyset=second_album_keyset, + limit=1, ) second_album_row = list(second_album_result)[0] second_album_budget = second_album_row[0] @@ -307,15 +307,16 @@ def update_albums(transaction): if second_album_budget < transfer_amount: # Raising an exception will automatically roll back the # transaction. - raise ValueError("The second album doesn't have enough funds to transfer") + raise ValueError( + "The second album doesn't have enough funds to transfer") # Read the first album's budget. 
first_album_keyset = spanner.KeySet(keys=[(1, 1)]) first_album_result = transaction.read( - table="Albums", - columns=("MarketingBudget",), - keyset=first_album_keyset, - limit=1, + table="Albums", + columns=("MarketingBudget",), + keyset=first_album_keyset, + limit=1, ) first_album_row = list(first_album_result)[0] first_album_budget = first_album_row[0] @@ -324,15 +325,15 @@ def update_albums(transaction): second_album_budget -= transfer_amount first_album_budget += transfer_amount print( - "Setting first album's budget to {} and the second album's " - "budget to {}.".format(first_album_budget, second_album_budget) + "Setting first album's budget to {} and the second album's " + "budget to {}.".format(first_album_budget, second_album_budget) ) # Update the rows. transaction.update( - table="Albums", - columns=("SingerId", "AlbumId", "MarketingBudget"), - values=[(1, 1, first_album_budget), (2, 2, second_album_budget)], + table="Albums", + columns=("SingerId", "AlbumId", "MarketingBudget"), + values=[(1, 1, first_album_budget), (2, 2, second_album_budget)], ) database.run_in_transaction(update_albums) @@ -359,7 +360,7 @@ def query_data_with_new_column(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT SingerId, AlbumId, MarketingBudget FROM Albums" + "SELECT SingerId, AlbumId, MarketingBudget FROM Albums" ) for row in results: @@ -377,7 +378,7 @@ def add_index(instance_id, database_id): database = instance.database(database_id) operation = database.update_ddl( - ["CREATE INDEX AlbumsByAlbumTitle ON Albums(AlbumTitle)"] + ["CREATE INDEX AlbumsByAlbumTitle ON Albums(AlbumTitle)"] ) print("Waiting for operation to complete...") @@ -406,10 +407,10 @@ def read_data_with_index(instance_id, database_id): with database.snapshot() as snapshot: keyset = spanner.KeySet(all_=True) results = snapshot.read( - table="Albums", - columns=("AlbumId", "AlbumTitle"), - keyset=keyset, - index="AlbumsByAlbumTitle", + table="Albums", + columns=("AlbumId", "AlbumTitle"), + keyset=keyset, + index="AlbumsByAlbumTitle", ) for row in results: @@ -427,10 +428,10 @@ def add_storing_index(instance_id, database_id): database = instance.database(database_id) operation = database.update_ddl( - [ - "CREATE INDEX AlbumsByAlbumTitle2 ON Albums(AlbumTitle)" - "INCLUDE (MarketingBudget)" - ] + [ + "CREATE INDEX AlbumsByAlbumTitle2 ON Albums(AlbumTitle)" + "INCLUDE (MarketingBudget)" + ] ) print("Waiting for operation to complete...") @@ -462,14 +463,15 @@ def read_data_with_storing_index(instance_id, database_id): with database.snapshot() as snapshot: keyset = spanner.KeySet(all_=True) results = snapshot.read( - table="Albums", - columns=("AlbumId", "AlbumTitle", "MarketingBudget"), - keyset=keyset, - index="AlbumsByAlbumTitle2", + table="Albums", + columns=("AlbumId", "AlbumTitle", "MarketingBudget"), + keyset=keyset, + index="AlbumsByAlbumTitle2", ) for row in results: - print("AlbumId: {}, AlbumTitle: {}, " "MarketingBudget: {}".format(*row)) + print("AlbumId: {}, AlbumTitle: {}, " "MarketingBudget: {}".format( + *row)) # [END spanner_postgresql_read_data_with_storing_index] @@ -489,7 +491,7 @@ def read_only_transaction(instance_id, database_id): with database.snapshot(multi_use=True) as snapshot: # Read using SQL. 
results = snapshot.execute_sql( - "SELECT SingerId, AlbumId, AlbumTitle FROM Albums" + "SELECT SingerId, AlbumId, AlbumTitle FROM Albums" ) print("Results from first read:") @@ -501,7 +503,8 @@ def read_only_transaction(instance_id, database_id): # return the same data. keyset = spanner.KeySet(all_=True) results = snapshot.read( - table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), keyset=keyset + table="Albums", columns=("SingerId", "AlbumId", "AlbumTitle"), + keyset=keyset ) print("Results from second read:") @@ -523,11 +526,11 @@ def insert_with_dml(instance_id, database_id): def insert_singers(transaction): row_ct = transaction.execute_update( - "INSERT INTO Singers (SingerId, FirstName, LastName) VALUES " - "(12, 'Melissa', 'Garcia'), " - "(13, 'Russell', 'Morales'), " - "(14, 'Jacqueline', 'Long'), " - "(15, 'Dylan', 'Shaw')" + "INSERT INTO Singers (SingerId, FirstName, LastName) VALUES " + "(12, 'Melissa', 'Garcia'), " + "(13, 'Russell', 'Morales'), " + "(14, 'Jacqueline', 'Long'), " + "(15, 'Dylan', 'Shaw')" ) print("{} record(s) inserted.".format(row_ct)) @@ -546,9 +549,9 @@ def query_data_with_parameter(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT SingerId, FirstName, LastName FROM Singers " "WHERE LastName = $1", - params={"p1": "Garcia"}, - param_types={"p1": spanner.param_types.STRING}, + "SELECT SingerId, FirstName, LastName FROM Singers " "WHERE LastName = $1", + params={"p1": "Garcia"}, + param_types={"p1": spanner.param_types.STRING}, ) for row in results: @@ -570,7 +573,7 @@ def transfer_budget(transaction): # Transfer marketing budget from one album to another. Performed in a # single transaction to ensure that the transfer is atomic. second_album_result = transaction.execute_sql( - "SELECT MarketingBudget from Albums " "WHERE SingerId = 2 and AlbumId = 2" + "SELECT MarketingBudget from Albums " "WHERE SingerId = 2 and AlbumId = 2" ) second_album_row = list(second_album_result)[0] second_album_budget = second_album_row[0] @@ -582,8 +585,8 @@ def transfer_budget(transaction): # will be rerun by the client library if second_album_budget >= transfer_amount: first_album_result = transaction.execute_sql( - "SELECT MarketingBudget from Albums " - "WHERE SingerId = 1 and AlbumId = 1" + "SELECT MarketingBudget from Albums " + "WHERE SingerId = 1 and AlbumId = 1" ) first_album_row = list(first_album_result)[0] first_album_budget = first_album_row[0] @@ -593,26 +596,26 @@ def transfer_budget(transaction): # Update first album transaction.execute_update( - "UPDATE Albums " - "SET MarketingBudget = $1 " - "WHERE SingerId = 1 and AlbumId = 1", - params={"p1": first_album_budget}, - param_types={"p1": spanner.param_types.INT64}, + "UPDATE Albums " + "SET MarketingBudget = $1 " + "WHERE SingerId = 1 and AlbumId = 1", + params={"p1": first_album_budget}, + param_types={"p1": spanner.param_types.INT64}, ) # Update second album transaction.execute_update( - "UPDATE Albums " - "SET MarketingBudget = $1 " - "WHERE SingerId = 2 and AlbumId = 2", - params={"p1": second_album_budget}, - param_types={"p1": spanner.param_types.INT64}, + "UPDATE Albums " + "SET MarketingBudget = $1 " + "WHERE SingerId = 2 and AlbumId = 2", + params={"p1": second_album_budget}, + param_types={"p1": spanner.param_types.INT64}, ) print( - "Transferred {} from Album2's budget to Album1's".format( - transfer_amount - ) + "Transferred {} from Album2's budget to Album1's".format( + transfer_amount + ) ) database.run_in_transaction(transfer_budget) 
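The re-indentation above makes the shape of the transfer pattern easy to lose: every read and write happens inside one function handed to run_in_transaction, which commits on success and reruns the whole function if Spanner aborts it. A stripped-down sketch of that flow, assuming placeholder instance and database IDs and the Albums schema used throughout these samples:

    from google.cloud import spanner

    spanner_client = spanner.Client()
    # Placeholder IDs for illustration; substitute real ones.
    database = spanner_client.instance("your-instance").database("your-db")

    def transfer(transaction, amount=200000):
        # Reads inside the function see a consistent snapshot; on a
        # transient abort, run_in_transaction reruns the whole function.
        rows = transaction.execute_sql(
            "SELECT MarketingBudget FROM Albums "
            "WHERE SingerId = 2 AND AlbumId = 2"
        )
        budget = list(rows)[0][0]
        if budget < amount:
            # Raising rolls the transaction back instead of committing.
            raise ValueError("not enough budget to transfer")
        transaction.execute_update(
            "UPDATE Albums SET MarketingBudget = MarketingBudget - $1 "
            "WHERE SingerId = 2 AND AlbumId = 2",
            params={"p1": amount},
            param_types={"p1": spanner.param_types.INT64},
        )

    database.run_in_transaction(transfer)
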
@@ -633,9 +636,9 @@ def read_stale_data(instance_id, database_id): with database.snapshot(exact_staleness=staleness) as snapshot: keyset = spanner.KeySet(all_=True) results = snapshot.read( - table="Albums", - columns=("SingerId", "AlbumId", "MarketingBudget"), - keyset=keyset, + table="Albums", + columns=("SingerId", "AlbumId", "MarketingBudget"), + keyset=keyset, ) for row in results: @@ -668,12 +671,13 @@ def update_data_with_timestamp(instance_id, database_id): with database.batch() as batch: batch.update( - table="Albums", - columns=("SingerId", "AlbumId", "MarketingBudget", "LastUpdateTime"), - values=[ - (1, 1, 1000000, spanner.COMMIT_TIMESTAMP), - (2, 2, 750000, spanner.COMMIT_TIMESTAMP), - ], + table="Albums", + columns=( + "SingerId", "AlbumId", "MarketingBudget", "LastUpdateTime"), + values=[ + (1, 1, 1000000, spanner.COMMIT_TIMESTAMP), + (2, 2, 750000, spanner.COMMIT_TIMESTAMP), + ], ) print("Updated data.") @@ -691,16 +695,17 @@ def add_timestamp_column(instance_id, database_id): database = instance.database(database_id) operation = database.update_ddl( - ["ALTER TABLE Albums ADD COLUMN LastUpdateTime SPANNER.COMMIT_TIMESTAMP"] + [ + "ALTER TABLE Albums ADD COLUMN LastUpdateTime SPANNER.COMMIT_TIMESTAMP"] ) print("Waiting for operation to complete...") operation.result(OPERATION_TIMEOUT_SECONDS) print( - 'Altered table "Albums" on database {} on instance {}.'.format( - database_id, instance_id - ) + 'Altered table "Albums" on database {} on instance {}.'.format( + database_id, instance_id + ) ) @@ -727,8 +732,8 @@ def query_data_with_timestamp(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT SingerId, AlbumId, MarketingBudget FROM Albums " - "ORDER BY LastUpdateTime DESC" + "SELECT SingerId, AlbumId, MarketingBudget FROM Albums " + "ORDER BY LastUpdateTime DESC" ) for row in results: @@ -747,9 +752,9 @@ def create_table_with_timestamp(instance_id, database_id): database = instance.database(database_id) request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( - database=database.name, - statements=[ - """CREATE TABLE Performances ( + database=database.name, + statements=[ + """CREATE TABLE Performances ( SingerId BIGINT NOT NULL, VenueId BIGINT NOT NULL, EventDate Date, @@ -757,7 +762,7 @@ def create_table_with_timestamp(instance_id, database_id): LastUpdateTime SPANNER.COMMIT_TIMESTAMP NOT NULL, PRIMARY KEY (SingerId, VenueId, EventDate)) INTERLEAVE IN PARENT Singers ON DELETE CASCADE""" - ], + ], ) operation = spanner_client.database_admin_api.update_database_ddl(request) @@ -765,9 +770,9 @@ def create_table_with_timestamp(instance_id, database_id): operation.result(OPERATION_TIMEOUT_SECONDS) print( - "Created Performances table on database {} on instance {}".format( - database_id, instance_id - ) + "Created Performances table on database {} on instance {}".format( + database_id, instance_id + ) ) @@ -785,13 +790,14 @@ def insert_data_with_timestamp(instance_id, database_id): with database.batch() as batch: batch.insert( - table="Performances", - columns=("SingerId", "VenueId", "EventDate", "Revenue", "LastUpdateTime"), - values=[ - (1, 4, "2017-10-05", 11000, spanner.COMMIT_TIMESTAMP), - (1, 19, "2017-11-02", 15000, spanner.COMMIT_TIMESTAMP), - (2, 42, "2017-12-23", 7000, spanner.COMMIT_TIMESTAMP), - ], + table="Performances", + columns=( + "SingerId", "VenueId", "EventDate", "Revenue", "LastUpdateTime"), + values=[ + (1, 4, "2017-10-05", 11000, spanner.COMMIT_TIMESTAMP), + (1, 19, "2017-11-02", 15000, 
spanner.COMMIT_TIMESTAMP), + (2, 42, "2017-12-23", 7000, spanner.COMMIT_TIMESTAMP), + ], ) print("Inserted data.") @@ -812,8 +818,8 @@ def insert_data_with_dml(instance_id, database_id): def insert_singers(transaction): row_ct = transaction.execute_update( - "INSERT INTO Singers (SingerId, FirstName, LastName) " - " VALUES (10, 'Virginia', 'Watson')" + "INSERT INTO Singers (SingerId, FirstName, LastName) " + " VALUES (10, 'Virginia', 'Watson')" ) print("{} record(s) inserted.".format(row_ct)) @@ -834,9 +840,9 @@ def update_data_with_dml(instance_id, database_id): def update_albums(transaction): row_ct = transaction.execute_update( - "UPDATE Albums " - "SET MarketingBudget = MarketingBudget * 2 " - "WHERE SingerId = 1 and AlbumId = 1" + "UPDATE Albums " + "SET MarketingBudget = MarketingBudget * 2 " + "WHERE SingerId = 1 and AlbumId = 1" ) print("{} record(s) updated.".format(row_ct)) @@ -857,7 +863,7 @@ def delete_data_with_dml(instance_id, database_id): def delete_singers(transaction): row_ct = transaction.execute_update( - "DELETE FROM Singers WHERE FirstName = 'Alice'" + "DELETE FROM Singers WHERE FirstName = 'Alice'" ) print("{} record(s) deleted.".format(row_ct)) @@ -879,14 +885,14 @@ def dml_write_read_transaction(instance_id, database_id): def write_then_read(transaction): # Insert record. row_ct = transaction.execute_update( - "INSERT INTO Singers (SingerId, FirstName, LastName) " - " VALUES (11, 'Timothy', 'Campbell')" + "INSERT INTO Singers (SingerId, FirstName, LastName) " + " VALUES (11, 'Timothy', 'Campbell')" ) print("{} record(s) inserted.".format(row_ct)) # Read newly inserted record. results = transaction.execute_sql( - "SELECT FirstName, LastName FROM Singers WHERE SingerId = 11" + "SELECT FirstName, LastName FROM Singers WHERE SingerId = 11" ) for result in results: print("FirstName: {}, LastName: {}".format(*result)) @@ -906,7 +912,7 @@ def update_data_with_partitioned_dml(instance_id, database_id): database = instance.database(database_id) row_ct = database.execute_partitioned_dml( - "UPDATE Albums SET MarketingBudget = 100000 WHERE SingerId > 1" + "UPDATE Albums SET MarketingBudget = 100000 WHERE SingerId > 1" ) print("{} records updated.".format(row_ct)) @@ -922,7 +928,8 @@ def delete_data_with_partitioned_dml(instance_id, database_id): instance = spanner_client.instance(instance_id) database = instance.database(database_id) - row_ct = database.execute_partitioned_dml("DELETE FROM Singers WHERE SingerId > 10") + row_ct = database.execute_partitioned_dml( + "DELETE FROM Singers WHERE SingerId > 10") print("{} record(s) deleted.".format(row_ct)) # [END spanner_postgresql_dml_partitioned_delete] @@ -941,19 +948,20 @@ def update_with_batch_dml(instance_id, database_id): database = instance.database(database_id) insert_statement = ( - "INSERT INTO Albums " - "(SingerId, AlbumId, AlbumTitle, MarketingBudget) " - "VALUES (1, 3, 'Test Album Title', 10000)" + "INSERT INTO Albums " + "(SingerId, AlbumId, AlbumTitle, MarketingBudget) " + "VALUES (1, 3, 'Test Album Title', 10000)" ) update_statement = ( - "UPDATE Albums " - "SET MarketingBudget = MarketingBudget * 2 " - "WHERE SingerId = 1 and AlbumId = 3" + "UPDATE Albums " + "SET MarketingBudget = MarketingBudget * 2 " + "WHERE SingerId = 1 and AlbumId = 3" ) def update_albums(transaction): - status, row_cts = transaction.batch_update([insert_statement, update_statement]) + status, row_cts = transaction.batch_update( + [insert_statement, update_statement]) if status.code != OK: # Do handling here. 
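One detail of the batch-DML snippet above that the diff churn hides: batch_update returns a (status, row_counts) pair rather than raising on a failed statement, and statements after the first failure are not executed. A compact sketch of the check, reusing the database handle from the sketch above:

    from google.rpc.code_pb2 import OK

    def run_batch(transaction):
        status, row_counts = transaction.batch_update(
            [
                "INSERT INTO Albums (SingerId, AlbumId, AlbumTitle) "
                "VALUES (1, 9, 'Demo Album')",
                "UPDATE Albums SET AlbumTitle = 'Demo Album 2' "
                "WHERE SingerId = 1 AND AlbumId = 9",
            ]
        )
        if status.code != OK:
            # Statements past the first failure never ran; row_counts
            # only covers the ones that did.
            print("Batch failed: {}".format(status.message))
            return
        print("Executed {} statements.".format(len(row_counts)))

    database.run_in_transaction(run_batch)
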
@@ -961,7 +969,8 @@ def update_albums(transaction): # `commit` is called by `run_in_transaction`. return - print("Executed {} SQL statements using Batch DML.".format(len(row_cts))) + print( + "Executed {} SQL statements using Batch DML.".format(len(row_cts))) database.run_in_transaction(update_albums) # [END spanner_postgresql_dml_batch_update] @@ -977,9 +986,9 @@ def create_table_with_datatypes(instance_id, database_id): database = instance.database(database_id) request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( - database=database.name, - statements=[ - """CREATE TABLE Venues ( + database=database.name, + statements=[ + """CREATE TABLE Venues ( VenueId BIGINT NOT NULL, VenueName character varying(100), VenueInfo BYTEA, @@ -989,7 +998,7 @@ def create_table_with_datatypes(instance_id, database_id): Revenue NUMERIC, LastUpdateTime SPANNER.COMMIT_TIMESTAMP NOT NULL, PRIMARY KEY (VenueId))""" - ], + ], ) operation = spanner_client.database_admin_api.update_database_ddl(request) @@ -997,9 +1006,9 @@ def create_table_with_datatypes(instance_id, database_id): operation.result(OPERATION_TIMEOUT_SECONDS) print( - "Created Venues table on database {} on instance {}".format( - database_id, instance_id - ) + "Created Venues table on database {} on instance {}".format( + database_id, instance_id + ) ) # [END spanner_postgresql_create_table_with_datatypes] @@ -1018,49 +1027,49 @@ def insert_datatypes_data(instance_id, database_id): exampleBytes3 = base64.b64encode("Hello World 3".encode()) with database.batch() as batch: batch.insert( - table="Venues", - columns=( - "VenueId", - "VenueName", - "VenueInfo", - "Capacity", - "OutdoorVenue", - "PopularityScore", - "Revenue", - "LastUpdateTime", + table="Venues", + columns=( + "VenueId", + "VenueName", + "VenueInfo", + "Capacity", + "OutdoorVenue", + "PopularityScore", + "Revenue", + "LastUpdateTime", + ), + values=[ + ( + 4, + "Venue 4", + exampleBytes1, + 1800, + False, + 0.85543, + decimal.Decimal("215100.10"), + spanner.COMMIT_TIMESTAMP, ), - values=[ - ( - 4, - "Venue 4", - exampleBytes1, - 1800, - False, - 0.85543, - decimal.Decimal("215100.10"), - spanner.COMMIT_TIMESTAMP, - ), - ( - 19, - "Venue 19", - exampleBytes2, - 6300, - True, - 0.98716, - decimal.Decimal("1200100.00"), - spanner.COMMIT_TIMESTAMP, - ), - ( - 42, - "Venue 42", - exampleBytes3, - 3000, - False, - 0.72598, - decimal.Decimal("390650.99"), - spanner.COMMIT_TIMESTAMP, - ), - ], + ( + 19, + "Venue 19", + exampleBytes2, + 6300, + True, + 0.98716, + decimal.Decimal("1200100.00"), + spanner.COMMIT_TIMESTAMP, + ), + ( + 42, + "Venue 42", + exampleBytes3, + 3000, + False, + 0.72598, + decimal.Decimal("390650.99"), + spanner.COMMIT_TIMESTAMP, + ), + ], ) print("Inserted data.") @@ -1082,10 +1091,10 @@ def query_data_with_bool(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName, OutdoorVenue FROM Venues " - "WHERE OutdoorVenue = $1", - params=param, - param_types=param_type, + "SELECT VenueId, VenueName, OutdoorVenue FROM Venues " + "WHERE OutdoorVenue = $1", + params=param, + param_types=param_type, ) for row in results: @@ -1108,9 +1117,9 @@ def query_data_with_bytes(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName FROM Venues " "WHERE VenueInfo = $1", - params=param, - param_types=param_type, + "SELECT VenueId, VenueName FROM Venues " "WHERE VenueInfo = $1", + params=param, + param_types=param_type, ) for row in results: @@ 
-1133,14 +1142,15 @@ def query_data_with_float(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName, PopularityScore FROM Venues " - "WHERE PopularityScore > $1", - params=param, - param_types=param_type, + "SELECT VenueId, VenueName, PopularityScore FROM Venues " + "WHERE PopularityScore > $1", + params=param, + param_types=param_type, ) for row in results: - print("VenueId: {}, VenueName: {}, PopularityScore: {}".format(*row)) + print( + "VenueId: {}, VenueName: {}, PopularityScore: {}".format(*row)) # [END spanner_postgresql_query_with_float_parameter] @@ -1159,9 +1169,9 @@ def query_data_with_int(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName, Capacity FROM Venues " "WHERE Capacity >= $1", - params=param, - param_types=param_type, + "SELECT VenueId, VenueName, Capacity FROM Venues " "WHERE Capacity >= $1", + params=param, + param_types=param_type, ) for row in results: @@ -1184,9 +1194,9 @@ def query_data_with_string(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName FROM Venues " "WHERE VenueName = $1", - params=param, - param_types=param_type, + "SELECT VenueId, VenueName FROM Venues " "WHERE VenueName = $1", + params=param, + param_types=param_type, ) for row in results: @@ -1207,19 +1217,18 @@ def query_data_with_timestamp_parameter(instance_id, database_id): # [END spanner_postgresql_query_with_timestamp_parameter] # Avoid time drift on the local machine. # https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4197. - example_timestamp = ( - datetime.datetime.utcnow() + datetime.timedelta(days=1) - ).isoformat() + "Z" + example_timestamp = (datetime.datetime.utcnow() + datetime.timedelta(days=1) + ).isoformat() + "Z" # [START spanner_postgresql_query_with_timestamp_parameter] param = {"p1": example_timestamp} param_type = {"p1": param_types.TIMESTAMP} with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName, LastUpdateTime FROM Venues " - "WHERE LastUpdateTime < $1", - params=param, - param_types=param_type, + "SELECT VenueId, VenueName, LastUpdateTime FROM Venues " + "WHERE LastUpdateTime < $1", + params=param, + param_types=param_type, ) for row in results: @@ -1246,13 +1255,13 @@ def update_data_with_numeric(instance_id, database_id): with database.batch() as batch: batch.update( - table="Venues", - columns=("VenueId", "Revenue"), - values=[ - (4, decimal.Decimal("35000")), - (19, decimal.Decimal("104500")), - (42, decimal.Decimal("99999999999999999999999999999.99")), - ], + table="Venues", + columns=("VenueId", "Revenue"), + values=[ + (4, decimal.Decimal("35000")), + (19, decimal.Decimal("104500")), + (42, decimal.Decimal("99999999999999999999999999999.99")), + ], ) print("Updated data.") @@ -1276,9 +1285,9 @@ def query_data_with_numeric_parameter(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, Revenue FROM Venues WHERE Revenue < $1", - params=param, - param_types=param_type, + "SELECT VenueId, Revenue FROM Venues WHERE Revenue < $1", + params=param, + param_types=param_type, ) for row in results: @@ -1292,17 +1301,17 @@ def create_client_with_query_options(instance_id, database_id): # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" spanner_client = spanner.Client( - query_options={ - "optimizer_version": 
"1", - "optimizer_statistics_package": "latest", - } + query_options={ + "optimizer_version": "1", + "optimizer_statistics_package": "latest", + } ) instance = spanner_client.instance(instance_id) database = instance.database(database_id) with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName, LastUpdateTime FROM Venues" + "SELECT VenueId, VenueName, LastUpdateTime FROM Venues" ) for row in results: @@ -1321,11 +1330,11 @@ def query_data_with_query_options(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName, LastUpdateTime FROM Venues", - query_options={ - "optimizer_version": "1", - "optimizer_statistics_package": "latest", - }, + "SELECT VenueId, VenueName, LastUpdateTime FROM Venues", + query_options={ + "optimizer_version": "1", + "optimizer_statistics_package": "latest", + }, ) for row in results: @@ -1333,120 +1342,15 @@ def query_data_with_query_options(instance_id, database_id): # [END spanner_postgresql_query_with_query_options] -# [START spanner_postgresql_jsonb_add_column] -def add_jsonb_column(instance_id, database_id): - """Adds a new JSONB column to the Venues table in the example database.""" - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - - database = instance.database(database_id) - - operation = database.update_ddl( - ["ALTER TABLE Venues ADD COLUMN VenueDetails JSONB"] - ) - - print("Waiting for operation to complete...") - operation.result(OPERATION_TIMEOUT_SECONDS) - - print( - 'Altered table "Venues" on database {} on instance {}.'.format( - database_id, instance_id - ) - ) - - -# [END spanner_postgresql_jsonb_add_column] - - -# [START spanner_postgresql_jsonb_update_data] -def update_data_with_jsonb(instance_id, database_id): - """Updates Venues tables in the database with the JSONB - column. - - This updates the `VenueDetails` column which must be created before - running this sample. You can add the column by running the - `add_jsonb_column` sample or by running this DDL statement - against your database: - - ALTER TABLE Venues ADD COLUMN VenueDetails JSONB - """ - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - - database = instance.database(database_id) - """ - PG JSONB takes the last value in the case of duplicate keys. - PG JSONB sorts first by key length and then lexicographically with - equivalent key length. 
- """ - with database.batch() as batch: - batch.update( - table="Venues", - columns=("VenueId", "VenueDetails"), - values=[ - ( - 4, - JsonObject( - [ - JsonObject({"name": None, "open": True}), - JsonObject( - {"name": "room 2", "open": False, "name": "room 3"} - ), - ] - ), - ), - (19, JsonObject(rating=9, open=True)), - ( - 42, - JsonObject( - { - "name": None, - "open": {"Monday": True, "Tuesday": False}, - "tags": ["large", "airy"], - } - ), - ), - ], - ) - - print("Updated data.") - - -# [END spanner_postgresql_jsonb_update_data] - - -def query_data_with_jsonb_parameter(instance_id, database_id): - """Queries sample data using SQL with a JSON parameter.""" - # [START spanner_postgresql_jsonb_query_parameter] - # instance_id = "your-spanner-instance" - # database_id = "your-spanner-db-id" - spanner_client = spanner.Client() - instance = spanner_client.instance(instance_id) - database = instance.database(database_id) - - param = {"p1": 2} - param_type = {"p1": param_types.INT64} - - with database.snapshot() as snapshot: - results = snapshot.execute_sql( - "SELECT venueid, venuedetails FROM Venues" - + " WHERE CAST(venuedetails ->> 'rating' AS INTEGER) > $1", - params=param, - param_types=param_type, - ) - - for row in results: - print("VenueId: {}, VenueDetails: {}".format(*row)) - # [END spanner_postgresql_jsonb_query_parameter] - - if __name__ == "__main__": # noqa: C901 parser = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter ) parser.add_argument("instance_id", help="Your Cloud Spanner instance ID.") parser.add_argument( - "--database-id", help="Your Cloud Spanner database ID.", default="example_db" + "--database-id", help="Your Cloud Spanner database ID.", + default="example_db" ) subparsers = parser.add_subparsers(dest="command") @@ -1460,78 +1364,92 @@ def query_data_with_jsonb_parameter(instance_id, database_id): subparsers.add_parser("add_column", help=add_column.__doc__) subparsers.add_parser("update_data", help=update_data.__doc__) subparsers.add_parser( - "query_data_with_new_column", help=query_data_with_new_column.__doc__ + "query_data_with_new_column", help=query_data_with_new_column.__doc__ ) - subparsers.add_parser("read_write_transaction", help=read_write_transaction.__doc__) - subparsers.add_parser("read_only_transaction", help=read_only_transaction.__doc__) + subparsers.add_parser("read_write_transaction", + help=read_write_transaction.__doc__) + subparsers.add_parser("read_only_transaction", + help=read_only_transaction.__doc__) subparsers.add_parser("add_index", help=add_index.__doc__) - subparsers.add_parser("read_data_with_index", help=read_data_with_index.__doc__) + subparsers.add_parser("read_data_with_index", + help=read_data_with_index.__doc__) subparsers.add_parser("add_storing_index", help=add_storing_index.__doc__) + subparsers.add_parser("read_data_with_storing_index", + help=read_data_with_storing_index.__doc__) subparsers.add_parser( - "read_data_with_storing_index", help=read_data_with_storing_index.__doc__ - ) - subparsers.add_parser( - "create_table_with_timestamp", help=create_table_with_timestamp.__doc__ + "create_table_with_timestamp", help=create_table_with_timestamp.__doc__ ) subparsers.add_parser( - "insert_data_with_timestamp", help=insert_data_with_timestamp.__doc__ + "insert_data_with_timestamp", help=insert_data_with_timestamp.__doc__ ) - subparsers.add_parser("add_timestamp_column", 
help=add_timestamp_column.__doc__) + subparsers.add_parser("add_timestamp_column", + help=add_timestamp_column.__doc__) subparsers.add_parser( - "update_data_with_timestamp", help=update_data_with_timestamp.__doc__ + "update_data_with_timestamp", help=update_data_with_timestamp.__doc__ ) subparsers.add_parser( - "query_data_with_timestamp", help=query_data_with_timestamp.__doc__ + "query_data_with_timestamp", help=query_data_with_timestamp.__doc__ ) - subparsers.add_parser("insert_data_with_dml", help=insert_data_with_dml.__doc__) - subparsers.add_parser("update_data_with_dml", help=update_data_with_dml.__doc__) - subparsers.add_parser("delete_data_with_dml", help=delete_data_with_dml.__doc__) + subparsers.add_parser("insert_data_with_dml", + help=insert_data_with_dml.__doc__) + subparsers.add_parser("update_data_with_dml", + help=update_data_with_dml.__doc__) + subparsers.add_parser("delete_data_with_dml", + help=delete_data_with_dml.__doc__) subparsers.add_parser( - "dml_write_read_transaction", help=dml_write_read_transaction.__doc__ + "dml_write_read_transaction", help=dml_write_read_transaction.__doc__ ) subparsers.add_parser("insert_with_dml", help=insert_with_dml.__doc__) subparsers.add_parser( - "query_data_with_parameter", help=query_data_with_parameter.__doc__ + "query_data_with_parameter", help=query_data_with_parameter.__doc__ ) subparsers.add_parser( - "write_with_dml_transaction", help=write_with_dml_transaction.__doc__ + "write_with_dml_transaction", help=write_with_dml_transaction.__doc__ ) subparsers.add_parser( - "update_data_with_partitioned_dml", - help=update_data_with_partitioned_dml.__doc__, + "update_data_with_partitioned_dml", + help=update_data_with_partitioned_dml.__doc__, ) subparsers.add_parser( - "delete_data_with_partitioned_dml", - help=delete_data_with_partitioned_dml.__doc__, + "delete_data_with_partitioned_dml", + help=delete_data_with_partitioned_dml.__doc__, ) - subparsers.add_parser("update_with_batch_dml", help=update_with_batch_dml.__doc__) + subparsers.add_parser("update_with_batch_dml", + help=update_with_batch_dml.__doc__) subparsers.add_parser( - "create_table_with_datatypes", help=create_table_with_datatypes.__doc__ + "create_table_with_datatypes", help=create_table_with_datatypes.__doc__ ) - subparsers.add_parser("insert_datatypes_data", help=insert_datatypes_data.__doc__) - subparsers.add_parser("query_data_with_bool", help=query_data_with_bool.__doc__) - subparsers.add_parser("query_data_with_bytes", help=query_data_with_bytes.__doc__) - subparsers.add_parser("query_data_with_float", help=query_data_with_float.__doc__) - subparsers.add_parser("query_data_with_int", help=query_data_with_int.__doc__) - subparsers.add_parser("query_data_with_string", help=query_data_with_string.__doc__) + subparsers.add_parser("insert_datatypes_data", + help=insert_datatypes_data.__doc__) + subparsers.add_parser("query_data_with_bool", + help=query_data_with_bool.__doc__) + subparsers.add_parser("query_data_with_bytes", + help=query_data_with_bytes.__doc__) + subparsers.add_parser("query_data_with_float", + help=query_data_with_float.__doc__) + subparsers.add_parser("query_data_with_int", + help=query_data_with_int.__doc__) + subparsers.add_parser("query_data_with_string", + help=query_data_with_string.__doc__) subparsers.add_parser( - "query_data_with_timestamp_parameter", - help=query_data_with_timestamp_parameter.__doc__, + "query_data_with_timestamp_parameter", + help=query_data_with_timestamp_parameter.__doc__, ) subparsers.add_parser( - 
"update_data_with_numeric", - help=update_data_with_numeric.__doc__, + "update_data_with_numeric", + help=update_data_with_numeric.__doc__, ) subparsers.add_parser( - "query_data_with_numeric_parameter", - help=query_data_with_numeric_parameter.__doc__, + "query_data_with_numeric_parameter", + help=query_data_with_numeric_parameter.__doc__, ) subparsers.add_parser( - "query_data_with_query_options", help=query_data_with_query_options.__doc__ + "query_data_with_query_options", + help=query_data_with_query_options.__doc__ ) subparsers.add_parser( - "create_client_with_query_options", - help=create_client_with_query_options.__doc__, + "create_client_with_query_options", + help=create_client_with_query_options.__doc__, ) args = parser.parse_args() diff --git a/samples/samples/pg_snippets_test.py b/samples/samples/pg_snippets_test.py index 5b185838ac..2716880832 100644 --- a/samples/samples/pg_snippets_test.py +++ b/samples/samples/pg_snippets_test.py @@ -188,7 +188,8 @@ def test_read_write_transaction(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["add_column"]) def test_query_data_with_new_column(capsys, instance_id, sample_database): - snippets.query_data_with_new_column(instance_id, sample_database.database_id) + snippets.query_data_with_new_column(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "SingerId: 1, AlbumId: 1, MarketingBudget: 300000" in out assert "SingerId: 2, AlbumId: 2, MarketingBudget: 300000" in out @@ -219,7 +220,8 @@ def test_add_storing_index(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["add_storing_index"]) def test_read_data_with_storing_index(capsys, instance_id, sample_database): - snippets.read_data_with_storing_index(instance_id, sample_database.database_id) + snippets.read_data_with_storing_index(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "300000" in out @@ -241,7 +243,8 @@ def test_add_timestamp_column(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["add_timestamp_column"]) def test_update_data_with_timestamp(capsys, instance_id, sample_database): - snippets.update_data_with_timestamp(instance_id, sample_database.database_id) + snippets.update_data_with_timestamp(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "Updated data" in out @@ -256,14 +259,16 @@ def test_query_data_with_timestamp(capsys, instance_id, sample_database): @pytest.mark.dependency(name="create_table_with_timestamp") def test_create_table_with_timestamp(capsys, instance_id, sample_database): - snippets.create_table_with_timestamp(instance_id, sample_database.database_id) + snippets.create_table_with_timestamp(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "Created Performances table on database" in out @pytest.mark.dependency(depends=["create_table_with_timestamp"]) def test_insert_data_with_timestamp(capsys, instance_id, sample_database): - snippets.insert_data_with_timestamp(instance_id, sample_database.database_id) + snippets.insert_data_with_timestamp(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "Inserted data." 
in out @@ -291,7 +296,8 @@ def test_delete_data_with_dml(capsys, instance_id, sample_database): @pytest.mark.dependency(name="dml_write_read_transaction") def test_dml_write_read_transaction(capsys, instance_id, sample_database): - snippets.dml_write_read_transaction(instance_id, sample_database.database_id) + snippets.dml_write_read_transaction(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "1 record(s) inserted." in out assert "FirstName: Timothy, LastName: Campbell" in out @@ -313,21 +319,24 @@ def test_query_data_with_parameter(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["add_column"]) def test_write_with_dml_transaction(capsys, instance_id, sample_database): - snippets.write_with_dml_transaction(instance_id, sample_database.database_id) + snippets.write_with_dml_transaction(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "Transferred 200000 from Album2's budget to Album1's" in out @pytest.mark.dependency(depends=["add_column"]) def update_data_with_partitioned_dml(capsys, instance_id, sample_database): - snippets.update_data_with_partitioned_dml(instance_id, sample_database.database_id) + snippets.update_data_with_partitioned_dml(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "3 record(s) updated" in out @pytest.mark.dependency(depends=["insert_with_dml"]) def test_delete_data_with_partitioned_dml(capsys, instance_id, sample_database): - snippets.delete_data_with_partitioned_dml(instance_id, sample_database.database_id) + snippets.delete_data_with_partitioned_dml(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "5 record(s) deleted" in out @@ -341,14 +350,15 @@ def test_update_with_batch_dml(capsys, instance_id, sample_database): @pytest.mark.dependency(name="create_table_with_datatypes") def test_create_table_with_datatypes(capsys, instance_id, sample_database): - snippets.create_table_with_datatypes(instance_id, sample_database.database_id) + snippets.create_table_with_datatypes(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "Created Venues table on database" in out @pytest.mark.dependency( - name="insert_datatypes_data", - depends=["create_table_with_datatypes"], + name="insert_datatypes_data", + depends=["create_table_with_datatypes"], ) def test_insert_datatypes_data(capsys, instance_id, sample_database): snippets.insert_datatypes_data(instance_id, sample_database.database_id) @@ -401,16 +411,19 @@ def test_update_data_with_numeric(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["insert_datatypes_data"]) -def test_query_data_with_numeric_parameter(capsys, instance_id, sample_database): - snippets.query_data_with_numeric_parameter(instance_id, sample_database.database_id) +def test_query_data_with_numeric_parameter(capsys, instance_id, + sample_database): + snippets.query_data_with_numeric_parameter(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "VenueId: 4, Revenue: 35000" in out @pytest.mark.dependency(depends=["insert_datatypes_data"]) -def test_query_data_with_timestamp_parameter(capsys, instance_id, sample_database): +def test_query_data_with_timestamp_parameter(capsys, instance_id, + sample_database): snippets.query_data_with_timestamp_parameter( - instance_id, sample_database.database_id + instance_id, sample_database.database_id ) out, _ = capsys.readouterr() assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out @@ 
-420,7 +433,8 @@ def test_query_data_with_timestamp_parameter(capsys, instance_id, sample_databas @pytest.mark.dependency(depends=["insert_datatypes_data"]) def test_query_data_with_query_options(capsys, instance_id, sample_database): - snippets.query_data_with_query_options(instance_id, sample_database.database_id) + snippets.query_data_with_query_options(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out assert "VenueId: 19, VenueName: Venue 19, LastUpdateTime:" in out @@ -429,30 +443,9 @@ def test_query_data_with_query_options(capsys, instance_id, sample_database): @pytest.mark.dependency(depends=["insert_datatypes_data"]) def test_create_client_with_query_options(capsys, instance_id, sample_database): - snippets.create_client_with_query_options(instance_id, sample_database.database_id) + snippets.create_client_with_query_options(instance_id, + sample_database.database_id) out, _ = capsys.readouterr() assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out assert "VenueId: 19, VenueName: Venue 19, LastUpdateTime:" in out assert "VenueId: 42, VenueName: Venue 42, LastUpdateTime:" in out - - -@pytest.mark.dependency(name="add_jsonb_column", depends=["insert_datatypes_data"]) -def test_add_jsonb_column(capsys, instance_id, sample_database): - snippets.add_jsonb_column(instance_id, sample_database.database_id) - out, _ = capsys.readouterr() - assert "Waiting for operation to complete..." in out - assert 'Altered table "Venues" on database ' in out - - -@pytest.mark.dependency(name="update_data_with_jsonb", depends=["add_jsonb_column"]) -def test_update_data_with_jsonb(capsys, instance_id, sample_database): - snippets.update_data_with_jsonb(instance_id, sample_database.database_id) - out, _ = capsys.readouterr() - assert "Updated data." 
in out - - -@pytest.mark.dependency(depends=["update_data_with_jsonb"]) -def test_query_data_with_jsonb_parameter(capsys, instance_id, sample_database): - snippets.query_data_with_jsonb_parameter(instance_id, sample_database.database_id) - out, _ = capsys.readouterr() - assert "VenueId: 19, VenueDetails: {'open': True, 'rating': 9}" in out From 38cf5c0eceebb60939cafd0cfc2bc4afd511ec6d Mon Sep 17 00:00:00 2001 From: Astha Mohta Date: Wed, 2 Nov 2022 18:29:34 +0530 Subject: [PATCH 09/18] samples --- samples/samples/pg_snippets.py | 125 ++++++++++++++++++++++++++++ samples/samples/pg_snippets_test.py | 22 +++++ tests/_fixtures.py | 1 + tests/system/conftest.py | 4 +- tests/system/test_session_api.py | 2 +- 5 files changed, 150 insertions(+), 4 deletions(-) diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index 367690dbd8..085d3c5441 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -28,6 +28,7 @@ from google.cloud import spanner, spanner_admin_database_v1 from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect from google.cloud.spanner_v1 import param_types +from google.cloud.spanner_v1.data_types import JsonObject OPERATION_TIMEOUT_SECONDS = 240 @@ -1342,6 +1343,112 @@ def query_data_with_query_options(instance_id, database_id): # [END spanner_postgresql_query_with_query_options] +# [START spanner_postgresql_jsonb_add_column] +def add_jsonb_column(instance_id, database_id): + """Adds a new JSONB column to the Venues table in the example database.""" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + + database = instance.database(database_id) + + operation = database.update_ddl( + ["ALTER TABLE Venues ADD COLUMN VenueDetails JSONB"] + ) + + print("Waiting for operation to complete...") + operation.result(OPERATION_TIMEOUT_SECONDS) + + print( + 'Altered table "Venues" on database {} on instance {}.'.format( + database_id, instance_id + ) + ) + + +# [END spanner_postgresql_jsonb_add_column] + + +# [START spanner_postgresql_jsonb_update_data] +def update_data_with_jsonb(instance_id, database_id): + """Updates Venues tables in the database with the JSONB + column. + This updates the `VenueDetails` column which must be created before + running this sample. You can add the column by running the + `add_jsonb_column` sample or by running this DDL statement + against your database: + ALTER TABLE Venues ADD COLUMN VenueDetails JSONB + """ + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + + database = instance.database(database_id) + """ + PG JSONB takes the last value in the case of duplicate keys. + PG JSONB sorts first by key length and then lexicographically with + equivalent key length. 
+ """ + + with database.batch() as batch: + batch.update( + table="Venues", + columns=("VenueId", "VenueDetails"), + values=[ + ( + 4, + JsonObject( + [ + JsonObject({"name": None, "open": True}), + JsonObject( + {"name": "room 2", "open": False} + ), + ] + ), + ), + (19, JsonObject(rating=9, open=True)), + ( + 42, + JsonObject( + { + "name": None, + "open": {"Monday": True, "Tuesday": False}, + "tags": ["large", "airy"], + } + ), + ), + ], + ) + + print("Updated data.") + + +# [END spanner_postgresql_jsonb_update_data] + + +def query_data_with_jsonb_parameter(instance_id, database_id): + """Queries sample data using SQL with a JSONB parameter.""" + # [START spanner_postgresql_jsonb_query_parameter] + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() + instance = spanner_client.instance(instance_id) + database = instance.database(database_id) + + param = {"p1": 2} + param_type = {"p1": param_types.INT64} + + with database.snapshot() as snapshot: + results = snapshot.execute_sql( + "SELECT venueid, venuedetails FROM Venues" + + " WHERE CAST(venuedetails ->> 'rating' AS INTEGER) > $1", + params=param, + param_types=param_type, + ) + + for row in results: + print("VenueId: {}, VenueDetails: {}".format(*row)) + # [END spanner_postgresql_jsonb_query_parameter] + + if __name__ == "__main__": # noqa: C901 parser = argparse.ArgumentParser( description=__doc__, @@ -1451,6 +1558,18 @@ def query_data_with_query_options(instance_id, database_id): "create_client_with_query_options", help=create_client_with_query_options.__doc__, ) + subparsers.add_parser( + "add_jsonb_column", + help=add_jsonb_column.__doc__, + ) + subparsers.add_parser( + "update_data_with_jsonb", + help=update_data_with_jsonb.__doc__, + ) + subparsers.add_parser( + "query_data_with_jsonb_parameter", + help=query_data_with_jsonb_parameter.__doc__, + ) args = parser.parse_args() @@ -1540,3 +1659,9 @@ def query_data_with_query_options(instance_id, database_id): query_data_with_query_options(args.instance_id, args.database_id) elif args.command == "create_client_with_query_options": create_client_with_query_options(args.instance_id, args.database_id) + elif args.command == "add_jsonb_column": + add_jsonb_column(args.instance_id, args.database_id) + elif args.command == "update_data_with_jsonb": + update_data_with_jsonb(args.instance_id, args.database_id) + elif args.command == "query_data_with_jsonb_parameter": + query_data_with_jsonb_parameter(args.instance_id, args.database_id) diff --git a/samples/samples/pg_snippets_test.py b/samples/samples/pg_snippets_test.py index 2716880832..8937f34b7c 100644 --- a/samples/samples/pg_snippets_test.py +++ b/samples/samples/pg_snippets_test.py @@ -449,3 +449,25 @@ def test_create_client_with_query_options(capsys, instance_id, sample_database): assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out assert "VenueId: 19, VenueName: Venue 19, LastUpdateTime:" in out assert "VenueId: 42, VenueName: Venue 42, LastUpdateTime:" in out + + +@pytest.mark.dependency(name="add_jsonb_column", depends=["insert_datatypes_data"]) +def test_add_jsonb_column(capsys, instance_id, sample_database): + snippets.add_jsonb_column(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "Waiting for operation to complete..." 
in out + assert 'Altered table "Venues" on database ' in out + + +@pytest.mark.dependency(name="update_data_with_jsonb", depends=["add_jsonb_column"]) +def test_update_data_with_jsonb(capsys, instance_id, sample_database): + snippets.update_data_with_jsonb(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "Updated data." in out + + +@pytest.mark.dependency(depends=["update_data_with_jsonb"]) +def test_query_data_with_jsonb_parameter(capsys, instance_id, sample_database): + snippets.query_data_with_jsonb_parameter(instance_id, sample_database.database_id) + out, _ = capsys.readouterr() + assert "VenueId: 19, VenueDetails: {'open': True, 'rating': 9}" in out diff --git a/tests/_fixtures.py b/tests/_fixtures.py index cea3054156..7bf55ee232 100644 --- a/tests/_fixtures.py +++ b/tests/_fixtures.py @@ -136,6 +136,7 @@ string_value VARCHAR(16), timestamp_value TIMESTAMPTZ, numeric_value NUMERIC, + jsonb_value JSONB, PRIMARY KEY (pkey) ); CREATE TABLE counters ( name VARCHAR(1024), diff --git a/tests/system/conftest.py b/tests/system/conftest.py index d687837d56..3d6706b582 100644 --- a/tests/system/conftest.py +++ b/tests/system/conftest.py @@ -85,9 +85,7 @@ def spanner_client(): credentials=credentials, ) else: - return spanner_v1.Client( - client_options={"api_endpoint": "staging-wrenchworks.sandbox.googleapis.com"} - ) # use google.auth.default credentials + return spanner_v1.Client() # use google.auth.default credentials @pytest.fixture(scope="session") diff --git a/tests/system/test_session_api.py b/tests/system/test_session_api.py index 2b8c8a4819..efbe966f11 100644 --- a/tests/system/test_session_api.py +++ b/tests/system/test_session_api.py @@ -191,7 +191,7 @@ if _helpers.USE_EMULATOR: ALL_TYPES_COLUMNS = EMULATOR_ALL_TYPES_COLUMNS ALL_TYPES_ROWDATA = EMULATOR_ALL_TYPES_ROWDATA -elif _helpers.DATABASE_DIALECT: +elif _helpers.DATABASE_DIALECT == 'POSTGRESQL': ALL_TYPES_COLUMNS = POSTGRES_ALL_TYPES_COLUMNS ALL_TYPES_ROWDATA = POSTGRES_ALL_TYPES_ROWDATA else: From 5559bdb48a402098500736f9a4f1b9f8e666415f Mon Sep 17 00:00:00 2001 From: Astha Mohta Date: Wed, 2 Nov 2022 18:30:23 +0530 Subject: [PATCH 10/18] lint --- logsAstha.log | 3 +++ tests/system/test_session_api.py | 7 +++++-- 2 files changed, 8 insertions(+), 2 deletions(-) create mode 100644 logsAstha.log diff --git a/logsAstha.log b/logsAstha.log new file mode 100644 index 0000000000..bf0b5b1e58 --- /dev/null +++ b/logsAstha.log @@ -0,0 +1,3 @@ +hey!! 
+hey!!2 +hey!!2 diff --git a/tests/system/test_session_api.py b/tests/system/test_session_api.py index efbe966f11..d4a2c74c2f 100644 --- a/tests/system/test_session_api.py +++ b/tests/system/test_session_api.py @@ -191,7 +191,7 @@ if _helpers.USE_EMULATOR: ALL_TYPES_COLUMNS = EMULATOR_ALL_TYPES_COLUMNS ALL_TYPES_ROWDATA = EMULATOR_ALL_TYPES_ROWDATA -elif _helpers.DATABASE_DIALECT == 'POSTGRESQL': +elif _helpers.DATABASE_DIALECT == "POSTGRESQL": ALL_TYPES_COLUMNS = POSTGRES_ALL_TYPES_COLUMNS ALL_TYPES_ROWDATA = POSTGRES_ALL_TYPES_ROWDATA else: @@ -2106,7 +2106,10 @@ def test_execute_sql_w_json_bindings( [JSON_1, JSON_2], ) -def test_execute_sql_w_jsonb_bindings(not_emulator, not_google_standard_sql, sessions_database, database_dialect): + +def test_execute_sql_w_jsonb_bindings( + not_emulator, not_google_standard_sql, sessions_database, database_dialect +): _bind_test_helper( sessions_database, database_dialect, From 86ad0dffc0dda2ea3d09bc281c9d5b601baa3b62 Mon Sep 17 00:00:00 2001 From: Astha Mohta Date: Wed, 2 Nov 2022 18:33:22 +0530 Subject: [PATCH 11/18] changes as per comments --- samples/samples/pg_snippets.py | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index 085d3c5441..5f1578b2e2 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -1558,18 +1558,6 @@ def query_data_with_jsonb_parameter(instance_id, database_id): "create_client_with_query_options", help=create_client_with_query_options.__doc__, ) - subparsers.add_parser( - "add_jsonb_column", - help=add_jsonb_column.__doc__, - ) - subparsers.add_parser( - "update_data_with_jsonb", - help=update_data_with_jsonb.__doc__, - ) - subparsers.add_parser( - "query_data_with_jsonb_parameter", - help=query_data_with_jsonb_parameter.__doc__, - ) args = parser.parse_args() @@ -1659,9 +1647,3 @@ def query_data_with_jsonb_parameter(instance_id, database_id): query_data_with_query_options(args.instance_id, args.database_id) elif args.command == "create_client_with_query_options": create_client_with_query_options(args.instance_id, args.database_id) - elif args.command == "add_jsonb_column": - add_jsonb_column(args.instance_id, args.database_id) - elif args.command == "update_data_with_jsonb": - update_data_with_jsonb(args.instance_id, args.database_id) - elif args.command == "query_data_with_jsonb_parameter": - query_data_with_jsonb_parameter(args.instance_id, args.database_id) From 9876985472e1b58315be2d050bce2af37ab22c40 Mon Sep 17 00:00:00 2001 From: Astha Mohta Date: Wed, 2 Nov 2022 18:34:20 +0530 Subject: [PATCH 12/18] removing file --- logsAstha.log | 3 --- 1 file changed, 3 deletions(-) delete mode 100644 logsAstha.log diff --git a/logsAstha.log b/logsAstha.log deleted file mode 100644 index bf0b5b1e58..0000000000 --- a/logsAstha.log +++ /dev/null @@ -1,3 +0,0 @@ -hey!! 
-hey!!2 -hey!!2 From 9c3ecc792a80f1c4a3da3826a6a3a135a7c60aa6 Mon Sep 17 00:00:00 2001 From: Astha Mohta Date: Wed, 2 Nov 2022 19:18:56 +0530 Subject: [PATCH 13/18] changes as per review --- samples/samples/pg_snippets.py | 16 +++++++++++++++- tests/system/test_session_api.py | 2 +- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index 5f1578b2e2..896ee0bd01 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -1345,7 +1345,21 @@ def query_data_with_query_options(instance_id, database_id): # [START spanner_postgresql_jsonb_add_column] def add_jsonb_column(instance_id, database_id): - """Adds a new JSONB column to the Venues table in the example database.""" + """ + Alters Venues tables in the database adding a JSONB column. + You can create the table by running the `create_table_with_datatypes` + sample or by running this DDL statement against your database: + CREATE TABLE Venues ( + VenueId BIGINT NOT NULL, + VenueName character varying(100), + VenueInfo BYTEA, + Capacity BIGINT, + OutdoorVenue BOOL, + PopularityScore FLOAT8, + Revenue NUMERIC, + LastUpdateTime SPANNER.COMMIT_TIMESTAMP NOT NULL, + PRIMARY KEY (VenueId)) + """ spanner_client = spanner.Client() instance = spanner_client.instance(instance_id) diff --git a/tests/system/test_session_api.py b/tests/system/test_session_api.py index d4a2c74c2f..8e7b65d95e 100644 --- a/tests/system/test_session_api.py +++ b/tests/system/test_session_api.py @@ -121,7 +121,7 @@ AllTypesRowData(pkey=108, timestamp_value=NANO_TIME), AllTypesRowData(pkey=109, numeric_value=NUMERIC_1), AllTypesRowData(pkey=110, json_value=JSON_1), - AllTypesRowData(pkey=111, json_value=[JSON_1, JSON_2]), + AllTypesRowData(pkey=111, json_value=JsonObject([JSON_1, JSON_2])), # empty array values AllTypesRowData(pkey=201, int_array=[]), AllTypesRowData(pkey=202, bool_array=[]), From 113dc833f6cc15c5eacc42b6af83c8f4922fa67c Mon Sep 17 00:00:00 2001 From: Astha Mohta <35952883+asthamohta@users.noreply.github.com> Date: Thu, 3 Nov 2022 13:44:15 +0530 Subject: [PATCH 14/18] Update pg_snippets.py --- samples/samples/pg_snippets.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index 896ee0bd01..26628a44b3 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -1437,10 +1437,9 @@ def update_data_with_jsonb(instance_id, database_id): # [END spanner_postgresql_jsonb_update_data] - +# [START spanner_postgresql_jsonb_query_parameter] def query_data_with_jsonb_parameter(instance_id, database_id): """Queries sample data using SQL with a JSONB parameter.""" - # [START spanner_postgresql_jsonb_query_parameter] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" spanner_client = spanner.Client() From 28416e8ade97fb17fd1fa4895d851934aacf8829 Mon Sep 17 00:00:00 2001 From: Astha Mohta <35952883+asthamohta@users.noreply.github.com> Date: Thu, 3 Nov 2022 13:45:37 +0530 Subject: [PATCH 15/18] Update pg_snippets.py --- samples/samples/pg_snippets.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index 26628a44b3..e1c3e3bd74 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -1459,7 +1459,9 @@ def query_data_with_jsonb_parameter(instance_id, database_id): for row in results: print("VenueId: {}, VenueDetails: {}".format(*row)) - # 
[END spanner_postgresql_jsonb_query_parameter] + + +# [END spanner_postgresql_jsonb_query_parameter] if __name__ == "__main__": # noqa: C901 From e13e695e6404f18d594450e45511b6bfc0ab17f0 Mon Sep 17 00:00:00 2001 From: Astha Mohta <35952883+asthamohta@users.noreply.github.com> Date: Thu, 3 Nov 2022 13:58:47 +0530 Subject: [PATCH 16/18] Update pg_snippets.py --- samples/samples/pg_snippets.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index e1c3e3bd74..d7fbcbe5c5 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -1360,9 +1360,11 @@ def add_jsonb_column(instance_id, database_id): LastUpdateTime SPANNER.COMMIT_TIMESTAMP NOT NULL, PRIMARY KEY (VenueId)) """ + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() instance = spanner_client.instance(instance_id) - database = instance.database(database_id) operation = database.update_ddl( @@ -1392,10 +1394,13 @@ def update_data_with_jsonb(instance_id, database_id): against your database: ALTER TABLE Venues ADD COLUMN VenueDetails JSONB """ + # instance_id = "your-spanner-instance" + # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() instance = spanner_client.instance(instance_id) - database = instance.database(database_id) + """ PG JSONB takes the last value in the case of duplicate keys. PG JSONB sorts first by key length and then lexicographically with @@ -1442,6 +1447,7 @@ def query_data_with_jsonb_parameter(instance_id, database_id): """Queries sample data using SQL with a JSONB parameter.""" # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" + spanner_client = spanner.Client() instance = spanner_client.instance(instance_id) database = instance.database(database_id) From c25a6dfecf005165e69d86e34b0ff4a8162587b2 Mon Sep 17 00:00:00 2001 From: Astha Mohta <35952883+asthamohta@users.noreply.github.com> Date: Thu, 3 Nov 2022 16:03:02 +0530 Subject: [PATCH 17/18] Update pg_snippets.py --- samples/samples/pg_snippets.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index d7fbcbe5c5..ed5d5bc0c7 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -1396,11 +1396,11 @@ def update_data_with_jsonb(instance_id, database_id): """ # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" - + spanner_client = spanner.Client() instance = spanner_client.instance(instance_id) database = instance.database(database_id) - + """ PG JSONB takes the last value in the case of duplicate keys. 
PG JSONB sorts first by key length and then lexicographically with @@ -1447,7 +1447,7 @@ def query_data_with_jsonb_parameter(instance_id, database_id): """Queries sample data using SQL with a JSONB parameter.""" # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" - + spanner_client = spanner.Client() instance = spanner_client.instance(instance_id) database = instance.database(database_id) @@ -1465,8 +1465,8 @@ def query_data_with_jsonb_parameter(instance_id, database_id): for row in results: print("VenueId: {}, VenueDetails: {}".format(*row)) - - + + # [END spanner_postgresql_jsonb_query_parameter] From efa6fdf88365e95b43d95b8b5563499ddf8d1a4a Mon Sep 17 00:00:00 2001 From: Astha Mohta <35952883+asthamohta@users.noreply.github.com> Date: Fri, 4 Nov 2022 14:10:05 +0530 Subject: [PATCH 18/18] Update pg_snippets.py --- samples/samples/pg_snippets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/samples/pg_snippets.py b/samples/samples/pg_snippets.py index ed5d5bc0c7..87215b69b8 100644 --- a/samples/samples/pg_snippets.py +++ b/samples/samples/pg_snippets.py @@ -1362,7 +1362,7 @@ def add_jsonb_column(instance_id, database_id): """ # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" - + spanner_client = spanner.Client() instance = spanner_client.instance(instance_id) database = instance.database(database_id)
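
Across the series, a few behaviors the JSONB changes depend on are worth spelling out. First, the two PostgreSQL JSONB rules quoted in the `update_data_with_jsonb` docstring: JSONB keeps only the last value for a duplicate key, and it stores object keys sorted by length first, then lexicographically. A minimal write sketch, assuming a `Venues` table that already has the `VenueDetails` JSONB column (instance and database names are placeholders):

from google.cloud import spanner
from google.cloud.spanner_v1.data_types import JsonObject

spanner_client = spanner.Client()
database = spanner_client.instance("your-instance").database("your-db")

# JsonObject serializes to a single JSONB value. PostgreSQL normalizes
# it on write: {"rating": 9, "open": True} is stored (and read back)
# as {"open": true, "rating": 9}, because "open" is shorter than
# "rating" and therefore sorts first.
with database.batch() as batch:
    batch.update(
        table="Venues",
        columns=("VenueId", "VenueDetails"),
        values=[(19, JsonObject(rating=9, open=True))],
    )

That normalization is why `test_query_data_with_jsonb_parameter` can assert the exact rendering `{'open': True, 'rating': 9}` no matter which key order the write used.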
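
Second, in `query_data_with_jsonb_parameter` the `->>` operator extracts a JSONB field as text, which is why the value is CAST back to INTEGER before the numeric comparison. Note also the parameter naming convention for the PostgreSQL dialect: the `params` dict uses keys `p1`, `p2`, and so on, which bind to the positional placeholders `$1`, `$2` in the statement. A condensed sketch of the same query, reusing the `database` handle from the sketch above:

from google.cloud.spanner_v1 import param_types

with database.snapshot() as snapshot:
    results = snapshot.execute_sql(
        "SELECT venueid, venuedetails FROM Venues"
        " WHERE CAST(venuedetails ->> 'rating' AS INTEGER) > $1",
        params={"p1": 2},                      # "p1" binds to $1
        param_types={"p1": param_types.INT64},
    )
    for row in results:
        print("VenueId: {}, VenueDetails: {}".format(*row))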
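
PATCH 11/18 removes the argparse wiring for the three JSONB samples, so after that commit they are not reachable from the pg_snippets command line; the tests call them as plain functions instead. If you need to run one ad hoc, calling it directly works the same way (the import path below is an assumption; it presumes you run from samples/samples):

import pg_snippets  # assumed import path for samples/samples/pg_snippets.py

pg_snippets.add_jsonb_column("your-instance", "your-db")
pg_snippets.update_data_with_jsonb("your-instance", "your-db")
pg_snippets.query_data_with_jsonb_parameter("your-instance", "your-db")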
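
PATCH 13/18 changes the all-types fixture row from `json_value=[JSON_1, JSON_2]` to `json_value=JsonObject([JSON_1, JSON_2])`. The distinction matters because the client treats a bare Python list as an ARRAY-typed column value; wrapping the list in `JsonObject` is what marks it as a single JSON value whose top level is an array. Illustrated with hypothetical values:

from google.cloud.spanner_v1.data_types import JsonObject

a = {"name": "one"}
b = {"name": "two"}

array_of_json = [a, b]               # sent as an ARRAY value: two JSON elements
one_json_array = JsonObject([a, b])  # sent as one JSON value: [{...}, {...}]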
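
One fix in PATCH 09/18 is easy to miss: `tests/system/test_session_api.py` previously chose the PostgreSQL fixtures with `elif _helpers.DATABASE_DIALECT:`, which is true for any non-empty setting, including `GOOGLE_STANDARD_SQL`; PATCH 10/18 then only normalizes the string quoting. The core of the bug in isolation:

dialect = "GOOGLE_STANDARD_SQL"

# Old condition: any non-empty string is truthy, so GoogleSQL runs
# wrongly selected the PostgreSQL fixtures.
assert bool(dialect) is True

# Fixed condition: only an explicit PostgreSQL setting matches.
assert (dialect == "POSTGRESQL") is False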
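
Finally, the new tests in `pg_snippets_test.py` are chained with `pytest.mark.dependency` (from the pytest-dependency plugin): `update_data_with_jsonb` depends on `add_jsonb_column`, and the query test depends on the update. If a prerequisite fails, its dependents are skipped rather than failing on a missing column or missing data. The pattern in miniature, with hypothetical test bodies:

import pytest


@pytest.mark.dependency(name="schema")
def test_add_column():
    assert True  # stands in for: the DDL change succeeded


@pytest.mark.dependency(name="data", depends=["schema"])
def test_update_rows():
    assert True  # skipped automatically if test_add_column failed


@pytest.mark.dependency(depends=["data"])
def test_query_rows():
    assert True  # runs only when both prerequisites passed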