From 974606287a052e1145a95c488e86ffdceb10550d Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Wed, 11 Aug 2021 22:42:32 -0400
Subject: [PATCH] tests: allow prerelease dependency versions under Python 3.9
 (#479)

Also, drop use of 'pytz', which is no longer depended on by
`google-api-core` / `google-cloud-core`. Instead, use either
`datetime.timezone.utc` or `google.cloud._helpers.UTC`, depending on usage.
---
 docs/snapshot-usage.rst          |  3 +-
 noxfile.py                       |  4 ++
 owlbot.py                        | 95 ++++++++++++++++++++++----------
 samples/samples/conftest.py      |  6 +-
 samples/samples/snippets.py      | 28 +++++-----
 samples/samples/snippets_test.py |  9 +--
 testing/constraints-3.9.txt      |  2 +
 tests/unit/test__helpers.py      | 23 ++++----
 tests/unit/test_backup.py        |  4 +-
 9 files changed, 107 insertions(+), 67 deletions(-)

diff --git a/docs/snapshot-usage.rst b/docs/snapshot-usage.rst
index 311ea8f3ca..0f00686a54 100644
--- a/docs/snapshot-usage.rst
+++ b/docs/snapshot-usage.rst
@@ -24,8 +24,7 @@ reads as of a given timestamp:
 .. code:: python
 
     import datetime
-    from pytz import UTC
 
-    TIMESTAMP = datetime.datetime.utcnow().replace(tzinfo=UTC)
+    TIMESTAMP = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc)
 
     with database.snapshot(read_timestamp=TIMESTAMP) as snapshot:
         ...
diff --git a/noxfile.py b/noxfile.py
index 6579eecd49..c72dff470d 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -105,7 +105,11 @@ def default(session):
         *session.posargs,
     )
 
+    # XXX Work around Kokoro image's older pip, which borks the OT install.
+    session.run("pip", "install", "--upgrade", "pip")
     session.install("-e", ".[tracing]", "-c", constraints_path)
+    # XXX: Dump installed versions to debug OT issue
+    session.run("pip", "list")
 
     # Run py.test against the unit tests with OpenTelemetry.
     session.run(
diff --git a/owlbot.py b/owlbot.py
index 770f6bf0eb..8ac551b811 100644
--- a/owlbot.py
+++ b/owlbot.py
@@ -23,12 +23,16 @@
 
 common = gcp.CommonTemplates()
 
-# This is a customized version of the s.get_staging_dirs() function from synthtool to
-# cater for copying 3 different folders from googleapis-gen
-# which are spanner, spanner/admin/instance and spanner/admin/database.
-# Source https://github.com/googleapis/synthtool/blob/master/synthtool/transforms.py#L280
+
 def get_staging_dirs(
-    default_version: Optional[str] = None, sub_directory: Optional[str] = None
+    # This is a customized version of the s.get_staging_dirs() function
+    # from synthtool to # cater for copying 3 different folders from
+    # googleapis-gen:
+    # spanner, spanner/admin/instance and spanner/admin/database.
+    # Source:
+    # https://github.com/googleapis/synthtool/blob/master/synthtool/transforms.py#L280
+    default_version: Optional[str] = None,
+    sub_directory: Optional[str] = None,
 ) -> List[Path]:
     """Returns the list of directories, one per version, copied from
     https://github.com/googleapis/googleapis-gen. Will return in lexical sorting
@@ -63,46 +67,69 @@ def get_staging_dirs(
     else:
         return []
 
+
 spanner_default_version = "v1"
 spanner_admin_instance_default_version = "v1"
 spanner_admin_database_default_version = "v1"
 
 for library in get_staging_dirs(spanner_default_version, "spanner"):
     # Work around gapic generator bug https://github.com/googleapis/gapic-generator-python/issues/902
-    s.replace(library / f"google/cloud/spanner_{library.name}/types/transaction.py",
-              r""".
+    s.replace(
+        library / f"google/cloud/spanner_{library.name}/types/transaction.py",
+        r""".
Attributes:""", - r""".\n + r""".\n Attributes:""", ) # Work around gapic generator bug https://github.com/googleapis/gapic-generator-python/issues/902 - s.replace(library / f"google/cloud/spanner_{library.name}/types/transaction.py", - r""". + s.replace( + library / f"google/cloud/spanner_{library.name}/types/transaction.py", + r""". Attributes:""", - r""".\n + r""".\n Attributes:""", ) # Remove headings from docstring. Requested change upstream in cl/377290854 due to https://google.aip.dev/192#formatting. - s.replace(library / f"google/cloud/spanner_{library.name}/types/transaction.py", + s.replace( + library / f"google/cloud/spanner_{library.name}/types/transaction.py", """\n ==.*?==\n""", ":", ) # Remove headings from docstring. Requested change upstream in cl/377290854 due to https://google.aip.dev/192#formatting. - s.replace(library / f"google/cloud/spanner_{library.name}/types/transaction.py", + s.replace( + library / f"google/cloud/spanner_{library.name}/types/transaction.py", """\n --.*?--\n""", ":", ) - s.move(library, excludes=["google/cloud/spanner/**", "*.*", "docs/index.rst", "google/cloud/spanner_v1/__init__.py"]) + s.move( + library, + excludes=[ + "google/cloud/spanner/**", + "*.*", + "docs/index.rst", + "google/cloud/spanner_v1/__init__.py", + ], + ) -for library in get_staging_dirs(spanner_admin_instance_default_version, "spanner_admin_instance"): - s.move(library, excludes=["google/cloud/spanner_admin_instance/**", "*.*", "docs/index.rst"]) +for library in get_staging_dirs( + spanner_admin_instance_default_version, "spanner_admin_instance" +): + s.move( + library, + excludes=["google/cloud/spanner_admin_instance/**", "*.*", "docs/index.rst"], + ) -for library in get_staging_dirs(spanner_admin_database_default_version, "spanner_admin_database"): - s.move(library, excludes=["google/cloud/spanner_admin_database/**", "*.*", "docs/index.rst"]) +for library in get_staging_dirs( + spanner_admin_database_default_version, "spanner_admin_database" +): + s.move( + library, + excludes=["google/cloud/spanner_admin_database/**", "*.*", "docs/index.rst"], + ) s.remove_staging_dirs() @@ -116,9 +143,11 @@ def get_staging_dirs( s.replace( ".kokoro/build.sh", "# Remove old nox", - "# Set up creating a new instance for each system test run\n" - "export GOOGLE_CLOUD_TESTS_CREATE_SPANNER_INSTANCE=true\n" - "\n\g<0>", + """\ +# Set up creating a new instance for each system test run +export GOOGLE_CLOUD_TESTS_CREATE_SPANNER_INSTANCE=true + +# Remove old nox""", ) # Update samples folder in CONTRIBUTING.rst @@ -134,15 +163,21 @@ def get_staging_dirs( # Customize noxfile.py # ---------------------------------------------------------------------------- + def place_before(path, text, *before_text, escape=None): replacement = "\n".join(before_text) + "\n" + text if escape: for c in escape: - text = text.replace(c, '\\' + c) + text = text.replace(c, "\\" + c) s.replace([path], text, replacement) + open_telemetry_test = """ + # XXX Work around Kokoro image's older pip, which borks the OT install. + session.run("pip", "install", "--upgrade", "pip") session.install("-e", ".[tracing]", "-c", constraints_path) + # XXX: Dump installed versions to debug OT issue + session.run("pip", "list") # Run py.test against the unit tests with OpenTelemetry. 
     session.run(
@@ -164,10 +199,10 @@ def place_before(path, text, *before_text, escape=None):
     "noxfile.py",
     "@nox.session(python=UNIT_TEST_PYTHON_VERSIONS)",
     open_telemetry_test,
-    escape="()"
+    escape="()",
 )
 
-skip_tests_if_env_var_not_set ="""# Sanity check: Only run tests if the environment variable is set.
+skip_tests_if_env_var_not_set = """# Sanity check: Only run tests if the environment variable is set.
     if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", "") and not os.environ.get(
         "SPANNER_EMULATOR_HOST", ""
     ):
@@ -180,7 +215,7 @@ def place_before(path, text, *before_text, escape=None):
     "noxfile.py",
     "# Install pyopenssl for mTLS testing.",
     skip_tests_if_env_var_not_set,
-    escape="()"
+    escape="()",
 )
 
 s.replace(
@@ -190,25 +225,25 @@ def place_before(path, text, *before_text, escape=None):
     "--cov=tests/unit",""",
     """\"--cov=google.cloud.spanner",
     "--cov=google.cloud",
-    "--cov=tests.unit","""
+    "--cov=tests.unit",""",
 )
 
 s.replace(
     "noxfile.py",
-    """session.install\("-e", "."\)""",
-    """session.install("-e", ".[tracing]")"""
+    r"""session.install\("-e", "."\)""",
+    """session.install("-e", ".[tracing]")""",
 )
 
 s.replace(
     "noxfile.py",
-    """# Install all test dependencies, then install this package into the
+    r"""# Install all test dependencies, then install this package into the
     # virtualenv's dist-packages.
     session.install\("mock", "pytest", "google-cloud-testutils", "-c", constraints_path\)
     session.install\("-e", ".", "-c", constraints_path\)""",
     """# Install all test dependencies, then install this package into the
    # virtualenv's dist-packages.
    session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path)
-    session.install("-e", ".[tracing]", "-c", constraints_path)"""
+    session.install("-e", ".[tracing]", "-c", constraints_path)""",
 )
 
 s.shell.run(["nox", "-s", "blacken"], hide_output=False)
diff --git a/samples/samples/conftest.py b/samples/samples/conftest.py
index f4d21c6926..6b047a31da 100644
--- a/samples/samples/conftest.py
+++ b/samples/samples/conftest.py
@@ -24,6 +24,8 @@
 import pytest
 from test_utils import retry
 
+retry_429 = retry.RetryErrors(exceptions.ResourceExhausted, delay=15)
+
 
 @pytest.fixture(scope="module")
 def sample_name():
@@ -47,7 +49,7 @@ def scrub_instance_ignore_not_found(to_scrub):
         for backup_pb in to_scrub.list_backups():
             backup.Backup.from_pb(backup_pb, to_scrub).delete()
 
-        to_scrub.delete()
+        retry_429(to_scrub.delete)()
     except exceptions.NotFound:
         pass
 
@@ -107,7 +109,6 @@ def sample_instance(
             "created": str(int(time.time())),
         },
     )
-    retry_429 = retry.RetryErrors(exceptions.ResourceExhausted, delay=15)
     op = retry_429(sample_instance.create)()
     op.result(120)  # block until completion
 
@@ -143,7 +144,6 @@ def multi_region_instance(
             "created": str(int(time.time()))
         },
     )
-    retry_429 = retry.RetryErrors(exceptions.ResourceExhausted, delay=15)
     op = retry_429(multi_region_instance.create)()
     op.result(120)  # block until completion
 
diff --git a/samples/samples/snippets.py b/samples/samples/snippets.py
index 0cc68856ea..9005d9a131 100644
--- a/samples/samples/snippets.py
+++ b/samples/samples/snippets.py
@@ -30,6 +30,8 @@
 from google.cloud import spanner
 from google.cloud.spanner_v1 import param_types
 
+OPERATION_TIMEOUT_SECONDS = 240
+
 
 # [START spanner_create_instance]
 def create_instance(instance_id):
@@ -55,7 +57,7 @@ def create_instance(instance_id):
     operation = instance.create()
 
     print("Waiting for operation to complete...")
-    operation.result(120)
+    operation.result(OPERATION_TIMEOUT_SECONDS)
 
     print("Created instance {}".format(instance_id))
 
@@ -87,7 +89,7 @@ def create_instance_with_processing_units(instance_id, processing_units):
     operation = instance.create()
 
     print("Waiting for operation to complete...")
-    operation.result(120)
+    operation.result(OPERATION_TIMEOUT_SECONDS)
 
     print("Created instance {} with {} processing units".format(
         instance_id, instance.processing_units))
@@ -170,7 +172,7 @@ def create_database(instance_id, database_id):
     operation = database.create()
 
     print("Waiting for operation to complete...")
-    operation.result(120)
+    operation.result(OPERATION_TIMEOUT_SECONDS)
 
     print("Created database {} on instance {}".format(database_id, instance_id))
 
@@ -206,7 +208,7 @@ def create_database_with_encryption_key(instance_id, database_id, kms_key_name):
     operation = database.create()
 
     print("Waiting for operation to complete...")
-    operation.result(120)
+    operation.result(OPERATION_TIMEOUT_SECONDS)
 
     print("Database {} created with encryption key {}".format(
         database.name, database.encryption_config.kms_key_name))
@@ -245,7 +247,7 @@ def create_database_with_default_leader(
     operation = database.create()
 
     print("Waiting for operation to complete...")
-    operation.result(120)
+    operation.result(OPERATION_TIMEOUT_SECONDS)
 
     database.reload()
 
@@ -271,7 +273,7 @@ def update_database_with_default_leader(
     operation = database.update_ddl(["ALTER DATABASE {}"
                                      " SET OPTIONS (default_leader = '{}')".format(database_id, default_leader)])
 
-    operation.result(120)
+    operation.result(OPERATION_TIMEOUT_SECONDS)
 
     database.reload()
 
@@ -499,7 +501,7 @@ def add_index(instance_id, database_id):
     )
 
     print("Waiting for operation to complete...")
-    operation.result(120)
+    operation.result(OPERATION_TIMEOUT_SECONDS)
 
     print("Added the AlbumsByAlbumTitle index.")
 
@@ -598,7 +600,7 @@ def add_storing_index(instance_id, database_id):
     )
 
     print("Waiting for operation to complete...")
-    operation.result(120)
+    operation.result(OPERATION_TIMEOUT_SECONDS)
 
     print("Added the AlbumsByAlbumTitle2 index.")
 
@@ -651,7 +653,7 @@ def add_column(instance_id, database_id):
     )
 
     print("Waiting for operation to complete...")
-    operation.result(120)
+    operation.result(OPERATION_TIMEOUT_SECONDS)
 
     print("Added the MarketingBudget column.")
 
@@ -816,7 +818,7 @@ def create_table_with_timestamp(instance_id, database_id):
     )
 
     print("Waiting for operation to complete...")
-    operation.result(120)
+    operation.result(OPERATION_TIMEOUT_SECONDS)
 
     print(
         "Created Performances table on database {} on instance {}".format(
@@ -871,7 +873,7 @@ def add_timestamp_column(instance_id, database_id):
     )
 
     print("Waiting for operation to complete...")
-    operation.result(120)
+    operation.result(OPERATION_TIMEOUT_SECONDS)
 
     print(
         'Altered table "Albums" on database {} on instance {}.'.format(
@@ -964,7 +966,7 @@ def add_numeric_column(instance_id, database_id):
     operation = database.update_ddl(["ALTER TABLE Venues ADD COLUMN Revenue NUMERIC"])
 
     print("Waiting for operation to complete...")
-    operation.result(120)
+    operation.result(OPERATION_TIMEOUT_SECONDS)
 
     print(
         'Altered table "Venues" on database {} on instance {}.'.format(
@@ -1564,7 +1566,7 @@ def create_table_with_datatypes(instance_id, database_id):
     )
 
     print("Waiting for operation to complete...")
-    operation.result(120)
+    operation.result(OPERATION_TIMEOUT_SECONDS)
 
     print(
         "Created Venues table on database {} on instance {}".format(
diff --git a/samples/samples/snippets_test.py b/samples/samples/snippets_test.py
index 636b4b5e91..7a6134ff8d 100644
--- a/samples/samples/snippets_test.py
+++ b/samples/samples/snippets_test.py
@@ -40,6 +40,8 @@
 INTERLEAVE IN PARENT Singers ON DELETE CASCADE
 """
 
+retry_429 = RetryErrors(exceptions.ResourceExhausted, delay=15)
+
 
 @pytest.fixture(scope="module")
 def sample_name():
@@ -96,9 +98,9 @@ def default_leader():
 def test_create_instance_explicit(spanner_client, create_instance_id):
     # Rather than re-use 'sample_isntance', we create a new instance, to
     # ensure that the 'create_instance' snippet is tested.
-    snippets.create_instance(create_instance_id)
+    retry_429(snippets.create_instance)(create_instance_id)
     instance = spanner_client.instance(create_instance_id)
-    instance.delete()
+    retry_429(instance.delete)()
 
 
 def test_create_database_explicit(sample_instance, create_database_id):
@@ -111,7 +113,6 @@ def test_create_database_explicit(sample_instance, create_database_id):
 
 def test_create_instance_with_processing_units(capsys, lci_instance_id):
     processing_units = 500
-    retry_429 = RetryErrors(exceptions.ResourceExhausted, delay=15)
     retry_429(snippets.create_instance_with_processing_units)(
         lci_instance_id, processing_units,
     )
@@ -120,7 +121,7 @@ def test_create_instance_with_processing_units(capsys, lci_instance_id):
     assert "{} processing units".format(processing_units) in out
     spanner_client = spanner.Client()
     instance = spanner_client.instance(lci_instance_id)
-    instance.delete()
+    retry_429(instance.delete)()
 
 
 def test_create_database_with_encryption_config(capsys, instance_id, cmek_database_id, kms_key_name):
diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt
index e69de29bb2..6d34489a53 100644
--- a/testing/constraints-3.9.txt
+++ b/testing/constraints-3.9.txt
@@ -0,0 +1,2 @@
+# Allow prerelease requirements
+--pre
diff --git a/tests/unit/test__helpers.py b/tests/unit/test__helpers.py
index 2ee66ed154..cfdcea1ea0 100644
--- a/tests/unit/test__helpers.py
+++ b/tests/unit/test__helpers.py
@@ -192,12 +192,12 @@ def test_w_date(self):
         self.assertEqual(value_pb.string_value, today.isoformat())
 
     def test_w_timestamp_w_nanos(self):
-        import pytz
+        import datetime
         from google.protobuf.struct_pb2 import Value
         from google.api_core import datetime_helpers
 
         when = datetime_helpers.DatetimeWithNanoseconds(
-            2016, 12, 20, 21, 13, 47, nanosecond=123456789, tzinfo=pytz.UTC
+            2016, 12, 20, 21, 13, 47, nanosecond=123456789, tzinfo=datetime.timezone.utc
         )
         value_pb = self._callFUT(when)
         self.assertIsInstance(value_pb, Value)
@@ -214,26 +214,23 @@ def test_w_listvalue(self):
 
     def test_w_datetime(self):
         import datetime
-        import pytz
         from google.protobuf.struct_pb2 import Value
         from google.api_core import datetime_helpers
 
-        now = datetime.datetime.utcnow().replace(tzinfo=pytz.UTC)
+        now = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc)
         value_pb = self._callFUT(now)
         self.assertIsInstance(value_pb, Value)
         self.assertEqual(value_pb.string_value, datetime_helpers.to_rfc3339(now))
 
     def test_w_timestamp_w_tz(self):
         import datetime
-        import pytz
         from google.protobuf.struct_pb2 import Value
 
-        when = datetime.datetime(
-            2021, 2, 8, 0, 0, 0, tzinfo=pytz.timezone("US/Mountain")
-        )
+        zone = datetime.timezone(datetime.timedelta(hours=+1), name="CET")
+        when = datetime.datetime(2021, 2, 8, 0, 0, 0, tzinfo=zone)
         value_pb = self._callFUT(when)
         self.assertIsInstance(value_pb, Value)
-        self.assertEqual(value_pb.string_value, "2021-02-08T07:00:00.000000Z")
+        self.assertEqual(value_pb.string_value, "2021-02-07T23:00:00.000000Z")
 
     def test_w_unknown_type(self):
         with self.assertRaises(ValueError):
@@ -463,14 +460,14 @@ def test_w_date(self):
         self.assertEqual(self._callFUT(value_pb, field_type), VALUE)
 
     def test_w_timestamp_wo_nanos(self):
-        import pytz
+        import datetime
         from google.protobuf.struct_pb2 import Value
         from google.api_core import datetime_helpers
         from google.cloud.spanner_v1 import Type
         from google.cloud.spanner_v1 import TypeCode
 
         value = datetime_helpers.DatetimeWithNanoseconds(
-            2016, 12, 20, 21, 13, 47, microsecond=123456, tzinfo=pytz.UTC
+            2016, 12, 20, 21, 13, 47, microsecond=123456, tzinfo=datetime.timezone.utc
         )
         field_type = Type(code=TypeCode.TIMESTAMP)
         value_pb = Value(string_value=datetime_helpers.to_rfc3339(value))
@@ -480,14 +477,14 @@ def test_w_timestamp_wo_nanos(self):
         self.assertEqual(parsed, value)
 
     def test_w_timestamp_w_nanos(self):
-        import pytz
+        import datetime
         from google.protobuf.struct_pb2 import Value
         from google.api_core import datetime_helpers
         from google.cloud.spanner_v1 import Type
         from google.cloud.spanner_v1 import TypeCode
 
         value = datetime_helpers.DatetimeWithNanoseconds(
-            2016, 12, 20, 21, 13, 47, nanosecond=123456789, tzinfo=pytz.UTC
+            2016, 12, 20, 21, 13, 47, nanosecond=123456789, tzinfo=datetime.timezone.utc
         )
         field_type = Type(code=TypeCode.TIMESTAMP)
         value_pb = Value(string_value=datetime_helpers.to_rfc3339(value))
diff --git a/tests/unit/test_backup.py b/tests/unit/test_backup.py
index e80e455dbf..035a2c9605 100644
--- a/tests/unit/test_backup.py
+++ b/tests/unit/test_backup.py
@@ -331,7 +331,7 @@ def test_create_success(self):
         from google.cloud.spanner_admin_database_v1 import CreateBackupEncryptionConfig
         from datetime import datetime
         from datetime import timedelta
-        from pytz import UTC
+        from datetime import timezone
 
         op_future = object()
         client = _Client()
@@ -340,7 +340,7 @@ def test_create_success(self):
         instance = _Instance(self.INSTANCE_NAME, client=client)
 
         version_timestamp = datetime.utcnow() - timedelta(minutes=5)
-        version_timestamp = version_timestamp.replace(tzinfo=UTC)
+        version_timestamp = version_timestamp.replace(tzinfo=timezone.utc)
         expire_timestamp = self._make_timestamp()
         encryption_config = {"encryption_type": 3, "kms_key_name": "key_name"}
         backup = self._make_one(