diff --git a/appengine/flexible/numpy/requirements.txt b/appengine/flexible/numpy/requirements.txt index f5ac4b59c7d1..64688be63f67 100644 --- a/appengine/flexible/numpy/requirements.txt +++ b/appengine/flexible/numpy/requirements.txt @@ -1,3 +1,3 @@ Flask==1.1.2 gunicorn==20.0.4 -numpy==1.18.2 +numpy==1.18.3 diff --git a/appengine/flexible/scipy/requirements.txt b/appengine/flexible/scipy/requirements.txt index 56a414b320c7..1191b9d779fb 100644 --- a/appengine/flexible/scipy/requirements.txt +++ b/appengine/flexible/scipy/requirements.txt @@ -1,6 +1,6 @@ Flask==1.1.2 gunicorn==20.0.4 imageio==2.8.0 -numpy==1.18.2 +numpy==1.18.3 pillow==7.1.1 scipy==1.4.1 diff --git a/bigquery/pandas-gbq-migration/requirements.txt b/bigquery/pandas-gbq-migration/requirements.txt index 80849dd2f512..8eecbe38c8cc 100644 --- a/bigquery/pandas-gbq-migration/requirements.txt +++ b/bigquery/pandas-gbq-migration/requirements.txt @@ -4,4 +4,4 @@ pandas==0.25.3; python_version > '3.0' pandas==0.24.2; python_version < '3.0' pandas-gbq==0.13.1 pyarrow==0.15.1 -grpcio==1.27.2 +grpcio==1.28.1 diff --git a/cloud-sql/sql-server/sqlalchemy/requirements.txt b/cloud-sql/sql-server/sqlalchemy/requirements.txt index dd4016dd1589..41d5312a3cbe 100644 --- a/cloud-sql/sql-server/sqlalchemy/requirements.txt +++ b/cloud-sql/sql-server/sqlalchemy/requirements.txt @@ -1,4 +1,4 @@ -Flask==1.1.1 +Flask==1.1.2 SQLAlchemy==1.3.13 pyodbc==4.0.30 diff --git a/datastore/cloud-ndb/flask_app_test.py b/datastore/cloud-ndb/flask_app_test.py index 8296a1ef09ff..f48085a54a42 100644 --- a/datastore/cloud-ndb/flask_app_test.py +++ b/datastore/cloud-ndb/flask_app_test.py @@ -14,8 +14,9 @@ import uuid -import pytest +import backoff +import pytest from google.cloud import ndb import flask_app @@ -39,9 +40,13 @@ def test_index(test_book): flask_app.app.testing = True client = flask_app.app.test_client() - r = client.get('/') - assert r.status_code == 200 - assert test_book.title in r.data.decode('utf-8') + 
@backoff.on_exception(backoff.expo, AssertionError, max_time=60) + def eventually_consistent_test(): + r = client.get('/') + assert r.status_code == 200 + assert test_book.title in r.data.decode('utf-8') + + eventually_consistent_test() def test_ndb_wsgi_middleware(): diff --git a/datastore/cloud-ndb/quickstart_test.py b/datastore/cloud-ndb/quickstart_test.py index 2e597d39638e..77deca15d907 100644 --- a/datastore/cloud-ndb/quickstart_test.py +++ b/datastore/cloud-ndb/quickstart_test.py @@ -14,6 +14,7 @@ import uuid +import backoff import pytest import quickstart @@ -34,6 +35,10 @@ def test_book(): def test_quickstart(capsys, test_book): - quickstart.list_books() - out, _ = capsys.readouterr() - assert test_book.title in out + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + def eventually_consistent_test(): + quickstart.list_books() + out, _ = capsys.readouterr() + assert test_book.title in out + + eventually_consistent_test() diff --git a/datastore/cloud-ndb/requirements-test.txt b/datastore/cloud-ndb/requirements-test.txt index 781d4326c947..8855f3cf1f88 100644 --- a/datastore/cloud-ndb/requirements-test.txt +++ b/datastore/cloud-ndb/requirements-test.txt @@ -1 +1,2 @@ +backoff==1.10.0 pytest==5.3.2 diff --git a/datastore/schedule-export/README.md b/datastore/schedule-export/README.md new file mode 100644 index 000000000000..a8501cddc34b --- /dev/null +++ b/datastore/schedule-export/README.md @@ -0,0 +1,5 @@ +# Scheduling Datastore exports with Cloud Functions and Cloud Scheduler + +This sample application demonstrates how to schedule exports of your Datastore entities. 
To deploy this sample, see: + +[Scheduling exports](https://cloud.google.com/datastore/docs/schedule-export) diff --git a/datastore/schedule-export/main.py b/datastore/schedule-export/main.py new file mode 100644 index 000000000000..f6381e0bce54 --- /dev/null +++ b/datastore/schedule-export/main.py @@ -0,0 +1,43 @@ +import base64 +import json +import os + +from googleapiclient.discovery import build + +datastore = build('datastore', 'v1') +project_id = os.environ.get('GCP_PROJECT') + + +def datastore_export(event, context): + '''Triggers a Datastore export from a Cloud Scheduler job. + + Args: + event (dict): event[data] must contain a json object encoded in + base-64. Cloud Scheduler encodes payloads in base-64 by default. + Object must include a 'bucket' value and can include 'kinds' + and 'namespaceIds' values. + context (google.cloud.functions.Context): The Cloud Functions event + metadata. + ''' + + json_data = json.loads(base64.b64decode(event['data']).decode('utf-8')) + bucket = json_data['bucket'] + entity_filter = {} + + if 'kinds' in json_data: + entity_filter['kinds'] = json_data['kinds'] + + if 'namespaceIds' in json_data: + entity_filter['namespaceIds'] = json_data['namespaceIds'] + + request_body = { + 'outputUrlPrefix': bucket, + 'entityFilter': entity_filter + } + + export_request = datastore.projects().export( + projectId=project_id, + body=request_body + ) + response = export_request.execute() + print(response) diff --git a/datastore/schedule-export/requirements-test.txt b/datastore/schedule-export/requirements-test.txt new file mode 100644 index 000000000000..7e2ff41603d3 --- /dev/null +++ b/datastore/schedule-export/requirements-test.txt @@ -0,0 +1 @@ +pytest==5.3.2 \ No newline at end of file diff --git a/datastore/schedule-export/requirements.txt b/datastore/schedule-export/requirements.txt new file mode 100644 index 000000000000..70101dfd7f9e --- /dev/null +++ b/datastore/schedule-export/requirements.txt @@ -0,0 +1 @@ 
+google-api-python-client>=1.7.12 \ No newline at end of file diff --git a/datastore/schedule-export/schedule_export_test.py b/datastore/schedule-export/schedule_export_test.py new file mode 100644 index 000000000000..daf72cf019d5 --- /dev/null +++ b/datastore/schedule-export/schedule_export_test.py @@ -0,0 +1,74 @@ +# Copyright 2019 Google LLC All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import base64 + +from unittest.mock import Mock + +import main + +mock_context = Mock() +mock_context.event_id = '617187464135194' +mock_context.timestamp = '2020-04-15T22:09:03.761Z' + + +def test_datastore_export(capsys): + # Test an export without an entity filter + bucket = 'gs://my-bucket' + json_string = '{{ "bucket": "{bucket}" }}'.format(bucket=bucket) + + # Encode data like Cloud Scheduler + data = bytes(json_string, 'utf-8') + data_encoded = base64.b64encode(data) + event = {"data": data_encoded} + + # Mock the Datastore service + mockDatastore = Mock() + main.datastore = mockDatastore + + # Call tested function + main.datastore_export(event, mock_context) + out, err = capsys.readouterr() + export_args = mockDatastore.projects().export.call_args[1] + req_body = export_args['body'] + # Assert request includes test values + assert req_body['outputUrlPrefix'] == bucket + + +def test_datastore_export_entity_filter(capsys): + # Test an export with an entity filter + bucket = 'gs://my-bucket' + kinds = 'Users,Tasks' + namespaceIds = 
'Customer831,Customer157' + json_string = '{{ "bucket": "{bucket}", "kinds": "{kinds}", "namespaceIds": "{namespaceIds}" }}'.format( + bucket=bucket, kinds=kinds, namespaceIds=namespaceIds) + + # Encode data like Cloud Scheduler + data = bytes(json_string, 'utf-8') + data_encoded = base64.b64encode(data) + event = {"data": data_encoded} + + # Mock the Datastore service + mockDatastore = Mock() + main.datastore = mockDatastore + + # Call tested function + main.datastore_export(event, mock_context) + out, err = capsys.readouterr() + export_args = mockDatastore.projects().export.call_args[1] + req_body = export_args['body'] + # Assert request includes test values + assert req_body['outputUrlPrefix'] == bucket + assert req_body['entityFilter']['kinds'] == kinds + assert req_body['entityFilter']['namespaceIds'] == namespaceIds diff --git a/healthcare/api-client/v1/hl7v2/hl7v2_messages_test.py b/healthcare/api-client/v1/hl7v2/hl7v2_messages_test.py index 891aad5976ff..d0ca903d0fb7 100644 --- a/healthcare/api-client/v1/hl7v2/hl7v2_messages_test.py +++ b/healthcare/api-client/v1/hl7v2/hl7v2_messages_test.py @@ -17,8 +17,8 @@ import sys import uuid +import backoff from googleapiclient.errors import HttpError -from retrying import retry # Add datasets for bootstrapping datasets for testing sys.path.append(os.path.join(os.path.dirname(__file__), "..", "datasets")) # noqa @@ -36,18 +36,9 @@ label_value = "TRUE" -def retry_if_server_exception(exception): - return isinstance(exception, (HttpError)) - - @pytest.fixture(scope="module") def test_dataset(): - @retry( - wait_exponential_multiplier=1000, - wait_exponential_max=10000, - stop_max_attempt_number=10, - retry_on_exception=retry_if_server_exception, - ) + @backoff.on_exception(backoff.expo, HttpError, max_time=60) def create(): try: datasets.create_dataset(project_id, cloud_region, dataset_id) @@ -65,12 +56,7 @@ def create(): yield # Clean up - @retry( - wait_exponential_multiplier=1000, - wait_exponential_max=10000, - 
stop_max_attempt_number=10, - retry_on_exception=retry_if_server_exception, - ) + @backoff.on_exception(backoff.expo, HttpError, max_time=60) def clean_up(): try: datasets.delete_dataset(project_id, cloud_region, dataset_id) @@ -86,12 +72,7 @@ def clean_up(): @pytest.fixture(scope="module") def test_hl7v2_store(): - @retry( - wait_exponential_multiplier=1000, - wait_exponential_max=10000, - stop_max_attempt_number=10, - retry_on_exception=retry_if_server_exception, - ) + @backoff.on_exception(backoff.expo, HttpError, max_time=60) def create(): try: hl7v2_stores.create_hl7v2_store( @@ -115,12 +96,7 @@ def create(): yield # Clean up - @retry( - wait_exponential_multiplier=1000, - wait_exponential_max=10000, - stop_max_attempt_number=10, - retry_on_exception=retry_if_server_exception, - ) + @backoff.on_exception(backoff.expo, HttpError, max_time=60) def clean_up(): try: hl7v2_stores.delete_hl7v2_store( @@ -145,12 +121,20 @@ def test_CRUD_hl7v2_message(test_dataset, test_hl7v2_store, capsys): project_id, cloud_region, dataset_id, hl7v2_store_id, hl7v2_message_file ) - hl7v2_messages_list = hl7v2_messages.list_hl7v2_messages( - project_id, cloud_region, dataset_id, hl7v2_store_id - ) + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + def run_eventually_consistent_test(): + hl7v2_messages_list = hl7v2_messages.list_hl7v2_messages( + project_id, cloud_region, dataset_id, hl7v2_store_id + ) + + assert len(hl7v2_messages_list) > 0 + hl7v2_message_name = hl7v2_messages_list[0].get("name") + elms = hl7v2_message_name.split("/", 9) + assert len(elms) >= 10 + hl7v2_message_id = elms[9] + return hl7v2_message_id - hl7v2_message_name = hl7v2_messages_list[0].get("name") - hl7v2_message_id = hl7v2_message_name.split("/", 9)[9] + hl7v2_message_id = run_eventually_consistent_test() hl7v2_messages.get_hl7v2_message( project_id, cloud_region, dataset_id, hl7v2_store_id, hl7v2_message_id @@ -173,12 +157,20 @@ def test_ingest_hl7v2_message(test_dataset, 
test_hl7v2_store, capsys): project_id, cloud_region, dataset_id, hl7v2_store_id, hl7v2_message_file ) - hl7v2_messages_list = hl7v2_messages.list_hl7v2_messages( - project_id, cloud_region, dataset_id, hl7v2_store_id - ) + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + def run_eventually_consistent_test(): + hl7v2_messages_list = hl7v2_messages.list_hl7v2_messages( + project_id, cloud_region, dataset_id, hl7v2_store_id + ) - hl7v2_message_name = hl7v2_messages_list[0].get("name") - hl7v2_message_id = hl7v2_message_name.split("/", 9)[9] + assert len(hl7v2_messages_list) > 0 + hl7v2_message_name = hl7v2_messages_list[0].get("name") + elms = hl7v2_message_name.split("/", 9) + assert len(elms) >= 10 + hl7v2_message_id = elms[9] + return hl7v2_message_id + + hl7v2_message_id = run_eventually_consistent_test() hl7v2_messages.get_hl7v2_message( project_id, cloud_region, dataset_id, hl7v2_store_id, hl7v2_message_id @@ -201,13 +193,20 @@ def test_patch_hl7v2_message(test_dataset, test_hl7v2_store, capsys): project_id, cloud_region, dataset_id, hl7v2_store_id, hl7v2_message_file ) - hl7v2_messages_list = hl7v2_messages.list_hl7v2_messages( - project_id, cloud_region, dataset_id, hl7v2_store_id - ) + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + def run_eventually_consistent_test(): + hl7v2_messages_list = hl7v2_messages.list_hl7v2_messages( + project_id, cloud_region, dataset_id, hl7v2_store_id + ) + + assert len(hl7v2_messages_list) > 0 + hl7v2_message_name = hl7v2_messages_list[0].get("name") + elms = hl7v2_message_name.split("/", 9) + assert len(elms) >= 10 + hl7v2_message_id = elms[9] + return hl7v2_message_id - assert len(hl7v2_messages_list) > 0 - hl7v2_message_name = hl7v2_messages_list[0].get("name") - hl7v2_message_id = hl7v2_message_name.split("/", 9)[9] + hl7v2_message_id = run_eventually_consistent_test() hl7v2_messages.patch_hl7v2_message( project_id, diff --git a/healthcare/api-client/v1/hl7v2/hl7v2_stores_test.py 
b/healthcare/api-client/v1/hl7v2/hl7v2_stores_test.py index aa836f5b5fd1..48f9afa5f0c0 100644 --- a/healthcare/api-client/v1/hl7v2/hl7v2_stores_test.py +++ b/healthcare/api-client/v1/hl7v2/hl7v2_stores_test.py @@ -17,8 +17,8 @@ import sys import uuid +import backoff from googleapiclient.errors import HttpError -from retrying import retry # Add datasets for bootstrapping datasets for testing sys.path.append(os.path.join(os.path.dirname(__file__), "..", "datasets")) # noqa @@ -38,12 +38,7 @@ def retry_if_server_exception(exception): @pytest.fixture(scope="module") def test_dataset(): - @retry( - wait_exponential_multiplier=1000, - wait_exponential_max=10000, - stop_max_attempt_number=10, - retry_on_exception=retry_if_server_exception, - ) + @backoff.on_exception(backoff.expo, HttpError, max_time=60) def create(): try: datasets.create_dataset(project_id, cloud_region, dataset_id) @@ -61,12 +56,7 @@ def create(): yield # Clean up - @retry( - wait_exponential_multiplier=1000, - wait_exponential_max=10000, - stop_max_attempt_number=10, - retry_on_exception=retry_if_server_exception, - ) + @backoff.on_exception(backoff.expo, HttpError, max_time=60) def clean_up(): try: datasets.delete_dataset(project_id, cloud_region, dataset_id) @@ -82,12 +72,7 @@ def clean_up(): @pytest.fixture(scope="module") def test_hl7v2_store(): - @retry( - wait_exponential_multiplier=1000, - wait_exponential_max=10000, - stop_max_attempt_number=10, - retry_on_exception=retry_if_server_exception, - ) + @backoff.on_exception(backoff.expo, HttpError, max_time=60) def create(): try: hl7v2_stores.create_hl7v2_store( @@ -111,12 +96,7 @@ def create(): yield # Clean up - @retry( - wait_exponential_multiplier=1000, - wait_exponential_max=10000, - stop_max_attempt_number=10, - retry_on_exception=retry_if_server_exception, - ) + @backoff.on_exception(backoff.expo, HttpError, max_time=60) def clean_up(): try: hl7v2_stores.delete_hl7v2_store( @@ -141,12 +121,7 @@ def crud_hl7v2_store_id(): yield hl7v2_store_id 
# Clean up - @retry( - wait_exponential_multiplier=1000, - wait_exponential_max=10000, - stop_max_attempt_number=10, - retry_on_exception=retry_if_server_exception, - ) + @backoff.on_exception(backoff.expo, HttpError, max_time=60) def clean_up(): try: hl7v2_stores.delete_hl7v2_store( diff --git a/healthcare/api-client/v1/hl7v2/requirements-test.txt b/healthcare/api-client/v1/hl7v2/requirements-test.txt index 826c2b7161c4..132ae92cb323 100644 --- a/healthcare/api-client/v1/hl7v2/requirements-test.txt +++ b/healthcare/api-client/v1/hl7v2/requirements-test.txt @@ -1,2 +1,2 @@ pytest==5.3.2 -retrying==1.3.3 +backoff==1.10.0 diff --git a/iot/api-client/mqtt_example/cloudiot_mqtt_example_test.py b/iot/api-client/mqtt_example/cloudiot_mqtt_example_test.py index 4f57b31ca280..50b6accd4134 100644 --- a/iot/api-client/mqtt_example/cloudiot_mqtt_example_test.py +++ b/iot/api-client/mqtt_example/cloudiot_mqtt_example_test.py @@ -15,67 +15,38 @@ import os import sys import time -import uuid - -from google.cloud import pubsub - -# Add manager for bootstrapping device registry / device for testing -sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'manager')) # noqa -from flaky import flaky -import manager import pytest +# Add manager for bootstrapping device registry / device for testing +sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'manager')) # noqa import cloudiot_mqtt_example - +import manager +from fixtures import test_topic # noqa +from fixtures import test_registry_id # noqa +from fixtures import test_device_id # noqa +from fixtures import device_and_gateways # noqa cloud_region = 'us-central1' -device_id_template = 'test-device-{}' ca_cert_path = 'resources/roots.pem' -rsa_cert_path = 'resources/rsa_cert.pem' rsa_private_path = 'resources/rsa_private.pem' -topic_id = 'test-device-events-{}'.format(int(time.time())) - project_id = os.environ['GCLOUD_PROJECT'] service_account_json = os.environ['GOOGLE_APPLICATION_CREDENTIALS'] -pubsub_topic = 
'projects/{}/topics/{}'.format(project_id, topic_id) -registry_id = 'test-registry-{}-{}'.format(uuid.uuid1(), int(time.time())) - mqtt_bridge_hostname = 'mqtt.googleapis.com' mqtt_bridge_port = 443 -@pytest.fixture(scope='module') -def test_topic(): - topic = manager.create_iot_topic(project_id, topic_id) - - yield topic - - pubsub_client = pubsub.PublisherClient() - topic_path = pubsub_client.topic_path(project_id, topic_id) - pubsub_client.delete_topic(topic_path) - - -def test_event(test_topic, capsys): - device_id = device_id_template.format('RSA256') - manager.open_registry( - service_account_json, project_id, cloud_region, pubsub_topic, - registry_id) - - manager.create_rs256_device( - service_account_json, project_id, cloud_region, registry_id, - device_id, rsa_cert_path) - +def test_event(test_topic, test_registry_id, test_device_id, capsys): # noqa manager.get_device( - service_account_json, project_id, cloud_region, registry_id, - device_id) + service_account_json, project_id, cloud_region, test_registry_id, + test_device_id) sub_topic = 'events' - mqtt_topic = '/devices/{}/{}'.format(device_id, sub_topic) + mqtt_topic = '/devices/{}/{}'.format(test_device_id, sub_topic) client = cloudiot_mqtt_example.get_client( - project_id, cloud_region, registry_id, device_id, + project_id, cloud_region, test_registry_id, test_device_id, rsa_private_path, 'RS256', ca_cert_path, 'mqtt.googleapis.com', 443) @@ -85,39 +56,23 @@ def test_event(test_topic, capsys): client.loop_stop() manager.get_state( - service_account_json, project_id, cloud_region, registry_id, - device_id) - - manager.delete_device( - service_account_json, project_id, cloud_region, registry_id, - device_id) - - manager.delete_registry( - service_account_json, project_id, cloud_region, registry_id) + service_account_json, project_id, cloud_region, test_registry_id, + test_device_id) out, _ = capsys.readouterr() assert 'on_publish' in out -def test_state(test_topic, capsys): - device_id = 
device_id_template.format('RSA256') - manager.open_registry( - service_account_json, project_id, cloud_region, pubsub_topic, - registry_id) - - manager.create_rs256_device( - service_account_json, project_id, cloud_region, registry_id, - device_id, rsa_cert_path) - +def test_state(test_topic, test_registry_id, test_device_id, capsys): # noqa manager.get_device( - service_account_json, project_id, cloud_region, registry_id, - device_id) + service_account_json, project_id, cloud_region, test_registry_id, + test_device_id) sub_topic = 'state' - mqtt_topic = '/devices/{}/{}'.format(device_id, sub_topic) + mqtt_topic = '/devices/{}/{}'.format(test_device_id, sub_topic) client = cloudiot_mqtt_example.get_client( - project_id, cloud_region, registry_id, device_id, + project_id, cloud_region, test_registry_id, test_device_id, rsa_private_path, 'RS256', ca_cert_path, 'mqtt.googleapis.com', 443) client.publish(mqtt_topic, 'state test', qos=1) @@ -128,37 +83,21 @@ def test_state(test_topic, capsys): client.loop_stop() manager.get_state( - service_account_json, project_id, cloud_region, registry_id, - device_id) - - manager.delete_device( - service_account_json, project_id, cloud_region, registry_id, - device_id) - - manager.delete_registry( - service_account_json, project_id, cloud_region, registry_id) + service_account_json, project_id, cloud_region, test_registry_id, + test_device_id) out, _ = capsys.readouterr() assert 'on_publish' in out assert 'binary_data: "state test"' in out -def test_config(test_topic, capsys): - device_id = device_id_template.format('RSA256') - manager.open_registry( - service_account_json, project_id, cloud_region, pubsub_topic, - registry_id) - - manager.create_rs256_device( - service_account_json, project_id, cloud_region, registry_id, - device_id, rsa_cert_path) - +def test_config(test_topic, test_registry_id, test_device_id, capsys): # noqa manager.get_device( - service_account_json, project_id, cloud_region, registry_id, - device_id) + 
service_account_json, project_id, cloud_region, test_registry_id, + test_device_id) client = cloudiot_mqtt_example.get_client( - project_id, cloud_region, registry_id, device_id, + project_id, cloud_region, test_registry_id, test_device_id, rsa_private_path, 'RS256', ca_cert_path, 'mqtt.googleapis.com', 443) client.loop_start() @@ -168,43 +107,19 @@ def test_config(test_topic, capsys): client.loop_stop() manager.get_state( - service_account_json, project_id, cloud_region, registry_id, - device_id) - - manager.delete_device( - service_account_json, project_id, cloud_region, registry_id, - device_id) - - manager.delete_registry( - service_account_json, project_id, cloud_region, registry_id) + service_account_json, project_id, cloud_region, test_registry_id, + test_device_id) out, _ = capsys.readouterr() assert "Received message" in out - assert '/devices/{}/config'.format(device_id) in out - + assert '/devices/{}/config'.format(test_device_id) in out -@flaky(max_runs=5, min_passes=1) -def test_receive_command(capsys): - device_id = device_id_template.format('RSA256') - manager.create_registry( - service_account_json, project_id, cloud_region, pubsub_topic, - registry_id) - - exists = False - devices = manager.list_devices( - service_account_json, project_id, cloud_region, registry_id) - for device in devices: - if device.get('id') == device_id: - exists = True - - if not exists: - manager.create_rs256_device( - service_account_json, project_id, cloud_region, registry_id, - device_id, rsa_cert_path) +@pytest.mark.flaky(max_runs=5, min_passes=1) +def test_receive_command(test_registry_id, test_device_id, capsys): # noqa # Exercize the functionality client = cloudiot_mqtt_example.get_client( - project_id, cloud_region, registry_id, device_id, + project_id, cloud_region, test_registry_id, test_device_id, rsa_private_path, 'RS256', ca_cert_path, 'mqtt.googleapis.com', 443) client.loop_start() @@ -215,42 +130,23 @@ def test_receive_command(capsys): time.sleep(1) 
manager.send_command( - service_account_json, project_id, cloud_region, registry_id, - device_id, 'me want cookies') + service_account_json, project_id, cloud_region, test_registry_id, + test_device_id, 'me want cookies') # Process commands for i in range(1, 5): client.loop() time.sleep(1) - # Clean up - manager.delete_device( - service_account_json, project_id, cloud_region, registry_id, - device_id) - manager.delete_registry( - service_account_json, project_id, cloud_region, registry_id) - out, _ = capsys.readouterr() assert 'on_connect' in out # Verify can connect assert '\'me want cookies\'' in out # Verify can receive command -@flaky(max_runs=5, min_passes=1) -def test_gateway_listen_for_bound_device_configs(test_topic, capsys): - gateway_id = device_id_template.format('RS256') - device_id = device_id_template.format('noauthbind') - manager.create_registry( - service_account_json, project_id, cloud_region, pubsub_topic, - registry_id) - manager.create_gateway( - service_account_json, project_id, cloud_region, registry_id, - None, gateway_id, rsa_cert_path, 'RS256') - manager.create_device( - service_account_json, project_id, cloud_region, registry_id, - device_id) - manager.bind_device_to_gateway( - service_account_json, project_id, cloud_region, registry_id, - device_id, gateway_id) +@pytest.mark.flaky(max_runs=5, min_passes=1) +def test_gateway_listen_for_bound_device_configs( + test_topic, test_registry_id, device_and_gateways, capsys): # noqa + (device_id, gateway_id, _) = device_and_gateways # Setup for listening for config messages num_messages = 0 @@ -259,44 +155,19 @@ def test_gateway_listen_for_bound_device_configs(test_topic, capsys): # Connect the gateway cloudiot_mqtt_example.listen_for_messages( - service_account_json, project_id, cloud_region, registry_id, - device_id, gateway_id, num_messages, rsa_private_path, - 'RS256', ca_cert_path, mqtt_bridge_hostname, mqtt_bridge_port, - jwt_exp_time, listen_time, None) - - # Clean up - 
manager.unbind_device_from_gateway( - service_account_json, project_id, cloud_region, registry_id, - device_id, gateway_id) - manager.delete_device( - service_account_json, project_id, cloud_region, registry_id, - device_id) - manager.delete_device( - service_account_json, project_id, cloud_region, registry_id, - gateway_id) - manager.delete_registry( - service_account_json, project_id, cloud_region, registry_id) + service_account_json, project_id, cloud_region, test_registry_id, + device_id, gateway_id, num_messages, rsa_private_path, 'RS256', + ca_cert_path, mqtt_bridge_hostname, mqtt_bridge_port, jwt_exp_time, + listen_time, None) out, _ = capsys.readouterr() assert 'Received message' in out -@flaky(max_runs=5, min_passes=1) -def test_gateway_send_data_for_device(test_topic, capsys): - gateway_id = device_id_template.format('RS256') - device_id = device_id_template.format('noauthbind') - manager.create_registry( - service_account_json, project_id, cloud_region, pubsub_topic, - registry_id) - manager.create_gateway( - service_account_json, project_id, cloud_region, registry_id, - None, gateway_id, rsa_cert_path, 'RS256') - manager.create_device( - service_account_json, project_id, cloud_region, registry_id, - device_id) - manager.bind_device_to_gateway( - service_account_json, project_id, cloud_region, registry_id, - device_id, gateway_id) +@pytest.mark.flaky(max_runs=5, min_passes=1) +def test_gateway_send_data_for_device( + test_topic, test_registry_id, device_and_gateways, capsys): # noqa + (device_id, gateway_id, _) = device_and_gateways # Setup for listening for config messages num_messages = 5 @@ -305,44 +176,19 @@ def test_gateway_send_data_for_device(test_topic, capsys): # Connect the gateway cloudiot_mqtt_example.send_data_from_bound_device( - service_account_json, project_id, cloud_region, registry_id, - device_id, gateway_id, num_messages, rsa_private_path, - 'RS256', ca_cert_path, mqtt_bridge_hostname, mqtt_bridge_port, - jwt_exp_time, listen_time) - 
- # Clean up - manager.unbind_device_from_gateway( - service_account_json, project_id, cloud_region, registry_id, - device_id, gateway_id) - manager.delete_device( - service_account_json, project_id, cloud_region, registry_id, - device_id) - manager.delete_device( - service_account_json, project_id, cloud_region, registry_id, - gateway_id) - manager.delete_registry( - service_account_json, project_id, cloud_region, registry_id) + service_account_json, project_id, cloud_region, test_registry_id, + device_id, gateway_id, num_messages, rsa_private_path, 'RS256', + ca_cert_path, mqtt_bridge_hostname, mqtt_bridge_port, jwt_exp_time, + listen_time) out, _ = capsys.readouterr() assert 'Publishing message 5/5' in out assert 'Out of memory' not in out # Indicates could not connect -def test_gateway_trigger_error_topic(test_topic, capsys): - gateway_id = device_id_template.format('RS256-err') - device_id = device_id_template.format('noauthbind') - manager.create_registry( - service_account_json, project_id, cloud_region, pubsub_topic, - registry_id) - manager.create_gateway( - service_account_json, project_id, cloud_region, registry_id, - None, gateway_id, rsa_cert_path, 'RS256') - manager.create_device( - service_account_json, project_id, cloud_region, registry_id, - device_id) - manager.bind_device_to_gateway( - service_account_json, project_id, cloud_region, registry_id, - device_id, gateway_id) +def test_gateway_trigger_error_topic( + test_topic, test_registry_id, device_and_gateways, capsys): # noqa + (device_id, _, gateway_id) = device_and_gateways # Setup for listening for config messages num_messages = 4 @@ -353,29 +199,14 @@ def trigger_error(client): # Connect the gateway cloudiot_mqtt_example.listen_for_messages( - service_account_json, project_id, cloud_region, registry_id, - device_id, gateway_id, num_messages, rsa_private_path, - 'RS256', ca_cert_path, 'mqtt.googleapis.com', 443, - 20, 42, trigger_error) + service_account_json, project_id, cloud_region, 
test_registry_id, + device_id, gateway_id, num_messages, rsa_private_path, 'RS256', + ca_cert_path, 'mqtt.googleapis.com', 443, 20, 42, trigger_error) # Try to connect the gateway aagin on 8883 cloudiot_mqtt_example.listen_for_messages( - service_account_json, project_id, cloud_region, registry_id, - device_id, gateway_id, num_messages, rsa_private_path, - 'RS256', ca_cert_path, 'mqtt.googleapis.com', 8883, - 20, 15, trigger_error) - - # Clean up - manager.unbind_device_from_gateway( - service_account_json, project_id, cloud_region, registry_id, - device_id, gateway_id) - manager.delete_device( - service_account_json, project_id, cloud_region, registry_id, - device_id) - manager.delete_device( - service_account_json, project_id, cloud_region, registry_id, - gateway_id) - manager.delete_registry( - service_account_json, project_id, cloud_region, registry_id) + service_account_json, project_id, cloud_region, test_registry_id, + device_id, gateway_id, num_messages, rsa_private_path, 'RS256', + ca_cert_path, 'mqtt.googleapis.com', 8883, 20, 15, trigger_error) out, _ = capsys.readouterr() assert 'GATEWAY_ATTACHMENT_ERROR' in out diff --git a/iot/api-client/mqtt_example/cloudiot_mqtt_image.py b/iot/api-client/mqtt_example/cloudiot_mqtt_image.py index 41e4963c0303..60d7ded3e255 100644 --- a/iot/api-client/mqtt_example/cloudiot_mqtt_image.py +++ b/iot/api-client/mqtt_example/cloudiot_mqtt_image.py @@ -18,6 +18,7 @@ import io import os import sys +import threading import time from google.cloud import pubsub @@ -51,15 +52,18 @@ def transmit_image( # [END iot_mqtt_image] -def receive_image(project_id, sub_name, prefix, extension, duration): +def receive_image(project_id, subscription_path, prefix, extension, timeout): """Receieve images transmitted to a PubSub subscription.""" subscriber = pubsub.SubscriberClient() - subscription_path = subscriber.subscription_path(project_id, sub_name) global count count = 0 file_pattern = '{}-{}.{}' + # Set up a callback to acknowledge a 
message. This closes around an event + # so that it can signal that it is done and the main thread can continue. + job_done = threading.Event() + def callback(message): global count try: @@ -71,17 +75,18 @@ def callback(message): file_pattern.format(prefix, count, extension), 'wb') as f: f.write(image_data) message.ack() + # Signal to the main thread that we can exit. + job_done.set() except binascii.Error: message.ack() # To move forward if a message can't be processed subscriber.subscribe(subscription_path, callback=callback) - sleep_count = 0 print('Listening for messages on {}'.format(subscription_path)) - while sleep_count < duration: - time.sleep(1) - sleep_count = sleep_count + 1 + finished = job_done.wait(timeout=timeout) + if not finished: + print("No event received before the timeout.") def parse_command_line_args(): diff --git a/iot/api-client/mqtt_example/cloudiot_mqtt_image_test.py b/iot/api-client/mqtt_example/cloudiot_mqtt_image_test.py index bfebd4a99cef..2a41857982ff 100644 --- a/iot/api-client/mqtt_example/cloudiot_mqtt_image_test.py +++ b/iot/api-client/mqtt_example/cloudiot_mqtt_image_test.py @@ -14,122 +14,57 @@ # limitations under the License. 
import os import sys -import time -import uuid - -from google.cloud import pubsub - -import pytest # Add manager as library -sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'manager')) # noqa +sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'manager')) # noqa import cloudiot_mqtt_image -from flaky import flaky import manager +from fixtures import test_topic # noqa +from fixtures import test_subscription # noqa +from fixtures import test_registry_id # noqa +from fixtures import test_device_id # noqa cloud_region = 'us-central1' -device_id_template = 'test-device-{}' ca_cert_path = 'resources/roots.pem' -rsa_cert_path = 'resources/rsa_cert.pem' rsa_private_path = 'resources/rsa_private.pem' -topic_id = 'test-device-events-{}'.format(int(time.time())) -subscription_name = 'test-device-images-{}'.format(int(time.time())) - project_id = os.environ['GCLOUD_PROJECT'] service_account_json = os.environ['GOOGLE_APPLICATION_CREDENTIALS'] - -pubsub_topic = 'projects/{}/topics/{}'.format(project_id, topic_id) -registry_id = 'test-registry-{}-{}'.format(uuid.uuid1(), int(time.time())) - image_path = './resources/owlister_hootie.png' -mqtt_bridge_hostname = 'mqtt.googleapis.com' -mqtt_bridge_port = 443 - - -@pytest.fixture(scope='module') -def test_topic(): - topic = manager.create_iot_topic(project_id, topic_id) - - yield topic - - pubsub_client = pubsub.PublisherClient() - topic_path = pubsub_client.topic_path(project_id, topic_id) - pubsub_client.delete_topic(topic_path) - -def test_image(test_topic, capsys): +def test_image(test_topic, test_registry_id, test_device_id, capsys): # noqa """Send an inage to a device registry""" - device_id = device_id_template.format('RSA256') - manager.open_registry( - service_account_json, project_id, cloud_region, pubsub_topic, - registry_id) - - manager.create_rs256_device( - service_account_json, project_id, cloud_region, registry_id, - device_id, rsa_cert_path) - manager.get_device( - service_account_json, 
project_id, cloud_region, registry_id, - device_id) + service_account_json, project_id, cloud_region, test_registry_id, + test_device_id) cloudiot_mqtt_image.transmit_image( - cloud_region, registry_id, device_id, rsa_private_path, ca_cert_path, - image_path, project_id, service_account_json) - - # Clean up - manager.delete_device( - service_account_json, project_id, cloud_region, registry_id, - device_id) - manager.delete_registry( - service_account_json, project_id, cloud_region, registry_id) + cloud_region, test_registry_id, test_device_id, rsa_private_path, + ca_cert_path, image_path, project_id, service_account_json) out, _ = capsys.readouterr() assert 'on_publish' in out -@flaky(max_runs=5, min_passes=1) -def test_image_recv(test_topic, capsys): +def test_image_recv( + test_topic, # noqa + test_subscription, # noqa + test_registry_id, # noqa + test_device_id, # noqa + capsys): """Transmit an image with IoT Core and receive it from PubSub""" - subscriber = pubsub.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_id) - subscription_path = subscriber.subscription_path( - project_id, subscription_name) - - subscriber.create_subscription(subscription_path, topic_path) - time.sleep(10) - - device_id = device_id_template.format('RSA256') - manager.open_registry( - service_account_json, project_id, cloud_region, pubsub_topic, - registry_id) - - manager.create_rs256_device( - service_account_json, project_id, cloud_region, registry_id, - device_id, rsa_cert_path) manager.get_device( - service_account_json, project_id, cloud_region, registry_id, - device_id) + service_account_json, project_id, cloud_region, test_registry_id, + test_device_id) cloudiot_mqtt_image.transmit_image( - cloud_region, registry_id, device_id, rsa_private_path, ca_cert_path, - image_path, project_id, service_account_json) - - time.sleep(10) + cloud_region, test_registry_id, test_device_id, rsa_private_path, + ca_cert_path, image_path, project_id, service_account_json) 
cloudiot_mqtt_image.receive_image( - project_id, subscription_name, 'test', 'png', 60) - - # Clean up - subscriber.delete_subscription(subscription_path) - - manager.delete_device( - service_account_json, project_id, cloud_region, registry_id, - device_id) - manager.delete_registry( - service_account_json, project_id, cloud_region, registry_id) + project_id, test_subscription.name, 'test', 'png', 120) out, _ = capsys.readouterr() assert 'Received image' in out diff --git a/iot/api-client/mqtt_example/fixtures.py b/iot/api-client/mqtt_example/fixtures.py new file mode 100644 index 000000000000..4fc7be9aa7e2 --- /dev/null +++ b/iot/api-client/mqtt_example/fixtures.py @@ -0,0 +1,218 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import os +import sys +import time +import uuid + +import backoff +from googleapiclient.errors import HttpError +from google.cloud import pubsub +from google.api_core.exceptions import AlreadyExists +from google.api_core.exceptions import NotFound +import pytest + +# Add manager as library +sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'manager')) # noqa +import manager + + +cloud_region = 'us-central1' +device_id_template = 'test-device-{}' +rsa_cert_path = 'resources/rsa_cert.pem' +topic_id = 'test-device-events-{}'.format(uuid.uuid4()) +subscription_name = 'test-device-images-{}'.format(uuid.uuid4()) +project_id = os.environ['GCLOUD_PROJECT'] +service_account_json = os.environ['GOOGLE_APPLICATION_CREDENTIALS'] +registry_id = 'test-registry-{}-{}'.format(uuid.uuid4(), int(time.time())) + + +@pytest.fixture(scope='session') +def test_topic(): + pubsub_client = pubsub.PublisherClient() + try: + topic = manager.create_iot_topic(project_id, topic_id) + except AlreadyExists as e: + print("The topic already exists, detail: {}".format(str(e))) + # Ignore the error, fetch the topic + topic = pubsub_client.get_topic( + pubsub_client.topic_path(project_id, topic_id)) + + yield topic + + topic_path = pubsub_client.topic_path(project_id, topic_id) + try: + pubsub_client.delete_topic(topic_path) + except NotFound as e: + # We ignore this case. 
+        print("The topic doesn't exist: detail: {}".format(str(e)))
+
+
+@pytest.fixture(scope='session')
+def test_subscription(test_topic):
+    subscriber = pubsub.SubscriberClient()
+    subscription_path = subscriber.subscription_path(
+        project_id, subscription_name)
+
+    try:
+        subscription = subscriber.create_subscription(
+            subscription_path, test_topic.name)
+    except AlreadyExists as e:
+        print("The subscription already exists, detail: {}".format(str(e)))
+        # Ignore the error, fetch the subscription
+        subscription = subscriber.get_subscription(subscription_path)
+
+    yield subscription
+
+    try:
+        subscriber.delete_subscription(subscription_path)
+    except NotFound as e:
+        # We ignore this case.
+        print("The subscription doesn't exist: detail: {}".format(str(e)))
+
+
+@pytest.fixture(scope='session')
+def test_registry_id(test_topic):
+    @backoff.on_exception(backoff.expo, HttpError, max_time=60)
+    def create_registry():
+        manager.open_registry(
+            service_account_json, project_id, cloud_region, test_topic.name,
+            registry_id)
+
+    create_registry()
+
+    yield registry_id
+
+    @backoff.on_exception(backoff.expo, HttpError, max_time=60)
+    def delete_registry():
+        try:
+            manager.delete_registry(
+                service_account_json, project_id, cloud_region, registry_id)
+        except NotFound as e:
+            # We ignore this case.
+            print("The registry doesn't exist: detail: {}".format(str(e)))
+
+    delete_registry()
+
+
+@pytest.fixture(scope='session')
+def test_device_id(test_registry_id):
+    device_id = device_id_template.format('RSA256')
+
+    @backoff.on_exception(backoff.expo, HttpError, max_time=60)
+    def create_device():
+        try:
+            manager.create_rs256_device(
+                service_account_json, project_id, cloud_region, test_registry_id,
+                device_id, rsa_cert_path)
+        except AlreadyExists as e:
+            # We ignore this case. 
+ print("The device already exists: detail: {}".format(str(e))) + + create_device() + + yield device_id + + @backoff.on_exception(backoff.expo, HttpError, max_time=60) + def delete_device(): + try: + manager.delete_device( + service_account_json, project_id, cloud_region, + test_registry_id, device_id) + except NotFound as e: + # We ignore this case. + print("The device doesn't exist: detail: {}".format(str(e))) + + delete_device() + + +@pytest.fixture(scope='module') +def device_and_gateways(test_registry_id): + device_id = device_id_template.format('noauthbind') + gateway_id = device_id_template.format('RS256') + bad_gateway_id = device_id_template.format('RS256-err') + + @backoff.on_exception(backoff.expo, HttpError, max_time=60) + def create_device(): + manager.create_device( + service_account_json, project_id, cloud_region, test_registry_id, + device_id) + create_device() + + @backoff.on_exception(backoff.expo, HttpError, max_time=60) + def create_gateways(): + manager.create_gateway( + service_account_json, project_id, cloud_region, test_registry_id, + None, gateway_id, rsa_cert_path, 'RS256') + manager.create_gateway( + service_account_json, project_id, cloud_region, test_registry_id, + None, bad_gateway_id, rsa_cert_path, 'RS256') + + create_gateways() + + @backoff.on_exception(backoff.expo, HttpError, max_time=60) + def bind_device_to_gateways(): + manager.bind_device_to_gateway( + service_account_json, project_id, cloud_region, test_registry_id, + device_id, gateway_id) + manager.bind_device_to_gateway( + service_account_json, project_id, cloud_region, test_registry_id, + device_id, bad_gateway_id) + + bind_device_to_gateways() + + yield (device_id, gateway_id, bad_gateway_id) + + @backoff.on_exception(backoff.expo, HttpError, max_time=60) + def unbind(): + manager.unbind_device_from_gateway( + service_account_json, project_id, cloud_region, test_registry_id, + device_id, gateway_id) + manager.unbind_device_from_gateway( + service_account_json, 
project_id, cloud_region, test_registry_id, + device_id, bad_gateway_id) + + unbind() + + @backoff.on_exception(backoff.expo, HttpError, max_time=60) + def delete_device(): + try: + manager.delete_device( + service_account_json, project_id, cloud_region, + test_registry_id, device_id) + except NotFound as e: + # We ignore this case. + print("The device doesn't exist: detail: {}".format(str(e))) + + delete_device() + + @backoff.on_exception(backoff.expo, HttpError, max_time=60) + def delete_gateways(): + try: + manager.delete_device( + service_account_json, project_id, cloud_region, + test_registry_id, gateway_id) + except NotFound as e: + # We ignore this case. + print("The gateway doesn't exist: detail: {}".format(str(e))) + try: + manager.delete_device( + service_account_json, project_id, cloud_region, + test_registry_id, bad_gateway_id) + except NotFound as e: + # We ignore this case. + print("The gateway doesn't exist: detail: {}".format(str(e))) + + delete_gateways() diff --git a/iot/api-client/mqtt_example/requirements-test.txt b/iot/api-client/mqtt_example/requirements-test.txt index 781d4326c947..132ae92cb323 100644 --- a/iot/api-client/mqtt_example/requirements-test.txt +++ b/iot/api-client/mqtt_example/requirements-test.txt @@ -1 +1,2 @@ pytest==5.3.2 +backoff==1.10.0 diff --git a/language/classify_text/requirements.txt b/language/classify_text/requirements.txt index bdbf1b3a587e..80d612f8c0d7 100644 --- a/language/classify_text/requirements.txt +++ b/language/classify_text/requirements.txt @@ -1,2 +1,2 @@ google-cloud-language==1.3.0 -numpy==1.18.2 +numpy==1.18.3 diff --git a/storage/s3-sdk/requirements.txt b/storage/s3-sdk/requirements.txt index 543fd88042a5..2c95b1b20640 100644 --- a/storage/s3-sdk/requirements.txt +++ b/storage/s3-sdk/requirements.txt @@ -1 +1 @@ -boto3==1.12.39 +boto3==1.12.41 diff --git a/vision/cloud-client/detect/detect.py b/vision/cloud-client/detect/detect.py index a7d3eca8759b..a3976ffb7b32 100644 --- 
a/vision/cloud-client/detect/detect.py +++ b/vision/cloud-client/detect/detect.py @@ -836,7 +836,7 @@ def async_detect_document(gcs_source_uri, gcs_destination_uri): requests=[async_request]) print('Waiting for the operation to finish.') - operation.result(timeout=180) + operation.result(timeout=300) # Once the request has completed and the output has been # written to GCS, we can list all the output files. diff --git a/vision/cloud-client/detect/detect_test.py b/vision/cloud-client/detect/detect_test.py index 5a8774c12fbd..e577df758b7a 100644 --- a/vision/cloud-client/detect/detect_test.py +++ b/vision/cloud-client/detect/detect_test.py @@ -16,6 +16,7 @@ import uuid from google.cloud import storage +import pytest import detect @@ -208,6 +209,7 @@ def test_detect_crop_hints_uri(capsys): assert 'bounds: ' in out +@pytest.mark.flaky def test_async_detect_document(capsys): storage_client = storage.Client() bucket = storage_client.get_bucket(BUCKET) diff --git a/vision/cloud-client/detect/requirements-test.txt b/vision/cloud-client/detect/requirements-test.txt index 781d4326c947..1b569cb4f2c7 100644 --- a/vision/cloud-client/detect/requirements-test.txt +++ b/vision/cloud-client/detect/requirements-test.txt @@ -1 +1,2 @@ pytest==5.3.2 +flaky==3.6.1