Skip to content

Commit

Permalink
Merge branch 'master' into fix-container_registry
Browse files Browse the repository at this point in the history
  • Loading branch information
Takashi Matsuo authored Apr 21, 2020
2 parents c627e63 + d8dd870 commit 68bf8b0
Show file tree
Hide file tree
Showing 25 changed files with 512 additions and 410 deletions.
2 changes: 1 addition & 1 deletion appengine/flexible/numpy/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
Flask==1.1.2
gunicorn==20.0.4
numpy==1.18.2
numpy==1.18.3
2 changes: 1 addition & 1 deletion appengine/flexible/scipy/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
Flask==1.1.2
gunicorn==20.0.4
imageio==2.8.0
numpy==1.18.2
numpy==1.18.3
pillow==7.1.1
scipy==1.4.1
2 changes: 1 addition & 1 deletion bigquery/pandas-gbq-migration/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,4 @@ pandas==0.25.3; python_version > '3.0'
pandas==0.24.2; python_version < '3.0'
pandas-gbq==0.13.1
pyarrow==0.15.1
grpcio==1.27.2
grpcio==1.28.1
2 changes: 1 addition & 1 deletion cloud-sql/sql-server/sqlalchemy/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
Flask==1.1.1
Flask==1.1.2
SQLAlchemy==1.3.13
pyodbc==4.0.30

13 changes: 9 additions & 4 deletions datastore/cloud-ndb/flask_app_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,9 @@

import uuid

import pytest

import backoff
import pytest
from google.cloud import ndb

import flask_app
Expand All @@ -39,9 +40,13 @@ def test_index(test_book):
flask_app.app.testing = True
client = flask_app.app.test_client()

r = client.get('/')
assert r.status_code == 200
assert test_book.title in r.data.decode('utf-8')
@backoff.on_exception(backoff.expo, AssertionError, max_time=60)
def eventually_consistent_test():
r = client.get('/')
assert r.status_code == 200
assert test_book.title in r.data.decode('utf-8')

eventually_consistent_test()


def test_ndb_wsgi_middleware():
Expand Down
11 changes: 8 additions & 3 deletions datastore/cloud-ndb/quickstart_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@

import uuid

import backoff
import pytest

import quickstart
Expand All @@ -34,6 +35,10 @@ def test_book():


def test_quickstart(capsys, test_book):
quickstart.list_books()
out, _ = capsys.readouterr()
assert test_book.title in out
@backoff.on_exception(backoff.expo, AssertionError, max_time=60)
def eventually_consistent_test():
quickstart.list_books()
out, _ = capsys.readouterr()
assert test_book.title in out

eventually_consistent_test()
1 change: 1 addition & 0 deletions datastore/cloud-ndb/requirements-test.txt
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
backoff==1.10.0
pytest==5.3.2
5 changes: 5 additions & 0 deletions datastore/schedule-export/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# Scheduling Datastore exports with Cloud Functions and Cloud Scheduler

This sample application demonstrates how to schedule exports of your Datastore entities. To deploy this sample, see:

[Scheduling exports](https://cloud.google.com/datastore/docs/schedule-export)
43 changes: 43 additions & 0 deletions datastore/schedule-export/main.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
import base64
import json
import os

from googleapiclient.discovery import build

datastore = build('datastore', 'v1')
project_id = os.environ.get('GCP_PROJECT')


def datastore_export(event, context):
    """Trigger a Cloud Datastore export from a Cloud Scheduler job.

    Args:
        event (dict): event['data'] must contain a base64-encoded JSON
            object (Cloud Scheduler base64-encodes payloads by default).
            The object must include a 'bucket' value and may include
            'kinds' and 'namespaceIds' values.
        context (google.cloud.functions.Context): Cloud Functions event
            metadata (unused).
    """
    payload = json.loads(base64.b64decode(event['data']).decode('utf-8'))
    output_bucket = payload['bucket']

    # Copy only the optional filter keys that the caller actually supplied.
    entity_filter = {
        key: payload[key]
        for key in ('kinds', 'namespaceIds')
        if key in payload
    }

    export_request = datastore.projects().export(
        projectId=project_id,
        body={
            'outputUrlPrefix': output_bucket,
            'entityFilter': entity_filter,
        },
    )
    print(export_request.execute())
1 change: 1 addition & 0 deletions datastore/schedule-export/requirements-test.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
pytest==5.3.2
1 change: 1 addition & 0 deletions datastore/schedule-export/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
google-api-python-client>=1.7.12
74 changes: 74 additions & 0 deletions datastore/schedule-export/schedule_export_test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
# Copyright 2020 Google LLC All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import base64

from unittest.mock import Mock

import main

# Shared fake Cloud Functions context passed to every test invocation.
# The attribute values mimic real event metadata; the function under
# test ignores them, so they only need to exist.
mock_context = Mock()
mock_context.event_id = '617187464135194'
mock_context.timestamp = '2020-04-15T22:09:03.761Z'


def test_datastore_export(capsys):
    """An export without an entity filter sends the bucket in the request."""
    bucket = 'gs://my-bucket'
    json_string = '{{ "bucket": "{bucket}" }}'.format(bucket=bucket)

    # Base64-encode the payload the same way Cloud Scheduler does.
    data_encoded = base64.b64encode(bytes(json_string, 'utf-8'))
    event = {"data": data_encoded}

    # Swap the real Datastore client for a mock so no RPC is made.
    mock_datastore = Mock()
    main.datastore = mock_datastore

    main.datastore_export(event, mock_context)
    out, err = capsys.readouterr()  # drain captured stdout

    # Inspect the keyword args of the recorded export() call.
    req_body = mock_datastore.projects().export.call_args[1]['body']
    assert req_body['outputUrlPrefix'] == bucket


def test_datastore_export_entity_filter(capsys):
    """An export with kinds/namespaceIds forwards them in the entity filter."""
    bucket = 'gs://my-bucket'
    kinds = 'Users,Tasks'
    namespaceIds = 'Customer831,Customer157'
    json_string = '{{ "bucket": "{bucket}", "kinds": "{kinds}", "namespaceIds": "{namespaceIds}" }}'.format(
        bucket=bucket, kinds=kinds, namespaceIds=namespaceIds)

    # Base64-encode the payload the same way Cloud Scheduler does.
    data_encoded = base64.b64encode(bytes(json_string, 'utf-8'))
    event = {"data": data_encoded}

    # Swap the real Datastore client for a mock so no RPC is made.
    mock_datastore = Mock()
    main.datastore = mock_datastore

    main.datastore_export(event, mock_context)
    out, err = capsys.readouterr()  # drain captured stdout

    # Inspect the keyword args of the recorded export() call.
    req_body = mock_datastore.projects().export.call_args[1]['body']
    assert req_body['outputUrlPrefix'] == bucket
    assert req_body['entityFilter']['kinds'] == kinds
    assert req_body['entityFilter']['namespaceIds'] == namespaceIds
89 changes: 44 additions & 45 deletions healthcare/api-client/v1/hl7v2/hl7v2_messages_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,8 @@
import sys
import uuid

import backoff
from googleapiclient.errors import HttpError
from retrying import retry

# Add datasets for bootstrapping datasets for testing
sys.path.append(os.path.join(os.path.dirname(__file__), "..", "datasets")) # noqa
Expand All @@ -36,18 +36,9 @@
label_value = "TRUE"


def retry_if_server_exception(exception):
return isinstance(exception, (HttpError))


@pytest.fixture(scope="module")
def test_dataset():
@retry(
wait_exponential_multiplier=1000,
wait_exponential_max=10000,
stop_max_attempt_number=10,
retry_on_exception=retry_if_server_exception,
)
@backoff.on_exception(backoff.expo, HttpError, max_time=60)
def create():
try:
datasets.create_dataset(project_id, cloud_region, dataset_id)
Expand All @@ -65,12 +56,7 @@ def create():
yield

# Clean up
@retry(
wait_exponential_multiplier=1000,
wait_exponential_max=10000,
stop_max_attempt_number=10,
retry_on_exception=retry_if_server_exception,
)
@backoff.on_exception(backoff.expo, HttpError, max_time=60)
def clean_up():
try:
datasets.delete_dataset(project_id, cloud_region, dataset_id)
Expand All @@ -86,12 +72,7 @@ def clean_up():

@pytest.fixture(scope="module")
def test_hl7v2_store():
@retry(
wait_exponential_multiplier=1000,
wait_exponential_max=10000,
stop_max_attempt_number=10,
retry_on_exception=retry_if_server_exception,
)
@backoff.on_exception(backoff.expo, HttpError, max_time=60)
def create():
try:
hl7v2_stores.create_hl7v2_store(
Expand All @@ -115,12 +96,7 @@ def create():
yield

# Clean up
@retry(
wait_exponential_multiplier=1000,
wait_exponential_max=10000,
stop_max_attempt_number=10,
retry_on_exception=retry_if_server_exception,
)
@backoff.on_exception(backoff.expo, HttpError, max_time=60)
def clean_up():
try:
hl7v2_stores.delete_hl7v2_store(
Expand All @@ -145,12 +121,20 @@ def test_CRUD_hl7v2_message(test_dataset, test_hl7v2_store, capsys):
project_id, cloud_region, dataset_id, hl7v2_store_id, hl7v2_message_file
)

hl7v2_messages_list = hl7v2_messages.list_hl7v2_messages(
project_id, cloud_region, dataset_id, hl7v2_store_id
)
@backoff.on_exception(backoff.expo, AssertionError, max_time=60)
def run_eventually_consistent_test():
hl7v2_messages_list = hl7v2_messages.list_hl7v2_messages(
project_id, cloud_region, dataset_id, hl7v2_store_id
)

assert len(hl7v2_messages_list) > 0
hl7v2_message_name = hl7v2_messages_list[0].get("name")
elms = hl7v2_message_name.split("/", 9)
assert len(elms) >= 10
hl7v2_message_id = elms[9]
return hl7v2_message_id

hl7v2_message_name = hl7v2_messages_list[0].get("name")
hl7v2_message_id = hl7v2_message_name.split("/", 9)[9]
hl7v2_message_id = run_eventually_consistent_test()

hl7v2_messages.get_hl7v2_message(
project_id, cloud_region, dataset_id, hl7v2_store_id, hl7v2_message_id
Expand All @@ -173,12 +157,20 @@ def test_ingest_hl7v2_message(test_dataset, test_hl7v2_store, capsys):
project_id, cloud_region, dataset_id, hl7v2_store_id, hl7v2_message_file
)

hl7v2_messages_list = hl7v2_messages.list_hl7v2_messages(
project_id, cloud_region, dataset_id, hl7v2_store_id
)
@backoff.on_exception(backoff.expo, AssertionError, max_time=60)
def run_eventually_consistent_test():
hl7v2_messages_list = hl7v2_messages.list_hl7v2_messages(
project_id, cloud_region, dataset_id, hl7v2_store_id
)

hl7v2_message_name = hl7v2_messages_list[0].get("name")
hl7v2_message_id = hl7v2_message_name.split("/", 9)[9]
assert len(hl7v2_messages_list) > 0
hl7v2_message_name = hl7v2_messages_list[0].get("name")
elms = hl7v2_message_name.split("/", 9)
assert len(elms) >= 10
hl7v2_message_id = elms[9]
return hl7v2_message_id

hl7v2_message_id = run_eventually_consistent_test()

hl7v2_messages.get_hl7v2_message(
project_id, cloud_region, dataset_id, hl7v2_store_id, hl7v2_message_id
Expand All @@ -201,13 +193,20 @@ def test_patch_hl7v2_message(test_dataset, test_hl7v2_store, capsys):
project_id, cloud_region, dataset_id, hl7v2_store_id, hl7v2_message_file
)

hl7v2_messages_list = hl7v2_messages.list_hl7v2_messages(
project_id, cloud_region, dataset_id, hl7v2_store_id
)
@backoff.on_exception(backoff.expo, AssertionError, max_time=60)
def run_eventually_consistent_test():
hl7v2_messages_list = hl7v2_messages.list_hl7v2_messages(
project_id, cloud_region, dataset_id, hl7v2_store_id
)

assert len(hl7v2_messages_list) > 0
hl7v2_message_name = hl7v2_messages_list[0].get("name")
elms = hl7v2_message_name.split("/", 9)
assert len(elms) >= 10
hl7v2_message_id = elms[9]
return hl7v2_message_id

assert len(hl7v2_messages_list) > 0
hl7v2_message_name = hl7v2_messages_list[0].get("name")
hl7v2_message_id = hl7v2_message_name.split("/", 9)[9]
hl7v2_message_id = run_eventually_consistent_test()

hl7v2_messages.patch_hl7v2_message(
project_id,
Expand Down
Loading

0 comments on commit 68bf8b0

Please sign in to comment.