fix: address issues with concurrent BigQuery tests (#3426)
2 people authored and plamut committed Jul 22, 2020
1 parent 340c4fc commit 706a39a
Showing 6 changed files with 88 additions and 57 deletions.
19 changes: 18 additions & 1 deletion samples/snippets/authorized_view_tutorial.py
@@ -15,7 +15,7 @@
# limitations under the License.


def run_authorized_view_tutorial():
def run_authorized_view_tutorial(override_values={}):
# Note to user: This is a group email for testing purposes. Replace with
# your own group email address when running this code.
analyst_group_email = '[email protected]'
@@ -28,6 +28,14 @@ def run_authorized_view_tutorial():
client = bigquery.Client()
source_dataset_id = 'github_source_data'

# [END bigquery_authorized_view_tutorial]
# [END bigquery_avt_create_source_dataset]
# To facilitate testing, we replace values with alternatives
# provided by the testing harness.
source_dataset_id = override_values.get("source_dataset_id", source_dataset_id)
# [START bigquery_authorized_view_tutorial]
# [START bigquery_avt_create_source_dataset]

source_dataset = bigquery.Dataset(client.dataset(source_dataset_id))
# Specify the geographic location where the dataset should reside.
source_dataset.location = 'US'
@@ -57,6 +65,15 @@ def run_authorized_view_tutorial():
# Create a separate dataset to store your view
# [START bigquery_avt_create_shared_dataset]
shared_dataset_id = 'shared_views'

# [END bigquery_authorized_view_tutorial]
# [END bigquery_avt_create_shared_dataset]
# To facilitate testing, we replace values with alternatives
# provided by the testing harness.
shared_dataset_id = override_values.get("shared_dataset_id", shared_dataset_id)
# [START bigquery_authorized_view_tutorial]
# [START bigquery_avt_create_shared_dataset]

shared_dataset = bigquery.Dataset(client.dataset(shared_dataset_id))
shared_dataset.location = 'US'
shared_dataset = client.create_dataset(shared_dataset) # API request
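Each override uses dict.get with the original value as the fallback, so callers that pass nothing see exactly the old behavior. A minimal sketch of how a test might drive the tutorial (the dataset IDs below are illustrative, not part of this commit):

    import authorized_view_tutorial

    # Any key omitted from override_values falls back to the value
    # hard-coded in the tutorial, so plain callers are unaffected.
    authorized_view_tutorial.run_authorized_view_tutorial(override_values={
        'source_dataset_id': 'github_source_data_example',
        'shared_dataset_id': 'shared_views_example',
    })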
28 changes: 15 additions & 13 deletions samples/snippets/authorized_view_tutorial_test.py
@@ -12,6 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import uuid

from google.cloud import bigquery
import pytest

@@ -24,24 +26,24 @@ def client():


@pytest.fixture
def to_delete(client):
def datasets_to_delete(client):
doomed = []
yield doomed
for item in doomed:
if isinstance(item, (bigquery.Dataset, bigquery.DatasetReference)):
client.delete_dataset(item, delete_contents=True)
elif isinstance(item, (bigquery.Table, bigquery.TableReference)):
client.delete_table(item)
else:
item.delete()
client.delete_dataset(item, delete_contents=True)


def test_authorized_view_tutorial(client, to_delete):
source_dataset_ref = client.dataset('github_source_data')
shared_dataset_ref = client.dataset('shared_views')
to_delete.extend([source_dataset_ref, shared_dataset_ref])
def test_authorized_view_tutorial(client, datasets_to_delete):
override_values = {
"source_dataset_id": "github_source_data_{}".format(str(uuid.uuid4()).replace("-", "_")),
"shared_dataset_id": "shared_views_{}".format(str(uuid.uuid4()).replace("-", "_")),
}
source_dataset_ref = client.dataset(override_values["source_dataset_id"])
shared_dataset_ref = client.dataset(override_values["shared_dataset_id"])
datasets_to_delete.extend([override_values["source_dataset_id"],
override_values["shared_dataset_id"]])

authorized_view_tutorial.run_authorized_view_tutorial()
authorized_view_tutorial.run_authorized_view_tutorial(override_values)

source_dataset = client.get_dataset(source_dataset_ref)
shared_dataset = client.get_dataset(shared_dataset_ref)
@@ -55,7 +57,7 @@ def test_authorized_view_tutorial(client, to_delete):
if entry.entity_type == 'view']
expected_view_ref = {
'projectId': client.project,
'datasetId': 'shared_views',
'datasetId': override_values["shared_dataset_id"],
'tableId': 'github_analyst_view',
}
assert len(authorized_view_entries) == 1
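Each test run now derives its dataset IDs from a UUID, so concurrent runs of the suite cannot collide on fixed names like github_source_data. The naming idea, extracted into a small helper (the helper is ours for illustration; the commit inlines the expression):

    import uuid

    def unique_dataset_id(prefix):
        # BigQuery dataset IDs may contain only letters, digits, and
        # underscores, so strip the hyphens out of the UUID first.
        return '{}_{}'.format(prefix, str(uuid.uuid4()).replace('-', '_'))

    print(unique_dataset_id('github_source_data'))
    # e.g. github_source_data_1b9d6bcd_bbfd_4b2d_9b5d_ab8dfbbd4bed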
12 changes: 9 additions & 3 deletions samples/snippets/natality_tutorial.py
@@ -15,7 +15,7 @@
# limitations under the License.


def run_natality_tutorial():
def run_natality_tutorial(override_values={}):
# [START bigquery_query_natality_tutorial]
"""Create a Google BigQuery linear regression input table.
@@ -37,8 +37,14 @@ def run_natality_tutorial():
client = bigquery.Client()

# Prepare a reference to a new dataset for storing the query results.
dataset_ref = client.dataset('natality_regression')
dataset = bigquery.Dataset(dataset_ref)
dataset_id = 'natality_regression'
# [END bigquery_query_natality_tutorial]
# To facilitate testing, we replace values with alternatives
# provided by the testing harness.
dataset_id = override_values.get("dataset_id", dataset_id)
# [START bigquery_query_natality_tutorial]

dataset = bigquery.Dataset(client.dataset(dataset_id))

# Create the new BigQuery dataset.
dataset = client.create_dataset(dataset)
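A side note on the override_values={} signature used across these samples: Python evaluates the default dictionary once, at definition time, and shares it between calls. That is harmless here because the dictionary is only read, never mutated, but a more defensive variant (our suggestion, not what the commit ships) would be:

    def run_natality_tutorial(override_values=None):
        if override_values is None:
            override_values = {}
        # ... body unchanged from the committed version ...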
38 changes: 21 additions & 17 deletions samples/snippets/natality_tutorial_test.py
@@ -12,31 +12,35 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import uuid

from google.cloud import bigquery
from google.cloud import exceptions
import pytest

import natality_tutorial


def dataset_exists(dataset, client):
try:
client.get_dataset(dataset)
return True
except exceptions.NotFound:
return False
@pytest.fixture(scope='module')
def client():
return bigquery.Client()


def test_natality_tutorial():
client = bigquery.Client()
dataset_ref = client.dataset('natality_regression')
assert not dataset_exists(dataset_ref, client)
@pytest.fixture
def datasets_to_delete(client):
doomed = []
yield doomed
for item in doomed:
client.delete_dataset(item, delete_contents=True)

natality_tutorial.run_natality_tutorial()

assert dataset_exists(dataset_ref, client)
def test_natality_tutorial(client, datasets_to_delete):
override_values = {
"dataset_id": "natality_regression_{}".format(str(uuid.uuid4()).replace("-", "_")),
}
datasets_to_delete.append(override_values["dataset_id"])

table = client.get_table(
bigquery.Table(dataset_ref.table('regression_input')))
assert table.num_rows > 0
natality_tutorial.run_natality_tutorial(override_values)

client.delete_dataset(dataset_ref, delete_contents=True)
table_ref = bigquery.Dataset(client.dataset(override_values["dataset_id"])).table("regression_input")
table = client.get_table(table_ref)
assert table.num_rows > 0
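The datasets_to_delete fixture collects IDs before the tutorial runs and deletes the datasets afterward, so cleanup happens even when an assertion fails. One remaining edge: if the test dies before the dataset is created, delete_dataset raises NotFound during teardown. A slightly hardened sketch (an assumption on our part, not in this commit):

    import pytest
    from google.cloud import exceptions

    @pytest.fixture
    def datasets_to_delete(client):  # reuses the module's client fixture
        doomed = []
        yield doomed
        for item in doomed:
            try:
                client.delete_dataset(item, delete_contents=True)
            except exceptions.NotFound:
                # The test may have failed before creating this dataset.
                pass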
8 changes: 7 additions & 1 deletion samples/snippets/quickstart.py
@@ -15,7 +15,7 @@
# limitations under the License.


def run_quickstart():
def run_quickstart(override_values={}):
# [START bigquery_quickstart]
# Imports the Google Cloud client library
from google.cloud import bigquery
@@ -26,6 +26,12 @@ def run_quickstart():
# The name for the new dataset
dataset_id = 'my_new_dataset'

# [END bigquery_quickstart]
# To facilitate testing, we replace values with alternatives
# provided by the testing harness.
dataset_id = override_values.get("dataset_id", dataset_id)
# [START bigquery_quickstart]

# Prepares a reference to the new dataset
dataset_ref = bigquery_client.dataset(dataset_id)
dataset = bigquery.Dataset(dataset_ref)
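The [END bigquery_quickstart] / [START bigquery_quickstart] pair around the override is what keeps the testing shim out of the published docs: the snippet extractor concatenates regions that share a tag, so the rendered quickstart should still read roughly as before:

    from google.cloud import bigquery

    bigquery_client = bigquery.Client()

    # The name for the new dataset
    dataset_id = 'my_new_dataset'

    # Prepares a reference to the new dataset
    dataset_ref = bigquery_client.dataset(dataset_id)
    dataset = bigquery.Dataset(dataset_ref)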
40 changes: 18 additions & 22 deletions samples/snippets/quickstart_test.py
@@ -12,8 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import uuid

from google.cloud import bigquery
from google.cloud.exceptions import NotFound
import pytest

import quickstart
@@ -24,31 +25,26 @@
DATASET_ID = 'my_new_dataset'


@pytest.fixture
def temporary_dataset():
"""Fixture that ensures the test dataset does not exist before or
after a test."""
bigquery_client = bigquery.Client()
dataset_ref = bigquery_client.dataset(DATASET_ID)

if dataset_exists(dataset_ref, bigquery_client):
bigquery_client.delete_dataset(dataset_ref)
@pytest.fixture(scope='module')
def client():
return bigquery.Client()

yield

if dataset_exists(dataset_ref, bigquery_client):
bigquery_client.delete_dataset(dataset_ref)
@pytest.fixture
def datasets_to_delete(client):
doomed = []
yield doomed
for item in doomed:
client.delete_dataset(item, delete_contents=True)


def dataset_exists(dataset, client):
try:
client.get_dataset(dataset)
return True
except NotFound:
return False
def test_quickstart(capsys, client, datasets_to_delete):

override_values = {
"dataset_id": "my_new_dataset_{}".format(str(uuid.uuid4()).replace("-", "_")),
}
datasets_to_delete.append(override_values["dataset_id"])

def test_quickstart(capsys, temporary_dataset):
quickstart.run_quickstart()
quickstart.run_quickstart(override_values)
out, _ = capsys.readouterr()
assert DATASET_ID in out
assert override_values["dataset_id"] in out
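With every sample test now owning a uniquely named dataset, runs no longer serialize on fixed IDs such as my_new_dataset, so the suite can execute concurrently, for example with pytest-xdist via pytest -n auto (our example; this commit does not itself configure parallel execution).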
