test(samples): use try/finally for clusters and use pytest-xdist (#360)
busunkim96 authored Mar 8, 2022
1 parent c3ac64a commit 816fbb4
Showing 5 changed files with 77 additions and 53 deletions.
22 changes: 13 additions & 9 deletions dataproc/snippets/create_cluster_test.py
@@ -15,6 +15,7 @@
 import os
 import uuid
 
+from google.api_core.exceptions import NotFound
 from google.cloud import dataproc_v1 as dataproc
 import pytest
 
@@ -34,15 +35,18 @@ def teardown():
         client_options={"api_endpoint": f"{REGION}-dataproc.googleapis.com:443"}
     )
     # Client library function
-    operation = cluster_client.delete_cluster(
-        request={
-            "project_id": PROJECT_ID,
-            "region": REGION,
-            "cluster_name": CLUSTER_NAME,
-        }
-    )
-    # Wait for cluster to delete
-    operation.result()
+    try:
+        operation = cluster_client.delete_cluster(
+            request={
+                "project_id": PROJECT_ID,
+                "region": REGION,
+                "cluster_name": CLUSTER_NAME,
+            }
+        )
+        # Wait for cluster to delete
+        operation.result()
+    except NotFound:
+        print("Cluster already deleted")
 
 
 def test_cluster_create(capsys):
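The pattern this file adopts, catching NotFound around the delete call, makes the teardown idempotent: a cluster that the test itself (or an earlier run) already removed no longer fails the suite. A minimal self-contained sketch of the same pattern, using the same client calls as the hunk above; the helper name delete_cluster_quietly is illustrative, not part of the sample:

from google.api_core.exceptions import NotFound
from google.cloud import dataproc_v1 as dataproc


def delete_cluster_quietly(project_id: str, region: str, cluster_name: str) -> None:
    # Delete a Dataproc cluster, treating "already gone" as success.
    cluster_client = dataproc.ClusterControllerClient(
        client_options={"api_endpoint": f"{region}-dataproc.googleapis.com:443"}
    )
    try:
        operation = cluster_client.delete_cluster(
            request={
                "project_id": project_id,
                "region": region,
                "cluster_name": cluster_name,
            }
        )
        operation.result()  # Block until the long-running delete finishes.
    except NotFound:
        # The cluster was never created or was already deleted; nothing to do.
        print("Cluster already deleted")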
21 changes: 12 additions & 9 deletions dataproc/snippets/quickstart/quickstart_test.py
@@ -39,12 +39,10 @@
 
 
 @pytest.fixture(autouse=True)
-def setup_teardown():
+def blob():
     storage_client = storage.Client()
 
-    @backoff.on_exception(backoff.expo,
-                          ServiceUnavailable,
-                          max_tries=5)
+    @backoff.on_exception(backoff.expo, ServiceUnavailable, max_tries=5)
     def create_bucket():
         return storage_client.create_bucket(STAGING_BUCKET)
 
@@ -54,12 +52,20 @@ def create_bucket():
     yield
 
+    blob.delete()
+    bucket.delete()
+
+
+@pytest.fixture(autouse=True)
+def cluster():
+    yield
+
+    # The quickstart sample deletes the cluster, but if the test fails
+    # before cluster deletion occurs, it can be manually deleted here.
     cluster_client = dataproc.ClusterControllerClient(
         client_options={"api_endpoint": "{}-dataproc.googleapis.com:443".format(REGION)}
    )
 
-    # The quickstart sample deletes the cluster, but if the test fails
-    # before cluster deletion occurs, it can be manually deleted here.
     clusters = cluster_client.list_clusters(
         request={"project_id": PROJECT_ID, "region": REGION}
     )
@@ -74,9 +80,6 @@ def create_bucket():
                 }
             )
 
-    blob.delete()
-    bucket.delete()
-
 
 def test_quickstart(capsys):
     quickstart.quickstart(PROJECT_ID, REGION, CLUSTER_NAME, JOB_FILE_PATH)
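The reflowed decorator in the blob fixture does real work: it retries bucket creation with exponential backoff when the Storage API returns a transient ServiceUnavailable. A self-contained sketch of that retry pattern, with the bucket_name parameter added here for illustration:

import backoff
from google.api_core.exceptions import ServiceUnavailable
from google.cloud import storage


@backoff.on_exception(backoff.expo, ServiceUnavailable, max_tries=5)
def create_bucket(storage_client: storage.Client, bucket_name: str):
    # Retried up to 5 times, with exponentially growing waits between
    # attempts, whenever the call raises ServiceUnavailable.
    return storage_client.create_bucket(bucket_name)

Splitting the old setup_teardown into two autouse fixtures (blob and cluster) also means pytest finalizes each one independently, so an exception while cleaning up the bucket cannot skip cluster cleanup, and vice versa.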
1 change: 1 addition & 0 deletions dataproc/snippets/requirements-test.txt
@@ -1 +1,2 @@
 pytest==7.0.1
+pytest-xdist==2.5.0
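pytest-xdist is what lets these long-running cluster samples execute in parallel: it spawns multiple worker processes and distributes tests across them. A typical invocation from the dataproc/snippets directory, where -n auto sizes the worker pool to the machine's CPU count:

pytest -n auto

Each test provisions its own cluster (the samples derive names from uuid), so parallel workers do not contend for shared resources.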
42 changes: 26 additions & 16 deletions dataproc/snippets/submit_job_test.py
@@ -15,6 +15,7 @@
 import os
 import uuid
 
+from google.api_core.exceptions import NotFound
 from google.cloud import dataproc_v1 as dataproc
 import pytest
 
@@ -36,25 +37,34 @@
 
 @pytest.fixture(autouse=True)
 def setup_teardown():
-    cluster_client = dataproc.ClusterControllerClient(
-        client_options={"api_endpoint": "{}-dataproc.googleapis.com:443".format(REGION)}
-    )
+    try:
+        cluster_client = dataproc.ClusterControllerClient(
+            client_options={
+                "api_endpoint": "{}-dataproc.googleapis.com:443".format(REGION)
+            }
+        )
 
-    # Create the cluster.
-    operation = cluster_client.create_cluster(
-        request={"project_id": PROJECT_ID, "region": REGION, "cluster": CLUSTER}
-    )
-    operation.result()
+        # Create the cluster.
+        operation = cluster_client.create_cluster(
+            request={"project_id": PROJECT_ID, "region": REGION, "cluster": CLUSTER}
+        )
+        operation.result()
 
-    yield
+        yield
 
-    cluster_client.delete_cluster(
-        request={
-            "project_id": PROJECT_ID,
-            "region": REGION,
-            "cluster_name": CLUSTER_NAME,
-        }
-    )
+    finally:
+        try:
+            operation = cluster_client.delete_cluster(
+                request={
+                    "project_id": PROJECT_ID,
+                    "region": REGION,
+                    "cluster_name": CLUSTER_NAME,
+                }
+            )
+            operation.result()
+
+        except NotFound:
+            print("Cluster already deleted")
 
 
 def test_submit_job(capsys):
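The key shape here is try/finally around the yield: pytest runs the test body at the yield, so the finally block executes whether the test passes, fails, or errors, and an assertion failure can no longer leak a cluster. A stripped-down sketch of the pattern, with acquire/release standing in for cluster creation and deletion:

import pytest

_LIVE = []


def acquire() -> dict:
    # Stand-in for an expensive external resource such as a cluster.
    handle = {"id": len(_LIVE)}
    _LIVE.append(handle)
    return handle


def release(handle: dict) -> None:
    _LIVE.remove(handle)


@pytest.fixture(autouse=True)
def resource():
    handle = acquire()
    try:
        yield handle  # The test body runs here and may raise.
    finally:
        # Reached on pass, fail, or error, so cleanup always happens.
        release(handle)


def test_resource_is_live():
    assert len(_LIVE) == 1

Note that in the commit the creation call sits inside the try as well, so even a half-finished setup reaches the delete path, where NotFound is absorbed.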
44 changes: 25 additions & 19 deletions dataproc/snippets/update_cluster_test.py
@@ -19,6 +19,7 @@
 import os
 import uuid
 
+from google.api_core.exceptions import NotFound
 from google.cloud.dataproc_v1.services.cluster_controller.client import (
     ClusterControllerClient,
 )
 
@@ -41,25 +42,6 @@
 }
 
 
-@pytest.fixture(autouse=True)
-def setup_teardown(cluster_client):
-    # Create the cluster.
-    operation = cluster_client.create_cluster(
-        request={"project_id": PROJECT_ID, "region": REGION, "cluster": CLUSTER}
-    )
-    operation.result()
-
-    yield
-
-    cluster_client.delete_cluster(
-        request={
-            "project_id": PROJECT_ID,
-            "region": REGION,
-            "cluster_name": CLUSTER_NAME,
-        }
-    )
-
-
 @pytest.fixture
 def cluster_client():
     cluster_client = ClusterControllerClient(
@@ -68,6 +50,30 @@ def cluster_client():
     return cluster_client
 
 
+@pytest.fixture(autouse=True)
+def setup_teardown(cluster_client):
+    try:
+        # Create the cluster.
+        operation = cluster_client.create_cluster(
+            request={"project_id": PROJECT_ID, "region": REGION, "cluster": CLUSTER}
+        )
+        operation.result()
+
+        yield
+    finally:
+        try:
+            operation = cluster_client.delete_cluster(
+                request={
+                    "project_id": PROJECT_ID,
+                    "region": REGION,
+                    "cluster_name": CLUSTER_NAME,
+                }
+            )
+            operation.result()
+        except NotFound:
+            print("Cluster already deleted")
+
+
 def test_update_cluster(capsys, cluster_client: ClusterControllerClient):
     # Wrapper function for client library function
     update_cluster.update_cluster(PROJECT_ID, REGION, CLUSTER_NAME, NEW_NUM_INSTANCES)
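The rewritten setup_teardown also requests the cluster_client fixture by naming it as a parameter, so pytest constructs the client first and hands the same instance to the fixture and to any test that asks for it. A minimal illustration of that dependency mechanism, with a dict standing in for the real client:

import pytest


@pytest.fixture
def client():
    # In the real test this would construct a ClusterControllerClient.
    return {"endpoint": "example"}


@pytest.fixture(autouse=True)
def setup_teardown(client):
    # pytest resolves `client` first and passes the same object here
    # and to any test that also requests it.
    client["ready"] = True
    yield
    client["ready"] = False


def test_uses_shared_client(client):
    assert client["ready"] is True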
