Skip to content

Commit

Permalink
Merge pull request #96 from GoogleCloudPlatform/test-cleanup
Browse files Browse the repository at this point in the history
Cleaning up base test case
  • Loading branch information
Jonathan Wayne Parrott committed Sep 16, 2015
2 parents 1bfaa4d + c651359 commit 0078fd5
Show file tree
Hide file tree
Showing 17 changed files with 105 additions and 104 deletions.
18 changes: 12 additions & 6 deletions CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -81,16 +81,22 @@ If you want to run the Google App Engine tests, you will need:

    $ export GAE_PYTHONPATH=<path to your App Engine SDK>

To run the bigquery tests, you'll need to create a bigquery dataset:
To run the bigquery tests:

* Create a dataset in your project named `test_dataset`.
* Create a table named `test_table2`, upload ``tests/resources/data.csv`` and give it the following schema:

Name STRING
Age INTEGER
Weight FLOAT
IsMagic BOOLEAN
gcloud alpha bigquery datasets create test_dataset

* Load sample data into google cloud storage (for import tests):

gsutil cp tests/resources/data.csv gs://$TEST_BUCKET_NAME/data.csv

* Load the sample data into a table named `test_table` (for export and streaming tests):

gcloud alpha bigquery import \
gs://$TEST_BUCKET_NAME/data.csv \
test_dataset/test_table \
--schema-file tests/resources/schema.json

### Test environments

Expand Down
8 changes: 2 additions & 6 deletions appengine/bigquery/main_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ class TestAuthSample(tests.AppEngineTestbedCase):
def setUp(self):
super(TestAuthSample, self).setUp()
self.app = webtest.TestApp(main.app)
main.PROJECTID = self.project_id

def test_anonymous_get(self):
response = self.app.get('/')
Expand Down Expand Up @@ -55,12 +56,7 @@ def test_oauthed_get(self, *args):
{'status': '200'})

with mock.patch.object(main.decorator, 'http', return_value=mock_http):
original_projectid = main.PROJECTID
try:
main.PROJECTID = self.constants['projectId']
response = self.app.get('/')
finally:
main.PROJECTID = original_projectid
response = self.app.get('/')

# Should make the api call
self.assertEqual(response.status_int, 200)
Expand Down
14 changes: 9 additions & 5 deletions bigquery/samples/async_query_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,13 +21,17 @@
class TestAsyncQuery(tests.CloudBaseTest):

def test_async_query(self):
query = (
'SELECT corpus FROM publicdata:samples.shakespeare '
'GROUP BY corpus;')

with tests.capture_stdout() as stdout:
main(
self.constants['projectId'],
self.constants['query'],
False,
5,
5)
project_id=self.project_id,
query_string=query,
batch=False,
num_retries=5,
interval=1)

value = stdout.getvalue().strip().split('\n').pop()

Expand Down
44 changes: 26 additions & 18 deletions bigquery/samples/export_data_to_cloud_storage_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,33 +19,41 @@


class TestExportTableToGCS(CloudBaseTest):
dataset_id = 'test_dataset'
table_id = 'test_table'

def test_export_table_csv(self):
cloud_storage_output_uri = \
'gs://{}/output.csv'.format(self.bucket_name)
main(
self.constants['cloudStorageOutputURI'],
self.constants['projectId'],
self.constants['datasetId'],
self.constants['newTableId'],
5,
1,
cloud_storage_output_uri,
self.project_id,
self.dataset_id,
self.table_id,
num_retries=5,
interval=1,
export_format="CSV")

def test_export_table_json(self):
cloud_storage_output_uri = \
'gs://{}/output.json'.format(self.bucket_name)
main(
self.constants['cloudStorageOutputURI'],
self.constants['projectId'],
self.constants['datasetId'],
self.constants['newTableId'],
5,
1,
cloud_storage_output_uri,
self.project_id,
self.dataset_id,
self.table_id,
num_retries=5,
interval=1,
export_format="NEWLINE_DELIMITED_JSON")

def test_export_table_avro(self):
cloud_storage_output_uri = \
'gs://{}/output.avro'.format(self.bucket_name)
main(
self.constants['cloudStorageOutputURI'],
self.constants['projectId'],
self.constants['datasetId'],
self.constants['newTableId'],
5,
1,
cloud_storage_output_uri,
self.project_id,
self.dataset_id,
self.table_id,
num_retries=5,
interval=1,
export_format="AVRO")
2 changes: 1 addition & 1 deletion bigquery/samples/getting_started_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
class TestGettingStarted(tests.CloudBaseTest):
def test_main(self):
with tests.capture_stdout() as mock_stdout:
main(self.constants['projectId'])
main(self.project_id)

stdout = mock_stdout.getvalue()
self.assertRegexpMatches(stdout, re.compile(
Expand Down
2 changes: 1 addition & 1 deletion bigquery/samples/list_datasets_projects_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ class TestListDatasetsProjects(tests.CloudBaseTest):

def test_main(self):
with tests.capture_stdout() as mock_stdout:
main(self.constants['projectId'])
main(self.project_id)

stdout = mock_stdout.getvalue()

Expand Down
20 changes: 13 additions & 7 deletions bigquery/samples/load_data_from_csv_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,12 +20,18 @@


class TestLoadDataFromCSV(CloudBaseTest):
dataset_id = 'test_dataset'
table_id = 'test_import_table'

def test_load_table(self):
cloud_storage_input_uri = 'gs://{}/data.csv'.format(self.bucket_name)
schema_file = os.path.join(self.resource_path, 'schema.json')

main(
self.constants['projectId'],
self.constants['datasetId'],
self.constants['newTableId'],
os.path.join(self.resource_path, 'schema.json'),
self.constants['cloudStorageInputURI'],
1,
5)
self.project_id,
self.dataset_id,
self.table_id,
schema_file=schema_file,
data_path=cloud_storage_input_uri,
poll_interval=1,
num_retries=5)
10 changes: 6 additions & 4 deletions bigquery/samples/streaming_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@


class TestStreaming(CloudBaseTest):
dataset_id = 'test_dataset'
table_id = 'test_table'

def test_stream_row_to_bigquery(self):
with open(
Expand All @@ -33,10 +35,10 @@ def test_stream_row_to_bigquery(self):

with capture_stdout() as stdout:
streaming.main(
self.constants['projectId'],
self.constants['datasetId'],
self.constants['newTableId'],
5)
self.project_id,
self.dataset_id,
self.table_id,
num_retries=5)

results = stdout.getvalue().split('\n')
self.assertIsNotNone(json.loads(results[0]))
12 changes: 8 additions & 4 deletions bigquery/samples/sync_query_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,12 +21,16 @@
class TestSyncQuery(CloudBaseTest):

def test_sync_query(self):
query = (
'SELECT corpus FROM publicdata:samples.shakespeare '
'GROUP BY corpus;')

with capture_stdout() as stdout:
main(
self.constants['projectId'],
self.constants['query'],
30,
5)
project_id=self.project_id,
query=query,
timeout=30,
num_retries=5)

result = stdout.getvalue().split('\n')[0]
self.assertIsNotNone(json.loads(result))
Original file line number Diff line number Diff line change
Expand Up @@ -20,4 +20,4 @@ class BlogTestCase(CloudBaseTest):
"""Simple test case that ensures the blog code doesn't throw any errors."""

def test_main(self):
main(self.constants['projectId'])
main(self.project_id)
Original file line number Diff line number Diff line change
Expand Up @@ -20,4 +20,4 @@ class WikiTestCase(CloudBaseTest):
"""Simple test case that ensures the wiki code doesn't throw any errors."""

def test_main(self):
main(self.constants['projectId'])
main(self.project_id)
7 changes: 1 addition & 6 deletions monitoring/samples/auth_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import re

import tests
Expand All @@ -21,13 +20,9 @@

class TestTimeseriesList(tests.CloudBaseTest):

@classmethod
def setUpClass(cls):
cls.test_project_id = os.environ.get(tests.PROJECT_ID_ENV)

def test_main(self):
with tests.capture_stdout() as stdout:
auth.main(self.test_project_id)
auth.main(self.project_id)
output = stdout.getvalue().strip()
self.assertRegexpMatches(
output, re.compile(r'Timeseries.list raw response:\s*'
Expand Down
2 changes: 1 addition & 1 deletion storage/api/compose_objects_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ class TestComposeObjects(CloudBaseTest):
def test_main(self):
args = [
'ignored_command_name',
self.constants['bucketName'],
self.bucket_name,
'dest.txt',
os.path.join(self.resource_path, 'file1.txt'),
os.path.join(self.resource_path, 'file2.txt'),
Expand Down
2 changes: 1 addition & 1 deletion storage/api/list_objects_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,6 @@ class TestListObjects(CloudBaseTest):
def test_main(self):
args = [
'ignored_command_name',
self.constants['bucketName']
self.bucket_name
]
main(args)
4 changes: 0 additions & 4 deletions tests/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,18 +14,14 @@

from .utils import (
AppEngineTestbedCase,
BUCKET_NAME_ENV,
capture_stdout,
CloudBaseTest,
PROJECT_ID_ENV,
RESOURCE_PATH)


__all__ = [
'AppEngineTestbedCase',
'BUCKET_NAME_ENV',
'capture_stdout',
'CloudBaseTest',
'PROJECT_ID_ENV',
'RESOURCE_PATH'
]
8 changes: 0 additions & 8 deletions tests/resources/constants.json

This file was deleted.

Loading

0 comments on commit 0078fd5

Please sign in to comment.