Retry entire test_paginate_files() storage system test.
This is not just "taking the easy way out"; it is really the most
appropriate fix, since there are so many assertions that can fail due
to eventual consistency. (For example, asking for 2 blobs should
yield a next page when 3 are in the bucket, but eventual consistency
can break that expectation.)
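
As a concrete illustration of the parenthetical example above, the toy
sketch below (not project code; no real GCS calls, all names invented)
shows how a listing that lags behind recent uploads leaves the expected
next page missing:

    # Toy model of eventual consistency: uploads land immediately, but the
    # listing only reflects what the backend has caught up with so far.
    uploaded = ['file01.txt', 'file02.txt', 'file03.txt']   # what the test wrote
    listed = ['file01.txt', 'file02.txt']                   # a lagging listing

    max_results = 2
    page = listed[:max_results]
    has_next_page = len(listed) > max_results

    # The test's expectation: 3 blobs were uploaded and the page size is 2,
    # so there must be a next page ...
    assert len(uploaded) > max_results
    # ... but only 2 blobs are visible yet, so no next page exists and an
    # assertion on the page token fails -- hence retrying the whole test.
    print(has_next_page)    # False: no next page visible yet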

Fixes googleapis#2217.

Also:

- restoring test_second_level() to its pre-googleapis#2252 form (by
  retrying the entire test case)
- restoring test_list_files() to its pre-googleapis#2181 form (by
  retrying the entire test case)
- adding retries around the remaining test cases that call
  list_blobs(): test_root_level_w_delimiter(), test_first_level() and
  test_third_level() (the retry-on-failure pattern is sketched after
  this list)
- adding an _empty_bucket() helper, used by the setUpClass() helpers
  (which also call list_blobs()) in both TestStoragePseudoHierarchy
  and TestStorageListFiles
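
The RetryErrors helper used in the diff lives in system_tests/retry.py
and its implementation is not part of this change; the sketch below is
only a rough, hypothetical stand-in (RetryOnFailure, max_tries, and the
backoff numbers are invented) for the pattern the bullets describe:
rerun the entire test method whenever an assertion raises
failureException.

    import functools
    import time
    import unittest


    class RetryOnFailure(object):
        """Retry a callable when ``exc_type`` is raised (sketch only)."""

        def __init__(self, exc_type, max_tries=4, delay=1.0):
            self.exc_type = exc_type
            self.max_tries = max_tries
            self.delay = delay

        def __call__(self, to_wrap):
            @functools.wraps(to_wrap)
            def wrapped(*args, **kwargs):
                for attempt in range(1, self.max_tries + 1):
                    try:
                        return to_wrap(*args, **kwargs)
                    except self.exc_type:
                        if attempt == self.max_tries:
                            raise           # out of retries, surface the failure
                        time.sleep(self.delay * attempt)   # linear backoff
            return wrapped


    class ExampleTest(unittest.TestCase):

        # An assertion failure re-runs the whole test body, so every
        # eventually-consistent check gets a fresh pass, not just one call.
        @RetryOnFailure(unittest.TestCase.failureException)
        def test_listing_eventually_settles(self):
            self.assertTrue(True)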
dhermes committed Sep 9, 2016
1 parent 49b3f1b commit da6b9eb
Showing 1 changed file with 28 additions and 29 deletions.
57 changes: 28 additions & 29 deletions system_tests/storage.py
@@ -26,7 +26,6 @@
 
 from system_test_utils import unique_resource_id
 from retry import RetryErrors
-from retry import RetryResult
 
 
 HTTP = httplib2.Http()
@@ -44,6 +43,19 @@ def _bad_copy(bad_request):
                              error_predicate=_bad_copy)
 
 
+def _empty_bucket(bucket):
+    """Empty a bucket of all existing blobs.
+
+    This accounts (partially) for the eventual consistency of the
+    list blobs API call.
+    """
+    for blob in bucket.list_blobs():
+        try:
+            blob.delete()
+        except exceptions.NotFound:  # eventual consistency
+            pass
+
+
 class Config(object):
     """Run-time configuration to be modified at set-up.
@@ -216,8 +228,7 @@ class TestStorageListFiles(TestStorageFiles):
     def setUpClass(cls):
         super(TestStorageListFiles, cls).setUpClass()
         # Make sure bucket empty before beginning.
-        for blob in cls.bucket.list_blobs():
-            blob.delete()
+        _empty_bucket(cls.bucket)
 
         logo_path = cls.FILES['logo']['path']
         blob = storage.Blob(cls.FILENAMES[0], bucket=cls.bucket)
@@ -235,18 +246,13 @@ def tearDownClass(cls):
         for blob in cls.suite_blobs_to_delete:
             blob.delete()
 
+    @RetryErrors(unittest.TestCase.failureException)
     def test_list_files(self):
-        def _all_in_list(blobs):
-            return len(blobs) == len(self.FILENAMES)
-
-        def _all_blobs():
-            return list(self.bucket.list_blobs())
-
-        retry = RetryResult(_all_in_list)
-        all_blobs = retry(_all_blobs)()
+        all_blobs = list(self.bucket.list_blobs())
         self.assertEqual(sorted(blob.name for blob in all_blobs),
                          sorted(self.FILENAMES))
 
+    @RetryErrors(unittest.TestCase.failureException)
     def test_paginate_files(self):
         truncation_size = 1
         count = len(self.FILENAMES) - truncation_size
@@ -277,11 +283,7 @@ class TestStoragePseudoHierarchy(TestStorageFiles):
     def setUpClass(cls):
         super(TestStoragePseudoHierarchy, cls).setUpClass()
         # Make sure bucket empty before beginning.
-        for blob in cls.bucket.list_blobs():
-            try:
-                blob.delete()
-            except exceptions.NotFound:  # eventual consistency
-                pass
+        _empty_bucket(cls.bucket)
 
         simple_path = cls.FILES['simple']['path']
         blob = storage.Blob(cls.FILENAMES[0], bucket=cls.bucket)
@@ -297,6 +299,7 @@ def tearDownClass(cls):
         for blob in cls.suite_blobs_to_delete:
             blob.delete()
 
+    @RetryErrors(unittest.TestCase.failureException)
     def test_root_level_w_delimiter(self):
         iterator = self.bucket.list_blobs(delimiter='/')
         response = iterator.get_next_page_response()
@@ -306,6 +309,7 @@ def test_root_level_w_delimiter(self):
         self.assertTrue(iterator.next_page_token is None)
         self.assertEqual(iterator.prefixes, set(['parent/']))
 
+    @RetryErrors(unittest.TestCase.failureException)
     def test_first_level(self):
         iterator = self.bucket.list_blobs(delimiter='/', prefix='parent/')
         response = iterator.get_next_page_response()
@@ -315,30 +319,25 @@ def test_first_level(self):
         self.assertTrue(iterator.next_page_token is None)
         self.assertEqual(iterator.prefixes, set(['parent/child/']))
 
+    @RetryErrors(unittest.TestCase.failureException)
     def test_second_level(self):
         expected_names = [
             'parent/child/file21.txt',
             'parent/child/file22.txt',
         ]
 
-        def _all_in_list(pair):
-            _, blobs = pair
-            return [blob.name for blob in blobs] == expected_names
-
-        def _all_blobs():
-            iterator = self.bucket.list_blobs(delimiter='/',
-                                              prefix='parent/child/')
-            response = iterator.get_next_page_response()
-            blobs = list(iterator.get_items_from_response(response))
-            return iterator, blobs
-
-        retry = RetryResult(_all_in_list)
-        iterator, _ = retry(_all_blobs)()
+        iterator = self.bucket.list_blobs(delimiter='/',
+                                          prefix='parent/child/')
+        response = iterator.get_next_page_response()
+        blobs = list(iterator.get_items_from_response(response))
+        self.assertEqual([blob.name for blob in blobs],
+                         expected_names)
         self.assertEqual(iterator.page_number, 1)
         self.assertTrue(iterator.next_page_token is None)
         self.assertEqual(iterator.prefixes,
                          set(['parent/child/grand/', 'parent/child/other/']))
 
+    @RetryErrors(unittest.TestCase.failureException)
     def test_third_level(self):
         # Pseudo-hierarchy can be arbitrarily deep, subject to the limit
         # of 1024 characters in the UTF-8 encoded name:
