Playing with retries. #2040

Merged (6 commits) on Aug 2, 2016
22 changes: 20 additions & 2 deletions system_tests/bigquery.py
@@ -20,7 +20,9 @@
from gcloud import _helpers
from gcloud.environment_vars import TESTS_PROJECT
from gcloud import bigquery
from gcloud.exceptions import Forbidden

from retry import Retry
from system_test_utils import unique_resource_id


@@ -90,7 +92,15 @@ def test_update_dataset(self):
        after = [grant for grant in dataset.access_grants
                 if grant.entity_id != 'projectWriters']
        dataset.access_grants = after
        dataset.update()

        # We need to wait to stay within the rate limits.
        # The alternative outcome is a 403 Forbidden response from upstream.
        # See: https://cloud.google.com/bigquery/quota-policy
        @Retry(Forbidden, tries=2, delay=30)
        def update_dataset():
            dataset.update()

        update_dataset()
        self.assertEqual(len(dataset.access_grants), len(after))
        for found, expected in zip(dataset.access_grants, after):
            self.assertEqual(found.role, expected.role)
@@ -188,7 +198,15 @@ def test_patch_table(self):
    def test_update_table(self):
        dataset = Config.CLIENT.dataset(DATASET_NAME)
        self.assertFalse(dataset.exists())
        dataset.create()

        # We need to wait to stay within the rate limits.
        # The alternative outcome is a 403 Forbidden response from upstream.
        # See: https://cloud.google.com/bigquery/quota-policy
        @Retry(Forbidden, tries=2, delay=30)
        def create_dataset():
            dataset.create()

        create_dataset()

        self.to_delete.append(dataset)
        TABLE_NAME = 'test_table'
        full_name = bigquery.SchemaField('full_name', 'STRING',
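As a side note on the two hunks above: both wrap the quota-sensitive call in a locally defined function so the Retry decorator can re-invoke it when the backend returns 403 Forbidden. A minimal sketch of that pattern with the timings spelled out (the helper name _create_with_retry is illustrative, not part of the diff):

from gcloud.exceptions import Forbidden

from retry import Retry


def _create_with_retry(dataset):
    # With tries=2, delay=30 and the decorator's default backoff of 2, a
    # Forbidden response is logged and retried after a 30-second wait, then
    # after a 60-second wait, before one final attempt is made.
    @Retry(Forbidden, tries=2, delay=30)
    def create_dataset():
        dataset.create()

    create_dataset()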
55 changes: 55 additions & 0 deletions system_tests/retry.py
@@ -0,0 +1,55 @@
import time
from functools import wraps

import six


class Retry(object):
    """Retry class for retrying eventually consistent resources in testing."""

    def __init__(self, exception, tries=4, delay=3, backoff=2, logger=None):
        """Retry calling the decorated function using an exponential backoff.

        :type exception: Exception or tuple of Exceptions
        :param exception: The exception to check for, or a tuple of
                          exceptions to check for.

        :type tries: int
        :param tries: Number of times to try (not retry) before giving up.

        :type delay: int
        :param delay: Initial delay between retries in seconds.

        :type backoff: int
        :param backoff: Backoff multiplier, e.g. a value of 2 will double
                        the delay on each retry.

        :type logger: logging.Logger instance
        :param logger: Logger to use. If None, messages are printed.
        """

        self.exception = exception
        self.tries = tries
        self.delay = delay
        self.backoff = backoff
        self.logger = logger.warning if logger else six.print_

    def __call__(self, to_wrap):
        @wraps(to_wrap)
        def wrapped_function(*args, **kwargs):
            tries_counter = self.tries
            delay = self.delay
            while tries_counter > 0:
                try:
                    return to_wrap(*args, **kwargs)
                except self.exception as caught_exception:
                    msg = ("%s, Trying again in %d seconds..." %
                           (str(caught_exception), delay))
                    self.logger(msg)

                    time.sleep(delay)
                    tries_counter -= 1
                    delay *= self.backoff
            # Final attempt after the retries are exhausted; any exception
            # raised here propagates to the caller.
            return to_wrap(*args, **kwargs)

        return wrapped_function
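For reference, a minimal usage sketch of this decorator (flaky_fetch and the use of ValueError are illustrative assumptions, not part of this module). With tries=3, delay=2 and backoff=2, each failed in-loop attempt sleeps for the current delay (2, then 4, then 8 seconds), and one final attempt is made after the loop, whose exception propagates to the caller:

import random

from retry import Retry


@Retry(ValueError, tries=3, delay=2, backoff=2)
def flaky_fetch():
    # Hypothetical operation that fails intermittently; ValueError stands in
    # for whatever transient exception the caller expects to retry on.
    if random.random() < 0.5:
        raise ValueError('transient failure')
    return 'ok'


print(flaky_fetch())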