From 48fe2816fb8e4833eea8c118866c84c10e686d00 Mon Sep 17 00:00:00 2001 From: florimondmanca Date: Sat, 18 Jan 2020 09:46:54 +0100 Subject: [PATCH 1/5] Add .sleep() to backends --- httpx/backends/asyncio.py | 3 +++ httpx/backends/auto.py | 3 +++ httpx/backends/base.py | 3 +++ httpx/backends/trio.py | 3 +++ 4 files changed, 12 insertions(+) diff --git a/httpx/backends/asyncio.py b/httpx/backends/asyncio.py index 8d1025748b..7371be7908 100644 --- a/httpx/backends/asyncio.py +++ b/httpx/backends/asyncio.py @@ -225,6 +225,9 @@ async def open_uds_stream( return SocketStream(stream_reader=stream_reader, stream_writer=stream_writer) + async def sleep(self, seconds: float) -> None: + await asyncio.sleep(seconds) + def time(self) -> float: loop = asyncio.get_event_loop() return loop.time() diff --git a/httpx/backends/auto.py b/httpx/backends/auto.py index 7a8c597822..935a2804d7 100644 --- a/httpx/backends/auto.py +++ b/httpx/backends/auto.py @@ -41,6 +41,9 @@ async def open_uds_stream( ) -> BaseSocketStream: return await self.backend.open_uds_stream(path, hostname, ssl_context, timeout) + async def sleep(self, seconds: float) -> None: + await self.backend.sleep(seconds) + def time(self) -> float: return self.backend.time() diff --git a/httpx/backends/base.py b/httpx/backends/base.py index 964d09449f..0c01709328 100644 --- a/httpx/backends/base.py +++ b/httpx/backends/base.py @@ -111,6 +111,9 @@ async def open_uds_stream( ) -> BaseSocketStream: raise NotImplementedError() # pragma: no cover + async def sleep(self, seconds: float) -> None: + raise NotImplementedError() # pragma: no cover + def time(self) -> float: raise NotImplementedError() # pragma: no cover diff --git a/httpx/backends/trio.py b/httpx/backends/trio.py index 33e93e9677..e6bf208d63 100644 --- a/httpx/backends/trio.py +++ b/httpx/backends/trio.py @@ -131,6 +131,9 @@ async def open_uds_stream( raise ConnectTimeout() + async def sleep(self, seconds: float) -> None: + await trio.sleep(seconds) + def time(self) -> 
float: return trio.current_time() From 6b3ae742f23a8d5e6a2f022d7822c797bcb46978 Mon Sep 17 00:00:00 2001 From: florimondmanca Date: Sat, 18 Jan 2020 09:48:05 +0100 Subject: [PATCH 2/5] Add docs --- docs/advanced.md | 56 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 56 insertions(+) diff --git a/docs/advanced.md b/docs/advanced.md index 1d2e4ca0af..4a26a1c55e 100644 --- a/docs/advanced.md +++ b/docs/advanced.md @@ -466,3 +466,59 @@ If you do need to make HTTPS connections to a local server, for example to test >>> r Response <200 OK> ``` + +## Retries + +Communicating with a peer over a network is by essence subject to errors. HTTPX provides built-in retry functionality to increase the resilience to unexpected issues such as network faults or connection issues. + +The default behavior is to retry at most 3 times on connection and network errors before marking the request as failed and bubbling up any exceptions. The delay between retries is increased each time to prevent overloading the requested server. + +### Setting and disabling retries + +You can set retries for an individual request: + +```python +# Using the top-level API: +httpx.get('https://www.example.org', retries=5) + +# Using a client instance: +with httpx.Client() as client: + client.get("https://www.example.org", retries=5) +``` + +Or disable retries for an individual request: + +```python +# Using the top-level API: +httpx.get('https://www.example.org', retries=None) + +# Using a client instance: +with httpx.Client() as client: + client.get("https://www.example.org", retries=None) +``` + +### Setting default retries on the client + +You can set the retry behavior on a client instance, which results in the given behavior being used as the default for requests made with this client: + +```python +client = httpx.Client() # Default behavior: retry at most 3 times. +client = httpx.Client(retries=5) # Retry at most 5 times. 
+client = httpx.Client(retries=None) # Disable retries by default. +``` + +### Fine-tuning the retries configuration + +The `retries` argument also accepts an instance of `httpx.Retries()`, in case you need more fine-grained control over the retries behavior. It accepts the following parameters: + +- `limit`: the maximum number of retryable errors to retry on. +- `backoff_factor`: a number representing how fast to increase the retry delay. For example, a value of `0.2` (the default) corresponds to this sequence of delays: `(0s, 0.2s, 0.4s, 0.8s, 1.6s, ...)`. + +```python +import httpx + +# Retry at most 5 times, and space out retries further away +# in time than the default (0s, 1s, 2s, 4s, ...). +retries = httpx.Retries(limit=5, backoff_factor=1.0) +response = httpx.get('https://www.example.com', retries=retries) +``` From 24d45e0d1c4285bf99af6b17c31a18522abf5ad0 Mon Sep 17 00:00:00 2001 From: florimondmanca Date: Sat, 18 Jan 2020 12:32:28 +0100 Subject: [PATCH 3/5] Add retries --- httpx/__init__.py | 8 +- httpx/client.py | 68 +++++++++++++++- httpx/config.py | 70 +++++++++++++++- httpx/exceptions.py | 9 +++ httpx/retries.py | 191 ++++++++++++++++++++++++++++++++++++++++++++ 5 files changed, 341 insertions(+), 5 deletions(-) create mode 100644 httpx/retries.py diff --git a/httpx/__init__.py b/httpx/__init__.py index 4a133e8efd..9f2497c563 100644 --- a/httpx/__init__.py +++ b/httpx/__init__.py @@ -2,7 +2,7 @@ from .api import delete, get, head, options, patch, post, put, request, stream from .auth import Auth, BasicAuth, DigestAuth from .client import AsyncClient, Client -from .config import PoolLimits, Proxy, Timeout +from .config import PoolLimits, Proxy, Retries, Timeout from .dispatch.asgi import ASGIDispatch from .dispatch.wsgi import WSGIDispatch from .exceptions import ( @@ -25,9 +25,11 @@ StreamConsumed, TimeoutException, TooManyRedirects, + TooManyRetries, WriteTimeout, ) from .models import URL, Cookies, Headers, QueryParams, Request, Response +from 
.retries import RetryLimits, RetryOnConnectionFailures from .status_codes import StatusCode, codes __all__ = [ @@ -54,6 +56,10 @@ "PoolLimits", "Proxy", "Timeout", + "Retries", + "RetryLimits", + "RetryOnConnectionFailures", + "TooManyRetries", "ConnectTimeout", "CookieConflict", "ConnectionClosed", diff --git a/httpx/client.py b/httpx/client.py index 9eec6641bb..bec80df974 100644 --- a/httpx/client.py +++ b/httpx/client.py @@ -5,16 +5,19 @@ import hstspreload from .auth import Auth, AuthTypes, BasicAuth, FunctionAuth -from .backends.base import ConcurrencyBackend +from .backends.base import ConcurrencyBackend, lookup_backend from .config import ( DEFAULT_MAX_REDIRECTS, DEFAULT_POOL_LIMITS, + DEFAULT_RETRIES_CONFIG, DEFAULT_TIMEOUT_CONFIG, UNSET, CertTypes, PoolLimits, ProxiesTypes, Proxy, + Retries, + RetriesTypes, Timeout, TimeoutTypes, UnsetType, @@ -33,6 +36,7 @@ RedirectLoop, RequestBodyUnavailable, TooManyRedirects, + TooManyRetries, ) from .models import ( URL, @@ -64,6 +68,7 @@ def __init__( headers: HeaderTypes = None, cookies: CookieTypes = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + retries: RetriesTypes = DEFAULT_RETRIES_CONFIG, max_redirects: int = DEFAULT_MAX_REDIRECTS, base_url: URLTypes = None, trust_env: bool = True, @@ -81,6 +86,7 @@ def __init__( self._headers = Headers(headers) self._cookies = Cookies(cookies) self.timeout = Timeout(timeout) + self.retries = Retries(retries) self.max_redirects = max_redirects self.trust_env = trust_env self.netrc = NetRCInfo() @@ -941,6 +947,7 @@ def __init__( http2: bool = False, proxies: ProxiesTypes = None, timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + retries: RetriesTypes = DEFAULT_RETRIES_CONFIG, pool_limits: PoolLimits = DEFAULT_POOL_LIMITS, max_redirects: int = DEFAULT_MAX_REDIRECTS, base_url: URLTypes = None, @@ -956,6 +963,7 @@ def __init__( headers=headers, cookies=cookies, timeout=timeout, + retries=retries, max_redirects=max_redirects, base_url=base_url, trust_env=trust_env, @@ 
-1106,10 +1114,16 @@ async def send( timeout = self.timeout if isinstance(timeout, UnsetType) else Timeout(timeout) + retries = self.retries + auth = self.build_auth(request, auth) - response = await self.send_handling_redirects( - request, auth=auth, timeout=timeout, allow_redirects=allow_redirects, + response = await self.send_handling_retries( + request, + auth=auth, + timeout=timeout, + retries=retries, + allow_redirects=allow_redirects, ) if not stream: @@ -1120,6 +1134,54 @@ async def send( return response + async def send_handling_retries( + self, + request: Request, + auth: Auth, + retries: Retries, + timeout: Timeout, + allow_redirects: bool = True, + ) -> Response: + backend = lookup_backend() + + delays = retries.get_delays() + retry_flow = retries.retry_flow(request) + + # Initialize the generators. + next(delays) + request = next(retry_flow) + + while True: + try: + response = await self.send_handling_redirects( + request, + auth=auth, + timeout=timeout, + allow_redirects=allow_redirects, + ) + except HTTPError as exc: + logger.debug(f"HTTP Request failed: {exc!r}") + try: + request = retry_flow.throw(type(exc), exc, exc.__traceback__) + except (TooManyRetries, HTTPError): + raise + else: + delay = next(delays) + logger.debug(f"Retrying in {delay} seconds") + await backend.sleep(delay) + else: + try: + request = retry_flow.send(response) + except TooManyRetries: + raise + except StopIteration: + return response + else: + delay = next(delays) + logger.debug(f"Retrying in {delay} seconds") + await backend.sleep(delay) + continue + async def send_handling_redirects( self, request: Request, diff --git a/httpx/config.py b/httpx/config.py index d31086493c..34ca5c6b46 100644 --- a/httpx/config.py +++ b/httpx/config.py @@ -1,3 +1,4 @@ +import itertools import os import ssl import typing @@ -5,7 +6,8 @@ import certifi -from .models import URL, Headers, HeaderTypes, URLTypes +from .models import URL, Headers, HeaderTypes, Request, Response, URLTypes +from 
.retries import DontRetry, RetryLimits, RetryOnConnectionFailures from .utils import get_ca_bundle_from_env, get_logger CertTypes = typing.Union[str, typing.Tuple[str, str], typing.Tuple[str, str, str]] @@ -16,6 +18,7 @@ ProxiesTypes = typing.Union[ URLTypes, "Proxy", typing.Dict[URLTypes, typing.Union[URLTypes, "Proxy"]] ] +RetriesTypes = typing.Union[int, "RetryLimits", "Retries"] DEFAULT_CIPHERS = ":".join( @@ -337,6 +340,71 @@ def __repr__(self) -> str: ) +class Retries: + """ + Retries configuration. + + Holds a retry limiting policy, and implements a configurable exponential + backoff algorithm. + + **Usage**: + + ```python + httpx.Retries() # Default: at most 3 retries on connection failures. + httpx.Retries(0) # Disable retries. + httpx.Retries(5) # At most 5 retries on connection failures. + httpx.Retries( # at most 3 retries on connection failures, with slower backoff. + 5, backoff_factor=1.0 + ) + # Custom retry limiting policy. + httpx.Retries(RetryOnSomeCondition(...)) + # At most 5 retries on connection failures, custom policy for other errors. + httpx.Retries(httpx.RetryOnConnectionFailures(5) | RetryOnSomeOtherCondition(...)) + ``` + """ + + def __init__( + self, + limits: RetriesTypes = 3, + *, + backoff_factor: typing.Union[float, UnsetType] = UNSET, + ) -> None: + if isinstance(limits, int): + limits = RetryOnConnectionFailures(limits) if limits > 0 else DontRetry() + elif isinstance(limits, Retries): + if isinstance(backoff_factor, UnsetType): + backoff_factor = limits.backoff_factor + limits = limits.limits + else: + assert isinstance(limits, RetryLimits) + + if isinstance(backoff_factor, UnsetType): + backoff_factor = 0.2 + + assert backoff_factor > 0 + self.limits: RetryLimits = limits + self.backoff_factor: float = backoff_factor + + def get_delays(self) -> typing.Iterator[float]: + """ + Used by clients to determine how long to wait before issuing a new request. + """ + yield 0 # Send the initial request. + yield 0 # Retry immediately. 
+ for n in itertools.count(2): + yield self.backoff_factor * (2 ** (n - 2)) + + def retry_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + """ + Used by clients to determine what to do when failing to receive a response, + or when a response was received. + + Delegates to the retry limiting policy. + """ + yield from self.limits.retry_flow(request) + + DEFAULT_TIMEOUT_CONFIG = Timeout(timeout=5.0) +DEFAULT_RETRIES_CONFIG = Retries(3, backoff_factor=0.2) DEFAULT_POOL_LIMITS = PoolLimits(soft_limit=10, hard_limit=100) DEFAULT_MAX_REDIRECTS = 20 diff --git a/httpx/exceptions.py b/httpx/exceptions.py index 7efe6fb3c9..951636b0b7 100644 --- a/httpx/exceptions.py +++ b/httpx/exceptions.py @@ -113,6 +113,15 @@ class NotRedirectResponse(RedirectError): """ +# Retries... + + +class TooManyRetries(HTTPError): + """ + The maximum number of retries allowed for a request was exceeded. + """ + + # Stream exceptions... diff --git a/httpx/retries.py b/httpx/retries.py new file mode 100644 index 0000000000..23ad284798 --- /dev/null +++ b/httpx/retries.py @@ -0,0 +1,191 @@ +import typing + +from .exceptions import ( + ConnectTimeout, + HTTPError, + NetworkError, + PoolTimeout, + TooManyRetries, +) +from .models import Request, Response +from .utils import get_logger + +logger = get_logger(__name__) + + +class RetryLimits: + """ + Base class for retry limiting policies. + """ + + def retry_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + """ + Execute the retry flow. + + To dispatch a request, you should `yield` it, and prepare for the following + situations: + + * The request resulted in an `httpx.HTTPError`. If it should be retried on, + you should make any necessary modifications to the request, and continue + yielding. If you've exceeded the maximum number of retries, wrap the error + in `httpx.TooManyRetries()` and raise the result. If it shouldn't be retried + on, re-`raise` the error as-is. 
+ * The request went through and resulted in the client sending back a `response`. + If it should be retried on (e.g. because it is an error response), you + should make any necessary modifications to the request, and continue yielding. + Otherwise, `return` to terminate the retry flow. + + Note that modifying the request may cause downstream mechanisms that rely + on request signing to fail. For example, this could be the case of + certain authentication schemes. + + A typical pseudo-code implementation based on a while-loop and try/except + blocks may look like this... + + ```python + while True: + try: + response = yield request + except httpx.HTTPError as exc: + if not has_retries_left(): + raise TooManyRetries(exc) + if should_retry_on_exception(exc): + increment_retries_left() + # (Optionally modify the request here.) + continue + else: + raise + else: + if should_retry_on_response(response): + # (Optionally modify the request here.) + continue + return + ``` + """ + raise NotImplementedError + + def __or__(self, other: typing.Any) -> "RetryLimits": + if not isinstance(other, RetryLimits): + raise NotImplementedError + return _OrRetries(self, other) + + +class _OrRetries(RetryLimits): + """ + Helper for composing retry limits. 
+ """ + + def __init__(self, left: RetryLimits, right: RetryLimits) -> None: + self.left = left + self.right = right + + def retry_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + left_flow = self.left.retry_flow(request) + right_flow = self.right.retry_flow(request) + + request = next(left_flow) + request = next(right_flow) + + while True: + try: + response = yield request + except HTTPError as exc: + try: + request = left_flow.throw(type(exc), exc, exc.__traceback__) + except TooManyRetries: + raise + except HTTPError: + try: + request = right_flow.throw(type(exc), exc, exc.__traceback__) + except TooManyRetries: + raise + except HTTPError: + raise + else: + continue + else: + continue + else: + try: + request = left_flow.send(response) + except TooManyRetries: + raise + except StopIteration: + try: + request = right_flow.send(response) + except TooManyRetries: + raise + except StopIteration: + return + else: + continue + else: + continue + + +class DontRetry(RetryLimits): + def retry_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + # Send the initial request, and never retry. + # Don't raise a `TooManyRetries` exception because this should really be + # a no-op implementation. + yield request + + +class RetryOnConnectionFailures(RetryLimits): + """ + Retry when failing to establish a connection, or when a network + error occurred. 
+ """ + + _RETRYABLE_EXCEPTIONS: typing.Sequence[typing.Type[HTTPError]] = ( + ConnectTimeout, + PoolTimeout, + NetworkError, + ) + _RETRYABLE_METHODS: typing.Container[str] = frozenset( + ("HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE") + ) + + def __init__(self, limit: int = 3) -> None: + assert limit >= 0 + self.limit = limit + + def _should_retry_on_exception(self, exc: HTTPError) -> bool: + for exc_cls in self._RETRYABLE_EXCEPTIONS: + if isinstance(exc, exc_cls): + break + else: + logger.debug(f"not_retryable exc_type={type(exc)}") + return False + + assert exc.request is not None + method = exc.request.method.upper() + if method not in self._RETRYABLE_METHODS: + logger.debug(f"not_retryable method={method!r}") + return False + + return True + + def retry_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + retries_left = self.limit + + while True: + try: + _ = yield request + except HTTPError as exc: + # Failed to get a response... + + if not retries_left: + raise TooManyRetries(exc, request=request) + + if self._should_retry_on_exception(exc): + retries_left -= 1 + continue + + # Raise the exception for other retry limits involved to handle, + # or for bubbling up to the client. + raise + else: + # We managed to get a response without connection/network + # failures, so we're done here. 
+ return From 89d04cc1ed7258d9fc9afd7cd8726a84c35eb5f0 Mon Sep 17 00:00:00 2001 From: florimondmanca Date: Sat, 18 Jan 2020 13:48:26 +0100 Subject: [PATCH 4/5] Refine docs, interfaces and implementation --- docs/advanced.md | 87 ++++++++++++++++++++++++++++++++--------------- httpx/__init__.py | 3 +- httpx/config.py | 62 +++++++++++++++++++-------------- httpx/retries.py | 66 ++++++++++++++--------------------- 4 files changed, 122 insertions(+), 96 deletions(-) diff --git a/docs/advanced.md b/docs/advanced.md index 4a26a1c55e..343d05762c 100644 --- a/docs/advanced.md +++ b/docs/advanced.md @@ -475,50 +475,81 @@ The default behavior is to retry at most 3 times on connection and network error ### Setting and disabling retries -You can set retries for an individual request: +You can set the retry behavior on a client instance, which results in the given behavior being used for all requests made with this client: ```python -# Using the top-level API: -httpx.get('https://www.example.org', retries=5) - -# Using a client instance: -with httpx.Client() as client: - client.get("https://www.example.org", retries=5) +client = httpx.Client() # Retry at most 3 times on connection failures. +client = httpx.Client(retries=5) # Retry at most 5 times on connection failures. +client = httpx.Client(retries=0) # Disable retries. ``` -Or disable retries for an individual request: +### Fine-tuning the retries configuration -```python -# Using the top-level API: -httpx.get('https://www.example.org', retries=None) +When instantiating a client, the `retries` argument may be one of the following... -# Using a client instance: -with httpx.Client() as client: - client.get("https://www.example.org", retries=None) -``` +* An integer, representing the maximum number of connection failures to retry on. Use `0` to disable retries entirely.
-### Setting default retries on the client +```python +client = httpx.Client(retries=5) +``` -You can set the retry behavior on a client instance, which results in the given behavior being used as the default for requests made with this client: +* An `httpx.Retries()` instance. It accepts the number of connection failures to retry on as a positional argument. The `backoff_factor` keyword argument specifies how fast the time to wait before issuing a retry request should be increased. By default this is `0.2`, which corresponds to issuing a new request after `(0s, 0.2s, 0.4s, 0.8s, ...)`. (Note that a lot of errors are immediately resolved after retrying, so HTTPX will always issue the initial retry right away.) ```python -client = httpx.Client() # Default behavior: retry at most 3 times. -client = httpx.Client(retries=5) # Retry at most 5 times. -client = httpx.Client(retries=None) # Disable retries by default. +# Retry at most 5 times on connection failures, +# and issue new requests after `(0s, 0.5s, 1s, 2s, 4s, ...)` +retries = httpx.Retries(5, backoff_factor=0.5) +client = httpx.Client(retries=retries) ``` -### Fine-tuning the retries configuration +### Advanced retries customization -The `retries` argument also accepts an instance of `httpx.Retries()`, in case you need more fine-grained control over the retries behavior. It accepts the following parameters: +The first argument to `httpx.Retries()` can also be a subclass of `httpx.RetryLimits`. This is useful if you want to replace or extend the default behavior of retrying on connection failures. -- `limit`: the maximum number of retryable errors to retry on. -- `backoff_factor`: a number representing how fast to increase the retry delay. For example, a value of `0.2` (the default) corresponds to this sequence of delays: `(0s, 0.2s, 0.4s, 0.8s, 1.6s, ...)`.
+The `httpx.RetryLimits` subclass should implement the `.retry_flow()` method, `yield` any request to be made, and prepare for the following situations... + +* (A) The request resulted in an `httpx.HTTPError`. If it shouldn't be retried on, `raise` the error as-is. If it should be retried on, you should make any necessary modifications to the request, and continue yielding. If you've exceeded a maximum number of retries, wrap the error in `httpx.TooManyRetries()`, and raise the result. +* (B) The request went through and resulted in the client sending back a `response`. If it shouldn't be retried on, `return` to terminate the retry flow. If it should be retried on (e.g. because it is an error response), you should make any necessary modifications to the request, and continue yielding. If you've exceeded a maximum number of retries, wrap the response in `httpx.TooManyRetries()`, and raise the result. + +As an example, here's how you could implement a custom retry limiting policy that retries on certain status codes: ```python import httpx -# Retry at most 5 times, and space out retries further away -# in time than the default (0s, 1s, 2s, 4s, ...). -retries = httpx.Retries(limit=5, backoff_factor=1.0) -response = httpx.get('https://www.example.com', retries=retries) +class RetryOnStatusCodes(httpx.RetryLimits): + def __init__(self, limit, status_codes): + self.limit = limit + self.status_codes = status_codes + + def retry_flow(self, request): + retries_left = self.limit + + while True: + response = yield request + + if response.status_code not in self.status_codes: + return + + if retries_left == 0: + try: + response.raise_for_status() + except httpx.HTTPError as exc: + raise httpx.TooManyRetries(exc, response=response) + else: + raise httpx.TooManyRetries(response=response) + + retries_left -= 1 +``` + +To use a custom policy: + +* Explicitly pass the number of times to retry on connection failures as a first positional argument to `httpx.Retries()`. 
(Use `0` to not retry on these failures.) +* Pass the custom policy as a second positional argument. + +For example... + +```python +# Retry at most 3 times on connection failures, and at most three times +# on '429 Too Many Requests', '502 Bad Gateway', or '503 Service Unavailable'. +retries = httpx.Retries(3, RetryOnStatusCodes(3, status_codes={429, 502, 503})) ``` diff --git a/httpx/__init__.py b/httpx/__init__.py index 9f2497c563..69d57cb6d9 100644 --- a/httpx/__init__.py +++ b/httpx/__init__.py @@ -29,7 +29,7 @@ WriteTimeout, ) from .models import URL, Cookies, Headers, QueryParams, Request, Response -from .retries import RetryLimits, RetryOnConnectionFailures +from .retries import RetryLimits from .status_codes import StatusCode, codes __all__ = [ @@ -58,7 +58,6 @@ "Timeout", "Retries", "RetryLimits", - "RetryOnConnectionFailures", "TooManyRetries", "ConnectTimeout", "CookieConflict", diff --git a/httpx/config.py b/httpx/config.py index 34ca5c6b46..06b5dcee7e 100644 --- a/httpx/config.py +++ b/httpx/config.py @@ -346,45 +346,57 @@ class Retries: Holds a retry limiting policy, and implements a configurable exponential backoff algorithm. - - **Usage**: - - ```python - httpx.Retries() # Default: at most 3 retries on connection failures. - httpx.Retries(0) # Disable retries. - httpx.Retries(5) # At most 5 retries on connection failures. - httpx.Retries( # at most 3 retries on connection failures, with slower backoff. - 5, backoff_factor=1.0 - ) - # Custom retry limiting policy. - httpx.Retries(RetryOnSomeCondition(...)) - # At most 5 retries on connection failures, custom policy for other errors. 
- httpx.Retries(httpx.RetryOnConnectionFailures(5) | RetryOnSomeOtherCondition(...)) - ``` """ def __init__( self, - limits: RetriesTypes = 3, - *, - backoff_factor: typing.Union[float, UnsetType] = UNSET, + *retries: RetriesTypes, + backoff_factor: float = None, ) -> None: - if isinstance(limits, int): - limits = RetryOnConnectionFailures(limits) if limits > 0 else DontRetry() - elif isinstance(limits, Retries): - if isinstance(backoff_factor, UnsetType): + limits: RetriesTypes + + if len(retries) == 0: + limits = RetryOnConnectionFailures(3) + elif len(retries) == 1: + limits = retries[0] + if isinstance(limits, int): + limits = ( + RetryOnConnectionFailures(limits) if limits > 0 else DontRetry() + ) + elif isinstance(limits, Retries): + assert backoff_factor is None backoff_factor = limits.backoff_factor - limits = limits.limits + limits = limits.limits + else: + raise NotImplementedError( + "Passing a `RetryLimits` subclass as a single argument " + "is not supported. You must explicitly pass the number of times " + "to retry on connection failures. " + "For example: `Retries(3, MyRetryLimits(...))`." + ) + elif len(retries) == 2: + default, custom = retries + assert isinstance(custom, RetryLimits) + limits = Retries(default).limits | custom else: - assert isinstance(limits, RetryLimits) + raise NotImplementedError( + "Composing more than 2 retry limits is not supported yet." + ) - if isinstance(backoff_factor, UnsetType): + if backoff_factor is None: backoff_factor = 0.2 assert backoff_factor > 0 self.limits: RetryLimits = limits self.backoff_factor: float = backoff_factor + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, Retries) + and self.limits == other.limits + and self.backoff_factor == other.backoff_factor + ) + def get_delays(self) -> typing.Iterator[float]: """ Used by clients to determine how long to wait before issuing a new request. 
diff --git a/httpx/retries.py b/httpx/retries.py index 23ad284798..07f0e02c45 100644 --- a/httpx/retries.py +++ b/httpx/retries.py @@ -22,51 +22,20 @@ def retry_flow(self, request: Request) -> typing.Generator[Request, Response, No """ Execute the retry flow. - To dispatch a request, you should `yield` it, and prepare for the following - situations: - - * The request resulted in an `httpx.HTTPError`. If it should be retried on, - you should make any necessary modifications to the request, and continue - yielding. If you've exceeded the maximum number of retries, wrap the error - in `httpx.TooManyRetries()` and raise the result. If it shouldn't be retried - on, re-`raise` the error as-is. - * The request went through and resulted in the client sending back a `response`. - If it should be retried on (e.g. because it is an error response), you - should make any necessary modifications to the request, and continue yielding. - Otherwise, `return` to terminate the retry flow. - - Note that modifying the request may cause downstream mechanisms that rely - on request signing to fail. For example, this could be the case of - certain authentication schemes. - - A typical pseudo-code implementation based on a while-loop and try/except - blocks may look like this... - - ```python - while True: - try: - response = yield request - except httpx.HTTPError as exc: - if not has_retries_left(): - raise TooManyRetries(exc) - if should_retry_on_exception(exc): - increment_retries_left() - # (Optionally modify the request here.) - continue - else: - raise - else: - if should_retry_on_response(response): - # (Optionally modify the request here.) - continue - return - ``` + To dispatch a request, you should `yield` it, and prepare for either + getting a response, or an `HTTPError` being raised. + + In each case, decide whether to retry: + + * If so, continue yielding, unless a maximum number of retries was exceeded. + In that case, raise a `TooManyRetries` exception. 
+ * Otherwise, `return`, or `raise` the exception. """ - raise NotImplementedError + raise NotImplementedError # pragma: no cover def __or__(self, other: typing.Any) -> "RetryLimits": if not isinstance(other, RetryLimits): - raise NotImplementedError + raise NotImplementedError # pragma: no cover return _OrRetries(self, other) @@ -79,6 +48,13 @@ def __init__(self, left: RetryLimits, right: RetryLimits) -> None: self.left = left self.right = right + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, _OrRetries) + and self.left == other.left + and self.right == other.right + ) + def retry_flow(self, request: Request) -> typing.Generator[Request, Response, None]: left_flow = self.left.retry_flow(request) right_flow = self.right.retry_flow(request) @@ -124,6 +100,9 @@ def retry_flow(self, request: Request) -> typing.Generator[Request, Response, No class DontRetry(RetryLimits): + def __eq__(self, other: typing.Any) -> bool: + return type(other) == DontRetry + def retry_flow(self, request: Request) -> typing.Generator[Request, Response, None]: # Send the initial request, and never retry. 
# Don't raise a `TooManyRetries` exception because this should really be @@ -150,6 +129,11 @@ def __init__(self, limit: int = 3) -> None: assert limit >= 0 self.limit = limit + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, RetryOnConnectionFailures) and self.limit == other.limit + ) + def _should_retry_on_exception(self, exc: HTTPError) -> bool: for exc_cls in self._RETRYABLE_EXCEPTIONS: if isinstance(exc, exc_cls): From 9d48d7044a11038414338ee6b97a8c2a4133adf6 Mon Sep 17 00:00:00 2001 From: florimondmanca Date: Sat, 18 Jan 2020 14:06:47 +0100 Subject: [PATCH 5/5] Lint, fix tests --- httpx/config.py | 6 +----- tests/test_timeouts.py | 6 ++++-- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/httpx/config.py b/httpx/config.py index 06b5dcee7e..5efe122956 100644 --- a/httpx/config.py +++ b/httpx/config.py @@ -348,11 +348,7 @@ class Retries: backoff algorithm. """ - def __init__( - self, - *retries: RetriesTypes, - backoff_factor: float = None, - ) -> None: + def __init__(self, *retries: RetriesTypes, backoff_factor: float = None) -> None: limits: RetriesTypes if len(retries) == 0: diff --git a/tests/test_timeouts.py b/tests/test_timeouts.py index e394e0e301..8754449231 100644 --- a/tests/test_timeouts.py +++ b/tests/test_timeouts.py @@ -26,7 +26,7 @@ async def test_write_timeout(server): async def test_connect_timeout(server): timeout = httpx.Timeout(connect_timeout=1e-6) - async with httpx.AsyncClient(timeout=timeout) as client: + async with httpx.AsyncClient(timeout=timeout, retries=0) as client: with pytest.raises(httpx.ConnectTimeout): # See https://stackoverflow.com/questions/100841/ await client.get("http://10.255.255.1/") @@ -37,7 +37,9 @@ async def test_pool_timeout(server): pool_limits = httpx.PoolLimits(hard_limit=1) timeout = httpx.Timeout(pool_timeout=1e-4) - async with httpx.AsyncClient(pool_limits=pool_limits, timeout=timeout) as client: + async with httpx.AsyncClient( + pool_limits=pool_limits, 
timeout=timeout, retries=0 + ) as client: async with client.stream("GET", server.url): with pytest.raises(httpx.PoolTimeout): await client.get("http://localhost:8000/")