Add better support for throttling exceptions #11

Closed · wants to merge 1 commit
16 changes: 16 additions & 0 deletions opnieuw/exceptions.py
@@ -3,9 +3,25 @@
 #
 # Licensed under the 3-clause BSD license, see the LICENSE file in the repository root.
 
+from typing import Union
+
 
 class RetryException(Exception):
     """
     Defines a custom RetryException that can be raised for specific errors we
     want to retry on.
     """
+
+
+class BackoffAndRetryException(Exception):
+    """
+    A custom exception that can be raised for specific errors when a fixed
+    wait time before retrying is in order. This can be particularly useful
+    when requests are throttled and the response includes a Retry-After header.
+
+    This will reset the counters for `max_calls_total` and
+    `retry_window_after_first_call_in_seconds`.
+    """
+
+    def __init__(self, seconds: Union[int, float]):
+        self.seconds = seconds
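For context, here is a minimal sketch of how a caller might use the new exception to honor a server's Retry-After header. This is not part of the PR: the endpoint URL, the fetch_items function, and the use of the requests library are illustrative assumptions.

import requests

from opnieuw.retries import retry
from opnieuw.exceptions import BackoffAndRetryException

# Hypothetical throttled endpoint, for illustration only.
THROTTLED_URL = "https://api.example.com/items"


@retry(
    retry_on_exceptions=ConnectionError,
    max_calls_total=4,
    retry_window_after_first_call_in_seconds=60,
)
def fetch_items() -> dict:
    response = requests.get(THROTTLED_URL)
    if response.status_code == 429:
        # Sleep for as long as the server asks before the next attempt;
        # this also resets the retry counters, as documented above.
        raise BackoffAndRetryException(
            seconds=float(response.headers.get("Retry-After", "1"))
        )
    response.raise_for_status()
    return response.json()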
67 changes: 55 additions & 12 deletions opnieuw/retries.py
@@ -26,6 +26,8 @@
 )
 
 from .clock import Clock, MonotonicClock
+from .exceptions import BackoffAndRetryException
+
 
 logger = logging.getLogger(__name__)

@@ -162,6 +164,10 @@ def retry(
     - `namespace` - A name with which the wait behavior can be controlled
       using the `opnieuw.test_util.retry_immediately` contextmanager.
 
+    Note: if the decorated function raises a `BackoffAndRetryException`, the
+    counters for `max_calls_total` and `retry_window_after_first_call_in_seconds`
+    will reset.
+
     This function will:
 
     - Calculate how to fit `max_calls_total` executions of function in the
@@ -202,24 +208,43 @@ def foo() -> None:
     Opnieuw is based on the retry algorithm described at:
     https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/
     """
+
+    def get_state_generator():
+        """ Create a generator object that produces DoCall and DoWait objects. """
+        return iter(
+            __retry_state_namespaces[namespace](
+                MonotonicClock(),
+                max_calls_total=max_calls_total,
+                retry_window_after_first_call_in_seconds=retry_window_after_first_call_in_seconds,
+            )
+        )
+
     def decorator(f: F) -> F:
         @functools.wraps(f)
         def wrapper(*args: Any, **kwargs: Any) -> Any:
 
             last_exception = None
 
-            retry_state = __retry_state_namespaces[namespace](
-                MonotonicClock(),
-                max_calls_total=max_calls_total,
-                retry_window_after_first_call_in_seconds=retry_window_after_first_call_in_seconds,
-            )
+            retry_state = get_state_generator()
 
-            for retry_action in retry_state:
+            while True:
+                try:
+                    retry_action = next(retry_state)
+                except StopIteration:
+                    break
+
                 if isinstance(retry_action, DoCall):
                     try:
                         return f(*args, **kwargs)
 
+                    except BackoffAndRetryException as e:
+                        last_exception = e
+                        logger.debug(
+                            f"Encountered fixed backoff, sleeping for {e.seconds} seconds"
+                        )
+                        time.sleep(e.seconds)
+                        retry_state = get_state_generator()
+
                     except retry_on_exceptions as e:
                         last_exception = e
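A note on why the loop changed shape: a for statement captures its iterator once at loop entry, so rebinding retry_state inside the body would have no effect and the exhausted counters would keep ticking. The explicit while True with next() re-reads the name on every pass, which is what lets the BackoffAndRetryException handler swap in a fresh generator. A standalone sketch of the difference (plain Python, not opnieuw code):

def counter(label: str):
    for i in range(2):
        yield f"{label}-{i}"


# A for-loop holds a private reference to its iterator, so rebinding
# the name is silently ignored:
gen = counter("old")
for item in gen:
    gen = counter("new")
    print(item)  # prints: old-0, old-1

# Calling next() on the name directly picks up the rebinding, and the
# rest of the old generator is abandoned:
gen = counter("old")
while True:
    try:
        item = next(gen)
    except StopIteration:
        break
    print(item)  # prints: old-0, new-0, new-1
    if item == "old-0":
        gen = counter("new")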

@@ -267,24 +292,42 @@ def retry_async(
     retry_window_after_first_call_in_seconds: int = 60,
     namespace: Optional[str] = None,
 ) -> Callable[[AF], AF]:
+    def get_state_generator():
+        """ Create a generator object that produces DoCall and DoWait objects. """
+        return iter(
+            __retry_state_namespaces[namespace](
+                MonotonicClock(),
+                max_calls_total=max_calls_total,
+                retry_window_after_first_call_in_seconds=retry_window_after_first_call_in_seconds,
+            )
+        )
+
     def decorator(f: AF) -> AF:
         @functools.wraps(f)
         async def wrapper(*args: Any, **kwargs: Any) -> Any:
 
             last_exception = None
 
-            retry_state = __retry_state_namespaces[namespace](
-                MonotonicClock(),
-                max_calls_total=max_calls_total,
-                retry_window_after_first_call_in_seconds=retry_window_after_first_call_in_seconds,
-            )
+            retry_state = get_state_generator()
 
-            for retry_action in retry_state:
+            while True:
+                try:
+                    retry_action = next(retry_state)
+                except StopIteration:
+                    break
+
                 if isinstance(retry_action, DoCall):
                     try:
                         return await f(*args, **kwargs)
 
+                    except BackoffAndRetryException as e:
+                        last_exception = e
+                        logger.debug(
+                            f"Encountered fixed backoff, sleeping for {e.seconds} seconds"
+                        )
+                        await asyncio.sleep(e.seconds)
+                        retry_state = get_state_generator()
+
                     except retry_on_exceptions as e:
                         last_exception = e

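A quick sketch of the async variant in use. The poll_upstream function and its scripted failures are invented for illustration, and this assumes the counter-reset semantics described above:

import asyncio
import itertools

from opnieuw.retries import retry_async
from opnieuw.exceptions import BackoffAndRetryException

_attempts = itertools.count(1)


@retry_async(
    retry_on_exceptions=ConnectionError,
    max_calls_total=3,
    retry_window_after_first_call_in_seconds=30,
)
async def poll_upstream() -> str:
    attempt = next(_attempts)
    if attempt == 1:
        # Simulated throttle: the decorator awaits asyncio.sleep(1), so
        # the event loop is not blocked, and the counters are reset.
        raise BackoffAndRetryException(seconds=1)
    if attempt == 2:
        # An ordinary retryable error, matched by retry_on_exceptions.
        raise ConnectionError("transient failure")
    return "ok"


print(asyncio.run(poll_upstream()))  # "ok" on the third attempt

Note that BackoffAndRetryException is caught by the wrapper itself, so it does not need to be listed in retry_on_exceptions.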
31 changes: 31 additions & 0 deletions tests/test_opnieuw.py
@@ -9,6 +9,11 @@
 from opnieuw.clock import TestClock, MonotonicClock
 from opnieuw.retries import RetryState, DoCall, retry
 from opnieuw.test_util import retry_immediately
+from opnieuw.exceptions import BackoffAndRetryException
+
+
+class CustomBackoffThrottleError(BackoffAndRetryException):
+    pass
 
 
 class TestRetryState(unittest.TestCase):
@@ -110,6 +115,32 @@ def test_mixed_states(self) -> None:
         with retry_immediately("bar_retry"):
             self.test_retry_with_waits()
 
+    @retry(
+        retry_on_exceptions=ValueError,
+        max_calls_total=4,
+        retry_window_after_first_call_in_seconds=10,
+    )
+    def namespaced_retry_fixed_backoff(self) -> None:
+        self.counter += 1
+        if self.counter == 4:
+            raise CustomBackoffThrottleError(seconds=1)
+        raise ValueError
+
+    def test_fixed_backoff_reset_max_calls(self):
+        self.counter = 0
+        start = time.monotonic()
+
+        with retry_immediately():
Review thread:

Contributor: Hmm, so this does mean that backoff throttle waits are not included in the retry_immediately. I would expect them to be skipped as well. This test would still be valuable because it tests that we can make twice the number of calls, due to the counter reset.

Contributor Author: Yeah, that is a little unexpected 👍

+            self.assertRaises(ValueError, self.namespaced_retry_fixed_backoff)
+
+        # expect to sleep at least 1 second for the BackoffAndRetry
+        end = time.monotonic()
+        runtime_seconds = end - start
+        self.assertGreater(runtime_seconds, 1)
+
+        # we exhaust max_calls_total twice: 4 calls before the counter reset, 4 after
+        assert self.counter == 8
 
 
 if __name__ == "__main__":
     unittest.main()
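Since the module ends in unittest.main(), the new test can be exercised by running the file directly; expect it to take a bit over a second, because the fixed backoff sleep is not skipped by retry_immediately, as noted in the review thread:

python tests/test_opnieuw.py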