Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
27 changes: 24 additions & 3 deletions pygerduty/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,15 +5,19 @@
from .exceptions import Error, IntegrationAPIError, BadRequest, NotFound
from six import string_types
from six.moves import urllib
from random import randint
from time import sleep

ISO8601_FORMAT = "%Y-%m-%dT%H:%M:%SZ"


class Requester(object):
def __init__(self, timeout=10, proxies=None, parse_datetime=False):
def __init__(self, timeout=10, proxies=None, parse_datetime=False, min_backoff=15.0, max_retries=5):
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Previously the timeout was 10s with no retries; with the retries + backoff it'll be a minimum 75s before the request will give up by default.

I'd prefer to lower the default values (maybe min_backoff=5 and retries=3), and we should provide a way for users to plumb through their own values, here: https://github.com/dropbox/pygerduty/blob/master/pygerduty/v2.py#L611

self.timeout = timeout
self.json_loader = json.loads
self.min_backoff = min_backoff
self.max_retries = max_retries

self.json_loader = json.loads
if parse_datetime:
self.json_loader = _json_loader

Expand All @@ -22,7 +26,11 @@ def __init__(self, timeout=10, proxies=None, parse_datetime=False):
handlers.append(urllib.request.ProxyHandler(proxies))
self.opener = urllib.request.build_opener(*handlers)

def execute_request(self, request):
def execute_request(self, request, retry_count=0):
if retry_count > 0:
exponential_backoff = self.min_backoff * (2 ** retry_count)
randomness = randint(0, 1000) / 1000.0
sleep(exponential_backoff + randomness)

try:
response = (self.opener.open(request, timeout=self.timeout).
Expand All @@ -38,7 +46,20 @@ def execute_request(self, request):
raise NotFound("URL ({0}) Not Found.".format(
request.get_full_url()))
elif err.code == 429:
if retry_count < self.max_retries:
return self.execute_request(request, retry_count + 1)
else:
raise
elif err.code / 100 == 5:
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Probably want integer division here, err.code // 100

if retry_count < self.max_retries:
return self.execute_request(request, retry_count + 1)
else:
raise
else:
raise
except urllib.error.URLError:
if retry_count < self.max_retries:
return self.execute_request(request, retry_count + 1)
else:
raise

Expand Down
7 changes: 6 additions & 1 deletion pygerduty/v2.py
Original file line number Diff line number Diff line change
Expand Up @@ -175,7 +175,12 @@ def list(self, **kwargs):
break

def count(self, **kwargs):
    """Return the total number of objects in this collection.

    Issues a GET on the collection listing with ``total=true`` so the
    API includes a ``total`` field in the response envelope, and returns
    that field (``None`` if the API omitted it).

    Any keyword arguments are forwarded as query parameters (filters),
    in addition to the forced ``total`` flag.
    """
    # NOTE: the diff removes the old "{0}/count" path — counting is now
    # done by requesting the listing itself with total=true and reading
    # the "total" field from the response.
    path = "{0}".format(self.name)
    kwargs["total"] = "true"
    if self.base_container:
        # Nested collection: scope the path under the parent container,
        # e.g. "<parent_collection>/<parent_id>/<name>".
        path = "{0}/{1}/{2}".format(
            self.base_container.collection.name,
            self.base_container.id, self.name)
    response = self.pagerduty.request("GET", path, query_params=kwargs)
    return response.get("total", None)

Expand Down
Loading