Make the maximum retries a provider setting
During local testing, all 429 failures succeeded upon the first retry.
The issue turned out to be "namespace lock contention" rather than
"too many requests". It is reasonable to set the default maximum
to 2 retries per upload.
cslzchen committed Oct 1, 2018
1 parent 6f52bcf commit 550c649
Showing 2 changed files with 4 additions and 1 deletion.
3 changes: 2 additions & 1 deletion waterbutler/providers/dropbox/provider.py
@@ -61,6 +61,7 @@ class DropboxProvider(provider.BaseProvider):
     BASE_URL = pd_settings.BASE_URL
     CONTIGUOUS_UPLOAD_SIZE_LIMIT = pd_settings.CONTIGUOUS_UPLOAD_SIZE_LIMIT
     CHUNK_SIZE = pd_settings.CHUNK_SIZE
+    MAX_429_RETRIES = pd_settings.MAX_429_RETRIES

     def __init__(self, auth, credentials, settings):
         super().__init__(auth, credentials, settings)
@@ -311,7 +312,7 @@ async def _contiguous_upload(self,
         chunk = await stream.read()

         rate_limit_retry = 0
-        while rate_limit_retry < 2:
+        while rate_limit_retry < self.MAX_429_RETRIES:
             file_stream = streams.FileStreamReader(file_cache)
             resp = await self.make_request(
                 'POST',
2 changes: 2 additions & 0 deletions waterbutler/providers/dropbox/settings.py
@@ -10,3 +10,5 @@
 CONTIGUOUS_UPLOAD_SIZE_LIMIT = int(config.get('CONTIGUOUS_UPLOAD_SIZE_LIMIT', 150000000))  # 150 MB

 CHUNK_SIZE = int(config.get('CHUNK_SIZE', 4000000))  # 4 MB
+
+MAX_429_RETRIES = int(config.get('MAX_429_RETRIES', 2))
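
For context, a minimal sketch of how a retry loop bounded by a MAX_429_RETRIES setting behaves. This is not the actual WaterButler implementation: make_request here is a caller-supplied coroutine, and the fixed one-second backoff and the final error are illustrative assumptions only.

import asyncio

MAX_429_RETRIES = 2  # mirrors the default in settings.py


async def upload_with_retries(make_request):
    """Retry an upload while the server answers HTTP 429, bounded by MAX_429_RETRIES."""
    rate_limit_retry = 0
    while rate_limit_retry < MAX_429_RETRIES:
        resp = await make_request()
        if resp.status != 429:       # success, or a non-rate-limit error handled elsewhere
            return resp
        rate_limit_retry += 1        # rate-limited; count the attempt and try again
        await asyncio.sleep(1)       # hypothetical fixed backoff between attempts
    raise RuntimeError('upload still rate-limited after %d attempts' % MAX_429_RETRIES)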
