From 550c649f14a817294a8bb755e79e31edae48aef1 Mon Sep 17 00:00:00 2001
From: Longze Chen
Date: Tue, 11 Sep 2018 23:24:21 -0400
Subject: [PATCH] Make the maximum retries a provider setting

During local testing, every upload that failed with a 429 succeeded on
the first retry. The underlying issue turned out to be "namespace lock
contention" rather than "too many requests", so a default maximum of 2
retries per upload is reasonable.
---
 waterbutler/providers/dropbox/provider.py | 3 ++-
 waterbutler/providers/dropbox/settings.py | 2 ++
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/waterbutler/providers/dropbox/provider.py b/waterbutler/providers/dropbox/provider.py
index c841c8caf..dcaa19633 100644
--- a/waterbutler/providers/dropbox/provider.py
+++ b/waterbutler/providers/dropbox/provider.py
@@ -61,6 +61,7 @@ class DropboxProvider(provider.BaseProvider):
     BASE_URL = pd_settings.BASE_URL
     CONTIGUOUS_UPLOAD_SIZE_LIMIT = pd_settings.CONTIGUOUS_UPLOAD_SIZE_LIMIT
     CHUNK_SIZE = pd_settings.CHUNK_SIZE
+    MAX_429_RETRIES = pd_settings.MAX_429_RETRIES
 
     def __init__(self, auth, credentials, settings):
         super().__init__(auth, credentials, settings)
@@ -311,7 +312,7 @@ async def _contiguous_upload(self,
             chunk = await stream.read()
 
         rate_limit_retry = 0
-        while rate_limit_retry < 2:
+        while rate_limit_retry < self.MAX_429_RETRIES:
             file_stream = streams.FileStreamReader(file_cache)
             resp = await self.make_request(
                 'POST',
diff --git a/waterbutler/providers/dropbox/settings.py b/waterbutler/providers/dropbox/settings.py
index 4978a170e..192dacab6 100644
--- a/waterbutler/providers/dropbox/settings.py
+++ b/waterbutler/providers/dropbox/settings.py
@@ -10,3 +10,5 @@
 
 CONTIGUOUS_UPLOAD_SIZE_LIMIT = int(config.get('CONTIGUOUS_UPLOAD_SIZE_LIMIT', 150000000))  # 150 MB
 CHUNK_SIZE = int(config.get('CHUNK_SIZE', 4000000))  # 4 MB
+
+MAX_429_RETRIES = int(config.get('MAX_429_RETRIES', 2))
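
Note for reviewers (illustration only, not part of the patch): below is a
minimal sketch of the configurable 429-retry pattern this change introduces.
Only the MAX_429_RETRIES name and its default of 2 come from the patch;
do_upload, RateLimitError, and the retry-delay handling are hypothetical
stand-ins for the provider's actual request machinery.

    import asyncio

    # Default mirrors the patch; in WaterButler this would come from provider config.
    MAX_429_RETRIES = 2


    class RateLimitError(Exception):
        """Hypothetical error raised when the API answers with HTTP 429."""

        def __init__(self, retry_after: float = 1.0):
            super().__init__('rate limited')
            self.retry_after = retry_after


    async def upload_with_retries(do_upload, max_retries: int = MAX_429_RETRIES):
        """Call ``do_upload`` and retry it up to ``max_retries`` times on 429."""
        attempt = 0
        while True:
            try:
                return await do_upload()
            except RateLimitError as exc:
                attempt += 1
                if attempt > max_retries:
                    raise
                # Wait for the server-suggested delay before the next attempt.
                await asyncio.sleep(exc.retry_after)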