[PATCH] D38281: [llvmlab] Add timeout and retries for fetching builds.
Volodymyr Sapsai via Phabricator via llvm-commits
llvm-commits at lists.llvm.org
Fri Sep 29 10:26:37 PDT 2017
This revision was automatically updated to reflect the committed changes.
Closed by commit rL314540: [llvmlab] Add timeout and retries for fetching builds. (authored by vsapsai).
Repository:
rL LLVM
https://reviews.llvm.org/D38281
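For reference, the retry schedule in the new HttpClient follows urllib3's exponential backoff: the sleep before retry N is backoff_factor * 2**(N - 1), and the first retry is issued immediately, so backoff_factor=0.1 gives the 0s, 0.2s, 0.4s sequence noted in the code comment below. A minimal, self-contained sketch of the same session setup (the request URL and query parameters here are illustrative placeholders, not part of the patch):

import requests
import urllib3

# Retry transient failures up to 3 times with exponential backoff
# (sleeps of 0s, 0.2s, 0.4s for backoff_factor=0.1).
retry = urllib3.util.retry.Retry(total=3, backoff_factor=0.1)
session = requests.Session()
session.mount('https://', requests.adapters.HTTPAdapter(max_retries=retry))

# A bounded timeout keeps a hung connection from stalling the caller.
r = session.get("https://www.googleapis.com/storage/v1/b/llvm-build-artifacts/o",
                params={'delimiter': "/", 'fields': "prefixes,nextPageToken"},
                timeout=5)
r.raise_for_status()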
Files:
zorg/trunk/llvmbisect/llvmlab/gcs.py
Index: zorg/trunk/llvmbisect/llvmlab/gcs.py
===================================================================
--- zorg/trunk/llvmbisect/llvmlab/gcs.py
+++ zorg/trunk/llvmbisect/llvmlab/gcs.py
@@ -3,22 +3,38 @@
"""
import os
import requests
+import urllib3
# Root URL to use for our queries.
GCS = "https://www.googleapis.com/storage/v1/"
DEFAULT_BUCKET = "llvm-build-artifacts"
BUCKET = os.getenv("BUCKET", DEFAULT_BUCKET)
+class HttpClient(object):
+ def __init__(self):
+ self.session = requests.Session()
+ # Retry after 0s, 0.2s, 0.4s.
+ retry = urllib3.util.retry.Retry(total=3, backoff_factor=0.1)
+ adapter = requests.adapters.HTTPAdapter(max_retries=retry)
+ self.session.mount('https://', adapter)
+
+ def get(self, url, **kwargs):
+ if "timeout" not in kwargs:
+ kwargs["timeout"] = 5 # seconds
+ return self.session.get(url, **kwargs)
+
+HTTP_CLIENT = HttpClient()
+
def fetch_builders():
"""Each build kind is stored as a folder in the GCS bucket.
List all the folders in the bucket, which is our list of possible
compilers.
"""
params = {'delimiter': "/", 'fields': "prefixes,nextPageToken"}
- r = requests.get(GCS + "b/" + BUCKET + "/o", params=params)
+ r = HTTP_CLIENT.get(GCS + "b/" + BUCKET + "/o", params=params)
r.raise_for_status()
reply_data = r.json()
assert "nextPageToken" not in reply_data.keys(), "Too many builders!"
@@ -36,13 +52,13 @@
params = {'delimiter': "/",
"fields": "nextPageToken,kind,items(name, mediaLink)",
'prefix': project + "/"}
- r = requests.get(GCS + "b/" + BUCKET + "/o", params=params)
+ r = HTTP_CLIENT.get(GCS + "b/" + BUCKET + "/o", params=params)
r.raise_for_status()
reply_data = r.json()
all_data['items'].extend(reply_data['items'])
while reply_data.get('nextPageToken'):
params['pageToken'] = reply_data['nextPageToken']
- r = requests.get(GCS + "b/" + BUCKET + "/o", params=params)
+ r = HTTP_CLIENT.get(GCS + "b/" + BUCKET + "/o", params=params)
r.raise_for_status()
reply_data = r.json()
all_data['items'].extend(reply_data['items'])
@@ -54,7 +70,7 @@
def get_compiler(url, filename):
"""Get the compiler at the url, and save to filename."""
- r = requests.get(url)
+ r = HTTP_CLIENT.get(url)
r.raise_for_status()
with open(filename, 'wb') as fd:
for chunk in r.iter_content(CHUNK_SIZE):
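A quick usage sketch, assuming the module is importable as llvmlab.gcs (the explicit 30-second timeout below is only an example): the module-level helpers now route through the shared HTTP_CLIENT, and a timeout passed explicitly by a caller is left alone because HttpClient.get only fills in the default when the keyword is absent.

from llvmlab import gcs

# fetch_builders() now goes through HTTP_CLIENT, so it gets 3 retries and
# a 5-second default timeout with no change at the call site.
builders = gcs.fetch_builders()

# Callers can still override the timeout; the default applies only when
# no timeout keyword is supplied.
r = gcs.HTTP_CLIENT.get(gcs.GCS + "b/" + gcs.BUCKET + "/o",
                        params={'delimiter': "/"},
                        timeout=30)
r.raise_for_status()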