[3.14] gh-134262: Catch both URLError and ConnectionError in retries (GH-135365) (#135611)

Co-authored-by: Emma Smith <emma@emmatyping.dev>
This commit is contained in:
Miss Islington (bot) 2025-06-17 14:16:17 +02:00 committed by GitHub
parent 3233cff84e
commit 91d9e9e64e
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 5 additions and 4 deletions

View file

@@ -5,8 +5,9 @@
import pathlib
import sys
import time
import urllib.error
import urllib.request
import zipfile
from urllib.request import urlretrieve
def retrieve_with_retries(download_location, output_path, reporthook,
@@ -14,12 +15,12 @@ def retrieve_with_retries(download_location, output_path, reporthook,
"""Download a file with exponential backoff retry and save to disk."""
for attempt in range(max_retries + 1):
try:
resp = urlretrieve(
resp = urllib.request.urlretrieve(
download_location,
output_path,
reporthook=reporthook,
)
except ConnectionError as ex:
except (urllib.error.URLError, ConnectionError) as ex:
if attempt == max_retries:
msg = f"Download from {download_location} failed."
raise OSError(msg) from ex

View file

@@ -172,7 +172,7 @@ def download_with_retries(download_location: str,
for attempt in range(max_retries + 1):
try:
resp = urllib.request.urlopen(download_location)
except urllib.error.URLError as ex:
except (urllib.error.URLError, ConnectionError) as ex:
if attempt == max_retries:
msg = f"Download from {download_location} failed."
raise OSError(msg) from ex