import subprocess
import urllib.request
import urllib.error
from time import sleep
from threading import Lock
import http.client

# Serializes print() calls so concurrent threads don't interleave output lines.
_print_lock = Lock()


def safe_print(*args, **kwargs):
    """Thread-safe print(): holds _print_lock for the duration of the call."""
    with _print_lock:
        print(*args, **kwargs)


def run(args: list, **kwargs):
    """Run a subprocess and raise OSError if it exits with non-zero status.

    Args:
        args: argv list, passed straight through to subprocess.run().
        **kwargs: forwarded to subprocess.run() unchanged.

    Raises:
        OSError: when the process returns a non-zero exit code.  The message
            includes the exit code and the full command line for diagnosis.
    """
    p = subprocess.run(args, **kwargs)
    if p.returncode != 0:
        # map(str, ...) guards against non-string argv entries (e.g. Path,
        # int) that would otherwise make the join itself raise TypeError
        # and mask the real failure.
        cmd = ' '.join(map(str, args))
        raise OSError(f'command returned {p.returncode} ({cmd})')


def download_file(url, output, handle_http_errors=True) -> bool:
    """Download *url* to the local path *output*, retrying dropped connections.

    Makes up to 3 attempts, sleeping 2 seconds after each connection drop.
    HTTP error responses are either reported and swallowed (the default) or
    re-raised for the caller to handle.

    Args:
        url: source URL to fetch.
        output: local filesystem path to write to.
        handle_http_errors: when True, an HTTPError is logged and the
            function returns False; when False, the HTTPError propagates.

    Returns:
        True on success; False when retries are exhausted or an HTTP error
        occurred while handle_http_errors is True.

    Raises:
        urllib.error.HTTPError: only when handle_http_errors is False.
    """
    for _attempt in range(3):
        try:
            urllib.request.urlretrieve(url, output)
            return True
        except http.client.RemoteDisconnected:
            # Transient server-side drop: back off briefly and retry.
            safe_print(' caught an exception, sleeping for 2 seconds and retrying...')
            sleep(2)
        except urllib.error.HTTPError as e:
            if not handle_http_errors:
                # Bare raise preserves the original traceback.
                raise
            safe_print(f' failed to download {url}: {e}')
            return False
    return False