This repository has been archived by the owner on May 7, 2024. It is now read-only.

Commit

Update download.py
YichenLi00 authored Mar 18, 2024
1 parent 89f9767 commit 2fa58a0
Showing 1 changed file with 20 additions and 13 deletions.
src/download.py: 33 changes (20 additions, 13 deletions)
@@ -3,6 +3,8 @@
 from time import sleep
 import concurrent.futures
 from functools import partial
+import threading
+


 class RequestLimitReached(Exception):
@@ -65,7 +67,7 @@ def check_file_authentic(save_path):
     return False


-def req_url(dl_file, max_retry=5, headers=None, proxies=None):
+def req_url(dl_file, max_retry=5, headers=None, proxies=None, wait_event=None):
     """Download file"""
     url = dl_file[0]
     save_path = dl_file[1]
@@ -90,20 +92,29 @@ def req_url(dl_file, max_retry=5, headers=None, proxies=None):
     }
     proxies = proxies if proxies else { "http": "", "https": "", }

-    for i in range(max_retry):
+    attempt = 0
+    while attempt < max_retry:
+        if wait_event.is_set():
+            print(f"Waiting due to rate limit, attempt {attempt}")
+            wait_event.wait()  # Wait until the event is cleared
         try:
             r = requests.get(url, headers=headers, proxies=proxies)
             if r.text.startswith("You can only make 350 requests every 15min"):
-                raise RequestLimitReached("Request limit reached")
+                wait_event.set()
+                print(f"Request limit reached, waiting for 15 minutes. Attempt {attempt}")
+                sleep(15 * 60)
+                wait_event.clear()  # Clear the event to resume all threads
+                attempt += 1
+                continue
             with open(save_path, "wb") as f:
                 f.write(r.content)
             return 'Downloaded: ' + str(save_path)
-        except RequestLimitReached as e:
-            return str(e)
         except Exception as e:
             exception = e
             sleep(0.4)
-    return 'File request exception (retry {}): {} - {}'.format(i, exception, save_path)
+        attempt += 1
+    return 'File request exception (retry {}): {} - {}'.format(attempt, exception, save_path)




@@ -144,17 +155,13 @@ def download_repo(config):
             files.append((file_url, save_path))

-    partial_req = partial(req_url, max_retry=max_retry, headers=headers, proxies=proxies)
-    limit_reached = False
+    wait_event = threading.Event()
     with concurrent.futures.ThreadPoolExecutor(max_workers=max_conns) as executor:
-        future_to_url = (executor.submit(partial_req, dl_file) for dl_file in files)
+        partial_req = partial(req_url, max_retry=max_retry, headers=headers, proxies=proxies, wait_event=wait_event)
+        future_to_url = {executor.submit(partial_req, dl_file): dl_file for dl_file in files}
         for future in concurrent.futures.as_completed(future_to_url):
-            if limit_reached:
-                break  # Stop the download process if the limit is reached
             try:
                 data = future.result()
-                if "Request limit reached" in data:
-                    print(data)
-                    limit_reached = True
             except Exception as exc:
                 data = str(type(exc))
             finally:
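
Note on the threading.Event pattern this commit introduces: Event.wait() blocks until the event is set and returns immediately if it is already set. Because the commit sets the event while the rate limit is active, the wait_event.wait() call at the top of the retry loop returns at once rather than holding the other workers; each thread that sees the limit message ends up sleeping the full 15 minutes itself. Below is a minimal sketch of an inverted "proceed" event that does pause the whole pool; the names (pause_all, download_one) and the fetch callable are hypothetical stand-ins, not code from this repository.

    # Sketch under the assumptions above: the event is SET while downloads
    # may run and CLEARED during a pause, so Event.wait() actually blocks.
    import threading
    from time import sleep

    proceed = threading.Event()
    proceed.set()                   # downloads allowed initially
    pause_guard = threading.Lock()  # ensures only one thread takes the pause

    def pause_all(seconds):
        with pause_guard:
            if proceed.is_set():    # first detector wins; later ones skip
                proceed.clear()     # workers now block at proceed.wait()
                sleep(seconds)
                proceed.set()       # release every waiting worker

    def download_one(url, fetch, pause_seconds=15 * 60):
        """fetch is any callable returning a response body (hypothetical)."""
        proceed.wait()              # blocks only while a pause is active
        body = fetch(url)
        if body.startswith("You can only make 350 requests every 15min"):
            pause_all(pause_seconds)
            return "rate limited: " + url
        return "downloaded: " + url

With this inversion, workers that reach proceed.wait() during a pause genuinely stop, and the lock keeps several threads that hit the limit at once from stacking multiple 15-minute sleeps.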
