Skip to content

Commit

Permalink
fix: ranking wasn't followed by downloader
Browse files Browse the repository at this point in the history
  • Loading branch information
Gaisberg authored and Gaisberg committed Sep 10, 2024
1 parent f5c849f commit 578ae8f
Show file tree
Hide file tree
Showing 3 changed files with 29 additions and 21 deletions.
41 changes: 25 additions & 16 deletions src/program/downloaders/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from program.media.item import MediaItem
from utils.logger import logger
from program.settings.manager import settings_manager

from .alldebrid import AllDebridDownloader
from .realdebrid import RealDebridDownloader
Expand All @@ -12,6 +13,7 @@ class Downloader:
def __init__(self):
self.key = "downloader"
self.initialized = False
self.speed_mode = settings_manager.settings.downloaders.prefer_speed_over_quality
self.service = next((service for service in [
RealDebridDownloader(),
#AllDebridDownloader(),
Expand Down Expand Up @@ -61,9 +63,15 @@ def get_cached_streams(self, hashes: list[str], needed_media, break_on_first = T
# Using a list to share the state, booleans are immutable
break_pointer = [False, break_on_first]
results = []
priority_index = 0

with ThreadPoolExecutor(thread_name_prefix="Dowloader") as executor:
with ThreadPoolExecutor(thread_name_prefix="Downloader") as executor:
futures = []

def cancel_all():
for f in futures:
f.cancel()

for chunk in chunks:
future = executor.submit(self.service.process_hashes, chunk, needed_media, break_pointer)
futures.append(future)
Expand All @@ -73,22 +81,23 @@ def get_cached_streams(self, hashes: list[str], needed_media, break_on_first = T
_result = future.result()
except CancelledError:
continue
if isinstance(_result, dict) and len(_result) > 0:
results.append(_result)
if break_on_first:
for future in futures:
future.cancel()

# # Ensure results are checked in the order of the hashes list
# prioritized_results = []
# for hash in hashes:
# for result in results:
# if result.get("infohash") == hash:
# prioritized_results.append(result)
# if break_on_first and result.get("matched_files"):
# break
# results = prioritized_results
for infohash, container in _result.items():
result = {"infohash": infohash, **container}
# Cached
if container.get("matched_files", False):
results.append(result)
if break_on_first and self.speed_mode:
cancel_all()
return results
elif infohash == hashes[priority_index] and break_on_first:
results = [result]
cancel_all()
return results
# Uncached
elif infohash == hashes[priority_index]:
priority_index += 1

results.sort(key=lambda x: hashes.index(x["infohash"]))
return results

def download(self, item, active_stream: dict) -> str:
Expand Down
8 changes: 3 additions & 5 deletions src/program/downloaders/realdebrid.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,12 +64,9 @@ def validate(self) -> bool:
logger.error("Couldn't parse user data response from Real-Debrid.")
return False

def process_hashes(self, chunk: list[str], needed_media: dict, break_pointer: list[bool]) -> dict | bool:
def process_hashes(self, chunk: list[str], needed_media: dict, break_pointer: list[bool]) -> dict:
cached_containers = self.get_cached_containers(chunk, needed_media, break_pointer)
for infohash, container in cached_containers.items():
if container.get("matched_files"):
return {"infohash": infohash, **container}
return {}
return cached_containers

def download_cached(self, active_stream: dict) -> str:
torrent_id = add_torrent(active_stream.get("infohash"))
Expand All @@ -84,6 +81,7 @@ def get_cached_containers(self, infohashes: list[str], needed_media: dict, break
response = get_instant_availability(infohashes)

for infohash in infohashes:
cached_containers[infohash] = {}
if break_pointer[1] and break_pointer[0]:
break
data = response.get(infohash, {})
Expand Down
1 change: 1 addition & 0 deletions src/program/settings/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@ class TorboxModel(Observable):

class DownloadersModel(Observable):
video_extensions: List[str] = ["mp4", "mkv", "avi"]
prefer_speed_over_quality: bool = False
# movie_filesize_min: int = 200 # MB
# movie_filesize_max: int = -1 # MB (-1 is no limit)
# episode_filesize_min: int = 40 # MB
Expand Down

0 comments on commit 578ae8f

Please sign in to comment.