[O] Better torrent caching

This commit is contained in:
2026-03-09 00:58:31 -04:00
parent 747c5f69c6
commit 51284967fb
4 changed files with 49 additions and 24 deletions
+31 -6
View File
@@ -3,10 +3,9 @@ import hashlib
from pathlib import Path
from functools import wraps
def with_disk_cache(subdir_name: str):
def _disk_cache_decorator(subdir_name: str, ext: str, read_func, write_func):
"""
A decorator to cache function results to a local JSON file.
The cache file is stored in `data/<subdir_name>/<key>.json`.
Generic internal caching decorator handling filename hashing and io abstraction.
"""
def decorator(func):
@wraps(func)
@@ -21,18 +20,18 @@ def with_disk_cache(subdir_name: str):
else:
key = hashlib.md5(val.encode()).hexdigest()
cache_p = Path(__file__).parent / 'data' / subdir_name / f"{key}.json"
cache_p = Path(__file__).parent / 'data' / subdir_name / f"{key}{ext}"
if cache_p.is_file():
try:
return json.loads(cache_p.read_text(encoding="utf-8"))
return read_func(cache_p)
except Exception:
pass
result = func(*args, **kwargs)
cache_p.parent.mkdir(parents=True, exist_ok=True)
cache_p.write_text(json.dumps(result, ensure_ascii=False, indent=2), encoding="utf-8")
write_func(cache_p, result)
# Write arguments to a .txt file for easy lookup
txt_p = cache_p.with_suffix('.txt')
@@ -41,3 +40,29 @@ def with_disk_cache(subdir_name: str):
return result
return wrapper
return decorator
def with_disk_cache(subdir_name: str):
    """
    Decorator that persists a function's result as pretty-printed JSON on disk.

    Cached entries are written to `data/<subdir_name>/<key>.json`, where the
    key is derived from the call arguments by the shared caching machinery.

    :param subdir_name: Name of the cache subdirectory under `data/`.
    :return: A decorator that wraps the target function with JSON disk caching.
    """
    def _load(path):
        # Deserialize a previously cached JSON result.
        return json.loads(path.read_text(encoding="utf-8"))

    def _store(path, result):
        # Human-readable formatting; keep non-ASCII text intact on disk.
        path.write_text(
            json.dumps(result, ensure_ascii=False, indent=2),
            encoding="utf-8",
        )

    return _disk_cache_decorator(subdir_name, ".json", read_func=_load, write_func=_store)
def with_binary_disk_cache(subdir_name: str, ext: str = ".bin"):
    """
    Decorator that caches a function's raw-bytes result to a local file.

    Cached entries are written to `data/<subdir_name>/<key><ext>`, where the
    key is derived from the call arguments by the shared caching machinery.

    :param subdir_name: Name of the cache subdirectory under `data/`.
    :param ext: File extension for cached entries (default ".bin").
    :return: A decorator that wraps the target function with binary disk caching.
    """
    def _load(path):
        # Return the cached payload exactly as stored.
        return path.read_bytes()

    def _store(path, data):
        # Persist the raw bytes verbatim.
        path.write_bytes(data)

    return _disk_cache_decorator(subdir_name, ext, read_func=_load, write_func=_store)
+2 -1
View File
@@ -3,7 +3,7 @@ import requests
import tomllib
from pathlib import Path
from utils import with_disk_cache
from utils import with_disk_cache, with_binary_disk_cache
config = tomllib.loads(Path("config.toml").read_text())
@@ -74,6 +74,7 @@ def mteam_imdb_info(id: str) -> dict:
return response.json()
@with_binary_disk_cache('generate_mteam_download_token', ext=".torrent")
def generate_mteam_download_token(torrent_id: str) -> bytes:
"""
Generate an M-Team download token for a specific torrent ID and download the torrent content.
+12 -7
View File
@@ -17,16 +17,18 @@ def get_qb_client() -> Client:
return qb
def download_torrent(qb_client: Client, torrent_source: str, save_path: str) -> str:
def download_torrent(qb_client: Client, torrent_source: str | bytes, save_path: str) -> str:
"""
4. Calls qb api to download a torrent to a messy directory.
:param qb_client: Authenticated qbittorrentapi.Client
:param torrent_source: File path to a .torrent file, or a magnet link / URL.
:param torrent_source: File path to a .torrent file, a magnet link / URL, or raw bytes.
:param save_path: The directory where the torrent should be downloaded (e.g. the messy folder).
:return: Response from the API.
"""
if os.path.isfile(torrent_source):
if isinstance(torrent_source, bytes):
return qb_client.torrents_add(torrent_files={"upload.torrent": torrent_source}, save_path=save_path)
elif os.path.isfile(torrent_source):
# Open and read the bytes explicitly so that qb uploads the file data,
# negating local path security issues on the remote instance
with open(torrent_source, "rb") as f:
@@ -64,12 +66,15 @@ def get_torrent_file_tree(qb_client: Client, torrent_hash: str) -> list:
print(f"Error fetching file tree for {torrent_hash}: {e}")
return []
def get_torrent_hash(filepath: str) -> str:
def get_torrent_hash(source: str | bytes) -> str:
"""
Parses a local .torrent file and computes its info hash directly.
Parses a local .torrent file or raw bytes and computes its info hash directly.
"""
try:
with open(filepath, "rb") as f:
if isinstance(source, bytes):
torrent_data = bencodepy.decode(source)
else:
with open(source, "rb") as f:
torrent_data = bencodepy.decode(f.read())
# Info dictionary is under b"info"
@@ -79,5 +84,5 @@ def get_torrent_hash(filepath: str) -> str:
# Calculate SHA1 hash of the bencoded info dictionary
return hashlib.sha1(info_encoded).hexdigest()
except Exception as e:
print(f"Could not parse torrent hash from {filepath}: {e}")
print(f"Could not parse torrent hash: {e}")
return ""
+3 -9
View File
@@ -86,19 +86,13 @@ def process_imdb_workflow(imdb_id: str, dl_dir: str = "/data/qb", jellyfin_dir:
print(f"\n=== [3] Downloading .torrent for ID: {tid} ===")
torrent_bytes = generate_mteam_download_token(tid)
# Save straight to local directory
torrent_path = f"{tid}.torrent"
with open(torrent_path, "wb") as f:
f.write(torrent_bytes)
print(f"Saved .torrent to {torrent_path}")
print(f"\n=== [4] Adding torrent to qBittorrent ===")
download_torrent(qb, torrent_path, dl_dir)
download_torrent(qb, torrent_bytes, dl_dir)
# Parse local hash directly instead of hoping qB orders correctly
t_hash = get_torrent_hash(torrent_path)
t_hash = get_torrent_hash(torrent_bytes)
if not t_hash:
print(f"Could not compute hash for {torrent_path}, skipping!")
print(f"Could not compute hash for {tid}, skipping!")
continue
print(f"\n=== [5] Waiting for download to finish ===")