Improve request handling and qBittorrent categories

2026-01-24 21:48:55 +13:00
parent 030480410b
commit cf4277d10c
13 changed files with 398 additions and 70 deletions

View File

@@ -26,7 +26,7 @@ def triage_snapshot(snapshot: Snapshot) -> TriageResult:
     recommendations.append(
         TriageRecommendation(
             action_id="readd_to_arr",
-            title="Add it to the library queue",
+            title="Push to Sonarr/Radarr",
             reason="Sonarr/Radarr has not created the entry for this request.",
             risk="medium",
         )

View File

@@ -58,3 +58,13 @@ class JellyfinClient(ApiClient):
         response = await client.get(url, headers=headers)
         response.raise_for_status()
         return response.json()
+
+    async def refresh_library(self, recursive: bool = True) -> None:
+        if not self.base_url or not self.api_key:
+            return None
+        url = f"{self.base_url}/Library/Refresh"
+        headers = {"X-Emby-Token": self.api_key}
+        params = {"Recursive": "true" if recursive else "false"}
+        async with httpx.AsyncClient(timeout=10.0) as client:
+            response = await client.post(url, headers=headers, params=params)
+            response.raise_for_status()
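
For reference, a minimal sketch of exercising the new refresh_library helper on its own; the import path, URL, and API key are placeholders, not values taken from this repository.

import asyncio

from app.clients.jellyfin import JellyfinClient  # hypothetical import path

async def main() -> None:
    # Placeholder server and token; substitute real Jellyfin details.
    client = JellyfinClient("http://localhost:8096", "jellyfin-api-key")
    # Sends POST /Library/Refresh with Recursive=true and raises on non-2xx responses.
    await client.refresh_library(recursive=True)

asyncio.run(main())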

View File

@@ -1,5 +1,6 @@
 from typing import Any, Dict, Optional

 import httpx
+import logging

 from .base import ApiClient
@@ -8,6 +9,7 @@ class QBittorrentClient(ApiClient):
         super().__init__(base_url, None)
         self.username = username
         self.password = password
+        self.logger = logging.getLogger(__name__)

     def configured(self) -> bool:
         return bool(self.base_url and self.username and self.password)
@@ -72,6 +74,14 @@ class QBittorrentClient(ApiClient):
             raise

     async def add_torrent_url(self, url: str, category: Optional[str] = None) -> None:
+        url_host = None
+        if isinstance(url, str) and "://" in url:
+            url_host = url.split("://", 1)[-1].split("/", 1)[0]
+        self.logger.warning(
+            "qBittorrent add_torrent_url invoked: category=%s host=%s",
+            category,
+            url_host or "unknown",
+        )
         data: Dict[str, Any] = {"urls": url}
         if category:
             data["category"] = category

View File

@@ -46,3 +46,12 @@ class RadarrClient(ApiClient):
     async def grab_release(self, guid: str, indexer_id: int) -> Optional[Dict[str, Any]]:
         return await self.post("/api/v3/release", payload={"guid": guid, "indexerId": indexer_id})
+
+    async def push_release(self, payload: Dict[str, Any]) -> Optional[Dict[str, Any]]:
+        return await self.post("/api/v3/release/push", payload=payload)
+
+    async def download_release(self, guid: str, indexer_id: int) -> Optional[Dict[str, Any]]:
+        return await self.post(
+            "/api/v3/command",
+            payload={"name": "DownloadRelease", "guid": guid, "indexerId": indexer_id},
+        )

View File

@@ -53,3 +53,12 @@ class SonarrClient(ApiClient):
     async def grab_release(self, guid: str, indexer_id: int) -> Optional[Dict[str, Any]]:
         return await self.post("/api/v3/release", payload={"guid": guid, "indexerId": indexer_id})
+
+    async def push_release(self, payload: Dict[str, Any]) -> Optional[Dict[str, Any]]:
+        return await self.post("/api/v3/release/push", payload=payload)
+
+    async def download_release(self, guid: str, indexer_id: int) -> Optional[Dict[str, Any]]:
+        return await self.post(
+            "/api/v3/command",
+            payload={"name": "DownloadRelease", "guid": guid, "indexerId": indexer_id},
+        )
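
Both clients gain the same pair of methods: push_release posts a fully described release to /api/v3/release/push, while download_release issues a DownloadRelease command through the generic /api/v3/command endpoint. Whether a given Sonarr/Radarr build accepts these is evidently version-dependent, since the grab flow below treats 404/405 as "unsupported". A usage sketch under that assumption (URL, API key, guid, and indexer id are placeholders):

import asyncio

from app.clients.sonarr import SonarrClient  # hypothetical import path

async def main() -> None:
    client = SonarrClient("http://localhost:8989", "sonarr-api-key")  # placeholders
    # guid and indexerId would normally come from a prior release search.
    result = await client.download_release("example-release-guid", 12)
    print(result)

asyncio.run(main())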

View File

@@ -70,6 +70,10 @@ class Settings(BaseSettings):
     sonarr_root_folder: Optional[str] = Field(
         default=None, validation_alias=AliasChoices("SONARR_ROOT_FOLDER")
     )
+    sonarr_qbittorrent_category: Optional[str] = Field(
+        default="sonarr",
+        validation_alias=AliasChoices("SONARR_QBITTORRENT_CATEGORY"),
+    )
     radarr_base_url: Optional[str] = Field(
         default=None, validation_alias=AliasChoices("RADARR_URL", "RADARR_BASE_URL")
@@ -83,6 +87,10 @@ class Settings(BaseSettings):
     radarr_root_folder: Optional[str] = Field(
         default=None, validation_alias=AliasChoices("RADARR_ROOT_FOLDER")
     )
+    radarr_qbittorrent_category: Optional[str] = Field(
+        default="radarr",
+        validation_alias=AliasChoices("RADARR_QBITTORRENT_CATEGORY"),
+    )
     prowlarr_base_url: Optional[str] = Field(
         default=None, validation_alias=AliasChoices("PROWLARR_URL", "PROWLARR_BASE_URL")

View File

@@ -609,6 +609,22 @@ def get_request_cache_count() -> int:
     return int(row[0] or 0)


+def update_request_cache_title(
+    request_id: int, title: str, year: Optional[int] = None
+) -> None:
+    if not title:
+        return
+    with _connect() as conn:
+        conn.execute(
+            """
+            UPDATE requests_cache
+            SET title = ?, year = COALESCE(?, year)
+            WHERE request_id = ?
+            """,
+            (title, year, request_id),
+        )
+
+
 def prune_duplicate_requests_cache() -> int:
     with _connect() as conn:
         cursor = conn.execute(
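
The COALESCE(?, year) clause makes the year parameter optional: passing None preserves whatever year is already cached, while a concrete value overwrites it. A self-contained check of that behavior:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE requests_cache (request_id INTEGER PRIMARY KEY, title TEXT, year INTEGER)")
conn.execute("INSERT INTO requests_cache VALUES (1, NULL, 2019)")
# year=None: the stored 2019 survives because COALESCE(NULL, year) evaluates to year.
conn.execute(
    "UPDATE requests_cache SET title = ?, year = COALESCE(?, year) WHERE request_id = ?",
    ("Example Title", None, 1),
)
assert conn.execute("SELECT title, year FROM requests_cache").fetchone() == ("Example Title", 2019)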

View File

@@ -20,6 +20,7 @@ from ..db import (
     clear_requests_cache,
     clear_history,
     cleanup_history,
+    update_request_cache_title,
 )
 from ..runtime import get_runtime_settings
 from ..clients.sonarr import SonarrClient
@@ -56,10 +57,12 @@ SETTING_KEYS: List[str] = [
"sonarr_api_key",
"sonarr_quality_profile_id",
"sonarr_root_folder",
"sonarr_qbittorrent_category",
"radarr_base_url",
"radarr_api_key",
"radarr_quality_profile_id",
"radarr_root_folder",
"radarr_qbittorrent_category",
"prowlarr_base_url",
"prowlarr_api_key",
"qbittorrent_base_url",
@@ -274,7 +277,28 @@ async def read_logs(lines: int = 200) -> Dict[str, Any]:
@router.get("/requests/cache")
async def requests_cache(limit: int = 50) -> Dict[str, Any]:
return {"rows": get_request_cache_overview(limit)}
rows = get_request_cache_overview(limit)
missing_titles = [row for row in rows if not row.get("title")]
if missing_titles:
runtime = get_runtime_settings()
client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
if client.configured():
for row in missing_titles:
request_id = row.get("request_id")
if not isinstance(request_id, int):
continue
details = await requests_router._get_request_details(client, request_id)
if not isinstance(details, dict):
continue
payload = requests_router._parse_request_payload(details)
title = payload.get("title")
if not title:
continue
row["title"] = title
if payload.get("year"):
row["year"] = payload.get("year")
update_request_cache_title(request_id, title, payload.get("year"))
return {"rows": rows}
@router.post("/branding/logo")

View File

@@ -68,6 +68,7 @@ _artwork_prefetch_state: Dict[str, Any] = {
"finished_at": None,
}
_artwork_prefetch_task: Optional[asyncio.Task] = None
_media_endpoint_supported: Optional[bool] = None
STATUS_LABELS = {
1: "Waiting for approval",
@@ -269,10 +270,17 @@ async def _hydrate_title_from_tmdb(
 async def _hydrate_media_details(client: JellyseerrClient, media_id: Optional[int]) -> Optional[Dict[str, Any]]:
     if not media_id:
         return None
+    global _media_endpoint_supported
+    if _media_endpoint_supported is False:
+        return None
     try:
         details = await client.get_media(int(media_id))
-    except httpx.HTTPStatusError:
+    except httpx.HTTPStatusError as exc:
+        if exc.response is not None and exc.response.status_code == 405:
+            _media_endpoint_supported = False
+            logger.info("Jellyseerr media endpoint rejected GET requests; skipping media lookups.")
         return None
+    _media_endpoint_supported = True
     return details if isinstance(details, dict) else None
@@ -393,14 +401,23 @@ async def _sync_all_requests(client: JellyseerrClient) -> int:
             continue
         payload = _parse_request_payload(item)
         request_id = payload.get("request_id")
+        cached_title = None
         if isinstance(request_id, int):
+            if not payload.get("title"):
+                cached = get_request_cache_by_id(request_id)
+                if cached and cached.get("title"):
+                    cached_title = cached.get("title")
             if not payload.get("title") or not payload.get("media_id"):
                 logger.debug("Jellyseerr sync hydrate request_id=%s", request_id)
                 details = await _get_request_details(client, request_id)
                 if isinstance(details, dict):
                     payload = _parse_request_payload(details)
                     item = details
-        if not payload.get("title") and payload.get("media_id"):
+        if (
+            not payload.get("title")
+            and payload.get("media_id")
+            and (not payload.get("tmdb_id") or not payload.get("media_type"))
+        ):
             media_details = await _hydrate_media_details(client, payload.get("media_id"))
             if isinstance(media_details, dict):
                 media_title = media_details.get("title") or media_details.get("name")
@@ -428,7 +445,7 @@ async def _sync_all_requests(client: JellyseerrClient) -> int:
             if isinstance(details, dict):
                 item = details
                 payload = _parse_request_payload(details)
-        if not payload.get("title") and payload.get("tmdb_id"):
+        if not payload.get("title") and payload.get("tmdb_id") and payload.get("media_type"):
             hydrated_title, hydrated_year = await _hydrate_title_from_tmdb(
                 client, payload.get("media_type"), payload.get("tmdb_id")
             )
@@ -436,6 +453,8 @@ async def _sync_all_requests(client: JellyseerrClient) -> int:
payload["title"] = hydrated_title
if hydrated_year:
payload["year"] = hydrated_year
if not payload.get("title") and cached_title:
payload["title"] = cached_title
if not isinstance(payload.get("request_id"), int):
continue
payload_json = json.dumps(item, ensure_ascii=True)
@@ -516,6 +535,7 @@ async def _sync_delta_requests(client: JellyseerrClient) -> int:
         if isinstance(request_id, int):
             cached = get_request_cache_by_id(request_id)
             incoming_updated = payload.get("updated_at")
+            cached_title = cached.get("title") if cached else None
             if cached and incoming_updated and cached.get("updated_at") == incoming_updated and cached.get("title"):
                 continue
             if not payload.get("title") or not payload.get("media_id"):
@@ -523,7 +543,11 @@ async def _sync_delta_requests(client: JellyseerrClient) -> int:
                 if isinstance(details, dict):
                     payload = _parse_request_payload(details)
                     item = details
-        if not payload.get("title") and payload.get("media_id"):
+        if (
+            not payload.get("title")
+            and payload.get("media_id")
+            and (not payload.get("tmdb_id") or not payload.get("media_type"))
+        ):
             media_details = await _hydrate_media_details(client, payload.get("media_id"))
             if isinstance(media_details, dict):
                 media_title = media_details.get("title") or media_details.get("name")
@@ -551,7 +575,7 @@ async def _sync_delta_requests(client: JellyseerrClient) -> int:
             if isinstance(details, dict):
                 payload = _parse_request_payload(details)
                 item = details
-        if not payload.get("title") and payload.get("tmdb_id"):
+        if not payload.get("title") and payload.get("tmdb_id") and payload.get("media_type"):
             hydrated_title, hydrated_year = await _hydrate_title_from_tmdb(
                 client, payload.get("media_type"), payload.get("tmdb_id")
             )
@@ -559,6 +583,8 @@ async def _sync_delta_requests(client: JellyseerrClient) -> int:
payload["title"] = hydrated_title
if hydrated_year:
payload["year"] = hydrated_year
if not payload.get("title") and cached_title:
payload["title"] = cached_title
if not isinstance(payload.get("request_id"), int):
continue
payload_json = json.dumps(item, ensure_ascii=True)
@@ -999,6 +1025,97 @@ def _normalize_categories(categories: Any) -> List[str]:
     return names


+def _normalize_indexer_name(value: Optional[str]) -> str:
+    if not isinstance(value, str):
+        return ""
+    return "".join(ch for ch in value.lower().strip() if ch.isalnum())
+
+
+def _resolve_arr_indexer_id(
+    indexers: Any, indexer_name: Optional[str], indexer_id: Optional[int], service_label: str
+) -> Optional[int]:
+    if not isinstance(indexers, list):
+        return None
+    if not indexer_name:
+        if indexer_id is None:
+            return None
+        by_id = next(
+            (item for item in indexers if isinstance(item, dict) and item.get("id") == indexer_id),
+            None,
+        )
+        if by_id and by_id.get("id") is not None:
+            logger.debug("%s indexer id match: %s", service_label, by_id.get("id"))
+            return int(by_id["id"])
+        return None
+    target = indexer_name.lower().strip()
+    target_compact = _normalize_indexer_name(indexer_name)
+    exact = next(
+        (
+            item
+            for item in indexers
+            if isinstance(item, dict)
+            and str(item.get("name", "")).lower().strip() == target
+        ),
+        None,
+    )
+    if exact and exact.get("id") is not None:
+        logger.debug("%s indexer match: '%s' -> %s", service_label, indexer_name, exact.get("id"))
+        return int(exact["id"])
+    compact = next(
+        (
+            item
+            for item in indexers
+            if isinstance(item, dict)
+            and _normalize_indexer_name(str(item.get("name", ""))) == target_compact
+        ),
+        None,
+    )
+    if compact and compact.get("id") is not None:
+        logger.debug("%s indexer compact match: '%s' -> %s", service_label, indexer_name, compact.get("id"))
+        return int(compact["id"])
+    contains = next(
+        (
+            item
+            for item in indexers
+            if isinstance(item, dict)
+            and target in str(item.get("name", "")).lower()
+        ),
+        None,
+    )
+    if contains and contains.get("id") is not None:
+        logger.debug("%s indexer contains match: '%s' -> %s", service_label, indexer_name, contains.get("id"))
+        return int(contains["id"])
+    logger.warning(
+        "%s indexer not found for name '%s'. Check indexer names in the Arr app.",
+        service_label,
+        indexer_name,
+    )
+    return None
+
+
+async def _fallback_qbittorrent_download(download_url: Optional[str], category: str) -> bool:
+    if not download_url:
+        return False
+    runtime = get_runtime_settings()
+    client = QBittorrentClient(
+        runtime.qbittorrent_base_url,
+        runtime.qbittorrent_username,
+        runtime.qbittorrent_password,
+    )
+    if not client.configured():
+        return False
+    await client.add_torrent_url(download_url, category=category)
+    return True
+
+
+def _resolve_qbittorrent_category(value: Optional[str], default: str) -> str:
+    if isinstance(value, str):
+        cleaned = value.strip()
+        if cleaned:
+            return cleaned
+    return default
+
+
 def _filter_prowlarr_results(results: Any, request_type: RequestType) -> List[Dict[str, Any]]:
     if not isinstance(results, list):
         return []
@@ -1607,36 +1724,91 @@ async def action_grab(
     snapshot = await build_snapshot(request_id)
     guid = payload.get("guid")
     indexer_id = payload.get("indexerId")
-    download_url = payload.get("downloadUrl")
     indexer_name = payload.get("indexerName") or payload.get("indexer")
+    download_url = payload.get("downloadUrl")
+    release_title = payload.get("title")
+    release_size = payload.get("size")
+    release_protocol = payload.get("protocol") or "torrent"
+    release_publish = payload.get("publishDate")
+    release_seeders = payload.get("seeders")
+    release_leechers = payload.get("leechers")
     if not guid or not indexer_id:
         raise HTTPException(status_code=400, detail="Missing guid or indexerId")
+    logger.info(
+        "Grab requested: request_id=%s guid=%s indexer_id=%s indexer_name=%s has_download_url=%s has_title=%s",
+        request_id,
+        guid,
+        indexer_id,
+        indexer_name,
+        bool(download_url),
+        bool(release_title),
+    )
+    push_payload = None
+    if download_url and release_title:
+        push_payload = {
+            "title": release_title,
+            "downloadUrl": download_url,
+            "protocol": release_protocol,
+            "publishDate": release_publish,
+            "size": release_size,
+            "indexer": indexer_name,
+            "guid": guid,
+            "seeders": release_seeders,
+            "leechers": release_leechers,
+        }
     runtime = get_runtime_settings()
     if snapshot.request_type.value == "tv":
         client = SonarrClient(runtime.sonarr_base_url, runtime.sonarr_api_key)
         if not client.configured():
             raise HTTPException(status_code=400, detail="Sonarr not configured")
         try:
-            if indexer_name:
-                indexers = await client.get_indexers()
-                if isinstance(indexers, list):
-                    matched = next(
-                        (
-                            item
-                            for item in indexers
-                            if isinstance(item, dict)
-                            and str(item.get("name", "")).lower() == str(indexer_name).lower()
-                        ),
-                        None,
-                    )
-                    if matched and matched.get("id") is not None:
-                        indexer_id = int(matched["id"])
-            response = await client.grab_release(str(guid), int(indexer_id))
+            indexers = await client.get_indexers()
+            resolved_indexer_id = _resolve_arr_indexer_id(indexers, indexer_name, indexer_id, "Sonarr")
+            response = None
+            action_message = "Grab sent to Sonarr."
+            if resolved_indexer_id is not None:
+                indexer_id = resolved_indexer_id
+                logger.info("Sonarr grab: attempting DownloadRelease command.")
+                try:
+                    response = await client.download_release(str(guid), int(indexer_id))
+                except httpx.HTTPStatusError as exc:
+                    if exc.response is not None and exc.response.status_code in {404, 405}:
+                        logger.info("Sonarr grab: DownloadRelease unsupported; will try release push.")
+                        response = None
+                    else:
+                        raise
+            if response is None and push_payload:
+                logger.info("Sonarr grab: attempting release push.")
+                try:
+                    response = await client.push_release(push_payload)
+                except httpx.HTTPStatusError as exc:
+                    if exc.response is not None and exc.response.status_code == 404:
+                        logger.info("Sonarr grab: release push not supported.")
+                    else:
+                        raise
+            if response is None:
+                category = _resolve_qbittorrent_category(
+                    runtime.sonarr_qbittorrent_category, "sonarr"
+                )
+                qbittorrent_added = await _fallback_qbittorrent_download(download_url, category)
+                if qbittorrent_added:
+                    action_message = f"Grab sent to qBittorrent (category {category})."
+                    response = {"qbittorrent": "queued"}
+                else:
+                    if resolved_indexer_id is None:
+                        detail = "Indexer not found in Sonarr and no release push available."
+                    elif not push_payload:
+                        detail = "Sonarr did not accept the grab request (no release URL available)."
+                    else:
+                        detail = "Sonarr did not accept the grab request (DownloadRelease unsupported)."
+                    raise HTTPException(status_code=400, detail=detail)
         except httpx.HTTPStatusError as exc:
             raise HTTPException(status_code=502, detail=f"Sonarr grab failed: {exc}") from exc
         await asyncio.to_thread(
-            save_action, request_id, "grab", "Grab release", "ok", "Grab sent to Sonarr."
+            save_action, request_id, "grab", "Grab release", "ok", action_message
         )
         return {"status": "ok", "response": response}
     if snapshot.request_type.value == "movie":
@@ -1644,25 +1816,50 @@ async def action_grab(
         if not client.configured():
             raise HTTPException(status_code=400, detail="Radarr not configured")
         try:
-            if indexer_name:
-                indexers = await client.get_indexers()
-                if isinstance(indexers, list):
-                    matched = next(
-                        (
-                            item
-                            for item in indexers
-                            if isinstance(item, dict)
-                            and str(item.get("name", "")).lower() == str(indexer_name).lower()
-                        ),
-                        None,
-                    )
-                    if matched and matched.get("id") is not None:
-                        indexer_id = int(matched["id"])
-            response = await client.grab_release(str(guid), int(indexer_id))
+            indexers = await client.get_indexers()
+            resolved_indexer_id = _resolve_arr_indexer_id(indexers, indexer_name, indexer_id, "Radarr")
+            response = None
+            action_message = "Grab sent to Radarr."
+            if resolved_indexer_id is not None:
+                indexer_id = resolved_indexer_id
+                logger.info("Radarr grab: attempting DownloadRelease command.")
+                try:
+                    response = await client.download_release(str(guid), int(indexer_id))
+                except httpx.HTTPStatusError as exc:
+                    if exc.response is not None and exc.response.status_code in {404, 405}:
+                        logger.info("Radarr grab: DownloadRelease unsupported; will try release push.")
+                        response = None
+                    else:
+                        raise
+            if response is None and push_payload:
+                logger.info("Radarr grab: attempting release push.")
+                try:
+                    response = await client.push_release(push_payload)
+                except httpx.HTTPStatusError as exc:
+                    if exc.response is not None and exc.response.status_code == 404:
+                        logger.info("Radarr grab: release push not supported.")
+                    else:
+                        raise
+            if response is None:
+                category = _resolve_qbittorrent_category(
+                    runtime.radarr_qbittorrent_category, "radarr"
+                )
+                qbittorrent_added = await _fallback_qbittorrent_download(download_url, category)
+                if qbittorrent_added:
+                    action_message = f"Grab sent to qBittorrent (category {category})."
+                    response = {"qbittorrent": "queued"}
+                else:
+                    if resolved_indexer_id is None:
+                        detail = "Indexer not found in Radarr and no release push available."
+                    elif not push_payload:
+                        detail = "Radarr did not accept the grab request (no release URL available)."
+                    else:
+                        detail = "Radarr did not accept the grab request (DownloadRelease unsupported)."
+                    raise HTTPException(status_code=400, detail=detail)
         except httpx.HTTPStatusError as exc:
             raise HTTPException(status_code=502, detail=f"Radarr grab failed: {exc}") from exc
         await asyncio.to_thread(
-            save_action, request_id, "grab", "Grab release", "ok", "Grab sent to Radarr."
+            save_action, request_id, "grab", "Grab release", "ok", action_message
         )
         return {"status": "ok", "response": response}

View File

@@ -11,9 +11,14 @@ from ..clients.radarr import RadarrClient
 from ..clients.prowlarr import ProwlarrClient
 from ..clients.qbittorrent import QBittorrentClient
 from ..runtime import get_runtime_settings
-from ..db import save_snapshot, get_request_cache_payload
+from ..db import save_snapshot, get_request_cache_payload, get_recent_snapshots, get_setting, set_setting
 from ..models import ActionOption, NormalizedState, RequestType, Snapshot, TimelineHop

 logger = logging.getLogger(__name__)

+JELLYFIN_SCAN_COOLDOWN_SECONDS = 300
+_jellyfin_scan_key = "jellyfin_scan_last_at"
+
 STATUS_LABELS = {
     1: "Waiting for approval",
@@ -41,6 +46,35 @@ def _pick_first(value: Any) -> Optional[Dict[str, Any]]:
     return None


+async def _maybe_refresh_jellyfin(snapshot: Snapshot) -> None:
+    if snapshot.state not in {NormalizedState.available, NormalizedState.completed}:
+        return
+    runtime = get_runtime_settings()
+    client = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key)
+    if not client.configured():
+        return
+    last_scan = get_setting(_jellyfin_scan_key)
+    if last_scan:
+        try:
+            parsed = datetime.fromisoformat(last_scan.replace("Z", "+00:00"))
+            if (datetime.now(timezone.utc) - parsed).total_seconds() < JELLYFIN_SCAN_COOLDOWN_SECONDS:
+                return
+        except ValueError:
+            pass
+    previous = await asyncio.to_thread(get_recent_snapshots, snapshot.request_id, 1)
+    if previous:
+        prev_state = previous[0].get("state")
+        if prev_state in {NormalizedState.available.value, NormalizedState.completed.value}:
+            return
+    try:
+        await client.refresh_library()
+    except Exception as exc:
+        logger.warning("Jellyfin library refresh failed: %s", exc)
+        return
+    set_setting(_jellyfin_scan_key, datetime.now(timezone.utc).isoformat())
+    logger.info("Jellyfin library refresh triggered: request_id=%s", snapshot.request_id)
+
+
 def _queue_records(queue: Any) -> List[Dict[str, Any]]:
     if isinstance(queue, dict):
         records = queue.get("records")
@@ -381,10 +415,6 @@ async def build_snapshot(request_id: str) -> Snapshot:
     if arr_state is None:
         arr_state = "unknown"
-    if arr_state == "missing" and media_status_code in {4}:
-        arr_state = "available"
-    elif arr_state == "missing" and media_status_code in {6}:
-        arr_state = "added"
     timeline.append(TimelineHop(service="Sonarr/Radarr", status=arr_state, details=arr_details))
@@ -524,7 +554,7 @@ async def build_snapshot(request_id: str) -> Snapshot:
snapshot.state_reason = "Waiting for download to start in qBittorrent."
elif arr_state == "missing" and derived_approved:
snapshot.state = NormalizedState.needs_add
snapshot.state_reason = "Approved, but not added to the library yet."
snapshot.state_reason = "Approved, but not yet added to Sonarr/Radarr."
elif arr_state == "searching":
snapshot.state = NormalizedState.searching
snapshot.state_reason = "Searching for a matching release."
@@ -548,7 +578,7 @@ async def build_snapshot(request_id: str) -> Snapshot:
         actions.append(
             ActionOption(
                 id="readd_to_arr",
-                label="Add to the library queue (Sonarr/Radarr)",
+                label="Push to Sonarr/Radarr",
                 risk="medium",
             )
         )
@@ -604,5 +634,6 @@ async def build_snapshot(request_id: str) -> Snapshot:
         },
     }
+    await _maybe_refresh_jellyfin(snapshot)
     await asyncio.to_thread(save_snapshot, snapshot)
     return snapshot
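
The cooldown check parses the stored ISO timestamp while tolerating a trailing "Z" (datetime.fromisoformat only accepts "Z" from Python 3.11 onward, hence the replace). The comparison, checked in isolation with a fabricated stored value:

from datetime import datetime, timezone

JELLYFIN_SCAN_COOLDOWN_SECONDS = 300

last_scan = "2026-01-24T08:45:00Z"  # fabricated example value
parsed = datetime.fromisoformat(last_scan.replace("Z", "+00:00"))
elapsed = (datetime.now(timezone.utc) - parsed).total_seconds()
print("skip refresh" if elapsed < JELLYFIN_SCAN_COOLDOWN_SECONDS else "refresh allowed")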