Improve request handling and qBittorrent categories

2026-01-24 21:48:55 +13:00
parent 030480410b
commit cf4277d10c
13 changed files with 398 additions and 70 deletions


@@ -68,6 +68,7 @@ _artwork_prefetch_state: Dict[str, Any] = {
"finished_at": None,
}
_artwork_prefetch_task: Optional[asyncio.Task] = None
_media_endpoint_supported: Optional[bool] = None
STATUS_LABELS = {
1: "Waiting for approval",
@@ -269,10 +270,17 @@ async def _hydrate_title_from_tmdb(
async def _hydrate_media_details(client: JellyseerrClient, media_id: Optional[int]) -> Optional[Dict[str, Any]]:
if not media_id:
return None
global _media_endpoint_supported
if _media_endpoint_supported is False:
return None
try:
details = await client.get_media(int(media_id))
except httpx.HTTPStatusError:
except httpx.HTTPStatusError as exc:
if exc.response is not None and exc.response.status_code == 405:
_media_endpoint_supported = False
logger.info("Jellyseerr media endpoint rejected GET requests; skipping media lookups.")
return None
_media_endpoint_supported = True
return details if isinstance(details, dict) else None
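
The new module-level _media_endpoint_supported flag memoizes whether the Jellyseerr media endpoint accepts GET at all: it starts as None (unknown), flips to False on the first 405 response, and to True after a successful call, so later syncs skip the lookup instead of re-hitting a dead endpoint. A minimal standalone sketch of the same pattern, with a hypothetical fetch_media coroutine standing in for JellyseerrClient.get_media:

    from typing import Any, Awaitable, Callable, Dict, Optional

    import httpx

    # None = not probed yet, False = endpoint rejected GET (405), True = known to work.
    _media_endpoint_supported: Optional[bool] = None

    async def hydrate_media_details(
        fetch_media: Callable[[int], Awaitable[Any]], media_id: Optional[int]
    ) -> Optional[Dict[str, Any]]:
        """Fetch media details, remembering for the process lifetime when the endpoint is unsupported."""
        global _media_endpoint_supported
        if not media_id or _media_endpoint_supported is False:
            return None  # nothing to look up, or the endpoint already rejected us once
        try:
            details = await fetch_media(int(media_id))
        except httpx.HTTPStatusError as exc:
            if exc.response is not None and exc.response.status_code == 405:
                _media_endpoint_supported = False  # cache the rejection; skip future lookups
            return None
        _media_endpoint_supported = True
        return details if isinstance(details, dict) else None
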
@@ -393,14 +401,23 @@ async def _sync_all_requests(client: JellyseerrClient) -> int:
continue
payload = _parse_request_payload(item)
request_id = payload.get("request_id")
cached_title = None
if isinstance(request_id, int):
if not payload.get("title"):
cached = get_request_cache_by_id(request_id)
if cached and cached.get("title"):
cached_title = cached.get("title")
if not payload.get("title") or not payload.get("media_id"):
logger.debug("Jellyseerr sync hydrate request_id=%s", request_id)
details = await _get_request_details(client, request_id)
if isinstance(details, dict):
payload = _parse_request_payload(details)
item = details
if not payload.get("title") and payload.get("media_id"):
if (
not payload.get("title")
and payload.get("media_id")
and (not payload.get("tmdb_id") or not payload.get("media_type"))
):
media_details = await _hydrate_media_details(client, payload.get("media_id"))
if isinstance(media_details, dict):
media_title = media_details.get("title") or media_details.get("name")
@@ -428,7 +445,7 @@ async def _sync_all_requests(client: JellyseerrClient) -> int:
if isinstance(details, dict):
item = details
payload = _parse_request_payload(details)
if not payload.get("title") and payload.get("tmdb_id"):
if not payload.get("title") and payload.get("tmdb_id") and payload.get("media_type"):
hydrated_title, hydrated_year = await _hydrate_title_from_tmdb(
client, payload.get("media_type"), payload.get("tmdb_id")
)
@@ -436,6 +453,8 @@ async def _sync_all_requests(client: JellyseerrClient) -> int:
payload["title"] = hydrated_title
if hydrated_year:
payload["year"] = hydrated_year
if not payload.get("title") and cached_title:
payload["title"] = cached_title
if not isinstance(payload.get("request_id"), int):
continue
payload_json = json.dumps(item, ensure_ascii=True)
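
The full sync now resolves a title in a fixed order and only calls out when the previous step failed: the payload itself, the per-request details, the media details (fetched only when tmdb_id or media_type is still missing), the TMDB lookup, and finally the title cached from an earlier sync. A hedged sketch of that lazy fallback order; the lookup callables are stand-ins for the _get_request_details / _hydrate_media_details / _hydrate_title_from_tmdb calls in the diff:

    from typing import Any, Awaitable, Callable, Dict, List, Optional

    async def resolve_title(
        payload: Dict[str, Any],
        lookups: List[Callable[[], Awaitable[Optional[str]]]],
        cached_title: Optional[str],
    ) -> Optional[str]:
        """Return the first usable title; remote lookups run only while everything before them failed."""
        candidate = payload.get("title")
        if isinstance(candidate, str) and candidate.strip():
            return candidate
        for lookup in lookups:
            candidate = await lookup()  # e.g. request details, then media details, then TMDB
            if isinstance(candidate, str) and candidate.strip():
                return candidate
        return cached_title  # last resort: keep whatever the cache already had
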
@@ -516,6 +535,7 @@ async def _sync_delta_requests(client: JellyseerrClient) -> int:
if isinstance(request_id, int):
cached = get_request_cache_by_id(request_id)
incoming_updated = payload.get("updated_at")
cached_title = cached.get("title") if cached else None
if cached and incoming_updated and cached.get("updated_at") == incoming_updated and cached.get("title"):
continue
if not payload.get("title") or not payload.get("media_id"):
@@ -523,7 +543,11 @@ async def _sync_delta_requests(client: JellyseerrClient) -> int:
if isinstance(details, dict):
payload = _parse_request_payload(details)
item = details
if not payload.get("title") and payload.get("media_id"):
if (
not payload.get("title")
and payload.get("media_id")
and (not payload.get("tmdb_id") or not payload.get("media_type"))
):
media_details = await _hydrate_media_details(client, payload.get("media_id"))
if isinstance(media_details, dict):
media_title = media_details.get("title") or media_details.get("name")
@@ -551,7 +575,7 @@ async def _sync_delta_requests(client: JellyseerrClient) -> int:
if isinstance(details, dict):
payload = _parse_request_payload(details)
item = details
if not payload.get("title") and payload.get("tmdb_id"):
if not payload.get("title") and payload.get("tmdb_id") and payload.get("media_type"):
hydrated_title, hydrated_year = await _hydrate_title_from_tmdb(
client, payload.get("media_type"), payload.get("tmdb_id")
)
@@ -559,6 +583,8 @@ async def _sync_delta_requests(client: JellyseerrClient) -> int:
payload["title"] = hydrated_title
if hydrated_year:
payload["year"] = hydrated_year
if not payload.get("title") and cached_title:
payload["title"] = cached_title
if not isinstance(payload.get("request_id"), int):
continue
payload_json = json.dumps(item, ensure_ascii=True)
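
The delta sync adds two small cache interactions: it skips an item entirely when the cached row already carries a title and the same updated_at, and it keeps the cached title around as a last-resort fallback. A minimal sketch of the skip check, assuming a cache row shaped like the dict returned by get_request_cache_by_id:

    from typing import Any, Dict, Optional

    def should_skip_delta_item(cached: Optional[Dict[str, Any]], incoming_updated: Optional[str]) -> bool:
        """True when the cached row is already current: matching updated_at and a title we can reuse."""
        # e.g. should_skip_delta_item({"updated_at": "2026-01-20", "title": "X"}, "2026-01-20") -> True
        return bool(
            cached
            and incoming_updated
            and cached.get("updated_at") == incoming_updated
            and cached.get("title")
        )
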
@@ -999,6 +1025,97 @@ def _normalize_categories(categories: Any) -> List[str]:
return names
def _normalize_indexer_name(value: Optional[str]) -> str:
if not isinstance(value, str):
return ""
return "".join(ch for ch in value.lower().strip() if ch.isalnum())
def _resolve_arr_indexer_id(
indexers: Any, indexer_name: Optional[str], indexer_id: Optional[int], service_label: str
) -> Optional[int]:
if not isinstance(indexers, list):
return None
if not indexer_name:
if indexer_id is None:
return None
by_id = next(
(item for item in indexers if isinstance(item, dict) and item.get("id") == indexer_id),
None,
)
if by_id and by_id.get("id") is not None:
logger.debug("%s indexer id match: %s", service_label, by_id.get("id"))
return int(by_id["id"])
return None
target = indexer_name.lower().strip()
target_compact = _normalize_indexer_name(indexer_name)
exact = next(
(
item
for item in indexers
if isinstance(item, dict)
and str(item.get("name", "")).lower().strip() == target
),
None,
)
if exact and exact.get("id") is not None:
logger.debug("%s indexer match: '%s' -> %s", service_label, indexer_name, exact.get("id"))
return int(exact["id"])
compact = next(
(
item
for item in indexers
if isinstance(item, dict)
and _normalize_indexer_name(str(item.get("name", ""))) == target_compact
),
None,
)
if compact and compact.get("id") is not None:
logger.debug("%s indexer compact match: '%s' -> %s", service_label, indexer_name, compact.get("id"))
return int(compact["id"])
contains = next(
(
item
for item in indexers
if isinstance(item, dict)
and target in str(item.get("name", "")).lower()
),
None,
)
if contains and contains.get("id") is not None:
logger.debug("%s indexer contains match: '%s' -> %s", service_label, indexer_name, contains.get("id"))
return int(contains["id"])
logger.warning(
"%s indexer not found for name '%s'. Check indexer names in the Arr app.",
service_label,
indexer_name,
)
return None
async def _fallback_qbittorrent_download(download_url: Optional[str], category: str) -> bool:
if not download_url:
return False
runtime = get_runtime_settings()
client = QBittorrentClient(
runtime.qbittorrent_base_url,
runtime.qbittorrent_username,
runtime.qbittorrent_password,
)
if not client.configured():
return False
await client.add_torrent_url(download_url, category=category)
return True
def _resolve_qbittorrent_category(value: Optional[str], default: str) -> str:
if isinstance(value, str):
cleaned = value.strip()
if cleaned:
return cleaned
return default
def _filter_prowlarr_results(results: Any, request_type: RequestType) -> List[Dict[str, Any]]:
if not isinstance(results, list):
return []
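
_resolve_arr_indexer_id walks four tiers in order: an id lookup when no name was supplied, an exact case-insensitive name match, a compact match with whitespace and punctuation stripped, and finally a substring match, logging a warning when nothing hits. A usage sketch against an illustrative indexer list (the names, ids, and categories below are made up), assuming the helpers above are in scope:

    # Shaped like the Sonarr/Radarr indexer listing the diff fetches via client.get_indexers().
    indexers = [
        {"id": 3, "name": "NZBGeek (Prowlarr)"},
        {"id": 7, "name": "TorrentLeech"},
    ]

    assert _resolve_arr_indexer_id(indexers, "torrentleech", None, "Sonarr") == 7        # exact, case-insensitive
    assert _resolve_arr_indexer_id(indexers, "nzbgeek-prowlarr", None, "Sonarr") == 3    # compact match
    assert _resolve_arr_indexer_id(indexers, "Geek", None, "Sonarr") == 3                # substring fallback
    assert _resolve_arr_indexer_id(indexers, None, 7, "Sonarr") == 7                     # id lookup, no name given
    assert _resolve_arr_indexer_id(indexers, "missing tracker", None, "Sonarr") is None  # warns and gives up

    # Category fallback: blank or missing values collapse to the per-service default.
    assert _resolve_qbittorrent_category("  tv-sonarr  ", "sonarr") == "tv-sonarr"
    assert _resolve_qbittorrent_category("   ", "sonarr") == "sonarr"
    assert _resolve_qbittorrent_category(None, "radarr") == "radarr"
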
@@ -1607,36 +1724,91 @@ async def action_grab(
snapshot = await build_snapshot(request_id)
guid = payload.get("guid")
indexer_id = payload.get("indexerId")
download_url = payload.get("downloadUrl")
indexer_name = payload.get("indexerName") or payload.get("indexer")
download_url = payload.get("downloadUrl")
release_title = payload.get("title")
release_size = payload.get("size")
release_protocol = payload.get("protocol") or "torrent"
release_publish = payload.get("publishDate")
release_seeders = payload.get("seeders")
release_leechers = payload.get("leechers")
if not guid or not indexer_id:
raise HTTPException(status_code=400, detail="Missing guid or indexerId")
logger.info(
"Grab requested: request_id=%s guid=%s indexer_id=%s indexer_name=%s has_download_url=%s has_title=%s",
request_id,
guid,
indexer_id,
indexer_name,
bool(download_url),
bool(release_title),
)
push_payload = None
if download_url and release_title:
push_payload = {
"title": release_title,
"downloadUrl": download_url,
"protocol": release_protocol,
"publishDate": release_publish,
"size": release_size,
"indexer": indexer_name,
"guid": guid,
"seeders": release_seeders,
"leechers": release_leechers,
}
runtime = get_runtime_settings()
if snapshot.request_type.value == "tv":
client = SonarrClient(runtime.sonarr_base_url, runtime.sonarr_api_key)
if not client.configured():
raise HTTPException(status_code=400, detail="Sonarr not configured")
try:
if indexer_name:
indexers = await client.get_indexers()
if isinstance(indexers, list):
matched = next(
(
item
for item in indexers
if isinstance(item, dict)
and str(item.get("name", "")).lower() == str(indexer_name).lower()
),
None,
)
if matched and matched.get("id") is not None:
indexer_id = int(matched["id"])
response = await client.grab_release(str(guid), int(indexer_id))
indexers = await client.get_indexers()
resolved_indexer_id = _resolve_arr_indexer_id(indexers, indexer_name, indexer_id, "Sonarr")
response = None
action_message = "Grab sent to Sonarr."
if resolved_indexer_id is not None:
indexer_id = resolved_indexer_id
logger.info("Sonarr grab: attempting DownloadRelease command.")
try:
response = await client.download_release(str(guid), int(indexer_id))
except httpx.HTTPStatusError as exc:
if exc.response is not None and exc.response.status_code in {404, 405}:
logger.info("Sonarr grab: DownloadRelease unsupported; will try release push.")
response = None
else:
raise
if response is None and push_payload:
logger.info("Sonarr grab: attempting release push.")
try:
response = await client.push_release(push_payload)
except httpx.HTTPStatusError as exc:
if exc.response is not None and exc.response.status_code == 404:
logger.info("Sonarr grab: release push not supported.")
else:
raise
if response is None:
category = _resolve_qbittorrent_category(
runtime.sonarr_qbittorrent_category, "sonarr"
)
qbittorrent_added = await _fallback_qbittorrent_download(download_url, category)
if qbittorrent_added:
action_message = f"Grab sent to qBittorrent (category {category})."
response = {"qbittorrent": "queued"}
else:
if resolved_indexer_id is None:
detail = "Indexer not found in Sonarr and no release push available."
elif not push_payload:
detail = "Sonarr did not accept the grab request (no release URL available)."
else:
detail = "Sonarr did not accept the grab request (DownloadRelease unsupported)."
raise HTTPException(status_code=400, detail=detail)
except httpx.HTTPStatusError as exc:
raise HTTPException(status_code=502, detail=f"Sonarr grab failed: {exc}") from exc
await asyncio.to_thread(
save_action, request_id, "grab", "Grab release", "ok", "Grab sent to Sonarr."
save_action, request_id, "grab", "Grab release", "ok", action_message
)
return {"status": "ok", "response": response}
if snapshot.request_type.value == "movie":
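
Both the Sonarr and the Radarr branch now run the same three-stage fallback: the DownloadRelease command when the indexer could be resolved, a release push when the search result carried a downloadUrl and title, and finally handing the raw URL to qBittorrent under the service's category. A condensed, hedged sketch of that chain as a generic helper; the three callables are stand-ins for the wrapped client calls in the diff:

    from typing import Any, Awaitable, Callable, Dict, Optional

    import httpx

    async def grab_with_fallbacks(
        download_release: Callable[[], Awaitable[Any]],
        push_release: Optional[Callable[[], Awaitable[Any]]],
        qbittorrent_fallback: Callable[[], Awaitable[bool]],
        indexer_resolved: bool,
        category: str,
    ) -> Dict[str, Any]:
        """Mirror the grab order used above: DownloadRelease, then release push, then qBittorrent."""
        response: Any = None
        if indexer_resolved:
            try:
                response = await download_release()
            except httpx.HTTPStatusError as exc:
                if exc.response is None or exc.response.status_code not in {404, 405}:
                    raise  # a real failure should surface as a 502 upstream
                response = None  # command not supported by this Arr version
        if response is None and push_release is not None:
            try:
                response = await push_release()
            except httpx.HTTPStatusError as exc:
                if exc.response is None or exc.response.status_code != 404:
                    raise
                response = None
        if response is None:
            if await qbittorrent_fallback():
                return {"qbittorrent": "queued", "message": f"Grab sent to qBittorrent (category {category})."}
            raise RuntimeError("No grab path accepted the release.")
        return {"response": response}
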
@@ -1644,25 +1816,50 @@ async def action_grab(
if not client.configured():
raise HTTPException(status_code=400, detail="Radarr not configured")
try:
if indexer_name:
indexers = await client.get_indexers()
if isinstance(indexers, list):
matched = next(
(
item
for item in indexers
if isinstance(item, dict)
and str(item.get("name", "")).lower() == str(indexer_name).lower()
),
None,
)
if matched and matched.get("id") is not None:
indexer_id = int(matched["id"])
response = await client.grab_release(str(guid), int(indexer_id))
indexers = await client.get_indexers()
resolved_indexer_id = _resolve_arr_indexer_id(indexers, indexer_name, indexer_id, "Radarr")
response = None
action_message = "Grab sent to Radarr."
if resolved_indexer_id is not None:
indexer_id = resolved_indexer_id
logger.info("Radarr grab: attempting DownloadRelease command.")
try:
response = await client.download_release(str(guid), int(indexer_id))
except httpx.HTTPStatusError as exc:
if exc.response is not None and exc.response.status_code in {404, 405}:
logger.info("Radarr grab: DownloadRelease unsupported; will try release push.")
response = None
else:
raise
if response is None and push_payload:
logger.info("Radarr grab: attempting release push.")
try:
response = await client.push_release(push_payload)
except httpx.HTTPStatusError as exc:
if exc.response is not None and exc.response.status_code == 404:
logger.info("Radarr grab: release push not supported.")
else:
raise
if response is None:
category = _resolve_qbittorrent_category(
runtime.radarr_qbittorrent_category, "radarr"
)
qbittorrent_added = await _fallback_qbittorrent_download(download_url, category)
if qbittorrent_added:
action_message = f"Grab sent to qBittorrent (category {category})."
response = {"qbittorrent": "queued"}
else:
if resolved_indexer_id is None:
detail = "Indexer not found in Radarr and no release push available."
elif not push_payload:
detail = "Radarr did not accept the grab request (no release URL available)."
else:
detail = "Radarr did not accept the grab request (DownloadRelease unsupported)."
raise HTTPException(status_code=400, detail=detail)
except httpx.HTTPStatusError as exc:
raise HTTPException(status_code=502, detail=f"Radarr grab failed: {exc}") from exc
await asyncio.to_thread(
save_action, request_id, "grab", "Grab release", "ok", "Grab sent to Radarr."
save_action, request_id, "grab", "Grab release", "ok", action_message
)
return {"status": "ok", "response": response}