Improve request handling and qBittorrent categories

This commit is contained in:
2026-01-24 21:48:55 +13:00
parent 030480410b
commit cf4277d10c
13 changed files with 398 additions and 70 deletions

View File

@@ -26,7 +26,7 @@ def triage_snapshot(snapshot: Snapshot) -> TriageResult:
recommendations.append( recommendations.append(
TriageRecommendation( TriageRecommendation(
action_id="readd_to_arr", action_id="readd_to_arr",
title="Add it to the library queue", title="Push to Sonarr/Radarr",
reason="Sonarr/Radarr has not created the entry for this request.", reason="Sonarr/Radarr has not created the entry for this request.",
risk="medium", risk="medium",
) )

View File

@@ -58,3 +58,13 @@ class JellyfinClient(ApiClient):
response = await client.get(url, headers=headers) response = await client.get(url, headers=headers)
response.raise_for_status() response.raise_for_status()
return response.json() return response.json()
async def refresh_library(self, recursive: bool = True) -> None:
    """Ask Jellyfin to rescan its media libraries.

    Silently no-ops when the client has no base URL or API key configured.
    Raises httpx.HTTPStatusError if Jellyfin rejects the request.
    """
    if not (self.base_url and self.api_key):
        return None
    endpoint = f"{self.base_url}/Library/Refresh"
    auth_headers = {"X-Emby-Token": self.api_key}
    # Jellyfin expects lowercase string booleans in the query string.
    query = {"Recursive": "true" if recursive else "false"}
    async with httpx.AsyncClient(timeout=10.0) as http:
        result = await http.post(endpoint, headers=auth_headers, params=query)
        result.raise_for_status()

View File

@@ -1,5 +1,6 @@
from typing import Any, Dict, Optional from typing import Any, Dict, Optional
import httpx import httpx
import logging
from .base import ApiClient from .base import ApiClient
@@ -8,6 +9,7 @@ class QBittorrentClient(ApiClient):
super().__init__(base_url, None) super().__init__(base_url, None)
self.username = username self.username = username
self.password = password self.password = password
self.logger = logging.getLogger(__name__)
def configured(self) -> bool: def configured(self) -> bool:
return bool(self.base_url and self.username and self.password) return bool(self.base_url and self.username and self.password)
@@ -72,6 +74,14 @@ class QBittorrentClient(ApiClient):
raise raise
async def add_torrent_url(self, url: str, category: Optional[str] = None) -> None: async def add_torrent_url(self, url: str, category: Optional[str] = None) -> None:
url_host = None
if isinstance(url, str) and "://" in url:
url_host = url.split("://", 1)[-1].split("/", 1)[0]
self.logger.warning(
"qBittorrent add_torrent_url invoked: category=%s host=%s",
category,
url_host or "unknown",
)
data: Dict[str, Any] = {"urls": url} data: Dict[str, Any] = {"urls": url}
if category: if category:
data["category"] = category data["category"] = category

View File

@@ -46,3 +46,12 @@ class RadarrClient(ApiClient):
async def grab_release(self, guid: str, indexer_id: int) -> Optional[Dict[str, Any]]: async def grab_release(self, guid: str, indexer_id: int) -> Optional[Dict[str, Any]]:
return await self.post("/api/v3/release", payload={"guid": guid, "indexerId": indexer_id}) return await self.post("/api/v3/release", payload={"guid": guid, "indexerId": indexer_id})
async def push_release(self, payload: Dict[str, Any]) -> Optional[Dict[str, Any]]:
    """Push an externally-sourced release into Radarr via /release/push."""
    response = await self.post("/api/v3/release/push", payload=payload)
    return response
async def download_release(self, guid: str, indexer_id: int) -> Optional[Dict[str, Any]]:
    """Run Radarr's DownloadRelease command for a specific release GUID."""
    body = {"name": "DownloadRelease", "guid": guid, "indexerId": indexer_id}
    return await self.post("/api/v3/command", payload=body)

View File

@@ -53,3 +53,12 @@ class SonarrClient(ApiClient):
async def grab_release(self, guid: str, indexer_id: int) -> Optional[Dict[str, Any]]: async def grab_release(self, guid: str, indexer_id: int) -> Optional[Dict[str, Any]]:
return await self.post("/api/v3/release", payload={"guid": guid, "indexerId": indexer_id}) return await self.post("/api/v3/release", payload={"guid": guid, "indexerId": indexer_id})
async def push_release(self, payload: Dict[str, Any]) -> Optional[Dict[str, Any]]:
    """Push an externally-sourced release into Sonarr via /release/push."""
    response = await self.post("/api/v3/release/push", payload=payload)
    return response
async def download_release(self, guid: str, indexer_id: int) -> Optional[Dict[str, Any]]:
    """Run Sonarr's DownloadRelease command for a specific release GUID."""
    body = {"name": "DownloadRelease", "guid": guid, "indexerId": indexer_id}
    return await self.post("/api/v3/command", payload=body)

View File

@@ -70,6 +70,10 @@ class Settings(BaseSettings):
sonarr_root_folder: Optional[str] = Field( sonarr_root_folder: Optional[str] = Field(
default=None, validation_alias=AliasChoices("SONARR_ROOT_FOLDER") default=None, validation_alias=AliasChoices("SONARR_ROOT_FOLDER")
) )
sonarr_qbittorrent_category: Optional[str] = Field(
default="sonarr",
validation_alias=AliasChoices("SONARR_QBITTORRENT_CATEGORY"),
)
radarr_base_url: Optional[str] = Field( radarr_base_url: Optional[str] = Field(
default=None, validation_alias=AliasChoices("RADARR_URL", "RADARR_BASE_URL") default=None, validation_alias=AliasChoices("RADARR_URL", "RADARR_BASE_URL")
@@ -83,6 +87,10 @@ class Settings(BaseSettings):
radarr_root_folder: Optional[str] = Field( radarr_root_folder: Optional[str] = Field(
default=None, validation_alias=AliasChoices("RADARR_ROOT_FOLDER") default=None, validation_alias=AliasChoices("RADARR_ROOT_FOLDER")
) )
radarr_qbittorrent_category: Optional[str] = Field(
default="radarr",
validation_alias=AliasChoices("RADARR_QBITTORRENT_CATEGORY"),
)
prowlarr_base_url: Optional[str] = Field( prowlarr_base_url: Optional[str] = Field(
default=None, validation_alias=AliasChoices("PROWLARR_URL", "PROWLARR_BASE_URL") default=None, validation_alias=AliasChoices("PROWLARR_URL", "PROWLARR_BASE_URL")

View File

@@ -609,6 +609,22 @@ def get_request_cache_count() -> int:
return int(row[0] or 0) return int(row[0] or 0)
def update_request_cache_title(
    request_id: int, title: str, year: Optional[int] = None
) -> None:
    """Backfill the cached title (and optionally year) for one request row.

    Does nothing when *title* is empty. When *year* is None the existing
    stored year is preserved (COALESCE keeps the current column value).
    """
    if not title:
        return
    sql = (
        "UPDATE requests_cache "
        "SET title = ?, year = COALESCE(?, year) "
        "WHERE request_id = ?"
    )
    with _connect() as conn:
        conn.execute(sql, (title, year, request_id))
def prune_duplicate_requests_cache() -> int: def prune_duplicate_requests_cache() -> int:
with _connect() as conn: with _connect() as conn:
cursor = conn.execute( cursor = conn.execute(

View File

@@ -20,6 +20,7 @@ from ..db import (
clear_requests_cache, clear_requests_cache,
clear_history, clear_history,
cleanup_history, cleanup_history,
update_request_cache_title,
) )
from ..runtime import get_runtime_settings from ..runtime import get_runtime_settings
from ..clients.sonarr import SonarrClient from ..clients.sonarr import SonarrClient
@@ -56,10 +57,12 @@ SETTING_KEYS: List[str] = [
"sonarr_api_key", "sonarr_api_key",
"sonarr_quality_profile_id", "sonarr_quality_profile_id",
"sonarr_root_folder", "sonarr_root_folder",
"sonarr_qbittorrent_category",
"radarr_base_url", "radarr_base_url",
"radarr_api_key", "radarr_api_key",
"radarr_quality_profile_id", "radarr_quality_profile_id",
"radarr_root_folder", "radarr_root_folder",
"radarr_qbittorrent_category",
"prowlarr_base_url", "prowlarr_base_url",
"prowlarr_api_key", "prowlarr_api_key",
"qbittorrent_base_url", "qbittorrent_base_url",
@@ -274,7 +277,28 @@ async def read_logs(lines: int = 200) -> Dict[str, Any]:
@router.get("/requests/cache") @router.get("/requests/cache")
async def requests_cache(limit: int = 50) -> Dict[str, Any]: async def requests_cache(limit: int = 50) -> Dict[str, Any]:
return {"rows": get_request_cache_overview(limit)} rows = get_request_cache_overview(limit)
missing_titles = [row for row in rows if not row.get("title")]
if missing_titles:
runtime = get_runtime_settings()
client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
if client.configured():
for row in missing_titles:
request_id = row.get("request_id")
if not isinstance(request_id, int):
continue
details = await requests_router._get_request_details(client, request_id)
if not isinstance(details, dict):
continue
payload = requests_router._parse_request_payload(details)
title = payload.get("title")
if not title:
continue
row["title"] = title
if payload.get("year"):
row["year"] = payload.get("year")
update_request_cache_title(request_id, title, payload.get("year"))
return {"rows": rows}
@router.post("/branding/logo") @router.post("/branding/logo")

View File

@@ -68,6 +68,7 @@ _artwork_prefetch_state: Dict[str, Any] = {
"finished_at": None, "finished_at": None,
} }
_artwork_prefetch_task: Optional[asyncio.Task] = None _artwork_prefetch_task: Optional[asyncio.Task] = None
_media_endpoint_supported: Optional[bool] = None
STATUS_LABELS = { STATUS_LABELS = {
1: "Waiting for approval", 1: "Waiting for approval",
@@ -269,10 +270,17 @@ async def _hydrate_title_from_tmdb(
async def _hydrate_media_details(client: JellyseerrClient, media_id: Optional[int]) -> Optional[Dict[str, Any]]: async def _hydrate_media_details(client: JellyseerrClient, media_id: Optional[int]) -> Optional[Dict[str, Any]]:
if not media_id: if not media_id:
return None return None
global _media_endpoint_supported
if _media_endpoint_supported is False:
return None
try: try:
details = await client.get_media(int(media_id)) details = await client.get_media(int(media_id))
except httpx.HTTPStatusError: except httpx.HTTPStatusError as exc:
if exc.response is not None and exc.response.status_code == 405:
_media_endpoint_supported = False
logger.info("Jellyseerr media endpoint rejected GET requests; skipping media lookups.")
return None return None
_media_endpoint_supported = True
return details if isinstance(details, dict) else None return details if isinstance(details, dict) else None
@@ -393,14 +401,23 @@ async def _sync_all_requests(client: JellyseerrClient) -> int:
continue continue
payload = _parse_request_payload(item) payload = _parse_request_payload(item)
request_id = payload.get("request_id") request_id = payload.get("request_id")
cached_title = None
if isinstance(request_id, int): if isinstance(request_id, int):
if not payload.get("title"):
cached = get_request_cache_by_id(request_id)
if cached and cached.get("title"):
cached_title = cached.get("title")
if not payload.get("title") or not payload.get("media_id"): if not payload.get("title") or not payload.get("media_id"):
logger.debug("Jellyseerr sync hydrate request_id=%s", request_id) logger.debug("Jellyseerr sync hydrate request_id=%s", request_id)
details = await _get_request_details(client, request_id) details = await _get_request_details(client, request_id)
if isinstance(details, dict): if isinstance(details, dict):
payload = _parse_request_payload(details) payload = _parse_request_payload(details)
item = details item = details
if not payload.get("title") and payload.get("media_id"): if (
not payload.get("title")
and payload.get("media_id")
and (not payload.get("tmdb_id") or not payload.get("media_type"))
):
media_details = await _hydrate_media_details(client, payload.get("media_id")) media_details = await _hydrate_media_details(client, payload.get("media_id"))
if isinstance(media_details, dict): if isinstance(media_details, dict):
media_title = media_details.get("title") or media_details.get("name") media_title = media_details.get("title") or media_details.get("name")
@@ -428,7 +445,7 @@ async def _sync_all_requests(client: JellyseerrClient) -> int:
if isinstance(details, dict): if isinstance(details, dict):
item = details item = details
payload = _parse_request_payload(details) payload = _parse_request_payload(details)
if not payload.get("title") and payload.get("tmdb_id"): if not payload.get("title") and payload.get("tmdb_id") and payload.get("media_type"):
hydrated_title, hydrated_year = await _hydrate_title_from_tmdb( hydrated_title, hydrated_year = await _hydrate_title_from_tmdb(
client, payload.get("media_type"), payload.get("tmdb_id") client, payload.get("media_type"), payload.get("tmdb_id")
) )
@@ -436,6 +453,8 @@ async def _sync_all_requests(client: JellyseerrClient) -> int:
payload["title"] = hydrated_title payload["title"] = hydrated_title
if hydrated_year: if hydrated_year:
payload["year"] = hydrated_year payload["year"] = hydrated_year
if not payload.get("title") and cached_title:
payload["title"] = cached_title
if not isinstance(payload.get("request_id"), int): if not isinstance(payload.get("request_id"), int):
continue continue
payload_json = json.dumps(item, ensure_ascii=True) payload_json = json.dumps(item, ensure_ascii=True)
@@ -516,6 +535,7 @@ async def _sync_delta_requests(client: JellyseerrClient) -> int:
if isinstance(request_id, int): if isinstance(request_id, int):
cached = get_request_cache_by_id(request_id) cached = get_request_cache_by_id(request_id)
incoming_updated = payload.get("updated_at") incoming_updated = payload.get("updated_at")
cached_title = cached.get("title") if cached else None
if cached and incoming_updated and cached.get("updated_at") == incoming_updated and cached.get("title"): if cached and incoming_updated and cached.get("updated_at") == incoming_updated and cached.get("title"):
continue continue
if not payload.get("title") or not payload.get("media_id"): if not payload.get("title") or not payload.get("media_id"):
@@ -523,7 +543,11 @@ async def _sync_delta_requests(client: JellyseerrClient) -> int:
if isinstance(details, dict): if isinstance(details, dict):
payload = _parse_request_payload(details) payload = _parse_request_payload(details)
item = details item = details
if not payload.get("title") and payload.get("media_id"): if (
not payload.get("title")
and payload.get("media_id")
and (not payload.get("tmdb_id") or not payload.get("media_type"))
):
media_details = await _hydrate_media_details(client, payload.get("media_id")) media_details = await _hydrate_media_details(client, payload.get("media_id"))
if isinstance(media_details, dict): if isinstance(media_details, dict):
media_title = media_details.get("title") or media_details.get("name") media_title = media_details.get("title") or media_details.get("name")
@@ -551,7 +575,7 @@ async def _sync_delta_requests(client: JellyseerrClient) -> int:
if isinstance(details, dict): if isinstance(details, dict):
payload = _parse_request_payload(details) payload = _parse_request_payload(details)
item = details item = details
if not payload.get("title") and payload.get("tmdb_id"): if not payload.get("title") and payload.get("tmdb_id") and payload.get("media_type"):
hydrated_title, hydrated_year = await _hydrate_title_from_tmdb( hydrated_title, hydrated_year = await _hydrate_title_from_tmdb(
client, payload.get("media_type"), payload.get("tmdb_id") client, payload.get("media_type"), payload.get("tmdb_id")
) )
@@ -559,6 +583,8 @@ async def _sync_delta_requests(client: JellyseerrClient) -> int:
payload["title"] = hydrated_title payload["title"] = hydrated_title
if hydrated_year: if hydrated_year:
payload["year"] = hydrated_year payload["year"] = hydrated_year
if not payload.get("title") and cached_title:
payload["title"] = cached_title
if not isinstance(payload.get("request_id"), int): if not isinstance(payload.get("request_id"), int):
continue continue
payload_json = json.dumps(item, ensure_ascii=True) payload_json = json.dumps(item, ensure_ascii=True)
@@ -999,6 +1025,97 @@ def _normalize_categories(categories: Any) -> List[str]:
return names return names
def _normalize_indexer_name(value: Optional[str]) -> str:
if not isinstance(value, str):
return ""
return "".join(ch for ch in value.lower().strip() if ch.isalnum())
def _resolve_arr_indexer_id(
    indexers: Any, indexer_name: Optional[str], indexer_id: Optional[int], service_label: str
) -> Optional[int]:
    """Map a reported indexer onto the Arr app's own indexer id.

    Matching tiers, in order: raw id lookup (only when no name is given),
    exact case-insensitive name match, alphanumeric-compacted match, then
    substring containment. Returns None when nothing usable matches; a miss
    with a name is logged as a warning so operators can align indexer names.
    """
    if not isinstance(indexers, list):
        return None

    def pick(predicate):
        # First dict entry satisfying the predicate, mirroring next(...) semantics.
        for entry in indexers:
            if isinstance(entry, dict) and predicate(entry):
                return entry
        return None

    if not indexer_name:
        if indexer_id is None:
            return None
        hit = pick(lambda e: e.get("id") == indexer_id)
        if hit and hit.get("id") is not None:
            logger.debug("%s indexer id match: %s", service_label, hit.get("id"))
            return int(hit["id"])
        return None

    target = indexer_name.lower().strip()
    target_compact = _normalize_indexer_name(indexer_name)

    hit = pick(lambda e: str(e.get("name", "")).lower().strip() == target)
    if hit and hit.get("id") is not None:
        logger.debug("%s indexer match: '%s' -> %s", service_label, indexer_name, hit.get("id"))
        return int(hit["id"])

    hit = pick(lambda e: _normalize_indexer_name(str(e.get("name", ""))) == target_compact)
    if hit and hit.get("id") is not None:
        logger.debug("%s indexer compact match: '%s' -> %s", service_label, indexer_name, hit.get("id"))
        return int(hit["id"])

    hit = pick(lambda e: target in str(e.get("name", "")).lower())
    if hit and hit.get("id") is not None:
        logger.debug("%s indexer contains match: '%s' -> %s", service_label, indexer_name, hit.get("id"))
        return int(hit["id"])

    logger.warning(
        "%s indexer not found for name '%s'. Check indexer names in the Arr app.",
        service_label,
        indexer_name,
    )
    return None
async def _fallback_qbittorrent_download(download_url: Optional[str], category: str) -> bool:
    """Queue a release directly in qBittorrent when the Arr apps cannot take it.

    Returns True only when a download URL was supplied AND a fully configured
    qBittorrent client accepted the add request; False otherwise.
    """
    if not download_url:
        return False
    settings = get_runtime_settings()
    qbt = QBittorrentClient(
        settings.qbittorrent_base_url,
        settings.qbittorrent_username,
        settings.qbittorrent_password,
    )
    if not qbt.configured():
        return False
    await qbt.add_torrent_url(download_url, category=category)
    return True
def _resolve_qbittorrent_category(value: Optional[str], default: str) -> str:
if isinstance(value, str):
cleaned = value.strip()
if cleaned:
return cleaned
return default
def _filter_prowlarr_results(results: Any, request_type: RequestType) -> List[Dict[str, Any]]: def _filter_prowlarr_results(results: Any, request_type: RequestType) -> List[Dict[str, Any]]:
if not isinstance(results, list): if not isinstance(results, list):
return [] return []
@@ -1607,36 +1724,91 @@ async def action_grab(
snapshot = await build_snapshot(request_id) snapshot = await build_snapshot(request_id)
guid = payload.get("guid") guid = payload.get("guid")
indexer_id = payload.get("indexerId") indexer_id = payload.get("indexerId")
download_url = payload.get("downloadUrl")
indexer_name = payload.get("indexerName") or payload.get("indexer") indexer_name = payload.get("indexerName") or payload.get("indexer")
download_url = payload.get("downloadUrl")
release_title = payload.get("title")
release_size = payload.get("size")
release_protocol = payload.get("protocol") or "torrent"
release_publish = payload.get("publishDate")
release_seeders = payload.get("seeders")
release_leechers = payload.get("leechers")
if not guid or not indexer_id: if not guid or not indexer_id:
raise HTTPException(status_code=400, detail="Missing guid or indexerId") raise HTTPException(status_code=400, detail="Missing guid or indexerId")
logger.info(
"Grab requested: request_id=%s guid=%s indexer_id=%s indexer_name=%s has_download_url=%s has_title=%s",
request_id,
guid,
indexer_id,
indexer_name,
bool(download_url),
bool(release_title),
)
push_payload = None
if download_url and release_title:
push_payload = {
"title": release_title,
"downloadUrl": download_url,
"protocol": release_protocol,
"publishDate": release_publish,
"size": release_size,
"indexer": indexer_name,
"guid": guid,
"seeders": release_seeders,
"leechers": release_leechers,
}
runtime = get_runtime_settings() runtime = get_runtime_settings()
if snapshot.request_type.value == "tv": if snapshot.request_type.value == "tv":
client = SonarrClient(runtime.sonarr_base_url, runtime.sonarr_api_key) client = SonarrClient(runtime.sonarr_base_url, runtime.sonarr_api_key)
if not client.configured(): if not client.configured():
raise HTTPException(status_code=400, detail="Sonarr not configured") raise HTTPException(status_code=400, detail="Sonarr not configured")
try: try:
if indexer_name: indexers = await client.get_indexers()
indexers = await client.get_indexers() resolved_indexer_id = _resolve_arr_indexer_id(indexers, indexer_name, indexer_id, "Sonarr")
if isinstance(indexers, list): response = None
matched = next( action_message = "Grab sent to Sonarr."
( if resolved_indexer_id is not None:
item indexer_id = resolved_indexer_id
for item in indexers logger.info("Sonarr grab: attempting DownloadRelease command.")
if isinstance(item, dict) try:
and str(item.get("name", "")).lower() == str(indexer_name).lower() response = await client.download_release(str(guid), int(indexer_id))
), except httpx.HTTPStatusError as exc:
None, if exc.response is not None and exc.response.status_code in {404, 405}:
) logger.info("Sonarr grab: DownloadRelease unsupported; will try release push.")
if matched and matched.get("id") is not None: response = None
indexer_id = int(matched["id"]) else:
response = await client.grab_release(str(guid), int(indexer_id)) raise
if response is None and push_payload:
logger.info("Sonarr grab: attempting release push.")
try:
response = await client.push_release(push_payload)
except httpx.HTTPStatusError as exc:
if exc.response is not None and exc.response.status_code == 404:
logger.info("Sonarr grab: release push not supported.")
else:
raise
if response is None:
category = _resolve_qbittorrent_category(
runtime.sonarr_qbittorrent_category, "sonarr"
)
qbittorrent_added = await _fallback_qbittorrent_download(download_url, category)
if qbittorrent_added:
action_message = f"Grab sent to qBittorrent (category {category})."
response = {"qbittorrent": "queued"}
else:
if resolved_indexer_id is None:
detail = "Indexer not found in Sonarr and no release push available."
elif not push_payload:
detail = "Sonarr did not accept the grab request (no release URL available)."
else:
detail = "Sonarr did not accept the grab request (DownloadRelease unsupported)."
raise HTTPException(status_code=400, detail=detail)
except httpx.HTTPStatusError as exc: except httpx.HTTPStatusError as exc:
raise HTTPException(status_code=502, detail=f"Sonarr grab failed: {exc}") from exc raise HTTPException(status_code=502, detail=f"Sonarr grab failed: {exc}") from exc
await asyncio.to_thread( await asyncio.to_thread(
save_action, request_id, "grab", "Grab release", "ok", "Grab sent to Sonarr." save_action, request_id, "grab", "Grab release", "ok", action_message
) )
return {"status": "ok", "response": response} return {"status": "ok", "response": response}
if snapshot.request_type.value == "movie": if snapshot.request_type.value == "movie":
@@ -1644,25 +1816,50 @@ async def action_grab(
if not client.configured(): if not client.configured():
raise HTTPException(status_code=400, detail="Radarr not configured") raise HTTPException(status_code=400, detail="Radarr not configured")
try: try:
if indexer_name: indexers = await client.get_indexers()
indexers = await client.get_indexers() resolved_indexer_id = _resolve_arr_indexer_id(indexers, indexer_name, indexer_id, "Radarr")
if isinstance(indexers, list): response = None
matched = next( action_message = "Grab sent to Radarr."
( if resolved_indexer_id is not None:
item indexer_id = resolved_indexer_id
for item in indexers logger.info("Radarr grab: attempting DownloadRelease command.")
if isinstance(item, dict) try:
and str(item.get("name", "")).lower() == str(indexer_name).lower() response = await client.download_release(str(guid), int(indexer_id))
), except httpx.HTTPStatusError as exc:
None, if exc.response is not None and exc.response.status_code in {404, 405}:
) logger.info("Radarr grab: DownloadRelease unsupported; will try release push.")
if matched and matched.get("id") is not None: response = None
indexer_id = int(matched["id"]) else:
response = await client.grab_release(str(guid), int(indexer_id)) raise
if response is None and push_payload:
logger.info("Radarr grab: attempting release push.")
try:
response = await client.push_release(push_payload)
except httpx.HTTPStatusError as exc:
if exc.response is not None and exc.response.status_code == 404:
logger.info("Radarr grab: release push not supported.")
else:
raise
if response is None:
category = _resolve_qbittorrent_category(
runtime.radarr_qbittorrent_category, "radarr"
)
qbittorrent_added = await _fallback_qbittorrent_download(download_url, category)
if qbittorrent_added:
action_message = f"Grab sent to qBittorrent (category {category})."
response = {"qbittorrent": "queued"}
else:
if resolved_indexer_id is None:
detail = "Indexer not found in Radarr and no release push available."
elif not push_payload:
detail = "Radarr did not accept the grab request (no release URL available)."
else:
detail = "Radarr did not accept the grab request (DownloadRelease unsupported)."
raise HTTPException(status_code=400, detail=detail)
except httpx.HTTPStatusError as exc: except httpx.HTTPStatusError as exc:
raise HTTPException(status_code=502, detail=f"Radarr grab failed: {exc}") from exc raise HTTPException(status_code=502, detail=f"Radarr grab failed: {exc}") from exc
await asyncio.to_thread( await asyncio.to_thread(
save_action, request_id, "grab", "Grab release", "ok", "Grab sent to Radarr." save_action, request_id, "grab", "Grab release", "ok", action_message
) )
return {"status": "ok", "response": response} return {"status": "ok", "response": response}

View File

@@ -11,9 +11,14 @@ from ..clients.radarr import RadarrClient
from ..clients.prowlarr import ProwlarrClient from ..clients.prowlarr import ProwlarrClient
from ..clients.qbittorrent import QBittorrentClient from ..clients.qbittorrent import QBittorrentClient
from ..runtime import get_runtime_settings from ..runtime import get_runtime_settings
from ..db import save_snapshot, get_request_cache_payload from ..db import save_snapshot, get_request_cache_payload, get_recent_snapshots, get_setting, set_setting
from ..models import ActionOption, NormalizedState, RequestType, Snapshot, TimelineHop from ..models import ActionOption, NormalizedState, RequestType, Snapshot, TimelineHop
logger = logging.getLogger(__name__)
JELLYFIN_SCAN_COOLDOWN_SECONDS = 300
_jellyfin_scan_key = "jellyfin_scan_last_at"
STATUS_LABELS = { STATUS_LABELS = {
1: "Waiting for approval", 1: "Waiting for approval",
@@ -41,6 +46,35 @@ def _pick_first(value: Any) -> Optional[Dict[str, Any]]:
return None return None
async def _maybe_refresh_jellyfin(snapshot: Snapshot) -> None:
    """Trigger a Jellyfin library scan when a request first becomes available.

    Guards, in order:
      * only fires for available/completed snapshots;
      * requires a configured Jellyfin client;
      * honors a global cooldown (JELLYFIN_SCAN_COOLDOWN_SECONDS) persisted
        via the settings store under ``_jellyfin_scan_key``;
      * skips when the previous snapshot for this request was already
        available/completed, i.e. this is not a fresh state transition.

    Best-effort: a failed refresh is logged and swallowed, and the cooldown
    timestamp is only recorded after a successful refresh.
    """
    if snapshot.state not in {NormalizedState.available, NormalizedState.completed}:
        return
    runtime = get_runtime_settings()
    client = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key)
    if not client.configured():
        return
    # Cooldown is global (shared across all requests), stored as an ISO timestamp.
    last_scan = get_setting(_jellyfin_scan_key)
    if last_scan:
        try:
            # "Z" suffix is normalized so fromisoformat yields an aware datetime.
            parsed = datetime.fromisoformat(last_scan.replace("Z", "+00:00"))
            if (datetime.now(timezone.utc) - parsed).total_seconds() < JELLYFIN_SCAN_COOLDOWN_SECONDS:
                return
        except ValueError:
            # Corrupt/legacy timestamp: ignore it and let the scan proceed.
            pass
    # Only refresh on a transition INTO available/completed, not on every poll.
    previous = await asyncio.to_thread(get_recent_snapshots, snapshot.request_id, 1)
    if previous:
        prev_state = previous[0].get("state")
        if prev_state in {NormalizedState.available.value, NormalizedState.completed.value}:
            return
    try:
        await client.refresh_library()
    except Exception as exc:
        # Best-effort: do not fail snapshot building over a Jellyfin hiccup.
        logger.warning("Jellyfin library refresh failed: %s", exc)
        return
    set_setting(_jellyfin_scan_key, datetime.now(timezone.utc).isoformat())
    logger.info("Jellyfin library refresh triggered: request_id=%s", snapshot.request_id)
def _queue_records(queue: Any) -> List[Dict[str, Any]]: def _queue_records(queue: Any) -> List[Dict[str, Any]]:
if isinstance(queue, dict): if isinstance(queue, dict):
records = queue.get("records") records = queue.get("records")
@@ -381,10 +415,6 @@ async def build_snapshot(request_id: str) -> Snapshot:
if arr_state is None: if arr_state is None:
arr_state = "unknown" arr_state = "unknown"
if arr_state == "missing" and media_status_code in {4}:
arr_state = "available"
elif arr_state == "missing" and media_status_code in {6}:
arr_state = "added"
timeline.append(TimelineHop(service="Sonarr/Radarr", status=arr_state, details=arr_details)) timeline.append(TimelineHop(service="Sonarr/Radarr", status=arr_state, details=arr_details))
@@ -524,7 +554,7 @@ async def build_snapshot(request_id: str) -> Snapshot:
snapshot.state_reason = "Waiting for download to start in qBittorrent." snapshot.state_reason = "Waiting for download to start in qBittorrent."
elif arr_state == "missing" and derived_approved: elif arr_state == "missing" and derived_approved:
snapshot.state = NormalizedState.needs_add snapshot.state = NormalizedState.needs_add
snapshot.state_reason = "Approved, but not added to the library yet." snapshot.state_reason = "Approved, but not yet added to Sonarr/Radarr."
elif arr_state == "searching": elif arr_state == "searching":
snapshot.state = NormalizedState.searching snapshot.state = NormalizedState.searching
snapshot.state_reason = "Searching for a matching release." snapshot.state_reason = "Searching for a matching release."
@@ -548,7 +578,7 @@ async def build_snapshot(request_id: str) -> Snapshot:
actions.append( actions.append(
ActionOption( ActionOption(
id="readd_to_arr", id="readd_to_arr",
label="Add to the library queue (Sonarr/Radarr)", label="Push to Sonarr/Radarr",
risk="medium", risk="medium",
) )
) )
@@ -604,5 +634,6 @@ async def build_snapshot(request_id: str) -> Snapshot:
}, },
} }
await _maybe_refresh_jellyfin(snapshot)
await asyncio.to_thread(save_snapshot, snapshot) await asyncio.to_thread(save_snapshot, snapshot)
return snapshot return snapshot

View File

@@ -1,6 +1,6 @@
'use client' 'use client'
import { useEffect, useMemo, useState } from 'react' import { useCallback, useEffect, useMemo, useState } from 'react'
import { useRouter } from 'next/navigation' import { useRouter } from 'next/navigation'
import { authFetch, clearToken, getApiBase, getToken } from '../lib/auth' import { authFetch, clearToken, getApiBase, getToken } from '../lib/auth'
import AdminShell from '../ui/AdminShell' import AdminShell from '../ui/AdminShell'
@@ -107,7 +107,7 @@ export default function SettingsPage({ section }: SettingsPageProps) {
const [maintenanceStatus, setMaintenanceStatus] = useState<string | null>(null) const [maintenanceStatus, setMaintenanceStatus] = useState<string | null>(null)
const [maintenanceBusy, setMaintenanceBusy] = useState(false) const [maintenanceBusy, setMaintenanceBusy] = useState(false)
const loadSettings = async () => { const loadSettings = useCallback(async () => {
const baseUrl = getApiBase() const baseUrl = getApiBase()
const response = await authFetch(`${baseUrl}/admin/settings`) const response = await authFetch(`${baseUrl}/admin/settings`)
if (!response.ok) { if (!response.ok) {
@@ -139,9 +139,9 @@ export default function SettingsPage({ section }: SettingsPageProps) {
} }
setFormValues(initialValues) setFormValues(initialValues)
setStatus(null) setStatus(null)
} }, [router])
const loadArtworkPrefetchStatus = async () => { const loadArtworkPrefetchStatus = useCallback(async () => {
try { try {
const baseUrl = getApiBase() const baseUrl = getApiBase()
const response = await authFetch(`${baseUrl}/admin/requests/artwork/status`) const response = await authFetch(`${baseUrl}/admin/requests/artwork/status`)
@@ -153,10 +153,9 @@ export default function SettingsPage({ section }: SettingsPageProps) {
} catch (err) { } catch (err) {
console.error(err) console.error(err)
} }
} }, [])
const loadOptions = useCallback(async (service: 'sonarr' | 'radarr') => {
const loadOptions = async (service: 'sonarr' | 'radarr') => {
try { try {
const baseUrl = getApiBase() const baseUrl = getApiBase()
const response = await authFetch(`${baseUrl}/admin/${service}/options`) const response = await authFetch(`${baseUrl}/admin/${service}/options`)
@@ -185,7 +184,7 @@ export default function SettingsPage({ section }: SettingsPageProps) {
setRadarrError('Could not load Radarr options.') setRadarrError('Could not load Radarr options.')
} }
} }
} }, [])
useEffect(() => { useEffect(() => {
const load = async () => { const load = async () => {
@@ -213,7 +212,7 @@ export default function SettingsPage({ section }: SettingsPageProps) {
if (section === 'radarr') { if (section === 'radarr') {
void loadOptions('radarr') void loadOptions('radarr')
} }
}, [router, section]) }, [loadArtworkPrefetchStatus, loadOptions, loadSettings, router, section])
const groupedSettings = useMemo(() => { const groupedSettings = useMemo(() => {
const groups: Record<string, AdminSetting[]> = {} const groups: Record<string, AdminSetting[]> = {}
@@ -271,10 +270,12 @@ export default function SettingsPage({ section }: SettingsPageProps) {
sonarr_api_key: 'API key for Sonarr.', sonarr_api_key: 'API key for Sonarr.',
sonarr_quality_profile_id: 'Quality profile used when adding TV shows.', sonarr_quality_profile_id: 'Quality profile used when adding TV shows.',
sonarr_root_folder: 'Root folder where Sonarr stores TV shows.', sonarr_root_folder: 'Root folder where Sonarr stores TV shows.',
sonarr_qbittorrent_category: 'qBittorrent category for manual Sonarr downloads.',
radarr_base_url: 'Radarr server URL for movies.', radarr_base_url: 'Radarr server URL for movies.',
radarr_api_key: 'API key for Radarr.', radarr_api_key: 'API key for Radarr.',
radarr_quality_profile_id: 'Quality profile used when adding movies.', radarr_quality_profile_id: 'Quality profile used when adding movies.',
radarr_root_folder: 'Root folder where Radarr stores movies.', radarr_root_folder: 'Root folder where Radarr stores movies.',
radarr_qbittorrent_category: 'qBittorrent category for manual Radarr downloads.',
prowlarr_base_url: 'Prowlarr server URL for indexer searches.', prowlarr_base_url: 'Prowlarr server URL for indexer searches.',
prowlarr_api_key: 'API key for Prowlarr.', prowlarr_api_key: 'API key for Prowlarr.',
qbittorrent_base_url: 'qBittorrent server URL for download status.', qbittorrent_base_url: 'qBittorrent server URL for download status.',
@@ -472,7 +473,7 @@ export default function SettingsPage({ section }: SettingsPageProps) {
active = false active = false
clearInterval(timer) clearInterval(timer)
} }
}, [artworkPrefetch?.status]) }, [artworkPrefetch])
useEffect(() => { useEffect(() => {
if (!artworkPrefetch || artworkPrefetch.status === 'running') { if (!artworkPrefetch || artworkPrefetch.status === 'running') {
@@ -482,7 +483,7 @@ export default function SettingsPage({ section }: SettingsPageProps) {
setArtworkPrefetch(null) setArtworkPrefetch(null)
}, 5000) }, 5000)
return () => clearTimeout(timer) return () => clearTimeout(timer)
}, [artworkPrefetch?.status]) }, [artworkPrefetch])
useEffect(() => { useEffect(() => {
if (!requestsSync || requestsSync.status !== 'running') { if (!requestsSync || requestsSync.status !== 'running') {
@@ -510,7 +511,7 @@ export default function SettingsPage({ section }: SettingsPageProps) {
active = false active = false
clearInterval(timer) clearInterval(timer)
} }
}, [requestsSync?.status]) }, [requestsSync])
useEffect(() => { useEffect(() => {
if (!requestsSync || requestsSync.status === 'running') { if (!requestsSync || requestsSync.status === 'running') {
@@ -520,9 +521,9 @@ export default function SettingsPage({ section }: SettingsPageProps) {
setRequestsSync(null) setRequestsSync(null)
}, 5000) }, 5000)
return () => clearTimeout(timer) return () => clearTimeout(timer)
}, [requestsSync?.status]) }, [requestsSync])
const loadLogs = async () => { const loadLogs = useCallback(async () => {
setLogsStatus(null) setLogsStatus(null)
try { try {
const baseUrl = getApiBase() const baseUrl = getApiBase()
@@ -547,7 +548,7 @@ export default function SettingsPage({ section }: SettingsPageProps) {
: 'Could not load logs.' : 'Could not load logs.'
setLogsStatus(message) setLogsStatus(message)
} }
} }, [logsCount])
useEffect(() => { useEffect(() => {
if (!showLogs) { if (!showLogs) {
@@ -558,7 +559,7 @@ export default function SettingsPage({ section }: SettingsPageProps) {
void loadLogs() void loadLogs()
}, 5000) }, 5000)
return () => clearInterval(timer) return () => clearInterval(timer)
}, [logsCount, showLogs]) }, [loadLogs, showLogs])
const loadCache = async () => { const loadCache = async () => {
setCacheStatus(null) setCacheStatus(null)

View File

@@ -132,7 +132,7 @@ export default function HowItWorksPage() {
</section> </section>
<section className="how-callout"> <section className="how-callout">
<h2>Why Magent sometimes says "waiting"</h2> <h2>Why Magent sometimes says &quot;waiting&quot;</h2>
<p> <p>
If the search helper cannot find a match yet, Magent will say there is nothing to grab. If the search helper cannot find a match yet, Magent will say there is nothing to grab.
That does not mean it is broken. It usually means the release is not available yet. That does not mean it is broken. It usually means the release is not available yet.

View File

@@ -1,5 +1,6 @@
'use client' 'use client'
import Image from 'next/image'
import { useEffect, useState } from 'react' import { useEffect, useState } from 'react'
import { useRouter } from 'next/navigation' import { useRouter } from 'next/navigation'
import { authFetch, clearToken, getApiBase, getToken } from '../../lib/auth' import { authFetch, clearToken, getApiBase, getToken } from '../../lib/auth'
@@ -33,6 +34,7 @@ type ReleaseOption = {
seeders?: number seeders?: number
leechers?: number leechers?: number
protocol?: string protocol?: string
publishDate?: string
infoUrl?: string infoUrl?: string
downloadUrl?: string downloadUrl?: string
} }
@@ -123,7 +125,7 @@ const friendlyState = (value: string) => {
const map: Record<string, string> = { const map: Record<string, string> = {
REQUESTED: 'Waiting for approval', REQUESTED: 'Waiting for approval',
APPROVED: 'Approved and queued', APPROVED: 'Approved and queued',
NEEDS_ADD: 'Needs adding to the library', NEEDS_ADD: 'Push to Sonarr/Radarr',
ADDED_TO_ARR: 'Added to the library queue', ADDED_TO_ARR: 'Added to the library queue',
SEARCHING: 'Searching for releases', SEARCHING: 'Searching for releases',
GRABBED: 'Download queued', GRABBED: 'Download queued',
@@ -155,7 +157,7 @@ const friendlyTimelineStatus = (service: string, status: string) => {
} }
if (service === 'Sonarr/Radarr') { if (service === 'Sonarr/Radarr') {
const map: Record<string, string> = { const map: Record<string, string> = {
missing: 'Not added yet', missing: 'Push to Sonarr/Radarr',
added: 'Added to the library queue', added: 'Added to the library queue',
searching: 'Searching for releases', searching: 'Searching for releases',
available: 'Ready to watch', available: 'Ready to watch',
@@ -250,7 +252,7 @@ export default function RequestTimelinePage({ params }: { params: { id: string }
} }
load() load()
}, [params.id]) }, [params.id, router])
if (loading) { if (loading) {
return ( return (
@@ -274,9 +276,11 @@ export default function RequestTimelinePage({ params }: { params: { id: string }
const downloadHop = snapshot.timeline.find((hop) => hop.service === 'qBittorrent') const downloadHop = snapshot.timeline.find((hop) => hop.service === 'qBittorrent')
const downloadState = downloadHop?.details?.summary ?? downloadHop?.status ?? 'Unknown' const downloadState = downloadHop?.details?.summary ?? downloadHop?.status ?? 'Unknown'
const jellyfinAvailable = Boolean(snapshot.raw?.jellyfin?.available) const jellyfinAvailable = Boolean(snapshot.raw?.jellyfin?.available)
const arrStageLabel =
snapshot.state === 'NEEDS_ADD' ? 'Push to Sonarr/Radarr' : 'Library queue'
const pipelineSteps = [ const pipelineSteps = [
{ key: 'Jellyseerr', label: 'Jellyseerr' }, { key: 'Jellyseerr', label: 'Jellyseerr' },
{ key: 'Sonarr/Radarr', label: 'Library queue' }, { key: 'Sonarr/Radarr', label: arrStageLabel },
{ key: 'Prowlarr', label: 'Search' }, { key: 'Prowlarr', label: 'Search' },
{ key: 'qBittorrent', label: 'Download' }, { key: 'qBittorrent', label: 'Download' },
{ key: 'Jellyfin', label: 'Jellyfin' }, { key: 'Jellyfin', label: 'Jellyfin' },
@@ -308,11 +312,14 @@ export default function RequestTimelinePage({ params }: { params: { id: string }
<div className="request-header"> <div className="request-header">
<div className="request-header-main"> <div className="request-header-main">
{resolvedPoster && ( {resolvedPoster && (
<img <Image
className="request-poster" className="request-poster"
src={resolvedPoster} src={resolvedPoster}
alt={`${snapshot.title} poster`} alt={`${snapshot.title} poster`}
loading="lazy" width={90}
height={135}
sizes="90px"
unoptimized
/> />
)} )}
<div> <div>
@@ -592,6 +599,12 @@ export default function RequestTimelinePage({ params }: { params: { id: string }
indexerId: release.indexerId, indexerId: release.indexerId,
indexerName: release.indexer, indexerName: release.indexer,
downloadUrl: release.downloadUrl, downloadUrl: release.downloadUrl,
title: release.title,
size: release.size,
protocol: release.protocol,
publishDate: release.publishDate,
seeders: release.seeders,
leechers: release.leechers,
}), }),
} }
) )