# Magent/backend/app/routers/requests.py
# (file-listing metadata from the paste, preserved as a comment:
#  1602 lines, 65 KiB, Python)
from typing import Any, Dict, List, Optional, Tuple
import asyncio
import httpx
import json
import logging
import time
from urllib.parse import quote
from datetime import datetime, timezone, timedelta
from fastapi import APIRouter, HTTPException, Depends
from ..clients.jellyseerr import JellyseerrClient
from ..clients.qbittorrent import QBittorrentClient
from ..clients.radarr import RadarrClient
from ..clients.sonarr import SonarrClient
from ..clients.prowlarr import ProwlarrClient
from ..ai.triage import triage_snapshot
from ..auth import get_current_user
from ..runtime import get_runtime_settings
from .images import cache_tmdb_image
from ..db import (
save_action,
get_recent_actions,
get_recent_snapshots,
get_cached_requests,
get_cached_requests_since,
get_cached_request_by_media_id,
get_request_cache_by_id,
get_request_cache_payload,
get_request_cache_last_updated,
get_request_cache_count,
get_request_cache_payloads,
prune_duplicate_requests_cache,
upsert_request_cache,
get_setting,
set_setting,
cleanup_history,
)
from ..models import Snapshot, TriageResult, RequestType
from ..services.snapshot import build_snapshot
# Every route in this router requires an authenticated user.
router = APIRouter(prefix="/requests", tags=["requests"], dependencies=[Depends(get_current_user)])
# Lifetime of entries in the in-process Jellyseerr detail cache (seconds).
CACHE_TTL_SECONDS = 600
# cache key -> (expiry as epoch seconds, detail payload).
_detail_cache: Dict[str, Tuple[float, Dict[str, Any]]] = {}
# NOTE(review): not referenced anywhere in this module view — presumably used
# elsewhere or vestigial; confirm before removing.
REQUEST_CACHE_TTL_SECONDS = 600
logger = logging.getLogger(__name__)
# Shared, mutable progress state for the full/delta request sync task.
_sync_state: Dict[str, Any] = {
    "status": "idle",
    "stored": 0,
    "total": None,
    "skip": 0,
    "message": None,
    "started_at": None,
    "finished_at": None,
}
# Handle of the currently running sync task (full or delta), if any.
_sync_task: Optional[asyncio.Task] = None
# Settings key under which the ISO timestamp of the last sync is persisted.
_sync_last_key = "requests_sync_last_at"
# The in-memory "recent requests" cache covers at most this many days ...
RECENT_CACHE_MAX_DAYS = 180
# ... and is considered stale after this many seconds.
RECENT_CACHE_TTL_SECONDS = 300
_recent_cache: Dict[str, Any] = {"items": [], "updated_at": None}
# Shared, mutable progress state for the artwork prefetch task.
_artwork_prefetch_state: Dict[str, Any] = {
    "status": "idle",
    "processed": 0,
    "total": 0,
    "message": "",
    "started_at": None,
    "finished_at": None,
}
_artwork_prefetch_task: Optional[asyncio.Task] = None
# Human-readable labels for Jellyseerr's numeric request statuses.
STATUS_LABELS = {
    1: "Waiting for approval",
    2: "Approved",
    3: "Declined",
    4: "Ready to watch",
    5: "Working on it",
    6: "Partially ready",
}
def _cache_get(key: str) -> Optional[Dict[str, Any]]:
    """Return the cached detail payload for *key*, evicting it if expired."""
    entry = _detail_cache.get(key)
    if entry is None:
        return None
    expiry, payload = entry
    if time.time() > expiry:
        # Lazy eviction: drop the stale entry on access.
        _detail_cache.pop(key, None)
        return None
    return payload
def _cache_set(key: str, payload: Dict[str, Any]) -> None:
    """Store *payload* under *key* with the standard detail-cache TTL."""
    expiry = time.time() + CACHE_TTL_SECONDS
    _detail_cache[key] = (expiry, payload)
def _status_label(value: Any) -> str:
    """Map a Jellyseerr numeric status code to a human-readable label."""
    if not isinstance(value, int):
        return "Unknown"
    return STATUS_LABELS.get(value, f"Status {value}")
def _normalize_username(value: Any) -> Optional[str]:
if not isinstance(value, str):
return None
normalized = value.strip().lower()
return normalized if normalized else None
def _request_matches_user(request_data: Any, username: str) -> bool:
    """Check whether *username* matches the requester recorded on a request.

    Accepts both dict-shaped requester objects (username / displayName /
    name / email fields) and plain-string requester values. Email-style
    candidates are compared by their local part (text before '@').
    """
    username_norm = _normalize_username(username)
    if not username_norm:
        return False
    requester: Any = None
    if isinstance(request_data, dict):
        requester = request_data.get("requestedBy") or request_data.get("requestedByUser")
        if requester is None:
            requester = request_data.get("requestedByName") or request_data.get("requestedByUsername")
    if isinstance(requester, dict):
        candidates = [requester.get(k) for k in ("username", "displayName", "name", "email")]
    else:
        candidates = [requester]
    for raw in candidates:
        candidate = _normalize_username(raw)
        if not candidate:
            continue
        # split() is a no-op when there is no '@' in the candidate.
        if candidate.split("@", 1)[0] == username_norm:
            return True
    return False
def _normalize_requested_by(request_data: Any) -> Optional[str]:
    """Return a normalized requester name for cache filtering, or None.

    Only the 'requestedBy' field is consulted; email values are reduced to
    their local part (text before '@').
    """
    if not isinstance(request_data, dict):
        return None
    requester = request_data.get("requestedBy")
    if isinstance(requester, dict):
        for field in ("username", "displayName", "name", "email"):
            candidate = _normalize_username(requester.get(field))
            if candidate:
                return candidate.split("@", 1)[0]
        return None
    normalized = _normalize_username(requester)
    if normalized:
        return normalized.split("@", 1)[0]
    return normalized
def _format_upstream_error(service: str, exc: httpx.HTTPStatusError) -> str:
    """Build a concise, loggable message from an upstream HTTP failure."""
    response = exc.response
    if response is None:
        return f"{service} error unknown."
    status = response.status_code
    try:
        detail = json.dumps(response.json(), ensure_ascii=True)
    except ValueError:
        # Body was not JSON; fall back to the raw text.
        detail = response.text
    detail = (detail or "").strip()
    if detail:
        return f"{service} error {status}: {detail}"
    return f"{service} error {status}."
def _request_display_name(request_data: Any) -> Optional[str]:
if not isinstance(request_data, dict):
return None
requested_by = request_data.get("requestedBy")
if isinstance(requested_by, dict):
for key in ("displayName", "username", "name", "email"):
value = requested_by.get(key)
if isinstance(value, str) and value.strip():
return value.strip()
if isinstance(requested_by, str) and requested_by.strip():
return requested_by.strip()
return None
def _parse_request_payload(item: Dict[str, Any]) -> Dict[str, Any]:
    """Flatten a raw Jellyseerr request dict into the cache-row field set.

    Media-level fields take precedence over the top-level duplicates; the
    created timestamp falls back through createdAt -> addedAt -> updatedAt.
    """
    media = item.get("media") or {}
    created = item.get("createdAt") or item.get("addedAt") or item.get("updatedAt")
    return {
        "request_id": item.get("id"),
        "media_id": media.get("id") or item.get("mediaId"),
        "media_type": media.get("mediaType") or item.get("type"),
        "tmdb_id": media.get("tmdbId") or item.get("tmdbId"),
        "status": item.get("status"),
        "title": media.get("title") or media.get("name") or item.get("title") or item.get("name"),
        "year": media.get("year") or item.get("year"),
        "requested_by": _request_display_name(item),
        "requested_by_norm": _normalize_requested_by(item),
        "created_at": created,
        "updated_at": item.get("updatedAt") or created,
    }
def _extract_artwork_paths(item: Dict[str, Any]) -> tuple[Optional[str], Optional[str]]:
media = item.get("media") or {}
poster_path = None
backdrop_path = None
if isinstance(media, dict):
poster_path = media.get("posterPath") or media.get("poster_path")
backdrop_path = media.get("backdropPath") or media.get("backdrop_path")
if not poster_path:
poster_path = item.get("posterPath") or item.get("poster_path")
if not backdrop_path:
backdrop_path = item.get("backdropPath") or item.get("backdrop_path")
return poster_path, backdrop_path
async def _get_request_details(client: JellyseerrClient, request_id: int) -> Optional[Dict[str, Any]]:
    """Fetch full request details from Jellyseerr with a short-lived cache.

    Returns the detail dict, or None when the upstream call fails or the
    response is not a dict. Successful lookups are memoized in _detail_cache
    for CACHE_TTL_SECONDS.
    """
    cache_key = f"request:{request_id}"
    cached = _cache_get(cache_key)
    if isinstance(cached, dict):
        return cached
    try:
        fetched = await client.get_request(str(request_id))
    except httpx.HTTPStatusError:
        # Best-effort: an upstream HTTP error is treated as "no details".
        return None
    if isinstance(fetched, dict):
        _cache_set(cache_key, fetched)
        return fetched
    return None
async def _hydrate_title_from_tmdb(
    client: JellyseerrClient, media_type: Optional[str], tmdb_id: Optional[int]
) -> tuple[Optional[str], Optional[int]]:
    """Fetch the title and release year for a TMDB id via Jellyseerr.

    Returns ``(title, year)``; either element may be None when the lookup
    fails, the media type is not "movie"/"tv", or the date field is missing
    or malformed.
    """

    def _year(date_str: Any) -> Optional[int]:
        # Dates are expected as "YYYY-MM-DD"; guard against malformed values
        # instead of letting ValueError escape into the sync loop (the
        # original only caught httpx.HTTPStatusError, so a bad date string
        # would crash the caller).
        if isinstance(date_str, str) and len(date_str) >= 4:
            try:
                return int(date_str[:4])
            except ValueError:
                return None
        return None

    if not tmdb_id or not media_type:
        return None, None
    try:
        if media_type == "movie":
            details = await client.get_movie(int(tmdb_id))
            if isinstance(details, dict):
                return details.get("title"), _year(details.get("releaseDate"))
        if media_type == "tv":
            details = await client.get_tv(int(tmdb_id))
            if isinstance(details, dict):
                title = details.get("name") or details.get("title")
                return title, _year(details.get("firstAirDate"))
    except httpx.HTTPStatusError:
        return None, None
    return None, None
async def _hydrate_media_details(client: JellyseerrClient, media_id: Optional[int]) -> Optional[Dict[str, Any]]:
    """Fetch the Jellyseerr media record for *media_id*; None on any failure."""
    if not media_id:
        return None
    try:
        details = await client.get_media(int(media_id))
    except httpx.HTTPStatusError:
        return None
    if isinstance(details, dict):
        return details
    return None
async def _hydrate_artwork_from_tmdb(
    client: JellyseerrClient, media_type: Optional[str], tmdb_id: Optional[int]
) -> tuple[Optional[str], Optional[str]]:
    """Look up (poster, backdrop) TMDB paths for a title via Jellyseerr."""
    if not tmdb_id or not media_type:
        return None, None
    try:
        if media_type == "movie":
            details = await client.get_movie(int(tmdb_id))
        elif media_type == "tv":
            details = await client.get_tv(int(tmdb_id))
        else:
            return None, None
    except httpx.HTTPStatusError:
        return None, None
    if not isinstance(details, dict):
        return None, None
    poster = details.get("posterPath") or details.get("poster_path")
    backdrop = details.get("backdropPath") or details.get("backdrop_path")
    return poster, backdrop
def _artwork_url(path: Optional[str], size: str, cache_mode: str) -> Optional[str]:
if not path:
return None
if not path.startswith("/"):
path = f"/{path}"
if cache_mode == "cache":
return f"/images/tmdb?path={quote(path)}&size={size}"
return f"https://image.tmdb.org/t/p/{size}{path}"
def _cache_is_stale(last_updated: Optional[str]) -> bool:
    """Decide whether the persisted requests cache needs a full resync.

    *last_updated* is an ISO-8601 timestamp (possibly with a trailing "Z").
    Missing or unparsable timestamps count as stale. The TTL comes from the
    requests_sync_ttl_minutes runtime setting (default 1440 min, 60 s floor).
    """
    if not last_updated:
        return True
    runtime = get_runtime_settings()
    ttl_seconds = max(60, int(runtime.requests_sync_ttl_minutes or 1440) * 60)
    try:
        # fromisoformat() does not accept "Z" on older Pythons; normalize it.
        parsed = datetime.fromisoformat(last_updated.replace("Z", "+00:00"))
        now = datetime.now(timezone.utc)
        return (now - parsed).total_seconds() > ttl_seconds
    except ValueError:
        return True
def _parse_time(value: Optional[str], fallback_hour: int, fallback_minute: int) -> tuple[int, int]:
if isinstance(value, str) and ":" in value:
parts = value.strip().split(":")
if len(parts) == 2:
try:
hour = int(parts[0])
minute = int(parts[1])
if 0 <= hour <= 23 and 0 <= minute <= 59:
return hour, minute
except ValueError:
pass
return fallback_hour, fallback_minute
def _seconds_until(hour: int, minute: int) -> int:
now = datetime.now(timezone.utc).astimezone()
target = now.replace(hour=hour, minute=minute, second=0, microsecond=0)
if target <= now:
target = target + timedelta(days=1)
return int((target - now).total_seconds())
async def _sync_all_requests(client: JellyseerrClient) -> int:
    """Page through every Jellyseerr request and upsert each into the DB cache.

    Progress is mirrored into the module-level _sync_state dict so callers
    can poll it. Items missing a title/media id are hydrated with extra
    detail, media, and TMDB lookups before being stored. Returns the number
    of rows upserted.
    """
    take = 50  # Jellyseerr page size.
    skip = 0
    stored = 0
    cache_mode = (get_runtime_settings().artwork_cache_mode or "remote").lower()
    logger.info("Jellyseerr sync starting: take=%s", take)
    _sync_state.update(
        {
            "status": "running",
            "stored": 0,
            "total": None,
            "skip": 0,
            "message": "Starting sync",
            "started_at": datetime.now(timezone.utc).isoformat(),
            "finished_at": None,
        }
    )
    while True:
        try:
            response = await client.get_recent_requests(take=take, skip=skip)
        except httpx.HTTPError as exc:
            logger.warning("Jellyseerr sync failed at skip=%s: %s", skip, exc)
            _sync_state.update({"status": "failed", "message": f"Sync failed: {exc}"})
            break
        if not isinstance(response, dict):
            logger.warning("Jellyseerr sync stopped: non-dict response at skip=%s", skip)
            _sync_state.update({"status": "failed", "message": "Invalid response"})
            break
        if _sync_state["total"] is None:
            # Grab the total count once, from whichever field the server used.
            page_info = response.get("pageInfo") or {}
            total = (
                page_info.get("totalResults")
                or page_info.get("total")
                or response.get("totalResults")
                or response.get("total")
            )
            if isinstance(total, int):
                _sync_state["total"] = total
        items = response.get("results") or []
        if not isinstance(items, list) or not items:
            logger.info("Jellyseerr sync completed: no more results at skip=%s", skip)
            break
        for item in items:
            if not isinstance(item, dict):
                continue
            payload = _parse_request_payload(item)
            request_id = payload.get("request_id")
            if isinstance(request_id, int):
                # Pass 1: fetch the full request when the list item is sparse.
                if not payload.get("title") or not payload.get("media_id"):
                    logger.debug("Jellyseerr sync hydrate request_id=%s", request_id)
                    details = await _get_request_details(client, request_id)
                    if isinstance(details, dict):
                        payload = _parse_request_payload(details)
                        item = details
                # Pass 2: still no title — try the media record directly.
                if not payload.get("title") and payload.get("media_id"):
                    media_details = await _hydrate_media_details(client, payload.get("media_id"))
                    if isinstance(media_details, dict):
                        media_title = media_details.get("title") or media_details.get("name")
                        if media_title:
                            payload["title"] = media_title
                        if not payload.get("year") and media_details.get("year"):
                            payload["year"] = media_details.get("year")
                        if not payload.get("tmdb_id") and media_details.get("tmdbId"):
                            payload["tmdb_id"] = media_details.get("tmdbId")
                        if not payload.get("media_type") and media_details.get("mediaType"):
                            payload["media_type"] = media_details.get("mediaType")
                        if isinstance(item, dict):
                            # Merge, preferring non-None values already on the item.
                            existing_media = item.get("media")
                            if isinstance(existing_media, dict):
                                merged = dict(media_details)
                                for key, value in existing_media.items():
                                    if value is not None:
                                        merged[key] = value
                                item["media"] = merged
                            else:
                                item["media"] = media_details
                # In cache mode, refetch details when no artwork paths exist.
                poster_path, backdrop_path = _extract_artwork_paths(item)
                if cache_mode == "cache" and not (poster_path or backdrop_path):
                    details = await _get_request_details(client, request_id)
                    if isinstance(details, dict):
                        item = details
                        payload = _parse_request_payload(details)
            # Pass 3: last resort, resolve the title from TMDB.
            if not payload.get("title") and payload.get("tmdb_id"):
                hydrated_title, hydrated_year = await _hydrate_title_from_tmdb(
                    client, payload.get("media_type"), payload.get("tmdb_id")
                )
                if hydrated_title:
                    payload["title"] = hydrated_title
                if hydrated_year:
                    payload["year"] = hydrated_year
            if not isinstance(payload.get("request_id"), int):
                continue
            payload_json = json.dumps(item, ensure_ascii=True)
            upsert_request_cache(
                request_id=payload.get("request_id"),
                media_id=payload.get("media_id"),
                media_type=payload.get("media_type"),
                status=payload.get("status"),
                title=payload.get("title"),
                year=payload.get("year"),
                requested_by=payload.get("requested_by"),
                requested_by_norm=payload.get("requested_by_norm"),
                created_at=payload.get("created_at"),
                updated_at=payload.get("updated_at"),
                payload_json=payload_json,
            )
            stored += 1
            _sync_state["stored"] = stored
        if len(items) < take:
            # Short page means we reached the end.
            logger.info("Jellyseerr sync completed: stored=%s", stored)
            break
        skip += take
        _sync_state["skip"] = skip
        _sync_state["message"] = f"Synced {stored} requests"
        logger.info("Jellyseerr sync progress: stored=%s skip=%s", stored, skip)
    # NOTE(review): this runs after *every* loop exit, including the failure
    # breaks above, so a "failed" status is overwritten with "completed" and
    # the last-sync marker is refreshed even for partial syncs — confirm
    # whether that is intentional.
    _sync_state.update(
        {
            "status": "completed",
            "stored": stored,
            "message": f"Sync complete: {stored} requests",
            "finished_at": datetime.now(timezone.utc).isoformat(),
        }
    )
    set_setting(_sync_last_key, datetime.now(timezone.utc).isoformat())
    _refresh_recent_cache_from_db()
    return stored
async def _sync_delta_requests(client: JellyseerrClient) -> int:
    """Incrementally sync recently changed Jellyseerr requests into the cache.

    Like _sync_all_requests, but items whose updatedAt matches the cached
    row (and that already have a title) are skipped, and paging stops early
    after two consecutive pages with no changes. Returns the number of rows
    upserted.
    """
    take = 50  # Jellyseerr page size.
    skip = 0
    stored = 0
    unchanged_pages = 0  # Consecutive pages with no changed items.
    cache_mode = (get_runtime_settings().artwork_cache_mode or "remote").lower()
    logger.info("Jellyseerr delta sync starting: take=%s", take)
    _sync_state.update(
        {
            "status": "running",
            "stored": 0,
            "total": None,
            "skip": 0,
            "message": "Starting delta sync",
            "started_at": datetime.now(timezone.utc).isoformat(),
            "finished_at": None,
        }
    )
    while True:
        try:
            response = await client.get_recent_requests(take=take, skip=skip)
        except httpx.HTTPError as exc:
            logger.warning("Jellyseerr delta sync failed at skip=%s: %s", skip, exc)
            _sync_state.update({"status": "failed", "message": f"Delta sync failed: {exc}"})
            break
        if not isinstance(response, dict):
            logger.warning("Jellyseerr delta sync stopped: non-dict response at skip=%s", skip)
            _sync_state.update({"status": "failed", "message": "Invalid response"})
            break
        items = response.get("results") or []
        if not isinstance(items, list) or not items:
            logger.info("Jellyseerr delta sync completed: no more results at skip=%s", skip)
            break
        page_changed = False
        for item in items:
            if not isinstance(item, dict):
                continue
            payload = _parse_request_payload(item)
            request_id = payload.get("request_id")
            if isinstance(request_id, int):
                # Skip items whose cached row is already up to date.
                cached = get_request_cache_by_id(request_id)
                incoming_updated = payload.get("updated_at")
                if cached and incoming_updated and cached.get("updated_at") == incoming_updated and cached.get("title"):
                    continue
                # Pass 1: fetch the full request when the list item is sparse.
                if not payload.get("title") or not payload.get("media_id"):
                    details = await _get_request_details(client, request_id)
                    if isinstance(details, dict):
                        payload = _parse_request_payload(details)
                        item = details
                # Pass 2: still no title — try the media record directly.
                if not payload.get("title") and payload.get("media_id"):
                    media_details = await _hydrate_media_details(client, payload.get("media_id"))
                    if isinstance(media_details, dict):
                        media_title = media_details.get("title") or media_details.get("name")
                        if media_title:
                            payload["title"] = media_title
                        if not payload.get("year") and media_details.get("year"):
                            payload["year"] = media_details.get("year")
                        if not payload.get("tmdb_id") and media_details.get("tmdbId"):
                            payload["tmdb_id"] = media_details.get("tmdbId")
                        if not payload.get("media_type") and media_details.get("mediaType"):
                            payload["media_type"] = media_details.get("mediaType")
                        if isinstance(item, dict):
                            # Merge, preferring non-None values already on the item.
                            existing_media = item.get("media")
                            if isinstance(existing_media, dict):
                                merged = dict(media_details)
                                for key, value in existing_media.items():
                                    if value is not None:
                                        merged[key] = value
                                item["media"] = merged
                            else:
                                item["media"] = media_details
                # In cache mode, refetch details when no artwork paths exist.
                poster_path, backdrop_path = _extract_artwork_paths(item)
                if cache_mode == "cache" and not (poster_path or backdrop_path):
                    details = await _get_request_details(client, request_id)
                    if isinstance(details, dict):
                        payload = _parse_request_payload(details)
                        item = details
            # Pass 3: last resort, resolve the title from TMDB.
            if not payload.get("title") and payload.get("tmdb_id"):
                hydrated_title, hydrated_year = await _hydrate_title_from_tmdb(
                    client, payload.get("media_type"), payload.get("tmdb_id")
                )
                if hydrated_title:
                    payload["title"] = hydrated_title
                if hydrated_year:
                    payload["year"] = hydrated_year
            if not isinstance(payload.get("request_id"), int):
                continue
            payload_json = json.dumps(item, ensure_ascii=True)
            upsert_request_cache(
                request_id=payload.get("request_id"),
                media_id=payload.get("media_id"),
                media_type=payload.get("media_type"),
                status=payload.get("status"),
                title=payload.get("title"),
                year=payload.get("year"),
                requested_by=payload.get("requested_by"),
                requested_by_norm=payload.get("requested_by_norm"),
                created_at=payload.get("created_at"),
                updated_at=payload.get("updated_at"),
                payload_json=payload_json,
            )
            stored += 1
            page_changed = True
            _sync_state["stored"] = stored
        if not page_changed:
            unchanged_pages += 1
        else:
            unchanged_pages = 0
        # Stop on a short page, or after two fully-unchanged pages in a row.
        if len(items) < take or unchanged_pages >= 2:
            logger.info("Jellyseerr delta sync completed: stored=%s", stored)
            break
        skip += take
        _sync_state["skip"] = skip
        _sync_state["message"] = f"Delta synced {stored} requests"
        logger.info("Jellyseerr delta sync progress: stored=%s skip=%s", stored, skip)
    deduped = prune_duplicate_requests_cache()
    if deduped:
        logger.info("Jellyseerr delta sync removed duplicate rows: %s", deduped)
    # NOTE(review): as in _sync_all_requests, this overwrites a "failed"
    # status with "completed" after a failure break — confirm intentional.
    _sync_state.update(
        {
            "status": "completed",
            "stored": stored,
            "message": f"Delta sync complete: {stored} updated",
            "finished_at": datetime.now(timezone.utc).isoformat(),
        }
    )
    set_setting(_sync_last_key, datetime.now(timezone.utc).isoformat())
    _refresh_recent_cache_from_db()
    return stored
async def _prefetch_artwork_cache(client: JellyseerrClient) -> None:
    """Warm the local TMDB image cache for every cached request.

    Walks the request cache in batches; rows without artwork paths are
    hydrated from TMDB (and persisted back) before their poster/backdrop
    images are fetched through the local image cache. Progress is mirrored
    into _artwork_prefetch_state. Requires artwork_cache_mode == "cache".
    """
    runtime = get_runtime_settings()
    cache_mode = (runtime.artwork_cache_mode or "remote").lower()
    if cache_mode != "cache":
        _artwork_prefetch_state.update(
            {
                "status": "failed",
                "message": "Artwork cache mode is not set to cache.",
                "finished_at": datetime.now(timezone.utc).isoformat(),
            }
        )
        return
    total = get_request_cache_count()
    _artwork_prefetch_state.update(
        {
            "status": "running",
            "processed": 0,
            "total": total,
            "message": "Starting artwork prefetch",
            "started_at": datetime.now(timezone.utc).isoformat(),
            "finished_at": None,
        }
    )
    offset = 0
    limit = 200  # DB batch size.
    processed = 0
    while True:
        batch = get_request_cache_payloads(limit=limit, offset=offset)
        if not batch:
            break
        for row in batch:
            payload = row.get("payload")
            if not isinstance(payload, dict):
                # Count malformed rows as processed so totals still add up.
                processed += 1
                continue
            poster_path, backdrop_path = _extract_artwork_paths(payload)
            if not (poster_path or backdrop_path) and client.configured():
                media = payload.get("media") or {}
                tmdb_id = media.get("tmdbId") or payload.get("tmdbId")
                media_type = media.get("mediaType") or payload.get("type")
                if tmdb_id and media_type:
                    hydrated_poster, hydrated_backdrop = await _hydrate_artwork_from_tmdb(
                        client, media_type, tmdb_id
                    )
                    poster_path = poster_path or hydrated_poster
                    backdrop_path = backdrop_path or hydrated_backdrop
                    if hydrated_poster or hydrated_backdrop:
                        # Persist the hydrated paths back into the cache row.
                        media = dict(media) if isinstance(media, dict) else {}
                        if hydrated_poster:
                            media["posterPath"] = hydrated_poster
                        if hydrated_backdrop:
                            media["backdropPath"] = hydrated_backdrop
                        payload["media"] = media
                        parsed = _parse_request_payload(payload)
                        request_id = parsed.get("request_id")
                        if isinstance(request_id, int):
                            upsert_request_cache(
                                request_id=request_id,
                                media_id=parsed.get("media_id"),
                                media_type=parsed.get("media_type"),
                                status=parsed.get("status"),
                                title=parsed.get("title"),
                                year=parsed.get("year"),
                                requested_by=parsed.get("requested_by"),
                                requested_by_norm=parsed.get("requested_by_norm"),
                                created_at=parsed.get("created_at"),
                                updated_at=parsed.get("updated_at"),
                                payload_json=json.dumps(payload, ensure_ascii=True),
                            )
            # Best-effort image fetches: HTTP failures are ignored.
            if poster_path:
                try:
                    await cache_tmdb_image(poster_path, "w185")
                    await cache_tmdb_image(poster_path, "w342")
                except httpx.HTTPError:
                    pass
            if backdrop_path:
                try:
                    await cache_tmdb_image(backdrop_path, "w780")
                except httpx.HTTPError:
                    pass
            processed += 1
            if processed % 25 == 0:
                _artwork_prefetch_state.update(
                    {"processed": processed, "message": f"Cached artwork for {processed} requests"}
                )
        offset += limit
    _artwork_prefetch_state.update(
        {
            "status": "completed",
            "processed": processed,
            "message": f"Artwork cached for {processed} requests",
            "finished_at": datetime.now(timezone.utc).isoformat(),
        }
    )
async def start_artwork_prefetch(base_url: Optional[str], api_key: Optional[str]) -> Dict[str, Any]:
    """Start the artwork prefetch background task if one is not running.

    Returns a snapshot copy of _artwork_prefetch_state.
    """
    global _artwork_prefetch_task
    if _artwork_prefetch_task and not _artwork_prefetch_task.done():
        # A prefetch is already in flight; just report its state.
        return dict(_artwork_prefetch_state)
    client = JellyseerrClient(base_url, api_key)
    _artwork_prefetch_state.update(
        {
            "status": "running",
            "processed": 0,
            "total": get_request_cache_count(),
            "message": "Starting artwork prefetch",
            "started_at": datetime.now(timezone.utc).isoformat(),
            "finished_at": None,
        }
    )

    async def _runner() -> None:
        # Wrapper so unexpected failures are recorded in the shared state
        # instead of vanishing with the task.
        try:
            await _prefetch_artwork_cache(client)
        except Exception:
            logger.exception("Artwork prefetch failed")
            _artwork_prefetch_state.update(
                {
                    "status": "failed",
                    "message": "Artwork prefetch failed.",
                    "finished_at": datetime.now(timezone.utc).isoformat(),
                }
            )

    _artwork_prefetch_task = asyncio.create_task(_runner())
    return dict(_artwork_prefetch_state)
def get_artwork_prefetch_state() -> Dict[str, Any]:
    """Return a snapshot copy of the artwork prefetch progress state."""
    snapshot = dict(_artwork_prefetch_state)
    return snapshot
async def _ensure_requests_cache(client: JellyseerrClient) -> None:
    """Run a full sync when the persisted requests cache is stale or empty."""
    last_sync = get_setting(_sync_last_key)
    # Fall back to the newest cached row when no sync timestamp is recorded.
    last_updated = last_sync or get_request_cache_last_updated()
    if _cache_is_stale(last_updated):
        logger.info("Requests cache stale or empty, starting sync.")
        await _sync_all_requests(client)
    else:
        logger.debug("Requests cache fresh: last_sync=%s", last_updated)
def _refresh_recent_cache_from_db() -> None:
    """Reload the in-memory recent-requests cache from the DB cache table.

    Only rows newer than RECENT_CACHE_MAX_DAYS are kept in memory.
    """
    since_iso = (datetime.now(timezone.utc) - timedelta(days=RECENT_CACHE_MAX_DAYS)).isoformat()
    items = get_cached_requests_since(since_iso)
    _recent_cache["items"] = items
    _recent_cache["updated_at"] = datetime.now(timezone.utc).isoformat()
def _recent_cache_stale() -> bool:
    """True when the in-memory recent cache is missing, unparsable, or expired."""
    stamp = _recent_cache.get("updated_at")
    if not stamp:
        return True
    try:
        refreshed_at = datetime.fromisoformat(stamp)
    except ValueError:
        return True
    age_seconds = (datetime.now(timezone.utc) - refreshed_at).total_seconds()
    return age_seconds > RECENT_CACHE_TTL_SECONDS
def _get_recent_from_cache(
    requested_by_norm: Optional[str],
    limit: int,
    offset: int,
    since_iso: Optional[str],
) -> List[Dict[str, Any]]:
    """Filter the in-memory recent cache by requester and age, then page."""
    items = _recent_cache.get("items") or []

    def _keep(entry: Dict[str, Any]) -> bool:
        if requested_by_norm and entry.get("requested_by_norm") != requested_by_norm:
            return False
        created = entry.get("created_at")
        # ISO-8601 strings compare lexicographically in chronological order
        # (assumes a uniform stored format — TODO confirm).
        if since_iso and created and created < since_iso:
            return False
        return True

    matches = [entry for entry in items if _keep(entry)]
    return matches[offset : offset + limit]
async def startup_warmup_requests_cache() -> None:
    """On startup, populate the requests cache if Jellyseerr is configured.

    HTTP failures are logged and swallowed so application startup never
    blocks on the upstream service.
    """
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if not client.configured():
        return
    try:
        await _ensure_requests_cache(client)
    except httpx.HTTPError as exc:
        logger.warning("Requests warmup skipped: %s", exc)
        return
    _refresh_recent_cache_from_db()
async def run_requests_poll_loop() -> None:
    """Background loop: keep the requests cache fresh at a configured interval.

    The interval (requests_poll_interval_seconds, 60 s floor) and client
    settings are re-read each iteration so runtime config changes apply
    without a restart.
    """
    while True:
        runtime = get_runtime_settings()
        interval = max(60, int(runtime.requests_poll_interval_seconds or 300))
        client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
        if client.configured():
            try:
                await _ensure_requests_cache(client)
            except httpx.HTTPError as exc:
                logger.debug("Requests poll skipped: %s", exc)
        await asyncio.sleep(interval)
async def run_requests_delta_loop() -> None:
    """Background loop: run a delta sync on a configurable interval.

    Skips an iteration when another sync task is already running. The
    interval comes from requests_delta_sync_interval_minutes (60 s floor).
    """
    while True:
        runtime = get_runtime_settings()
        interval = max(60, int(runtime.requests_delta_sync_interval_minutes or 5) * 60)
        client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
        if client.configured():
            if _sync_task and not _sync_task.done():
                logger.debug("Delta sync skipped: another sync is running.")
            else:
                try:
                    await _sync_delta_requests(client)
                except httpx.HTTPError as exc:
                    logger.debug("Delta sync skipped: %s", exc)
        await asyncio.sleep(interval)
async def run_daily_requests_full_sync() -> None:
    """Background loop: run a full sync once a day at the configured time."""
    while True:
        runtime = get_runtime_settings()
        hour, minute = _parse_time(runtime.requests_full_sync_time, 0, 0)
        await asyncio.sleep(_seconds_until(hour, minute))
        # Re-read settings after the (potentially very long) sleep.
        runtime = get_runtime_settings()
        client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
        if not client.configured():
            logger.info("Daily full sync skipped: Jellyseerr not configured.")
            continue
        if _sync_task and not _sync_task.done():
            logger.info("Daily full sync skipped: another sync is running.")
            continue
        try:
            await _sync_all_requests(client)
        except httpx.HTTPError as exc:
            logger.warning("Daily full sync failed: %s", exc)
async def run_daily_db_cleanup() -> None:
    """Background loop: prune old history rows once a day at the configured time.

    Retention (days) comes from requests_cleanup_days, default 90.
    """
    while True:
        runtime = get_runtime_settings()
        hour, minute = _parse_time(runtime.requests_cleanup_time, 2, 0)
        await asyncio.sleep(_seconds_until(hour, minute))
        # Re-read settings after the sleep so retention changes apply.
        runtime = get_runtime_settings()
        result = cleanup_history(int(runtime.requests_cleanup_days or 90))
        logger.info("Daily cleanup complete: %s", result)
async def start_requests_sync(base_url: Optional[str], api_key: Optional[str]) -> Dict[str, Any]:
    """Kick off a full Jellyseerr sync in the background.

    Returns a snapshot of _sync_state. If a sync is already running the
    current state is returned unchanged; if *base_url* is missing the state
    is marked failed.
    """
    global _sync_task
    if _sync_task and not _sync_task.done():
        return dict(_sync_state)
    if not base_url:
        _sync_state.update({"status": "failed", "message": "Jellyseerr not configured"})
        return dict(_sync_state)
    client = JellyseerrClient(base_url, api_key)
    _sync_state.update(
        {
            "status": "running",
            "stored": 0,
            "total": None,
            "skip": 0,
            "message": "Starting sync",
            "started_at": datetime.now(timezone.utc).isoformat(),
            "finished_at": None,
        }
    )

    async def _runner() -> None:
        # Wrapper so unexpected failures are recorded instead of lost.
        try:
            await _sync_all_requests(client)
        except Exception as exc:
            logger.exception("Jellyseerr sync failed")
            _sync_state.update(
                {
                    "status": "failed",
                    "message": f"Sync failed: {exc}",
                    "finished_at": datetime.now(timezone.utc).isoformat(),
                }
            )

    _sync_task = asyncio.create_task(_runner())
    return dict(_sync_state)
async def start_requests_delta_sync(base_url: Optional[str], api_key: Optional[str]) -> Dict[str, Any]:
    """Kick off a delta Jellyseerr sync in the background.

    Returns a snapshot of _sync_state. If a sync is already running the
    current state is returned unchanged; if *base_url* is missing the state
    is marked failed.
    """
    global _sync_task
    if _sync_task and not _sync_task.done():
        return dict(_sync_state)
    if not base_url:
        _sync_state.update({"status": "failed", "message": "Jellyseerr not configured"})
        return dict(_sync_state)
    client = JellyseerrClient(base_url, api_key)
    _sync_state.update(
        {
            "status": "running",
            "stored": 0,
            "total": None,
            "skip": 0,
            "message": "Starting delta sync",
            "started_at": datetime.now(timezone.utc).isoformat(),
            "finished_at": None,
        }
    )

    async def _runner() -> None:
        # Wrapper so unexpected failures are recorded instead of lost.
        try:
            await _sync_delta_requests(client)
        except Exception as exc:
            logger.exception("Jellyseerr delta sync failed")
            _sync_state.update(
                {
                    "status": "failed",
                    "message": f"Delta sync failed: {exc}",
                    "finished_at": datetime.now(timezone.utc).isoformat(),
                }
            )

    _sync_task = asyncio.create_task(_runner())
    return dict(_sync_state)
def get_requests_sync_state() -> Dict[str, Any]:
    """Return a snapshot copy of the sync progress state."""
    snapshot = dict(_sync_state)
    return snapshot
async def _ensure_request_access(
    client: JellyseerrClient, request_id: int, user: Dict[str, str]
) -> None:
    """Raise 403 unless *user* owns the request (admins always pass).

    When the data-source mode allows it, ownership is checked against the
    local cache first; otherwise (or on a cache miss) the request details
    are fetched from Jellyseerr.

    Raises:
        HTTPException: 403 when the request is not accessible for this user.
    """
    if user.get("role") == "admin":
        return
    runtime = get_runtime_settings()
    mode = (runtime.requests_data_source or "prefer_cache").lower()
    cached = get_request_cache_payload(request_id)
    if mode != "always_js" and cached is not None:
        logger.debug("access cache hit: request_id=%s mode=%s", request_id, mode)
        if _request_matches_user(cached, user.get("username", "")):
            return
        raise HTTPException(status_code=403, detail="Request not accessible for this user")
    logger.debug("access cache miss: request_id=%s mode=%s", request_id, mode)
    details = await _get_request_details(client, request_id)
    if details is None or not _request_matches_user(details, user.get("username", "")):
        raise HTTPException(status_code=403, detail="Request not accessible for this user")
def _build_recent_map(response: Dict[str, Any]) -> Dict[int, Dict[str, Any]]:
    """Index request summaries by media id from a Jellyseerr list response."""
    mapping: Dict[int, Dict[str, Any]] = {}
    for entry in response.get("results", []):
        media = entry.get("media") or {}
        media_id = media.get("id") or entry.get("mediaId")
        request_id = entry.get("id")
        if not (isinstance(media_id, int) and isinstance(request_id, int)):
            continue
        status = entry.get("status")
        mapping[media_id] = {
            "requestId": request_id,
            "status": status,
            "statusLabel": _status_label(status),
        }
    return mapping
def _queue_records(queue: Any) -> List[Dict[str, Any]]:
if isinstance(queue, dict):
records = queue.get("records")
if isinstance(records, list):
return records
if isinstance(queue, list):
return queue
return []
def _download_ids(records: List[Dict[str, Any]]) -> List[str]:
ids = []
for record in records:
download_id = record.get("downloadId") or record.get("download_id")
if isinstance(download_id, str) and download_id:
ids.append(download_id)
return ids
def _normalize_categories(categories: Any) -> List[str]:
names = []
if isinstance(categories, list):
for cat in categories:
if isinstance(cat, dict):
name = cat.get("name")
if isinstance(name, str):
names.append(name.lower())
return names
def _filter_prowlarr_results(results: Any, request_type: RequestType) -> List[Dict[str, Any]]:
    """Keep indexer results matching the request's media type, top 10 by seeders."""
    if not isinstance(results, list):
        return []
    kept: List[Dict[str, Any]] = []
    for entry in results:
        if not isinstance(entry, dict):
            continue
        categories = _normalize_categories(entry.get("categories"))
        if request_type == RequestType.movie and not any("movies" in name for name in categories):
            continue
        if request_type == RequestType.tv and not any(
            name.startswith("tv") or "tv/" in name for name in categories
        ):
            continue
        kept.append(
            {
                "title": entry.get("title"),
                "indexer": entry.get("indexer"),
                "indexerId": entry.get("indexerId"),
                "guid": entry.get("guid"),
                "size": entry.get("size"),
                "seeders": entry.get("seeders"),
                "leechers": entry.get("leechers"),
                "publishDate": entry.get("publishDate"),
                "infoUrl": entry.get("infoUrl"),
                "downloadUrl": entry.get("downloadUrl"),
                "protocol": entry.get("protocol"),
            }
        )
    # Missing seeder counts sort as zero.
    kept.sort(key=lambda result: result.get("seeders") or 0, reverse=True)
    return kept[:10]
def _missing_episode_ids_by_season(episodes: Any) -> Dict[int, List[int]]:
if not isinstance(episodes, list):
return {}
grouped: Dict[int, List[int]] = {}
for episode in episodes:
if not isinstance(episode, dict):
continue
if not episode.get("monitored", True):
continue
if episode.get("hasFile"):
continue
season_number = episode.get("seasonNumber")
episode_id = episode.get("id")
if isinstance(season_number, int) and isinstance(episode_id, int):
grouped.setdefault(season_number, []).append(episode_id)
return grouped
async def _resolve_root_folder_path(client: Any, root_folder: str, service_name: str) -> str:
    """Resolve a root-folder setting to a filesystem path.

    *root_folder* may be a literal path (returned unchanged) or a numeric
    folder id, which is resolved against the *arr client's root-folder list.

    Raises:
        HTTPException: 400 when a numeric id matches no known root folder.
    """
    if root_folder.isdigit():
        folders = await client.get_root_folders()
        if isinstance(folders, list):
            for folder in folders:
                # NOTE(review): assumes each entry is a dict — a non-dict
                # item would raise AttributeError here; confirm upstream.
                if folder.get("id") == int(root_folder):
                    path = folder.get("path")
                    if isinstance(path, str) and path:
                        return path
        raise HTTPException(status_code=400, detail=f"{service_name} root folder id {root_folder} not found")
    return root_folder
@router.get("/{request_id}/snapshot", response_model=Snapshot)
async def get_snapshot(request_id: str, user: Dict[str, str] = Depends(get_current_user)) -> Snapshot:
    """Return the current aggregated snapshot for a single request.

    When Jellyseerr is configured, the caller's access to this request
    is verified first; otherwise the snapshot is served directly.
    """
    settings = get_runtime_settings()
    jellyseerr = JellyseerrClient(settings.jellyseerr_base_url, settings.jellyseerr_api_key)
    if jellyseerr.configured():
        await _ensure_request_access(jellyseerr, int(request_id), user)
    return await build_snapshot(request_id)
@router.get("/recent")
async def recent_requests(
    take: int = 6,
    skip: int = 0,
    days: int = 90,
    user: Dict[str, str] = Depends(get_current_user),
) -> dict:
    """List recent requests for the dashboard, hydrating missing metadata.

    Serves rows from the local request cache (scoped to the caller unless
    they are an admin), then — depending on the configured
    ``requests_data_source`` mode — fills in missing titles/years and
    artwork from cached payloads, Jellyseerr request details, or TMDB,
    writing hydrated data back to the request cache.

    Args:
        take: Maximum number of rows to return.
        skip: Number of rows to skip (pagination offset).
        days: Only include requests newer than this many days; a value
            <= 0 disables the cutoff.
        user: Authenticated user (injected).

    Raises:
        HTTPException: 400 when Jellyseerr is not configured, 502 when
            the initial cache refresh fails upstream.
    """
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if not client.configured():
        raise HTTPException(status_code=400, detail="Jellyseerr not configured")
    try:
        await _ensure_requests_cache(client)
    except httpx.HTTPStatusError as exc:
        raise HTTPException(status_code=502, detail=str(exc)) from exc
    # Admins see everyone's requests; other users only their own.
    username_norm = _normalize_username(user.get("username", ""))
    requested_by = None if user.get("role") == "admin" else username_norm
    since_iso = None
    if days > 0:
        since_iso = (datetime.now(timezone.utc) - timedelta(days=days)).isoformat()
    if _recent_cache_stale():
        _refresh_recent_cache_from_db()
    rows = _get_recent_from_cache(requested_by, take, skip, since_iso)
    cache_mode = (runtime.artwork_cache_mode or "remote").lower()
    # Data-source mode controls how aggressively we reach out remotely:
    # "always_js" refetches request details from Jellyseerr every time,
    # while "prefer_cache" only hydrates what the local cache is missing.
    mode = (runtime.requests_data_source or "prefer_cache").lower()
    allow_remote = mode == "always_js"
    allow_title_hydrate = mode == "prefer_cache"
    allow_artwork_hydrate = allow_remote or allow_title_hydrate
    results = []
    for row in rows:
        status = row.get("status")
        title = row.get("title")
        # Cached rows may carry a synthetic "request <id>" placeholder title.
        title_is_placeholder = (
            isinstance(title, str)
            and row.get("request_id") is not None
            and title.strip().lower() == f"request {row.get('request_id')}"
        )
        year = row.get("year")
        details = None
        # Prefer the locally cached Jellyseerr payload unless remote-only mode.
        if row.get("request_id") and mode != "always_js":
            cached_payload = get_request_cache_payload(int(row["request_id"]))
            if isinstance(cached_payload, dict):
                details = cached_payload
        if (not title or title_is_placeholder) and row.get("request_id"):
            if details is None and (allow_remote or allow_title_hydrate):
                details = await _get_request_details(client, int(row["request_id"]))
            if isinstance(details, dict):
                payload = _parse_request_payload(details)
                title = payload.get("title") or title
                year = payload.get("year") or year
                # Fall back to TMDB when the request payload lacks a title.
                if not title and payload.get("tmdb_id") and (allow_remote or allow_title_hydrate):
                    hydrated_title, hydrated_year = await _hydrate_title_from_tmdb(
                        client, payload.get("media_type"), payload.get("tmdb_id")
                    )
                    if hydrated_title:
                        title = hydrated_title
                    if hydrated_year:
                        year = hydrated_year
                # Persist remotely fetched details so later calls hit the cache.
                if allow_remote and isinstance(payload.get("request_id"), int):
                    upsert_request_cache(
                        request_id=payload.get("request_id"),
                        media_id=payload.get("media_id"),
                        media_type=payload.get("media_type"),
                        status=payload.get("status"),
                        title=title or payload.get("title"),
                        year=year or payload.get("year"),
                        requested_by=payload.get("requested_by"),
                        requested_by_norm=payload.get("requested_by_norm"),
                        created_at=payload.get("created_at"),
                        updated_at=payload.get("updated_at"),
                        payload_json=json.dumps(details, ensure_ascii=True),
                    )
                row["title"] = title
                row["year"] = year
                row["media_type"] = payload.get("media_type") or row.get("media_type")
                # NOTE(review): the local `status` variable captured above is not
                # refreshed here, so the response's status/statusLabel may reflect
                # the pre-hydration value — confirm that this is intended.
                row["status"] = payload.get("status") or row.get("status")
        if details is None and row.get("request_id") and allow_remote:
            details = await _get_request_details(client, int(row["request_id"]))
        poster_path = None
        backdrop_path = None
        if isinstance(details, dict):
            media = details.get("media") or {}
            if isinstance(media, dict):
                poster_path = media.get("posterPath") or media.get("poster_path")
                backdrop_path = media.get("backdropPath") or media.get("backdrop_path")
                tmdb_id = media.get("tmdbId") or details.get("tmdbId")
            else:
                tmdb_id = details.get("tmdbId")
            media_type = media.get("mediaType") if isinstance(media, dict) else None
            media_type = media_type or details.get("type") or row.get("media_type")
            # Hydrate missing artwork from TMDB and write it back to the cache.
            if not poster_path and tmdb_id and allow_artwork_hydrate:
                hydrated_poster, hydrated_backdrop = await _hydrate_artwork_from_tmdb(
                    client, media_type, tmdb_id
                )
                poster_path = poster_path or hydrated_poster
                backdrop_path = backdrop_path or hydrated_backdrop
                if (hydrated_poster or hydrated_backdrop) and isinstance(details, dict):
                    # Copy before mutating so the cached payload gets the new art.
                    media = dict(media) if isinstance(media, dict) else {}
                    if hydrated_poster:
                        media["posterPath"] = hydrated_poster
                    if hydrated_backdrop:
                        media["backdropPath"] = hydrated_backdrop
                    details["media"] = media
                    payload = _parse_request_payload(details)
                    if isinstance(payload.get("request_id"), int):
                        upsert_request_cache(
                            request_id=payload.get("request_id"),
                            media_id=payload.get("media_id"),
                            media_type=payload.get("media_type"),
                            status=payload.get("status"),
                            title=payload.get("title"),
                            year=payload.get("year"),
                            requested_by=payload.get("requested_by"),
                            requested_by_norm=payload.get("requested_by_norm"),
                            created_at=payload.get("created_at"),
                            updated_at=payload.get("updated_at"),
                            payload_json=json.dumps(details, ensure_ascii=True),
                        )
        results.append(
            {
                "id": row.get("request_id"),
                "title": title,
                "year": year,
                "type": row.get("media_type"),
                "status": status,
                "statusLabel": _status_label(status),
                "mediaId": row.get("media_id"),
                "artwork": {
                    "poster_url": _artwork_url(poster_path, "w185", cache_mode),
                    "backdrop_url": _artwork_url(backdrop_path, "w780", cache_mode),
                },
            }
        )
    return {"results": results}
@router.get("/search")
async def search_requests(
    query: str, page: int = 1, user: Dict[str, str] = Depends(get_current_user)
) -> dict:
    """Search Jellyseerr for media and annotate results with request state.

    Each result is enriched with the matching request id/status taken
    either from the Jellyseerr ``mediaInfo.requests`` payload or from the
    local request cache. Non-admin callers only see results tied to a
    request they made themselves.

    Raises:
        HTTPException: 400 when Jellyseerr is not configured, 502 when
            the upstream search call fails.
    """
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if not client.configured():
        raise HTTPException(status_code=400, detail="Jellyseerr not configured")
    try:
        response = await client.search(query=query, page=page)
    except httpx.HTTPStatusError as exc:
        raise HTTPException(status_code=502, detail=str(exc)) from exc
    if not isinstance(response, dict):
        return {"results": []}
    # Cache refresh is best-effort here; search still works without it.
    try:
        await _ensure_requests_cache(client)
    except httpx.HTTPStatusError:
        pass
    results = []
    for item in response.get("results", []):
        media_type = item.get("mediaType")
        title = item.get("title") or item.get("name")
        year = None
        # Derive year from the release date; firstAirDate (TV) wins if both set.
        if item.get("releaseDate"):
            year = int(item["releaseDate"][:4])
        if item.get("firstAirDate"):
            year = int(item["firstAirDate"][:4])
        request_id = None
        status = None
        status_label = None
        media_info = item.get("mediaInfo") or {}
        media_info_id = media_info.get("id")
        requests = media_info.get("requests")
        if isinstance(requests, list) and requests:
            # Jellyseerr already embedded the request(s); use the first one.
            request_id = requests[0].get("id")
            status = requests[0].get("status")
            status_label = _status_label(status)
        elif isinstance(media_info_id, int):
            # Otherwise try the local cache, scoped to the caller for non-admins.
            username_norm = _normalize_username(user.get("username", ""))
            requested_by = None if user.get("role") == "admin" else username_norm
            cached = get_cached_request_by_media_id(media_info_id, requested_by_norm=requested_by)
            if cached:
                request_id = cached.get("request_id")
                status = cached.get("status")
                status_label = _status_label(status)
        if user.get("role") != "admin":
            # Non-admins only see items tied to one of their own requests.
            if isinstance(request_id, int):
                details = await _get_request_details(client, request_id)
                if not _request_matches_user(details, user.get("username", "")):
                    continue
            else:
                continue
        results.append(
            {
                "title": title,
                "year": year,
                "type": media_type,
                "tmdbId": item.get("id"),
                "requestId": request_id,
                "status": status,
                "statusLabel": status_label,
            }
        )
    return {"results": results}
@router.post("/{request_id}/ai/triage", response_model=TriageResult)
async def ai_triage(request_id: str, user: Dict[str, str] = Depends(get_current_user)) -> TriageResult:
    """Build a fresh snapshot for the request and run AI triage over it.

    Access to the request is verified first when Jellyseerr is configured.
    """
    settings = get_runtime_settings()
    jellyseerr = JellyseerrClient(settings.jellyseerr_base_url, settings.jellyseerr_api_key)
    if jellyseerr.configured():
        await _ensure_request_access(jellyseerr, int(request_id), user)
    return triage_snapshot(await build_snapshot(request_id))
@router.post("/{request_id}/actions/search")
async def action_search(request_id: str, user: Dict[str, str] = Depends(get_current_user)) -> dict:
    """Query Prowlarr for release candidates for this request.

    Builds a title(+year) query from the request snapshot, filters the
    raw Prowlarr hits to the request's media type, and records the
    action. Upstream Prowlarr errors are swallowed and reported as zero
    releases instead of failing the endpoint.
    """
    settings = get_runtime_settings()
    jellyseerr = JellyseerrClient(settings.jellyseerr_base_url, settings.jellyseerr_api_key)
    if jellyseerr.configured():
        await _ensure_request_access(jellyseerr, int(request_id), user)
    snapshot = await build_snapshot(request_id)

    prowlarr = ProwlarrClient(settings.prowlarr_base_url, settings.prowlarr_api_key)
    if not prowlarr.configured():
        raise HTTPException(status_code=400, detail="Prowlarr not configured")

    # Append the year when known to narrow down matches.
    query = f"{snapshot.title} {snapshot.year}" if snapshot.year else snapshot.title
    releases: List[Dict[str, Any]] = []
    try:
        raw_results = await prowlarr.search(query=query)
        releases = _filter_prowlarr_results(raw_results, snapshot.request_type)
    except httpx.HTTPStatusError:
        releases = []
    await asyncio.to_thread(
        save_action,
        request_id,
        "search_releases",
        "Search and choose a download",
        "ok",
        f"Found {len(releases)} releases.",
    )
    return {"status": "ok", "releases": releases}
@router.post("/{request_id}/actions/search_auto")
async def action_search_auto(request_id: str, user: Dict[str, str] = Depends(get_current_user)) -> dict:
    """Trigger an automatic search in Sonarr/Radarr for this request.

    For TV requests, searches only the seasons that still have missing
    monitored episodes; for movies, triggers a Radarr movie search. The
    action is recorded in either case.

    Raises:
        HTTPException: 404 when the item is not present in Sonarr/Radarr,
            400 for an unconfigured service or unknown request type.
    """
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if client.configured():
        await _ensure_request_access(client, int(request_id), user)
    snapshot = await build_snapshot(request_id)
    arr_item = snapshot.raw.get("arr", {}).get("item")
    if not isinstance(arr_item, dict):
        raise HTTPException(status_code=404, detail="Item not found in Sonarr/Radarr")
    if snapshot.request_type.value == "tv":
        # From here on `client` refers to Sonarr, not Jellyseerr.
        client = SonarrClient(runtime.sonarr_base_url, runtime.sonarr_api_key)
        if not client.configured():
            raise HTTPException(status_code=400, detail="Sonarr not configured")
        episodes = await client.get_episodes(int(arr_item["id"]))
        missing_by_season = _missing_episode_ids_by_season(episodes)
        if not missing_by_season:
            message = "No missing monitored episodes found."
            await asyncio.to_thread(
                save_action, request_id, "search_auto", "Search and auto-download", "ok", message
            )
            return {"status": "ok", "message": message, "searched": []}
        responses = []
        # Kick off one episode search per season, in ascending season order.
        for season_number in sorted(missing_by_season.keys()):
            episode_ids = missing_by_season[season_number]
            if episode_ids:
                response = await client.search_episodes(episode_ids)
                responses.append(
                    {"season": season_number, "episodeCount": len(episode_ids), "response": response}
                )
        message = "Search sent to Sonarr."
        await asyncio.to_thread(
            save_action, request_id, "search_auto", "Search and auto-download", "ok", message
        )
        return {"status": "ok", "message": message, "searched": responses}
    if snapshot.request_type.value == "movie":
        client = RadarrClient(runtime.radarr_base_url, runtime.radarr_api_key)
        if not client.configured():
            raise HTTPException(status_code=400, detail="Radarr not configured")
        response = await client.search(int(arr_item["id"]))
        message = "Search sent to Radarr."
        await asyncio.to_thread(
            save_action, request_id, "search_auto", "Search and auto-download", "ok", message
        )
        return {"status": "ok", "message": message, "response": response}
    raise HTTPException(status_code=400, detail="Unknown request type")
@router.post("/{request_id}/actions/qbit/resume")
async def action_resume(request_id: str, user: Dict[str, str] = Depends(get_current_user)) -> dict:
    """Force-resume the qBittorrent torrents backing this request.

    No-ops (while still recording an "ok" action) when the request has no
    queued downloads, or when every matched torrent is already in a
    downloading state.

    Raises:
        HTTPException: 400 when qBittorrent is not configured, 502 when
            the qBittorrent API call fails.
    """
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if client.configured():
        await _ensure_request_access(client, int(request_id), user)
    snapshot = await build_snapshot(request_id)
    queue = snapshot.raw.get("arr", {}).get("queue")
    download_ids = _download_ids(_queue_records(queue))
    if not download_ids:
        message = "Nothing to force resume."
        await asyncio.to_thread(
            save_action, request_id, "resume_torrent", "Resume torrent", "ok", message
        )
        return {"status": "ok", "message": message}
    # Reuse the settings fetched above (previously re-fetched redundantly);
    # use a dedicated name for the qBittorrent client instead of reusing
    # the Jellyseerr `client` variable.
    qbit = QBittorrentClient(
        runtime.qbittorrent_base_url,
        runtime.qbittorrent_username,
        runtime.qbittorrent_password,
    )
    if not qbit.configured():
        raise HTTPException(status_code=400, detail="qBittorrent not configured")
    try:
        torrents = await qbit.get_torrents_by_hashes("|".join(download_ids))
        torrent_list = torrents if isinstance(torrents, list) else []
        # States in which a force-resume would be redundant.
        downloading_states = {"downloading", "stalleddl", "queueddl", "checkingdl", "forceddl"}
        if torrent_list and all(
            str(t.get("state", "")).lower() in downloading_states for t in torrent_list
        ):
            message = "No need to force resume. Already downloading."
            await asyncio.to_thread(
                save_action, request_id, "resume_torrent", "Resume torrent", "ok", message
            )
            return {"status": "ok", "message": message}
        await qbit.resume_torrents("|".join(download_ids))
    except httpx.HTTPStatusError as exc:
        raise HTTPException(status_code=502, detail=str(exc)) from exc
    message = "Resume sent to qBittorrent."
    await asyncio.to_thread(
        save_action, request_id, "resume_torrent", "Resume torrent", "ok", message
    )
    return {"status": "ok", "resumed": download_ids, "message": message}
@router.post("/{request_id}/actions/readd")
async def action_readd(request_id: str, user: Dict[str, str] = Depends(get_current_user)) -> dict:
    """Re-add the requested series/movie to Sonarr or Radarr.

    Looks the item up by ``tvdbId`` (TV) or ``tmdbId`` (movie); if it is
    already present in the target service a no-op success is recorded.
    Requires the service's quality profile id and root folder to be
    configured. Upstream failures are logged as failed actions and
    surfaced as 502 responses.

    Raises:
        HTTPException: 400 for missing ids, missing profile/root config,
            unconfigured service, or unknown request type; 502 on
            Sonarr/Radarr API errors.
    """
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if client.configured():
        await _ensure_request_access(client, int(request_id), user)
    snapshot = await build_snapshot(request_id)
    jelly = snapshot.raw.get("jellyseerr") or {}
    media = jelly.get("media") or {}
    if snapshot.request_type.value == "tv":
        tvdb_id = media.get("tvdbId")
        if not tvdb_id:
            raise HTTPException(status_code=400, detail="Missing tvdbId for series")
        title = snapshot.title
        # Fall back through the Jellyseerr payload for a usable title.
        if title in {None, "", "Unknown"}:
            title = (
                media.get("name")
                or media.get("title")
                or jelly.get("title")
                or jelly.get("name")
            )
        if not runtime.sonarr_quality_profile_id or not runtime.sonarr_root_folder:
            raise HTTPException(status_code=400, detail="Sonarr profile/root not configured")
        # From here on `client` refers to Sonarr, not Jellyseerr.
        client = SonarrClient(runtime.sonarr_base_url, runtime.sonarr_api_key)
        if not client.configured():
            raise HTTPException(status_code=400, detail="Sonarr not configured")
        try:
            existing = await client.get_series_by_tvdb_id(int(tvdb_id))
        except httpx.HTTPStatusError as exc:
            detail = _format_upstream_error("Sonarr", exc)
            await asyncio.to_thread(
                save_action, request_id, "readd_to_arr", "Re-add to Sonarr/Radarr", "failed", detail
            )
            raise HTTPException(status_code=502, detail=detail) from exc
        if isinstance(existing, list) and existing:
            series_id = existing[0].get("id")
            message = f"Already in Sonarr (seriesId {series_id})."
            await asyncio.to_thread(
                save_action, request_id, "readd_to_arr", "Re-add to Sonarr/Radarr", "ok", message
            )
            return {"status": "ok", "message": message, "seriesId": series_id}
        # Configured root folder may be an id; resolve it to a path.
        root_folder = await _resolve_root_folder_path(client, runtime.sonarr_root_folder, "Sonarr")
        try:
            response = await client.add_series(
                int(tvdb_id), runtime.sonarr_quality_profile_id, root_folder, title=title
            )
        except httpx.HTTPStatusError as exc:
            detail = _format_upstream_error("Sonarr", exc)
            await asyncio.to_thread(
                save_action, request_id, "readd_to_arr", "Re-add to Sonarr/Radarr", "failed", detail
            )
            raise HTTPException(status_code=502, detail=detail) from exc
        await asyncio.to_thread(
            save_action,
            request_id,
            "readd_to_arr",
            "Re-add to Sonarr/Radarr",
            "ok",
            f"Re-added in Sonarr to {root_folder}.",
        )
        return {"status": "ok", "response": response, "rootFolder": root_folder}
    if snapshot.request_type.value == "movie":
        tmdb_id = media.get("tmdbId")
        if not tmdb_id:
            raise HTTPException(status_code=400, detail="Missing tmdbId for movie")
        if not runtime.radarr_quality_profile_id or not runtime.radarr_root_folder:
            raise HTTPException(status_code=400, detail="Radarr profile/root not configured")
        client = RadarrClient(runtime.radarr_base_url, runtime.radarr_api_key)
        if not client.configured():
            raise HTTPException(status_code=400, detail="Radarr not configured")
        try:
            existing = await client.get_movie_by_tmdb_id(int(tmdb_id))
        except httpx.HTTPStatusError as exc:
            detail = _format_upstream_error("Radarr", exc)
            await asyncio.to_thread(
                save_action, request_id, "readd_to_arr", "Re-add to Sonarr/Radarr", "failed", detail
            )
            raise HTTPException(status_code=502, detail=detail) from exc
        if isinstance(existing, list) and existing:
            movie_id = existing[0].get("id")
            message = f"Already in Radarr (movieId {movie_id})."
            await asyncio.to_thread(
                save_action, request_id, "readd_to_arr", "Re-add to Sonarr/Radarr", "ok", message
            )
            return {"status": "ok", "message": message, "movieId": movie_id}
        root_folder = await _resolve_root_folder_path(client, runtime.radarr_root_folder, "Radarr")
        try:
            response = await client.add_movie(
                int(tmdb_id), runtime.radarr_quality_profile_id, root_folder
            )
        except httpx.HTTPStatusError as exc:
            detail = _format_upstream_error("Radarr", exc)
            await asyncio.to_thread(
                save_action, request_id, "readd_to_arr", "Re-add to Sonarr/Radarr", "failed", detail
            )
            raise HTTPException(status_code=502, detail=detail) from exc
        await asyncio.to_thread(
            save_action,
            request_id,
            "readd_to_arr",
            "Re-add to Sonarr/Radarr",
            "ok",
            f"Re-added in Radarr to {root_folder}.",
        )
        return {"status": "ok", "response": response, "rootFolder": root_folder}
    raise HTTPException(status_code=400, detail="Unknown request type")
@router.get("/{request_id}/history")
async def request_history(
    request_id: str, limit: int = 10, user: Dict[str, str] = Depends(get_current_user)
) -> dict:
    """Return the most recent stored snapshots for a request.

    Access is verified via Jellyseerr when it is configured; the DB read
    runs in a worker thread to keep the event loop free.
    """
    settings = get_runtime_settings()
    jellyseerr = JellyseerrClient(settings.jellyseerr_base_url, settings.jellyseerr_api_key)
    if jellyseerr.configured():
        await _ensure_request_access(jellyseerr, int(request_id), user)
    return {"snapshots": await asyncio.to_thread(get_recent_snapshots, request_id, limit)}
@router.get("/{request_id}/actions")
async def request_actions(
    request_id: str, limit: int = 10, user: Dict[str, str] = Depends(get_current_user)
) -> dict:
    """Return the most recent recorded actions for a request.

    Access is verified via Jellyseerr when it is configured; the DB read
    runs in a worker thread to keep the event loop free.
    """
    settings = get_runtime_settings()
    jellyseerr = JellyseerrClient(settings.jellyseerr_base_url, settings.jellyseerr_api_key)
    if jellyseerr.configured():
        await _ensure_request_access(jellyseerr, int(request_id), user)
    return {"actions": await asyncio.to_thread(get_recent_actions, request_id, limit)}
@router.post("/{request_id}/actions/grab")
async def action_grab(
    request_id: str, payload: Dict[str, Any], user: Dict[str, str] = Depends(get_current_user)
) -> dict:
    """Send a chosen Prowlarr release to Sonarr/Radarr for download.

    ``payload`` must carry the release ``guid`` and ``indexerId`` as
    returned by the search action; the target service is picked from the
    request's media type and the action is recorded on success.

    Raises:
        HTTPException: 400 for a missing guid/indexerId, an unconfigured
            service, or an unknown request type; 502 on upstream errors.
    """
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if client.configured():
        await _ensure_request_access(client, int(request_id), user)
    guid = payload.get("guid")
    indexer_id = payload.get("indexerId")
    # Validate the payload before building the (expensive) snapshot.
    if not guid or not indexer_id:
        raise HTTPException(status_code=400, detail="Missing guid or indexerId")
    snapshot = await build_snapshot(request_id)
    # Reuse the settings fetched above (previously re-fetched redundantly).
    if snapshot.request_type.value == "tv":
        client = SonarrClient(runtime.sonarr_base_url, runtime.sonarr_api_key)
        if not client.configured():
            raise HTTPException(status_code=400, detail="Sonarr not configured")
        try:
            response = await client.grab_release(str(guid), int(indexer_id))
        except httpx.HTTPStatusError as exc:
            raise HTTPException(status_code=502, detail=str(exc)) from exc
        await asyncio.to_thread(
            save_action, request_id, "grab", "Grab release", "ok", "Grab sent to Sonarr."
        )
        return {"status": "ok", "response": response}
    if snapshot.request_type.value == "movie":
        client = RadarrClient(runtime.radarr_base_url, runtime.radarr_api_key)
        if not client.configured():
            raise HTTPException(status_code=400, detail="Radarr not configured")
        try:
            response = await client.grab_release(str(guid), int(indexer_id))
        except httpx.HTTPStatusError as exc:
            raise HTTPException(status_code=502, detail=str(exc)) from exc
        await asyncio.to_thread(
            save_action, request_id, "grab", "Grab release", "ok", "Grab sent to Radarr."
        )
        return {"status": "ok", "response": response}
    raise HTTPException(status_code=400, detail="Unknown request type")