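"""Requests router.

Maintains a local, database-backed cache of Jellyseerr requests (full, delta,
and scheduled daily syncs), optionally prefetches TMDB artwork into a local
image cache, and exposes the /requests API endpoints (snapshot, recent,
search, and AI triage).
"""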
import asyncio
import json
import logging
import os
import time
from datetime import datetime, timedelta, timezone
from typing import Any, Dict, List, Optional, Tuple
from urllib.parse import quote

import httpx
from fastapi import APIRouter, Depends, HTTPException

from ..ai.triage import triage_snapshot
from ..auth import get_current_user
from ..clients.jellyfin import JellyfinClient
from ..clients.jellyseerr import JellyseerrClient
from ..clients.prowlarr import ProwlarrClient
from ..clients.qbittorrent import QBittorrentClient
from ..clients.radarr import RadarrClient
from ..clients.sonarr import SonarrClient
from ..db import (
    cleanup_history,
    get_artwork_cache_missing_count,
    get_artwork_cache_status_count,
    get_cached_request_by_media_id,
    get_cached_requests,
    get_cached_requests_since,
    get_recent_actions,
    get_recent_snapshots,
    get_request_cache_by_id,
    get_request_cache_count,
    get_request_cache_last_updated,
    get_request_cache_payload,
    get_request_cache_payloads,
    get_request_cache_payloads_missing,
    get_setting,
    prune_duplicate_requests_cache,
    repair_request_cache_titles,
    save_action,
    set_setting,
    update_artwork_cache_stats,
    upsert_artwork_cache_status,
    upsert_request_cache,
)
from ..models import RequestType, Snapshot, TriageResult
from ..runtime import get_runtime_settings
from ..services.snapshot import build_snapshot
from .images import cache_tmdb_image, is_tmdb_cached

router = APIRouter(prefix="/requests", tags=["requests"], dependencies=[Depends(get_current_user)])

CACHE_TTL_SECONDS = 600
_detail_cache: Dict[str, Tuple[float, Dict[str, Any]]] = {}
REQUEST_CACHE_TTL_SECONDS = 600
logger = logging.getLogger(__name__)

# Progress state shared by the background sync task and the status endpoint.
_sync_state: Dict[str, Any] = {
    "status": "idle",
    "stored": 0,
    "total": None,
    "skip": 0,
    "message": None,
    "started_at": None,
    "finished_at": None,
}
_sync_task: Optional[asyncio.Task] = None
_sync_last_key = "requests_sync_last_at"

RECENT_CACHE_MAX_DAYS = 180
RECENT_CACHE_TTL_SECONDS = 300
_recent_cache: Dict[str, Any] = {"items": [], "updated_at": None}

_artwork_prefetch_state: Dict[str, Any] = {
    "status": "idle",
    "processed": 0,
    "total": 0,
    "message": "",
    "only_missing": False,
    "started_at": None,
    "finished_at": None,
}
_artwork_prefetch_task: Optional[asyncio.Task] = None
# None until probed; set to False once Jellyseerr rejects GET media lookups.
_media_endpoint_supported: Optional[bool] = None

# Human-friendly labels for the request status codes stored in the cache.
STATUS_LABELS = {
    1: "Waiting for approval",
    2: "Approved",
    3: "Declined",
    4: "Ready to watch",
    5: "Working on it",
    6: "Partially ready",
}
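

# In-memory TTL cache for Jellyseerr request-detail lookups. Entries are
# stored as (expiry_timestamp, payload) and lazily evicted on read.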
def _cache_get(key: str) -> Optional[Dict[str, Any]]:
    cached = _detail_cache.get(key)
    if not cached:
        return None
    expires_at, payload = cached
    if expires_at < time.time():
        _detail_cache.pop(key, None)
        return None
    return payload


def _cache_set(key: str, payload: Dict[str, Any]) -> None:
    _detail_cache[key] = (time.time() + CACHE_TTL_SECONDS, payload)


def _status_label(value: Any) -> str:
    if isinstance(value, int):
        return STATUS_LABELS.get(value, f"Status {value}")
    return "Unknown"


def _normalize_username(value: Any) -> Optional[str]:
    if not isinstance(value, str):
        return None
    normalized = value.strip().lower()
    if not normalized:
        return None
    # Emails are reduced to their local part so they compare against usernames.
    if "@" in normalized:
        normalized = normalized.split("@", 1)[0]
    return normalized or None
def _request_matches_user(request_data: Any, username: str) -> bool:
    requested_by = None
    if isinstance(request_data, dict):
        requested_by = request_data.get("requestedBy") or request_data.get("requestedByUser")
        if requested_by is None:
            requested_by = request_data.get("requestedByName") or request_data.get("requestedByUsername")
    if isinstance(requested_by, dict):
        candidates = [
            requested_by.get("username"),
            requested_by.get("displayName"),
            requested_by.get("name"),
            requested_by.get("email"),
        ]
    else:
        candidates = [requested_by]

    username_norm = _normalize_username(username)
    if not username_norm:
        return False

    for candidate in candidates:
        # _normalize_username already strips email domains, so a plain
        # equality check is sufficient here.
        candidate_norm = _normalize_username(candidate)
        if not candidate_norm:
            continue
        if candidate_norm == username_norm:
            return True
    return False
def _normalize_requested_by(request_data: Any) -> Optional[str]:
    if not isinstance(request_data, dict):
        return None
    requested_by = request_data.get("requestedBy")
    if isinstance(requested_by, dict):
        for key in ("username", "displayName", "name", "email"):
            # _normalize_username handles email-domain stripping.
            normalized = _normalize_username(requested_by.get(key))
            if normalized:
                return normalized
        return None
    return _normalize_username(requested_by)


def _extract_requested_by_id(request_data: Any) -> Optional[int]:
    if not isinstance(request_data, dict):
        return None
    requested_by = request_data.get("requestedBy") or request_data.get("requestedByUser")
    if isinstance(requested_by, dict):
        for key in ("id", "userId", "Id"):
            value = requested_by.get(key)
            if value is None:
                continue
            try:
                return int(value)
            except (TypeError, ValueError):
                continue
    return None
def _format_upstream_error(service: str, exc: httpx.HTTPStatusError) -> str:
    response = exc.response
    status = response.status_code if response is not None else "unknown"
    body = ""
    if response is not None:
        try:
            payload = response.json()
            body = json.dumps(payload, ensure_ascii=True)
        except ValueError:
            body = response.text
    body = body.strip() if body else ""
    if body:
        return f"{service} error {status}: {body}"
    return f"{service} error {status}."


def _request_display_name(request_data: Any) -> Optional[str]:
    if not isinstance(request_data, dict):
        return None
    requested_by = request_data.get("requestedBy")
    if isinstance(requested_by, dict):
        for key in ("displayName", "username", "name", "email"):
            value = requested_by.get(key)
            if isinstance(value, str) and value.strip():
                return value.strip()
    if isinstance(requested_by, str) and requested_by.strip():
        return requested_by.strip()
    return None
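

# Flatten a raw Jellyseerr request payload into the column shape used by the
# request cache table, falling back across the key variants Jellyseerr has
# used for media, title, and date fields.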
def _parse_request_payload(item: Dict[str, Any]) -> Dict[str, Any]:
    media = item.get("media") or {}
    media_id = media.get("id") or item.get("mediaId")
    media_type = media.get("mediaType") or item.get("type")
    tmdb_id = media.get("tmdbId") or item.get("tmdbId")
    title = media.get("title") or media.get("name") or item.get("title") or item.get("name")
    year = media.get("year") or item.get("year")
    created_at = item.get("createdAt") or item.get("addedAt") or item.get("updatedAt")
    updated_at = item.get("updatedAt") or created_at
    requested_by = _request_display_name(item)
    requested_by_norm = _normalize_requested_by(item)
    requested_by_id = _extract_requested_by_id(item)
    return {
        "request_id": item.get("id"),
        "media_id": media_id,
        "media_type": media_type,
        "tmdb_id": tmdb_id,
        "status": item.get("status"),
        "title": title,
        "year": year,
        "requested_by": requested_by,
        "requested_by_norm": requested_by_norm,
        "requested_by_id": requested_by_id,
        "created_at": created_at,
        "updated_at": updated_at,
    }
def _extract_artwork_paths(item: Dict[str, Any]) -> tuple[Optional[str], Optional[str]]:
    media = item.get("media") or {}
    poster_path = None
    backdrop_path = None
    if isinstance(media, dict):
        poster_path = media.get("posterPath") or media.get("poster_path")
        backdrop_path = media.get("backdropPath") or media.get("backdrop_path")
    if not poster_path:
        poster_path = item.get("posterPath") or item.get("poster_path")
    if not backdrop_path:
        backdrop_path = item.get("backdropPath") or item.get("backdrop_path")
    return poster_path, backdrop_path


def _extract_tmdb_lookup(payload: Dict[str, Any]) -> tuple[Optional[int], Optional[str]]:
    media = payload.get("media") or {}
    if not isinstance(media, dict):
        media = {}
    tmdb_id = media.get("tmdbId") or payload.get("tmdbId")
    media_type = (
        media.get("mediaType")
        or payload.get("mediaType")
        or payload.get("type")
    )
    try:
        tmdb_id = int(tmdb_id) if tmdb_id is not None else None
    except (TypeError, ValueError):
        tmdb_id = None
    if isinstance(media_type, str):
        media_type = media_type.strip().lower() or None
    else:
        media_type = None
    return tmdb_id, media_type
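

# A payload counts as "missing artwork" when a known poster/backdrop path is
# not fully present in the local image cache, or when no path is known yet but
# the TMDB id and media type would allow one to be hydrated.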
def _artwork_missing_for_payload(payload: Dict[str, Any]) -> bool:
    poster_path, backdrop_path = _extract_artwork_paths(payload)
    tmdb_id, media_type = _extract_tmdb_lookup(payload)
    can_hydrate = bool(tmdb_id and media_type)
    if poster_path:
        if not is_tmdb_cached(poster_path, "w185") or not is_tmdb_cached(poster_path, "w342"):
            return True
    elif can_hydrate:
        return True
    if backdrop_path:
        if not is_tmdb_cached(backdrop_path, "w780"):
            return True
    elif can_hydrate:
        return True
    return False


def _compute_cached_flags(
    poster_path: Optional[str],
    backdrop_path: Optional[str],
    cache_mode: str,
    poster_cached: Optional[bool] = None,
    backdrop_cached: Optional[bool] = None,
) -> tuple[bool, bool]:
    if cache_mode != "cache":
        return True, True
    poster = poster_cached
    backdrop = backdrop_cached
    if poster is None:
        poster = (
            bool(poster_path)
            and is_tmdb_cached(poster_path, "w185")
            and is_tmdb_cached(poster_path, "w342")
        )
    if backdrop is None:
        backdrop = bool(backdrop_path) and is_tmdb_cached(backdrop_path, "w780")
    return bool(poster), bool(backdrop)


def _upsert_artwork_status(
    payload: Dict[str, Any],
    cache_mode: str,
    poster_cached: Optional[bool] = None,
    backdrop_cached: Optional[bool] = None,
) -> None:
    parsed = _parse_request_payload(payload)
    request_id = parsed.get("request_id")
    if not isinstance(request_id, int):
        return
    tmdb_id, media_type = _extract_tmdb_lookup(payload)
    poster_path, backdrop_path = _extract_artwork_paths(payload)
    has_tmdb = bool(tmdb_id and media_type)
    poster_cached_flag, backdrop_cached_flag = _compute_cached_flags(
        poster_path, backdrop_path, cache_mode, poster_cached, backdrop_cached
    )
    upsert_artwork_cache_status(
        request_id=request_id,
        tmdb_id=tmdb_id,
        media_type=media_type,
        poster_path=poster_path,
        backdrop_path=backdrop_path,
        has_tmdb=has_tmdb,
        poster_cached=poster_cached_flag,
        backdrop_cached=backdrop_cached_flag,
    )


def _collect_artwork_cache_disk_stats() -> tuple[int, int]:
    cache_root = os.path.join(os.getcwd(), "data", "artwork")
    total_bytes = 0
    total_files = 0
    if not os.path.isdir(cache_root):
        return 0, 0
    for root, _, files in os.walk(cache_root):
        for name in files:
            path = os.path.join(root, name)
            try:
                total_bytes += os.path.getsize(path)
                total_files += 1
            except OSError:
                continue
    return total_bytes, total_files


async def _get_request_details(client: JellyseerrClient, request_id: int) -> Optional[Dict[str, Any]]:
    cache_key = f"request:{request_id}"
    cached = _cache_get(cache_key)
    if isinstance(cached, dict):
        return cached
    try:
        fetched = await client.get_request(str(request_id))
    except httpx.HTTPStatusError:
        return None
    if isinstance(fetched, dict):
        _cache_set(cache_key, fetched)
        return fetched
    return None


async def _hydrate_title_from_tmdb(
    client: JellyseerrClient, media_type: Optional[str], tmdb_id: Optional[int]
) -> tuple[Optional[str], Optional[int]]:
    if not tmdb_id or not media_type:
        return None, None
    try:
        if media_type == "movie":
            details = await client.get_movie(int(tmdb_id))
            if isinstance(details, dict):
                title = details.get("title")
                release_date = details.get("releaseDate")
                # Guard against malformed dates (e.g. "TBA") instead of raising.
                try:
                    year = int(release_date[:4]) if release_date else None
                except (TypeError, ValueError):
                    year = None
                return title, year
        if media_type == "tv":
            details = await client.get_tv(int(tmdb_id))
            if isinstance(details, dict):
                title = details.get("name") or details.get("title")
                first_air = details.get("firstAirDate")
                try:
                    year = int(first_air[:4]) if first_air else None
                except (TypeError, ValueError):
                    year = None
                return title, year
    except httpx.HTTPStatusError:
        return None, None
    return None, None


async def _hydrate_media_details(client: JellyseerrClient, media_id: Optional[int]) -> Optional[Dict[str, Any]]:
    if not media_id:
        return None
    global _media_endpoint_supported
    if _media_endpoint_supported is False:
        return None
    try:
        details = await client.get_media(int(media_id))
    except httpx.HTTPStatusError as exc:
        if exc.response is not None and exc.response.status_code == 405:
            _media_endpoint_supported = False
            logger.info("Jellyseerr media endpoint rejected GET requests; skipping media lookups.")
        return None
    _media_endpoint_supported = True
    return details if isinstance(details, dict) else None


async def _hydrate_artwork_from_tmdb(
    client: JellyseerrClient, media_type: Optional[str], tmdb_id: Optional[int]
) -> tuple[Optional[str], Optional[str]]:
    if not tmdb_id or not media_type:
        return None, None
    try:
        if media_type == "movie":
            details = await client.get_movie(int(tmdb_id))
            if isinstance(details, dict):
                return (
                    details.get("posterPath") or details.get("poster_path"),
                    details.get("backdropPath") or details.get("backdrop_path"),
                )
        if media_type == "tv":
            details = await client.get_tv(int(tmdb_id))
            if isinstance(details, dict):
                return (
                    details.get("posterPath") or details.get("poster_path"),
                    details.get("backdropPath") or details.get("backdrop_path"),
                )
    except httpx.HTTPStatusError:
        return None, None
    return None, None


def _artwork_url(path: Optional[str], size: str, cache_mode: str) -> Optional[str]:
    if not path:
        return None
    if not path.startswith("/"):
        path = f"/{path}"
    if cache_mode == "cache":
        return f"/images/tmdb?path={quote(path)}&size={size}"
    return f"https://image.tmdb.org/t/p/{size}{path}"


def _cache_is_stale(last_updated: Optional[str]) -> bool:
    if not last_updated:
        return True
    runtime = get_runtime_settings()
    ttl_seconds = max(60, int(runtime.requests_sync_ttl_minutes or 1440) * 60)
    try:
        parsed = datetime.fromisoformat(last_updated.replace("Z", "+00:00"))
        now = datetime.now(timezone.utc)
        return (now - parsed).total_seconds() > ttl_seconds
    except ValueError:
        return True


def _parse_time(value: Optional[str], fallback_hour: int, fallback_minute: int) -> tuple[int, int]:
    if isinstance(value, str) and ":" in value:
        parts = value.strip().split(":")
        if len(parts) == 2:
            try:
                hour = int(parts[0])
                minute = int(parts[1])
                if 0 <= hour <= 23 and 0 <= minute <= 59:
                    return hour, minute
            except ValueError:
                pass
    return fallback_hour, fallback_minute


def _seconds_until(hour: int, minute: int) -> int:
    now = datetime.now(timezone.utc).astimezone()
    target = now.replace(hour=hour, minute=minute, second=0, microsecond=0)
    if target <= now:
        target = target + timedelta(days=1)
    return int((target - now).total_seconds())
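

# Full sync: page through Jellyseerr's recent requests (take=50) until a short
# page, hydrating missing titles and artwork via request details, the media
# endpoint, and TMDB lookups, then upsert every request into the local cache.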
async def _sync_all_requests(client: JellyseerrClient) -> int:
    take = 50
    skip = 0
    stored = 0
    cache_mode = (get_runtime_settings().artwork_cache_mode or "remote").lower()
    logger.info("Jellyseerr sync starting: take=%s", take)
    _sync_state.update(
        {
            "status": "running",
            "stored": 0,
            "total": None,
            "skip": 0,
            "message": "Starting sync",
            "started_at": datetime.now(timezone.utc).isoformat(),
            "finished_at": None,
        }
    )
    while True:
        try:
            response = await client.get_recent_requests(take=take, skip=skip)
        except httpx.HTTPError as exc:
            logger.warning("Jellyseerr sync failed at skip=%s: %s", skip, exc)
            _sync_state.update({"status": "failed", "message": f"Sync failed: {exc}"})
            break
        if not isinstance(response, dict):
            logger.warning("Jellyseerr sync stopped: non-dict response at skip=%s", skip)
            _sync_state.update({"status": "failed", "message": "Invalid response"})
            break
        if _sync_state["total"] is None:
            page_info = response.get("pageInfo") or {}
            total = (
                page_info.get("totalResults")
                or page_info.get("total")
                or response.get("totalResults")
                or response.get("total")
            )
            if isinstance(total, int):
                _sync_state["total"] = total
        items = response.get("results") or []
        if not isinstance(items, list) or not items:
            logger.info("Jellyseerr sync completed: no more results at skip=%s", skip)
            break
        for item in items:
            if not isinstance(item, dict):
                continue
            payload = _parse_request_payload(item)
            request_id = payload.get("request_id")
            cached_title = None
            if isinstance(request_id, int):
                if not payload.get("title"):
                    cached = get_request_cache_by_id(request_id)
                    if cached and cached.get("title"):
                        cached_title = cached.get("title")
                if not payload.get("title") or not payload.get("media_id"):
                    logger.debug("Jellyseerr sync hydrate request_id=%s", request_id)
                    details = await _get_request_details(client, request_id)
                    if isinstance(details, dict):
                        payload = _parse_request_payload(details)
                        item = details
                if (
                    not payload.get("title")
                    and payload.get("media_id")
                    and (not payload.get("tmdb_id") or not payload.get("media_type"))
                ):
                    media_details = await _hydrate_media_details(client, payload.get("media_id"))
                    if isinstance(media_details, dict):
                        media_title = media_details.get("title") or media_details.get("name")
                        if media_title:
                            payload["title"] = media_title
                        if not payload.get("year") and media_details.get("year"):
                            payload["year"] = media_details.get("year")
                        if not payload.get("tmdb_id") and media_details.get("tmdbId"):
                            payload["tmdb_id"] = media_details.get("tmdbId")
                        if not payload.get("media_type") and media_details.get("mediaType"):
                            payload["media_type"] = media_details.get("mediaType")
                        # Merge media details under the item, preferring values
                        # the item already carried.
                        existing_media = item.get("media")
                        if isinstance(existing_media, dict):
                            merged = dict(media_details)
                            for key, value in existing_media.items():
                                if value is not None:
                                    merged[key] = value
                            item["media"] = merged
                        else:
                            item["media"] = media_details
                poster_path, backdrop_path = _extract_artwork_paths(item)
                if cache_mode == "cache" and not (poster_path or backdrop_path):
                    details = await _get_request_details(client, request_id)
                    if isinstance(details, dict):
                        item = details
                        payload = _parse_request_payload(details)
            if not payload.get("title") and payload.get("tmdb_id") and payload.get("media_type"):
                hydrated_title, hydrated_year = await _hydrate_title_from_tmdb(
                    client, payload.get("media_type"), payload.get("tmdb_id")
                )
                if hydrated_title:
                    payload["title"] = hydrated_title
                if hydrated_year:
                    payload["year"] = hydrated_year
            if not payload.get("title") and cached_title:
                payload["title"] = cached_title
            if not isinstance(payload.get("request_id"), int):
                continue
            payload_json = json.dumps(item, ensure_ascii=True)
            upsert_request_cache(
                request_id=payload.get("request_id"),
                media_id=payload.get("media_id"),
                media_type=payload.get("media_type"),
                status=payload.get("status"),
                title=payload.get("title"),
                year=payload.get("year"),
                requested_by=payload.get("requested_by"),
                requested_by_norm=payload.get("requested_by_norm"),
                requested_by_id=payload.get("requested_by_id"),
                created_at=payload.get("created_at"),
                updated_at=payload.get("updated_at"),
                payload_json=payload_json,
            )
            _upsert_artwork_status(item, cache_mode)
            stored += 1
            _sync_state["stored"] = stored
        if len(items) < take:
            logger.info("Jellyseerr sync completed: stored=%s", stored)
            break
        skip += take
        _sync_state["skip"] = skip
        _sync_state["message"] = f"Synced {stored} requests"
        logger.info("Jellyseerr sync progress: stored=%s skip=%s", stored, skip)
    if _sync_state.get("status") != "failed":
        # Don't overwrite a failure recorded inside the loop, and only record
        # a successful sync timestamp so a failed run retries on the next poll.
        _sync_state.update(
            {
                "status": "completed",
                "stored": stored,
                "message": f"Sync complete: {stored} requests",
                "finished_at": datetime.now(timezone.utc).isoformat(),
            }
        )
        set_setting(_sync_last_key, datetime.now(timezone.utc).isoformat())
    _refresh_recent_cache_from_db()
    if cache_mode == "cache":
        update_artwork_cache_stats(
            missing_count=get_artwork_cache_missing_count(),
            total_requests=get_request_cache_count(),
        )
    return stored
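

# Delta sync: same per-item hydration as the full sync, but skips items whose
# updated_at matches the cached row and stops early after two consecutive
# pages with no changes.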
async def _sync_delta_requests(client: JellyseerrClient) -> int:
    take = 50
    skip = 0
    stored = 0
    unchanged_pages = 0
    cache_mode = (get_runtime_settings().artwork_cache_mode or "remote").lower()
    logger.info("Jellyseerr delta sync starting: take=%s", take)
    _sync_state.update(
        {
            "status": "running",
            "stored": 0,
            "total": None,
            "skip": 0,
            "message": "Starting delta sync",
            "started_at": datetime.now(timezone.utc).isoformat(),
            "finished_at": None,
        }
    )
    while True:
        try:
            response = await client.get_recent_requests(take=take, skip=skip)
        except httpx.HTTPError as exc:
            logger.warning("Jellyseerr delta sync failed at skip=%s: %s", skip, exc)
            _sync_state.update({"status": "failed", "message": f"Delta sync failed: {exc}"})
            break
        if not isinstance(response, dict):
            logger.warning("Jellyseerr delta sync stopped: non-dict response at skip=%s", skip)
            _sync_state.update({"status": "failed", "message": "Invalid response"})
            break
        items = response.get("results") or []
        if not isinstance(items, list) or not items:
            logger.info("Jellyseerr delta sync completed: no more results at skip=%s", skip)
            break
        page_changed = False
        for item in items:
            if not isinstance(item, dict):
                continue
            payload = _parse_request_payload(item)
            request_id = payload.get("request_id")
            cached_title = None
            if isinstance(request_id, int):
                cached = get_request_cache_by_id(request_id)
                incoming_updated = payload.get("updated_at")
                cached_title = cached.get("title") if cached else None
                # Unchanged row with a title already cached: nothing to do.
                if cached and incoming_updated and cached.get("updated_at") == incoming_updated and cached.get("title"):
                    continue
                if not payload.get("title") or not payload.get("media_id"):
                    details = await _get_request_details(client, request_id)
                    if isinstance(details, dict):
                        payload = _parse_request_payload(details)
                        item = details
                if (
                    not payload.get("title")
                    and payload.get("media_id")
                    and (not payload.get("tmdb_id") or not payload.get("media_type"))
                ):
                    media_details = await _hydrate_media_details(client, payload.get("media_id"))
                    if isinstance(media_details, dict):
                        media_title = media_details.get("title") or media_details.get("name")
                        if media_title:
                            payload["title"] = media_title
                        if not payload.get("year") and media_details.get("year"):
                            payload["year"] = media_details.get("year")
                        if not payload.get("tmdb_id") and media_details.get("tmdbId"):
                            payload["tmdb_id"] = media_details.get("tmdbId")
                        if not payload.get("media_type") and media_details.get("mediaType"):
                            payload["media_type"] = media_details.get("mediaType")
                        existing_media = item.get("media")
                        if isinstance(existing_media, dict):
                            merged = dict(media_details)
                            for key, value in existing_media.items():
                                if value is not None:
                                    merged[key] = value
                            item["media"] = merged
                        else:
                            item["media"] = media_details
                poster_path, backdrop_path = _extract_artwork_paths(item)
                if cache_mode == "cache" and not (poster_path or backdrop_path):
                    details = await _get_request_details(client, request_id)
                    if isinstance(details, dict):
                        payload = _parse_request_payload(details)
                        item = details
            if not payload.get("title") and payload.get("tmdb_id") and payload.get("media_type"):
                hydrated_title, hydrated_year = await _hydrate_title_from_tmdb(
                    client, payload.get("media_type"), payload.get("tmdb_id")
                )
                if hydrated_title:
                    payload["title"] = hydrated_title
                if hydrated_year:
                    payload["year"] = hydrated_year
            if not payload.get("title") and cached_title:
                payload["title"] = cached_title
            if not isinstance(payload.get("request_id"), int):
                continue
            payload_json = json.dumps(item, ensure_ascii=True)
            upsert_request_cache(
                request_id=payload.get("request_id"),
                media_id=payload.get("media_id"),
                media_type=payload.get("media_type"),
                status=payload.get("status"),
                title=payload.get("title"),
                year=payload.get("year"),
                requested_by=payload.get("requested_by"),
                requested_by_norm=payload.get("requested_by_norm"),
                requested_by_id=payload.get("requested_by_id"),
                created_at=payload.get("created_at"),
                updated_at=payload.get("updated_at"),
                payload_json=payload_json,
            )
            _upsert_artwork_status(item, cache_mode)
            stored += 1
            page_changed = True
            _sync_state["stored"] = stored
        if not page_changed:
            unchanged_pages += 1
        else:
            unchanged_pages = 0
        if len(items) < take or unchanged_pages >= 2:
            logger.info("Jellyseerr delta sync completed: stored=%s", stored)
            break
        skip += take
        _sync_state["skip"] = skip
        _sync_state["message"] = f"Delta synced {stored} requests"
        logger.info("Jellyseerr delta sync progress: stored=%s skip=%s", stored, skip)
    deduped = prune_duplicate_requests_cache()
    if deduped:
        logger.info("Jellyseerr delta sync removed duplicate rows: %s", deduped)
    if _sync_state.get("status") != "failed":
        # As in the full sync, don't mask a recorded failure or mark the cache
        # fresh after a failed run.
        _sync_state.update(
            {
                "status": "completed",
                "stored": stored,
                "message": f"Delta sync complete: {stored} updated",
                "finished_at": datetime.now(timezone.utc).isoformat(),
            }
        )
        set_setting(_sync_last_key, datetime.now(timezone.utc).isoformat())
    _refresh_recent_cache_from_db()
    if cache_mode == "cache":
        update_artwork_cache_stats(
            missing_count=get_artwork_cache_missing_count(),
            total_requests=get_request_cache_count(),
        )
    return stored
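

# Walk the cached request payloads in batches and download poster (w185/w342)
# and backdrop (w780) renditions into the local image cache, recording
# per-request artwork cache status as it goes.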
async def _prefetch_artwork_cache(
    client: JellyseerrClient,
    only_missing: bool = False,
    total: Optional[int] = None,
    use_missing_query: bool = False,
) -> None:
    runtime = get_runtime_settings()
    cache_mode = (runtime.artwork_cache_mode or "remote").lower()
    if cache_mode != "cache":
        _artwork_prefetch_state.update(
            {
                "status": "failed",
                "message": "Artwork cache mode is not set to cache.",
                "finished_at": datetime.now(timezone.utc).isoformat(),
            }
        )
        return

    total = total if total is not None else get_request_cache_count()
    _artwork_prefetch_state.update(
        {
            "status": "running",
            "processed": 0,
            "total": total,
            "message": "Starting missing artwork prefetch"
            if only_missing
            else "Starting artwork prefetch",
            "only_missing": only_missing,
            "started_at": datetime.now(timezone.utc).isoformat(),
            "finished_at": None,
        }
    )
    if only_missing and total == 0:
        _artwork_prefetch_state.update(
            {
                "status": "completed",
                "processed": 0,
                "message": "No missing artwork to cache.",
                "finished_at": datetime.now(timezone.utc).isoformat(),
            }
        )
        return
    offset = 0
    limit = 200
    processed = 0
    while True:
        if use_missing_query:
            batch = get_request_cache_payloads_missing(limit=limit, offset=offset)
        else:
            batch = get_request_cache_payloads(limit=limit, offset=offset)
        if not batch:
            break
        for row in batch:
            payload = row.get("payload")
            if not isinstance(payload, dict):
                if not only_missing:
                    processed += 1
                continue
            if only_missing and not use_missing_query and not _artwork_missing_for_payload(payload):
                continue
            poster_path, backdrop_path = _extract_artwork_paths(payload)
            tmdb_id, media_type = _extract_tmdb_lookup(payload)
            if (not poster_path or not backdrop_path) and client.configured() and tmdb_id and media_type:
                media = payload.get("media") or {}
                hydrated_poster, hydrated_backdrop = await _hydrate_artwork_from_tmdb(
                    client, media_type, tmdb_id
                )
                poster_path = poster_path or hydrated_poster
                backdrop_path = backdrop_path or hydrated_backdrop
                if hydrated_poster or hydrated_backdrop:
                    media = dict(media) if isinstance(media, dict) else {}
                    if hydrated_poster:
                        media["posterPath"] = hydrated_poster
                    if hydrated_backdrop:
                        media["backdropPath"] = hydrated_backdrop
                    payload["media"] = media
                    parsed = _parse_request_payload(payload)
                    request_id = parsed.get("request_id")
                    if isinstance(request_id, int):
                        upsert_request_cache(
                            request_id=request_id,
                            media_id=parsed.get("media_id"),
                            media_type=parsed.get("media_type"),
                            status=parsed.get("status"),
                            title=parsed.get("title"),
                            year=parsed.get("year"),
                            requested_by=parsed.get("requested_by"),
                            requested_by_norm=parsed.get("requested_by_norm"),
                            requested_by_id=parsed.get("requested_by_id"),
                            created_at=parsed.get("created_at"),
                            updated_at=parsed.get("updated_at"),
                            payload_json=json.dumps(payload, ensure_ascii=True),
                        )
            poster_cached_flag = False
            backdrop_cached_flag = False
            if poster_path:
                try:
                    poster_cached_flag = bool(
                        await cache_tmdb_image(poster_path, "w185")
                    ) and bool(await cache_tmdb_image(poster_path, "w342"))
                except httpx.HTTPError:
                    poster_cached_flag = False
            if backdrop_path:
                try:
                    backdrop_cached_flag = bool(await cache_tmdb_image(backdrop_path, "w780"))
                except httpx.HTTPError:
                    backdrop_cached_flag = False
            _upsert_artwork_status(
                payload,
                cache_mode,
                poster_cached=poster_cached_flag if poster_path else None,
                backdrop_cached=backdrop_cached_flag if backdrop_path else None,
            )
            processed += 1
            if processed % 25 == 0:
                _artwork_prefetch_state.update(
                    {"processed": processed, "message": f"Cached artwork for {processed} requests"}
                )
        offset += limit

    total_requests = get_request_cache_count()
    missing_count = get_artwork_cache_missing_count()
    cache_bytes, cache_files = _collect_artwork_cache_disk_stats()
    update_artwork_cache_stats(
        cache_bytes=cache_bytes,
        cache_files=cache_files,
        missing_count=missing_count,
        total_requests=total_requests,
    )
    _artwork_prefetch_state.update(
        {
            "status": "completed",
            "processed": processed,
            "message": f"Artwork cached for {processed} requests",
            "finished_at": datetime.now(timezone.utc).isoformat(),
        }
    )


async def start_artwork_prefetch(
    base_url: Optional[str], api_key: Optional[str], only_missing: bool = False
) -> Dict[str, Any]:
    global _artwork_prefetch_task
    if _artwork_prefetch_task and not _artwork_prefetch_task.done():
        return dict(_artwork_prefetch_state)
    client = JellyseerrClient(base_url, api_key)
    status_count = get_artwork_cache_status_count()
    total_requests = get_request_cache_count()
    use_missing_query = only_missing and status_count >= total_requests and total_requests > 0
    if only_missing and use_missing_query:
        total = get_artwork_cache_missing_count()
    else:
        total = total_requests
    _artwork_prefetch_state.update(
        {
            "status": "running",
            "processed": 0,
            "total": total,
            "message": "Seeding artwork cache status"
            if only_missing and not use_missing_query
            else ("Starting missing artwork prefetch" if only_missing else "Starting artwork prefetch"),
            "only_missing": only_missing,
            "started_at": datetime.now(timezone.utc).isoformat(),
            "finished_at": None,
        }
    )
    if only_missing and total == 0:
        _artwork_prefetch_state.update(
            {
                "status": "completed",
                "processed": 0,
                "message": "No missing artwork to cache.",
                "finished_at": datetime.now(timezone.utc).isoformat(),
            }
        )
        return dict(_artwork_prefetch_state)

    async def _runner() -> None:
        try:
            await _prefetch_artwork_cache(
                client,
                only_missing=only_missing,
                total=total,
                use_missing_query=use_missing_query,
            )
        except Exception:
            logger.exception("Artwork prefetch failed")
            _artwork_prefetch_state.update(
                {
                    "status": "failed",
                    "message": "Artwork prefetch failed.",
                    "finished_at": datetime.now(timezone.utc).isoformat(),
                }
            )

    _artwork_prefetch_task = asyncio.create_task(_runner())
    return dict(_artwork_prefetch_state)


def get_artwork_prefetch_state() -> Dict[str, Any]:
    return dict(_artwork_prefetch_state)
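

# Cache freshness helpers: trigger a full sync when the last sync timestamp is
# older than the configured TTL, and keep a small in-process window of recent
# requests for the /recent endpoint.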
async def _ensure_requests_cache(client: JellyseerrClient) -> None:
    last_sync = get_setting(_sync_last_key)
    last_updated = last_sync or get_request_cache_last_updated()
    if _cache_is_stale(last_updated):
        logger.info("Requests cache stale or empty, starting sync.")
        await _sync_all_requests(client)
    else:
        logger.debug("Requests cache fresh: last_sync=%s", last_updated)


def _refresh_recent_cache_from_db() -> None:
    since_iso = (datetime.now(timezone.utc) - timedelta(days=RECENT_CACHE_MAX_DAYS)).isoformat()
    items = get_cached_requests_since(since_iso)
    _recent_cache["items"] = items
    _recent_cache["updated_at"] = datetime.now(timezone.utc).isoformat()


def _recent_cache_stale() -> bool:
    updated_at = _recent_cache.get("updated_at")
    if not updated_at:
        return True
    try:
        parsed = datetime.fromisoformat(updated_at)
    except ValueError:
        return True
    return (datetime.now(timezone.utc) - parsed).total_seconds() > RECENT_CACHE_TTL_SECONDS


def _parse_iso_datetime(value: Optional[str]) -> Optional[datetime]:
    if not value:
        return None
    try:
        parsed = datetime.fromisoformat(value.replace("Z", "+00:00"))
    except ValueError:
        return None
    if parsed.tzinfo is None:
        return parsed.replace(tzinfo=timezone.utc)
    return parsed


def _get_recent_from_cache(
    requested_by_norm: Optional[str],
    requested_by_id: Optional[int],
    limit: int,
    offset: int,
    since_iso: Optional[str],
) -> List[Dict[str, Any]]:
    items = _recent_cache.get("items") or []
    results: List[Dict[str, Any]] = []
    since_dt = _parse_iso_datetime(since_iso)
    for item in items:
        if requested_by_id is not None:
            if item.get("requested_by_id") != requested_by_id:
                continue
        elif requested_by_norm and item.get("requested_by_norm") != requested_by_norm:
            continue
        if since_dt:
            candidate = item.get("created_at") or item.get("updated_at")
            item_dt = _parse_iso_datetime(candidate)
            if not item_dt or item_dt < since_dt:
                continue
        results.append(item)
    return results[offset : offset + limit]


async def startup_warmup_requests_cache() -> None:
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if client.configured():
        try:
            await _ensure_requests_cache(client)
        except httpx.HTTPError as exc:
            logger.warning("Requests warmup skipped: %s", exc)
    repaired = repair_request_cache_titles()
    if repaired:
        logger.info("Requests cache titles repaired: %s", repaired)
    _refresh_recent_cache_from_db()


async def run_requests_poll_loop() -> None:
    while True:
        runtime = get_runtime_settings()
        interval = max(60, int(runtime.requests_poll_interval_seconds or 300))
        client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
        if client.configured():
            try:
                await _ensure_requests_cache(client)
            except httpx.HTTPError as exc:
                logger.debug("Requests poll skipped: %s", exc)
        await asyncio.sleep(interval)


async def run_requests_delta_loop() -> None:
    while True:
        runtime = get_runtime_settings()
        interval = max(60, int(runtime.requests_delta_sync_interval_minutes or 5) * 60)
        client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
        if client.configured():
            if _sync_task and not _sync_task.done():
                logger.debug("Delta sync skipped: another sync is running.")
            else:
                try:
                    await _sync_delta_requests(client)
                except httpx.HTTPError as exc:
                    logger.debug("Delta sync skipped: %s", exc)
        await asyncio.sleep(interval)


async def run_daily_requests_full_sync() -> None:
    while True:
        runtime = get_runtime_settings()
        hour, minute = _parse_time(runtime.requests_full_sync_time, 0, 0)
        await asyncio.sleep(_seconds_until(hour, minute))
        runtime = get_runtime_settings()
        client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
        if not client.configured():
            logger.info("Daily full sync skipped: Jellyseerr not configured.")
            continue
        if _sync_task and not _sync_task.done():
            logger.info("Daily full sync skipped: another sync is running.")
            continue
        try:
            await _sync_all_requests(client)
        except httpx.HTTPError as exc:
            logger.warning("Daily full sync failed: %s", exc)


async def run_daily_db_cleanup() -> None:
    while True:
        runtime = get_runtime_settings()
        hour, minute = _parse_time(runtime.requests_cleanup_time, 2, 0)
        await asyncio.sleep(_seconds_until(hour, minute))
        runtime = get_runtime_settings()
        result = cleanup_history(int(runtime.requests_cleanup_days or 90))
        logger.info("Daily cleanup complete: %s", result)
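

# Manual sync triggers. Only one sync task runs at a time; a second trigger
# while one is in flight just returns the current state snapshot.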
async def start_requests_sync(base_url: Optional[str], api_key: Optional[str]) -> Dict[str, Any]:
    global _sync_task
    if _sync_task and not _sync_task.done():
        return dict(_sync_state)
    if not base_url:
        _sync_state.update({"status": "failed", "message": "Jellyseerr not configured"})
        return dict(_sync_state)
    client = JellyseerrClient(base_url, api_key)
    _sync_state.update(
        {
            "status": "running",
            "stored": 0,
            "total": None,
            "skip": 0,
            "message": "Starting sync",
            "started_at": datetime.now(timezone.utc).isoformat(),
            "finished_at": None,
        }
    )

    async def _runner() -> None:
        try:
            await _sync_all_requests(client)
        except Exception as exc:
            logger.exception("Jellyseerr sync failed")
            _sync_state.update(
                {
                    "status": "failed",
                    "message": f"Sync failed: {exc}",
                    "finished_at": datetime.now(timezone.utc).isoformat(),
                }
            )

    _sync_task = asyncio.create_task(_runner())
    return dict(_sync_state)


async def start_requests_delta_sync(base_url: Optional[str], api_key: Optional[str]) -> Dict[str, Any]:
    global _sync_task
    if _sync_task and not _sync_task.done():
        return dict(_sync_state)
    if not base_url:
        _sync_state.update({"status": "failed", "message": "Jellyseerr not configured"})
        return dict(_sync_state)
    client = JellyseerrClient(base_url, api_key)
    _sync_state.update(
        {
            "status": "running",
            "stored": 0,
            "total": None,
            "skip": 0,
            "message": "Starting delta sync",
            "started_at": datetime.now(timezone.utc).isoformat(),
            "finished_at": None,
        }
    )

    async def _runner() -> None:
        try:
            await _sync_delta_requests(client)
        except Exception as exc:
            logger.exception("Jellyseerr delta sync failed")
            _sync_state.update(
                {
                    "status": "failed",
                    "message": f"Delta sync failed: {exc}",
                    "finished_at": datetime.now(timezone.utc).isoformat(),
                }
            )

    _sync_task = asyncio.create_task(_runner())
    return dict(_sync_state)


def get_requests_sync_state() -> Dict[str, Any]:
    return dict(_sync_state)
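

# Authorization: admins see everything; other users must match the request's
# requester. The check runs against the local cache unless the data source is
# "always_js", in which case Jellyseerr is consulted directly.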
async def _ensure_request_access(
    client: JellyseerrClient, request_id: int, user: Dict[str, str]
) -> None:
    if user.get("role") == "admin":
        return
    runtime = get_runtime_settings()
    mode = (runtime.requests_data_source or "prefer_cache").lower()
    cached = get_request_cache_payload(request_id)
    if mode != "always_js":
        if cached is None:
            logger.debug("access cache miss: request_id=%s mode=%s", request_id, mode)
            raise HTTPException(status_code=404, detail="Request not found in cache")
        logger.debug("access cache hit: request_id=%s mode=%s", request_id, mode)
        if _request_matches_user(cached, user.get("username", "")):
            return
        raise HTTPException(status_code=403, detail="Request not accessible for this user")
    logger.debug("access check via Jellyseerr: request_id=%s mode=%s", request_id, mode)
    details = await _get_request_details(client, request_id)
    if details is None or not _request_matches_user(details, user.get("username", "")):
        raise HTTPException(status_code=403, detail="Request not accessible for this user")


def _build_recent_map(response: Dict[str, Any]) -> Dict[int, Dict[str, Any]]:
    mapping: Dict[int, Dict[str, Any]] = {}
    for item in response.get("results", []):
        media = item.get("media") or {}
        media_id = media.get("id") or item.get("mediaId")
        request_id = item.get("id")
        status = item.get("status")
        if isinstance(media_id, int) and isinstance(request_id, int):
            mapping[media_id] = {
                "requestId": request_id,
                "status": status,
                "statusLabel": _status_label(status),
            }
    return mapping


def _queue_records(queue: Any) -> List[Dict[str, Any]]:
    if isinstance(queue, dict):
        records = queue.get("records")
        if isinstance(records, list):
            return records
    if isinstance(queue, list):
        return queue
    return []


def _download_ids(records: List[Dict[str, Any]]) -> List[str]:
    ids: List[str] = []
    for record in records:
        download_id = record.get("downloadId") or record.get("download_id")
        if isinstance(download_id, str) and download_id:
            ids.append(download_id)
    return ids


def _normalize_categories(categories: Any) -> List[str]:
    names: List[str] = []
    if isinstance(categories, list):
        for cat in categories:
            if isinstance(cat, dict):
                name = cat.get("name")
                if isinstance(name, str):
                    names.append(name.lower())
    return names


def _normalize_indexer_name(value: Optional[str]) -> str:
    if not isinstance(value, str):
        return ""
    return "".join(ch for ch in value.lower().strip() if ch.isalnum())


def _log_arr_http_error(service_label: str, action: str, exc: httpx.HTTPStatusError) -> None:
    if exc.response is None:
        logger.warning("%s %s failed: %s", service_label, action, exc)
        return
    status = exc.response.status_code
    body = exc.response.text
    if isinstance(body, str):
        body = body.strip()
        if len(body) > 800:
            body = f"{body[:800]}...(truncated)"
    logger.warning("%s %s failed: status=%s body=%s", service_label, action, status, body)


def _format_rejections(rejections: Any) -> Optional[str]:
    if isinstance(rejections, str):
        return rejections.strip() or None
    if isinstance(rejections, list):
        reasons = []
        for item in rejections:
            reason = None
            if isinstance(item, dict):
                reason = (
                    item.get("reason")
                    or item.get("message")
                    or item.get("errorMessage")
                )
            if not reason and item is not None:
                reason = str(item)
            if isinstance(reason, str) and reason.strip():
                reasons.append(reason.strip())
        if reasons:
            return "; ".join(reasons)
    return None


def _release_push_accepted(response: Any) -> tuple[bool, Optional[str]]:
    if not isinstance(response, dict):
        return True, None
    rejections = response.get("rejections") or response.get("rejectionReasons")
    reason = _format_rejections(rejections)
    if reason:
        return False, reason
    if response.get("rejected") is True:
        return False, "rejected"
    if response.get("downloadAllowed") is False:
        return False, "download not allowed"
    if response.get("approved") is False:
        return False, "not approved"
    return True, None
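

# Resolve a release's indexer to an Arr-side indexer id, trying exact name
# match first, then an alphanumeric-only "compact" match, then a substring
# match, before giving up with a warning.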
def _resolve_arr_indexer_id(
    indexers: Any, indexer_name: Optional[str], indexer_id: Optional[int], service_label: str
) -> Optional[int]:
    if not isinstance(indexers, list):
        return None
    if not indexer_name:
        if indexer_id is None:
            return None
        by_id = next(
            (item for item in indexers if isinstance(item, dict) and item.get("id") == indexer_id),
            None,
        )
        if by_id and by_id.get("id") is not None:
            logger.debug("%s indexer id match: %s", service_label, by_id.get("id"))
            return int(by_id["id"])
        return None
    target = indexer_name.lower().strip()
    target_compact = _normalize_indexer_name(indexer_name)
    exact = next(
        (
            item
            for item in indexers
            if isinstance(item, dict)
            and str(item.get("name", "")).lower().strip() == target
        ),
        None,
    )
    if exact and exact.get("id") is not None:
        logger.debug("%s indexer match: '%s' -> %s", service_label, indexer_name, exact.get("id"))
        return int(exact["id"])
    compact = next(
        (
            item
            for item in indexers
            if isinstance(item, dict)
            and _normalize_indexer_name(str(item.get("name", ""))) == target_compact
        ),
        None,
    )
    if compact and compact.get("id") is not None:
        logger.debug("%s indexer compact match: '%s' -> %s", service_label, indexer_name, compact.get("id"))
        return int(compact["id"])
    contains = next(
        (
            item
            for item in indexers
            if isinstance(item, dict)
            and target in str(item.get("name", "")).lower()
        ),
        None,
    )
    if contains and contains.get("id") is not None:
        logger.debug("%s indexer contains match: '%s' -> %s", service_label, indexer_name, contains.get("id"))
        return int(contains["id"])
    logger.warning(
        "%s indexer not found for name '%s'. Check indexer names in the Arr app.",
        service_label,
        indexer_name,
    )
    return None


async def _fallback_qbittorrent_download(download_url: Optional[str], category: str) -> bool:
    if not download_url:
        return False
    runtime = get_runtime_settings()
    client = QBittorrentClient(
        runtime.qbittorrent_base_url,
        runtime.qbittorrent_username,
        runtime.qbittorrent_password,
    )
    if not client.configured():
        return False
    await client.add_torrent_url(download_url, category=category)
    return True


def _resolve_qbittorrent_category(value: Optional[str], default: str) -> str:
    if isinstance(value, str):
        cleaned = value.strip()
        if cleaned:
            return cleaned
    return default


def _filter_prowlarr_results(results: Any, request_type: RequestType) -> List[Dict[str, Any]]:
    if not isinstance(results, list):
        return []
    keep = []
    for item in results:
        if not isinstance(item, dict):
            continue
        categories = _normalize_categories(item.get("categories"))
        if request_type == RequestType.movie:
            if not any("movies" in name for name in categories):
                continue
        elif request_type == RequestType.tv:
            if not any(name.startswith("tv") or "tv/" in name for name in categories):
                continue
        keep.append(
            {
                "title": item.get("title"),
                "indexer": item.get("indexer"),
                "indexerId": item.get("indexerId"),
                "guid": item.get("guid"),
                "size": item.get("size"),
                "seeders": item.get("seeders"),
                "leechers": item.get("leechers"),
                "publishDate": item.get("publishDate"),
                "infoUrl": item.get("infoUrl"),
                "downloadUrl": item.get("downloadUrl"),
                "protocol": item.get("protocol"),
            }
        )
    # Best-seeded releases first; cap at ten.
    keep.sort(key=lambda entry: (entry.get("seeders") or 0), reverse=True)
    return keep[:10]


def _missing_episode_ids_by_season(episodes: Any) -> Dict[int, List[int]]:
    if not isinstance(episodes, list):
        return {}
    grouped: Dict[int, List[int]] = {}
    for episode in episodes:
        if not isinstance(episode, dict):
            continue
        if not episode.get("monitored", True):
            continue
        if episode.get("hasFile"):
            continue
        season_number = episode.get("seasonNumber")
        episode_id = episode.get("id")
        if isinstance(season_number, int) and isinstance(episode_id, int):
            grouped.setdefault(season_number, []).append(episode_id)
    return grouped


async def _resolve_root_folder_path(client: Any, root_folder: str, service_name: str) -> str:
    if root_folder.isdigit():
        folders = await client.get_root_folders()
        if isinstance(folders, list):
            for folder in folders:
                if not isinstance(folder, dict):
                    continue
                if folder.get("id") == int(root_folder):
                    path = folder.get("path")
                    if isinstance(path, str) and path:
                        return path
        raise HTTPException(status_code=400, detail=f"{service_name} root folder id {root_folder} not found")
    return root_folder
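

# --- API endpoints ---
# All routes below also inherit the router-level get_current_user dependency.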
@router.get("/{request_id}/snapshot", response_model=Snapshot)
|
|
async def get_snapshot(request_id: str, user: Dict[str, str] = Depends(get_current_user)) -> Snapshot:
|
|
runtime = get_runtime_settings()
|
|
client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
|
|
if client.configured():
|
|
await _ensure_request_access(client, int(request_id), user)
|
|
return await build_snapshot(request_id)
|
|
|
|
|
|
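

# Recent requests for the dashboard: served from the in-process recent cache,
# hydrated from Jellyseerr only when the data source is "always_js", and
# upgraded to "Available" when the title is already present in Jellyfin.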
@router.get("/recent")
|
|
async def recent_requests(
|
|
take: int = 6,
|
|
skip: int = 0,
|
|
days: int = 90,
|
|
user: Dict[str, str] = Depends(get_current_user),
|
|
) -> dict:
|
|
runtime = get_runtime_settings()
|
|
client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
|
|
mode = (runtime.requests_data_source or "prefer_cache").lower()
|
|
allow_remote = mode == "always_js"
|
|
if allow_remote:
|
|
if not client.configured():
|
|
raise HTTPException(status_code=400, detail="Jellyseerr not configured")
|
|
try:
|
|
await _ensure_requests_cache(client)
|
|
except httpx.HTTPStatusError as exc:
|
|
raise HTTPException(status_code=502, detail=str(exc)) from exc
|
|
|
|
username_norm = _normalize_username(user.get("username", ""))
|
|
requested_by_id = user.get("jellyseerr_user_id")
|
|
requested_by = None if user.get("role") == "admin" else username_norm
|
|
requested_by_id = None if user.get("role") == "admin" else requested_by_id
|
|
since_iso = None
|
|
if days > 0:
|
|
since_iso = (datetime.now(timezone.utc) - timedelta(days=days)).isoformat()
|
|
if _recent_cache_stale():
|
|
_refresh_recent_cache_from_db()
|
|
rows = _get_recent_from_cache(requested_by, requested_by_id, take, skip, since_iso)
|
|
cache_mode = (runtime.artwork_cache_mode or "remote").lower()
|
|
allow_title_hydrate = False
|
|
allow_artwork_hydrate = client.configured()
|
|
jellyfin = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key)
|
|
jellyfin_cache: Dict[str, bool] = {}
|
|
|
|
async def _jellyfin_available(
|
|
title_value: Optional[str], year_value: Optional[int], media_type_value: Optional[str]
|
|
) -> bool:
|
|
if not jellyfin.configured() or not title_value:
|
|
return False
|
|
cache_key = f"{media_type_value or ''}:{title_value.lower()}:{year_value or ''}"
|
|
cached_value = jellyfin_cache.get(cache_key)
|
|
if cached_value is not None:
|
|
return cached_value
|
|
types = ["Movie"] if media_type_value == "movie" else ["Series"]
|
|
try:
|
|
search = await jellyfin.search_items(title_value, types)
|
|
except Exception:
|
|
jellyfin_cache[cache_key] = False
|
|
return False
|
|
if isinstance(search, dict):
|
|
items = search.get("Items") or search.get("items") or []
|
|
for item in items:
|
|
if not isinstance(item, dict):
|
|
continue
|
|
name = item.get("Name") or item.get("title")
|
|
year = item.get("ProductionYear") or item.get("Year")
|
|
if name and name.strip().lower() == title_value.strip().lower():
|
|
if year_value and year and int(year) != int(year_value):
|
|
continue
|
|
jellyfin_cache[cache_key] = True
|
|
return True
|
|
jellyfin_cache[cache_key] = False
|
|
return False
|
|
results = []
|
|
for row in rows:
|
|
status = row.get("status")
|
|
title = row.get("title")
|
|
title_is_placeholder = (
|
|
isinstance(title, str)
|
|
and row.get("request_id") is not None
|
|
and title.strip().lower() == f"request {row.get('request_id')}"
|
|
)
|
|
year = row.get("year")
|
|
details = None
|
|
if row.get("request_id") and mode != "always_js":
|
|
cached_payload = get_request_cache_payload(int(row["request_id"]))
|
|
if isinstance(cached_payload, dict):
|
|
details = cached_payload
|
|
if (not title or title_is_placeholder) and row.get("request_id"):
|
|
if details is None and (allow_remote or allow_title_hydrate):
|
|
details = await _get_request_details(client, int(row["request_id"]))
|
|
if isinstance(details, dict):
|
|
payload = _parse_request_payload(details)
|
|
title = payload.get("title") or title
|
|
year = payload.get("year") or year
|
|
if not title and payload.get("tmdb_id") and (allow_remote or allow_title_hydrate):
|
|
hydrated_title, hydrated_year = await _hydrate_title_from_tmdb(
|
|
client, payload.get("media_type"), payload.get("tmdb_id")
|
|
)
|
|
if hydrated_title:
|
|
title = hydrated_title
|
|
if hydrated_year:
|
|
year = hydrated_year
|
|
if allow_remote and isinstance(payload.get("request_id"), int):
|
|
upsert_request_cache(
|
|
request_id=payload.get("request_id"),
|
|
media_id=payload.get("media_id"),
|
|
media_type=payload.get("media_type"),
|
|
status=payload.get("status"),
|
|
title=title or payload.get("title"),
|
|
year=year or payload.get("year"),
|
|
requested_by=payload.get("requested_by"),
|
|
requested_by_norm=payload.get("requested_by_norm"),
|
|
requested_by_id=payload.get("requested_by_id"),
|
|
created_at=payload.get("created_at"),
|
|
updated_at=payload.get("updated_at"),
|
|
payload_json=json.dumps(details, ensure_ascii=True),
|
|
)
|
|
row["title"] = title
|
|
row["year"] = year
|
|
row["media_type"] = payload.get("media_type") or row.get("media_type")
|
|
row["status"] = payload.get("status") or row.get("status")
|
|
if details is None and row.get("request_id") and allow_remote:
|
|
details = await _get_request_details(client, int(row["request_id"]))
|
|
|
|
poster_path = None
|
|
backdrop_path = None
|
|
if isinstance(details, dict):
|
|
media = details.get("media") or {}
|
|
if isinstance(media, dict):
|
|
poster_path = media.get("posterPath") or media.get("poster_path")
|
|
backdrop_path = media.get("backdropPath") or media.get("backdrop_path")
|
|
tmdb_id = media.get("tmdbId") or details.get("tmdbId")
|
|
else:
|
|
tmdb_id = details.get("tmdbId")
|
|
media_type = media.get("mediaType") if isinstance(media, dict) else None
|
|
media_type = media_type or details.get("type") or row.get("media_type")
|
|
if not poster_path and tmdb_id and allow_artwork_hydrate:
|
|
hydrated_poster, hydrated_backdrop = await _hydrate_artwork_from_tmdb(
|
|
client, media_type, tmdb_id
|
|
)
|
|
poster_path = poster_path or hydrated_poster
|
|
backdrop_path = backdrop_path or hydrated_backdrop
|
|
if (hydrated_poster or hydrated_backdrop) and isinstance(details, dict):
|
|
media = dict(media) if isinstance(media, dict) else {}
|
|
if hydrated_poster:
|
|
media["posterPath"] = hydrated_poster
|
|
if hydrated_backdrop:
|
|
media["backdropPath"] = hydrated_backdrop
|
|
details["media"] = media
|
|
payload = _parse_request_payload(details)
|
|
if isinstance(payload.get("request_id"), int):
|
|
upsert_request_cache(
|
|
request_id=payload.get("request_id"),
|
|
media_id=payload.get("media_id"),
|
|
media_type=payload.get("media_type"),
|
|
status=payload.get("status"),
|
|
title=payload.get("title"),
|
|
year=payload.get("year"),
|
|
requested_by=payload.get("requested_by"),
|
|
requested_by_norm=payload.get("requested_by_norm"),
|
|
requested_by_id=payload.get("requested_by_id"),
|
|
created_at=payload.get("created_at"),
|
|
updated_at=payload.get("updated_at"),
|
|
payload_json=json.dumps(details, ensure_ascii=True),
|
|
)
        status_label = _status_label(status)
        if status_label == "Working on it":
            is_available = await _jellyfin_available(title, year, row.get("media_type"))
            if is_available:
                status_label = "Available"
        results.append(
            {
                "id": row.get("request_id"),
                "title": title,
                "year": year,
                "type": row.get("media_type"),
                "status": status,
                "statusLabel": status_label,
                "mediaId": row.get("media_id"),
                "artwork": {
                    "poster_url": _artwork_url(poster_path, "w185", cache_mode),
                    "backdrop_url": _artwork_url(backdrop_path, "w780", cache_mode),
                },
            }
        )

    return {"results": results}


@router.get("/search")
async def search_requests(
    query: str, page: int = 1, user: Dict[str, str] = Depends(get_current_user)
) -> dict:
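    """Search Jellyseerr and overlay request status from the local request cache.

    Illustrative result shape (fields mirror the dict built below):
        {"title": "...", "year": 2021, "type": "movie", "tmdbId": 603,
         "requestId": 45, "status": 2, "statusLabel": "Approved"}
    """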
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if not client.configured():
        raise HTTPException(status_code=400, detail="Jellyseerr not configured")

    try:
        response = await client.search(query=query, page=page)
    except httpx.HTTPStatusError as exc:
        raise HTTPException(status_code=502, detail=str(exc)) from exc

    if not isinstance(response, dict):
        return {"results": []}

    try:
        await _ensure_requests_cache(client)
    except httpx.HTTPStatusError:
        pass

    results = []
    for item in response.get("results", []):
        media_type = item.get("mediaType")
        title = item.get("title") or item.get("name")
        year = None
        # Dates arrive as "YYYY-MM-DD"; guard against malformed values so one
        # bad record cannot fail the whole search. firstAirDate wins for TV.
        for date_key in ("releaseDate", "firstAirDate"):
            date_value = item.get(date_key)
            if isinstance(date_value, str) and date_value[:4].isdigit():
                year = int(date_value[:4])

        request_id = None
        status = None
        status_label = None
        media_info = item.get("mediaInfo") or {}
        media_info_id = media_info.get("id")
        requests = media_info.get("requests")
        if isinstance(requests, list) and requests:
            request_id = requests[0].get("id")
            status = requests[0].get("status")
            status_label = _status_label(status)
        elif isinstance(media_info_id, int):
            username_norm = _normalize_username(user.get("username", ""))
            requested_by_id = user.get("jellyseerr_user_id")
            requested_by = None if user.get("role") == "admin" else username_norm
            requested_by_id = None if user.get("role") == "admin" else requested_by_id
            cached = get_cached_request_by_media_id(
                media_info_id,
                requested_by_norm=requested_by,
                requested_by_id=requested_by_id,
            )
            if cached:
                request_id = cached.get("request_id")
                status = cached.get("status")
                status_label = _status_label(status)

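        # Non-admins may only see their own requests; confirm ownership with
        # Jellyseerr before including an item.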
        if user.get("role") != "admin":
            if isinstance(request_id, int):
                details = await _get_request_details(client, request_id)
                if not _request_matches_user(details, user.get("username", "")):
                    continue
            else:
                continue

        results.append(
            {
                "title": title,
                "year": year,
                "type": media_type,
                "tmdbId": item.get("id"),
                "requestId": request_id,
                "status": status,
                "statusLabel": status_label,
            }
        )

    return {"results": results}


@router.post("/{request_id}/ai/triage", response_model=TriageResult)
async def ai_triage(request_id: str, user: Dict[str, str] = Depends(get_current_user)) -> TriageResult:
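    """Run AI triage over a freshly built snapshot of the request."""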
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if client.configured():
        await _ensure_request_access(client, int(request_id), user)
    snapshot = await build_snapshot(request_id)
    return triage_snapshot(snapshot)


@router.post("/{request_id}/actions/search")
async def action_search(request_id: str, user: Dict[str, str] = Depends(get_current_user)) -> dict:
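    """Search Prowlarr for candidate releases and record the action."""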
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if client.configured():
        await _ensure_request_access(client, int(request_id), user)
    snapshot = await build_snapshot(request_id)
    prowlarr_results: List[Dict[str, Any]] = []
    prowlarr = ProwlarrClient(runtime.prowlarr_base_url, runtime.prowlarr_api_key)
    if not prowlarr.configured():
        raise HTTPException(status_code=400, detail="Prowlarr not configured")
    query = snapshot.title
    if snapshot.year:
        query = f"{query} {snapshot.year}"
    try:
        results = await prowlarr.search(query=query)
        prowlarr_results = _filter_prowlarr_results(results, snapshot.request_type)
    except httpx.HTTPStatusError:
        prowlarr_results = []

    await asyncio.to_thread(
        save_action,
        request_id,
        "search_releases",
        "Search and choose a download",
        "ok",
        f"Found {len(prowlarr_results)} releases.",
    )
    return {"status": "ok", "releases": prowlarr_results}


@router.post("/{request_id}/actions/search_auto")
async def action_search_auto(request_id: str, user: Dict[str, str] = Depends(get_current_user)) -> dict:
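    """Trigger an automatic search in Sonarr (missing episodes) or Radarr (movie)."""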
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if client.configured():
        await _ensure_request_access(client, int(request_id), user)
    snapshot = await build_snapshot(request_id)
    arr_item = snapshot.raw.get("arr", {}).get("item")
    if not isinstance(arr_item, dict):
        raise HTTPException(status_code=404, detail="Item not found in Sonarr/Radarr")

    if snapshot.request_type.value == "tv":
        client = SonarrClient(runtime.sonarr_base_url, runtime.sonarr_api_key)
        if not client.configured():
            raise HTTPException(status_code=400, detail="Sonarr not configured")
        episodes = await client.get_episodes(int(arr_item["id"]))
        missing_by_season = _missing_episode_ids_by_season(episodes)
        if not missing_by_season:
            message = "No missing monitored episodes found."
            await asyncio.to_thread(
                save_action, request_id, "search_auto", "Search and auto-download", "ok", message
            )
            return {"status": "ok", "message": message, "searched": []}
        responses = []
        for season_number in sorted(missing_by_season.keys()):
            episode_ids = missing_by_season[season_number]
            if episode_ids:
                response = await client.search_episodes(episode_ids)
                responses.append(
                    {"season": season_number, "episodeCount": len(episode_ids), "response": response}
                )
        message = "Search sent to Sonarr."
        await asyncio.to_thread(
            save_action, request_id, "search_auto", "Search and auto-download", "ok", message
        )
        return {"status": "ok", "message": message, "searched": responses}

    if snapshot.request_type.value == "movie":
        client = RadarrClient(runtime.radarr_base_url, runtime.radarr_api_key)
        if not client.configured():
            raise HTTPException(status_code=400, detail="Radarr not configured")
        response = await client.search(int(arr_item["id"]))
        message = "Search sent to Radarr."
        await asyncio.to_thread(
            save_action, request_id, "search_auto", "Search and auto-download", "ok", message
        )
        return {"status": "ok", "message": message, "response": response}

    raise HTTPException(status_code=400, detail="Unknown request type")


@router.post("/{request_id}/actions/qbit/resume")
async def action_resume(request_id: str, user: Dict[str, str] = Depends(get_current_user)) -> dict:
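    """Force-resume the request's torrents in qBittorrent unless they are already downloading."""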
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if client.configured():
        await _ensure_request_access(client, int(request_id), user)
    snapshot = await build_snapshot(request_id)
    queue = snapshot.raw.get("arr", {}).get("queue")
    download_ids = _download_ids(_queue_records(queue))
    if not download_ids:
        message = "Nothing to force resume."
        await asyncio.to_thread(
            save_action, request_id, "resume_torrent", "Resume torrent", "ok", message
        )
        return {"status": "ok", "message": message}

    client = QBittorrentClient(
        runtime.qbittorrent_base_url,
        runtime.qbittorrent_username,
        runtime.qbittorrent_password,
    )
    if not client.configured():
        raise HTTPException(status_code=400, detail="qBittorrent not configured")

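    # qBittorrent states that count as already being handled by the download
    # queue; if every torrent is in one of them, force-resume is skipped as a no-op.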
    try:
        torrents = await client.get_torrents_by_hashes("|".join(download_ids))
        torrent_list = torrents if isinstance(torrents, list) else []
        downloading_states = {"downloading", "stalleddl", "queueddl", "checkingdl", "forceddl"}
        if torrent_list and all(
            str(t.get("state", "")).lower() in downloading_states for t in torrent_list
        ):
            message = "No need to force resume. Already downloading."
            await asyncio.to_thread(
                save_action, request_id, "resume_torrent", "Resume torrent", "ok", message
            )
            return {"status": "ok", "message": message}
        await client.resume_torrents("|".join(download_ids))
    except httpx.HTTPStatusError as exc:
        raise HTTPException(status_code=502, detail=str(exc)) from exc
    message = "Resume sent to qBittorrent."
    await asyncio.to_thread(
        save_action, request_id, "resume_torrent", "Resume torrent", "ok", message
    )
    return {"status": "ok", "resumed": download_ids, "message": message}


@router.post("/{request_id}/actions/readd")
async def action_readd(request_id: str, user: Dict[str, str] = Depends(get_current_user)) -> dict:
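    """Re-add the requested series/movie to Sonarr/Radarr, skipping items that already exist."""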
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if client.configured():
        await _ensure_request_access(client, int(request_id), user)
    snapshot = await build_snapshot(request_id)
    jelly = snapshot.raw.get("jellyseerr") or {}
    media = jelly.get("media") or {}

    if snapshot.request_type.value == "tv":
        tvdb_id = media.get("tvdbId")
        if not tvdb_id:
            raise HTTPException(status_code=400, detail="Missing tvdbId for series")
        title = snapshot.title
        if title in {None, "", "Unknown"}:
            title = (
                media.get("name")
                or media.get("title")
                or jelly.get("title")
                or jelly.get("name")
            )
        if not runtime.sonarr_quality_profile_id or not runtime.sonarr_root_folder:
            raise HTTPException(status_code=400, detail="Sonarr profile/root not configured")
        client = SonarrClient(runtime.sonarr_base_url, runtime.sonarr_api_key)
        if not client.configured():
            raise HTTPException(status_code=400, detail="Sonarr not configured")
        try:
            existing = await client.get_series_by_tvdb_id(int(tvdb_id))
        except httpx.HTTPStatusError as exc:
            detail = _format_upstream_error("Sonarr", exc)
            await asyncio.to_thread(
                save_action, request_id, "readd_to_arr", "Re-add to Sonarr/Radarr", "failed", detail
            )
            raise HTTPException(status_code=502, detail=detail) from exc
        if isinstance(existing, list) and existing:
            series_id = existing[0].get("id")
            message = f"Already in Sonarr (seriesId {series_id})."
            await asyncio.to_thread(
                save_action, request_id, "readd_to_arr", "Re-add to Sonarr/Radarr", "ok", message
            )
            return {"status": "ok", "message": message, "seriesId": series_id}
        root_folder = await _resolve_root_folder_path(client, runtime.sonarr_root_folder, "Sonarr")
        try:
            response = await client.add_series(
                int(tvdb_id), runtime.sonarr_quality_profile_id, root_folder, title=title
            )
        except httpx.HTTPStatusError as exc:
            detail = _format_upstream_error("Sonarr", exc)
            await asyncio.to_thread(
                save_action, request_id, "readd_to_arr", "Re-add to Sonarr/Radarr", "failed", detail
            )
            raise HTTPException(status_code=502, detail=detail) from exc
        await asyncio.to_thread(
            save_action,
            request_id,
            "readd_to_arr",
            "Re-add to Sonarr/Radarr",
            "ok",
            f"Re-added in Sonarr to {root_folder}.",
        )
        return {"status": "ok", "response": response, "rootFolder": root_folder}

    if snapshot.request_type.value == "movie":
        tmdb_id = media.get("tmdbId")
        if not tmdb_id:
            raise HTTPException(status_code=400, detail="Missing tmdbId for movie")
        if not runtime.radarr_quality_profile_id or not runtime.radarr_root_folder:
            raise HTTPException(status_code=400, detail="Radarr profile/root not configured")
        client = RadarrClient(runtime.radarr_base_url, runtime.radarr_api_key)
        if not client.configured():
            raise HTTPException(status_code=400, detail="Radarr not configured")
        try:
            existing = await client.get_movie_by_tmdb_id(int(tmdb_id))
        except httpx.HTTPStatusError as exc:
            detail = _format_upstream_error("Radarr", exc)
            await asyncio.to_thread(
                save_action, request_id, "readd_to_arr", "Re-add to Sonarr/Radarr", "failed", detail
            )
            raise HTTPException(status_code=502, detail=detail) from exc
        if isinstance(existing, list) and existing:
            movie_id = existing[0].get("id")
            message = f"Already in Radarr (movieId {movie_id})."
            await asyncio.to_thread(
                save_action, request_id, "readd_to_arr", "Re-add to Sonarr/Radarr", "ok", message
            )
            return {"status": "ok", "message": message, "movieId": movie_id}
        root_folder = await _resolve_root_folder_path(client, runtime.radarr_root_folder, "Radarr")
        try:
            response = await client.add_movie(
                int(tmdb_id), runtime.radarr_quality_profile_id, root_folder
            )
        except httpx.HTTPStatusError as exc:
            detail = _format_upstream_error("Radarr", exc)
            await asyncio.to_thread(
                save_action, request_id, "readd_to_arr", "Re-add to Sonarr/Radarr", "failed", detail
            )
            raise HTTPException(status_code=502, detail=detail) from exc
        await asyncio.to_thread(
            save_action,
            request_id,
            "readd_to_arr",
            "Re-add to Sonarr/Radarr",
            "ok",
            f"Re-added in Radarr to {root_folder}.",
        )
        return {"status": "ok", "response": response, "rootFolder": root_folder}

    raise HTTPException(status_code=400, detail="Unknown request type")


@router.get("/{request_id}/history")
async def request_history(
    request_id: str, limit: int = 10, user: Dict[str, str] = Depends(get_current_user)
) -> dict:
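    """Return the most recent stored snapshots for a request."""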
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if client.configured():
        await _ensure_request_access(client, int(request_id), user)
    snapshots = await asyncio.to_thread(get_recent_snapshots, request_id, limit)
    return {"snapshots": snapshots}


@router.get("/{request_id}/actions")
async def request_actions(
    request_id: str, limit: int = 10, user: Dict[str, str] = Depends(get_current_user)
) -> dict:
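    """Return the most recent recorded actions for a request."""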
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if client.configured():
        await _ensure_request_access(client, int(request_id), user)
    actions = await asyncio.to_thread(get_recent_actions, request_id, limit)
    return {"actions": actions}


@router.post("/{request_id}/actions/grab")
async def action_grab(
    request_id: str, payload: Dict[str, Any], user: Dict[str, str] = Depends(get_current_user)
) -> dict:
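    """Queue a specific release in qBittorrent.

    Illustrative payload (keys mirror what is parsed below; only guid,
    indexerId, and downloadUrl are required):
        {"guid": "...", "indexerId": 12, "downloadUrl": "magnet:?xt=...",
         "title": "Example.Release.1080p", "protocol": "torrent"}
    """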
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if client.configured():
        await _ensure_request_access(client, int(request_id), user)
    snapshot = await build_snapshot(request_id)
    guid = payload.get("guid")
    indexer_id = payload.get("indexerId")
    indexer_name = payload.get("indexerName") or payload.get("indexer")
    download_url = payload.get("downloadUrl")
    release_title = payload.get("title")
    release_size = payload.get("size")
    release_protocol = payload.get("protocol") or "torrent"
    release_publish = payload.get("publishDate")
    release_seeders = payload.get("seeders")
    release_leechers = payload.get("leechers")
    if not guid or not indexer_id:
        raise HTTPException(status_code=400, detail="Missing guid or indexerId")

    logger.info(
        "Grab requested: request_id=%s guid=%s indexer_id=%s indexer_name=%s has_download_url=%s has_title=%s",
        request_id,
        guid,
        indexer_id,
        indexer_name,
        bool(download_url),
        bool(release_title),
    )

    if not download_url:
        raise HTTPException(status_code=400, detail="Missing downloadUrl")
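    # Route the download into the Sonarr/Radarr qBittorrent category so the
    # matching *arr app can pick it up and import it.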
    if snapshot.request_type.value == "tv":
        category = _resolve_qbittorrent_category(runtime.sonarr_qbittorrent_category, "sonarr")
    elif snapshot.request_type.value == "movie":
        category = _resolve_qbittorrent_category(runtime.radarr_qbittorrent_category, "radarr")
    else:
        raise HTTPException(status_code=400, detail="Unknown request type")

    qbittorrent_added = await _fallback_qbittorrent_download(download_url, category)
    if not qbittorrent_added:
        raise HTTPException(status_code=400, detail="Failed to add torrent to qBittorrent")
    action_message = f"Grab sent to qBittorrent (category {category})."
    await asyncio.to_thread(
        save_action, request_id, "grab", "Grab release", "ok", action_message
    )
    return {"status": "ok", "response": {"qbittorrent": "queued"}}