Persist Seerr media failure suppression and reduce sync error noise

This commit is contained in:
2026-03-01 22:53:38 +13:00
parent aae2c3d418
commit b068a6066e
8 changed files with 389 additions and 67 deletions

View File

@@ -1 +1 @@
0103262231 0103262251

View File

@@ -1,7 +1,8 @@
BUILD_NUMBER = "0103262231" BUILD_NUMBER = "0103262251"
CHANGELOG = '2026-01-22\\n- Initial commit\\n- Ignore build artifacts\\n- Update README\\n- Update README with Docker-first guide\\n\\n2026-01-23\\n- Fix cache titles via Seerr media lookup\\n- Split search actions and improve download options\\n- Fallback manual grab to qBittorrent\\n- Hide header actions when signed out\\n- Add feedback form and webhook\\n- Fix cache titles and move feedback link\\n- Show available status on landing when in Jellyfin\\n- Add default branding assets when missing\\n- Use bundled branding assets\\n- Remove password fields from users page\\n- Add Docker Hub compose override\\n- Fix backend Dockerfile paths for root context\\n- Copy public assets into frontend image\\n- Use backend branding assets for logo and favicon\\n\\n2026-01-24\\n- Route grabs through Sonarr/Radarr only\\n- Document fix buttons in how-it-works\\n- Clarify how-it-works steps and fixes\\n- Map Prowlarr releases to Arr indexers for manual grab\\n- Improve request handling and qBittorrent categories\\n\\n2026-01-25\\n- Add site banner, build number, and changelog\\n- Automate build number tagging and sync\\n- Improve mobile header layout\\n- Move account actions into avatar menu\\n- Add user stats and activity tracking\\n- Add Jellyfin login cache and admin-only stats\\n- Tidy request sync controls\\n- Seed branding logo from bundled assets\\n- Serve bundled branding assets by default\\n- Harden request cache titles and cache-only reads\\n- Build 2501262041\\n\\n2026-01-26\\n- Fix cache title hydration\\n- Fix sync progress bar animation\\n\\n2026-01-27\\n- Add cache control artwork stats\\n- Improve cache stats performance (build 271261145)\\n- Fix backend cache stats import (build 271261149)\\n- Clarify request sync settings (build 271261159)\\n- Bump build number to 271261202\\n- Fix request titles in snapshots (build 271261219)\\n- Fix snapshot title fallback (build 271261228)\\n- Add cache load spinner (build 271261238)\\n- Bump build number (process 2) 
271261322\\n- Add service test buttons (build 271261335)\\n- Fallback to TMDB when artwork cache fails (build 271261524)\\n- Hydrate missing artwork from Seerr (build 271261539)\\n\\n2026-01-29\\n- release: 2901262036\\n- release: 2901262044\\n- release: 2901262102\\n- Hardcode build number in backend\\n- Bake build number and changelog\\n- Update full changelog\\n- Tidy full changelog\\n- Build 2901262240: cache users\n\n2026-01-30\n- Merge backend and frontend into one container' CHANGELOG = '2026-01-22\\n- Initial commit\\n- Ignore build artifacts\\n- Update README\\n- Update README with Docker-first guide\\n\\n2026-01-23\\n- Fix cache titles via Seerr media lookup\\n- Split search actions and improve download options\\n- Fallback manual grab to qBittorrent\\n- Hide header actions when signed out\\n- Add feedback form and webhook\\n- Fix cache titles and move feedback link\\n- Show available status on landing when in Jellyfin\\n- Add default branding assets when missing\\n- Use bundled branding assets\\n- Remove password fields from users page\\n- Add Docker Hub compose override\\n- Fix backend Dockerfile paths for root context\\n- Copy public assets into frontend image\\n- Use backend branding assets for logo and favicon\\n\\n2026-01-24\\n- Route grabs through Sonarr/Radarr only\\n- Document fix buttons in how-it-works\\n- Clarify how-it-works steps and fixes\\n- Map Prowlarr releases to Arr indexers for manual grab\\n- Improve request handling and qBittorrent categories\\n\\n2026-01-25\\n- Add site banner, build number, and changelog\\n- Automate build number tagging and sync\\n- Improve mobile header layout\\n- Move account actions into avatar menu\\n- Add user stats and activity tracking\\n- Add Jellyfin login cache and admin-only stats\\n- Tidy request sync controls\\n- Seed branding logo from bundled assets\\n- Serve bundled branding assets by default\\n- Harden request cache titles and cache-only reads\\n- Build 2501262041\\n\\n2026-01-26\\n- Fix cache 
title hydration\\n- Fix sync progress bar animation\\n\\n2026-01-27\\n- Add cache control artwork stats\\n- Improve cache stats performance (build 271261145)\\n- Fix backend cache stats import (build 271261149)\\n- Clarify request sync settings (build 271261159)\\n- Bump build number to 271261202\\n- Fix request titles in snapshots (build 271261219)\\n- Fix snapshot title fallback (build 271261228)\\n- Add cache load spinner (build 271261238)\\n- Bump build number (process 2) 271261322\\n- Add service test buttons (build 271261335)\\n- Fallback to TMDB when artwork cache fails (build 271261524)\\n- Hydrate missing artwork from Seerr (build 271261539)\\n\\n2026-01-29\\n- release: 2901262036\\n- release: 2901262044\\n- release: 2901262102\\n- Hardcode build number in backend\\n- Bake build number and changelog\\n- Update full changelog\\n- Tidy full changelog\\n- Build 2901262240: cache users\n\n2026-01-30\n- Merge backend and frontend into one container'

View File

@@ -18,6 +18,17 @@ class ApiClient:
def headers(self) -> Dict[str, str]: def headers(self) -> Dict[str, str]:
return {"X-Api-Key": self.api_key} if self.api_key else {} return {"X-Api-Key": self.api_key} if self.api_key else {}
def _response_summary(self, response: Optional[httpx.Response]) -> Optional[Any]:
    """Build a compact, log-safe summary of an HTTP response body.

    Prefers the sanitized JSON payload; falls back to the sanitized raw text
    when the body does not parse as JSON. String summaries longer than 500
    characters are truncated with a trailing ellipsis to keep logs readable.
    """
    if response is None:
        return None
    try:
        summary = sanitize_value(response.json())
    except ValueError:
        # Body was not valid JSON; fall back to the raw text.
        summary = sanitize_value(response.text)
    if isinstance(summary, str) and len(summary) > 500:
        summary = f"{summary[:500]}..."
    return summary
async def _request( async def _request(
self, self,
method: str, method: str,
@@ -60,6 +71,20 @@ class ApiClient:
if not response.content: if not response.content:
return None return None
return response.json() return response.json()
except httpx.HTTPStatusError as exc:
duration_ms = round((time.perf_counter() - started_at) * 1000, 2)
response = exc.response
status = response.status_code if response is not None else "unknown"
log_fn = self.logger.error if isinstance(status, int) and status >= 500 else self.logger.warning
log_fn(
"outbound request returned error method=%s url=%s status=%s duration_ms=%s response=%s",
method,
url,
status,
duration_ms,
self._response_summary(response),
)
raise
except Exception: except Exception:
duration_ms = round((time.perf_counter() - started_at) * 1000, 2) duration_ms = round((time.perf_counter() - started_at) * 1000, 2)
self.logger.exception( self.logger.exception(

View File

@@ -11,6 +11,11 @@ from .security import hash_password, verify_password
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
# Suppression tiers for failed Seerr media lookups (used by
# record_seerr_media_failure): the first failure suppresses retries for a few
# hours, a repeat failure for a day, and once the failure count reaches the
# persistent threshold the entry is marked persistent and parked for a month.
SEERR_MEDIA_FAILURE_SHORT_SUPPRESS_HOURS = 6
SEERR_MEDIA_FAILURE_RETRY_SUPPRESS_HOURS = 24
SEERR_MEDIA_FAILURE_PERSISTENT_SUPPRESS_DAYS = 30
SEERR_MEDIA_FAILURE_PERSISTENT_THRESHOLD = 3
def _db_path() -> str: def _db_path() -> str:
path = settings.sqlite_path or "data/magent.db" path = settings.sqlite_path or "data/magent.db"
@@ -271,6 +276,22 @@ def init_db() -> None:
) )
""" """
) )
conn.execute(
"""
CREATE TABLE IF NOT EXISTS seerr_media_failures (
media_type TEXT NOT NULL,
tmdb_id INTEGER NOT NULL,
status_code INTEGER,
error_message TEXT,
failure_count INTEGER NOT NULL DEFAULT 1,
first_failed_at TEXT NOT NULL,
last_failed_at TEXT NOT NULL,
suppress_until TEXT NOT NULL,
is_persistent INTEGER NOT NULL DEFAULT 0,
PRIMARY KEY (media_type, tmdb_id)
)
"""
)
conn.execute( conn.execute(
""" """
CREATE INDEX IF NOT EXISTS idx_requests_cache_created_at CREATE INDEX IF NOT EXISTS idx_requests_cache_created_at
@@ -289,6 +310,12 @@ def init_db() -> None:
ON artwork_cache_status (updated_at) ON artwork_cache_status (updated_at)
""" """
) )
conn.execute(
"""
CREATE INDEX IF NOT EXISTS idx_seerr_media_failures_suppress_until
ON seerr_media_failures (suppress_until)
"""
)
conn.execute( conn.execute(
""" """
CREATE TABLE IF NOT EXISTS user_activity ( CREATE TABLE IF NOT EXISTS user_activity (
@@ -2226,6 +2253,154 @@ def get_settings_overrides() -> Dict[str, str]:
return overrides return overrides
def get_seerr_media_failure(media_type: Optional[str], tmdb_id: Optional[int]) -> Optional[Dict[str, Any]]:
    """Look up the persisted Seerr failure record for one media item.

    Returns None when either identifier is missing, the tmdb id is not
    numeric, or no row exists for the (media_type, tmdb_id) pair.
    """
    if not media_type or not tmdb_id:
        return None
    media_key = str(media_type).strip().lower()
    try:
        tmdb_key = int(tmdb_id)
    except (TypeError, ValueError):
        return None
    with _connect() as conn:
        row = conn.execute(
            """
            SELECT media_type, tmdb_id, status_code, error_message, failure_count,
                   first_failed_at, last_failed_at, suppress_until, is_persistent
            FROM seerr_media_failures
            WHERE media_type = ? AND tmdb_id = ?
            """,
            (media_key, tmdb_key),
        ).fetchone()
    if row is None:
        return None
    # Column order matches the SELECT list above.
    keys = (
        "media_type",
        "tmdb_id",
        "status_code",
        "error_message",
        "failure_count",
        "first_failed_at",
        "last_failed_at",
        "suppress_until",
        "is_persistent",
    )
    record = dict(zip(keys, row))
    record["is_persistent"] = bool(record["is_persistent"])
    return record
def is_seerr_media_failure_suppressed(media_type: Optional[str], tmdb_id: Optional[int]) -> bool:
    """Return True while a recorded Seerr failure is inside its suppression window.

    A record whose window has already elapsed is deleted as a side effect, so
    stale suppressions do not accumulate in the table.
    """
    record = get_seerr_media_failure(media_type, tmdb_id)
    if record is None:
        return False
    deadline = _parse_datetime_value(record.get("suppress_until"))
    if deadline is not None and deadline > datetime.now(timezone.utc):
        return True
    clear_seerr_media_failure(media_type, tmdb_id)
    return False
def record_seerr_media_failure(
    media_type: Optional[str],
    tmdb_id: Optional[int],
    *,
    status_code: Optional[int] = None,
    error_message: Optional[str] = None,
) -> Dict[str, Any]:
    """Record (or escalate) a Seerr media lookup failure and set its suppression window.

    Escalation tiers (see the SEERR_MEDIA_FAILURE_* constants): the first
    failure suppresses retries briefly, a second failure for a day, and once
    the count reaches SEERR_MEDIA_FAILURE_PERSISTENT_THRESHOLD the record is
    flagged persistent and parked for a month.

    Returns the upserted payload dict, or {} when the identifiers are unusable.
    """
    if not media_type or not tmdb_id:
        return {}
    normalized_media_type = str(media_type).strip().lower()
    try:
        normalized_tmdb_id = int(tmdb_id)
    except (TypeError, ValueError):
        # Consistency fix: get_seerr_media_failure / clear_seerr_media_failure
        # silently ignore non-numeric ids; previously this helper raised instead,
        # which could abort a sync pass on one malformed request record.
        return {}
    now = datetime.now(timezone.utc)
    existing = get_seerr_media_failure(normalized_media_type, normalized_tmdb_id)
    failure_count = int(existing.get("failure_count", 0)) + 1 if existing else 1
    is_persistent = failure_count >= SEERR_MEDIA_FAILURE_PERSISTENT_THRESHOLD
    if is_persistent:
        suppress_until = now + timedelta(days=SEERR_MEDIA_FAILURE_PERSISTENT_SUPPRESS_DAYS)
    elif failure_count >= 2:
        suppress_until = now + timedelta(hours=SEERR_MEDIA_FAILURE_RETRY_SUPPRESS_HOURS)
    else:
        suppress_until = now + timedelta(hours=SEERR_MEDIA_FAILURE_SHORT_SUPPRESS_HOURS)
    payload = {
        "media_type": normalized_media_type,
        "tmdb_id": normalized_tmdb_id,
        "status_code": status_code,
        "error_message": error_message,
        "failure_count": failure_count,
        # Keep the original first-failure timestamp across escalations.
        "first_failed_at": existing.get("first_failed_at") if existing else now.isoformat(),
        "last_failed_at": now.isoformat(),
        "suppress_until": suppress_until.isoformat(),
        "is_persistent": is_persistent,
    }
    with _connect() as conn:
        conn.execute(
            """
            INSERT INTO seerr_media_failures (
                media_type,
                tmdb_id,
                status_code,
                error_message,
                failure_count,
                first_failed_at,
                last_failed_at,
                suppress_until,
                is_persistent
            )
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
            ON CONFLICT(media_type, tmdb_id) DO UPDATE SET
                status_code = excluded.status_code,
                error_message = excluded.error_message,
                failure_count = excluded.failure_count,
                first_failed_at = excluded.first_failed_at,
                last_failed_at = excluded.last_failed_at,
                suppress_until = excluded.suppress_until,
                is_persistent = excluded.is_persistent
            """,
            (
                payload["media_type"],
                payload["tmdb_id"],
                payload["status_code"],
                payload["error_message"],
                payload["failure_count"],
                payload["first_failed_at"],
                payload["last_failed_at"],
                payload["suppress_until"],
                1 if payload["is_persistent"] else 0,
            ),
        )
    logger.warning(
        "seerr_media_failure upsert: media_type=%s tmdb_id=%s status=%s failure_count=%s suppress_until=%s persistent=%s",
        payload["media_type"],
        payload["tmdb_id"],
        payload["status_code"],
        payload["failure_count"],
        payload["suppress_until"],
        payload["is_persistent"],
    )
    return payload
def clear_seerr_media_failure(media_type: Optional[str], tmdb_id: Optional[int]) -> None:
    """Remove the stored Seerr failure record for a media item, if any.

    Identifiers that are missing or non-numeric are ignored. Logs at INFO when
    a row was actually deleted.
    """
    if not media_type or not tmdb_id:
        return
    media_key = str(media_type).strip().lower()
    try:
        tmdb_key = int(tmdb_id)
    except (TypeError, ValueError):
        return
    with _connect() as conn:
        cursor = conn.execute(
            """
            DELETE FROM seerr_media_failures
            WHERE media_type = ? AND tmdb_id = ?
            """,
            (media_key, tmdb_key),
        )
    if cursor.rowcount:
        logger.info(
            "seerr_media_failure cleared: media_type=%s tmdb_id=%s",
            media_key,
            tmdb_key,
        )
def run_integrity_check() -> str: def run_integrity_check() -> str:
with _connect() as conn: with _connect() as conn:
row = conn.execute("PRAGMA integrity_check").fetchone() row = conn.execute("PRAGMA integrity_check").fetchone()

View File

@@ -42,6 +42,9 @@ from ..db import (
set_setting, set_setting,
update_artwork_cache_stats, update_artwork_cache_stats,
cleanup_history, cleanup_history,
is_seerr_media_failure_suppressed,
record_seerr_media_failure,
clear_seerr_media_failure,
) )
from ..models import Snapshot, TriageResult, RequestType from ..models import Snapshot, TriageResult, RequestType
from ..services.snapshot import build_snapshot from ..services.snapshot import build_snapshot
@@ -50,6 +53,8 @@ router = APIRouter(prefix="/requests", tags=["requests"], dependencies=[Depends(
CACHE_TTL_SECONDS = 600 CACHE_TTL_SECONDS = 600
_detail_cache: Dict[str, Tuple[float, Dict[str, Any]]] = {} _detail_cache: Dict[str, Tuple[float, Dict[str, Any]]] = {}
FAILED_DETAIL_CACHE_TTL_SECONDS = 3600
_failed_detail_cache: Dict[str, float] = {}
REQUEST_CACHE_TTL_SECONDS = 600 REQUEST_CACHE_TTL_SECONDS = 600
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
_sync_state: Dict[str, Any] = { _sync_state: Dict[str, Any] = {
@@ -100,6 +105,45 @@ def _cache_get(key: str) -> Optional[Dict[str, Any]]:
def _cache_set(key: str, payload: Dict[str, Any]) -> None: def _cache_set(key: str, payload: Dict[str, Any]) -> None:
_detail_cache[key] = (time.time() + CACHE_TTL_SECONDS, payload) _detail_cache[key] = (time.time() + CACHE_TTL_SECONDS, payload)
_failed_detail_cache.pop(key, None)
def _failure_cache_has(key: str) -> bool:
    """Return True while a negative-cache entry for *key* is still fresh.

    Expired entries are dropped from the cache as a side effect.
    """
    deadline = _failed_detail_cache.get(key)
    if not deadline:
        return False
    if time.time() <= deadline:
        return True
    _failed_detail_cache.pop(key, None)
    return False
def _failure_cache_set(key: str, ttl_seconds: int = FAILED_DETAIL_CACHE_TTL_SECONDS) -> None:
    """Mark *key* as a known-failed lookup for the next *ttl_seconds* seconds."""
    expires_at = time.time() + ttl_seconds
    _failed_detail_cache[key] = expires_at
def _extract_http_error_message(exc: httpx.HTTPStatusError) -> Optional[str]:
    """Pull a human-readable error message out of an HTTP error response.

    Tries the JSON body's "message"/"error" keys first; otherwise serializes
    the dict, or returns the stripped text body (None when empty).
    """
    response = exc.response
    if response is None:
        return None
    try:
        body = response.json()
    except ValueError:
        body = response.text
    if isinstance(body, str):
        stripped = body.strip()
        return stripped if stripped else None
    if isinstance(body, dict):
        message = body.get("message") or body.get("error")
        if message:
            return str(message).strip()
        return json.dumps(body, ensure_ascii=True)
    return str(body)
def _should_persist_seerr_media_failure(exc: httpx.HTTPStatusError) -> bool:
    """Persist only failures unlikely to fix themselves: 404s and 5xx errors."""
    response = exc.response
    if response is None:
        return False
    status = response.status_code
    return status == 404 or status >= 500
def _status_label(value: Any) -> str: def _status_label(value: Any) -> str:
@@ -383,9 +427,12 @@ async def _get_request_details(client: JellyseerrClient, request_id: int) -> Opt
cached = _cache_get(cache_key) cached = _cache_get(cache_key)
if isinstance(cached, dict): if isinstance(cached, dict):
return cached return cached
if _failure_cache_has(cache_key):
return None
try: try:
fetched = await client.get_request(str(request_id)) fetched = await client.get_request(str(request_id))
except httpx.HTTPStatusError: except httpx.HTTPStatusError:
_failure_cache_set(cache_key)
return None return None
if isinstance(fetched, dict): if isinstance(fetched, dict):
_cache_set(cache_key, fetched) _cache_set(cache_key, fetched)
@@ -393,54 +440,80 @@ async def _get_request_details(client: JellyseerrClient, request_id: int) -> Opt
return None return None
async def _get_media_details(
    client: JellyseerrClient, media_type: Optional[str], tmdb_id: Optional[int]
) -> Optional[Dict[str, Any]]:
    """Fetch Seerr movie/TV details with layered caching.

    Order of checks: positive detail cache, persisted failure suppression
    (promoted into the in-process negative cache), then the in-process
    negative cache. HTTP errors populate the negative cache and, for 404/5xx,
    the persisted failure table; a successful dict clears any stored failure.
    """
    if not tmdb_id or not media_type:
        return None
    kind = str(media_type).strip().lower()
    if kind not in {"movie", "tv"}:
        return None
    tmdb = int(tmdb_id)
    cache_key = f"media:{kind}:{tmdb}"
    cached = _cache_get(cache_key)
    if isinstance(cached, dict):
        return cached
    if is_seerr_media_failure_suppressed(kind, tmdb):
        logger.debug(
            "Seerr media hydration suppressed from db: media_type=%s tmdb_id=%s",
            kind,
            tmdb_id,
        )
        # Mirror the db suppression into the in-process negative cache to
        # skip the db check on subsequent calls.
        _failure_cache_set(cache_key, ttl_seconds=FAILED_DETAIL_CACHE_TTL_SECONDS)
        return None
    if _failure_cache_has(cache_key):
        return None
    fetcher = client.get_movie if kind == "movie" else client.get_tv
    try:
        fetched = await fetcher(tmdb)
    except httpx.HTTPStatusError as exc:
        _failure_cache_set(cache_key)
        if _should_persist_seerr_media_failure(exc):
            record_seerr_media_failure(
                kind,
                tmdb,
                status_code=exc.response.status_code if exc.response is not None else None,
                error_message=_extract_http_error_message(exc),
            )
        return None
    if not isinstance(fetched, dict):
        return None
    clear_seerr_media_failure(kind, tmdb)
    _cache_set(cache_key, fetched)
    return fetched
async def _hydrate_title_from_tmdb( async def _hydrate_title_from_tmdb(
client: JellyseerrClient, media_type: Optional[str], tmdb_id: Optional[int] client: JellyseerrClient, media_type: Optional[str], tmdb_id: Optional[int]
) -> tuple[Optional[str], Optional[int]]: ) -> tuple[Optional[str], Optional[int]]:
if not tmdb_id or not media_type: details = await _get_media_details(client, media_type, tmdb_id)
if not isinstance(details, dict):
return None, None return None, None
try: normalized_media_type = str(media_type).strip().lower() if media_type else None
if media_type == "movie": if normalized_media_type == "movie":
details = await client.get_movie(int(tmdb_id))
if isinstance(details, dict):
title = details.get("title") title = details.get("title")
release_date = details.get("releaseDate") release_date = details.get("releaseDate")
year = int(release_date[:4]) if release_date else None year = int(release_date[:4]) if release_date else None
return title, year return title, year
if media_type == "tv": if normalized_media_type == "tv":
details = await client.get_tv(int(tmdb_id))
if isinstance(details, dict):
title = details.get("name") or details.get("title") title = details.get("name") or details.get("title")
first_air = details.get("firstAirDate") first_air = details.get("firstAirDate")
year = int(first_air[:4]) if first_air else None year = int(first_air[:4]) if first_air else None
return title, year return title, year
except httpx.HTTPStatusError:
return None, None
return None, None return None, None
async def _hydrate_artwork_from_tmdb( async def _hydrate_artwork_from_tmdb(
client: JellyseerrClient, media_type: Optional[str], tmdb_id: Optional[int] client: JellyseerrClient, media_type: Optional[str], tmdb_id: Optional[int]
) -> tuple[Optional[str], Optional[str]]: ) -> tuple[Optional[str], Optional[str]]:
if not tmdb_id or not media_type: details = await _get_media_details(client, media_type, tmdb_id)
if not isinstance(details, dict):
return None, None return None, None
try:
if media_type == "movie":
details = await client.get_movie(int(tmdb_id))
if isinstance(details, dict):
return ( return (
details.get("posterPath") or details.get("poster_path"), details.get("posterPath") or details.get("poster_path"),
details.get("backdropPath") or details.get("backdrop_path"), details.get("backdropPath") or details.get("backdrop_path"),
) )
if media_type == "tv":
details = await client.get_tv(int(tmdb_id))
if isinstance(details, dict):
return (
details.get("posterPath") or details.get("poster_path"),
details.get("backdropPath") or details.get("backdrop_path"),
)
except httpx.HTTPStatusError:
return None, None
return None, None
def _artwork_url(path: Optional[str], size: str, cache_mode: str) -> Optional[str]: def _artwork_url(path: Optional[str], size: str, cache_mode: str) -> Optional[str]:

View File

@@ -3,6 +3,7 @@ import asyncio
import logging import logging
from datetime import datetime, timezone from datetime import datetime, timezone
from urllib.parse import quote from urllib.parse import quote
import httpx
from ..clients.jellyseerr import JellyseerrClient from ..clients.jellyseerr import JellyseerrClient
from ..clients.jellyfin import JellyfinClient from ..clients.jellyfin import JellyfinClient
@@ -18,6 +19,9 @@ from ..db import (
get_recent_snapshots, get_recent_snapshots,
get_setting, get_setting,
set_setting, set_setting,
is_seerr_media_failure_suppressed,
record_seerr_media_failure,
clear_seerr_media_failure,
) )
from ..models import ActionOption, NormalizedState, RequestType, Snapshot, TimelineHop from ..models import ActionOption, NormalizedState, RequestType, Snapshot, TimelineHop
@@ -53,6 +57,59 @@ def _pick_first(value: Any) -> Optional[Dict[str, Any]]:
return None return None
def _extract_http_error_message(exc: httpx.HTTPStatusError) -> Optional[str]:
    """Best-effort extraction of an error message from an HTTP error response.

    Prefers the JSON body's "message"/"error" keys; otherwise stringifies the
    payload, or returns the stripped text body (None when empty).
    """
    response = exc.response
    if response is None:
        return None
    try:
        body = response.json()
    except ValueError:
        body = response.text
    if isinstance(body, str):
        stripped = body.strip()
        return stripped or None
    if isinstance(body, dict):
        message = body.get("message") or body.get("error")
        return str(message).strip() if message else str(body)
    return str(body)
def _should_persist_seerr_media_failure(exc: httpx.HTTPStatusError) -> bool:
    """Only 404s and server-side (5xx) errors are worth persisting."""
    response = exc.response
    if response is None:
        return False
    status = response.status_code
    return status == 404 or status >= 500
async def _get_seerr_media_details(
    jellyseerr: JellyseerrClient, request_type: RequestType, tmdb_id: int
) -> Optional[Dict[str, Any]]:
    """Fetch Seerr details for a snapshot, honoring persisted failure suppression.

    Returns the details dict on success (clearing any stored failure), or None
    when the type is unsupported, the item is suppressed, or the lookup fails.
    Persistent-worthy HTTP errors (404/5xx) are recorded for future suppression.
    """
    media_type = request_type.value
    if media_type not in {"movie", "tv"}:
        return None
    if is_seerr_media_failure_suppressed(media_type, tmdb_id):
        logger.debug("Seerr snapshot hydration suppressed: media_type=%s tmdb_id=%s", media_type, tmdb_id)
        return None
    try:
        if request_type == RequestType.movie:
            details = await jellyseerr.get_movie(int(tmdb_id))
        else:
            details = await jellyseerr.get_tv(int(tmdb_id))
    except httpx.HTTPStatusError as exc:
        if _should_persist_seerr_media_failure(exc):
            status = exc.response.status_code if exc.response is not None else None
            record_seerr_media_failure(
                media_type,
                int(tmdb_id),
                status_code=status,
                error_message=_extract_http_error_message(exc),
            )
        return None
    if isinstance(details, dict):
        # A successful fetch invalidates any previously recorded failure.
        clear_seerr_media_failure(media_type, int(tmdb_id))
        return details
    return None
async def _maybe_refresh_jellyfin(snapshot: Snapshot) -> None: async def _maybe_refresh_jellyfin(snapshot: Snapshot) -> None:
if snapshot.state not in {NormalizedState.available, NormalizedState.completed}: if snapshot.state not in {NormalizedState.available, NormalizedState.completed}:
return return
@@ -300,22 +357,13 @@ async def build_snapshot(request_id: str) -> Snapshot:
if snapshot.title in {None, "", "Unknown"} and allow_remote: if snapshot.title in {None, "", "Unknown"} and allow_remote:
tmdb_id = jelly_request.get("media", {}).get("tmdbId") tmdb_id = jelly_request.get("media", {}).get("tmdbId")
if tmdb_id: if tmdb_id:
try: details = await _get_seerr_media_details(jellyseerr, snapshot.request_type, int(tmdb_id))
if snapshot.request_type == RequestType.movie:
details = await jellyseerr.get_movie(int(tmdb_id))
if isinstance(details, dict): if isinstance(details, dict):
if snapshot.request_type == RequestType.movie:
snapshot.title = details.get("title") or snapshot.title snapshot.title = details.get("title") or snapshot.title
release_date = details.get("releaseDate") release_date = details.get("releaseDate")
snapshot.year = int(release_date[:4]) if release_date else snapshot.year snapshot.year = int(release_date[:4]) if release_date else snapshot.year
poster_path = poster_path or details.get("posterPath") or details.get("poster_path")
backdrop_path = (
backdrop_path
or details.get("backdropPath")
or details.get("backdrop_path")
)
elif snapshot.request_type == RequestType.tv: elif snapshot.request_type == RequestType.tv:
details = await jellyseerr.get_tv(int(tmdb_id))
if isinstance(details, dict):
snapshot.title = details.get("name") or details.get("title") or snapshot.title snapshot.title = details.get("name") or details.get("title") or snapshot.title
first_air = details.get("firstAirDate") first_air = details.get("firstAirDate")
snapshot.year = int(first_air[:4]) if first_air else snapshot.year snapshot.year = int(first_air[:4]) if first_air else snapshot.year
@@ -325,8 +373,6 @@ async def build_snapshot(request_id: str) -> Snapshot:
or details.get("backdropPath") or details.get("backdropPath")
or details.get("backdrop_path") or details.get("backdrop_path")
) )
except Exception:
pass
cache_mode = (runtime.artwork_cache_mode or "remote").lower() cache_mode = (runtime.artwork_cache_mode or "remote").lower()
snapshot.artwork = { snapshot.artwork = {

View File

@@ -1,12 +1,12 @@
{ {
"name": "magent-frontend", "name": "magent-frontend",
"version": "0103262231", "version": "0103262251",
"lockfileVersion": 3, "lockfileVersion": 3,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "magent-frontend", "name": "magent-frontend",
"version": "0103262231", "version": "0103262251",
"dependencies": { "dependencies": {
"next": "16.1.6", "next": "16.1.6",
"react": "19.2.4", "react": "19.2.4",
@@ -977,3 +977,4 @@

View File

@@ -1,7 +1,7 @@
{ {
"name": "magent-frontend", "name": "magent-frontend",
"private": true, "private": true,
"version": "0103262231", "version": "0103262251",
"scripts": { "scripts": {
"dev": "next dev", "dev": "next dev",
"build": "next build", "build": "next build",
@@ -23,3 +23,4 @@