diff --git a/.build_number b/.build_number
index c1c622e..4efba10 100644
--- a/.build_number
+++ b/.build_number
@@ -1 +1 @@
-251261817
+251261937
diff --git a/backend/app/db.py b/backend/app/db.py
index 80dc6bf..225014b 100644
--- a/backend/app/db.py
+++ b/backend/app/db.py
@@ -24,6 +24,58 @@ def _connect() -> sqlite3.Connection:
return sqlite3.connect(_db_path())
+def _normalize_title_value(title: Optional[str]) -> Optional[str]:
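+    """Return the stripped title, or None when the value is blank or not a string."""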
+ if not isinstance(title, str):
+ return None
+ trimmed = title.strip()
+ return trimmed if trimmed else None
+
+
+def _normalize_year_value(year: Optional[Any]) -> Optional[int]:
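+    """Coerce an int or digit-only string to an int; return None for anything else."""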
+ if isinstance(year, int):
+ return year
+ if isinstance(year, str):
+ trimmed = year.strip()
+ if trimmed.isdigit():
+ return int(trimmed)
+ return None
+
+
+def _is_placeholder_title(title: Optional[str], request_id: Optional[int]) -> bool:
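+    """Treat missing, blank, "Untitled", or "Request <id>"-style titles as placeholders."""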
+ if not isinstance(title, str):
+ return True
+ normalized = title.strip().lower()
+ if not normalized:
+ return True
+ if normalized == "untitled":
+ return True
+ if request_id and normalized == f"request {request_id}":
+ return True
+ return False
+
+
+def _extract_title_year_from_payload(payload_json: Optional[str]) -> tuple[Optional[str], Optional[int]]:
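+    """Best-effort extraction of title and year from a cached request payload JSON string."""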
+ if not payload_json:
+ return None, None
+ try:
+ payload = json.loads(payload_json)
+ except json.JSONDecodeError:
+ return None, None
+ if not isinstance(payload, dict):
+ return None, None
+ media = payload.get("media") or {}
+ title = None
+ year = None
+ if isinstance(media, dict):
+ title = media.get("title") or media.get("name")
+ year = media.get("year")
+ if not title:
+ title = payload.get("title") or payload.get("name")
+ if year is None:
+ year = payload.get("year")
+ return _normalize_title_value(title), _normalize_year_value(year)
+
+
def init_db() -> None:
with _connect() as conn:
conn.execute(
@@ -603,7 +655,34 @@ def upsert_request_cache(
updated_at: Optional[str],
payload_json: str,
) -> None:
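+    # Prefer the caller-supplied title/year, then the payload, then the existing cached row.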
+    normalized_title = _normalize_title_value(title)
+    normalized_year = _normalize_year_value(year)
+    if _is_placeholder_title(normalized_title, request_id):
+        normalized_title = None
+    derived_title = None
+    derived_year = None
+    if not normalized_title or normalized_year is None:
+        derived_title, derived_year = _extract_title_year_from_payload(payload_json)
+    if derived_title and not normalized_title:
+        normalized_title = derived_title
+    if normalized_year is None and derived_year is not None:
+        normalized_year = derived_year
with _connect() as conn:
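+        # Fall back to the previously cached row so a sparse update cannot wipe a good title.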
+ existing_title = None
+ existing_year = None
+ if normalized_title is None or normalized_year is None:
+ row = conn.execute(
+ "SELECT title, year FROM requests_cache WHERE request_id = ?",
+ (request_id,),
+ ).fetchone()
+ if row:
+ existing_title, existing_year = row[0], row[1]
+ if _is_placeholder_title(existing_title, request_id):
+ existing_title = None
+ if normalized_title is None and existing_title:
+ normalized_title = existing_title
+ if normalized_year is None and existing_year is not None:
+ normalized_year = existing_year
conn.execute(
"""
INSERT INTO requests_cache (
@@ -637,8 +716,8 @@ def upsert_request_cache(
media_id,
media_type,
status,
- title,
- year,
+ normalized_title,
+ normalized_year,
requested_by,
requested_by_norm,
created_at,
@@ -741,22 +820,11 @@ def get_cached_requests(
title = row[4]
year = row[5]
if (not title or not year) and row[8]:
- try:
- payload = json.loads(row[8])
- if isinstance(payload, dict):
- media = payload.get("media") or {}
- if not title:
- title = (
- (media.get("title") if isinstance(media, dict) else None)
- or (media.get("name") if isinstance(media, dict) else None)
- or payload.get("title")
- or payload.get("name")
- )
- if not year:
- year = media.get("year") if isinstance(media, dict) else None
- year = year or payload.get("year")
- except json.JSONDecodeError:
- pass
+ derived_title, derived_year = _extract_title_year_from_payload(row[8])
+ if not title:
+ title = derived_title
+ if not year:
+ year = derived_year
results.append(
{
"request_id": row[0],
@@ -788,18 +856,8 @@ def get_request_cache_overview(limit: int = 50) -> list[Dict[str, Any]]:
for row in rows:
title = row[4]
if not title and row[9]:
- try:
- payload = json.loads(row[9])
- if isinstance(payload, dict):
- media = payload.get("media") or {}
- title = (
- (media.get("title") if isinstance(media, dict) else None)
- or (media.get("name") if isinstance(media, dict) else None)
- or payload.get("title")
- or payload.get("name")
- )
- except json.JSONDecodeError:
- title = row[4]
+ derived_title, _ = _extract_title_year_from_payload(row[9])
+ title = derived_title or row[4]
results.append(
{
"request_id": row[0],
@@ -825,7 +883,9 @@ def get_request_cache_count() -> int:
def update_request_cache_title(
request_id: int, title: str, year: Optional[int] = None
) -> None:
- if not title:
+ normalized_title = _normalize_title_value(title)
+ normalized_year = _normalize_year_value(year)
+ if not normalized_title:
return
with _connect() as conn:
conn.execute(
@@ -834,10 +894,38 @@ def update_request_cache_title(
SET title = ?, year = COALESCE(?, year)
WHERE request_id = ?
""",
- (title, year, request_id),
+ (normalized_title, normalized_year, request_id),
)
+def repair_request_cache_titles() -> int:
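+    """Replace placeholder titles with payload-derived values; return the number of rows updated."""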
+ updated = 0
+ with _connect() as conn:
+ rows = conn.execute(
+ """
+ SELECT request_id, title, year, payload_json
+ FROM requests_cache
+ """
+ ).fetchall()
+ for row in rows:
+ request_id, title, year, payload_json = row
+ if not _is_placeholder_title(title, request_id):
+ continue
+ derived_title, derived_year = _extract_title_year_from_payload(payload_json)
+ if not derived_title:
+ continue
+ conn.execute(
+ """
+ UPDATE requests_cache
+ SET title = ?, year = COALESCE(?, year)
+ WHERE request_id = ?
+ """,
+ (derived_title, derived_year, request_id),
+ )
+ updated += 1
+ return updated
+
+
def prune_duplicate_requests_cache() -> int:
with _connect() as conn:
cursor = conn.execute(
diff --git a/backend/app/routers/admin.py b/backend/app/routers/admin.py
index 286216f..0d2779e 100644
--- a/backend/app/routers/admin.py
+++ b/backend/app/routers/admin.py
@@ -21,6 +21,7 @@ from ..db import (
clear_history,
cleanup_history,
update_request_cache_title,
+ repair_request_cache_titles,
)
from ..runtime import get_runtime_settings
from ..clients.sonarr import SonarrClient
@@ -282,27 +283,10 @@ async def read_logs(lines: int = 200) -> Dict[str, Any]:
@router.get("/requests/cache")
async def requests_cache(limit: int = 50) -> Dict[str, Any]:
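+    # Backfill placeholder titles from cached payloads before building the overview.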
+ repaired = repair_request_cache_titles()
+ if repaired:
+ logger.info("Requests cache titles repaired via settings view: %s", repaired)
rows = get_request_cache_overview(limit)
- missing_titles = [row for row in rows if not row.get("title")]
- if missing_titles:
- runtime = get_runtime_settings()
- client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
- if client.configured():
- for row in missing_titles:
- request_id = row.get("request_id")
- if not isinstance(request_id, int):
- continue
- details = await requests_router._get_request_details(client, request_id)
- if not isinstance(details, dict):
- continue
- payload = requests_router._parse_request_payload(details)
- title = payload.get("title")
- if not title:
- continue
- row["title"] = title
- if payload.get("year"):
- row["year"] = payload.get("year")
- update_request_cache_title(request_id, title, payload.get("year"))
return {"rows": rows}
diff --git a/backend/app/routers/requests.py b/backend/app/routers/requests.py
index c7d26d3..df07490 100644
--- a/backend/app/routers/requests.py
+++ b/backend/app/routers/requests.py
@@ -30,6 +30,7 @@ from ..db import (
get_request_cache_last_updated,
get_request_cache_count,
get_request_cache_payloads,
+ repair_request_cache_titles,
prune_duplicate_requests_cache,
upsert_request_cache,
get_setting,
@@ -814,13 +815,14 @@ def _get_recent_from_cache(
async def startup_warmup_requests_cache() -> None:
runtime = get_runtime_settings()
client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
- if not client.configured():
- return
- try:
- await _ensure_requests_cache(client)
- except httpx.HTTPError as exc:
- logger.warning("Requests warmup skipped: %s", exc)
- return
+ if client.configured():
+ try:
+ await _ensure_requests_cache(client)
+ except httpx.HTTPError as exc:
+ logger.warning("Requests warmup skipped: %s", exc)
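+    # Title repair runs even if Jellyseerr is unconfigured or the warmup fails.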
+ repaired = repair_request_cache_titles()
+ if repaired:
+ logger.info("Requests cache titles repaired: %s", repaired)
_refresh_recent_cache_from_db()
@@ -968,7 +970,10 @@ async def _ensure_request_access(
runtime = get_runtime_settings()
mode = (runtime.requests_data_source or "prefer_cache").lower()
cached = get_request_cache_payload(request_id)
- if mode != "always_js" and cached is not None:
+ if mode != "always_js":
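+        # Cache-backed modes never fall through to Jellyseerr; a miss is a hard 404.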
+ if cached is None:
+ logger.debug("access cache miss: request_id=%s mode=%s", request_id, mode)
+ raise HTTPException(status_code=404, detail="Request not found in cache")
logger.debug("access cache hit: request_id=%s mode=%s", request_id, mode)
if _request_matches_user(cached, user.get("username", "")):
return
@@ -1249,13 +1254,15 @@ async def recent_requests(
) -> dict:
runtime = get_runtime_settings()
client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
- if not client.configured():
- raise HTTPException(status_code=400, detail="Jellyseerr not configured")
-
- try:
- await _ensure_requests_cache(client)
- except httpx.HTTPStatusError as exc:
- raise HTTPException(status_code=502, detail=str(exc)) from exc
+ mode = (runtime.requests_data_source or "prefer_cache").lower()
+ allow_remote = mode == "always_js"
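+    # Only the always_js mode is allowed to refresh the cache from Jellyseerr here.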
+ if allow_remote:
+ if not client.configured():
+ raise HTTPException(status_code=400, detail="Jellyseerr not configured")
+ try:
+ await _ensure_requests_cache(client)
+ except httpx.HTTPStatusError as exc:
+ raise HTTPException(status_code=502, detail=str(exc)) from exc
username_norm = _normalize_username(user.get("username", ""))
requested_by = None if user.get("role") == "admin" else username_norm
@@ -1266,10 +1273,8 @@ async def recent_requests(
_refresh_recent_cache_from_db()
rows = _get_recent_from_cache(requested_by, take, skip, since_iso)
cache_mode = (runtime.artwork_cache_mode or "remote").lower()
- mode = (runtime.requests_data_source or "prefer_cache").lower()
- allow_remote = mode == "always_js"
- allow_title_hydrate = mode == "prefer_cache"
- allow_artwork_hydrate = allow_remote or allow_title_hydrate
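+    # Per-request title hydration is disabled; cached titles are repaired at startup and in the admin view instead.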
+ allow_title_hydrate = False
+ allow_artwork_hydrate = allow_remote
jellyfin = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key)
jellyfin_cache: Dict[str, bool] = {}
@@ -1814,4 +1819,3 @@ async def action_grab(
save_action, request_id, "grab", "Grab release", "ok", action_message
)
return {"status": "ok", "response": {"qbittorrent": "queued"}}
-
diff --git a/backend/app/services/snapshot.py b/backend/app/services/snapshot.py
index 33c6c00..81e5447 100644
--- a/backend/app/services/snapshot.py
+++ b/backend/app/services/snapshot.py
@@ -220,6 +220,7 @@ async def build_snapshot(request_id: str) -> Snapshot:
"snapshot cache miss: request_id=%s mode=%s", request_id, mode
)
+ allow_remote = mode == "always_js" and jellyseerr.configured()
if not jellyseerr.configured() and not cached_request:
timeline.append(TimelineHop(service="Jellyseerr", status="not_configured"))
timeline.append(TimelineHop(service="Sonarr/Radarr", status="not_configured"))
@@ -227,9 +228,15 @@ async def build_snapshot(request_id: str) -> Snapshot:
timeline.append(TimelineHop(service="qBittorrent", status="not_configured"))
snapshot.timeline = timeline
return snapshot
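+    # Without a cached request and without remote access, surface a cache miss instead of guessing.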
+ if cached_request is None and not allow_remote:
+ timeline.append(TimelineHop(service="Jellyseerr", status="cache_miss"))
+ snapshot.timeline = timeline
+ snapshot.state = NormalizedState.unknown
+ snapshot.state_reason = "Request not found in cache"
+ return snapshot
jelly_request = cached_request
- if (jelly_request is None or mode == "always_js") and jellyseerr.configured():
+    if allow_remote:
try:
jelly_request = await jellyseerr.get_request(request_id)
logging.getLogger(__name__).debug(
@@ -262,7 +269,7 @@ async def build_snapshot(request_id: str) -> Snapshot:
poster_path = media.get("posterPath") or media.get("poster_path")
backdrop_path = media.get("backdropPath") or media.get("backdrop_path")
- if snapshot.title in {None, "", "Unknown"} and jellyseerr.configured():
+ if snapshot.title in {None, "", "Unknown"} and allow_remote:
tmdb_id = jelly_request.get("media", {}).get("tmdbId")
if tmdb_id:
try:
diff --git a/frontend/app/admin/SettingsPage.tsx b/frontend/app/admin/SettingsPage.tsx
index a26051d..5dad47c 100644
--- a/frontend/app/admin/SettingsPage.tsx
+++ b/frontend/app/admin/SettingsPage.tsx
@@ -297,7 +297,8 @@ export default function SettingsPage({ section }: SettingsPageProps) {
requests_full_sync_time: 'Daily time to refresh the full request list.',
requests_cleanup_time: 'Daily time to trim old history.',
requests_cleanup_days: 'History older than this is removed during cleanup.',
- requests_data_source: 'Pick where Magent should read requests from.',
+ requests_data_source:
+ 'Pick where Magent should read requests from. Cache-only avoids Jellyseerr lookups on reads.',
log_level: 'How much detail is written to the activity log.',
log_file: 'Where the activity log is stored.',
site_build_number: 'Build number shown in the account menu (auto-set from releases).',
@@ -1129,7 +1130,9 @@ export default function SettingsPage({ section }: SettingsPageProps) {
}
>
-
+
)