Compare commits

7 Commits

25 changed files with 2255 additions and 331 deletions

View File

@@ -1 +1 @@
0203261953
0303261611

View File

@@ -1,2 +1,2 @@
BUILD_NUMBER = "0203261953"
CHANGELOG = '2026-03-02|Process 1 build 0203261610\n2026-03-02|Process 1 build 0203261608\n2026-03-02|Add dedicated profile invites page and fix mobile admin layout\n2026-03-01|Persist Seerr media failure suppression and reduce sync error noise\n2026-03-01|Add repository line ending policy\n2026-03-01|Finalize diagnostics, logging controls, and email test support\n2026-03-01|Add invite email templates and delivery workflow\n2026-02-28|Finalize dev-1.3 upgrades and Seerr updates\n2026-02-27|admin docs and layout refresh, build 2702261314\n2026-02-27|Build 2702261153: fix jellyfin sync user visibility\n2026-02-26|Build 2602262241: live request page updates\n2026-02-26|Build 2602262204\n2026-02-26|Build 2602262159: restore jellyfin-first user source\n2026-02-26|Build 2602262049: split magent settings and harden local login\n2026-02-26|Build 2602262030: add magent settings and hardening\n2026-02-26|Build 2602261731: fix user resync after nuclear wipe\n2026-02-26|Build 2602261717: master invite policy and self-service invite controls\n2026-02-26|Build 2602261636: self-service invites and count fixes\n2026-02-26|Build 2602261605: invite trace and cross-system user lifecycle\n2026-02-26|Build 2602261536: refine invite layouts and tighten UI\n2026-02-26|Build 2602261523: live updates, invite cleanup and nuclear resync\n2026-02-26|Build 2602261442: tidy users and invite layouts\n2026-02-26|Build 2602261409: unify invite management controls\n2026-02-26|Build 2602260214: invites profiles and expiry admin controls\n2026-02-26|Build 2602260022: enterprise UI refresh and users bulk auto-search\n2026-02-25|Build 2502262321: fix auto-search quality and per-user toggle\n2026-02-02|Build 0202261541: allow FQDN service URLs\n2026-01-30|Build 3001262148: single container\n2026-01-29|Build 2901262244: format changelog\n2026-01-29|Build 2901262240: cache users\n2026-01-29|Tidy full changelog\n2026-01-29|Update full changelog\n2026-01-29|Bake build number and 
changelog\n2026-01-29|Hardcode build number in backend\n2026-01-29|release: 2901262102\n2026-01-29|release: 2901262044\n2026-01-29|release: 2901262036\n2026-01-27|Hydrate missing artwork from Jellyseerr (build 271261539)\n2026-01-27|Fallback to TMDB when artwork cache fails (build 271261524)\n2026-01-27|Add service test buttons (build 271261335)\n2026-01-27|Bump build number (process 2) 271261322\n2026-01-27|Add cache load spinner (build 271261238)\n2026-01-27|Fix snapshot title fallback (build 271261228)\n2026-01-27|Fix request titles in snapshots (build 271261219)\n2026-01-27|Bump build number to 271261202\n2026-01-27|Clarify request sync settings (build 271261159)\n2026-01-27|Fix backend cache stats import (build 271261149)\n2026-01-27|Improve cache stats performance (build 271261145)\n2026-01-27|Add cache control artwork stats\n2026-01-26|Fix sync progress bar animation\n2026-01-26|Fix cache title hydration\n2026-01-25|Build 2501262041\n2026-01-25|Harden request cache titles and cache-only reads\n2026-01-25|Serve bundled branding assets by default\n2026-01-25|Seed branding logo from bundled assets\n2026-01-25|Tidy request sync controls\n2026-01-25|Add Jellyfin login cache and admin-only stats\n2026-01-25|Add user stats and activity tracking\n2026-01-25|Move account actions into avatar menu\n2026-01-25|Improve mobile header layout\n2026-01-25|Automate build number tagging and sync\n2026-01-25|Add site banner, build number, and changelog\n2026-01-24|Improve request handling and qBittorrent categories\n2026-01-24|Map Prowlarr releases to Arr indexers for manual grab\n2026-01-24|Clarify how-it-works steps and fixes\n2026-01-24|Document fix buttons in how-it-works\n2026-01-24|Route grabs through Sonarr/Radarr only\n2026-01-23|Use backend branding assets for logo and favicon\n2026-01-23|Copy public assets into frontend image\n2026-01-23|Fix backend Dockerfile paths for root context\n2026-01-23|Add Docker Hub compose override\n2026-01-23|Remove password fields from 
users page\n2026-01-23|Use bundled branding assets\n2026-01-23|Add default branding assets when missing\n2026-01-23|Show available status on landing when in Jellyfin\n2026-01-23|Fix cache titles and move feedback link\n2026-01-23|Add feedback form and webhook\n2026-01-23|Hide header actions when signed out\n2026-01-23|Fallback manual grab to qBittorrent\n2026-01-23|Split search actions and improve download options\n2026-01-23|Fix cache titles via Jellyseerr media lookup\n2026-01-22|Update README with Docker-first guide\n2026-01-22|Update README\n2026-01-22|Ignore build artifacts\n2026-01-22|Initial commit'
BUILD_NUMBER = "0303261611"
CHANGELOG = '2026-03-03|Fix shared request access and Jellyfin-ready pipeline status\n2026-03-03|Process 1 build 0303261507\n2026-03-03|Improve SQLite batching and diagnostics visibility\n2026-03-03|Add login page visibility controls\n2026-03-03|Hotfix: expand landing-page search to all requests\n2026-03-02|Hotfix: add logged-out password reset flow\n2026-03-02|Process 1 build 0203261953\n2026-03-02|Process 1 build 0203261610\n2026-03-02|Process 1 build 0203261608\n2026-03-02|Add dedicated profile invites page and fix mobile admin layout\n2026-03-01|Persist Seerr media failure suppression and reduce sync error noise\n2026-03-01|Add repository line ending policy\n2026-03-01|Finalize diagnostics, logging controls, and email test support\n2026-03-01|Add invite email templates and delivery workflow\n2026-02-28|Finalize dev-1.3 upgrades and Seerr updates\n2026-02-27|admin docs and layout refresh, build 2702261314\n2026-02-27|Build 2702261153: fix jellyfin sync user visibility\n2026-02-26|Build 2602262241: live request page updates\n2026-02-26|Build 2602262204\n2026-02-26|Build 2602262159: restore jellyfin-first user source\n2026-02-26|Build 2602262049: split magent settings and harden local login\n2026-02-26|Build 2602262030: add magent settings and hardening\n2026-02-26|Build 2602261731: fix user resync after nuclear wipe\n2026-02-26|Build 2602261717: master invite policy and self-service invite controls\n2026-02-26|Build 2602261636: self-service invites and count fixes\n2026-02-26|Build 2602261605: invite trace and cross-system user lifecycle\n2026-02-26|Build 2602261536: refine invite layouts and tighten UI\n2026-02-26|Build 2602261523: live updates, invite cleanup and nuclear resync\n2026-02-26|Build 2602261442: tidy users and invite layouts\n2026-02-26|Build 2602261409: unify invite management controls\n2026-02-26|Build 2602260214: invites profiles and expiry admin controls\n2026-02-26|Build 2602260022: enterprise UI refresh and users bulk 
auto-search\n2026-02-25|Build 2502262321: fix auto-search quality and per-user toggle\n2026-02-02|Build 0202261541: allow FQDN service URLs\n2026-01-30|Build 3001262148: single container\n2026-01-29|Build 2901262244: format changelog\n2026-01-29|Build 2901262240: cache users\n2026-01-29|Tidy full changelog\n2026-01-29|Update full changelog\n2026-01-29|Bake build number and changelog\n2026-01-29|Hardcode build number in backend\n2026-01-29|release: 2901262102\n2026-01-29|release: 2901262044\n2026-01-29|release: 2901262036\n2026-01-27|Hydrate missing artwork from Jellyseerr (build 271261539)\n2026-01-27|Fallback to TMDB when artwork cache fails (build 271261524)\n2026-01-27|Add service test buttons (build 271261335)\n2026-01-27|Bump build number (process 2) 271261322\n2026-01-27|Add cache load spinner (build 271261238)\n2026-01-27|Fix snapshot title fallback (build 271261228)\n2026-01-27|Fix request titles in snapshots (build 271261219)\n2026-01-27|Bump build number to 271261202\n2026-01-27|Clarify request sync settings (build 271261159)\n2026-01-27|Fix backend cache stats import (build 271261149)\n2026-01-27|Improve cache stats performance (build 271261145)\n2026-01-27|Add cache control artwork stats\n2026-01-26|Fix sync progress bar animation\n2026-01-26|Fix cache title hydration\n2026-01-25|Build 2501262041\n2026-01-25|Harden request cache titles and cache-only reads\n2026-01-25|Serve bundled branding assets by default\n2026-01-25|Seed branding logo from bundled assets\n2026-01-25|Tidy request sync controls\n2026-01-25|Add Jellyfin login cache and admin-only stats\n2026-01-25|Add user stats and activity tracking\n2026-01-25|Move account actions into avatar menu\n2026-01-25|Improve mobile header layout\n2026-01-25|Automate build number tagging and sync\n2026-01-25|Add site banner, build number, and changelog\n2026-01-24|Improve request handling and qBittorrent categories\n2026-01-24|Map Prowlarr releases to Arr indexers for manual grab\n2026-01-24|Clarify 
how-it-works steps and fixes\n2026-01-24|Document fix buttons in how-it-works\n2026-01-24|Route grabs through Sonarr/Radarr only\n2026-01-23|Use backend branding assets for logo and favicon\n2026-01-23|Copy public assets into frontend image\n2026-01-23|Fix backend Dockerfile paths for root context\n2026-01-23|Add Docker Hub compose override\n2026-01-23|Remove password fields from users page\n2026-01-23|Use bundled branding assets\n2026-01-23|Add default branding assets when missing\n2026-01-23|Show available status on landing when in Jellyfin\n2026-01-23|Fix cache titles and move feedback link\n2026-01-23|Add feedback form and webhook\n2026-01-23|Hide header actions when signed out\n2026-01-23|Fallback manual grab to qBittorrent\n2026-01-23|Split search actions and improve download options\n2026-01-23|Fix cache titles via Jellyseerr media lookup\n2026-01-22|Update README with Docker-first guide\n2026-01-22|Update README\n2026-01-22|Ignore build artifacts\n2026-01-22|Initial commit'

View File

@@ -71,6 +71,18 @@ class Settings(BaseSettings):
site_banner_tone: str = Field(
default="info", validation_alias=AliasChoices("SITE_BANNER_TONE")
)
site_login_show_jellyfin_login: bool = Field(
default=True, validation_alias=AliasChoices("SITE_LOGIN_SHOW_JELLYFIN_LOGIN")
)
site_login_show_local_login: bool = Field(
default=True, validation_alias=AliasChoices("SITE_LOGIN_SHOW_LOCAL_LOGIN")
)
site_login_show_forgot_password: bool = Field(
default=True, validation_alias=AliasChoices("SITE_LOGIN_SHOW_FORGOT_PASSWORD")
)
site_login_show_signup_link: bool = Field(
default=True, validation_alias=AliasChoices("SITE_LOGIN_SHOW_SIGNUP_LINK")
)
site_changelog: Optional[str] = Field(default=CHANGELOG)
magent_application_url: Optional[str] = Field(

View File

@@ -2,7 +2,9 @@ import json
import os
import sqlite3
import logging
from hashlib import sha256
from datetime import datetime, timezone, timedelta
from time import perf_counter
from typing import Any, Dict, Optional
from .config import settings
@@ -15,18 +17,44 @@ SEERR_MEDIA_FAILURE_SHORT_SUPPRESS_HOURS = 6
# Suppression windows for Seerr media-failure tracking: a longer back-off
# after a retried failure, and a month-long mute once a title has failed
# at least SEERR_MEDIA_FAILURE_PERSISTENT_THRESHOLD times.
SEERR_MEDIA_FAILURE_RETRY_SUPPRESS_HOURS = 24
SEERR_MEDIA_FAILURE_PERSISTENT_SUPPRESS_DAYS = 30
SEERR_MEDIA_FAILURE_PERSISTENT_THRESHOLD = 3
# SQLite connection tuning knobs (applied by _apply_connection_pragmas / _connect).
SQLITE_BUSY_TIMEOUT_MS = 5_000  # wait up to 5s on a locked database before failing
SQLITE_CACHE_SIZE_KIB = 32_768  # negative PRAGMA cache_size value means "size in KiB"
SQLITE_MMAP_SIZE_BYTES = 256 * 1024 * 1024  # 256 MiB memory-mapped I/O window
def _db_path() -> str:
    """Resolve the SQLite database file path and ensure its directory exists.

    Relative paths are anchored at the application root (the parent of this
    package) rather than the process CWD, so the database lands in the same
    place regardless of where the server was launched from.

    NOTE(review): the rendered diff interleaved the removed CWD-based join
    with the added app-root join; only the app-root resolution is kept here.
    """
    path = settings.sqlite_path or "data/magent.db"
    if not os.path.isabs(path):
        app_root = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
        path = os.path.join(app_root, path)
    os.makedirs(os.path.dirname(path), exist_ok=True)
    return path
def _apply_connection_pragmas(conn: sqlite3.Connection) -> None:
    """Tune a freshly opened connection: WAL journaling, relaxed fsync,
    a larger page cache, mmap I/O, and a busy timeout.

    Each pragma is applied independently; failures (e.g. read-only media or
    an older SQLite build) are non-fatal and only logged at debug level.
    """
    desired_settings = {
        "journal_mode": "WAL",
        "synchronous": "NORMAL",
        "temp_store": "MEMORY",
        "cache_size": -SQLITE_CACHE_SIZE_KIB,
        "mmap_size": SQLITE_MMAP_SIZE_BYTES,
        "busy_timeout": SQLITE_BUSY_TIMEOUT_MS,
    }
    for name, desired in desired_settings.items():
        try:
            conn.execute(f"PRAGMA {name} = {desired}")
        except sqlite3.DatabaseError:
            logger.debug("sqlite pragma skipped: %s=%s", name, desired, exc_info=True)
def _connect() -> sqlite3.Connection:
    """Open a tuned SQLite connection to the application database.

    Applies a busy timeout (matching the busy_timeout pragma), a larger
    prepared-statement cache, and the per-connection pragmas. The rendered
    diff left the removed plain `sqlite3.connect(_db_path())` one-liner in
    front of this body, which would have made the tuning unreachable; only
    the tuned variant is kept.
    """
    conn = sqlite3.connect(
        _db_path(),
        timeout=SQLITE_BUSY_TIMEOUT_MS / 1000,
        cached_statements=512,
    )
    _apply_connection_pragmas(conn)
    return conn
def _parse_datetime_value(value: Optional[str]) -> Optional[datetime]:
@@ -292,6 +320,22 @@ def init_db() -> None:
)
"""
)
conn.execute(
"""
CREATE TABLE IF NOT EXISTS password_reset_tokens (
id INTEGER PRIMARY KEY AUTOINCREMENT,
token_hash TEXT NOT NULL UNIQUE,
username TEXT NOT NULL,
recipient_email TEXT NOT NULL,
auth_provider TEXT NOT NULL,
created_at TEXT NOT NULL,
expires_at TEXT NOT NULL,
used_at TEXT,
requested_by_ip TEXT,
requested_user_agent TEXT
)
"""
)
conn.execute(
"""
CREATE INDEX IF NOT EXISTS idx_requests_cache_created_at
@@ -304,6 +348,30 @@ def init_db() -> None:
ON requests_cache (requested_by_norm)
"""
)
conn.execute(
"""
CREATE INDEX IF NOT EXISTS idx_requests_cache_updated_at
ON requests_cache (updated_at DESC, request_id DESC)
"""
)
conn.execute(
"""
CREATE INDEX IF NOT EXISTS idx_requests_cache_requested_by_id_created_at
ON requests_cache (requested_by_id, created_at DESC, request_id DESC)
"""
)
conn.execute(
"""
CREATE INDEX IF NOT EXISTS idx_requests_cache_requested_by_norm_created_at
ON requests_cache (requested_by_norm, created_at DESC, request_id DESC)
"""
)
conn.execute(
"""
CREATE INDEX IF NOT EXISTS idx_requests_cache_status_created_at
ON requests_cache (status, created_at DESC, request_id DESC)
"""
)
conn.execute(
"""
CREATE INDEX IF NOT EXISTS idx_artwork_cache_status_updated_at
@@ -316,6 +384,18 @@ def init_db() -> None:
ON seerr_media_failures (suppress_until)
"""
)
conn.execute(
"""
CREATE INDEX IF NOT EXISTS idx_password_reset_tokens_username
ON password_reset_tokens (username)
"""
)
conn.execute(
"""
CREATE INDEX IF NOT EXISTS idx_password_reset_tokens_expires_at
ON password_reset_tokens (expires_at)
"""
)
conn.execute(
"""
CREATE TABLE IF NOT EXISTS user_activity (
@@ -412,6 +492,15 @@ def init_db() -> None:
)
except sqlite3.OperationalError:
pass
try:
conn.execute(
"""
CREATE INDEX IF NOT EXISTS idx_users_username_nocase
ON users (username COLLATE NOCASE)
"""
)
except sqlite3.OperationalError:
pass
try:
conn.execute("ALTER TABLE requests_cache ADD COLUMN requested_by_id INTEGER")
except sqlite3.OperationalError:
@@ -425,6 +514,10 @@ def init_db() -> None:
)
except sqlite3.OperationalError:
pass
try:
conn.execute("PRAGMA optimize")
except sqlite3.OperationalError:
pass
_backfill_auth_providers()
ensure_admin_user()
@@ -679,6 +772,45 @@ def get_user_by_username(username: str) -> Optional[Dict[str, Any]]:
}
def get_user_by_jellyseerr_id(jellyseerr_user_id: int) -> Optional[Dict[str, Any]]:
    """Return the local user record linked to a Jellyseerr user id, or None.

    If several rows share the same jellyseerr_user_id, the oldest row
    (lowest primary key) wins.
    """
    query = """
        SELECT id, username, password_hash, role, auth_provider, jellyseerr_user_id,
               created_at, last_login_at, is_blocked, auto_search_enabled,
               invite_management_enabled, profile_id, expires_at, invited_by_code, invited_at,
               jellyfin_password_hash, last_jellyfin_auth_at
        FROM users
        WHERE jellyseerr_user_id = ?
        ORDER BY id ASC
        LIMIT 1
    """
    with _connect() as conn:
        record = conn.execute(query, (jellyseerr_user_id,)).fetchone()
    if record is None:
        return None
    # Keys mirror the SELECT column order above.
    keys = (
        "id", "username", "password_hash", "role", "auth_provider",
        "jellyseerr_user_id", "created_at", "last_login_at", "is_blocked",
        "auto_search_enabled", "invite_management_enabled", "profile_id",
        "expires_at", "invited_by_code", "invited_at",
        "jellyfin_password_hash", "last_jellyfin_auth_at",
    )
    user = dict(zip(keys, record))
    for flag in ("is_blocked", "auto_search_enabled", "invite_management_enabled"):
        user[flag] = bool(user[flag])
    user["is_expired"] = _is_datetime_in_past(user["expires_at"])
    return user
def get_user_by_id(user_id: int) -> Optional[Dict[str, Any]]:
with _connect() as conn:
row = conn.execute(
@@ -1551,41 +1683,44 @@ def get_user_request_stats(username_norm: str, requested_by_id: Optional[int] =
"last_request_at": None,
}
with _connect() as conn:
total_row = conn.execute(
row = conn.execute(
"""
SELECT COUNT(*)
SELECT
COUNT(*) AS total,
SUM(CASE WHEN status = 4 THEN 1 ELSE 0 END) AS ready,
SUM(CASE WHEN status = 1 THEN 1 ELSE 0 END) AS pending,
SUM(CASE WHEN status = 2 THEN 1 ELSE 0 END) AS approved,
SUM(CASE WHEN status = 5 THEN 1 ELSE 0 END) AS working,
SUM(CASE WHEN status = 6 THEN 1 ELSE 0 END) AS partial,
SUM(CASE WHEN status = 3 THEN 1 ELSE 0 END) AS declined,
MAX(created_at) AS last_request_at
FROM requests_cache
WHERE requested_by_id = ?
""",
(requested_by_id,),
).fetchone()
status_rows = conn.execute(
"""
SELECT status, COUNT(*)
FROM requests_cache
WHERE requested_by_id = ?
GROUP BY status
""",
(requested_by_id,),
).fetchall()
last_row = conn.execute(
"""
SELECT MAX(created_at)
FROM requests_cache
WHERE requested_by_id = ?
""",
(requested_by_id,),
).fetchone()
counts = {int(row[0]): int(row[1]) for row in status_rows if row[0] is not None}
pending = counts.get(1, 0)
approved = counts.get(2, 0)
declined = counts.get(3, 0)
ready = counts.get(4, 0)
working = counts.get(5, 0)
partial = counts.get(6, 0)
if not row:
return {
"total": 0,
"ready": 0,
"pending": 0,
"approved": 0,
"working": 0,
"partial": 0,
"declined": 0,
"in_progress": 0,
"last_request_at": None,
}
total = int(row[0] or 0)
ready = int(row[1] or 0)
pending = int(row[2] or 0)
approved = int(row[3] or 0)
working = int(row[4] or 0)
partial = int(row[5] or 0)
declined = int(row[6] or 0)
in_progress = approved + working + partial
return {
"total": int(total_row[0] or 0) if total_row else 0,
"total": total,
"ready": ready,
"pending": pending,
"approved": approved,
@@ -1593,7 +1728,7 @@ def get_user_request_stats(username_norm: str, requested_by_id: Optional[int] =
"partial": partial,
"declined": declined,
"in_progress": in_progress,
"last_request_at": last_row[0] if last_row else None,
"last_request_at": row[7],
}
@@ -1620,6 +1755,143 @@ def get_global_request_total() -> int:
return int(row[0] or 0)
# Shared upsert for the requests_cache table: insert a new row keyed by
# request_id, or refresh every mutable column on conflict. The positional
# parameter order must match the tuples produced by
# _prepare_requests_cache_upsert_rows (used by both the single-row and
# batched upsert paths).
_REQUESTS_CACHE_UPSERT_SQL = """
INSERT INTO requests_cache (
request_id,
media_id,
media_type,
status,
title,
year,
requested_by,
requested_by_norm,
requested_by_id,
created_at,
updated_at,
payload_json
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(request_id) DO UPDATE SET
media_id = excluded.media_id,
media_type = excluded.media_type,
status = excluded.status,
title = excluded.title,
year = excluded.year,
requested_by = excluded.requested_by,
requested_by_norm = excluded.requested_by_norm,
requested_by_id = excluded.requested_by_id,
created_at = excluded.created_at,
updated_at = excluded.updated_at,
payload_json = excluded.payload_json
"""
def get_request_cache_lookup(request_ids: list[int]) -> Dict[int, Dict[str, Any]]:
    """Fetch cached updated_at/title/year for the given request ids.

    Non-integer entries are silently ignored and duplicates collapse.
    Returns a map keyed by request_id; ids absent from the cache are
    simply missing from the result.
    """
    wanted: set[int] = set()
    for candidate in request_ids:
        if isinstance(candidate, int):
            wanted.add(int(candidate))
    ordered = sorted(wanted)
    if not ordered:
        return {}
    marks = ", ".join("?" for _ in ordered)
    sql = f"""
        SELECT request_id, updated_at, title, year
        FROM requests_cache
        WHERE request_id IN ({marks})
    """
    lookup: Dict[int, Dict[str, Any]] = {}
    with _connect() as conn:
        for rec in conn.execute(sql, tuple(ordered)).fetchall():
            rid = int(rec[0])
            lookup[rid] = {
                "request_id": rid,
                "updated_at": rec[1],
                "title": rec[2],
                "year": rec[3],
            }
    return lookup
def _prepare_requests_cache_upsert_rows(
    records: list[Dict[str, Any]], conn: sqlite3.Connection
) -> list[tuple[Any, ...]]:
    """Build parameter tuples for _REQUESTS_CACHE_UPSERT_SQL from raw records.

    For records missing a usable title or year, values fall back first to
    data derived from payload_json and then to whatever is already stored
    in requests_cache, so a sparse sync payload cannot clobber a previously
    hydrated title/year. Placeholder-looking titles (per
    _is_placeholder_title) are treated as missing.

    NOTE(review): indentation was lost in this rendering; the nesting of the
    fallback conditionals below is the most coherent reconstruction — confirm
    against the original file.
    """
    if not records:
        return []
    # Existing (title, year) keyed by request_id, loaded only for the
    # records that actually need a fallback.
    existing_rows: Dict[int, tuple[Optional[str], Optional[int]]] = {}
    ids_needing_existing = [
        int(record["request_id"])
        for record in records
        if isinstance(record.get("request_id"), int)
        and (
            not _normalize_title_value(record.get("title"))
            or _normalize_year_value(record.get("year")) is None
        )
    ]
    if ids_needing_existing:
        placeholders = ", ".join("?" for _ in sorted(set(ids_needing_existing)))
        query = f"""
            SELECT request_id, title, year
            FROM requests_cache
            WHERE request_id IN ({placeholders})
        """
        for row in conn.execute(query, tuple(sorted(set(ids_needing_existing)))).fetchall():
            existing_rows[int(row[0])] = (row[1], row[2])
    prepared: list[tuple[Any, ...]] = []
    for record in records:
        request_id = int(record["request_id"])
        media_id = record.get("media_id")
        media_type = record.get("media_type")
        status = record.get("status")
        requested_by = record.get("requested_by")
        requested_by_norm = record.get("requested_by_norm")
        requested_by_id = record.get("requested_by_id")
        created_at = record.get("created_at")
        updated_at = record.get("updated_at")
        payload_json = str(record.get("payload_json") or "")
        normalized_title = _normalize_title_value(record.get("title"))
        normalized_year = _normalize_year_value(record.get("year"))
        derived_title = None
        derived_year = None
        # Only parse the payload when the record itself lacks title/year.
        if not normalized_title or normalized_year is None:
            derived_title, derived_year = _extract_title_year_from_payload(payload_json)
        # Discard auto-generated placeholder titles (e.g. "Request <id>").
        if _is_placeholder_title(normalized_title, request_id):
            normalized_title = None
        if derived_title and not normalized_title:
            normalized_title = derived_title
        if normalized_year is None and derived_year is not None:
            normalized_year = derived_year
        # Last resort: reuse what the cache already holds for this request.
        existing_title = None
        existing_year = None
        if normalized_title is None or normalized_year is None:
            existing = existing_rows.get(request_id)
            if existing:
                existing_title, existing_year = existing
            if _is_placeholder_title(existing_title, request_id):
                existing_title = None
            if normalized_title is None and existing_title:
                normalized_title = existing_title
            if normalized_year is None and existing_year is not None:
                normalized_year = existing_year
        prepared.append(
            (
                request_id,
                media_id,
                media_type,
                status,
                normalized_title,
                normalized_year,
                requested_by,
                requested_by_norm,
                requested_by_id,
                created_at,
                updated_at,
                payload_json,
            )
        )
    return prepared
def upsert_request_cache(
request_id: int,
media_id: Optional[int],
@@ -1634,79 +1906,28 @@ def upsert_request_cache(
updated_at: Optional[str],
payload_json: str,
) -> None:
normalized_title = _normalize_title_value(title)
normalized_year = _normalize_year_value(year)
derived_title = None
derived_year = None
if not normalized_title or normalized_year is None:
derived_title, derived_year = _extract_title_year_from_payload(payload_json)
if _is_placeholder_title(normalized_title, request_id):
normalized_title = None
if derived_title and not normalized_title:
normalized_title = derived_title
if normalized_year is None and derived_year is not None:
normalized_year = derived_year
with _connect() as conn:
existing_title = None
existing_year = None
if normalized_title is None or normalized_year is None:
row = conn.execute(
"SELECT title, year FROM requests_cache WHERE request_id = ?",
(request_id,),
).fetchone()
if row:
existing_title, existing_year = row[0], row[1]
if _is_placeholder_title(existing_title, request_id):
existing_title = None
if normalized_title is None and existing_title:
normalized_title = existing_title
if normalized_year is None and existing_year is not None:
normalized_year = existing_year
conn.execute(
"""
INSERT INTO requests_cache (
request_id,
media_id,
media_type,
status,
title,
year,
requested_by,
requested_by_norm,
requested_by_id,
created_at,
updated_at,
payload_json
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(request_id) DO UPDATE SET
media_id = excluded.media_id,
media_type = excluded.media_type,
status = excluded.status,
title = excluded.title,
year = excluded.year,
requested_by = excluded.requested_by,
requested_by_norm = excluded.requested_by_norm,
requested_by_id = excluded.requested_by_id,
created_at = excluded.created_at,
updated_at = excluded.updated_at,
payload_json = excluded.payload_json
""",
(
request_id,
media_id,
media_type,
status,
normalized_title,
normalized_year,
requested_by,
requested_by_norm,
requested_by_id,
created_at,
updated_at,
payload_json,
),
rows = _prepare_requests_cache_upsert_rows(
[
{
"request_id": request_id,
"media_id": media_id,
"media_type": media_type,
"status": status,
"title": title,
"year": year,
"requested_by": requested_by,
"requested_by_norm": requested_by_norm,
"requested_by_id": requested_by_id,
"created_at": created_at,
"updated_at": updated_at,
"payload_json": payload_json,
}
],
conn,
)
if rows:
conn.execute(_REQUESTS_CACHE_UPSERT_SQL, rows[0])
logger.debug(
"requests_cache upsert: request_id=%s media_id=%s status=%s updated_at=%s",
request_id,
@@ -1716,6 +1937,17 @@ def upsert_request_cache(
)
def upsert_request_cache_many(records: list[Dict[str, Any]]) -> int:
    """Upsert a batch of request-cache records via a single executemany call.

    Returns the number of input records processed; an empty batch is a no-op
    returning 0 without opening a connection.
    """
    if not records:
        return 0
    with _connect() as conn:
        prepared = _prepare_requests_cache_upsert_rows(records, conn)
        if prepared:
            conn.executemany(_REQUESTS_CACHE_UPSERT_SQL, prepared)
        logger.debug("requests_cache bulk upsert: rows=%s", len(records))
    return len(records)
def get_request_cache_last_updated() -> Optional[str]:
with _connect() as conn:
row = conn.execute(
@@ -1773,6 +2005,7 @@ def get_cached_requests(
requested_by_norm: Optional[str] = None,
requested_by_id: Optional[int] = None,
since_iso: Optional[str] = None,
status_codes: Optional[list[int]] = None,
) -> list[Dict[str, Any]]:
query = """
SELECT request_id, media_id, media_type, status, title, year, requested_by,
@@ -1790,6 +2023,10 @@ def get_cached_requests(
if since_iso:
conditions.append("created_at >= ?")
params.append(since_iso)
if status_codes:
placeholders = ", ".join("?" for _ in status_codes)
conditions.append(f"status IN ({placeholders})")
params.extend(status_codes)
if conditions:
query += " WHERE " + " AND ".join(conditions)
query += " ORDER BY created_at DESC, request_id DESC LIMIT ? OFFSET ?"
@@ -1797,11 +2034,12 @@ def get_cached_requests(
with _connect() as conn:
rows = conn.execute(query, tuple(params)).fetchall()
logger.debug(
"requests_cache list: count=%s requested_by_norm=%s requested_by_id=%s since_iso=%s",
"requests_cache list: count=%s requested_by_norm=%s requested_by_id=%s since_iso=%s status_codes=%s",
len(rows),
requested_by_norm,
requested_by_id,
since_iso,
status_codes,
)
results: list[Dict[str, Any]] = []
for row in rows:
@@ -1835,6 +2073,7 @@ def get_cached_requests_count(
requested_by_norm: Optional[str] = None,
requested_by_id: Optional[int] = None,
since_iso: Optional[str] = None,
status_codes: Optional[list[int]] = None,
) -> int:
query = "SELECT COUNT(*) FROM requests_cache"
params: list[Any] = []
@@ -1848,6 +2087,10 @@ def get_cached_requests_count(
if since_iso:
conditions.append("created_at >= ?")
params.append(since_iso)
if status_codes:
placeholders = ", ".join("?" for _ in status_codes)
conditions.append(f"status IN ({placeholders})")
params.extend(status_codes)
if conditions:
query += " WHERE " + " AND ".join(conditions)
with _connect() as conn:
@@ -1938,9 +2181,45 @@ def upsert_artwork_cache_status(
poster_cached: bool,
backdrop_cached: bool,
) -> None:
upsert_artwork_cache_status_many(
[
{
"request_id": request_id,
"tmdb_id": tmdb_id,
"media_type": media_type,
"poster_path": poster_path,
"backdrop_path": backdrop_path,
"has_tmdb": has_tmdb,
"poster_cached": poster_cached,
"backdrop_cached": backdrop_cached,
}
]
)
def upsert_artwork_cache_status_many(records: list[Dict[str, Any]]) -> int:
if not records:
return 0
updated_at = datetime.now(timezone.utc).isoformat()
params = [
(
record["request_id"],
record.get("tmdb_id"),
record.get("media_type"),
record.get("poster_path"),
record.get("backdrop_path"),
1 if record.get("has_tmdb") else 0,
1 if record.get("poster_cached") else 0,
1 if record.get("backdrop_cached") else 0,
updated_at,
)
for record in records
if isinstance(record.get("request_id"), int)
]
if not params:
return 0
with _connect() as conn:
conn.execute(
conn.executemany(
"""
INSERT INTO artwork_cache_status (
request_id,
@@ -1964,18 +2243,9 @@ def upsert_artwork_cache_status(
backdrop_cached = excluded.backdrop_cached,
updated_at = excluded.updated_at
""",
(
request_id,
tmdb_id,
media_type,
poster_path,
backdrop_path,
1 if has_tmdb else 0,
1 if poster_cached else 0,
1 if backdrop_cached else 0,
updated_at,
),
params,
)
return len(params)
def get_artwork_cache_status_count() -> int:
@@ -2271,6 +2541,138 @@ def get_settings_overrides() -> Dict[str, str]:
return overrides
def _hash_password_reset_token(token_value: str) -> str:
return sha256(str(token_value).encode("utf-8")).hexdigest()
def _password_reset_token_row_to_dict(row: Any) -> Dict[str, Any]:
    """Convert a password_reset_tokens row (standard SELECT column order)
    into a dict, adding derived is_expired / is_used flags."""
    fields = (
        "id", "token_hash", "username", "recipient_email", "auth_provider",
        "created_at", "expires_at", "used_at", "requested_by_ip",
        "requested_user_agent",
    )
    token = {name: row[index] for index, name in enumerate(fields)}
    token["is_expired"] = _is_datetime_in_past(token["expires_at"])
    token["is_used"] = bool(token["used_at"])
    return token
def delete_expired_password_reset_tokens() -> int:
    """Purge reset tokens that are past expiry or already consumed.

    Returns the number of rows deleted.
    """
    cutoff = datetime.now(timezone.utc).isoformat()
    statement = """
        DELETE FROM password_reset_tokens
        WHERE expires_at <= ? OR used_at IS NOT NULL
    """
    with _connect() as conn:
        result = conn.execute(statement, (cutoff,))
        return int(result.rowcount or 0)
def create_password_reset_token(
    token_value: str,
    username: str,
    recipient_email: str,
    auth_provider: str,
    expires_at: str,
    *,
    requested_by_ip: Optional[str] = None,
    requested_user_agent: Optional[str] = None,
) -> Dict[str, Any]:
    """Persist a new password-reset token and return its metadata.

    Only the SHA-256 hash of token_value is written to the database; the
    caller keeps the plaintext for the reset email. Any earlier unused
    tokens for the same username are revoked first, so at most one live
    reset link exists per user.
    """
    created_at = datetime.now(timezone.utc).isoformat()
    token_hash = _hash_password_reset_token(token_value)
    # Opportunistic cleanup of expired/consumed tokens before inserting.
    delete_expired_password_reset_tokens()
    with _connect() as conn:
        # A new request invalidates any reset link still in flight.
        conn.execute(
            """
            DELETE FROM password_reset_tokens
            WHERE username = ? AND used_at IS NULL
            """,
            (username,),
        )
        conn.execute(
            """
            INSERT INTO password_reset_tokens (
                token_hash,
                username,
                recipient_email,
                auth_provider,
                created_at,
                expires_at,
                used_at,
                requested_by_ip,
                requested_user_agent
            )
            VALUES (?, ?, ?, ?, ?, ?, NULL, ?, ?)
            """,
            (
                token_hash,
                username,
                recipient_email,
                auth_provider,
                created_at,
                expires_at,
                requested_by_ip,
                requested_user_agent,
            ),
        )
    # NOTE(review): this logs the recipient email and requester IP at INFO
    # level — confirm that is acceptable under the deployment's log policy.
    logger.info(
        "password reset token created username=%s provider=%s recipient=%s expires_at=%s requester_ip=%s",
        username,
        auth_provider,
        recipient_email,
        expires_at,
        requested_by_ip,
    )
    # The plaintext token and its hash are deliberately absent from the
    # returned metadata.
    return {
        "username": username,
        "recipient_email": recipient_email,
        "auth_provider": auth_provider,
        "created_at": created_at,
        "expires_at": expires_at,
        "requested_by_ip": requested_by_ip,
        "requested_user_agent": requested_user_agent,
    }
def get_password_reset_token(token_value: str) -> Optional[Dict[str, Any]]:
    """Look up the stored reset-token record matching token_value, or None.

    The lookup hashes the plaintext token first; the database never sees
    the raw value.
    """
    hashed = _hash_password_reset_token(token_value)
    query = """
        SELECT id, token_hash, username, recipient_email, auth_provider, created_at,
               expires_at, used_at, requested_by_ip, requested_user_agent
        FROM password_reset_tokens
        WHERE token_hash = ?
    """
    with _connect() as conn:
        record = conn.execute(query, (hashed,)).fetchone()
    return _password_reset_token_row_to_dict(record) if record else None
def mark_password_reset_token_used(token_value: str) -> None:
    """Mark a reset token as consumed.

    Idempotent: the used_at timestamp is only written on first use, since
    the UPDATE is guarded by `used_at IS NULL`.
    """
    hashed = _hash_password_reset_token(token_value)
    stamp = datetime.now(timezone.utc).isoformat()
    statement = """
        UPDATE password_reset_tokens
        SET used_at = ?
        WHERE token_hash = ? AND used_at IS NULL
    """
    with _connect() as conn:
        conn.execute(statement, (stamp, hashed))
    logger.info("password reset token marked used token_hash=%s", hashed[:12])
def get_seerr_media_failure(media_type: Optional[str], tmdb_id: Optional[int]) -> Optional[Dict[str, Any]]:
if not media_type or not tmdb_id:
return None
@@ -2427,6 +2829,73 @@ def run_integrity_check() -> str:
return str(row[0])
def get_database_diagnostics() -> Dict[str, Any]:
    """Gather integrity, size, and row-count metrics for the SQLite database.

    Runs PRAGMA integrity_check, the page/freelist pragmas, and one COUNT(*)
    per known application table, timing each phase in milliseconds so a slow
    database stands out in the diagnostics payload. File sizes cover the main
    database plus its WAL/SHM sidecars.
    """
    db_path = _db_path()
    wal_path = f"{db_path}-wal"
    shm_path = f"{db_path}-shm"

    def _size_on_disk(path: str) -> int:
        # Sidecar files may not exist yet (e.g. no WAL) — report those as 0.
        try:
            return os.path.getsize(path)
        except OSError:
            return 0

    overall_started = perf_counter()
    with _connect() as conn:

        def _count_rows(table: str) -> int:
            # `table` only ever comes from the fixed allowlist below, so the
            # f-string SQL cannot be influenced by external input.
            return int(conn.execute(f"SELECT COUNT(*) FROM {table}").fetchone()[0] or 0)

        check_started = perf_counter()
        integrity_row = conn.execute("PRAGMA integrity_check").fetchone()
        integrity_ms = round((perf_counter() - check_started) * 1000, 1)
        integrity = str(integrity_row[0]) if integrity_row else "unknown"

        pragmas_started = perf_counter()
        page_size_row = conn.execute("PRAGMA page_size").fetchone()
        page_count_row = conn.execute("PRAGMA page_count").fetchone()
        freelist_row = conn.execute("PRAGMA freelist_count").fetchone()
        pragma_ms = round((perf_counter() - pragmas_started) * 1000, 1)

        counts_started = perf_counter()
        table_counts = {
            table: _count_rows(table)
            for table in (
                "users",
                "requests_cache",
                "artwork_cache_status",
                "signup_invites",
                "settings",
                "actions",
                "snapshots",
                "seerr_media_failures",
                "password_reset_tokens",
            )
        }
        row_count_ms = round((perf_counter() - counts_started) * 1000, 1)

        page_size = int(page_size_row[0] or 0) if page_size_row else 0
        page_count = int(page_count_row[0] or 0) if page_count_row else 0
        freelist_pages = int(freelist_row[0] or 0) if freelist_row else 0
        return {
            "integrity_check": integrity,
            "database_path": db_path,
            "database_size_bytes": _size_on_disk(db_path),
            "wal_size_bytes": _size_on_disk(wal_path),
            "shm_size_bytes": _size_on_disk(shm_path),
            "page_size_bytes": page_size,
            "page_count": page_count,
            "freelist_pages": freelist_pages,
            "allocated_bytes": page_size * page_count,
            "free_bytes": page_size * freelist_pages,
            "row_counts": table_counts,
            "timings_ms": {
                "integrity_check": integrity_ms,
                "pragmas": pragma_ms,
                "row_counts": row_count_ms,
                "total": round((perf_counter() - overall_started) * 1000, 1),
            },
        }
def vacuum_db() -> None:
    """Rebuild the database file with VACUUM, reclaiming freelist pages.

    Blocking and potentially slow on large databases; intended to be run
    from an admin/maintenance path rather than a hot request path.
    """
    with _connect() as conn:
        conn.execute("VACUUM")

View File

@@ -215,6 +215,10 @@ SETTING_KEYS: List[str] = [
"site_banner_enabled",
"site_banner_message",
"site_banner_tone",
"site_login_show_jellyfin_login",
"site_login_show_local_login",
"site_login_show_forgot_password",
"site_login_show_signup_link",
]
@@ -1012,6 +1016,7 @@ async def requests_all(
take: int = 50,
skip: int = 0,
days: Optional[int] = None,
stage: str = "all",
user: Dict[str, str] = Depends(get_current_user),
) -> Dict[str, Any]:
if user.get("role") != "admin":
@@ -1021,8 +1026,9 @@ async def requests_all(
since_iso = None
if days is not None and int(days) > 0:
since_iso = (datetime.now(timezone.utc) - timedelta(days=int(days))).isoformat()
rows = get_cached_requests(limit=take, offset=skip, since_iso=since_iso)
total = get_cached_requests_count(since_iso=since_iso)
status_codes = requests_router.request_stage_filter_codes(stage)
rows = get_cached_requests(limit=take, offset=skip, since_iso=since_iso, status_codes=status_codes)
total = get_cached_requests_count(since_iso=since_iso, status_codes=status_codes)
results = []
for row in rows:
status = row.get("status")

View File

@@ -49,12 +49,21 @@ from ..services.user_cache import (
match_jellyseerr_user_id,
save_jellyfin_users_cache,
)
from ..services.invite_email import send_templated_email
from ..services.invite_email import send_templated_email, smtp_email_config_ready
from ..services.password_reset import (
PasswordResetUnavailableError,
apply_password_reset,
request_password_reset,
verify_password_reset_token,
)
router = APIRouter(prefix="/auth", tags=["auth"])
logger = logging.getLogger(__name__)
SELF_SERVICE_INVITE_MASTER_ID_KEY = "self_service_invite_master_id"
STREAM_TOKEN_TTL_SECONDS = 120
PASSWORD_RESET_GENERIC_MESSAGE = (
"If an account exists for that username or email, a password reset link has been sent."
)
_LOGIN_RATE_LOCK = Lock()
_LOGIN_ATTEMPTS_BY_IP: dict[str, deque[float]] = defaultdict(deque)
@@ -223,6 +232,11 @@ def _extract_http_error_detail(exc: Exception) -> str:
return str(exc)
def _requested_user_agent(request: Request) -> str:
user_agent = request.headers.get("user-agent", "")
return user_agent[:512]
async def _refresh_jellyfin_user_cache(client: JellyfinClient) -> None:
try:
users = await client.get_users()
@@ -880,6 +894,100 @@ async def signup(payload: dict) -> dict:
}
@router.post("/password/forgot")
async def forgot_password(payload: dict, request: Request) -> dict:
    """Kick off a password reset for a username or email.

    Always responds with the same generic message regardless of whether an
    account matched, so the endpoint cannot be used to enumerate accounts.
    Fails fast with 503 when SMTP is not configured, since no reset email
    could be delivered anyway.
    """
    if not isinstance(payload, dict):
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid payload")
    identifier = payload.get("identifier") or payload.get("username") or payload.get("email")
    if not isinstance(identifier, str) or not identifier.strip():
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Username or email is required.")
    ready, detail = smtp_email_config_ready()
    if not ready:
        raise HTTPException(
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
            detail=f"Password reset email is unavailable: {detail}",
        )
    requester_ip = _auth_client_ip(request)
    # Normalized/truncated copy used only for logging.
    redacted_identifier = identifier.strip().lower()[:256]
    logger.info("password reset requested identifier=%s client=%s", redacted_identifier, requester_ip)
    try:
        outcome = await request_password_reset(
            identifier,
            requested_by_ip=requester_ip,
            requested_user_agent=_requested_user_agent(request),
        )
        if outcome.get("issued"):
            logger.info(
                "password reset issued username=%s provider=%s recipient=%s client=%s",
                outcome.get("username"),
                outcome.get("auth_provider"),
                outcome.get("recipient_email"),
                requester_ip,
            )
        else:
            logger.info(
                "password reset request completed with no eligible account identifier=%s client=%s",
                redacted_identifier,
                requester_ip,
            )
    except Exception as exc:
        # Deliberately swallowed: a dispatch failure must not change the
        # response, or it would leak whether the account exists.
        logger.warning(
            "password reset email dispatch failed identifier=%s client=%s detail=%s",
            redacted_identifier,
            requester_ip,
            str(exc),
        )
    return {"status": "ok", "message": PASSWORD_RESET_GENERIC_MESSAGE}
@router.get("/password/reset/verify")
async def password_reset_verify(token: str) -> dict:
    """Check a reset token's validity without consuming it.

    Returns the verification payload from verify_password_reset_token, or a
    400 carrying that helper's ValueError message when the token is invalid,
    expired, or already used.
    """
    cleaned = token.strip() if isinstance(token, str) else ""
    if not cleaned:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Reset token is required.")
    try:
        return verify_password_reset_token(cleaned)
    except ValueError as exc:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc)) from exc
@router.post("/password/reset")
async def password_reset(payload: dict) -> dict:
    """Consume a reset token and set the account's new password.

    Validation errors map to 400, an unavailable reset backend to 503, and
    any unexpected failure to 502 with a sanitized detail string. On success
    the result from apply_password_reset is returned to the client.
    """
    if not isinstance(payload, dict):
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid payload")
    token = payload.get("token")
    new_password = payload.get("new_password")
    if not isinstance(token, str) or not token.strip():
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Reset token is required.")
    # Minimum length is checked on the stripped value so whitespace padding
    # cannot satisfy the policy.
    if not isinstance(new_password, str) or len(new_password.strip()) < 8:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Password must be at least 8 characters.",
        )
    try:
        outcome = await apply_password_reset(token.strip(), new_password.strip())
    except PasswordResetUnavailableError as exc:
        raise HTTPException(status_code=status.HTTP_503_SERVICE_UNAVAILABLE, detail=str(exc)) from exc
    except ValueError as exc:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc)) from exc
    except Exception as exc:
        failure_detail = _extract_http_error_detail(exc)
        logger.warning("password reset failed token_present=%s detail=%s", bool(token), failure_detail)
        raise HTTPException(
            status_code=status.HTTP_502_BAD_GATEWAY,
            detail=f"Password reset failed: {failure_detail}",
        ) from exc
    logger.info(
        "password reset completed username=%s provider=%s",
        outcome.get("username"),
        outcome.get("provider"),
    )
    return outcome
@router.get("/profile")
async def profile(current_user: dict = Depends(get_current_user)) -> dict:
username = current_user.get("username") or ""

View File

@@ -76,6 +76,7 @@ def _request_actions_brief(entries: Any) -> list[dict[str, Any]]:
async def events_stream(
request: Request,
recent_days: int = 90,
recent_stage: str = "all",
user: Dict[str, Any] = Depends(get_current_user_event_stream),
) -> StreamingResponse:
recent_days = max(0, min(int(recent_days or 90), 3650))
@@ -103,6 +104,7 @@ async def events_stream(
take=recent_take,
skip=0,
days=recent_days,
stage=recent_stage,
user=user,
)
results = recent_payload.get("results") if isinstance(recent_payload, dict) else []
@@ -110,6 +112,7 @@ async def events_stream(
"type": "home_recent",
"ts": datetime.now(timezone.utc).isoformat(),
"days": recent_days,
"stage": recent_stage,
"results": results if isinstance(results, list) else [],
}
except Exception as exc:
@@ -117,6 +120,7 @@ async def events_stream(
"type": "home_recent",
"ts": datetime.now(timezone.utc).isoformat(),
"days": recent_days,
"stage": recent_stage,
"error": str(exc),
}
signature = json.dumps(payload, ensure_ascii=True, separators=(",", ":"), default=str)

View File

@@ -26,7 +26,7 @@ from ..db import (
get_cached_requests,
get_cached_requests_since,
get_cached_request_by_media_id,
get_request_cache_by_id,
get_request_cache_lookup,
get_request_cache_payload,
get_request_cache_last_updated,
get_request_cache_count,
@@ -35,7 +35,9 @@ from ..db import (
repair_request_cache_titles,
prune_duplicate_requests_cache,
upsert_request_cache,
upsert_request_cache_many,
upsert_artwork_cache_status,
upsert_artwork_cache_status_many,
get_artwork_cache_missing_count,
get_artwork_cache_status_count,
get_setting,
@@ -47,7 +49,7 @@ from ..db import (
clear_seerr_media_failure,
)
from ..models import Snapshot, TriageResult, RequestType
from ..services.snapshot import build_snapshot
from ..services.snapshot import build_snapshot, jellyfin_item_matches_request
router = APIRouter(prefix="/requests", tags=["requests"], dependencies=[Depends(get_current_user)])
@@ -91,6 +93,17 @@ STATUS_LABELS = {
6: "Partially ready",
}
REQUEST_STAGE_CODES = {
"all": None,
"pending": [1],
"approved": [2],
"declined": [3],
"ready": [4],
"working": [5],
"partial": [6],
"in_progress": [2, 5, 6],
}
def _cache_get(key: str) -> Optional[Dict[str, Any]]:
cached = _detail_cache.get(key)
@@ -105,6 +118,57 @@ def _cache_get(key: str) -> Optional[Dict[str, Any]]:
def _cache_set(key: str, payload: Dict[str, Any]) -> None:
_detail_cache[key] = (time.time() + CACHE_TTL_SECONDS, payload)
def _status_label_with_jellyfin(current_status: Any, jellyfin_available: bool) -> str:
    """Resolve a display label, upgrading it when Jellyfin has the item.

    Without Jellyfin availability the plain Seerr status label is used.
    When the item IS in Jellyfin, a partially-ready request (code 6) keeps
    its "partial" label and every other status is shown as ready to watch.
    """
    if not jellyfin_available:
        return _status_label(current_status)
    try:
        code = int(current_status)
    except (TypeError, ValueError):
        code = None
    return STATUS_LABELS[6] if code == 6 else STATUS_LABELS[4]
async def _request_is_available_in_jellyfin(
    jellyfin: JellyfinClient,
    title: Optional[str],
    year: Optional[int],
    media_type: Optional[str],
    request_payload: Optional[Dict[str, Any]],
    availability_cache: Dict[str, bool],
) -> bool:
    """Return True when a matching item for this request exists in Jellyfin.

    Results are memoized in `availability_cache` (shared by the caller across
    a batch of rows) keyed by media type, lowercased title, year, and the
    request payload's id, so repeated rows don't re-query Jellyfin. Any
    search failure is treated — and cached — as "not available".
    """
    # No client or no title means there is nothing meaningful to search for.
    if not jellyfin.configured() or not title:
        return False
    cache_key = f"{media_type or ''}:{title.lower()}:{year or ''}:{request_payload.get('id') if isinstance(request_payload, dict) else ''}"
    cached_value = availability_cache.get(cache_key)
    # `is not None` so a cached False short-circuits too.
    if cached_value is not None:
        return cached_value
    types = ["Movie"] if media_type == "movie" else ["Series"]
    try:
        search = await jellyfin.search_items(title, types, limit=50)
    except Exception:
        # Best-effort: search errors are cached as unavailable rather than
        # propagated, so one flaky Jellyfin call doesn't fail the whole page.
        availability_cache[cache_key] = False
        return False
    if isinstance(search, dict):
        # Jellyfin responses have been seen with either "Items" or "items".
        items = search.get("Items") or search.get("items") or []
        request_type = RequestType.movie if media_type == "movie" else RequestType.tv
        for item in items:
            if not isinstance(item, dict):
                continue
            # Matching semantics (title/year/season handling) are delegated
            # to the shared snapshot helper so list and detail views agree.
            if jellyfin_item_matches_request(
                item,
                title=title,
                year=year,
                request_type=request_type,
                request_payload=request_payload,
            ):
                availability_cache[cache_key] = True
                return True
    availability_cache[cache_key] = False
    return False
_failed_detail_cache.pop(key, None)
@@ -152,6 +216,23 @@ def _status_label(value: Any) -> str:
return "Unknown"
def normalize_request_stage_filter(value: Optional[str]) -> str:
    """Canonicalize a user-supplied stage filter to a REQUEST_STAGE_CODES key.

    Lowercases, trims, and converts dashes/spaces to underscores, maps the
    "processing"/"inprogress" aliases onto "in_progress", and falls back to
    "all" for anything unrecognized or non-string.
    """
    if not isinstance(value, str):
        return "all"
    key = value.strip().lower().replace("-", "_").replace(" ", "_")
    if not key:
        return "all"
    key = {"processing": "in_progress", "inprogress": "in_progress"}.get(key, key)
    return key if key in REQUEST_STAGE_CODES else "all"
def request_stage_filter_codes(value: Optional[str]) -> Optional[list[int]]:
    """Translate a stage filter string into Seerr status codes.

    Returns a fresh list (safe for callers to mutate), or None when the
    stage means "no filtering" (e.g. "all" or an unknown value).
    """
    stage = normalize_request_stage_filter(value)
    codes = REQUEST_STAGE_CODES.get(stage)
    if not codes:
        return None
    return list(codes)
def _normalize_username(value: Any) -> Optional[str]:
if not isinstance(value, str):
return None
@@ -383,26 +464,55 @@ def _upsert_artwork_status(
poster_cached: Optional[bool] = None,
backdrop_cached: Optional[bool] = None,
) -> None:
record = _build_artwork_status_record(payload, cache_mode, poster_cached, backdrop_cached)
if not record:
return
upsert_artwork_cache_status(**record)
def _build_request_cache_record(payload: Dict[str, Any], request_payload: Dict[str, Any]) -> Dict[str, Any]:
return {
"request_id": payload.get("request_id"),
"media_id": payload.get("media_id"),
"media_type": payload.get("media_type"),
"status": payload.get("status"),
"title": payload.get("title"),
"year": payload.get("year"),
"requested_by": payload.get("requested_by"),
"requested_by_norm": payload.get("requested_by_norm"),
"requested_by_id": payload.get("requested_by_id"),
"created_at": payload.get("created_at"),
"updated_at": payload.get("updated_at"),
"payload_json": json.dumps(request_payload, ensure_ascii=True),
}
def _build_artwork_status_record(
    payload: Dict[str, Any],
    cache_mode: str,
    poster_cached: Optional[bool] = None,
    backdrop_cached: Optional[bool] = None,
) -> Optional[Dict[str, Any]]:
    """Build the keyword record for an artwork_cache_status upsert.

    Parses the Seerr item to recover the request id, TMDB lookup, and
    artwork paths, and computes the cached flags via _compute_cached_flags
    (honoring explicit poster/backdrop overrides when provided).

    Returns None when the payload carries no integer request id, so callers
    can skip the row. NOTE(review): removed merge residue from this block —
    a bare `return` shadowing the `return None`, and a direct
    upsert_artwork_cache_status(...) call left over from the pre-batching
    version; callers now collect these records and flush them through
    upsert_artwork_cache_status_many, so the inline upsert double-wrote the
    row and made the function return before producing the record.
    """
    parsed = _parse_request_payload(payload)
    request_id = parsed.get("request_id")
    if not isinstance(request_id, int):
        return None
    tmdb_id, media_type = _extract_tmdb_lookup(payload)
    poster_path, backdrop_path = _extract_artwork_paths(payload)
    has_tmdb = bool(tmdb_id and media_type)
    poster_cached_flag, backdrop_cached_flag = _compute_cached_flags(
        poster_path, backdrop_path, cache_mode, poster_cached, backdrop_cached
    )
    return {
        "request_id": request_id,
        "tmdb_id": tmdb_id,
        "media_type": media_type,
        "poster_path": poster_path,
        "backdrop_path": backdrop_path,
        "has_tmdb": has_tmdb,
        "poster_cached": poster_cached_flag,
        "backdrop_cached": backdrop_cached_flag,
    }
def _collect_artwork_cache_disk_stats() -> tuple[int, int]:
@@ -603,6 +713,16 @@ async def _sync_all_requests(client: JellyseerrClient) -> int:
if not isinstance(items, list) or not items:
logger.info("Seerr sync completed: no more results at skip=%s", skip)
break
page_request_ids = [
payload.get("request_id")
for item in items
if isinstance(item, dict)
for payload in [_parse_request_payload(item)]
if isinstance(payload.get("request_id"), int)
]
cached_by_request_id = get_request_cache_lookup(page_request_ids)
page_cache_records: list[Dict[str, Any]] = []
page_artwork_records: list[Dict[str, Any]] = []
for item in items:
if not isinstance(item, dict):
continue
@@ -610,10 +730,9 @@ async def _sync_all_requests(client: JellyseerrClient) -> int:
request_id = payload.get("request_id")
cached_title = None
if isinstance(request_id, int):
if not payload.get("title"):
cached = get_request_cache_by_id(request_id)
if cached and cached.get("title"):
cached_title = cached.get("title")
cached = cached_by_request_id.get(request_id)
if not payload.get("title") and cached and cached.get("title"):
cached_title = cached.get("title")
needs_details = (
not payload.get("title")
or not payload.get("media_id")
@@ -644,25 +763,17 @@ async def _sync_all_requests(client: JellyseerrClient) -> int:
payload["title"] = cached_title
if not isinstance(payload.get("request_id"), int):
continue
payload_json = json.dumps(item, ensure_ascii=True)
upsert_request_cache(
request_id=payload.get("request_id"),
media_id=payload.get("media_id"),
media_type=payload.get("media_type"),
status=payload.get("status"),
title=payload.get("title"),
year=payload.get("year"),
requested_by=payload.get("requested_by"),
requested_by_norm=payload.get("requested_by_norm"),
requested_by_id=payload.get("requested_by_id"),
created_at=payload.get("created_at"),
updated_at=payload.get("updated_at"),
payload_json=payload_json,
)
page_cache_records.append(_build_request_cache_record(payload, item))
if isinstance(item, dict):
_upsert_artwork_status(item, cache_mode)
artwork_record = _build_artwork_status_record(item, cache_mode)
if artwork_record:
page_artwork_records.append(artwork_record)
stored += 1
_sync_state["stored"] = stored
if page_cache_records:
upsert_request_cache_many(page_cache_records)
if page_artwork_records:
upsert_artwork_cache_status_many(page_artwork_records)
if len(items) < take:
logger.info("Seerr sync completed: stored=%s", stored)
break
@@ -721,6 +832,16 @@ async def _sync_delta_requests(client: JellyseerrClient) -> int:
if not isinstance(items, list) or not items:
logger.info("Seerr delta sync completed: no more results at skip=%s", skip)
break
page_request_ids = [
payload.get("request_id")
for item in items
if isinstance(item, dict)
for payload in [_parse_request_payload(item)]
if isinstance(payload.get("request_id"), int)
]
cached_by_request_id = get_request_cache_lookup(page_request_ids)
page_cache_records: list[Dict[str, Any]] = []
page_artwork_records: list[Dict[str, Any]] = []
page_changed = False
for item in items:
if not isinstance(item, dict):
@@ -728,7 +849,7 @@ async def _sync_delta_requests(client: JellyseerrClient) -> int:
payload = _parse_request_payload(item)
request_id = payload.get("request_id")
if isinstance(request_id, int):
cached = get_request_cache_by_id(request_id)
cached = cached_by_request_id.get(request_id)
incoming_updated = payload.get("updated_at")
cached_title = cached.get("title") if cached else None
if cached and incoming_updated and cached.get("updated_at") == incoming_updated and cached.get("title"):
@@ -762,26 +883,18 @@ async def _sync_delta_requests(client: JellyseerrClient) -> int:
payload["title"] = cached_title
if not isinstance(payload.get("request_id"), int):
continue
payload_json = json.dumps(item, ensure_ascii=True)
upsert_request_cache(
request_id=payload.get("request_id"),
media_id=payload.get("media_id"),
media_type=payload.get("media_type"),
status=payload.get("status"),
title=payload.get("title"),
year=payload.get("year"),
requested_by=payload.get("requested_by"),
requested_by_norm=payload.get("requested_by_norm"),
requested_by_id=payload.get("requested_by_id"),
created_at=payload.get("created_at"),
updated_at=payload.get("updated_at"),
payload_json=payload_json,
)
page_cache_records.append(_build_request_cache_record(payload, item))
if isinstance(item, dict):
_upsert_artwork_status(item, cache_mode)
artwork_record = _build_artwork_status_record(item, cache_mode)
if artwork_record:
page_artwork_records.append(artwork_record)
stored += 1
page_changed = True
_sync_state["stored"] = stored
if page_cache_records:
upsert_request_cache_many(page_cache_records)
if page_artwork_records:
upsert_artwork_cache_status_many(page_artwork_records)
if not page_changed:
unchanged_pages += 1
else:
@@ -866,6 +979,8 @@ async def _prefetch_artwork_cache(
batch = get_request_cache_payloads(limit=limit, offset=offset)
if not batch:
break
page_cache_records: list[Dict[str, Any]] = []
page_artwork_records: list[Dict[str, Any]] = []
for row in batch:
payload = row.get("payload")
if not isinstance(payload, dict):
@@ -893,20 +1008,7 @@ async def _prefetch_artwork_cache(
parsed = _parse_request_payload(payload)
request_id = parsed.get("request_id")
if isinstance(request_id, int):
upsert_request_cache(
request_id=request_id,
media_id=parsed.get("media_id"),
media_type=parsed.get("media_type"),
status=parsed.get("status"),
title=parsed.get("title"),
year=parsed.get("year"),
requested_by=parsed.get("requested_by"),
requested_by_norm=parsed.get("requested_by_norm"),
requested_by_id=parsed.get("requested_by_id"),
created_at=parsed.get("created_at"),
updated_at=parsed.get("updated_at"),
payload_json=json.dumps(payload, ensure_ascii=True),
)
page_cache_records.append(_build_request_cache_record(parsed, payload))
poster_cached_flag = False
backdrop_cached_flag = False
if poster_path:
@@ -921,17 +1023,23 @@ async def _prefetch_artwork_cache(
backdrop_cached_flag = bool(await cache_tmdb_image(backdrop_path, "w780"))
except httpx.HTTPError:
backdrop_cached_flag = False
_upsert_artwork_status(
artwork_record = _build_artwork_status_record(
payload,
cache_mode,
poster_cached=poster_cached_flag if poster_path else None,
backdrop_cached=backdrop_cached_flag if backdrop_path else None,
)
if artwork_record:
page_artwork_records.append(artwork_record)
processed += 1
if processed % 25 == 0:
_artwork_prefetch_state.update(
{"processed": processed, "message": f"Cached artwork for {processed} requests"}
)
if page_cache_records:
upsert_request_cache_many(page_cache_records)
if page_artwork_records:
upsert_artwork_cache_status_many(page_artwork_records)
offset += limit
total_requests = get_request_cache_count()
@@ -1063,6 +1171,7 @@ def _get_recent_from_cache(
limit: int,
offset: int,
since_iso: Optional[str],
status_codes: Optional[list[int]] = None,
) -> List[Dict[str, Any]]:
items = _recent_cache.get("items") or []
results = []
@@ -1078,6 +1187,8 @@ def _get_recent_from_cache(
item_dt = _parse_iso_datetime(candidate)
if not item_dt or item_dt < since_dt:
continue
if status_codes and item.get("status") not in status_codes:
continue
results.append(item)
return results[offset : offset + limit]
@@ -1235,23 +1346,9 @@ def get_requests_sync_state() -> Dict[str, Any]:
async def _ensure_request_access(
client: JellyseerrClient, request_id: int, user: Dict[str, str]
) -> None:
if user.get("role") == "admin":
if user.get("role") == "admin" or user.get("username"):
return
runtime = get_runtime_settings()
mode = (runtime.requests_data_source or "prefer_cache").lower()
cached = get_request_cache_payload(request_id)
if mode != "always_js":
if cached is None:
logger.debug("access cache miss: request_id=%s mode=%s", request_id, mode)
raise HTTPException(status_code=404, detail="Request not found in cache")
logger.debug("access cache hit: request_id=%s mode=%s", request_id, mode)
if _request_matches_user(cached, user.get("username", "")):
return
raise HTTPException(status_code=403, detail="Request not accessible for this user")
logger.debug("access cache miss: request_id=%s mode=%s", request_id, mode)
details = await _get_request_details(client, request_id)
if details is None or not _request_matches_user(details, user.get("username", "")):
raise HTTPException(status_code=403, detail="Request not accessible for this user")
raise HTTPException(status_code=403, detail="Request not accessible for this user")
def _build_recent_map(response: Dict[str, Any]) -> Dict[int, Dict[str, Any]]:
@@ -1521,6 +1618,7 @@ async def recent_requests(
take: int = 6,
skip: int = 0,
days: int = 90,
stage: str = "all",
user: Dict[str, str] = Depends(get_current_user),
) -> dict:
runtime = get_runtime_settings()
@@ -1542,44 +1640,22 @@ async def recent_requests(
since_iso = None
if days > 0:
since_iso = (datetime.now(timezone.utc) - timedelta(days=days)).isoformat()
status_codes = request_stage_filter_codes(stage)
if _recent_cache_stale():
_refresh_recent_cache_from_db()
rows = _get_recent_from_cache(requested_by, requested_by_id, take, skip, since_iso)
rows = _get_recent_from_cache(
requested_by,
requested_by_id,
take,
skip,
since_iso,
status_codes=status_codes,
)
cache_mode = (runtime.artwork_cache_mode or "remote").lower()
allow_title_hydrate = False
allow_artwork_hydrate = client.configured()
jellyfin = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key)
jellyfin_cache: Dict[str, bool] = {}
async def _jellyfin_available(
title_value: Optional[str], year_value: Optional[int], media_type_value: Optional[str]
) -> bool:
if not jellyfin.configured() or not title_value:
return False
cache_key = f"{media_type_value or ''}:{title_value.lower()}:{year_value or ''}"
cached_value = jellyfin_cache.get(cache_key)
if cached_value is not None:
return cached_value
types = ["Movie"] if media_type_value == "movie" else ["Series"]
try:
search = await jellyfin.search_items(title_value, types)
except Exception:
jellyfin_cache[cache_key] = False
return False
if isinstance(search, dict):
items = search.get("Items") or search.get("items") or []
for item in items:
if not isinstance(item, dict):
continue
name = item.get("Name") or item.get("title")
year = item.get("ProductionYear") or item.get("Year")
if name and name.strip().lower() == title_value.strip().lower():
if year_value and year and int(year) != int(year_value):
continue
jellyfin_cache[cache_key] = True
return True
jellyfin_cache[cache_key] = False
return False
results = []
for row in rows:
status = row.get("status")
@@ -1674,10 +1750,16 @@ async def recent_requests(
payload_json=json.dumps(details, ensure_ascii=True),
)
status_label = _status_label(status)
if status_label == "Working on it":
is_available = await _jellyfin_available(title, year, row.get("media_type"))
if is_available:
status_label = "Available"
if status_label in {"Working on it", "Ready to watch", "Partially ready"}:
is_available = await _request_is_available_in_jellyfin(
jellyfin,
title,
year,
row.get("media_type"),
details if isinstance(details, dict) else None,
jellyfin_cache,
)
status_label = _status_label_with_jellyfin(status, is_available)
results.append(
{
"id": row.get("request_id"),
@@ -1721,6 +1803,8 @@ async def search_requests(
pass
results = []
jellyfin = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key)
jellyfin_cache: Dict[str, bool] = {}
for item in response.get("results", []):
media_type = item.get("mediaType")
title = item.get("title") or item.get("name")
@@ -1733,6 +1817,8 @@ async def search_requests(
request_id = None
status = None
status_label = None
requested_by = None
accessible = False
media_info = item.get("mediaInfo") or {}
media_info_id = media_info.get("id")
requests = media_info.get("requests")
@@ -1741,27 +1827,31 @@ async def search_requests(
status = requests[0].get("status")
status_label = _status_label(status)
elif isinstance(media_info_id, int):
username_norm = _normalize_username(user.get("username", ""))
requested_by_id = user.get("jellyseerr_user_id")
requested_by = None if user.get("role") == "admin" else username_norm
requested_by_id = None if user.get("role") == "admin" else requested_by_id
cached = get_cached_request_by_media_id(
media_info_id,
requested_by_norm=requested_by,
requested_by_id=requested_by_id,
)
if cached:
request_id = cached.get("request_id")
status = cached.get("status")
status_label = _status_label(status)
if user.get("role") != "admin":
if isinstance(request_id, int):
if isinstance(request_id, int):
details = get_request_cache_payload(request_id)
if not isinstance(details, dict):
details = await _get_request_details(client, request_id)
if not _request_matches_user(details, user.get("username", "")):
continue
else:
continue
if user.get("role") == "admin":
requested_by = _request_display_name(details)
accessible = True
if status is not None:
is_available = await _request_is_available_in_jellyfin(
jellyfin,
title,
year,
media_type,
details if isinstance(details, dict) else None,
jellyfin_cache,
)
status_label = _status_label_with_jellyfin(status, is_available)
results.append(
{
@@ -1772,6 +1862,8 @@ async def search_requests(
"requestId": request_id,
"status": status,
"statusLabel": status_label,
"requestedBy": requested_by,
"accessible": accessible,
}
)

View File

@@ -24,6 +24,12 @@ def _build_site_info(include_changelog: bool) -> Dict[str, Any]:
"message": banner_message,
"tone": tone,
},
"login": {
"showJellyfinLogin": bool(runtime.site_login_show_jellyfin_login),
"showLocalLogin": bool(runtime.site_login_show_local_login),
"showForgotPassword": bool(runtime.site_login_show_forgot_password),
"showSignupLink": bool(runtime.site_login_show_signup_link),
},
}
if include_changelog:
info["changelog"] = (CHANGELOG or "").strip()

View File

@@ -29,6 +29,10 @@ _BOOL_FIELDS = {
"magent_notify_webhook_enabled",
"jellyfin_sync_to_arr",
"site_banner_enabled",
"site_login_show_jellyfin_login",
"site_login_show_local_login",
"site_login_show_forgot_password",
"site_login_show_signup_link",
}
_SKIP_OVERRIDE_FIELDS = {"site_build_number", "site_changelog"}

View File

@@ -16,7 +16,7 @@ from ..clients.qbittorrent import QBittorrentClient
from ..clients.radarr import RadarrClient
from ..clients.sonarr import SonarrClient
from ..config import settings as env_settings
from ..db import run_integrity_check
from ..db import get_database_diagnostics
from ..runtime import get_runtime_settings
from .invite_email import send_test_email, smtp_email_config_ready, smtp_email_delivery_warning
@@ -205,12 +205,16 @@ async def _run_http_post(
async def _run_database_check() -> Dict[str, Any]:
    """Report SQLite health for the diagnostics panel.

    Runs get_database_diagnostics() in a worker thread (it does blocking
    sqlite I/O), then summarizes integrity, the cached-request row count,
    and the WAL size into a human-readable message. Status is "up" only
    when integrity_check returned "ok"; anything else is "degraded".
    """
    # NOTE(review): removed the stale `integrity = await asyncio.to_thread(
    # run_integrity_check)` line left over from the previous implementation.
    # The ..db import now brings in get_database_diagnostics instead of
    # run_integrity_check, so that leftover call would raise NameError at
    # request time (and its result was immediately overwritten anyway).
    detail = await asyncio.to_thread(get_database_diagnostics)
    integrity = _clean_text(detail.get("integrity_check"), "unknown")
    info = detail if isinstance(detail, dict) else {}
    requests_cached = info.get("row_counts", {}).get("requests_cache", 0)
    wal_size_bytes = info.get("wal_size_bytes", 0)
    wal_size_megabytes = round(float(wal_size_bytes or 0) / (1024 * 1024), 2)
    status = "up" if integrity == "ok" else "degraded"
    return {
        "status": status,
        "message": f"SQLite {integrity} · {requests_cached} cached requests · WAL {wal_size_megabytes:.2f} MB",
        "detail": detail,
    }

View File

@@ -6,8 +6,10 @@ import json
import logging
import re
import smtplib
from email.generator import BytesGenerator
from email.message import EmailMessage
from email.utils import formataddr
from io import BytesIO
from typing import Any, Dict, Optional
from ..build_info import BUILD_NUMBER
@@ -21,6 +23,8 @@ TEMPLATE_SETTING_PREFIX = "invite_email_template_"
TEMPLATE_KEYS = ("invited", "welcome", "warning", "banned")
EMAIL_PATTERN = re.compile(r"^[^@\s]+@[^@\s]+\.[^@\s]+$")
PLACEHOLDER_PATTERN = re.compile(r"{{\s*([a-zA-Z0-9_]+)\s*}}")
EXCHANGE_MESSAGE_ID_PATTERN = re.compile(r"<([^>]+)>")
EXCHANGE_INTERNAL_ID_PATTERN = re.compile(r"\[InternalId=([^\],]+)")
TEMPLATE_METADATA: Dict[str, Dict[str, Any]] = {
"invited": {
@@ -396,7 +400,56 @@ def smtp_email_delivery_warning() -> Optional[str]:
return None
def _send_email_sync(*, recipient_email: str, subject: str, body_text: str, body_html: str) -> None:
def _flatten_message(message: EmailMessage) -> bytes:
buffer = BytesIO()
BytesGenerator(buffer).flatten(message)
return buffer.getvalue()
def _decode_smtp_message(value: bytes | str | None) -> str:
if value is None:
return ""
if isinstance(value, bytes):
return value.decode("utf-8", errors="replace")
return str(value)
def _parse_exchange_receipt(value: bytes | str | None) -> Dict[str, str]:
    """Extract provider identifiers from an Exchange-style DATA response.

    Always includes the decoded response under "raw"; adds
    "provider_message_id" (text inside <...>) and "provider_internal_id"
    (the [InternalId=...] value) only when the respective pattern matches.
    """
    text = _decode_smtp_message(value)
    receipt: Dict[str, str] = {"raw": text}
    id_match = EXCHANGE_MESSAGE_ID_PATTERN.search(text)
    if id_match:
        receipt["provider_message_id"] = id_match.group(1)
    internal_match = EXCHANGE_INTERNAL_ID_PATTERN.search(text)
    if internal_match:
        receipt["provider_internal_id"] = internal_match.group(1)
    return receipt
def _send_via_smtp_session(
    smtp: smtplib.SMTP,
    *,
    from_address: str,
    recipient_email: str,
    message: EmailMessage,
) -> Dict[str, str]:
    """Drive MAIL FROM / RCPT TO / DATA by hand on an open SMTP session.

    Unlike send_message(), the low-level commands expose the server's final
    DATA reply, which Exchange uses to report queue identifiers. Returns the
    parsed receipt plus each raw command response. Raises the matching
    smtplib exception when any step answers with a 4xx/5xx code.
    """
    mail_code, mail_reply = smtp.mail(from_address)
    if mail_code >= 400:
        raise smtplib.SMTPResponseException(mail_code, mail_reply)
    rcpt_code, rcpt_reply = smtp.rcpt(recipient_email)
    if rcpt_code >= 400:
        raise smtplib.SMTPRecipientsRefused({recipient_email: (rcpt_code, rcpt_reply)})
    data_code, data_reply = smtp.data(_flatten_message(message))
    if data_code >= 400:
        raise smtplib.SMTPDataError(data_code, data_reply)
    receipt = _parse_exchange_receipt(data_reply)
    receipt["mail_response"] = _decode_smtp_message(mail_reply)
    receipt["rcpt_response"] = _decode_smtp_message(rcpt_reply)
    receipt["data_response"] = _decode_smtp_message(data_reply)
    return receipt
def _send_email_sync(*, recipient_email: str, subject: str, body_text: str, body_html: str) -> Dict[str, str]:
runtime = get_runtime_settings()
host = _normalize_display_text(runtime.magent_notify_email_smtp_host)
port = int(runtime.magent_notify_email_smtp_port or 587)
@@ -437,9 +490,20 @@ def _send_email_sync(*, recipient_email: str, subject: str, body_text: str, body
if username and password:
smtp.login(username, password)
logger.debug("smtp login succeeded host=%s username=%s", host, username)
smtp.send_message(message)
logger.info("smtp send accepted recipient=%s host=%s mode=ssl", recipient_email, host)
return
receipt = _send_via_smtp_session(
smtp,
from_address=from_address,
recipient_email=recipient_email,
message=message,
)
logger.info(
"smtp send accepted recipient=%s host=%s mode=ssl provider_message_id=%s provider_internal_id=%s",
recipient_email,
host,
receipt.get("provider_message_id"),
receipt.get("provider_internal_id"),
)
return receipt
with smtplib.SMTP(host, port, timeout=20) as smtp:
logger.debug("smtp connection opened host=%s port=%s", host, port)
@@ -451,8 +515,20 @@ def _send_email_sync(*, recipient_email: str, subject: str, body_text: str, body
if username and password:
smtp.login(username, password)
logger.debug("smtp login succeeded host=%s username=%s", host, username)
smtp.send_message(message)
logger.info("smtp send accepted recipient=%s host=%s mode=plain", recipient_email, host)
receipt = _send_via_smtp_session(
smtp,
from_address=from_address,
recipient_email=recipient_email,
message=message,
)
logger.info(
"smtp send accepted recipient=%s host=%s mode=plain provider_message_id=%s provider_internal_id=%s",
recipient_email,
host,
receipt.get("provider_message_id"),
receipt.get("provider_internal_id"),
)
return receipt
async def send_templated_email(
@@ -484,7 +560,7 @@ async def send_templated_email(
reason=reason,
overrides=overrides,
)
await asyncio.to_thread(
receipt = await asyncio.to_thread(
_send_email_sync,
recipient_email=resolved_email,
subject=rendered["subject"],
@@ -495,6 +571,11 @@ async def send_templated_email(
return {
"recipient_email": resolved_email,
"subject": rendered["subject"],
**{
key: value
for key, value in receipt.items()
if key in {"provider_message_id", "provider_internal_id", "data_response"}
},
}
@@ -524,7 +605,7 @@ async def send_test_email(recipient_email: Optional[str] = None) -> Dict[str, st
f"<strong>Application URL:</strong> {html.escape(application_url)}</p>"
)
await asyncio.to_thread(
receipt = await asyncio.to_thread(
_send_email_sync,
recipient_email=resolved_email,
subject=subject,
@@ -533,6 +614,78 @@ async def send_test_email(recipient_email: Optional[str] = None) -> Dict[str, st
)
logger.info("SMTP test email sent: recipient=%s", resolved_email)
result = {"recipient_email": resolved_email, "subject": subject}
result.update(
{
key: value
for key, value in receipt.items()
if key in {"provider_message_id", "provider_internal_id", "data_response"}
}
)
warning = smtp_email_delivery_warning()
if warning:
result["warning"] = warning
return result
async def send_password_reset_email(
*,
recipient_email: str,
username: str,
token: str,
expires_at: str,
auth_provider: str,
) -> Dict[str, str]:
ready, detail = smtp_email_config_ready()
if not ready:
raise RuntimeError(detail)
resolved_email = _normalize_email(recipient_email)
if not resolved_email:
raise RuntimeError("No valid recipient email is available for password reset.")
app_url = _build_default_base_url()
reset_url = f"{app_url}/reset-password?token={token}"
provider_label = "Jellyfin, Seerr, and Magent" if auth_provider == "jellyfin" else "Magent"
subject = f"{env_settings.app_name} password reset"
body_text = (
f"A password reset was requested for {username}.\n\n"
f"This link will reset the password used for {provider_label}.\n"
f"Reset link: {reset_url}\n"
f"Expires: {expires_at}\n\n"
"If you did not request this reset, you can ignore this email.\n"
)
body_html = (
f"<h1>{html.escape(env_settings.app_name)} password reset</h1>"
f"<p>A password reset was requested for <strong>{html.escape(username)}</strong>.</p>"
f"<p>This link will reset the password used for <strong>{html.escape(provider_label)}</strong>.</p>"
f"<p><a href=\"{html.escape(reset_url)}\">Reset password</a></p>"
f"<p><strong>Expires:</strong> {html.escape(expires_at)}</p>"
"<p>If you did not request this reset, you can ignore this email.</p>"
)
receipt = await asyncio.to_thread(
_send_email_sync,
recipient_email=resolved_email,
subject=subject,
body_text=body_text,
body_html=body_html,
)
logger.info(
"Password reset email sent: username=%s recipient=%s provider=%s",
username,
resolved_email,
auth_provider,
)
result = {
"recipient_email": resolved_email,
"subject": subject,
"reset_url": reset_url,
**{
key: value
for key, value in receipt.items()
if key in {"provider_message_id", "provider_internal_id", "data_response"}
},
}
warning = smtp_email_delivery_warning()
if warning:
result["warning"] = warning

View File

@@ -0,0 +1,330 @@
from __future__ import annotations
import logging
import secrets
from datetime import datetime, timedelta, timezone
from typing import Any, Dict, Optional
from ..auth import normalize_user_auth_provider, resolve_user_auth_provider
from ..clients.jellyfin import JellyfinClient
from ..clients.jellyseerr import JellyseerrClient
from ..db import (
create_password_reset_token,
delete_expired_password_reset_tokens,
get_password_reset_token,
get_user_by_jellyseerr_id,
get_user_by_username,
get_users_by_username_ci,
mark_password_reset_token_used,
set_user_auth_provider,
set_user_password,
sync_jellyfin_password_state,
)
from ..runtime import get_runtime_settings
from .invite_email import send_password_reset_email
from .user_cache import get_cached_jellyseerr_users, save_jellyseerr_users_cache
logger = logging.getLogger(__name__)
PASSWORD_RESET_TOKEN_TTL_MINUTES = 30
class PasswordResetUnavailableError(RuntimeError):
pass
def _normalize_handles(value: object) -> list[str]:
if not isinstance(value, str):
return []
normalized = value.strip().lower()
if not normalized:
return []
handles = [normalized]
if "@" in normalized:
handles.append(normalized.split("@", 1)[0])
return list(dict.fromkeys(handles))
def _pick_preferred_user(users: list[dict], requested_identifier: str) -> dict | None:
if not users:
return None
requested = str(requested_identifier or "").strip().lower()
def _rank(user: dict) -> tuple[int, int, int, int]:
provider = str(user.get("auth_provider") or "local").strip().lower()
role = str(user.get("role") or "user").strip().lower()
username = str(user.get("username") or "").strip().lower()
return (
0 if role == "admin" else 1,
0 if isinstance(user.get("jellyseerr_user_id"), int) else 1,
0 if provider == "jellyfin" else (1 if provider == "local" else 2),
0 if username == requested else 1,
)
return sorted(users, key=_rank)[0]
def _find_matching_seerr_user(identifier: str, users: list[dict]) -> dict | None:
    """Return the first Seerr user whose username or email shares a handle with identifier.

    Both sides are normalized via _normalize_handles, so an email identifier
    also matches on its local part. Returns None when nothing matches or the
    identifier normalizes to nothing.
    """
    wanted = set(_normalize_handles(identifier))
    if not wanted:
        return None
    for candidate in users:
        if not isinstance(candidate, dict):
            continue
        for field in ("username", "email"):
            if wanted.intersection(_normalize_handles(candidate.get(field))):
                return candidate
    return None
async def _fetch_all_seerr_users() -> list[dict]:
    """Return every Jellyseerr user, preferring the local cache over live pagination.

    Cache hits (even an empty cached list) short-circuit the network calls.
    Otherwise pages the Seerr API 100 users at a time, tolerating the payload
    shapes different versions return (a bare list, or a dict keyed by
    "results"/"users"/"data"/"items"). A non-empty result refreshes the cache.
    Returns [] when Seerr is not configured.
    """
    cached = get_cached_jellyseerr_users()
    if cached is not None:
        return cached
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if not client.configured():
        return []
    users: list[dict] = []
    take = 100  # page size per API call
    skip = 0
    while True:
        payload = await client.get_users(take=take, skip=skip)
        if not payload:
            break
        if isinstance(payload, list):
            batch = payload
        elif isinstance(payload, dict):
            # Different Seerr versions wrap the page under different keys.
            batch = payload.get("results") or payload.get("users") or payload.get("data") or payload.get("items")
        else:
            batch = None
        if not isinstance(batch, list) or not batch:
            break
        # Keep only dict entries; other shapes are ignored defensively.
        users.extend([user for user in batch if isinstance(user, dict)])
        if len(batch) < take:
            # A short page means we've reached the end.
            break
        skip += take
    if users:
        # Persist so the next lookup takes the cache path above.
        return save_jellyseerr_users_cache(users)
    return users
def _resolve_seerr_user_email(seerr_user: Optional[dict], local_user: Optional[dict]) -> Optional[str]:
if isinstance(local_user, dict):
username = str(local_user.get("username") or "").strip()
if "@" in username:
return username
if isinstance(seerr_user, dict):
email = str(seerr_user.get("email") or "").strip()
if "@" in email:
return email
return None
async def _resolve_reset_target(identifier: str) -> Optional[Dict[str, Any]]:
    """Map a user-supplied identifier (username or email) to a reset-eligible account.

    Resolution order: case-insensitive local DB lookup first; if that fails,
    search Seerr users and link back to a local row via the Seerr id or the
    Seerr profile's email/username. A deliverable email is then resolved,
    re-consulting Seerr if the first pass produced none. Returns None when no
    account, no email, or an unsupported provider is found; on success returns
    {"username", "recipient_email", "auth_provider"} with auth_provider
    narrowed to "local" or "jellyfin".
    """
    normalized_identifier = str(identifier or "").strip()
    if not normalized_identifier:
        return None
    local_user = normalize_user_auth_provider(
        _pick_preferred_user(get_users_by_username_ci(normalized_identifier), normalized_identifier)
    )
    seerr_users: list[dict] | None = None
    seerr_user: dict | None = None
    if isinstance(local_user, dict) and isinstance(local_user.get("jellyseerr_user_id"), int):
        # Local row already linked to Seerr — fetch the matching Seerr profile.
        seerr_users = await _fetch_all_seerr_users()
        seerr_user = next(
            (
                user
                for user in seerr_users
                if isinstance(user, dict) and int(user.get("id") or user.get("userId") or 0) == int(local_user["jellyseerr_user_id"])
            ),
            None,
        )
    if not local_user:
        # No local match — try to find the account through Seerr instead.
        seerr_users = seerr_users if seerr_users is not None else await _fetch_all_seerr_users()
        seerr_user = _find_matching_seerr_user(normalized_identifier, seerr_users)
        if seerr_user:
            # Seerr payloads are inconsistent about the id field's name/case.
            seerr_user_id = seerr_user.get("id") or seerr_user.get("userId") or seerr_user.get("Id")
            try:
                seerr_user_id = int(seerr_user_id) if seerr_user_id is not None else None
            except (TypeError, ValueError):
                seerr_user_id = None
            if seerr_user_id is not None:
                local_user = normalize_user_auth_provider(get_user_by_jellyseerr_id(seerr_user_id))
            if not local_user:
                # Fall back to matching the Seerr email/username against local rows.
                for candidate in (seerr_user.get("email"), seerr_user.get("username")):
                    if not isinstance(candidate, str) or not candidate.strip():
                        continue
                    local_user = normalize_user_auth_provider(
                        _pick_preferred_user(get_users_by_username_ci(candidate), candidate)
                    )
                    if local_user:
                        break
    if not local_user:
        return None
    auth_provider = resolve_user_auth_provider(local_user)
    username = str(local_user.get("username") or "").strip()
    recipient_email = _resolve_seerr_user_email(seerr_user, local_user)
    if not recipient_email:
        # Second pass: look the Seerr profile up again (by linked id, then by
        # username) in case the first pass never located one.
        seerr_users = seerr_users if seerr_users is not None else await _fetch_all_seerr_users()
        if isinstance(local_user.get("jellyseerr_user_id"), int):
            seerr_user = next(
                (
                    user
                    for user in seerr_users
                    if isinstance(user, dict) and int(user.get("id") or user.get("userId") or 0) == int(local_user["jellyseerr_user_id"])
                ),
                None,
            )
        if not seerr_user:
            seerr_user = _find_matching_seerr_user(username, seerr_users)
        recipient_email = _resolve_seerr_user_email(seerr_user, local_user)
    if not recipient_email:
        return None
    if auth_provider == "jellyseerr":
        # Seerr-only accounts: upgrade to "jellyfin" when a matching Jellyfin
        # user exists, since Seerr credentials are backed by Jellyfin there.
        runtime = get_runtime_settings()
        jellyfin_client = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key)
        if jellyfin_client.configured():
            try:
                jellyfin_user = await jellyfin_client.find_user_by_name(username)
            except Exception:
                # Best-effort probe; an unreachable Jellyfin must not block the reset.
                jellyfin_user = None
            if isinstance(jellyfin_user, dict):
                auth_provider = "jellyfin"
    if auth_provider not in {"local", "jellyfin"}:
        # Only these two providers support a password reset flow.
        return None
    return {
        "username": username,
        "recipient_email": recipient_email,
        "auth_provider": auth_provider,
    }
def _token_record_is_usable(record: Optional[dict]) -> bool:
if not isinstance(record, dict):
return False
if record.get("is_used"):
return False
if record.get("is_expired"):
return False
return True
def _mask_email(email: str) -> str:
candidate = str(email or "").strip()
if "@" not in candidate:
return "valid reset link"
local_part, domain = candidate.split("@", 1)
if not local_part:
return f"***@{domain}"
if len(local_part) == 1:
return f"{local_part}***@{domain}"
return f"{local_part[0]}***{local_part[-1]}@{domain}"
async def request_password_reset(
    identifier: str,
    *,
    requested_by_ip: Optional[str] = None,
    requested_user_agent: Optional[str] = None,
) -> Dict[str, Any]:
    """Issue a password-reset token for the account matching `identifier` and email it.

    Always answers {"status": "ok", ...}: when no eligible account is found,
    "issued" is False and no further detail is returned, so callers cannot
    probe which accounts exist. On success the token row records the optional
    requester IP/user-agent for auditing, and the response echoes the target
    account plus the token expiry.
    """
    # Opportunistic cleanup so expired tokens never accumulate.
    delete_expired_password_reset_tokens()
    target = await _resolve_reset_target(identifier)
    if not target:
        # Log only a truncated, lowercased identifier — enough to debug, bounded in size.
        logger.info("password reset requested with no eligible match identifier=%s", identifier.strip().lower()[:256])
        return {"status": "ok", "issued": False}
    token = secrets.token_urlsafe(32)
    expires_at = (datetime.now(timezone.utc) + timedelta(minutes=PASSWORD_RESET_TOKEN_TTL_MINUTES)).isoformat()
    create_password_reset_token(
        token,
        target["username"],
        target["recipient_email"],
        target["auth_provider"],
        expires_at,
        requested_by_ip=requested_by_ip,
        requested_user_agent=requested_user_agent,
    )
    await send_password_reset_email(
        recipient_email=target["recipient_email"],
        username=target["username"],
        token=token,
        expires_at=expires_at,
        auth_provider=target["auth_provider"],
    )
    return {
        "status": "ok",
        "issued": True,
        "username": target["username"],
        "recipient_email": target["recipient_email"],
        "auth_provider": target["auth_provider"],
        "expires_at": expires_at,
    }
def verify_password_reset_token(token: str) -> Dict[str, Any]:
    """Validate a reset token without consuming it.

    Raises ValueError (with a deliberately uniform, user-safe message) when
    the token is unknown, already used, or expired. On success returns a
    masked recipient hint plus the provider and expiry so the UI can render
    the reset form.
    """
    delete_expired_password_reset_tokens()
    record = get_password_reset_token(token)
    if not _token_record_is_usable(record):
        raise ValueError("Password reset link is invalid or has expired.")
    return {
        "status": "ok",
        "recipient_hint": _mask_email(str(record.get("recipient_email") or "")),
        "auth_provider": record.get("auth_provider"),
        "expires_at": record.get("expires_at"),
    }
async def apply_password_reset(token: str, new_password: str) -> Dict[str, Any]:
    """Consume a reset token and set the new password on the owning account.

    Local accounts are updated in the Magent DB; Jellyfin-backed accounts are
    updated through the Jellyfin API with local password state kept in sync.
    Every unusable-token failure raises ValueError with the same message so
    callers can't distinguish why a token failed. Raises
    PasswordResetUnavailableError when Jellyfin is required but unconfigured.
    """
    delete_expired_password_reset_tokens()
    record = get_password_reset_token(token)
    if not _token_record_is_usable(record):
        raise ValueError("Password reset link is invalid or has expired.")
    username = str(record.get("username") or "").strip()
    if not username:
        raise ValueError("Password reset link is invalid or has expired.")
    stored_user = normalize_user_auth_provider(get_user_by_username(username))
    if not stored_user:
        raise ValueError("Password reset link is invalid or has expired.")
    auth_provider = resolve_user_auth_provider(stored_user)
    if auth_provider == "jellyseerr":
        # Seerr-only accounts are reset through Jellyfin.
        auth_provider = "jellyfin"
    if auth_provider == "local":
        set_user_password(username, new_password)
        if str(stored_user.get("auth_provider") or "").strip().lower() != "local":
            # Normalize the stored provider to match where the password now lives.
            set_user_auth_provider(username, "local")
        mark_password_reset_token_used(token)
        logger.info("password reset applied username=%s provider=local", username)
        return {"status": "ok", "provider": "local", "username": username}
    if auth_provider == "jellyfin":
        runtime = get_runtime_settings()
        client = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key)
        if not client.configured():
            raise PasswordResetUnavailableError("Jellyfin is not configured for password reset.")
        jellyfin_user = await client.find_user_by_name(username)
        # NOTE(review): reaches into the client's private _extract_user_id helper;
        # consider exposing a public accessor on JellyfinClient.
        user_id = client._extract_user_id(jellyfin_user)
        if not user_id:
            raise ValueError("Password reset link is invalid or has expired.")
        await client.set_user_password(user_id, new_password)
        # Keep the local copy of the Jellyfin password state consistent.
        sync_jellyfin_password_state(username, new_password)
        if str(stored_user.get("auth_provider") or "").strip().lower() != "jellyfin":
            set_user_auth_provider(username, "jellyfin")
        mark_password_reset_token_used(token)
        logger.info("password reset applied username=%s provider=jellyfin", username)
        return {"status": "ok", "provider": "jellyfin", "username": username}
    raise ValueError("Password reset is not available for this sign-in provider.")

View File

@@ -1,6 +1,7 @@
from typing import Any, Dict, List, Optional
import asyncio
import logging
import re
from datetime import datetime, timezone
from urllib.parse import quote
import httpx
@@ -57,6 +58,100 @@ def _pick_first(value: Any) -> Optional[Dict[str, Any]]:
return None
def _normalize_media_title(value: Any) -> Optional[str]:
if not isinstance(value, str):
return None
normalized = re.sub(r"[^a-z0-9]+", " ", value.lower()).strip()
return normalized or None
def _canonical_provider_key(value: str) -> str:
normalized = value.strip().lower()
if normalized.endswith("id"):
normalized = normalized[:-2]
return normalized
def extract_request_provider_ids(payload: Any) -> Dict[str, str]:
    """Collect provider ids (tmdb/tvdb/imdb, etc.) from a payload and its nested media dict.

    Reads both an embedded ProviderIds/providerIds mapping and the flat
    well-known keys; values are stringified and blank entries are dropped.
    Keys are canonicalized via _canonical_provider_key. Non-dict payloads
    yield an empty mapping.
    """
    results: Dict[str, str] = {}
    sources: List[Any] = []
    if isinstance(payload, dict):
        sources.append(payload)
        nested_media = payload.get("media")
        if isinstance(nested_media, dict):
            sources.append(nested_media)
    for source in sources:
        if not isinstance(source, dict):
            continue
        mapping = source.get("ProviderIds") or source.get("providerIds")
        if isinstance(mapping, dict):
            for raw_key, raw_value in mapping.items():
                if raw_value is None:
                    continue
                value_text = str(raw_value).strip()
                if value_text:
                    results[_canonical_provider_key(str(raw_key))] = value_text
        for flat_key in ("tmdbId", "tvdbId", "imdbId", "tmdb_id", "tvdb_id", "imdb_id"):
            raw_value = source.get(flat_key)
            if raw_value is None:
                continue
            value_text = str(raw_value).strip()
            if value_text:
                results[_canonical_provider_key(flat_key)] = value_text
    return results
def jellyfin_item_matches_request(
    item: Dict[str, Any],
    *,
    title: Optional[str],
    year: Optional[int],
    request_type: RequestType,
    request_payload: Optional[Dict[str, Any]] = None,
) -> bool:
    """Decide whether a Jellyfin library item corresponds to a Seerr request.

    Matching strategy, strongest first:
      1. Provider ids (tmdb, then tvdb, then imdb): an id match wins outright
         and bypasses the title/year heuristics below.
      2. Normalized-title equality across the item's known title fields,
         rejected up front when both sides carry a year and the years differ.
      3. For TV requests only, a prefix relationship in either direction
         between the normalized titles (tolerates suffixed series naming).
    """
    request_provider_ids = extract_request_provider_ids(request_payload or {})
    item_provider_ids = extract_request_provider_ids(item)
    provider_priority = ("tmdb", "tvdb", "imdb")
    for key in provider_priority:
        request_id = request_provider_ids.get(key)
        item_id = item_provider_ids.get(key)
        if request_id and item_id and request_id == item_id:
            return True
    request_title = _normalize_media_title(title)
    if not request_title:
        # Without a comparable title there is nothing left to match on.
        return False
    # Jellyfin items expose the title under several fields depending on type.
    item_titles = [
        _normalize_media_title(item.get("Name")),
        _normalize_media_title(item.get("OriginalTitle")),
        _normalize_media_title(item.get("SortName")),
        _normalize_media_title(item.get("SeriesName")),
        _normalize_media_title(item.get("title")),
    ]
    item_titles = [candidate for candidate in item_titles if candidate]
    item_year = item.get("ProductionYear") or item.get("Year")
    try:
        item_year_value = int(item_year) if item_year is not None else None
    except (TypeError, ValueError):
        item_year_value = None
    if year and item_year_value and int(year) != item_year_value:
        # Conflicting release years rule out a title-based match.
        return False
    if request_title in item_titles:
        return True
    if request_type == RequestType.tv:
        # Series names often carry suffixes (year, network); accept a prefix
        # relationship in either direction for TV only.
        for candidate in item_titles:
            if candidate and (candidate.startswith(request_title) or request_title.startswith(candidate)):
                return True
    return False
def _extract_http_error_message(exc: httpx.HTTPStatusError) -> Optional[str]:
response = exc.response
if response is None:
@@ -513,7 +608,7 @@ async def build_snapshot(request_id: str) -> Snapshot:
if jellyfin.configured() and snapshot.title:
types = ["Movie"] if snapshot.request_type == RequestType.movie else ["Series"]
try:
search = await jellyfin.search_items(snapshot.title, types)
search = await jellyfin.search_items(snapshot.title, types, limit=50)
except Exception:
search = None
if isinstance(search, dict):
@@ -521,11 +616,13 @@ async def build_snapshot(request_id: str) -> Snapshot:
for item in items:
if not isinstance(item, dict):
continue
name = item.get("Name") or item.get("title")
year = item.get("ProductionYear") or item.get("Year")
if name and name.strip().lower() == (snapshot.title or "").strip().lower():
if snapshot.year and year and int(year) != int(snapshot.year):
continue
if jellyfin_item_matches_request(
item,
title=snapshot.title,
year=snapshot.year,
request_type=snapshot.request_type,
request_payload=jelly_request,
):
jellyfin_available = True
jellyfin_item = item
break
@@ -646,12 +743,22 @@ async def build_snapshot(request_id: str) -> Snapshot:
snapshot.state = NormalizedState.added_to_arr
snapshot.state_reason = "Item is present in Sonarr/Radarr"
if jellyfin_available and snapshot.state not in {
NormalizedState.downloading,
NormalizedState.importing,
}:
snapshot.state = NormalizedState.completed
snapshot.state_reason = "Ready to watch in Jellyfin."
if jellyfin_available:
missing_episodes = arr_details.get("missingEpisodes")
if snapshot.request_type == RequestType.tv and isinstance(missing_episodes, dict) and missing_episodes:
snapshot.state = NormalizedState.importing
snapshot.state_reason = "Some episodes are available in Jellyfin, but the request is still incomplete."
for hop in timeline:
if hop.service == "Seerr":
hop.status = "Partially ready"
else:
snapshot.state = NormalizedState.completed
snapshot.state_reason = "Ready to watch in Jellyfin."
for hop in timeline:
if hop.service == "Seerr":
hop.status = "Available"
elif hop.service == "Sonarr/Radarr" and hop.status not in {"error"}:
hop.status = "available"
snapshot.timeline = timeline
actions: List[ActionOption] = []

View File

@@ -40,6 +40,10 @@ const SECTION_LABELS: Record<string, string> = {
const BOOL_SETTINGS = new Set([
'jellyfin_sync_to_arr',
'site_banner_enabled',
'site_login_show_jellyfin_login',
'site_login_show_local_login',
'site_login_show_forgot_password',
'site_login_show_signup_link',
'magent_proxy_enabled',
'magent_proxy_trust_forwarded_headers',
'magent_ssl_bind_enabled',
@@ -104,7 +108,7 @@ const SECTION_DESCRIPTIONS: Record<string, string> = {
qbittorrent: 'Downloader connection settings.',
requests: 'Control how often requests are refreshed and cleaned up.',
log: 'Activity log for troubleshooting.',
site: 'Sitewide banner and version details. The changelog is generated from git history during release builds.',
site: 'Sitewide banner, login page visibility, and version details. The changelog is generated from git history during release builds.',
}
const SETTINGS_SECTION_MAP: Record<string, string | null> = {
@@ -239,6 +243,31 @@ const MAGENT_GROUPS_BY_SECTION: Record<string, Set<string>> = {
]),
}
// Fixed grouping for the Site settings section: each entry renders as its own
// card with a title and description, pulling only the listed setting keys.
const SITE_SECTION_GROUPS: Array<{
  key: string
  title: string
  description: string
  keys: string[]
}> = [
  {
    key: 'site-banner',
    title: 'Site Banner',
    description: 'Control the sitewide banner message, tone, and visibility.',
    keys: ['site_banner_enabled', 'site_banner_tone', 'site_banner_message'],
  },
  {
    key: 'site-login',
    title: 'Login Page Behaviour',
    description: 'Control which sign-in and recovery options are shown on the logged-out login page.',
    keys: [
      'site_login_show_jellyfin_login',
      'site_login_show_local_login',
      'site_login_show_forgot_password',
      'site_login_show_signup_link',
    ],
  },
]
const SETTING_LABEL_OVERRIDES: Record<string, string> = {
jellyseerr_base_url: 'Seerr base URL',
jellyseerr_api_key: 'Seerr API key',
@@ -280,6 +309,10 @@ const SETTING_LABEL_OVERRIDES: Record<string, string> = {
magent_notify_push_device: 'Device / target',
magent_notify_webhook_enabled: 'Generic webhook notifications enabled',
magent_notify_webhook_url: 'Generic webhook URL',
site_login_show_jellyfin_login: 'Login page: Jellyfin sign-in',
site_login_show_local_login: 'Login page: local Magent sign-in',
site_login_show_forgot_password: 'Login page: forgot password',
site_login_show_signup_link: 'Login page: invite signup link',
log_file_max_bytes: 'Log file max size (bytes)',
log_file_backup_count: 'Rotated log files to keep',
log_http_client_level: 'Service HTTP log level',
@@ -551,6 +584,7 @@ export default function SettingsPage({ section }: SettingsPageProps) {
const settingsSection = SETTINGS_SECTION_MAP[section] ?? null
const isMagentGroupedSection = section === 'magent' || section === 'general' || section === 'notifications'
const isSiteGroupedSection = section === 'site'
const visibleSections = settingsSection ? [settingsSection] : []
const isCacheSection = section === 'cache'
const cacheSettingKeys = new Set(['requests_sync_ttl_minutes', 'requests_data_source'])
@@ -564,6 +598,15 @@ export default function SettingsPage({ section }: SettingsPageProps) {
'requests_cleanup_time',
'requests_cleanup_days',
]
const siteSettingOrder = [
'site_banner_enabled',
'site_banner_message',
'site_banner_tone',
'site_login_show_jellyfin_login',
'site_login_show_local_login',
'site_login_show_forgot_password',
'site_login_show_signup_link',
]
const sortByOrder = (items: AdminSetting[], order: string[]) => {
const position = new Map(order.map((key, index) => [key, index]))
return [...items].sort((a, b) => {
@@ -603,6 +646,22 @@ export default function SettingsPage({ section }: SettingsPageProps) {
})
return groups
})()
: isSiteGroupedSection
? (() => {
const siteItems = groupedSettings.site ?? []
const byKey = new Map(siteItems.map((item) => [item.key, item]))
return SITE_SECTION_GROUPS.map((group) => {
const items = group.keys
.map((key) => byKey.get(key))
.filter((item): item is AdminSetting => Boolean(item))
return {
key: group.key,
title: group.title,
description: group.description,
items,
}
})
})()
: visibleSections.map((sectionKey) => ({
key: sectionKey,
title: SECTION_LABELS[sectionKey] ?? sectionKey,
@@ -615,6 +674,9 @@ export default function SettingsPage({ section }: SettingsPageProps) {
if (sectionKey === 'requests') {
return sortByOrder(filtered, requestSettingOrder)
}
if (sectionKey === 'site') {
return sortByOrder(filtered, siteSettingOrder)
}
return filtered
})(),
}))
@@ -748,6 +810,10 @@ export default function SettingsPage({ section }: SettingsPageProps) {
site_banner_enabled: 'Enable a sitewide banner for announcements.',
site_banner_message: 'Short banner message for maintenance or updates.',
site_banner_tone: 'Visual tone for the banner.',
site_login_show_jellyfin_login: 'Show the Jellyfin login button on the login page.',
site_login_show_local_login: 'Show the local Magent login button on the login page.',
site_login_show_forgot_password: 'Show the forgot-password link on the login page.',
site_login_show_signup_link: 'Show the invite signup link on the login page.',
site_changelog: 'One update per line for the public changelog.',
}
@@ -1672,7 +1738,7 @@ export default function SettingsPage({ section }: SettingsPageProps) {
)}
</div>
{(sectionGroup.description || SECTION_DESCRIPTIONS[sectionGroup.key]) &&
(!settingsSection || isMagentGroupedSection) && (
(!settingsSection || isMagentGroupedSection || isSiteGroupedSection) && (
<p className="section-subtitle">
{sectionGroup.description || SECTION_DESCRIPTIONS[sectionGroup.key]}
</p>
@@ -2148,11 +2214,12 @@ export default function SettingsPage({ section }: SettingsPageProps) {
const isPemField =
setting.key === 'magent_ssl_certificate_pem' ||
setting.key === 'magent_ssl_private_key_pem'
const shouldSpanFull = isPemField || setting.key === 'site_banner_message'
return (
<label
key={setting.key}
data-helper={helperText || undefined}
className={isPemField ? 'field-span-full' : undefined}
className={shouldSpanFull ? 'field-span-full' : undefined}
>
<span className="label-row">
<span>{labelFromKey(setting.key)}</span>

View File

@@ -15,6 +15,17 @@ type RequestRow = {
createdAt?: string | null
}
// Stage filter choices for the admin "all requests" table. `value` is sent
// verbatim as the `stage` query parameter; 'all' means the filter is omitted.
const REQUEST_STAGE_OPTIONS = [
  { value: 'all', label: 'All stages' },
  { value: 'pending', label: 'Waiting for approval' },
  { value: 'approved', label: 'Approved' },
  { value: 'in_progress', label: 'In progress' },
  { value: 'working', label: 'Working on it' },
  { value: 'partial', label: 'Partially ready' },
  { value: 'ready', label: 'Ready to watch' },
  { value: 'declined', label: 'Declined' },
]
const formatDateTime = (value?: string | null) => {
if (!value) return 'Unknown'
const date = new Date(value)
@@ -30,6 +41,7 @@ export default function AdminRequestsAllPage() {
const [error, setError] = useState<string | null>(null)
const [pageSize, setPageSize] = useState(50)
const [page, setPage] = useState(1)
const [stage, setStage] = useState('all')
const pageCount = useMemo(() => {
if (!total || pageSize <= 0) return 1
@@ -46,8 +58,15 @@ export default function AdminRequestsAllPage() {
try {
const baseUrl = getApiBase()
const skip = (page - 1) * pageSize
const params = new URLSearchParams({
take: String(pageSize),
skip: String(skip),
})
if (stage !== 'all') {
params.set('stage', stage)
}
const response = await authFetch(
`${baseUrl}/admin/requests/all?take=${pageSize}&skip=${skip}`
`${baseUrl}/admin/requests/all?${params.toString()}`
)
if (!response.ok) {
if (response.status === 401) {
@@ -74,7 +93,7 @@ export default function AdminRequestsAllPage() {
useEffect(() => {
void load()
}, [page, pageSize])
}, [page, pageSize, stage])
useEffect(() => {
if (page > pageCount) {
@@ -82,6 +101,10 @@ export default function AdminRequestsAllPage() {
}
}, [pageCount, page])
useEffect(() => {
setPage(1)
}, [stage])
return (
<AdminShell
title="All requests"
@@ -98,6 +121,16 @@ export default function AdminRequestsAllPage() {
<span>{total.toLocaleString()} total</span>
</div>
<div className="admin-toolbar-actions">
<label className="admin-select">
<span>Stage</span>
<select value={stage} onChange={(e) => setStage(e.target.value)}>
{REQUEST_STAGE_OPTIONS.map((option) => (
<option key={option.value} value={option.value}>
{option.label}
</option>
))}
</select>
</label>
<label className="admin-select">
<span>Per page</span>
<select value={pageSize} onChange={(e) => setPageSize(Number(e.target.value))}>

View File

@@ -0,0 +1,79 @@
'use client'
import { useState } from 'react'
import { useRouter } from 'next/navigation'
import BrandingLogo from '../ui/BrandingLogo'
import { getApiBase } from '../lib/auth'
/**
 * Logged-out "forgot password" form.
 *
 * Posts the entered identifier to POST {apiBase}/auth/password/forgot and
 * shows the server's message (or a generic fallback that does not reveal
 * whether an account exists). Uses plain fetch — no auth token is attached.
 */
export default function ForgotPasswordPage() {
  const router = useRouter()
  const [identifier, setIdentifier] = useState('')
  const [loading, setLoading] = useState(false)
  const [error, setError] = useState<string | null>(null)
  const [status, setStatus] = useState<string | null>(null)

  const submit = async (event: React.FormEvent) => {
    event.preventDefault()
    if (!identifier.trim()) {
      setError('Enter your username or email.')
      return
    }
    setLoading(true)
    setError(null)
    setStatus(null)
    try {
      const baseUrl = getApiBase()
      const response = await fetch(`${baseUrl}/auth/password/forgot`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ identifier: identifier.trim() }),
      })
      // Error responses may carry a non-JSON body; tolerate parse failures.
      const data = await response.json().catch(() => null)
      if (!response.ok) {
        throw new Error(typeof data?.detail === 'string' ? data.detail : 'Unable to send reset link.')
      }
      setStatus(
        typeof data?.message === 'string'
          ? data.message
          : 'If an account exists for that username or email, a password reset link has been sent.',
      )
    } catch (err) {
      console.error(err)
      setError(err instanceof Error ? err.message : 'Unable to send reset link.')
    } finally {
      setLoading(false)
    }
  }

  return (
    <main className="card auth-card">
      <BrandingLogo className="brand-logo brand-logo--login" />
      <h1>Forgot password</h1>
      <p className="lede">
        Enter the username or email you use for Jellyfin or Magent. If the account is eligible, a reset link
        will be emailed to you.
      </p>
      <form className="auth-form" onSubmit={submit}>
        <label>
          Username or email
          <input
            value={identifier}
            onChange={(event) => setIdentifier(event.target.value)}
            autoComplete="username"
            placeholder="you@example.com"
          />
        </label>
        {error && <div className="error-banner">{error}</div>}
        {status && <div className="status-banner">{status}</div>}
        <div className="auth-actions">
          <button type="submit" disabled={loading}>
            {loading ? 'Sending reset link…' : 'Send reset link'}
          </button>
        </div>
        <button type="button" className="ghost-button" onClick={() => router.push('/login')} disabled={loading}>
          Back to sign in
        </button>
      </form>
    </main>
  )
}

View File

@@ -1527,6 +1527,13 @@ button span {
color: var(--ink-muted);
}
.recent-filter-group {
display: flex;
flex-wrap: wrap;
gap: 10px;
align-items: center;
}
.recent-filter select {
padding: 8px 12px;
font-size: 13px;
@@ -6068,6 +6075,52 @@ textarea {
background: rgba(255, 255, 255, 0.03);
}
/* Diagnostics: database detail panel rendered under the "database" check. */
.diagnostic-detail-panel {
display: grid;
gap: 0.9rem;
}
/* One titled group of metrics (Storage / Tables / Timings). */
.diagnostic-detail-group {
display: grid;
gap: 0.6rem;
}
.diagnostic-detail-group h4 {
margin: 0;
font-size: 0.86rem;
letter-spacing: 0.06em;
text-transform: uppercase;
color: var(--ink-muted);
}
/* Responsive tile grid: as many 9rem-minimum columns as fit. */
.diagnostic-detail-grid {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(9rem, 1fr));
gap: 0.7rem;
}
.diagnostic-detail-item {
display: grid;
gap: 0.2rem;
min-width: 0;
padding: 0.75rem;
border-radius: 0.8rem;
border: 1px solid rgba(255, 255, 255, 0.06);
background: rgba(255, 255, 255, 0.025);
}
/* <span> is the small uppercase label, <strong> is the metric value. */
.diagnostic-detail-item span {
font-size: 0.76rem;
letter-spacing: 0.05em;
text-transform: uppercase;
color: var(--muted);
}
.diagnostic-detail-item strong {
line-height: 1.35;
overflow-wrap: anywhere;
}
.diagnostics-rail-metrics {
display: grid;
gap: 0.75rem;

View File

@@ -1,19 +1,36 @@
'use client'
import { useRouter } from 'next/navigation'
import { useState } from 'react'
import { useEffect, useState } from 'react'
import { getApiBase, setToken, clearToken } from '../lib/auth'
import BrandingLogo from '../ui/BrandingLogo'
// Login-page toggles used until /site/public responds. Everything defaults to
// enabled so a failed or slow fetch never hides the sign-in controls.
const DEFAULT_LOGIN_OPTIONS = {
showJellyfinLogin: true,
showLocalLogin: true,
showForgotPassword: true,
showSignupLink: true,
}
export default function LoginPage() {
const router = useRouter()
const [username, setUsername] = useState('')
const [password, setPassword] = useState('')
const [error, setError] = useState<string | null>(null)
const [loading, setLoading] = useState(false)
const [loginOptions, setLoginOptions] = useState(DEFAULT_LOGIN_OPTIONS)
const primaryMode: 'jellyfin' | 'local' | null = loginOptions.showJellyfinLogin
? 'jellyfin'
: loginOptions.showLocalLogin
? 'local'
: null
const submit = async (event: React.FormEvent, mode: 'local' | 'jellyfin') => {
event.preventDefault()
if (!primaryMode) {
setError('Login is currently disabled. Contact an administrator.')
return
}
setError(null)
setLoading(true)
try {
@@ -48,12 +65,63 @@ export default function LoginPage() {
}
}
// Load the public login-visibility flags once on mount. Each flag only turns
// a control off when the server explicitly sends `false` (`!== false`), so a
// missing/partial payload keeps the defaults. The `active` flag prevents a
// state update after unmount; fetch failures are logged and otherwise ignored.
useEffect(() => {
let active = true
const loadLoginOptions = async () => {
try {
const baseUrl = getApiBase()
const response = await fetch(`${baseUrl}/site/public`)
if (!response.ok) {
// Non-2xx: silently keep DEFAULT_LOGIN_OPTIONS.
return
}
const data = await response.json()
const login = data?.login ?? {}
if (!active) return
setLoginOptions({
showJellyfinLogin: login.showJellyfinLogin !== false,
showLocalLogin: login.showLocalLogin !== false,
showForgotPassword: login.showForgotPassword !== false,
showSignupLink: login.showSignupLink !== false,
})
} catch (err) {
console.error(err)
}
}
void loadLoginOptions()
return () => {
// Cancel pending state updates once the component unmounts.
active = false
}
}, [])
// Helper copy shown under the "Sign in" heading, chosen from whichever
// sign-in methods are currently enabled.
const loginHelpText = (() => {
  const { showJellyfinLogin, showLocalLogin } = loginOptions
  if (showJellyfinLogin) {
    return showLocalLogin
      ? 'Use your Jellyfin account, or sign in with a local Magent admin account.'
      : 'Use your Jellyfin account to sign in.'
  }
  return showLocalLogin
    ? 'Use your local Magent admin account to sign in.'
    : 'No sign-in methods are currently available. Contact an administrator.'
})()
return (
<main className="card auth-card">
<BrandingLogo className="brand-logo brand-logo--login" />
<h1>Sign in</h1>
<p className="lede">Use your Jellyfin account, or sign in with a local Magent admin account.</p>
<form onSubmit={(event) => submit(event, 'jellyfin')} className="auth-form">
<p className="lede">{loginHelpText}</p>
<form
onSubmit={(event) => {
if (!primaryMode) {
event.preventDefault()
setError('Login is currently disabled. Contact an administrator.')
return
}
void submit(event, primaryMode)
}}
className="auth-form"
>
<label>
Username
<input
@@ -73,21 +141,35 @@ export default function LoginPage() {
</label>
{error && <div className="error-banner">{error}</div>}
<div className="auth-actions">
<button type="submit" disabled={loading}>
{loading ? 'Signing in...' : 'Login with Jellyfin account'}
</button>
{loginOptions.showJellyfinLogin ? (
<button type="submit" disabled={loading}>
{loading ? 'Signing in...' : 'Login with Jellyfin account'}
</button>
) : null}
</div>
<button
type="button"
className="ghost-button"
disabled={loading}
onClick={(event) => submit(event, 'local')}
>
Sign in with Magent account
</button>
<a className="ghost-button" href="/signup">
Have an invite? Create your account (Jellyfin + Magent)
</a>
{loginOptions.showLocalLogin ? (
<button
type="button"
className="ghost-button"
disabled={loading}
onClick={(event) => submit(event, 'local')}
>
Sign in with Magent account
</button>
) : null}
{loginOptions.showForgotPassword ? (
<a className="ghost-button" href="/forgot-password">
Forgot password?
</a>
) : null}
{loginOptions.showSignupLink ? (
<a className="ghost-button" href="/signup">
Have an invite? Create your account (Jellyfin + Magent)
</a>
) : null}
{!loginOptions.showJellyfinLogin && !loginOptions.showLocalLogin ? (
<div className="error-banner">Login is currently disabled. Contact an administrator.</div>
) : null}
</form>
</main>
)

View File

@@ -22,6 +22,17 @@ const normalizeRecentResults = (items: any[]) =>
}
})
// Choices for the recent-requests "Stage" filter. `value` is sent as the
// `stage` query parameter; 'all' means no stage filter is applied.
const REQUEST_STAGE_OPTIONS = [
{ value: 'all', label: 'All stages' },
{ value: 'pending', label: 'Waiting' },
{ value: 'approved', label: 'Approved' },
{ value: 'in_progress', label: 'In progress' },
{ value: 'working', label: 'Working' },
{ value: 'partial', label: 'Partial' },
{ value: 'ready', label: 'Ready' },
{ value: 'declined', label: 'Declined' },
]
export default function HomePage() {
const router = useRouter()
const [query, setQuery] = useState('')
@@ -38,11 +49,20 @@ export default function HomePage() {
const [recentError, setRecentError] = useState<string | null>(null)
const [recentLoading, setRecentLoading] = useState(false)
const [searchResults, setSearchResults] = useState<
{ title: string; year?: number; type?: string; requestId?: number; statusLabel?: string }[]
{
title: string
year?: number
type?: string
requestId?: number
statusLabel?: string
requestedBy?: string | null
accessible?: boolean
}[]
>([])
const [searchError, setSearchError] = useState<string | null>(null)
const [role, setRole] = useState<string | null>(null)
const [recentDays, setRecentDays] = useState(90)
const [recentStage, setRecentStage] = useState('all')
const [authReady, setAuthReady] = useState(false)
const [servicesStatus, setServicesStatus] = useState<
{ overall: string; services: { name: string; status: string; message?: string }[] } | null
@@ -143,9 +163,14 @@ export default function HomePage() {
setRole(userRole)
setAuthReady(true)
const take = userRole === 'admin' ? 50 : 6
const response = await authFetch(
`${baseUrl}/requests/recent?take=${take}&days=${recentDays}`
)
const params = new URLSearchParams({
take: String(take),
days: String(recentDays),
})
if (recentStage !== 'all') {
params.set('stage', recentStage)
}
const response = await authFetch(`${baseUrl}/requests/recent?${params.toString()}`)
if (!response.ok) {
if (response.status === 401) {
clearToken()
@@ -167,7 +192,7 @@ export default function HomePage() {
}
load()
}, [recentDays])
}, [recentDays, recentStage])
useEffect(() => {
if (!authReady) {
@@ -222,7 +247,14 @@ export default function HomePage() {
try {
const streamToken = await getEventStreamToken()
if (closed) return
const streamUrl = `${baseUrl}/events/stream?stream_token=${encodeURIComponent(streamToken)}&recent_days=${encodeURIComponent(String(recentDays))}`
const params = new URLSearchParams({
stream_token: streamToken,
recent_days: String(recentDays),
})
if (recentStage !== 'all') {
params.set('recent_stage', recentStage)
}
const streamUrl = `${baseUrl}/events/stream?${params.toString()}`
source = new EventSource(streamUrl)
source.onopen = () => {
@@ -282,7 +314,7 @@ export default function HomePage() {
setLiveStreamConnected(false)
source?.close()
}
}, [authReady, recentDays])
}, [authReady, recentDays, recentStage])
const runSearch = async (term: string) => {
try {
@@ -299,14 +331,16 @@ export default function HomePage() {
const data = await response.json()
if (Array.isArray(data?.results)) {
setSearchResults(
data.results.map((item: any) => ({
title: item.title,
year: item.year,
type: item.type,
requestId: item.requestId,
statusLabel: item.statusLabel,
}))
)
data.results.map((item: any) => ({
title: item.title,
year: item.year,
type: item.type,
requestId: item.requestId,
statusLabel: item.statusLabel,
requestedBy: item.requestedBy ?? null,
accessible: Boolean(item.accessible),
}))
)
setSearchError(null)
}
} catch (error) {
@@ -403,19 +437,34 @@ export default function HomePage() {
<div className="recent-header">
<h2>{role === 'admin' ? 'All requests' : 'My recent requests'}</h2>
{authReady && (
<label className="recent-filter">
<span>Show</span>
<select
value={recentDays}
onChange={(event) => setRecentDays(Number(event.target.value))}
>
<option value={0}>All</option>
<option value={30}>30 days</option>
<option value={60}>60 days</option>
<option value={90}>90 days</option>
<option value={180}>180 days</option>
</select>
</label>
<div className="recent-filter-group">
<label className="recent-filter">
<span>Show</span>
<select
value={recentDays}
onChange={(event) => setRecentDays(Number(event.target.value))}
>
<option value={0}>All</option>
<option value={30}>30 days</option>
<option value={60}>60 days</option>
<option value={90}>90 days</option>
<option value={180}>180 days</option>
</select>
</label>
<label className="recent-filter">
<span>Stage</span>
<select
value={recentStage}
onChange={(event) => setRecentStage(event.target.value)}
>
{REQUEST_STAGE_OPTIONS.map((option) => (
<option key={option.value} value={option.value}>
{option.label}
</option>
))}
</select>
</label>
</div>
)}
</div>
<div className="recent-grid">
@@ -467,9 +516,10 @@ export default function HomePage() {
<aside className="side-panel">
<section className="main-panel find-panel">
<div className="find-header">
<h1>Find my request</h1>
<h1>Search all requests</h1>
<p className="lede">
Search by title + year, paste a request number, or pick from your recent requests.
Search any request by title + year or request number and see whether it already
exists in the system.
</p>
</div>
<div className="find-controls">
@@ -518,14 +568,16 @@ export default function HomePage() {
key={`${item.title || 'Untitled'}-${index}`}
type="button"
disabled={!item.requestId}
onClick={() => item.requestId && router.push(`/requests/${item.requestId}`)}
onClick={() =>
item.requestId && router.push(`/requests/${item.requestId}`)
}
>
{item.title || 'Untitled'} {item.year ? `(${item.year})` : ''}{' '}
{!item.requestId
? '- not requested'
: item.statusLabel
? `- ${item.statusLabel}`
: ''}
: '- already requested'}
</button>
))
)}

View File

@@ -368,7 +368,14 @@ export default function RequestTimelinePage() {
const jellyfinLink = snapshot.raw?.jellyfin?.link
const posterUrl = snapshot.artwork?.poster_url
const resolvedPoster =
posterUrl && posterUrl.startsWith('http') ? posterUrl : posterUrl ? `${getApiBase()}${posterUrl}` : null
posterUrl && posterUrl.startsWith('http') ? posterUrl : posterUrl ? `${getApiBase()}${posterUrl}` : null
const hasPartialReadyTimeline = snapshot.timeline.some(
(hop) => hop.service === 'Seerr' && hop.status === 'Partially ready'
)
const currentStatusText =
snapshot.state === 'IMPORTING' && hasPartialReadyTimeline
? 'Partially ready'
: friendlyState(snapshot.state)
return (
<main className="card">
@@ -400,7 +407,7 @@ export default function RequestTimelinePage() {
<section className="status-box">
<div>
<h2>Status</h2>
<p className="status-text">{friendlyState(snapshot.state)}</p>
<p className="status-text">{currentStatusText}</p>
</div>
<div>
<h2>What this means</h2>

View File

@@ -0,0 +1,156 @@
'use client'
import { Suspense, useEffect, useState } from 'react'
import { useRouter, useSearchParams } from 'next/navigation'
import BrandingLogo from '../ui/BrandingLogo'
import { getApiBase } from '../lib/auth'
// Response shape of GET /auth/password/reset/verify.
type ResetVerification = {
status: string
// Shown to the user as the address the link was sent to
// (presumably masked server-side — TODO confirm format).
recipient_hint?: string
// 'jellyfin' widens the provider label to "Jellyfin, Seerr, and Magent";
// anything else is treated as a local Magent account.
auth_provider?: string
expires_at?: string
}
// Reset-password page body. Reads the reset token from the `?token=` query
// parameter, verifies it against the backend on mount, and then lets the
// user choose a new password. Inputs stay disabled until verification
// succeeds. Must be rendered inside <Suspense> because of useSearchParams().
function ResetPasswordPageContent() {
const router = useRouter()
const searchParams = useSearchParams()
const token = searchParams.get('token') ?? ''
// Non-null only after the token has been verified successfully.
const [verification, setVerification] = useState<ResetVerification | null>(null)
const [loading, setLoading] = useState(false)
const [verifying, setVerifying] = useState(true)
const [password, setPassword] = useState('')
const [confirmPassword, setConfirmPassword] = useState('')
const [error, setError] = useState<string | null>(null)
const [status, setStatus] = useState<string | null>(null)
// Verify the token whenever it changes; failures clear `verification`
// so the form stays disabled.
useEffect(() => {
const verifyToken = async () => {
if (!token) {
setError('Password reset link is invalid or missing.')
setVerifying(false)
return
}
setVerifying(true)
setError(null)
setStatus(null)
try {
const baseUrl = getApiBase()
const response = await fetch(
`${baseUrl}/auth/password/reset/verify?token=${encodeURIComponent(token)}`,
)
// Body may be empty/non-JSON; fall back to a generic message below.
const data = await response.json().catch(() => null)
if (!response.ok) {
throw new Error(typeof data?.detail === 'string' ? data.detail : 'Password reset link is invalid.')
}
setVerification(data)
} catch (err) {
console.error(err)
setVerification(null)
setError(err instanceof Error ? err.message : 'Password reset link is invalid.')
} finally {
setVerifying(false)
}
}
void verifyToken()
}, [token])
// Submit the new password. Client-side checks (min length 8, matching
// confirmation) run before the network call; on success the user is
// redirected to /login after a short delay.
const submit = async (event: React.FormEvent) => {
event.preventDefault()
if (!token) {
setError('Password reset link is invalid or missing.')
return
}
if (password.trim().length < 8) {
setError('Password must be at least 8 characters.')
return
}
if (password !== confirmPassword) {
setError('Passwords do not match.')
return
}
setLoading(true)
setError(null)
setStatus(null)
try {
const baseUrl = getApiBase()
const response = await fetch(`${baseUrl}/auth/password/reset`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ token, new_password: password }),
})
const data = await response.json().catch(() => null)
if (!response.ok) {
throw new Error(typeof data?.detail === 'string' ? data.detail : 'Unable to reset password.')
}
setStatus('Password updated. You can now sign in with the new password.')
setPassword('')
setConfirmPassword('')
// Brief pause so the user sees the success banner before redirect.
window.setTimeout(() => router.push('/login'), 1200)
} catch (err) {
console.error(err)
setError(err instanceof Error ? err.message : 'Unable to reset password.')
} finally {
setLoading(false)
}
}
// Jellyfin-backed accounts share the password across services.
const providerLabel =
verification?.auth_provider === 'jellyfin' ? 'Jellyfin, Seerr, and Magent' : 'Magent'
return (
<main className="card auth-card">
<BrandingLogo className="brand-logo brand-logo--login" />
<h1>Reset password</h1>
<p className="lede">Choose a new password for your account.</p>
<form className="auth-form" onSubmit={submit}>
{verifying && <div className="status-banner">Checking password reset link</div>}
{!verifying && verification && (
<div className="status-banner">
This reset link was sent to {verification.recipient_hint || 'your email'} and will update the password
used for {providerLabel}.
</div>
)}
<label>
New password
<input
type="password"
value={password}
onChange={(event) => setPassword(event.target.value)}
autoComplete="new-password"
disabled={!verification || loading}
/>
</label>
<label>
Confirm new password
<input
type="password"
value={confirmPassword}
onChange={(event) => setConfirmPassword(event.target.value)}
autoComplete="new-password"
disabled={!verification || loading}
/>
</label>
{error && <div className="error-banner">{error}</div>}
{status && <div className="status-banner">{status}</div>}
<div className="auth-actions">
<button type="submit" disabled={loading || verifying || !verification}>
{loading ? 'Updating password…' : 'Reset password'}
</button>
</div>
<button type="button" className="ghost-button" onClick={() => router.push('/login')} disabled={loading}>
Back to sign in
</button>
</form>
</main>
)
}
export default function ResetPasswordPage() {
return (
<Suspense fallback={<main className="card auth-card">Loading password reset</main>}>
<ResetPasswordPageContent />
</Suspense>
)
}

View File

@@ -56,6 +56,21 @@ type AdminDiagnosticsPanelProps = {
embedded?: boolean
}
// Detail payload attached to the "database" diagnostic check.
// Byte-valued fields are rendered through formatBytes; counts use
// toLocaleString and timings are shown as "<n>.<d> ms".
type DatabaseDiagnosticDetail = {
integrity_check?: string
database_path?: string
database_size_bytes?: number
wal_size_bytes?: number
shm_size_bytes?: number
page_size_bytes?: number
page_count?: number
freelist_pages?: number
allocated_bytes?: number
free_bytes?: number
// Per-table row counts keyed by snake_case table name.
row_counts?: Record<string, number>
// Per-step durations in milliseconds keyed by snake_case step name.
timings_ms?: Record<string, number>
}
// 30 seconds, in ms — presumably the diagnostics auto-refresh cadence
// (the consuming code is outside this view).
const REFRESH_INTERVAL_MS = 30000
const STATUS_LABELS: Record<string, string> = {
@@ -85,6 +100,54 @@ function statusLabel(status: string) {
return STATUS_LABELS[status] ?? status
}
// Render a byte count as a human-readable string using binary (1024-based)
// thresholds: GB/MB with two decimals, KB with one, raw bytes otherwise.
// Missing, NaN, or negative input is reported as '0 B'.
function formatBytes(value?: number) {
  if (typeof value !== 'number' || Number.isNaN(value) || value < 0) {
    return '0 B'
  }
  const KIB = 1024
  const MIB = KIB * 1024
  const GIB = MIB * 1024
  if (value >= GIB) {
    return `${(value / GIB).toFixed(2)} GB`
  }
  if (value >= MIB) {
    return `${(value / MIB).toFixed(2)} MB`
  }
  if (value >= KIB) {
    return `${(value / KIB).toFixed(1)} KB`
  }
  return `${value} B`
}
// Turn a snake_case metric key into a Title Case display label,
// e.g. 'page_size_bytes' -> 'Page Size Bytes'.
function formatDetailLabel(value: string) {
  const spaced = value.split('_').join(' ')
  return spaced.replace(/\b\w/g, (letter) => letter.toUpperCase())
}
// Narrow an unknown diagnostic payload to the database detail shape.
// Only plain (non-array, non-null) objects qualify; anything else yields null.
function asDatabaseDiagnosticDetail(detail: unknown): DatabaseDiagnosticDetail | null {
  const isPlainObject =
    typeof detail === 'object' && detail !== null && !Array.isArray(detail)
  return isPlainObject ? (detail as DatabaseDiagnosticDetail) : null
}
// Render one titled group of [label, value] metric tiles; returns null so
// empty groups (e.g. no timings) are omitted from the panel entirely.
function renderDatabaseMetricGroup(title: string, values: Array<[string, string]>) {
  if (!values.length) {
    return null
  }
  const items = values.map(([label, value]) => (
    <div key={`${title}-${label}`} className="diagnostic-detail-item">
      <span>{label}</span>
      <strong>{value}</strong>
    </div>
  ))
  return (
    <div className="diagnostic-detail-group">
      <h4>{title}</h4>
      <div className="diagnostic-detail-grid">{items}</div>
    </div>
  )
}
export default function AdminDiagnosticsPanel({ embedded = false }: AdminDiagnosticsPanelProps) {
const router = useRouter()
const [loading, setLoading] = useState(true)
@@ -405,6 +468,43 @@ export default function AdminDiagnosticsPanel({ embedded = false }: AdminDiagnos
<span className="system-dot" />
<span>{isRunning ? 'Running diagnostic...' : check.message}</span>
</div>
{check.key === 'database'
? (() => {
const detail = asDatabaseDiagnosticDetail(check.detail)
if (!detail) {
return null
}
return (
<div className="diagnostic-detail-panel">
{renderDatabaseMetricGroup('Storage', [
['Database file', formatBytes(detail.database_size_bytes)],
['WAL file', formatBytes(detail.wal_size_bytes)],
['Shared memory', formatBytes(detail.shm_size_bytes)],
['Allocated bytes', formatBytes(detail.allocated_bytes)],
['Free bytes', formatBytes(detail.free_bytes)],
['Page size', formatBytes(detail.page_size_bytes)],
['Page count', `${detail.page_count?.toLocaleString() ?? 0}`],
['Freelist pages', `${detail.freelist_pages?.toLocaleString() ?? 0}`],
])}
{renderDatabaseMetricGroup(
'Tables',
Object.entries(detail.row_counts ?? {}).map(([key, value]) => [
formatDetailLabel(key),
value.toLocaleString(),
]),
)}
{renderDatabaseMetricGroup(
'Timings',
Object.entries(detail.timings_ms ?? {}).map(([key, value]) => [
formatDetailLabel(key),
`${value.toFixed(1)} ms`,
]),
)}
</div>
)
})()
: null}
</article>
)
})}

View File

@@ -1,12 +1,12 @@
{
"name": "magent-frontend",
"version": "0203261953",
"version": "0303261611",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "magent-frontend",
"version": "0203261953",
"version": "0303261611",
"dependencies": {
"next": "16.1.6",
"react": "19.2.4",

View File

@@ -1,7 +1,7 @@
{
"name": "magent-frontend",
"private": true,
"version": "0203261953",
"version": "0303261611",
"scripts": {
"dev": "next dev",
"build": "next build",