29 Commits

SHA1 Message Date
d045dd0b07 Build 0202261541: allow FQDN service URLs 2026-02-02 15:43:08 +13:00
138069590b Build 3001262148: single container 2026-01-30 21:54:25 +13:00
8125b766c7 Build 2901262244: format changelog 2026-01-29 22:46:02 +13:00
d53e2917aa Build 2901262240: cache users 2026-01-29 22:42:00 +13:00
d7847652db Tidy full changelog 2026-01-29 22:13:04 +13:00
24ac54d606 Update full changelog 2026-01-29 22:08:17 +13:00
62f392ad37 Bake build number and changelog 2026-01-29 22:03:12 +13:00
e42ae8585d Hardcode build number in backend 2026-01-29 21:49:01 +13:00
06e0797722 release: 2901262102 2026-01-29 21:03:32 +13:00
914f478178 release: 2901262044 2026-01-29 20:45:20 +13:00
fb65d646f2 release: 2901262036 2026-01-29 20:38:37 +13:00
3493bf715e Hydrate missing artwork from Jellyseerr (build 271261539) 2026-01-27 15:40:36 +13:00
b98239ab3e Fallback to TMDB when artwork cache fails (build 271261524) 2026-01-27 15:26:10 +13:00
40dc46c0c5 Add service test buttons (build 271261335) 2026-01-27 13:36:35 +13:00
d23d84ea42 Bump build number (process 2) 271261322 2026-01-27 13:24:35 +13:00
7d6cdcbe02 Add cache load spinner (build 271261238) 2026-01-27 12:39:51 +13:00
0e95f94025 Fix snapshot title fallback (build 271261228) 2026-01-27 12:30:04 +13:00
8b1a09fbd4 Fix request titles in snapshots (build 271261219) 2026-01-27 12:20:12 +13:00
fe0c108363 Bump build number to 271261202 2026-01-27 12:04:42 +13:00
9e8d22ba85 Clarify request sync settings (build 271261159) 2026-01-27 12:00:32 +13:00
7863658a19 Fix backend cache stats import (build 271261149) 2026-01-27 11:51:01 +13:00
7c97934bb9 Improve cache stats performance (build 271261145) 2026-01-27 11:46:50 +13:00
3f51e24181 Add cache control artwork stats 2026-01-27 11:27:26 +13:00
ab27ebfadf Fix sync progress bar animation 2026-01-26 14:21:18 +13:00
b93b41713a Fix cache title hydration 2026-01-26 14:01:06 +13:00
ceb8c1c9eb Build 2501262041 2026-01-25 20:43:14 +13:00
86ca3bdeb2 Harden request cache titles and cache-only reads 2026-01-25 19:38:31 +13:00
22f90b7e07 Serve bundled branding assets by default 2026-01-25 18:20:30 +13:00
57a4883931 Seed branding logo from bundled assets 2026-01-25 18:01:54 +13:00
32 changed files with 2775 additions and 384 deletions

View File

@@ -1 +1 @@
251260452
0202261541

Dockerfile (new file, 53 lines)
View File

@@ -0,0 +1,53 @@
FROM node:20-slim AS frontend-builder
WORKDIR /frontend
ENV NODE_ENV=production \
BACKEND_INTERNAL_URL=http://127.0.0.1:8000 \
NEXT_PUBLIC_API_BASE=/api
COPY frontend/package.json ./
RUN npm install
COPY frontend/app ./app
COPY frontend/public ./public
COPY frontend/next-env.d.ts ./next-env.d.ts
COPY frontend/next.config.js ./next.config.js
COPY frontend/tsconfig.json ./tsconfig.json
RUN npm run build
FROM python:3.12-slim
WORKDIR /app
ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1 \
NODE_ENV=production
RUN apt-get update \
&& apt-get install -y --no-install-recommends curl gnupg supervisor \
&& curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \
&& apt-get install -y --no-install-recommends nodejs \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
COPY backend/requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY backend/app ./app
COPY data/branding /app/data/branding
COPY --from=frontend-builder /frontend/.next /app/frontend/.next
COPY --from=frontend-builder /frontend/public /app/frontend/public
COPY --from=frontend-builder /frontend/node_modules /app/frontend/node_modules
COPY --from=frontend-builder /frontend/package.json /app/frontend/package.json
COPY --from=frontend-builder /frontend/next.config.js /app/frontend/next.config.js
COPY --from=frontend-builder /frontend/next-env.d.ts /app/frontend/next-env.d.ts
COPY --from=frontend-builder /frontend/tsconfig.json /app/frontend/tsconfig.json
COPY docker/supervisord.conf /etc/supervisor/conf.d/magent.conf
EXPOSE 3000 8000
CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/magent.conf"]

View File

@@ -2,11 +2,8 @@ FROM python:3.12-slim
WORKDIR /app
ARG BUILD_NUMBER=dev
ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1 \
SITE_BUILD_NUMBER=${BUILD_NUMBER}
PYTHONUNBUFFERED=1
COPY backend/requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

Binary file not shown (new file, 4.8 KiB).

Binary file not shown (new file, 38 KiB).

View File

@@ -47,6 +47,7 @@ def get_current_user(token: str = Depends(oauth2_scheme), request: Request = Non
"username": user["username"],
"role": user["role"],
"auth_provider": user.get("auth_provider", "local"),
"jellyseerr_user_id": user.get("jellyseerr_user_id"),
}

View File

@@ -0,0 +1,2 @@
BUILD_NUMBER = "0202261541"
CHANGELOG = '2026-01-22\\n- Initial commit\\n- Ignore build artifacts\\n- Update README\\n- Update README with Docker-first guide\\n\\n2026-01-23\\n- Fix cache titles via Jellyseerr media lookup\\n- Split search actions and improve download options\\n- Fallback manual grab to qBittorrent\\n- Hide header actions when signed out\\n- Add feedback form and webhook\\n- Fix cache titles and move feedback link\\n- Show available status on landing when in Jellyfin\\n- Add default branding assets when missing\\n- Use bundled branding assets\\n- Remove password fields from users page\\n- Add Docker Hub compose override\\n- Fix backend Dockerfile paths for root context\\n- Copy public assets into frontend image\\n- Use backend branding assets for logo and favicon\\n\\n2026-01-24\\n- Route grabs through Sonarr/Radarr only\\n- Document fix buttons in how-it-works\\n- Clarify how-it-works steps and fixes\\n- Map Prowlarr releases to Arr indexers for manual grab\\n- Improve request handling and qBittorrent categories\\n\\n2026-01-25\\n- Add site banner, build number, and changelog\\n- Automate build number tagging and sync\\n- Improve mobile header layout\\n- Move account actions into avatar menu\\n- Add user stats and activity tracking\\n- Add Jellyfin login cache and admin-only stats\\n- Tidy request sync controls\\n- Seed branding logo from bundled assets\\n- Serve bundled branding assets by default\\n- Harden request cache titles and cache-only reads\\n- Build 2501262041\\n\\n2026-01-26\\n- Fix cache title hydration\\n- Fix sync progress bar animation\\n\\n2026-01-27\\n- Add cache control artwork stats\\n- Improve cache stats performance (build 271261145)\\n- Fix backend cache stats import (build 271261149)\\n- Clarify request sync settings (build 271261159)\\n- Bump build number to 271261202\\n- Fix request titles in snapshots (build 271261219)\\n- Fix snapshot title fallback (build 271261228)\\n- Add cache load spinner (build 271261238)\\n- Bump build number (process 2) 271261322\\n- Add service test buttons (build 271261335)\\n- Fallback to TMDB when artwork cache fails (build 271261524)\\n- Hydrate missing artwork from Jellyseerr (build 271261539)\\n\\n2026-01-29\\n- release: 2901262036\\n- release: 2901262044\\n- release: 2901262102\\n- Hardcode build number in backend\\n- Bake build number and changelog\\n- Update full changelog\\n- Tidy full changelog\\n- Build 2901262240: cache users\\n\\n2026-01-30\\n- Merge backend and frontend into one container'

View File

@@ -35,3 +35,12 @@ class JellyseerrClient(ApiClient):
"page": page,
},
)
async def get_users(self, take: int = 50, skip: int = 0) -> Optional[Dict[str, Any]]:
return await self.get(
"/api/v1/user",
params={
"take": take,
"skip": skip,
},
)
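A minimal usage sketch for the new get_users method. The base URL and API key are placeholders, and the import path assumes the backend package layout (app/clients/jellyseerr.py).

import asyncio
from app.clients.jellyseerr import JellyseerrClient  # module path is an assumption

async def main() -> None:
    client = JellyseerrClient("http://jellyseerr:5055", "JELLYSEERR_API_KEY")
    # Jellyseerr pages its user list; the settings router walks it by
    # bumping "skip" until a short page comes back.
    page = await client.get_users(take=100, skip=0)
    print(page)

asyncio.run(main())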

View File

@@ -2,6 +2,7 @@ from typing import Optional
from pydantic import AliasChoices, Field
from pydantic_settings import BaseSettings, SettingsConfigDict
from .build_info import BUILD_NUMBER, CHANGELOG
class Settings(BaseSettings):
model_config = SettingsConfigDict(env_prefix="")
@@ -38,9 +39,7 @@ class Settings(BaseSettings):
artwork_cache_mode: str = Field(
default="remote", validation_alias=AliasChoices("ARTWORK_CACHE_MODE")
)
site_build_number: Optional[str] = Field(
default=None, validation_alias=AliasChoices("SITE_BUILD_NUMBER")
)
site_build_number: Optional[str] = Field(default=BUILD_NUMBER)
site_banner_enabled: bool = Field(
default=False, validation_alias=AliasChoices("SITE_BANNER_ENABLED")
)
@@ -50,9 +49,7 @@ class Settings(BaseSettings):
site_banner_tone: str = Field(
default="info", validation_alias=AliasChoices("SITE_BANNER_TONE")
)
site_changelog: Optional[str] = Field(
default=None, validation_alias=AliasChoices("SITE_CHANGELOG")
)
site_changelog: Optional[str] = Field(default=CHANGELOG)
jellyseerr_base_url: Optional[str] = Field(
default=None, validation_alias=AliasChoices("JELLYSEERR_URL", "JELLYSEERR_BASE_URL")

View File

@@ -24,6 +24,92 @@ def _connect() -> sqlite3.Connection:
return sqlite3.connect(_db_path())
def _normalize_title_value(title: Optional[str]) -> Optional[str]:
if not isinstance(title, str):
return None
trimmed = title.strip()
return trimmed if trimmed else None
def _normalize_year_value(year: Optional[Any]) -> Optional[int]:
if isinstance(year, int):
return year
if isinstance(year, str):
trimmed = year.strip()
if trimmed.isdigit():
return int(trimmed)
return None
def _is_placeholder_title(title: Optional[str], request_id: Optional[int]) -> bool:
if not isinstance(title, str):
return True
normalized = title.strip().lower()
if not normalized:
return True
if normalized == "untitled":
return True
if request_id and normalized == f"request {request_id}":
return True
return False
def _extract_title_year_from_payload(payload_json: Optional[str]) -> tuple[Optional[str], Optional[int]]:
if not payload_json:
return None, None
try:
payload = json.loads(payload_json)
except json.JSONDecodeError:
return None, None
if not isinstance(payload, dict):
return None, None
media = payload.get("media") or {}
title = None
year = None
if isinstance(media, dict):
title = media.get("title") or media.get("name")
year = media.get("year")
if not title:
title = payload.get("title") or payload.get("name")
if year is None:
year = payload.get("year")
return _normalize_title_value(title), _normalize_year_value(year)
def _extract_tmdb_from_payload(payload_json: Optional[str]) -> tuple[Optional[int], Optional[str]]:
if not payload_json:
return None, None
try:
payload = json.loads(payload_json)
except (TypeError, json.JSONDecodeError):
return None, None
if not isinstance(payload, dict):
return None, None
media = payload.get("media") or {}
if not isinstance(media, dict):
media = {}
tmdb_id = (
media.get("tmdbId")
or payload.get("tmdbId")
or payload.get("tmdb_id")
or media.get("externalServiceId")
or payload.get("externalServiceId")
)
media_type = (
media.get("mediaType")
or payload.get("mediaType")
or payload.get("media_type")
or payload.get("type")
)
try:
tmdb_id = int(tmdb_id) if tmdb_id is not None else None
except (TypeError, ValueError):
tmdb_id = None
if isinstance(media_type, str):
media_type = media_type.strip().lower() or None
return tmdb_id, media_type
def init_db() -> None:
with _connect() as conn:
conn.execute(
@@ -59,6 +145,7 @@ def init_db() -> None:
password_hash TEXT NOT NULL,
role TEXT NOT NULL,
auth_provider TEXT NOT NULL DEFAULT 'local',
jellyseerr_user_id INTEGER,
created_at TEXT NOT NULL,
last_login_at TEXT,
is_blocked INTEGER NOT NULL DEFAULT 0,
@@ -87,12 +174,28 @@ def init_db() -> None:
year INTEGER,
requested_by TEXT,
requested_by_norm TEXT,
requested_by_id INTEGER,
created_at TEXT,
updated_at TEXT,
payload_json TEXT NOT NULL
)
"""
)
conn.execute(
"""
CREATE TABLE IF NOT EXISTS artwork_cache_status (
request_id INTEGER PRIMARY KEY,
tmdb_id INTEGER,
media_type TEXT,
poster_path TEXT,
backdrop_path TEXT,
has_tmdb INTEGER NOT NULL DEFAULT 0,
poster_cached INTEGER NOT NULL DEFAULT 0,
backdrop_cached INTEGER NOT NULL DEFAULT 0,
updated_at TEXT NOT NULL
)
"""
)
conn.execute(
"""
CREATE INDEX IF NOT EXISTS idx_requests_cache_created_at
@@ -105,6 +208,12 @@ def init_db() -> None:
ON requests_cache (requested_by_norm)
"""
)
conn.execute(
"""
CREATE INDEX IF NOT EXISTS idx_artwork_cache_status_updated_at
ON artwork_cache_status (updated_at)
"""
)
conn.execute(
"""
CREATE TABLE IF NOT EXISTS user_activity (
@@ -151,6 +260,23 @@ def init_db() -> None:
conn.execute("ALTER TABLE users ADD COLUMN last_jellyfin_auth_at TEXT")
except sqlite3.OperationalError:
pass
try:
conn.execute("ALTER TABLE users ADD COLUMN jellyseerr_user_id INTEGER")
except sqlite3.OperationalError:
pass
try:
conn.execute("ALTER TABLE requests_cache ADD COLUMN requested_by_id INTEGER")
except sqlite3.OperationalError:
pass
try:
conn.execute(
"""
CREATE INDEX IF NOT EXISTS idx_requests_cache_requested_by_id
ON requests_cache (requested_by_id)
"""
)
except sqlite3.OperationalError:
pass
_backfill_auth_providers()
ensure_admin_user()
@@ -254,31 +380,41 @@ def ensure_admin_user() -> None:
create_user(settings.admin_username, settings.admin_password, role="admin")
def create_user(username: str, password: str, role: str = "user", auth_provider: str = "local") -> None:
def create_user(
username: str,
password: str,
role: str = "user",
auth_provider: str = "local",
jellyseerr_user_id: Optional[int] = None,
) -> None:
created_at = datetime.now(timezone.utc).isoformat()
password_hash = hash_password(password)
with _connect() as conn:
conn.execute(
"""
INSERT INTO users (username, password_hash, role, auth_provider, created_at)
VALUES (?, ?, ?, ?, ?)
INSERT INTO users (username, password_hash, role, auth_provider, jellyseerr_user_id, created_at)
VALUES (?, ?, ?, ?, ?, ?)
""",
(username, password_hash, role, auth_provider, created_at),
(username, password_hash, role, auth_provider, jellyseerr_user_id, created_at),
)
def create_user_if_missing(
username: str, password: str, role: str = "user", auth_provider: str = "local"
username: str,
password: str,
role: str = "user",
auth_provider: str = "local",
jellyseerr_user_id: Optional[int] = None,
) -> bool:
created_at = datetime.now(timezone.utc).isoformat()
password_hash = hash_password(password)
with _connect() as conn:
cursor = conn.execute(
"""
INSERT OR IGNORE INTO users (username, password_hash, role, auth_provider, created_at)
VALUES (?, ?, ?, ?, ?)
INSERT OR IGNORE INTO users (username, password_hash, role, auth_provider, jellyseerr_user_id, created_at)
VALUES (?, ?, ?, ?, ?, ?)
""",
(username, password_hash, role, auth_provider, created_at),
(username, password_hash, role, auth_provider, jellyseerr_user_id, created_at),
)
return cursor.rowcount > 0
@@ -287,10 +423,10 @@ def get_user_by_username(username: str) -> Optional[Dict[str, Any]]:
with _connect() as conn:
row = conn.execute(
"""
SELECT id, username, password_hash, role, auth_provider, created_at, last_login_at,
is_blocked, jellyfin_password_hash, last_jellyfin_auth_at
SELECT id, username, password_hash, role, auth_provider, jellyseerr_user_id,
created_at, last_login_at, is_blocked, jellyfin_password_hash, last_jellyfin_auth_at
FROM users
WHERE username = ?
WHERE username = ? COLLATE NOCASE
""",
(username,),
).fetchone()
@@ -302,19 +438,47 @@ def get_user_by_username(username: str) -> Optional[Dict[str, Any]]:
"password_hash": row[2],
"role": row[3],
"auth_provider": row[4],
"created_at": row[5],
"last_login_at": row[6],
"is_blocked": bool(row[7]),
"jellyfin_password_hash": row[8],
"last_jellyfin_auth_at": row[9],
"jellyseerr_user_id": row[5],
"created_at": row[6],
"last_login_at": row[7],
"is_blocked": bool(row[8]),
"jellyfin_password_hash": row[9],
"last_jellyfin_auth_at": row[10],
}
def get_user_by_id(user_id: int) -> Optional[Dict[str, Any]]:
with _connect() as conn:
row = conn.execute(
"""
SELECT id, username, password_hash, role, auth_provider, jellyseerr_user_id,
created_at, last_login_at, is_blocked, jellyfin_password_hash, last_jellyfin_auth_at
FROM users
WHERE id = ?
""",
(user_id,),
).fetchone()
if not row:
return None
return {
"id": row[0],
"username": row[1],
"password_hash": row[2],
"role": row[3],
"auth_provider": row[4],
"jellyseerr_user_id": row[5],
"created_at": row[6],
"last_login_at": row[7],
"is_blocked": bool(row[8]),
"jellyfin_password_hash": row[9],
"last_jellyfin_auth_at": row[10],
}
def get_all_users() -> list[Dict[str, Any]]:
with _connect() as conn:
rows = conn.execute(
"""
SELECT id, username, role, auth_provider, created_at, last_login_at, is_blocked
SELECT id, username, role, auth_provider, jellyseerr_user_id, created_at, last_login_at, is_blocked
FROM users
ORDER BY username COLLATE NOCASE
"""
@@ -327,14 +491,35 @@ def get_all_users() -> list[Dict[str, Any]]:
"username": row[1],
"role": row[2],
"auth_provider": row[3],
"created_at": row[4],
"last_login_at": row[5],
"is_blocked": bool(row[6]),
"jellyseerr_user_id": row[4],
"created_at": row[5],
"last_login_at": row[6],
"is_blocked": bool(row[7]),
}
)
return results
def delete_non_admin_users() -> int:
with _connect() as conn:
cursor = conn.execute(
"""
DELETE FROM users WHERE role != 'admin'
"""
)
return cursor.rowcount
def set_user_jellyseerr_id(username: str, jellyseerr_user_id: Optional[int]) -> None:
with _connect() as conn:
conn.execute(
"""
UPDATE users SET jellyseerr_user_id = ? WHERE username = ?
""",
(jellyseerr_user_id, username),
)
def set_last_login(username: str) -> None:
timestamp = datetime.now(timezone.utc).isoformat()
with _connect() as conn:
@@ -396,7 +581,7 @@ def set_jellyfin_auth_cache(username: str, password: str) -> None:
"""
UPDATE users
SET jellyfin_password_hash = ?, last_jellyfin_auth_at = ?
WHERE username = ?
WHERE username = ? COLLATE NOCASE
""",
(password_hash, timestamp, username),
)
@@ -507,8 +692,8 @@ def get_user_activity_summary(username: str) -> Dict[str, Any]:
}
def get_user_request_stats(username_norm: str) -> Dict[str, Any]:
if not username_norm:
def get_user_request_stats(username_norm: str, requested_by_id: Optional[int] = None) -> Dict[str, Any]:
if requested_by_id is None:
return {
"total": 0,
"ready": 0,
@@ -525,26 +710,26 @@ def get_user_request_stats(username_norm: str) -> Dict[str, Any]:
"""
SELECT COUNT(*)
FROM requests_cache
WHERE requested_by_norm = ?
WHERE requested_by_id = ?
""",
(username_norm,),
(requested_by_id,),
).fetchone()
status_rows = conn.execute(
"""
SELECT status, COUNT(*)
FROM requests_cache
WHERE requested_by_norm = ?
WHERE requested_by_id = ?
GROUP BY status
""",
(username_norm,),
(requested_by_id,),
).fetchall()
last_row = conn.execute(
"""
SELECT MAX(created_at)
FROM requests_cache
WHERE requested_by_norm = ?
WHERE requested_by_id = ?
""",
(username_norm,),
(requested_by_id,),
).fetchone()
counts = {int(row[0]): int(row[1]) for row in status_rows if row[0] is not None}
pending = counts.get(1, 0)
@@ -599,11 +784,39 @@ def upsert_request_cache(
year: Optional[int],
requested_by: Optional[str],
requested_by_norm: Optional[str],
requested_by_id: Optional[int],
created_at: Optional[str],
updated_at: Optional[str],
payload_json: str,
) -> None:
normalized_title = _normalize_title_value(title)
normalized_year = _normalize_year_value(year)
derived_title = None
derived_year = None
if not normalized_title or normalized_year is None:
derived_title, derived_year = _extract_title_year_from_payload(payload_json)
if _is_placeholder_title(normalized_title, request_id):
normalized_title = None
if derived_title and not normalized_title:
normalized_title = derived_title
if normalized_year is None and derived_year is not None:
normalized_year = derived_year
with _connect() as conn:
existing_title = None
existing_year = None
if normalized_title is None or normalized_year is None:
row = conn.execute(
"SELECT title, year FROM requests_cache WHERE request_id = ?",
(request_id,),
).fetchone()
if row:
existing_title, existing_year = row[0], row[1]
if _is_placeholder_title(existing_title, request_id):
existing_title = None
if normalized_title is None and existing_title:
normalized_title = existing_title
if normalized_year is None and existing_year is not None:
normalized_year = existing_year
conn.execute(
"""
INSERT INTO requests_cache (
@@ -615,11 +828,12 @@ def upsert_request_cache(
year,
requested_by,
requested_by_norm,
requested_by_id,
created_at,
updated_at,
payload_json
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(request_id) DO UPDATE SET
media_id = excluded.media_id,
media_type = excluded.media_type,
@@ -628,6 +842,7 @@ def upsert_request_cache(
year = excluded.year,
requested_by = excluded.requested_by,
requested_by_norm = excluded.requested_by_norm,
requested_by_id = excluded.requested_by_id,
created_at = excluded.created_at,
updated_at = excluded.updated_at,
payload_json = excluded.payload_json
@@ -637,10 +852,11 @@ def upsert_request_cache(
media_id,
media_type,
status,
title,
year,
normalized_title,
normalized_year,
requested_by,
requested_by_norm,
requested_by_id,
created_at,
updated_at,
payload_json,
@@ -710,15 +926,20 @@ def get_cached_requests(
limit: int,
offset: int,
requested_by_norm: Optional[str] = None,
requested_by_id: Optional[int] = None,
since_iso: Optional[str] = None,
) -> list[Dict[str, Any]]:
query = """
SELECT request_id, media_id, media_type, status, title, year, requested_by, created_at, payload_json
SELECT request_id, media_id, media_type, status, title, year, requested_by,
requested_by_norm, requested_by_id, created_at, payload_json
FROM requests_cache
"""
params: list[Any] = []
conditions = []
if requested_by_norm:
if requested_by_id is not None:
conditions.append("requested_by_id = ?")
params.append(requested_by_id)
elif requested_by_norm:
conditions.append("requested_by_norm = ?")
params.append(requested_by_norm)
if since_iso:
@@ -731,32 +952,23 @@ def get_cached_requests(
with _connect() as conn:
rows = conn.execute(query, tuple(params)).fetchall()
logger.debug(
"requests_cache list: count=%s requested_by_norm=%s since_iso=%s",
"requests_cache list: count=%s requested_by_norm=%s requested_by_id=%s since_iso=%s",
len(rows),
requested_by_norm,
requested_by_id,
since_iso,
)
results: list[Dict[str, Any]] = []
for row in rows:
title = row[4]
year = row[5]
if (not title or not year) and row[8]:
try:
payload = json.loads(row[8])
if isinstance(payload, dict):
media = payload.get("media") or {}
payload_json = row[10]
if (not title or not year) and payload_json:
derived_title, derived_year = _extract_title_year_from_payload(payload_json)
if not title:
title = (
(media.get("title") if isinstance(media, dict) else None)
or (media.get("name") if isinstance(media, dict) else None)
or payload.get("title")
or payload.get("name")
)
title = derived_title
if not year:
year = media.get("year") if isinstance(media, dict) else None
year = year or payload.get("year")
except json.JSONDecodeError:
pass
year = derived_year
results.append(
{
"request_id": row[0],
@@ -766,18 +978,47 @@ def get_cached_requests(
"title": title,
"year": year,
"requested_by": row[6],
"created_at": row[7],
"requested_by_norm": row[7],
"requested_by_id": row[8],
"created_at": row[9],
}
)
return results
def get_cached_requests_count(
requested_by_norm: Optional[str] = None,
requested_by_id: Optional[int] = None,
since_iso: Optional[str] = None,
) -> int:
query = "SELECT COUNT(*) FROM requests_cache"
params: list[Any] = []
conditions = []
if requested_by_id is not None:
conditions.append("requested_by_id = ?")
params.append(requested_by_id)
elif requested_by_norm:
conditions.append("requested_by_norm = ?")
params.append(requested_by_norm)
if since_iso:
conditions.append("created_at >= ?")
params.append(since_iso)
if conditions:
query += " WHERE " + " AND ".join(conditions)
with _connect() as conn:
row = conn.execute(query, tuple(params)).fetchone()
if not row:
return 0
return int(row[0])
def get_request_cache_overview(limit: int = 50) -> list[Dict[str, Any]]:
limit = max(1, min(limit, 200))
with _connect() as conn:
rows = conn.execute(
"""
SELECT request_id, media_id, media_type, status, title, year, requested_by, created_at, updated_at, payload_json
SELECT request_id, media_id, media_type, status, title, year, requested_by,
requested_by_norm, requested_by_id, created_at, updated_at, payload_json
FROM requests_cache
ORDER BY updated_at DESC, request_id DESC
LIMIT ?
@@ -787,19 +1028,9 @@ def get_request_cache_overview(limit: int = 50) -> list[Dict[str, Any]]:
results: list[Dict[str, Any]] = []
for row in rows:
title = row[4]
if not title and row[9]:
try:
payload = json.loads(row[9])
if isinstance(payload, dict):
media = payload.get("media") or {}
title = (
(media.get("title") if isinstance(media, dict) else None)
or (media.get("name") if isinstance(media, dict) else None)
or payload.get("title")
or payload.get("name")
)
except json.JSONDecodeError:
title = row[4]
if not title and row[11]:
derived_title, _ = _extract_title_year_from_payload(row[11])
title = derived_title or row[4]
results.append(
{
"request_id": row[0],
@@ -809,8 +1040,38 @@ def get_request_cache_overview(limit: int = 50) -> list[Dict[str, Any]]:
"title": title,
"year": row[5],
"requested_by": row[6],
"created_at": row[7],
"updated_at": row[8],
"requested_by_norm": row[7],
"requested_by_id": row[8],
"created_at": row[9],
"updated_at": row[10],
}
)
return results
def get_request_cache_missing_titles(limit: int = 200) -> list[Dict[str, Any]]:
limit = max(1, min(limit, 500))
with _connect() as conn:
rows = conn.execute(
"""
SELECT request_id, payload_json
FROM requests_cache
WHERE title IS NULL OR TRIM(title) = '' OR LOWER(title) = 'untitled'
ORDER BY updated_at DESC, request_id DESC
LIMIT ?
""",
(limit,),
).fetchall()
results: list[Dict[str, Any]] = []
for row in rows:
payload_json = row[1]
tmdb_id, media_type = _extract_tmdb_from_payload(payload_json)
results.append(
{
"request_id": row[0],
"payload_json": payload_json,
"tmdb_id": tmdb_id,
"media_type": media_type,
}
)
return results
@@ -822,10 +1083,126 @@ def get_request_cache_count() -> int:
return int(row[0] or 0)
def upsert_artwork_cache_status(
request_id: int,
tmdb_id: Optional[int],
media_type: Optional[str],
poster_path: Optional[str],
backdrop_path: Optional[str],
has_tmdb: bool,
poster_cached: bool,
backdrop_cached: bool,
) -> None:
updated_at = datetime.now(timezone.utc).isoformat()
with _connect() as conn:
conn.execute(
"""
INSERT INTO artwork_cache_status (
request_id,
tmdb_id,
media_type,
poster_path,
backdrop_path,
has_tmdb,
poster_cached,
backdrop_cached,
updated_at
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(request_id) DO UPDATE SET
tmdb_id = excluded.tmdb_id,
media_type = excluded.media_type,
poster_path = excluded.poster_path,
backdrop_path = excluded.backdrop_path,
has_tmdb = excluded.has_tmdb,
poster_cached = excluded.poster_cached,
backdrop_cached = excluded.backdrop_cached,
updated_at = excluded.updated_at
""",
(
request_id,
tmdb_id,
media_type,
poster_path,
backdrop_path,
1 if has_tmdb else 0,
1 if poster_cached else 0,
1 if backdrop_cached else 0,
updated_at,
),
)
def get_artwork_cache_status_count() -> int:
with _connect() as conn:
row = conn.execute("SELECT COUNT(*) FROM artwork_cache_status").fetchone()
return int(row[0] or 0)
def get_artwork_cache_missing_count() -> int:
with _connect() as conn:
row = conn.execute(
"""
SELECT COUNT(*)
FROM artwork_cache_status
WHERE (
(poster_path IS NULL AND has_tmdb = 1)
OR (poster_path IS NOT NULL AND poster_cached = 0)
OR (backdrop_path IS NULL AND has_tmdb = 1)
OR (backdrop_path IS NOT NULL AND backdrop_cached = 0)
)
"""
).fetchone()
return int(row[0] or 0)
def update_artwork_cache_stats(
cache_bytes: Optional[int] = None,
cache_files: Optional[int] = None,
missing_count: Optional[int] = None,
total_requests: Optional[int] = None,
) -> None:
updated_at = datetime.now(timezone.utc).isoformat()
if cache_bytes is not None:
set_setting("artwork_cache_bytes", str(int(cache_bytes)))
if cache_files is not None:
set_setting("artwork_cache_files", str(int(cache_files)))
if missing_count is not None:
set_setting("artwork_cache_missing", str(int(missing_count)))
if total_requests is not None:
set_setting("artwork_cache_total_requests", str(int(total_requests)))
set_setting("artwork_cache_updated_at", updated_at)
def get_artwork_cache_stats() -> Dict[str, Any]:
def _get_int(key: str) -> int:
value = get_setting(key)
if value is None:
return 0
try:
return int(value)
except (TypeError, ValueError):
return 0
return {
"cache_bytes": _get_int("artwork_cache_bytes"),
"cache_files": _get_int("artwork_cache_files"),
"missing_artwork": _get_int("artwork_cache_missing"),
"total_requests": _get_int("artwork_cache_total_requests"),
"updated_at": get_setting("artwork_cache_updated_at"),
}
def get_request_cache_stats() -> Dict[str, Any]:
return get_artwork_cache_stats()
def update_request_cache_title(
request_id: int, title: str, year: Optional[int] = None
) -> None:
if not title:
normalized_title = _normalize_title_value(title)
normalized_year = _normalize_year_value(year)
if not normalized_title:
return
with _connect() as conn:
conn.execute(
@@ -834,10 +1211,38 @@ def update_request_cache_title(
SET title = ?, year = COALESCE(?, year)
WHERE request_id = ?
""",
(title, year, request_id),
(normalized_title, normalized_year, request_id),
)
def repair_request_cache_titles() -> int:
updated = 0
with _connect() as conn:
rows = conn.execute(
"""
SELECT request_id, title, year, payload_json
FROM requests_cache
"""
).fetchall()
for row in rows:
request_id, title, year, payload_json = row
if not _is_placeholder_title(title, request_id):
continue
derived_title, derived_year = _extract_title_year_from_payload(payload_json)
if not derived_title:
continue
conn.execute(
"""
UPDATE requests_cache
SET title = ?, year = COALESCE(?, year)
WHERE request_id = ?
""",
(derived_title, derived_year, request_id),
)
updated += 1
return updated
def prune_duplicate_requests_cache() -> int:
with _connect() as conn:
cursor = conn.execute(
@@ -880,11 +1285,45 @@ def get_request_cache_payloads(limit: int = 200, offset: int = 0) -> list[Dict[s
return results
def get_request_cache_payloads_missing(limit: int = 200, offset: int = 0) -> list[Dict[str, Any]]:
limit = max(1, min(limit, 1000))
offset = max(0, offset)
with _connect() as conn:
rows = conn.execute(
"""
SELECT rc.request_id, rc.payload_json
FROM requests_cache rc
JOIN artwork_cache_status acs
ON rc.request_id = acs.request_id
WHERE (
(acs.poster_path IS NULL AND acs.has_tmdb = 1)
OR (acs.poster_path IS NOT NULL AND acs.poster_cached = 0)
OR (acs.backdrop_path IS NULL AND acs.has_tmdb = 1)
OR (acs.backdrop_path IS NOT NULL AND acs.backdrop_cached = 0)
)
ORDER BY rc.request_id ASC
LIMIT ? OFFSET ?
""",
(limit, offset),
).fetchall()
results: list[Dict[str, Any]] = []
for row in rows:
payload = None
if row[1]:
try:
payload = json.loads(row[1])
except json.JSONDecodeError:
payload = None
results.append({"request_id": row[0], "payload": payload})
return results
def get_cached_requests_since(since_iso: str) -> list[Dict[str, Any]]:
with _connect() as conn:
rows = conn.execute(
"""
SELECT request_id, media_id, media_type, status, title, year, requested_by, requested_by_norm, created_at
SELECT request_id, media_id, media_type, status, title, year, requested_by,
requested_by_norm, requested_by_id, created_at
FROM requests_cache
WHERE created_at >= ?
ORDER BY created_at DESC, request_id DESC
@@ -903,14 +1342,17 @@ def get_cached_requests_since(since_iso: str) -> list[Dict[str, Any]]:
"year": row[5],
"requested_by": row[6],
"requested_by_norm": row[7],
"created_at": row[8],
"requested_by_id": row[8],
"created_at": row[9],
}
)
return results
def get_cached_request_by_media_id(
media_id: int, requested_by_norm: Optional[str] = None
media_id: int,
requested_by_norm: Optional[str] = None,
requested_by_id: Optional[int] = None,
) -> Optional[Dict[str, Any]]:
query = """
SELECT request_id, status
@@ -918,7 +1360,10 @@ def get_cached_request_by_media_id(
WHERE media_id = ?
"""
params: list[Any] = [media_id]
if requested_by_norm:
if requested_by_id is not None:
query += " AND requested_by_id = ?"
params.append(requested_by_id)
elif requested_by_norm:
query += " AND requested_by_norm = ?"
params.append(requested_by_norm)
query += " ORDER BY created_at DESC, request_id DESC LIMIT 1"

View File

@@ -4,7 +4,7 @@ from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from .config import settings
from .db import init_db, set_setting
from .db import init_db
from .routers.requests import (
router as requests_router,
startup_warmup_requests_cache,
@@ -41,8 +41,6 @@ async def health() -> dict:
@app.on_event("startup")
async def startup() -> None:
init_db()
if settings.site_build_number and settings.site_build_number.strip():
set_setting("site_build_number", settings.site_build_number.strip())
runtime = get_runtime_settings()
configure_logging(runtime.log_level, runtime.log_file)
asyncio.create_task(run_daily_jellyfin_sync())

View File

@@ -1,16 +1,27 @@
from typing import Any, Dict, List
from typing import Any, Dict, List, Optional
from datetime import datetime, timedelta, timezone
import ipaddress
import os
from urllib.parse import urlparse, urlunparse
from fastapi import APIRouter, HTTPException, Depends, UploadFile, File
from ..auth import require_admin
from ..auth import require_admin, get_current_user
from ..config import settings as env_settings
from ..db import (
delete_setting,
get_all_users,
get_cached_requests,
get_cached_requests_count,
get_request_cache_overview,
get_request_cache_missing_titles,
get_request_cache_stats,
get_settings_overrides,
get_user_by_id,
get_user_by_username,
get_user_request_stats,
create_user_if_missing,
set_user_jellyseerr_id,
set_setting,
set_user_blocked,
set_user_password,
@@ -21,6 +32,8 @@ from ..db import (
clear_history,
cleanup_history,
update_request_cache_title,
repair_request_cache_titles,
delete_non_admin_users,
)
from ..runtime import get_runtime_settings
from ..clients.sonarr import SonarrClient
@@ -28,6 +41,14 @@ from ..clients.radarr import RadarrClient
from ..clients.jellyfin import JellyfinClient
from ..clients.jellyseerr import JellyseerrClient
from ..services.jellyfin_sync import sync_jellyfin_users
from ..services.user_cache import (
build_jellyseerr_candidate_map,
get_cached_jellyfin_users,
get_cached_jellyseerr_users,
match_jellyseerr_user_id,
save_jellyfin_users_cache,
save_jellyseerr_users_cache,
)
import logging
from ..logging_config import configure_logging
from ..routers import requests as requests_router
@@ -45,6 +66,16 @@ SENSITIVE_KEYS = {
"qbittorrent_password",
}
URL_SETTING_KEYS = {
"jellyseerr_base_url",
"jellyfin_base_url",
"jellyfin_public_url",
"sonarr_base_url",
"radarr_base_url",
"prowlarr_base_url",
"qbittorrent_base_url",
}
SETTING_KEYS: List[str] = [
"jellyseerr_base_url",
"jellyseerr_api_key",
@@ -77,13 +108,60 @@ SETTING_KEYS: List[str] = [
"requests_cleanup_time",
"requests_cleanup_days",
"requests_data_source",
"site_build_number",
"site_banner_enabled",
"site_banner_message",
"site_banner_tone",
"site_changelog",
]
def _normalize_username(value: str) -> str:
normalized = value.strip().lower()
if "@" in normalized:
normalized = normalized.split("@", 1)[0]
return normalized
def _is_ip_host(host: str) -> bool:
try:
ipaddress.ip_address(host)
return True
except ValueError:
return False
def _normalize_service_url(value: str) -> str:
raw = value.strip()
if not raw:
raise ValueError("URL cannot be empty.")
candidate = raw
if "://" not in candidate:
authority = candidate.split("/", 1)[0].strip()
if authority.startswith("["):
closing = authority.find("]")
host = authority[1:closing] if closing > 0 else authority.strip("[]")
else:
host = authority.split(":", 1)[0]
host = host.strip().lower()
default_scheme = "http" if host in {"localhost"} or _is_ip_host(host) or "." not in host else "https"
candidate = f"{default_scheme}://{candidate}"
parsed = urlparse(candidate)
if parsed.scheme not in {"http", "https"}:
raise ValueError("URL must use http:// or https://.")
if not parsed.netloc:
raise ValueError("URL must include a host.")
if parsed.query or parsed.fragment:
raise ValueError("URL must not include query params or fragments.")
if not parsed.hostname:
raise ValueError("URL must include a valid host.")
normalized_path = parsed.path.rstrip("/")
normalized = parsed._replace(path=normalized_path, params="", query="", fragment="")
result = urlunparse(normalized).rstrip("/")
if not result:
raise ValueError("URL is invalid.")
return result
def _normalize_root_folders(folders: Any) -> List[Dict[str, Any]]:
if not isinstance(folders, list):
return []
@@ -99,6 +177,38 @@ def _normalize_root_folders(folders: Any) -> List[Dict[str, Any]]:
return results
async def _hydrate_cache_titles_from_jellyseerr(limit: int) -> int:
runtime = get_runtime_settings()
client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
if not client.configured():
return 0
missing = get_request_cache_missing_titles(limit)
if not missing:
return 0
hydrated = 0
for row in missing:
tmdb_id = row.get("tmdb_id")
media_type = row.get("media_type")
request_id = row.get("request_id")
if not tmdb_id or not media_type or not request_id:
continue
try:
title, year = await requests_router._hydrate_title_from_tmdb(
client, media_type, tmdb_id
)
except Exception:
logger.warning(
"Requests cache title hydrate failed: request_id=%s tmdb_id=%s",
request_id,
tmdb_id,
)
continue
if title:
update_request_cache_title(request_id, title, year)
hydrated += 1
return hydrated
def _normalize_quality_profiles(profiles: Any) -> List[Dict[str, Any]]:
if not isinstance(profiles, list):
return []
@@ -148,7 +258,14 @@ async def update_settings(payload: Dict[str, Any]) -> Dict[str, Any]:
delete_setting(key)
updates += 1
continue
set_setting(key, str(value))
value_to_store = str(value).strip() if isinstance(value, str) else str(value)
if key in URL_SETTING_KEYS and value_to_store:
try:
value_to_store = _normalize_service_url(value_to_store)
except ValueError as exc:
friendly_key = key.replace("_", " ")
raise HTTPException(status_code=400, detail=f"{friendly_key}: {exc}") from exc
set_setting(key, value_to_store)
updates += 1
if key in {"log_level", "log_file"}:
touched_logging = True
@@ -188,6 +305,9 @@ async def radarr_options() -> Dict[str, Any]:
@router.get("/jellyfin/users")
async def jellyfin_users() -> Dict[str, Any]:
cached = get_cached_jellyfin_users()
if cached is not None:
return {"users": cached}
runtime = get_runtime_settings()
client = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key)
if not client.configured():
@@ -195,18 +315,7 @@ async def jellyfin_users() -> Dict[str, Any]:
users = await client.get_users()
if not isinstance(users, list):
return {"users": []}
results = []
for user in users:
if not isinstance(user, dict):
continue
results.append(
{
"id": user.get("Id"),
"name": user.get("Name"),
"hasPassword": user.get("HasPassword"),
"lastLoginDate": user.get("LastLoginDate"),
}
)
results = save_jellyfin_users_cache(users)
return {"users": results}
@@ -215,6 +324,106 @@ async def jellyfin_users_sync() -> Dict[str, Any]:
imported = await sync_jellyfin_users()
return {"status": "ok", "imported": imported}
async def _fetch_all_jellyseerr_users(
client: JellyseerrClient, use_cache: bool = True
) -> List[Dict[str, Any]]:
if use_cache:
cached = get_cached_jellyseerr_users()
if cached is not None:
return cached
users: List[Dict[str, Any]] = []
take = 100
skip = 0
while True:
payload = await client.get_users(take=take, skip=skip)
if not payload:
break
if isinstance(payload, list):
batch = payload
elif isinstance(payload, dict):
batch = payload.get("results") or payload.get("users") or payload.get("data") or payload.get("items")
else:
batch = None
if not isinstance(batch, list) or not batch:
break
users.extend([user for user in batch if isinstance(user, dict)])
if len(batch) < take:
break
skip += take
if users:
return save_jellyseerr_users_cache(users)
return users
@router.post("/jellyseerr/users/sync")
async def jellyseerr_users_sync() -> Dict[str, Any]:
runtime = get_runtime_settings()
client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
if not client.configured():
raise HTTPException(status_code=400, detail="Jellyseerr not configured")
jellyseerr_users = await _fetch_all_jellyseerr_users(client, use_cache=False)
if not jellyseerr_users:
return {"status": "ok", "matched": 0, "skipped": 0, "total": 0}
candidate_to_id = build_jellyseerr_candidate_map(jellyseerr_users)
updated = 0
skipped = 0
users = get_all_users()
for user in users:
if user.get("jellyseerr_user_id") is not None:
skipped += 1
continue
username = user.get("username") or ""
matched_id = match_jellyseerr_user_id(username, candidate_to_id)
if matched_id is not None:
set_user_jellyseerr_id(username, matched_id)
updated += 1
else:
skipped += 1
return {"status": "ok", "matched": updated, "skipped": skipped, "total": len(users)}
def _pick_jellyseerr_username(user: Dict[str, Any]) -> Optional[str]:
for key in ("email", "username", "displayName", "name"):
value = user.get(key)
if isinstance(value, str) and value.strip():
return value.strip()
return None
@router.post("/jellyseerr/users/resync")
async def jellyseerr_users_resync() -> Dict[str, Any]:
runtime = get_runtime_settings()
client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
if not client.configured():
raise HTTPException(status_code=400, detail="Jellyseerr not configured")
jellyseerr_users = await _fetch_all_jellyseerr_users(client, use_cache=False)
if not jellyseerr_users:
return {"status": "ok", "imported": 0, "cleared": 0}
cleared = delete_non_admin_users()
imported = 0
for user in jellyseerr_users:
user_id = user.get("id") or user.get("userId") or user.get("Id")
try:
user_id = int(user_id)
except (TypeError, ValueError):
continue
username = _pick_jellyseerr_username(user)
if not username:
continue
created = create_user_if_missing(
username,
"jellyseerr-user",
role="user",
auth_provider="jellyseerr",
jellyseerr_user_id=user_id,
)
if created:
imported += 1
else:
set_user_jellyseerr_id(username, user_id)
return {"status": "ok", "imported": imported, "cleared": cleared}
@router.post("/requests/sync")
async def requests_sync() -> Dict[str, Any]:
@@ -243,10 +452,12 @@ async def requests_sync_delta() -> Dict[str, Any]:
@router.post("/requests/artwork/prefetch")
async def requests_artwork_prefetch() -> Dict[str, Any]:
async def requests_artwork_prefetch(only_missing: bool = False) -> Dict[str, Any]:
runtime = get_runtime_settings()
state = await requests_router.start_artwork_prefetch(
runtime.jellyseerr_base_url, runtime.jellyseerr_api_key
runtime.jellyseerr_base_url,
runtime.jellyseerr_api_key,
only_missing=only_missing,
)
logger.info("Admin triggered artwork prefetch: status=%s", state.get("status"))
return {"status": "ok", "prefetch": state}
@@ -256,6 +467,25 @@ async def requests_artwork_prefetch() -> Dict[str, Any]:
async def requests_artwork_status() -> Dict[str, Any]:
return {"status": "ok", "prefetch": requests_router.get_artwork_prefetch_state()}
@router.get("/requests/artwork/summary")
async def requests_artwork_summary() -> Dict[str, Any]:
runtime = get_runtime_settings()
cache_mode = (runtime.artwork_cache_mode or "remote").lower()
stats = get_request_cache_stats()
if cache_mode != "cache":
stats["cache_bytes"] = 0
stats["cache_files"] = 0
stats["missing_artwork"] = 0
summary = {
"cache_mode": cache_mode,
"cache_bytes": stats.get("cache_bytes", 0),
"cache_files": stats.get("cache_files", 0),
"missing_artwork": stats.get("missing_artwork", 0),
"total_requests": stats.get("total_requests", 0),
"updated_at": stats.get("updated_at"),
}
return {"status": "ok", "summary": summary}
@router.get("/requests/sync/status")
async def requests_sync_status() -> Dict[str, Any]:
@@ -282,30 +512,50 @@ async def read_logs(lines: int = 200) -> Dict[str, Any]:
@router.get("/requests/cache")
async def requests_cache(limit: int = 50) -> Dict[str, Any]:
repaired = repair_request_cache_titles()
if repaired:
logger.info("Requests cache titles repaired via settings view: %s", repaired)
hydrated = await _hydrate_cache_titles_from_jellyseerr(limit)
if hydrated:
logger.info("Requests cache titles hydrated via Jellyseerr: %s", hydrated)
rows = get_request_cache_overview(limit)
missing_titles = [row for row in rows if not row.get("title")]
if missing_titles:
runtime = get_runtime_settings()
client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
if client.configured():
for row in missing_titles:
request_id = row.get("request_id")
if not isinstance(request_id, int):
continue
details = await requests_router._get_request_details(client, request_id)
if not isinstance(details, dict):
continue
payload = requests_router._parse_request_payload(details)
title = payload.get("title")
if not title:
continue
row["title"] = title
if payload.get("year"):
row["year"] = payload.get("year")
update_request_cache_title(request_id, title, payload.get("year"))
return {"rows": rows}
@router.get("/requests/all")
async def requests_all(
take: int = 50,
skip: int = 0,
days: Optional[int] = None,
user: Dict[str, str] = Depends(get_current_user),
) -> Dict[str, Any]:
if user.get("role") != "admin":
raise HTTPException(status_code=403, detail="Forbidden")
take = max(1, min(int(take or 50), 200))
skip = max(0, int(skip or 0))
since_iso = None
if days is not None and int(days) > 0:
since_iso = (datetime.now(timezone.utc) - timedelta(days=int(days))).isoformat()
rows = get_cached_requests(limit=take, offset=skip, since_iso=since_iso)
total = get_cached_requests_count(since_iso=since_iso)
results = []
for row in rows:
status = row.get("status")
results.append(
{
"id": row.get("request_id"),
"title": row.get("title"),
"year": row.get("year"),
"type": row.get("media_type"),
"status": status,
"statusLabel": requests_router._status_label(status),
"requestedBy": row.get("requested_by"),
"createdAt": row.get("created_at"),
}
)
return {"results": results, "total": total, "take": take, "skip": skip}
@router.post("/branding/logo")
async def upload_branding_logo(file: UploadFile = File(...)) -> Dict[str, Any]:
return await save_branding_image(file)
@@ -355,9 +605,39 @@ async def clear_logs() -> Dict[str, Any]:
@router.get("/users")
async def list_users() -> Dict[str, Any]:
users = get_all_users()
users = [user for user in get_all_users() if user.get("role") == "admin" or user.get("auth_provider") == "jellyseerr"]
return {"users": users}
@router.get("/users/summary")
async def list_users_summary() -> Dict[str, Any]:
users = [user for user in get_all_users() if user.get("role") == "admin" or user.get("auth_provider") == "jellyseerr"]
results: list[Dict[str, Any]] = []
for user in users:
username = user.get("username") or ""
username_norm = _normalize_username(username) if username else ""
stats = get_user_request_stats(username_norm, user.get("jellyseerr_user_id"))
results.append({**user, "stats": stats})
return {"users": results}
@router.get("/users/{username}")
async def get_user_summary(username: str) -> Dict[str, Any]:
user = get_user_by_username(username)
if not user:
raise HTTPException(status_code=404, detail="User not found")
username_norm = _normalize_username(user.get("username") or "")
stats = get_user_request_stats(username_norm, user.get("jellyseerr_user_id"))
return {"user": user, "stats": stats}
@router.get("/users/id/{user_id}")
async def get_user_summary_by_id(user_id: int) -> Dict[str, Any]:
user = get_user_by_id(user_id)
if not user:
raise HTTPException(status_code=404, detail="User not found")
username_norm = _normalize_username(user.get("username") or "")
stats = get_user_request_stats(username_norm, user.get("jellyseerr_user_id"))
return {"user": user, "stats": stats}
@router.post("/users/{username}/block")
async def block_user(username: str) -> Dict[str, Any]:
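For reference, how the _normalize_service_url helper above behaves for the FQDN support added in build 0202261541; the import path is an assumption based on the backend layout.

from app.routers.settings import _normalize_service_url  # module path assumed

# Bare hosts without a dot (plus IPs and localhost) default to http; FQDNs default to https.
print(_normalize_service_url("jellyseerr:5055"))          # http://jellyseerr:5055
print(_normalize_service_url("requests.example.com"))     # https://requests.example.com
# Explicit schemes are kept and trailing slashes are stripped.
print(_normalize_service_url("http://10.0.0.5:8989/"))    # http://10.0.0.5:8989
print(_normalize_service_url("https://media.example.com/jellyseerr/"))  # https://media.example.com/jellyseerr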

View File

@@ -10,6 +10,7 @@ from ..db import (
get_user_by_username,
set_user_password,
set_jellyfin_auth_cache,
set_user_jellyseerr_id,
get_user_activity,
get_user_activity_summary,
get_user_request_stats,
@@ -21,12 +22,21 @@ from ..clients.jellyfin import JellyfinClient
from ..clients.jellyseerr import JellyseerrClient
from ..security import create_access_token, verify_password
from ..auth import get_current_user
from ..services.user_cache import (
build_jellyseerr_candidate_map,
get_cached_jellyseerr_users,
match_jellyseerr_user_id,
save_jellyfin_users_cache,
)
router = APIRouter(prefix="/auth", tags=["auth"])
def _normalize_username(value: str) -> str:
return value.strip().lower()
normalized = value.strip().lower()
if "@" in normalized:
normalized = normalized.split("@", 1)[0]
return normalized
def _is_recent_jellyfin_auth(last_auth_at: str) -> bool:
@@ -53,6 +63,22 @@ def _has_valid_jellyfin_cache(user: dict, password: str) -> bool:
return False
return _is_recent_jellyfin_auth(last_auth_at)
def _extract_jellyseerr_user_id(response: dict) -> int | None:
if not isinstance(response, dict):
return None
candidate = response
if isinstance(response.get("user"), dict):
candidate = response.get("user")
for key in ("id", "userId", "Id"):
value = candidate.get(key) if isinstance(candidate, dict) else None
if value is None:
continue
try:
return int(value)
except (TypeError, ValueError):
continue
return None
@router.post("/login")
async def login(form_data: OAuth2PasswordRequestForm = Depends()) -> dict:
@@ -76,6 +102,8 @@ async def jellyfin_login(form_data: OAuth2PasswordRequestForm = Depends()) -> di
client = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key)
if not client.configured():
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Jellyfin not configured")
jellyseerr_users = get_cached_jellyseerr_users()
candidate_map = build_jellyseerr_candidate_map(jellyseerr_users or [])
username = form_data.username
password = form_data.password
user = get_user_by_username(username)
@@ -98,15 +126,20 @@ async def jellyfin_login(form_data: OAuth2PasswordRequestForm = Depends()) -> di
try:
users = await client.get_users()
if isinstance(users, list):
for user in users:
if not isinstance(user, dict):
save_jellyfin_users_cache(users)
for jellyfin_user in users:
if not isinstance(jellyfin_user, dict):
continue
name = user.get("Name")
name = jellyfin_user.get("Name")
if isinstance(name, str) and name:
create_user_if_missing(name, "jellyfin-user", role="user", auth_provider="jellyfin")
except Exception:
pass
set_jellyfin_auth_cache(username, password)
if user and user.get("jellyseerr_user_id") is None and candidate_map:
matched_id = match_jellyseerr_user_id(username, candidate_map)
if matched_id is not None:
set_user_jellyseerr_id(username, matched_id)
token = create_access_token(username, "user")
set_last_login(username)
return {"access_token": token, "token_type": "bearer", "user": {"username": username, "role": "user"}}
@@ -125,10 +158,19 @@ async def jellyseerr_login(form_data: OAuth2PasswordRequestForm = Depends()) ->
raise HTTPException(status_code=status.HTTP_502_BAD_GATEWAY, detail=str(exc)) from exc
if not isinstance(response, dict):
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid Jellyseerr credentials")
create_user_if_missing(form_data.username, "jellyseerr-user", role="user", auth_provider="jellyseerr")
jellyseerr_user_id = _extract_jellyseerr_user_id(response)
create_user_if_missing(
form_data.username,
"jellyseerr-user",
role="user",
auth_provider="jellyseerr",
jellyseerr_user_id=jellyseerr_user_id,
)
user = get_user_by_username(form_data.username)
if user and user.get("is_blocked"):
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User is blocked")
if jellyseerr_user_id is not None:
set_user_jellyseerr_id(form_data.username, jellyseerr_user_id)
token = create_access_token(form_data.username, "user")
set_last_login(form_data.username)
return {"access_token": token, "token_type": "bearer", "user": {"username": form_data.username, "role": "user"}}
@@ -143,7 +185,7 @@ async def me(current_user: dict = Depends(get_current_user)) -> dict:
async def profile(current_user: dict = Depends(get_current_user)) -> dict:
username = current_user.get("username") or ""
username_norm = _normalize_username(username) if username else ""
stats = get_user_request_stats(username_norm)
stats = get_user_request_stats(username_norm, current_user.get("jellyseerr_user_id"))
global_total = get_global_request_total()
share = (stats.get("total", 0) / global_total) if global_total else 0
activity_summary = get_user_activity_summary(username) if username else {}
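The Jellyseerr login response can nest the account under a "user" key or expose the id at the top level; _extract_jellyseerr_user_id handles both. Illustrative values only, with the module path assumed from the backend layout.

from app.routers.auth import _extract_jellyseerr_user_id  # module path assumed

print(_extract_jellyseerr_user_id({"user": {"id": 7}}))      # 7
print(_extract_jellyseerr_user_id({"id": "12"}))             # 12 (string ids are coerced)
print(_extract_jellyseerr_user_id({"user": {"id": "n/a"}}))  # None (unparseable id)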

View File

@@ -11,6 +11,10 @@ router = APIRouter(prefix="/branding", tags=["branding"])
_BRANDING_DIR = os.path.join(os.getcwd(), "data", "branding")
_LOGO_PATH = os.path.join(_BRANDING_DIR, "logo.png")
_FAVICON_PATH = os.path.join(_BRANDING_DIR, "favicon.ico")
_BUNDLED_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "assets", "branding"))
_BUNDLED_LOGO_PATH = os.path.join(_BUNDLED_DIR, "logo.png")
_BUNDLED_FAVICON_PATH = os.path.join(_BUNDLED_DIR, "favicon.ico")
_BRANDING_SOURCE = os.getenv("BRANDING_SOURCE", "bundled").lower()
def _ensure_branding_dir() -> None:
@@ -41,6 +45,18 @@ def _ensure_default_branding() -> None:
if os.path.exists(_LOGO_PATH) and os.path.exists(_FAVICON_PATH):
return
_ensure_branding_dir()
if not os.path.exists(_LOGO_PATH) and os.path.exists(_BUNDLED_LOGO_PATH):
try:
with open(_BUNDLED_LOGO_PATH, "rb") as source, open(_LOGO_PATH, "wb") as target:
target.write(source.read())
except OSError:
pass
if not os.path.exists(_FAVICON_PATH) and os.path.exists(_BUNDLED_FAVICON_PATH):
try:
with open(_BUNDLED_FAVICON_PATH, "rb") as source, open(_FAVICON_PATH, "wb") as target:
target.write(source.read())
except OSError:
pass
if not os.path.exists(_LOGO_PATH):
image = Image.new("RGBA", (300, 300), (12, 18, 28, 255))
draw = ImageDraw.Draw(image)
@@ -65,24 +81,32 @@ def _ensure_default_branding() -> None:
favicon.save(_FAVICON_PATH, format="ICO")
def _resolve_branding_paths() -> tuple[str, str]:
if _BRANDING_SOURCE == "data":
_ensure_default_branding()
return _LOGO_PATH, _FAVICON_PATH
if os.path.exists(_BUNDLED_LOGO_PATH) and os.path.exists(_BUNDLED_FAVICON_PATH):
return _BUNDLED_LOGO_PATH, _BUNDLED_FAVICON_PATH
_ensure_default_branding()
return _LOGO_PATH, _FAVICON_PATH
@router.get("/logo.png")
async def branding_logo() -> FileResponse:
if not os.path.exists(_LOGO_PATH):
_ensure_default_branding()
if not os.path.exists(_LOGO_PATH):
logo_path, _ = _resolve_branding_paths()
if not os.path.exists(logo_path):
raise HTTPException(status_code=404, detail="Logo not found")
headers = {"Cache-Control": "public, max-age=300"}
return FileResponse(_LOGO_PATH, media_type="image/png", headers=headers)
headers = {"Cache-Control": "no-store"}
return FileResponse(logo_path, media_type="image/png", headers=headers)
@router.get("/favicon.ico")
async def branding_favicon() -> FileResponse:
if not os.path.exists(_FAVICON_PATH):
_ensure_default_branding()
if not os.path.exists(_FAVICON_PATH):
_, favicon_path = _resolve_branding_paths()
if not os.path.exists(favicon_path):
raise HTTPException(status_code=404, detail="Favicon not found")
headers = {"Cache-Control": "public, max-age=300"}
return FileResponse(_FAVICON_PATH, media_type="image/x-icon", headers=headers)
headers = {"Cache-Control": "no-store"}
return FileResponse(favicon_path, media_type="image/x-icon", headers=headers)
async def save_branding_image(file: UploadFile) -> Dict[str, Any]:

View File

@@ -1,6 +1,8 @@
import os
import re
import mimetypes
import logging
from typing import Optional
from fastapi import APIRouter, HTTPException, Response
from fastapi.responses import FileResponse, RedirectResponse
import httpx
@@ -11,6 +13,7 @@ router = APIRouter(prefix="/images", tags=["images"])
_TMDB_BASE = "https://image.tmdb.org/t/p"
_ALLOWED_SIZES = {"w92", "w154", "w185", "w342", "w500", "w780", "original"}
logger = logging.getLogger(__name__)
def _safe_filename(path: str) -> str:
@@ -19,13 +22,24 @@ def _safe_filename(path: str) -> str:
safe = re.sub(r"[^A-Za-z0-9_.-]", "_", trimmed)
return safe or "image"
async def cache_tmdb_image(path: str, size: str = "w342") -> bool:
def tmdb_cache_path(path: str, size: str) -> Optional[str]:
if not path or "://" in path or ".." in path:
return False
return None
if not path.startswith("/"):
path = f"/{path}"
if size not in _ALLOWED_SIZES:
return None
cache_dir = os.path.join(os.getcwd(), "data", "artwork", "tmdb", size)
return os.path.join(cache_dir, _safe_filename(path))
def is_tmdb_cached(path: str, size: str) -> bool:
file_path = tmdb_cache_path(path, size)
return bool(file_path and os.path.exists(file_path))
async def cache_tmdb_image(path: str, size: str = "w342") -> bool:
if not path or "://" in path or ".." in path:
return False
runtime = get_runtime_settings()
@@ -33,9 +47,10 @@ async def cache_tmdb_image(path: str, size: str = "w342") -> bool:
if cache_mode != "cache":
return False
cache_dir = os.path.join(os.getcwd(), "data", "artwork", "tmdb", size)
os.makedirs(cache_dir, exist_ok=True)
file_path = os.path.join(cache_dir, _safe_filename(path))
file_path = tmdb_cache_path(path, size)
if not file_path:
return False
os.makedirs(os.path.dirname(file_path), exist_ok=True)
if os.path.exists(file_path):
return True
@@ -64,9 +79,10 @@ async def tmdb_image(path: str, size: str = "w342"):
if cache_mode != "cache":
return RedirectResponse(url=url)
cache_dir = os.path.join(os.getcwd(), "data", "artwork", "tmdb", size)
os.makedirs(cache_dir, exist_ok=True)
file_path = os.path.join(cache_dir, _safe_filename(path))
file_path = tmdb_cache_path(path, size)
if not file_path:
raise HTTPException(status_code=400, detail="Invalid image path")
os.makedirs(os.path.dirname(file_path), exist_ok=True)
headers = {"Cache-Control": "public, max-age=86400"}
if os.path.exists(file_path):
media_type = mimetypes.guess_type(file_path)[0] or "image/jpeg"
@@ -77,6 +93,8 @@ async def tmdb_image(path: str, size: str = "w342"):
if os.path.exists(file_path):
media_type = mimetypes.guess_type(file_path)[0] or "image/jpeg"
return FileResponse(file_path, media_type=media_type, headers=headers)
raise HTTPException(status_code=502, detail="Image cache failed")
except httpx.HTTPError as exc:
raise HTTPException(status_code=502, detail=f"Image fetch failed: {exc}") from exc
logger.warning("TMDB cache miss after fetch: path=%s size=%s", path, size)
except (httpx.HTTPError, OSError) as exc:
logger.warning("TMDB cache failed: path=%s size=%s error=%s", path, size, exc)
return RedirectResponse(url=url)
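
A minimal standalone sketch of the new path mapping: it mirrors tmdb_cache_path and _safe_filename above (the filename trim step is an assumption, since the top of _safe_filename sits outside this hunk) and shows how a TMDB poster path resolves to data/artwork/tmdb/<size>/<name>, or is rejected.

# Illustrative sketch only; mirrors tmdb_cache_path/_safe_filename rather than
# importing the application module.
import os
import re

ALLOWED_SIZES = {"w92", "w154", "w185", "w342", "w500", "w780", "original"}

def safe_filename(path: str) -> str:
    # Assumed trim step: drop the leading slash, then replace unsafe characters.
    trimmed = path.lstrip("/")
    safe = re.sub(r"[^A-Za-z0-9_.-]", "_", trimmed)
    return safe or "image"

def cache_path(path: str, size: str = "w342"):
    # Reject absolute URLs, traversal attempts, and unknown sizes, as above.
    if not path or "://" in path or ".." in path:
        return None
    if size not in ALLOWED_SIZES:
        return None
    if not path.startswith("/"):
        path = f"/{path}"
    cache_dir = os.path.join(os.getcwd(), "data", "artwork", "tmdb", size)
    return os.path.join(cache_dir, safe_filename(path))

if __name__ == "__main__":
    print(cache_path("/abc123.jpg"))            # .../data/artwork/tmdb/w342/abc123.jpg
    print(cache_path("../etc/passwd"))          # None: traversal rejected
    print(cache_path("/abc123.jpg", "w9999"))   # None: size not allowed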

View File

@@ -3,6 +3,7 @@ import asyncio
import httpx
import json
import logging
import os
import time
from urllib.parse import quote
from datetime import datetime, timezone, timedelta
@@ -17,7 +18,7 @@ from ..clients.prowlarr import ProwlarrClient
from ..ai.triage import triage_snapshot
from ..auth import get_current_user
from ..runtime import get_runtime_settings
from .images import cache_tmdb_image
from .images import cache_tmdb_image, is_tmdb_cached
from ..db import (
save_action,
get_recent_actions,
@@ -30,10 +31,16 @@ from ..db import (
get_request_cache_last_updated,
get_request_cache_count,
get_request_cache_payloads,
get_request_cache_payloads_missing,
repair_request_cache_titles,
prune_duplicate_requests_cache,
upsert_request_cache,
upsert_artwork_cache_status,
get_artwork_cache_missing_count,
get_artwork_cache_status_count,
get_setting,
set_setting,
update_artwork_cache_stats,
cleanup_history,
)
from ..models import Snapshot, TriageResult, RequestType
@@ -64,6 +71,7 @@ _artwork_prefetch_state: Dict[str, Any] = {
"processed": 0,
"total": 0,
"message": "",
"only_missing": False,
"started_at": None,
"finished_at": None,
}
@@ -105,6 +113,10 @@ def _normalize_username(value: Any) -> Optional[str]:
if not isinstance(value, str):
return None
normalized = value.strip().lower()
if not normalized:
return None
if "@" in normalized:
normalized = normalized.split("@", 1)[0]
return normalized if normalized else None
@@ -156,6 +168,21 @@ def _normalize_requested_by(request_data: Any) -> Optional[str]:
normalized = normalized.split("@", 1)[0]
return normalized
def _extract_requested_by_id(request_data: Any) -> Optional[int]:
if not isinstance(request_data, dict):
return None
requested_by = request_data.get("requestedBy") or request_data.get("requestedByUser")
if isinstance(requested_by, dict):
for key in ("id", "userId", "Id"):
value = requested_by.get(key)
if value is None:
continue
try:
return int(value)
except (TypeError, ValueError):
continue
return None
def _format_upstream_error(service: str, exc: httpx.HTTPStatusError) -> str:
response = exc.response
@@ -198,6 +225,7 @@ def _parse_request_payload(item: Dict[str, Any]) -> Dict[str, Any]:
updated_at = item.get("updatedAt") or created_at
requested_by = _request_display_name(item)
requested_by_norm = _normalize_requested_by(item)
requested_by_id = _extract_requested_by_id(item)
return {
"request_id": item.get("id"),
"media_id": media_id,
@@ -208,6 +236,7 @@ def _parse_request_payload(item: Dict[str, Any]) -> Dict[str, Any]:
"year": year,
"requested_by": requested_by,
"requested_by_norm": requested_by_norm,
"requested_by_id": requested_by_id,
"created_at": created_at,
"updated_at": updated_at,
}
@@ -226,6 +255,108 @@ def _extract_artwork_paths(item: Dict[str, Any]) -> tuple[Optional[str], Optiona
backdrop_path = item.get("backdropPath") or item.get("backdrop_path")
return poster_path, backdrop_path
def _extract_tmdb_lookup(payload: Dict[str, Any]) -> tuple[Optional[int], Optional[str]]:
media = payload.get("media") or {}
if not isinstance(media, dict):
media = {}
tmdb_id = media.get("tmdbId") or payload.get("tmdbId")
media_type = (
media.get("mediaType")
or payload.get("mediaType")
or payload.get("type")
)
try:
tmdb_id = int(tmdb_id) if tmdb_id is not None else None
except (TypeError, ValueError):
tmdb_id = None
if isinstance(media_type, str):
media_type = media_type.strip().lower() or None
else:
media_type = None
return tmdb_id, media_type
def _artwork_missing_for_payload(payload: Dict[str, Any]) -> bool:
poster_path, backdrop_path = _extract_artwork_paths(payload)
tmdb_id, media_type = _extract_tmdb_lookup(payload)
can_hydrate = bool(tmdb_id and media_type)
if poster_path:
if not is_tmdb_cached(poster_path, "w185") or not is_tmdb_cached(poster_path, "w342"):
return True
elif can_hydrate:
return True
if backdrop_path:
if not is_tmdb_cached(backdrop_path, "w780"):
return True
elif can_hydrate:
return True
return False
def _compute_cached_flags(
poster_path: Optional[str],
backdrop_path: Optional[str],
cache_mode: str,
poster_cached: Optional[bool] = None,
backdrop_cached: Optional[bool] = None,
) -> tuple[bool, bool]:
if cache_mode != "cache":
return True, True
poster = poster_cached
backdrop = backdrop_cached
if poster is None:
poster = bool(poster_path) and is_tmdb_cached(poster_path, "w185") and is_tmdb_cached(
poster_path, "w342"
)
if backdrop is None:
backdrop = bool(backdrop_path) and is_tmdb_cached(backdrop_path, "w780")
return bool(poster), bool(backdrop)
def _upsert_artwork_status(
payload: Dict[str, Any],
cache_mode: str,
poster_cached: Optional[bool] = None,
backdrop_cached: Optional[bool] = None,
) -> None:
parsed = _parse_request_payload(payload)
request_id = parsed.get("request_id")
if not isinstance(request_id, int):
return
tmdb_id, media_type = _extract_tmdb_lookup(payload)
poster_path, backdrop_path = _extract_artwork_paths(payload)
has_tmdb = bool(tmdb_id and media_type)
poster_cached_flag, backdrop_cached_flag = _compute_cached_flags(
poster_path, backdrop_path, cache_mode, poster_cached, backdrop_cached
)
upsert_artwork_cache_status(
request_id=request_id,
tmdb_id=tmdb_id,
media_type=media_type,
poster_path=poster_path,
backdrop_path=backdrop_path,
has_tmdb=has_tmdb,
poster_cached=poster_cached_flag,
backdrop_cached=backdrop_cached_flag,
)
def _collect_artwork_cache_disk_stats() -> tuple[int, int]:
cache_root = os.path.join(os.getcwd(), "data", "artwork")
total_bytes = 0
total_files = 0
if not os.path.isdir(cache_root):
return 0, 0
for root, _, files in os.walk(cache_root):
for name in files:
path = os.path.join(root, name)
try:
total_bytes += os.path.getsize(path)
total_files += 1
except OSError:
continue
return total_bytes, total_files
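
For illustration, a standalone version of the walk above plus a human-readable rendering in the spirit of the frontend's formatBytes helper. The cache root path is an assumption; any directory works.

# Minimal sketch, assuming ./data/artwork as the cache root.
import os

def disk_stats(root: str) -> tuple[int, int]:
    total_bytes = 0
    total_files = 0
    for dirpath, _, files in os.walk(root):
        for name in files:
            try:
                total_bytes += os.path.getsize(os.path.join(dirpath, name))
                total_files += 1
            except OSError:
                continue  # file vanished or unreadable; skip it, as the route does
    return total_bytes, total_files

def human(size: float) -> str:
    # Same idea as the frontend formatBytes helper: step through units of 1024.
    for unit in ("B", "KB", "MB", "GB", "TB"):
        if size < 1024 or unit == "TB":
            return f"{size:.1f} {unit}"
        size /= 1024

if __name__ == "__main__":
    total_bytes, files = disk_stats(os.path.join(os.getcwd(), "data", "artwork"))
    print(f"{files} files, {human(total_bytes)}")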
async def _get_request_details(client: JellyseerrClient, request_id: int) -> Optional[Dict[str, Any]]:
cache_key = f"request:{request_id}"
@@ -467,10 +598,13 @@ async def _sync_all_requests(client: JellyseerrClient) -> int:
year=payload.get("year"),
requested_by=payload.get("requested_by"),
requested_by_norm=payload.get("requested_by_norm"),
requested_by_id=payload.get("requested_by_id"),
created_at=payload.get("created_at"),
updated_at=payload.get("updated_at"),
payload_json=payload_json,
)
if isinstance(item, dict):
_upsert_artwork_status(item, cache_mode)
stored += 1
_sync_state["stored"] = stored
if len(items) < take:
@@ -490,6 +624,11 @@ async def _sync_all_requests(client: JellyseerrClient) -> int:
)
set_setting(_sync_last_key, datetime.now(timezone.utc).isoformat())
_refresh_recent_cache_from_db()
if cache_mode == "cache":
update_artwork_cache_stats(
missing_count=get_artwork_cache_missing_count(),
total_requests=get_request_cache_count(),
)
return stored
@@ -597,10 +736,13 @@ async def _sync_delta_requests(client: JellyseerrClient) -> int:
year=payload.get("year"),
requested_by=payload.get("requested_by"),
requested_by_norm=payload.get("requested_by_norm"),
requested_by_id=payload.get("requested_by_id"),
created_at=payload.get("created_at"),
updated_at=payload.get("updated_at"),
payload_json=payload_json,
)
if isinstance(item, dict):
_upsert_artwork_status(item, cache_mode)
stored += 1
page_changed = True
_sync_state["stored"] = stored
@@ -628,10 +770,20 @@ async def _sync_delta_requests(client: JellyseerrClient) -> int:
)
set_setting(_sync_last_key, datetime.now(timezone.utc).isoformat())
_refresh_recent_cache_from_db()
if cache_mode == "cache":
update_artwork_cache_stats(
missing_count=get_artwork_cache_missing_count(),
total_requests=get_request_cache_count(),
)
return stored
async def _prefetch_artwork_cache(client: JellyseerrClient) -> None:
async def _prefetch_artwork_cache(
client: JellyseerrClient,
only_missing: bool = False,
total: Optional[int] = None,
use_missing_query: bool = False,
) -> None:
runtime = get_runtime_settings()
cache_mode = (runtime.artwork_cache_mode or "remote").lower()
if cache_mode != "cache":
@@ -644,35 +796,52 @@ async def _prefetch_artwork_cache(client: JellyseerrClient) -> None:
)
return
total = get_request_cache_count()
total = total if total is not None else get_request_cache_count()
_artwork_prefetch_state.update(
{
"status": "running",
"processed": 0,
"total": total,
"message": "Starting artwork prefetch",
"message": "Starting missing artwork prefetch"
if only_missing
else "Starting artwork prefetch",
"only_missing": only_missing,
"started_at": datetime.now(timezone.utc).isoformat(),
"finished_at": None,
}
)
if only_missing and total == 0:
_artwork_prefetch_state.update(
{
"status": "completed",
"processed": 0,
"message": "No missing artwork to cache.",
"finished_at": datetime.now(timezone.utc).isoformat(),
}
)
return
offset = 0
limit = 200
processed = 0
while True:
if use_missing_query:
batch = get_request_cache_payloads_missing(limit=limit, offset=offset)
else:
batch = get_request_cache_payloads(limit=limit, offset=offset)
if not batch:
break
for row in batch:
payload = row.get("payload")
if not isinstance(payload, dict):
if not only_missing:
processed += 1
continue
if only_missing and not use_missing_query and not _artwork_missing_for_payload(payload):
continue
poster_path, backdrop_path = _extract_artwork_paths(payload)
if not (poster_path or backdrop_path) and client.configured():
tmdb_id, media_type = _extract_tmdb_lookup(payload)
if (not poster_path or not backdrop_path) and client.configured() and tmdb_id and media_type:
media = payload.get("media") or {}
tmdb_id = media.get("tmdbId") or payload.get("tmdbId")
media_type = media.get("mediaType") or payload.get("type")
if tmdb_id and media_type:
hydrated_poster, hydrated_backdrop = await _hydrate_artwork_from_tmdb(
client, media_type, tmdb_id
)
@@ -697,21 +866,31 @@ async def _prefetch_artwork_cache(client: JellyseerrClient) -> None:
year=parsed.get("year"),
requested_by=parsed.get("requested_by"),
requested_by_norm=parsed.get("requested_by_norm"),
requested_by_id=parsed.get("requested_by_id"),
created_at=parsed.get("created_at"),
updated_at=parsed.get("updated_at"),
payload_json=json.dumps(payload, ensure_ascii=True),
)
poster_cached_flag = False
backdrop_cached_flag = False
if poster_path:
try:
poster_cached_flag = bool(
await cache_tmdb_image(poster_path, "w185")
await cache_tmdb_image(poster_path, "w342")
) and bool(await cache_tmdb_image(poster_path, "w342"))
except httpx.HTTPError:
pass
poster_cached_flag = False
if backdrop_path:
try:
await cache_tmdb_image(backdrop_path, "w780")
backdrop_cached_flag = bool(await cache_tmdb_image(backdrop_path, "w780"))
except httpx.HTTPError:
pass
backdrop_cached_flag = False
_upsert_artwork_status(
payload,
cache_mode,
poster_cached=poster_cached_flag if poster_path else None,
backdrop_cached=backdrop_cached_flag if backdrop_path else None,
)
processed += 1
if processed % 25 == 0:
_artwork_prefetch_state.update(
@@ -719,6 +898,15 @@ async def _prefetch_artwork_cache(client: JellyseerrClient) -> None:
)
offset += limit
total_requests = get_request_cache_count()
missing_count = get_artwork_cache_missing_count()
cache_bytes, cache_files = _collect_artwork_cache_disk_stats()
update_artwork_cache_stats(
cache_bytes=cache_bytes,
cache_files=cache_files,
missing_count=missing_count,
total_requests=total_requests,
)
_artwork_prefetch_state.update(
{
"status": "completed",
@@ -729,25 +917,52 @@ async def _prefetch_artwork_cache(client: JellyseerrClient) -> None:
)
async def start_artwork_prefetch(base_url: Optional[str], api_key: Optional[str]) -> Dict[str, Any]:
async def start_artwork_prefetch(
base_url: Optional[str], api_key: Optional[str], only_missing: bool = False
) -> Dict[str, Any]:
global _artwork_prefetch_task
if _artwork_prefetch_task and not _artwork_prefetch_task.done():
return dict(_artwork_prefetch_state)
client = JellyseerrClient(base_url, api_key)
status_count = get_artwork_cache_status_count()
total_requests = get_request_cache_count()
use_missing_query = only_missing and status_count >= total_requests and total_requests > 0
if only_missing and use_missing_query:
total = get_artwork_cache_missing_count()
else:
total = total_requests
_artwork_prefetch_state.update(
{
"status": "running",
"processed": 0,
"total": get_request_cache_count(),
"message": "Starting artwork prefetch",
"total": total,
"message": "Seeding artwork cache status"
if only_missing and not use_missing_query
else ("Starting missing artwork prefetch" if only_missing else "Starting artwork prefetch"),
"only_missing": only_missing,
"started_at": datetime.now(timezone.utc).isoformat(),
"finished_at": None,
}
)
if only_missing and total == 0:
_artwork_prefetch_state.update(
{
"status": "completed",
"processed": 0,
"message": "No missing artwork to cache.",
"finished_at": datetime.now(timezone.utc).isoformat(),
}
)
return dict(_artwork_prefetch_state)
async def _runner() -> None:
try:
await _prefetch_artwork_cache(client)
await _prefetch_artwork_cache(
client,
only_missing=only_missing,
total=total,
use_missing_query=use_missing_query,
)
except Exception:
logger.exception("Artwork prefetch failed")
_artwork_prefetch_state.update(
@@ -794,18 +1009,38 @@ def _recent_cache_stale() -> bool:
return (datetime.now(timezone.utc) - parsed).total_seconds() > RECENT_CACHE_TTL_SECONDS
def _parse_iso_datetime(value: Optional[str]) -> Optional[datetime]:
if not value:
return None
try:
parsed = datetime.fromisoformat(value.replace("Z", "+00:00"))
except ValueError:
return None
if parsed.tzinfo is None:
return parsed.replace(tzinfo=timezone.utc)
return parsed
def _get_recent_from_cache(
requested_by_norm: Optional[str],
requested_by_id: Optional[int],
limit: int,
offset: int,
since_iso: Optional[str],
) -> List[Dict[str, Any]]:
items = _recent_cache.get("items") or []
results = []
since_dt = _parse_iso_datetime(since_iso)
for item in items:
if requested_by_norm and item.get("requested_by_norm") != requested_by_norm:
if requested_by_id is not None:
if item.get("requested_by_id") != requested_by_id:
continue
if since_iso and item.get("created_at") and item["created_at"] < since_iso:
elif requested_by_norm and item.get("requested_by_norm") != requested_by_norm:
continue
if since_dt:
candidate = item.get("created_at") or item.get("updated_at")
item_dt = _parse_iso_datetime(candidate)
if not item_dt or item_dt < since_dt:
continue
results.append(item)
return results[offset : offset + limit]
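
A self-contained sketch of the filtering order above: a Jellyseerr user id match takes precedence, the normalised username is only a fallback, and the since cut-off now compares timezone-aware datetimes instead of raw strings. The items below are invented.

# Self-contained sketch of the recent-cache filtering shown above.
from datetime import datetime, timezone
from typing import Any, Dict, List, Optional

def parse_iso(value: Optional[str]) -> Optional[datetime]:
    if not value:
        return None
    try:
        parsed = datetime.fromisoformat(value.replace("Z", "+00:00"))
    except ValueError:
        return None
    return parsed if parsed.tzinfo else parsed.replace(tzinfo=timezone.utc)

def filter_recent(items, requested_by_norm, requested_by_id, since_iso):
    since_dt = parse_iso(since_iso)
    results: List[Dict[str, Any]] = []
    for item in items:
        if requested_by_id is not None:
            if item.get("requested_by_id") != requested_by_id:
                continue  # id match wins when the Jellyseerr user id is known
        elif requested_by_norm and item.get("requested_by_norm") != requested_by_norm:
            continue  # otherwise fall back to the normalised username
        item_dt = parse_iso(item.get("created_at") or item.get("updated_at"))
        if since_dt and (not item_dt or item_dt < since_dt):
            continue
        results.append(item)
    return results

if __name__ == "__main__":
    items = [
        {"requested_by_id": 7, "requested_by_norm": "alice", "created_at": "2026-01-20T10:00:00Z"},
        {"requested_by_id": 9, "requested_by_norm": "bob", "created_at": "2026-01-28T10:00:00Z"},
    ]
    # Only the second item survives the id filter and the since cut-off.
    print(filter_recent(items, "alice", 9, "2026-01-25T00:00:00+00:00"))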
@@ -814,13 +1049,14 @@ def _get_recent_from_cache(
async def startup_warmup_requests_cache() -> None:
runtime = get_runtime_settings()
client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
if not client.configured():
return
if client.configured():
try:
await _ensure_requests_cache(client)
except httpx.HTTPError as exc:
logger.warning("Requests warmup skipped: %s", exc)
return
repaired = repair_request_cache_titles()
if repaired:
logger.info("Requests cache titles repaired: %s", repaired)
_refresh_recent_cache_from_db()
@@ -968,7 +1204,10 @@ async def _ensure_request_access(
runtime = get_runtime_settings()
mode = (runtime.requests_data_source or "prefer_cache").lower()
cached = get_request_cache_payload(request_id)
if mode != "always_js" and cached is not None:
if mode != "always_js":
if cached is None:
logger.debug("access cache miss: request_id=%s mode=%s", request_id, mode)
raise HTTPException(status_code=404, detail="Request not found in cache")
logger.debug("access cache hit: request_id=%s mode=%s", request_id, mode)
if _request_matches_user(cached, user.get("username", "")):
return
@@ -1249,27 +1488,29 @@ async def recent_requests(
) -> dict:
runtime = get_runtime_settings()
client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
mode = (runtime.requests_data_source or "prefer_cache").lower()
allow_remote = mode == "always_js"
if allow_remote:
if not client.configured():
raise HTTPException(status_code=400, detail="Jellyseerr not configured")
try:
await _ensure_requests_cache(client)
except httpx.HTTPStatusError as exc:
raise HTTPException(status_code=502, detail=str(exc)) from exc
username_norm = _normalize_username(user.get("username", ""))
requested_by_id = user.get("jellyseerr_user_id")
requested_by = None if user.get("role") == "admin" else username_norm
requested_by_id = None if user.get("role") == "admin" else requested_by_id
since_iso = None
if days > 0:
since_iso = (datetime.now(timezone.utc) - timedelta(days=days)).isoformat()
if _recent_cache_stale():
_refresh_recent_cache_from_db()
rows = _get_recent_from_cache(requested_by, take, skip, since_iso)
rows = _get_recent_from_cache(requested_by, requested_by_id, take, skip, since_iso)
cache_mode = (runtime.artwork_cache_mode or "remote").lower()
mode = (runtime.requests_data_source or "prefer_cache").lower()
allow_remote = mode == "always_js"
allow_title_hydrate = mode == "prefer_cache"
allow_artwork_hydrate = allow_remote or allow_title_hydrate
allow_title_hydrate = False
allow_artwork_hydrate = client.configured()
jellyfin = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key)
jellyfin_cache: Dict[str, bool] = {}
@@ -1342,6 +1583,7 @@ async def recent_requests(
year=year or payload.get("year"),
requested_by=payload.get("requested_by"),
requested_by_norm=payload.get("requested_by_norm"),
requested_by_id=payload.get("requested_by_id"),
created_at=payload.get("created_at"),
updated_at=payload.get("updated_at"),
payload_json=json.dumps(details, ensure_ascii=True),
@@ -1389,6 +1631,7 @@ async def recent_requests(
year=payload.get("year"),
requested_by=payload.get("requested_by"),
requested_by_norm=payload.get("requested_by_norm"),
requested_by_id=payload.get("requested_by_id"),
created_at=payload.get("created_at"),
updated_at=payload.get("updated_at"),
payload_json=json.dumps(details, ensure_ascii=True),
@@ -1407,6 +1650,7 @@ async def recent_requests(
"status": status,
"statusLabel": status_label,
"mediaId": row.get("media_id"),
"createdAt": row.get("created_at") or row.get("updated_at"),
"artwork": {
"poster_url": _artwork_url(poster_path, "w185", cache_mode),
"backdrop_url": _artwork_url(backdrop_path, "w780", cache_mode),
@@ -1461,8 +1705,14 @@ async def search_requests(
status_label = _status_label(status)
elif isinstance(media_info_id, int):
username_norm = _normalize_username(user.get("username", ""))
requested_by_id = user.get("jellyseerr_user_id")
requested_by = None if user.get("role") == "admin" else username_norm
cached = get_cached_request_by_media_id(media_info_id, requested_by_norm=requested_by)
requested_by_id = None if user.get("role") == "admin" else requested_by_id
cached = get_cached_request_by_media_id(
media_info_id,
requested_by_norm=requested_by,
requested_by_id=requested_by_id,
)
if cached:
request_id = cached.get("request_id")
status = cached.get("status")
@@ -1814,4 +2064,3 @@ async def action_grab(
save_action, request_id, "grab", "Grab release", "ok", action_message
)
return {"status": "ok", "response": {"qbittorrent": "queued"}}

View File

@@ -1,6 +1,6 @@
from typing import Any, Dict
import httpx
from fastapi import APIRouter, Depends
from fastapi import APIRouter, Depends, HTTPException
from ..auth import get_current_user
from ..runtime import get_runtime_settings
@@ -93,3 +93,42 @@ async def services_status() -> Dict[str, Any]:
overall = "degraded"
return {"overall": overall, "services": services}
@router.post("/services/{service}/test")
async def test_service(service: str) -> Dict[str, Any]:
runtime = get_runtime_settings()
jellyseerr = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
sonarr = SonarrClient(runtime.sonarr_base_url, runtime.sonarr_api_key)
radarr = RadarrClient(runtime.radarr_base_url, runtime.radarr_api_key)
prowlarr = ProwlarrClient(runtime.prowlarr_base_url, runtime.prowlarr_api_key)
qbittorrent = QBittorrentClient(
runtime.qbittorrent_base_url, runtime.qbittorrent_username, runtime.qbittorrent_password
)
jellyfin = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key)
service_key = service.strip().lower()
checks = {
"jellyseerr": (
"Jellyseerr",
jellyseerr.configured(),
lambda: jellyseerr.get_recent_requests(take=1, skip=0),
),
"sonarr": ("Sonarr", sonarr.configured(), sonarr.get_system_status),
"radarr": ("Radarr", radarr.configured(), radarr.get_system_status),
"prowlarr": ("Prowlarr", prowlarr.configured(), prowlarr.get_health),
"qbittorrent": ("qBittorrent", qbittorrent.configured(), qbittorrent.get_app_version),
"jellyfin": ("Jellyfin", jellyfin.configured(), jellyfin.get_system_info),
}
if service_key not in checks:
raise HTTPException(status_code=404, detail="Unknown service")
name, configured, func = checks[service_key]
result = await _check(name, configured, func)
if name == "Prowlarr" and result.get("status") == "up":
health = result.get("detail")
if isinstance(health, list) and health:
result["status"] = "degraded"
result["message"] = "Health warnings"
return result
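
The _check helper is not shown in this hunk, so the sketch below only approximates it, with fake coroutines standing in for the service clients. It illustrates the dispatch-table pattern test_service uses: one entry per service pairing a display name, a configured flag, and an async health call, plus the Prowlarr health-warning downgrade.

# Minimal sketch of the per-service dispatch table; the error handling inside
# run_check is an assumption, since _check itself is not part of this hunk.
import asyncio
from typing import Any, Awaitable, Callable, Dict, Tuple

async def fake_sonarr_status() -> Dict[str, Any]:
    return {"version": "4.0.0"}

async def fake_prowlarr_health() -> list:
    return [{"type": "warning", "message": "indexer unreachable"}]

async def run_check(name: str, configured: bool, func: Callable[[], Awaitable[Any]]) -> Dict[str, Any]:
    if not configured:
        return {"name": name, "status": "not_configured"}
    try:
        detail = await func()
    except Exception as exc:
        return {"name": name, "status": "down", "message": str(exc)}
    result = {"name": name, "status": "up", "detail": detail}
    # Same special case as above: Prowlarr health warnings downgrade the status.
    if name == "Prowlarr" and isinstance(detail, list) and detail:
        result["status"] = "degraded"
        result["message"] = "Health warnings"
    return result

CHECKS: Dict[str, Tuple[str, bool, Callable[[], Awaitable[Any]]]] = {
    "sonarr": ("Sonarr", True, fake_sonarr_status),
    "prowlarr": ("Prowlarr", True, fake_prowlarr_health),
}

if __name__ == "__main__":
    for key, (name, configured, func) in CHECKS.items():
        print(key, asyncio.run(run_check(name, configured, func)))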

View File

@@ -14,6 +14,7 @@ _BOOL_FIELDS = {
"jellyfin_sync_to_arr",
"site_banner_enabled",
}
_SKIP_OVERRIDE_FIELDS = {"site_build_number", "site_changelog"}
def get_runtime_settings():
@@ -22,6 +23,8 @@ def get_runtime_settings():
for key, value in overrides.items():
if value is None:
continue
if key in _SKIP_OVERRIDE_FIELDS:
continue
if key in _INT_FIELDS:
try:
update[key] = int(value)

View File

@@ -3,8 +3,14 @@ import logging
from fastapi import HTTPException
from ..clients.jellyfin import JellyfinClient
from ..db import create_user_if_missing
from ..db import create_user_if_missing, set_user_jellyseerr_id
from ..runtime import get_runtime_settings
from .user_cache import (
build_jellyseerr_candidate_map,
get_cached_jellyseerr_users,
match_jellyseerr_user_id,
save_jellyfin_users_cache,
)
logger = logging.getLogger(__name__)
@@ -17,6 +23,9 @@ async def sync_jellyfin_users() -> int:
users = await client.get_users()
if not isinstance(users, list):
return 0
save_jellyfin_users_cache(users)
jellyseerr_users = get_cached_jellyseerr_users()
candidate_map = build_jellyseerr_candidate_map(jellyseerr_users or [])
imported = 0
for user in users:
if not isinstance(user, dict):
@@ -24,8 +33,18 @@ async def sync_jellyfin_users() -> int:
name = user.get("Name")
if not name:
continue
if create_user_if_missing(name, "jellyfin-user", role="user", auth_provider="jellyfin"):
matched_id = match_jellyseerr_user_id(name, candidate_map) if candidate_map else None
created = create_user_if_missing(
name,
"jellyfin-user",
role="user",
auth_provider="jellyfin",
jellyseerr_user_id=matched_id,
)
if created:
imported += 1
elif matched_id is not None:
set_user_jellyseerr_id(name, matched_id)
return imported

View File

@@ -11,7 +11,14 @@ from ..clients.radarr import RadarrClient
from ..clients.prowlarr import ProwlarrClient
from ..clients.qbittorrent import QBittorrentClient
from ..runtime import get_runtime_settings
from ..db import save_snapshot, get_request_cache_payload, get_recent_snapshots, get_setting, set_setting
from ..db import (
save_snapshot,
get_request_cache_payload,
get_request_cache_by_id,
get_recent_snapshots,
get_setting,
set_setting,
)
from ..models import ActionOption, NormalizedState, RequestType, Snapshot, TimelineHop
logger = logging.getLogger(__name__)
@@ -219,7 +226,21 @@ async def build_snapshot(request_id: str) -> Snapshot:
logging.getLogger(__name__).debug(
"snapshot cache miss: request_id=%s mode=%s", request_id, mode
)
if cached_request is not None:
cache_meta = get_request_cache_by_id(int(request_id))
cached_title = cache_meta.get("title") if cache_meta else None
if cached_title and isinstance(cached_request, dict):
media = cached_request.get("media")
if not isinstance(media, dict):
media = {}
cached_request["media"] = media
if not media.get("title") and not media.get("name"):
media["title"] = cached_title
media["name"] = cached_title
if not cached_request.get("title") and not cached_request.get("name"):
cached_request["title"] = cached_title
allow_remote = mode == "always_js" and jellyseerr.configured()
if not jellyseerr.configured() and not cached_request:
timeline.append(TimelineHop(service="Jellyseerr", status="not_configured"))
timeline.append(TimelineHop(service="Sonarr/Radarr", status="not_configured"))
@@ -227,9 +248,15 @@ async def build_snapshot(request_id: str) -> Snapshot:
timeline.append(TimelineHop(service="qBittorrent", status="not_configured"))
snapshot.timeline = timeline
return snapshot
if cached_request is None and not allow_remote:
timeline.append(TimelineHop(service="Jellyseerr", status="cache_miss"))
snapshot.timeline = timeline
snapshot.state = NormalizedState.unknown
snapshot.state_reason = "Request not found in cache"
return snapshot
jelly_request = cached_request
if (jelly_request is None or mode == "always_js") and jellyseerr.configured():
if allow_remote and (jelly_request is None or mode == "always_js"):
try:
jelly_request = await jellyseerr.get_request(request_id)
logging.getLogger(__name__).debug(
@@ -252,17 +279,25 @@ async def build_snapshot(request_id: str) -> Snapshot:
jelly_status = jelly_request.get("status", "unknown")
jelly_status_label = _status_label(jelly_status)
jelly_type = jelly_request.get("type") or "unknown"
snapshot.title = jelly_request.get("media", {}).get("title", "Unknown")
snapshot.year = jelly_request.get("media", {}).get("year")
snapshot.request_type = RequestType(jelly_type) if jelly_type in {"movie", "tv"} else RequestType.unknown
media = jelly_request.get("media", {}) if isinstance(jelly_request, dict) else {}
if not isinstance(media, dict):
media = {}
snapshot.title = (
media.get("title")
or media.get("name")
or jelly_request.get("title")
or jelly_request.get("name")
or "Unknown"
)
snapshot.year = media.get("year") or jelly_request.get("year")
snapshot.request_type = RequestType(jelly_type) if jelly_type in {"movie", "tv"} else RequestType.unknown
poster_path = None
backdrop_path = None
if isinstance(media, dict):
poster_path = media.get("posterPath") or media.get("poster_path")
backdrop_path = media.get("backdropPath") or media.get("backdrop_path")
if snapshot.title in {None, "", "Unknown"} and jellyseerr.configured():
if snapshot.title in {None, "", "Unknown"} and allow_remote:
tmdb_id = jelly_request.get("media", {}).get("tmdbId")
if tmdb_id:
try:
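
A self-contained sketch of the two title steps above: backfilling media.title/name from the cached row's title, and the resolution order that prefers media.title, then media.name, then the request's own title/name. The payload below is hypothetical.

# Sketch of the cache-title backfill and title resolution used by the snapshot builder.
from typing import Any, Dict, Optional

def backfill_title(cached_request: Dict[str, Any], cached_title: Optional[str]) -> None:
    # Mirror the hydration step: only fill media.title/name when both are empty.
    if not cached_title:
        return
    media = cached_request.get("media")
    if not isinstance(media, dict):
        media = {}
        cached_request["media"] = media
    if not media.get("title") and not media.get("name"):
        media["title"] = cached_title
        media["name"] = cached_title
    if not cached_request.get("title") and not cached_request.get("name"):
        cached_request["title"] = cached_title

def resolve_title(jelly_request: Dict[str, Any]) -> str:
    media = jelly_request.get("media") if isinstance(jelly_request, dict) else {}
    if not isinstance(media, dict):
        media = {}
    return (
        media.get("title")
        or media.get("name")
        or jelly_request.get("title")
        or jelly_request.get("name")
        or "Unknown"
    )

if __name__ == "__main__":
    request = {"id": 42, "media": {"tmdbId": 603}}   # hypothetical cached payload
    backfill_title(request, "The Matrix")            # title taken from the cache row
    print(resolve_title(request))                    # -> "The Matrix"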

View File

@@ -0,0 +1,144 @@
import json
import logging
from datetime import datetime, timezone, timedelta
from typing import Any, Dict, List, Optional
from ..db import get_setting, set_setting
logger = logging.getLogger(__name__)
JELLYSEERR_CACHE_KEY = "jellyseerr_users_cache"
JELLYSEERR_CACHE_AT_KEY = "jellyseerr_users_cached_at"
JELLYFIN_CACHE_KEY = "jellyfin_users_cache"
JELLYFIN_CACHE_AT_KEY = "jellyfin_users_cached_at"
def _now_iso() -> str:
return datetime.now(timezone.utc).isoformat()
def _parse_iso(value: Optional[str]) -> Optional[datetime]:
if not value:
return None
try:
parsed = datetime.fromisoformat(value)
except ValueError:
return None
if parsed.tzinfo is None:
parsed = parsed.replace(tzinfo=timezone.utc)
return parsed
def _cache_is_fresh(cached_at: Optional[str], max_age_minutes: int) -> bool:
parsed = _parse_iso(cached_at)
if not parsed:
return False
age = datetime.now(timezone.utc) - parsed
return age <= timedelta(minutes=max_age_minutes)
def _load_cached_users(
cache_key: str, cache_at_key: str, max_age_minutes: int
) -> Optional[List[Dict[str, Any]]]:
cached_at = get_setting(cache_at_key)
if not _cache_is_fresh(cached_at, max_age_minutes):
return None
raw = get_setting(cache_key)
if not raw:
return None
try:
data = json.loads(raw)
except (TypeError, json.JSONDecodeError):
return None
if isinstance(data, list):
return [item for item in data if isinstance(item, dict)]
return None
def _save_cached_users(cache_key: str, cache_at_key: str, users: List[Dict[str, Any]]) -> None:
payload = json.dumps(users, ensure_ascii=True)
set_setting(cache_key, payload)
set_setting(cache_at_key, _now_iso())
def _normalized_handles(value: Any) -> List[str]:
if not isinstance(value, str):
return []
normalized = value.strip().lower()
if not normalized:
return []
handles = [normalized]
if "@" in normalized:
handles.append(normalized.split("@", 1)[0])
return list(dict.fromkeys(handles))
def build_jellyseerr_candidate_map(users: List[Dict[str, Any]]) -> Dict[str, int]:
candidate_to_id: Dict[str, int] = {}
for user in users:
if not isinstance(user, dict):
continue
user_id = user.get("id") or user.get("userId") or user.get("Id")
try:
user_id = int(user_id)
except (TypeError, ValueError):
continue
for key in ("username", "email", "displayName", "name"):
for handle in _normalized_handles(user.get(key)):
candidate_to_id.setdefault(handle, user_id)
return candidate_to_id
def match_jellyseerr_user_id(
username: str, candidate_map: Dict[str, int]
) -> Optional[int]:
for handle in _normalized_handles(username):
matched = candidate_map.get(handle)
if matched is not None:
return matched
return None
def save_jellyseerr_users_cache(users: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
normalized: List[Dict[str, Any]] = []
for user in users:
if not isinstance(user, dict):
continue
normalized.append(
{
"id": user.get("id") or user.get("userId") or user.get("Id"),
"email": user.get("email"),
"username": user.get("username"),
"displayName": user.get("displayName"),
"name": user.get("name"),
}
)
_save_cached_users(JELLYSEERR_CACHE_KEY, JELLYSEERR_CACHE_AT_KEY, normalized)
logger.debug("Cached Jellyseerr users: %s", len(normalized))
return normalized
def get_cached_jellyseerr_users(max_age_minutes: int = 1440) -> Optional[List[Dict[str, Any]]]:
return _load_cached_users(JELLYSEERR_CACHE_KEY, JELLYSEERR_CACHE_AT_KEY, max_age_minutes)
def save_jellyfin_users_cache(users: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
normalized: List[Dict[str, Any]] = []
for user in users:
if not isinstance(user, dict):
continue
normalized.append(
{
"id": user.get("Id"),
"name": user.get("Name"),
"hasPassword": user.get("HasPassword"),
"lastLoginDate": user.get("LastLoginDate"),
}
)
_save_cached_users(JELLYFIN_CACHE_KEY, JELLYFIN_CACHE_AT_KEY, normalized)
logger.debug("Cached Jellyfin users: %s", len(normalized))
return normalized
def get_cached_jellyfin_users(max_age_minutes: int = 1440) -> Optional[List[Dict[str, Any]]]:
return _load_cached_users(JELLYFIN_CACHE_KEY, JELLYFIN_CACHE_AT_KEY, max_age_minutes)
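
A self-contained sketch of how the candidate map and matcher above pair Jellyfin usernames with Jellyseerr user ids. It mirrors _normalized_handles, build_jellyseerr_candidate_map, and match_jellyseerr_user_id rather than importing them (the module path is not shown here), and the users are invented.

# Sketch of the username/email matching used during Jellyfin user import.
from typing import Any, Dict, List, Optional

def handles(value: Any) -> List[str]:
    if not isinstance(value, str):
        return []
    normalized = value.strip().lower()
    if not normalized:
        return []
    out = [normalized]
    if "@" in normalized:
        out.append(normalized.split("@", 1)[0])  # also try the local part of an email
    return list(dict.fromkeys(out))

def candidate_map(users: List[Dict[str, Any]]) -> Dict[str, int]:
    mapping: Dict[str, int] = {}
    for user in users:
        try:
            user_id = int(user.get("id"))
        except (TypeError, ValueError):
            continue
        for key in ("username", "email", "displayName", "name"):
            for handle in handles(user.get(key)):
                mapping.setdefault(handle, user_id)  # first match wins
    return mapping

def match(username: str, mapping: Dict[str, int]) -> Optional[int]:
    for handle in handles(username):
        if handle in mapping:
            return mapping[handle]
    return None

if __name__ == "__main__":
    jellyseerr_users = [
        {"id": 3, "username": "alice", "email": "alice@example.com"},
        {"id": 5, "email": "bob@example.com", "displayName": "Bob"},
    ]
    mapping = candidate_map(jellyseerr_users)
    print(match("Alice", mapping))            # -> 3 (case-insensitive username)
    print(match("bob@example.com", mapping))  # -> 5 (email, then local part)
    print(match("charlie", mapping))          # -> None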

View File

@@ -1,19 +1,10 @@
services:
backend:
image: rephl3xnz/magent-backend:latest
magent:
image: rephl3xnz/magent:latest
env_file:
- ./.env
ports:
- "3000:3000"
- "8000:8000"
volumes:
- ./data:/app/data
frontend:
image: rephl3xnz/magent-frontend:latest
environment:
- NEXT_PUBLIC_API_BASE=/api
- BACKEND_INTERNAL_URL=http://backend:8000
ports:
- "3000:3000"
depends_on:
- backend

View File

@@ -1,25 +1,12 @@
services:
backend:
magent:
build:
context: .
dockerfile: backend/Dockerfile
args:
BUILD_NUMBER: ${BUILD_NUMBER}
dockerfile: Dockerfile
env_file:
- ./.env
ports:
- "3000:3000"
- "8000:8000"
volumes:
- ./data:/app/data
frontend:
build:
context: ./frontend
dockerfile: Dockerfile
environment:
- NEXT_PUBLIC_API_BASE=/api
- BACKEND_INTERNAL_URL=http://backend:8000
ports:
- "3000:3000"
depends_on:
- backend

docker/supervisord.conf Normal file (28 lines)
View File

@@ -0,0 +1,28 @@
[supervisord]
nodaemon=true
logfile=/dev/null
logfile_maxbytes=0
pidfile=/tmp/supervisord.pid
[program:backend]
directory=/app
command=uvicorn app.main:app --host 0.0.0.0 --port 8000
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0
priority=10
[program:frontend]
directory=/app/frontend
command=/usr/bin/npm start -- --hostname 0.0.0.0 --port 3000
environment=NEXT_PUBLIC_API_BASE="/api",BACKEND_INTERNAL_URL="http://127.0.0.1:8000",NODE_ENV="production"
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0
priority=20

View File

@@ -21,31 +21,40 @@ type ServiceOptions = {
const SECTION_LABELS: Record<string, string> = {
jellyseerr: 'Jellyseerr',
jellyfin: 'Jellyfin',
artwork: 'Artwork',
cache: 'Cache',
artwork: 'Artwork cache',
cache: 'Cache Control',
sonarr: 'Sonarr',
radarr: 'Radarr',
prowlarr: 'Prowlarr',
qbittorrent: 'qBittorrent',
log: 'Activity log',
requests: 'Request syncing',
requests: 'Request sync',
site: 'Site',
}
const BOOL_SETTINGS = new Set(['jellyfin_sync_to_arr', 'site_banner_enabled'])
const TEXTAREA_SETTINGS = new Set(['site_banner_message', 'site_changelog'])
const URL_SETTINGS = new Set([
'jellyseerr_base_url',
'jellyfin_base_url',
'jellyfin_public_url',
'sonarr_base_url',
'radarr_base_url',
'prowlarr_base_url',
'qbittorrent_base_url',
])
const BANNER_TONES = ['info', 'warning', 'error', 'maintenance']
const SECTION_DESCRIPTIONS: Record<string, string> = {
jellyseerr: 'Connect the request system where users submit content.',
jellyfin: 'Control Jellyfin login and availability checks.',
artwork: 'Configure how posters and artwork are loaded.',
cache: 'Manage saved request data and offline artwork.',
artwork: 'Cache posters/backdrops and review artwork coverage.',
cache: 'Manage saved requests cache and refresh behavior.',
sonarr: 'TV automation settings.',
radarr: 'Movie automation settings.',
prowlarr: 'Indexer search settings.',
qbittorrent: 'Downloader connection settings.',
requests: 'Sync and refresh cadence for requests.',
requests: 'Control how often requests are refreshed and cleaned up.',
log: 'Activity log for troubleshooting.',
site: 'Sitewide banner, version, and changelog details.',
}
@@ -53,7 +62,7 @@ const SECTION_DESCRIPTIONS: Record<string, string> = {
const SETTINGS_SECTION_MAP: Record<string, string | null> = {
jellyseerr: 'jellyseerr',
jellyfin: 'jellyfin',
artwork: 'artwork',
artwork: null,
sonarr: 'sonarr',
radarr: 'radarr',
prowlarr: 'prowlarr',
@@ -73,13 +82,13 @@ const labelFromKey = (key: string) =>
.replace('quality profile id', 'Quality profile ID')
.replace('root folder', 'Root folder')
.replace('qbittorrent', 'qBittorrent')
.replace('requests sync ttl minutes', 'Refresh saved requests if older than (minutes)')
.replace('requests poll interval seconds', 'Background refresh check (seconds)')
.replace('requests delta sync interval minutes', 'Check for new or updated requests every (minutes)')
.replace('requests full sync time', 'Full refresh time (24h)')
.replace('requests cleanup time', 'Clean up old history time (24h)')
.replace('requests cleanup days', 'Remove history older than (days)')
.replace('requests data source', 'Where requests are loaded from')
.replace('requests sync ttl minutes', 'Saved request refresh TTL (minutes)')
.replace('requests poll interval seconds', 'Full refresh check interval (seconds)')
.replace('requests delta sync interval minutes', 'Delta sync interval (minutes)')
.replace('requests full sync time', 'Daily full refresh time (24h)')
.replace('requests cleanup time', 'Daily history cleanup time (24h)')
.replace('requests cleanup days', 'History retention window (days)')
.replace('requests data source', 'Request source (cache vs Jellyseerr)')
.replace('jellyfin public url', 'Jellyfin public URL')
.replace('jellyfin sync to arr', 'Sync Jellyfin to Sonarr/Radarr')
.replace('artwork cache mode', 'Artwork cache mode')
@@ -89,6 +98,19 @@ const labelFromKey = (key: string) =>
.replace('site banner tone', 'Sitewide banner tone')
.replace('site changelog', 'Changelog text')
const formatBytes = (value?: number | null) => {
if (!value || value <= 0) return '0 B'
const units = ['B', 'KB', 'MB', 'GB', 'TB']
let size = value
let unitIndex = 0
while (size >= 1024 && unitIndex < units.length - 1) {
size /= 1024
unitIndex += 1
}
const decimals = unitIndex === 0 || size >= 10 ? 0 : 1
return `${size.toFixed(decimals)} ${units[unitIndex]}`
}
type SettingsPageProps = {
section: string
}
@@ -112,8 +134,11 @@ export default function SettingsPage({ section }: SettingsPageProps) {
const [cacheRows, setCacheRows] = useState<any[]>([])
const [cacheCount, setCacheCount] = useState(50)
const [cacheStatus, setCacheStatus] = useState<string | null>(null)
const [cacheLoading, setCacheLoading] = useState(false)
const [requestsSync, setRequestsSync] = useState<any | null>(null)
const [artworkPrefetch, setArtworkPrefetch] = useState<any | null>(null)
const [artworkSummary, setArtworkSummary] = useState<any | null>(null)
const [artworkSummaryStatus, setArtworkSummaryStatus] = useState<string | null>(null)
const [maintenanceStatus, setMaintenanceStatus] = useState<string | null>(null)
const [maintenanceBusy, setMaintenanceBusy] = useState(false)
@@ -165,6 +190,27 @@ export default function SettingsPage({ section }: SettingsPageProps) {
}
}, [])
const loadArtworkSummary = useCallback(async () => {
setArtworkSummaryStatus(null)
try {
const baseUrl = getApiBase()
const response = await authFetch(`${baseUrl}/admin/requests/artwork/summary`)
if (!response.ok) {
const text = await response.text()
throw new Error(text || 'Artwork summary fetch failed')
}
const data = await response.json()
setArtworkSummary(data?.summary ?? null)
} catch (err) {
console.error(err)
const message =
err instanceof Error && err.message
? err.message.replace(/^\{"detail":"|"\}$/g, '')
: 'Could not load artwork stats.'
setArtworkSummaryStatus(message)
}
}, [])
const loadOptions = useCallback(async (service: 'sonarr' | 'radarr') => {
try {
const baseUrl = getApiBase()
@@ -204,8 +250,9 @@ export default function SettingsPage({ section }: SettingsPageProps) {
}
try {
await loadSettings()
if (section === 'artwork') {
if (section === 'cache' || section === 'artwork') {
await loadArtworkPrefetchStatus()
await loadArtworkSummary()
}
} catch (err) {
console.error(err)
@@ -222,7 +269,7 @@ export default function SettingsPage({ section }: SettingsPageProps) {
if (section === 'radarr') {
void loadOptions('radarr')
}
}, [loadArtworkPrefetchStatus, loadOptions, loadSettings, router, section])
}, [loadArtworkPrefetchStatus, loadArtworkSummary, loadOptions, loadSettings, router, section])
const groupedSettings = useMemo(() => {
const groups: Record<string, AdminSetting[]> = {}
@@ -237,28 +284,51 @@ export default function SettingsPage({ section }: SettingsPageProps) {
const settingsSection = SETTINGS_SECTION_MAP[section] ?? null
const visibleSections = settingsSection ? [settingsSection] : []
const isCacheSection = section === 'cache'
const cacheSettingKeys = new Set([
'requests_sync_ttl_minutes',
'requests_data_source',
'artwork_cache_mode',
])
const cacheSettingKeys = new Set(['requests_sync_ttl_minutes', 'requests_data_source'])
const artworkSettingKeys = new Set(['artwork_cache_mode'])
const hiddenSettingKeys = new Set([...cacheSettingKeys, ...artworkSettingKeys])
const requestSettingOrder = [
'requests_poll_interval_seconds',
'requests_delta_sync_interval_minutes',
'requests_full_sync_time',
'requests_cleanup_time',
'requests_cleanup_days',
]
const sortByOrder = (items: AdminSetting[], order: string[]) => {
const position = new Map(order.map((key, index) => [key, index]))
return [...items].sort((a, b) => {
const aIndex = position.get(a.key) ?? Number.POSITIVE_INFINITY
const bIndex = position.get(b.key) ?? Number.POSITIVE_INFINITY
if (aIndex !== bIndex) return aIndex - bIndex
return a.key.localeCompare(b.key)
})
}
const cacheSettings = settings.filter((setting) => cacheSettingKeys.has(setting.key))
const artworkSettings = settings.filter((setting) => artworkSettingKeys.has(setting.key))
const settingsSections = isCacheSection
? [{ key: 'cache', title: 'Cache settings', items: cacheSettings }]
? [
{ key: 'cache', title: 'Cache control', items: cacheSettings },
{ key: 'artwork', title: 'Artwork cache', items: artworkSettings },
]
: visibleSections.map((sectionKey) => ({
key: sectionKey,
title: SECTION_LABELS[sectionKey] ?? sectionKey,
items:
items: (() => {
const sectionItems = groupedSettings[sectionKey] ?? []
const filtered =
sectionKey === 'requests' || sectionKey === 'artwork'
? (groupedSettings[sectionKey] ?? []).filter(
(setting) => !cacheSettingKeys.has(setting.key)
)
: groupedSettings[sectionKey] ?? [],
? sectionItems.filter((setting) => !hiddenSettingKeys.has(setting.key))
: sectionItems
if (sectionKey === 'requests') {
return sortByOrder(filtered, requestSettingOrder)
}
return filtered
})(),
}))
const showLogs = section === 'logs'
const showMaintenance = section === 'maintenance'
const showRequestsExtras = section === 'requests'
const showArtworkExtras = section === 'artwork'
const showArtworkExtras = section === 'cache'
const showCacheExtras = section === 'cache'
const shouldRenderSection = (sectionGroup: { key: string; items?: AdminSetting[] }) => {
if (sectionGroup.items && sectionGroup.items.length > 0) return true
@@ -269,35 +339,43 @@ export default function SettingsPage({ section }: SettingsPageProps) {
}
const settingDescriptions: Record<string, string> = {
jellyseerr_base_url: 'Base URL for your Jellyseerr server.',
jellyseerr_base_url:
'Base URL for your Jellyseerr server (FQDN or IP). Scheme is optional.',
jellyseerr_api_key: 'API key used to read requests and status.',
jellyfin_base_url: 'Local Jellyfin server URL for logins and lookups.',
jellyfin_base_url:
'Jellyfin server URL for logins and lookups (FQDN or IP). Scheme is optional.',
jellyfin_api_key: 'Admin API key for syncing users and availability.',
jellyfin_public_url: 'Public Jellyfin URL used for the “Open in Jellyfin” button.',
jellyfin_public_url:
'Public Jellyfin URL for the “Open in Jellyfin” button (FQDN or IP).',
jellyfin_sync_to_arr: 'Auto-add items to Sonarr/Radarr when they already exist in Jellyfin.',
artwork_cache_mode: 'Choose whether posters are cached locally or loaded from the web.',
sonarr_base_url: 'Sonarr server URL for TV tracking.',
sonarr_base_url: 'Sonarr server URL for TV tracking (FQDN or IP). Scheme is optional.',
sonarr_api_key: 'API key for Sonarr.',
sonarr_quality_profile_id: 'Quality profile used when adding TV shows.',
sonarr_root_folder: 'Root folder where Sonarr stores TV shows.',
sonarr_qbittorrent_category: 'qBittorrent category for manual Sonarr downloads.',
radarr_base_url: 'Radarr server URL for movies.',
radarr_base_url: 'Radarr server URL for movies (FQDN or IP). Scheme is optional.',
radarr_api_key: 'API key for Radarr.',
radarr_quality_profile_id: 'Quality profile used when adding movies.',
radarr_root_folder: 'Root folder where Radarr stores movies.',
radarr_qbittorrent_category: 'qBittorrent category for manual Radarr downloads.',
prowlarr_base_url: 'Prowlarr server URL for indexer searches.',
prowlarr_base_url:
'Prowlarr server URL for indexer searches (FQDN or IP). Scheme is optional.',
prowlarr_api_key: 'API key for Prowlarr.',
qbittorrent_base_url: 'qBittorrent server URL for download status.',
qbittorrent_base_url:
'qBittorrent server URL for download status (FQDN or IP). Scheme is optional.',
qbittorrent_username: 'qBittorrent login username.',
qbittorrent_password: 'qBittorrent login password.',
requests_sync_ttl_minutes: 'How long saved requests stay fresh before a refresh is needed.',
requests_poll_interval_seconds: 'How often the background checker runs.',
requests_delta_sync_interval_minutes: 'How often we check for new or updated requests.',
requests_full_sync_time: 'Daily time to refresh the full request list.',
requests_cleanup_time: 'Daily time to trim old history.',
requests_poll_interval_seconds:
'How often Magent checks if a full refresh should run.',
requests_delta_sync_interval_minutes:
'How often we poll for new or updated requests.',
requests_full_sync_time: 'Daily time to rebuild the full request cache.',
requests_cleanup_time: 'Daily time to trim old request history.',
requests_cleanup_days: 'History older than this is removed during cleanup.',
requests_data_source: 'Pick where Magent should read requests from.',
requests_data_source:
'Pick where Magent should read requests from. Cache-only avoids Jellyseerr lookups on reads.',
log_level: 'How much detail is written to the activity log.',
log_file: 'Where the activity log is stored.',
site_build_number: 'Build number shown in the account menu (auto-set from releases).',
@@ -307,6 +385,16 @@ export default function SettingsPage({ section }: SettingsPageProps) {
site_changelog: 'One update per line for the public changelog.',
}
const settingPlaceholders: Record<string, string> = {
jellyseerr_base_url: 'https://requests.example.com or 10.30.1.81:5055',
jellyfin_base_url: 'https://jelly.example.com or 10.40.0.80:8096',
jellyfin_public_url: 'https://jelly.example.com',
sonarr_base_url: 'https://sonarr.example.com or 10.30.1.81:8989',
radarr_base_url: 'https://radarr.example.com or 10.30.1.81:7878',
prowlarr_base_url: 'https://prowlarr.example.com or 10.30.1.81:9696',
qbittorrent_base_url: 'https://qb.example.com or 10.30.1.81:8080',
}
const buildSelectOptions = (
currentValue: string,
options: { id: number; label: string; path?: string }[],
@@ -462,6 +550,31 @@ export default function SettingsPage({ section }: SettingsPageProps) {
}
}
const prefetchArtworkMissing = async () => {
setArtworkPrefetchStatus(null)
try {
const baseUrl = getApiBase()
const response = await authFetch(
`${baseUrl}/admin/requests/artwork/prefetch?only_missing=1`,
{ method: 'POST' }
)
if (!response.ok) {
const text = await response.text()
throw new Error(text || 'Missing artwork prefetch failed')
}
const data = await response.json()
setArtworkPrefetch(data?.prefetch ?? null)
setArtworkPrefetchStatus('Missing artwork caching started.')
} catch (err) {
console.error(err)
const message =
err instanceof Error && err.message
? err.message.replace(/^\{"detail":"|"\}$/g, '')
: 'Could not cache missing artwork.'
setArtworkPrefetchStatus(message)
}
}
useEffect(() => {
if (!artworkPrefetch || artworkPrefetch.status !== 'running') {
return
@@ -479,6 +592,7 @@ export default function SettingsPage({ section }: SettingsPageProps) {
setArtworkPrefetch(data?.prefetch ?? null)
if (data?.prefetch?.status && data.prefetch.status !== 'running') {
setArtworkPrefetchStatus(data.prefetch.message || 'Artwork caching complete.')
void loadArtworkSummary()
}
} catch (err) {
console.error(err)
@@ -488,7 +602,7 @@ export default function SettingsPage({ section }: SettingsPageProps) {
active = false
clearInterval(timer)
}
}, [artworkPrefetch])
}, [artworkPrefetch, loadArtworkSummary])
useEffect(() => {
if (!artworkPrefetch || artworkPrefetch.status === 'running') {
@@ -578,6 +692,7 @@ export default function SettingsPage({ section }: SettingsPageProps) {
const loadCache = async () => {
setCacheStatus(null)
setCacheLoading(true)
try {
const baseUrl = getApiBase()
const response = await authFetch(
@@ -600,6 +715,8 @@ export default function SettingsPage({ section }: SettingsPageProps) {
? err.message.replace(/^\{"detail":"|"\}$/g, '')
: 'Could not load cache.'
setCacheStatus(message)
} finally {
setCacheLoading(false)
}
}
@@ -714,7 +831,9 @@ export default function SettingsPage({ section }: SettingsPageProps) {
.map((sectionGroup) => (
<section key={sectionGroup.key} className="admin-section">
<div className="section-header">
<h2>{sectionGroup.key === 'requests' ? 'Sync controls' : sectionGroup.title}</h2>
<h2>
{sectionGroup.key === 'requests' ? 'Request sync controls' : sectionGroup.title}
</h2>
{sectionGroup.key === 'sonarr' && (
<button type="button" onClick={() => loadOptions('sonarr')}>
Refresh Sonarr options
@@ -730,24 +849,33 @@ export default function SettingsPage({ section }: SettingsPageProps) {
Import Jellyfin users
</button>
)}
{(showArtworkExtras && sectionGroup.key === 'artwork') ||
(showCacheExtras && sectionGroup.key === 'cache') ? (
{showArtworkExtras && sectionGroup.key === 'artwork' ? (
<div className="sync-actions">
<button type="button" onClick={prefetchArtwork}>
Cache all artwork now
</button>
<button
type="button"
className="ghost-button"
onClick={prefetchArtworkMissing}
>
Sync only missing artwork
</button>
</div>
) : null}
{showRequestsExtras && sectionGroup.key === 'requests' && (
<div className="sync-actions-block">
<div className="sync-actions">
<button type="button" onClick={syncRequests}>
Full refresh (all requests)
Run full refresh (rebuild cache)
</button>
<button type="button" className="ghost-button" onClick={syncRequestsDelta}>
Quick refresh (delta changes)
Run delta sync (recent changes)
</button>
</div>
<div className="meta sync-note">
Full refresh reloads the entire list. Quick refresh only checks recent changes.
Full refresh rebuilds the entire cache. Delta sync only checks new or updated
requests.
</div>
</div>
)}
@@ -764,17 +892,48 @@ export default function SettingsPage({ section }: SettingsPageProps) {
{sectionGroup.key === 'jellyfin' && jellyfinSyncStatus && (
<div className="status-banner">{jellyfinSyncStatus}</div>
)}
{((showArtworkExtras && sectionGroup.key === 'artwork') ||
(showCacheExtras && sectionGroup.key === 'cache')) &&
artworkPrefetchStatus && (
{showArtworkExtras && sectionGroup.key === 'artwork' && artworkPrefetchStatus && (
<div className="status-banner">{artworkPrefetchStatus}</div>
)}
{showArtworkExtras && sectionGroup.key === 'artwork' && artworkSummaryStatus && (
<div className="status-banner">{artworkSummaryStatus}</div>
)}
{showArtworkExtras && sectionGroup.key === 'artwork' && (
<div className="summary">
<div className="summary-card">
<strong>Missing artwork</strong>
<p>{artworkSummary?.missing_artwork ?? '--'}</p>
<div className="meta">Requests missing poster/backdrop or cache files.</div>
</div>
<div className="summary-card">
<strong>Artwork cache size</strong>
<p>{formatBytes(artworkSummary?.cache_bytes)}</p>
<div className="meta">
{artworkSummary?.cache_files ?? '--'} cached files
</div>
</div>
<div className="summary-card">
<strong>Total requests</strong>
<p>{artworkSummary?.total_requests ?? '--'}</p>
<div className="meta">Requests currently tracked in cache.</div>
</div>
<div className="summary-card">
<strong>Cache mode</strong>
<p>{artworkSummary?.cache_mode ?? '--'}</p>
<div className="meta">Artwork setting applied to posters/backdrops.</div>
</div>
</div>
)}
{showRequestsExtras && sectionGroup.key === 'requests' && requestsSyncStatus && (
<div className="status-banner">{requestsSyncStatus}</div>
)}
{((showArtworkExtras && sectionGroup.key === 'artwork') ||
(showCacheExtras && sectionGroup.key === 'cache')) &&
artworkPrefetch && (
{showRequestsExtras && sectionGroup.key === 'requests' && (
<div className="status-banner">
The full refresh check interval only decides when a full refresh runs. The delta sync
interval polls for new or updated requests.
</div>
)}
{showArtworkExtras && sectionGroup.key === 'artwork' && artworkPrefetch && (
<div className="sync-progress">
<div className="sync-meta">
<span>Status: {artworkPrefetch.status}</span>
@@ -847,6 +1006,10 @@ export default function SettingsPage({ section }: SettingsPageProps) {
const isRadarrProfile = setting.key === 'radarr_quality_profile_id'
const isRadarrRoot = setting.key === 'radarr_root_folder'
const isBoolSetting = BOOL_SETTINGS.has(setting.key)
const isUrlSetting = URL_SETTINGS.has(setting.key)
const inputPlaceholder = setting.sensitive && setting.isSet
? 'Configured (enter to replace)'
: settingPlaceholders[setting.key] ?? ''
if (isBoolSetting) {
return (
<label key={setting.key} data-helper={helperText || undefined}>
@@ -1129,7 +1292,9 @@ export default function SettingsPage({ section }: SettingsPageProps) {
}
>
<option value="always_js">Always use Jellyseerr (slower)</option>
<option value="prefer_cache">Use saved requests first (faster)</option>
<option value="prefer_cache">
Use saved requests only (fastest)
</option>
</select>
</label>
)
@@ -1175,9 +1340,8 @@ export default function SettingsPage({ section }: SettingsPageProps) {
<input
name={setting.key}
type={setting.sensitive ? 'password' : 'text'}
placeholder={
setting.sensitive && setting.isSet ? 'Configured (enter to replace)' : ''
}
placeholder={inputPlaceholder}
autoComplete={isUrlSetting ? 'url' : undefined}
value={value}
onChange={(event) =>
setFormValues((current) => ({
@@ -1199,7 +1363,7 @@ export default function SettingsPage({ section }: SettingsPageProps) {
</form>
) : (
<div className="status-banner">
No settings to show here yet. Try the Cache page for artwork and saved-request controls.
No settings to show here yet. Try the Cache Control page for artwork and saved-request controls.
</div>
)}
{showLogs && (
@@ -1245,8 +1409,15 @@ export default function SettingsPage({ section }: SettingsPageProps) {
<option value={200}>200</option>
</select>
</label>
<button type="button" onClick={loadCache}>
Load saved requests
<button type="button" onClick={loadCache} disabled={cacheLoading}>
{cacheLoading ? (
<>
<span className="spinner button-spinner" aria-hidden="true" />
Loading saved requests
</>
) : (
'Load saved requests'
)}
</button>
</div>
</div>

View File

@@ -0,0 +1,172 @@
'use client'
import { useEffect, useMemo, useState } from 'react'
import { useRouter } from 'next/navigation'
import { authFetch, clearToken, getApiBase, getToken } from '../../lib/auth'
import AdminShell from '../../ui/AdminShell'
type RequestRow = {
id: number
title?: string | null
year?: number | null
type?: string | null
statusLabel?: string | null
requestedBy?: string | null
createdAt?: string | null
}
const formatDateTime = (value?: string | null) => {
if (!value) return 'Unknown'
const date = new Date(value)
if (Number.isNaN(date.valueOf())) return value
return date.toLocaleString()
}
export default function AdminRequestsAllPage() {
const router = useRouter()
const [rows, setRows] = useState<RequestRow[]>([])
const [total, setTotal] = useState(0)
const [loading, setLoading] = useState(false)
const [error, setError] = useState<string | null>(null)
const [pageSize, setPageSize] = useState(50)
const [page, setPage] = useState(1)
const pageCount = useMemo(() => {
if (!total || pageSize <= 0) return 1
return Math.max(1, Math.ceil(total / pageSize))
}, [total, pageSize])
const load = async () => {
if (!getToken()) {
router.push('/login')
return
}
setLoading(true)
setError(null)
try {
const baseUrl = getApiBase()
const skip = (page - 1) * pageSize
const response = await authFetch(
`${baseUrl}/admin/requests/all?take=${pageSize}&skip=${skip}`
)
if (!response.ok) {
if (response.status === 401) {
clearToken()
router.push('/login')
return
}
if (response.status === 403) {
router.push('/')
return
}
throw new Error(`Load failed: ${response.status}`)
}
const data = await response.json()
setRows(Array.isArray(data?.results) ? data.results : [])
setTotal(Number(data?.total ?? 0))
} catch (err) {
console.error(err)
setError('Unable to load requests.')
} finally {
setLoading(false)
}
}
useEffect(() => {
void load()
}, [page, pageSize])
useEffect(() => {
if (page > pageCount) {
setPage(pageCount)
}
}, [pageCount, page])
return (
<AdminShell
title="All requests"
subtitle="Paginated view of every cached request."
actions={
<button type="button" onClick={() => router.push('/admin')}>
Back to settings
</button>
}
>
<section className="admin-section">
<div className="admin-toolbar">
<div className="admin-toolbar-info">
<span>{total.toLocaleString()} total</span>
</div>
<div className="admin-toolbar-actions">
<label className="admin-select">
<span>Per page</span>
<select value={pageSize} onChange={(e) => setPageSize(Number(e.target.value))}>
<option value={25}>25</option>
<option value={50}>50</option>
<option value={100}>100</option>
<option value={200}>200</option>
</select>
</label>
</div>
</div>
{loading ? (
<div className="status-banner">Loading requests</div>
) : error ? (
<div className="error-banner">{error}</div>
) : rows.length === 0 ? (
<div className="status-banner">No requests found.</div>
) : (
<div className="admin-table">
<div className="admin-table-head">
<span>Request</span>
<span>Status</span>
<span>Requested by</span>
<span>Created</span>
</div>
{rows.map((row) => (
<button
key={row.id}
type="button"
className="admin-table-row"
onClick={() => router.push(`/requests/${row.id}`)}
>
<span>
{row.title || `Request #${row.id}`}
{row.year ? ` (${row.year})` : ''}
</span>
<span>{row.statusLabel || 'Unknown'}</span>
<span>{row.requestedBy || 'Unknown'}</span>
<span>{formatDateTime(row.createdAt)}</span>
</button>
))}
</div>
)}
<div className="admin-pagination">
<button type="button" onClick={() => setPage(1)} disabled={page <= 1}>
First
</button>
<button type="button" onClick={() => setPage(page - 1)} disabled={page <= 1}>
Previous
</button>
<span>
Page {page} of {pageCount}
</span>
<button
type="button"
onClick={() => setPage(page + 1)}
disabled={page >= pageCount}
>
Next
</button>
<button
type="button"
onClick={() => setPage(pageCount)}
disabled={page >= pageCount}
>
Last
</button>
</div>
</section>
</AdminShell>
)
}
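Note: the page above assumes a backend route GET /admin/requests/all that accepts take/skip query parameters and returns a total count plus a results array. That route is not part of this diff, so the following is only a sketch of the contract inferred from the reads in load(); no field names beyond those used above are assumed.

// Contract the all-requests page relies on (sketch inferred from load() above).
type AllRequestsResponse = {
  total: number
  results: Array<{
    id: number
    title?: string | null
    year?: number | null
    type?: string | null
    statusLabel?: string | null
    requestedBy?: string | null
    createdAt?: string | null
  }>
}

// Pagination mapping used by the page: page/pageSize on the client, take/skip on the wire.
const toQuery = (page: number, pageSize: number) =>
  `take=${pageSize}&skip=${(page - 1) * pageSize}`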

View File

@@ -1027,6 +1027,85 @@ button span {
gap: 12px;
}
.admin-toolbar {
display: flex;
justify-content: space-between;
align-items: center;
gap: 16px;
flex-wrap: wrap;
}
.admin-toolbar-info {
color: var(--ink-muted);
font-size: 13px;
}
.admin-toolbar-actions {
display: flex;
gap: 12px;
align-items: center;
}
.admin-select {
display: inline-flex;
align-items: center;
gap: 8px;
color: var(--ink-muted);
font-size: 13px;
}
.admin-table {
display: grid;
gap: 8px;
}
.admin-table-head {
display: grid;
grid-template-columns: 2fr 1fr 1fr 1fr;
gap: 12px;
font-size: 12px;
color: var(--ink-muted);
text-transform: uppercase;
letter-spacing: 0.08em;
padding: 0 12px;
}
.admin-table-row {
display: grid;
grid-template-columns: 2fr 1fr 1fr 1fr;
gap: 12px;
align-items: center;
text-align: left;
background: rgba(255, 255, 255, 0.04);
border-radius: 16px;
padding: 12px;
transition: transform 0.2s ease, box-shadow 0.2s ease;
}
.admin-table-row:hover {
transform: translateY(-1px);
box-shadow: 0 12px 24px rgba(15, 20, 45, 0.18);
}
.admin-pagination {
display: flex;
flex-wrap: wrap;
gap: 10px;
align-items: center;
justify-content: flex-end;
color: var(--ink-muted);
font-size: 13px;
}
.admin-pagination button {
background: rgba(255, 255, 255, 0.08);
color: var(--ink);
}
.admin-pagination span {
padding: 0 6px;
}
.section-header {
display: flex;
justify-content: space-between;
@@ -1084,6 +1163,118 @@ button span {
line-height: 1.4;
}
.user-grid {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(260px, 1fr));
gap: 16px;
}
.user-grid-card {
display: grid;
gap: 14px;
padding: 16px;
border-radius: 16px;
border: 1px solid var(--border);
background: rgba(255, 255, 255, 0.04);
color: var(--ink);
text-decoration: none;
transition: border-color 0.2s ease, transform 0.2s ease;
}
.user-grid-card:hover {
border-color: rgba(59, 130, 246, 0.5);
transform: translateY(-2px);
}
.user-grid-header {
display: flex;
justify-content: space-between;
gap: 12px;
align-items: flex-start;
}
.user-grid-meta {
display: block;
font-size: 12px;
color: var(--ink-muted);
}
.user-grid-pill {
padding: 4px 10px;
border-radius: 999px;
font-size: 12px;
border: 1px solid rgba(59, 130, 246, 0.4);
color: var(--ink);
background: rgba(59, 130, 246, 0.2);
}
.user-grid-pill.is-blocked {
border-color: rgba(255, 82, 82, 0.5);
background: rgba(255, 82, 82, 0.2);
}
.user-grid-stats {
display: grid;
grid-template-columns: repeat(2, minmax(0, 1fr));
gap: 12px;
}
.user-grid-stats .label {
font-size: 12px;
color: var(--ink-muted);
display: block;
}
.user-grid-stats .value {
font-size: 16px;
font-weight: 600;
}
.user-grid-footer {
display: grid;
gap: 6px;
}
.user-detail-card {
display: grid;
gap: 16px;
padding: 18px;
border-radius: 18px;
border: 1px solid var(--border);
background: rgba(255, 255, 255, 0.04);
}
.user-detail-header {
display: flex;
justify-content: space-between;
align-items: center;
gap: 16px;
flex-wrap: wrap;
}
.user-detail-meta {
display: flex;
gap: 12px;
flex-wrap: wrap;
}
.user-detail-grid {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(160px, 1fr));
gap: 12px;
}
.user-detail-grid .label {
font-size: 12px;
color: var(--ink-muted);
display: block;
}
.user-detail-grid .value {
font-size: 18px;
font-weight: 600;
}
.label-row {
display: flex;
justify-content: space-between;
@@ -1326,6 +1517,17 @@ button span {
.progress-indeterminate .progress-fill {
position: absolute;
width: 100%;
left: 0;
top: 0;
background: linear-gradient(
90deg,
rgba(255, 255, 255, 0),
var(--accent-2),
var(--accent-3),
rgba(255, 255, 255, 0)
);
background-size: 200% 100%;
animation: progress-indeterminate 1.6s ease-in-out infinite;
}
@@ -1404,6 +1606,24 @@ button span {
font-size: 13px;
}
.system-meta {
display: flex;
flex-direction: column;
gap: 2px;
}
.system-test-message {
font-size: 11px;
color: var(--ink-muted);
}
.system-actions {
margin-left: auto;
display: flex;
align-items: center;
gap: 8px;
}
.system-dot {
width: 10px;
height: 10px;
@@ -1433,10 +1653,28 @@ button span {
}
.system-state {
margin-left: auto;
color: var(--ink-muted);
}
.system-test {
padding: 4px 10px;
border-radius: 999px;
border: 1px solid var(--border);
background: rgba(255, 255, 255, 0.08);
color: var(--ink-muted);
font-size: 11px;
letter-spacing: 0.02em;
}
.system-test:hover:not(:disabled) {
background: rgba(255, 255, 255, 0.16);
}
.system-test:disabled {
opacity: 0.6;
cursor: not-allowed;
}
.pipeline-map {
border-radius: 16px;
border: 1px solid var(--border);
@@ -1509,13 +1747,10 @@ button span {
@keyframes progress-indeterminate {
0% {
transform: translateX(-50%);
}
50% {
transform: translateX(120%);
background-position: 200% 0;
}
100% {
transform: translateX(-50%);
background-position: -200% 0;
}
}
@@ -1681,6 +1916,16 @@ button span {
animation: spin 0.9s linear infinite;
}
.button-spinner {
width: 16px;
height: 16px;
border-width: 2px;
box-shadow: none;
margin-right: 8px;
vertical-align: middle;
display: inline-block;
}
.loading-text {
font-size: 16px;
color: var(--ink-muted);
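The new button-spinner and progress-indeterminate rules above are driven by markup along these lines. This is a sketch only: the spinner usage mirrors the settings-page button earlier in this diff, while the .progress wrapper class name is an assumption, since only the progress-indeterminate and progress-fill selectors appear here.

// Sketch of the markup these rules style. `.progress` is an assumed wrapper class;
// `progress-indeterminate`, `progress-fill`, `spinner`, and `button-spinner` come from the CSS above.
export function LoadingExamples() {
  return (
    <>
      {/* Busy button: small inline spinner before the label. */}
      <button type="button" disabled>
        <span className="spinner button-spinner" aria-hidden="true" />
        Loading saved requests
      </button>
      {/* Indeterminate bar: the fill stays full-width and the gradient scrolls via background-position. */}
      <div className="progress progress-indeterminate">
        <div className="progress-fill" />
      </div>
    </>
  )
}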

View File

@@ -14,6 +14,7 @@ export default function HomePage() {
year?: number
statusLabel?: string
artwork?: { poster_url?: string }
createdAt?: string | null
}[]
>([])
const [recentError, setRecentError] = useState<string | null>(null)
@@ -30,6 +31,8 @@ export default function HomePage() {
>(null)
const [servicesLoading, setServicesLoading] = useState(false)
const [servicesError, setServicesError] = useState<string | null>(null)
const [serviceTesting, setServiceTesting] = useState<Record<string, boolean>>({})
const [serviceTestResults, setServiceTestResults] = useState<Record<string, string | null>>({})
const submit = (event: React.FormEvent) => {
event.preventDefault()
@@ -42,6 +45,61 @@ export default function HomePage() {
void runSearch(trimmed)
}
const toServiceSlug = (name: string) => name.toLowerCase().replace(/[^a-z0-9]/g, '')
const updateServiceStatus = (name: string, status: string, message?: string) => {
setServicesStatus((prev) => {
if (!prev) return prev
return {
...prev,
services: prev.services.map((service) =>
service.name === name ? { ...service, status, message } : service
),
}
})
}
const testService = async (name: string) => {
const slug = toServiceSlug(name)
setServiceTesting((prev) => ({ ...prev, [name]: true }))
setServiceTestResults((prev) => ({ ...prev, [name]: null }))
try {
const baseUrl = getApiBase()
const response = await authFetch(`${baseUrl}/status/services/${slug}/test`, {
method: 'POST',
})
if (!response.ok) {
if (response.status === 401) {
clearToken()
router.push('/login')
return
}
const text = await response.text()
throw new Error(text || `Service test failed: ${response.status}`)
}
const data = await response.json()
const status = data?.status ?? 'unknown'
const message =
data?.message ||
(status === 'up'
? 'API OK'
: status === 'down'
? 'API unreachable'
: status === 'degraded'
? 'Health warnings'
: status === 'not_configured'
? 'Not configured'
: 'Unknown')
setServiceTestResults((prev) => ({ ...prev, [name]: message }))
updateServiceStatus(name, status, data?.message)
} catch (error) {
console.error(error)
setServiceTestResults((prev) => ({ ...prev, [name]: 'Test failed' }))
} finally {
setServiceTesting((prev) => ({ ...prev, [name]: false }))
}
}
useEffect(() => {
if (!getToken()) {
router.push('/login')
@@ -94,6 +152,7 @@ export default function HomePage() {
year: item.year,
statusLabel: item.statusLabel,
artwork: item.artwork,
createdAt: item.createdAt ?? null,
}
})
)
@@ -179,6 +238,13 @@ export default function HomePage() {
return url.startsWith('http') ? url : `${getApiBase()}${url}`
}
const formatRequestTime = (value?: string | null) => {
if (!value) return null
const date = new Date(value)
if (Number.isNaN(date.valueOf())) return value
return date.toLocaleString()
}
return (
<main className="card">
<div className="layout-grid">
@@ -214,10 +280,17 @@ export default function HomePage() {
return order.map((name) => {
const item = items.find((entry) => entry.name === name)
const status = item?.status ?? 'unknown'
const testing = serviceTesting[name] ?? false
return (
<div key={name} className={`system-item system-${status}`}>
<span className="system-dot" />
<div className="system-meta">
<span className="system-name">{name}</span>
{serviceTestResults[name] && (
<span className="system-test-message">{serviceTestResults[name]}</span>
)}
</div>
<div className="system-actions">
<span className="system-state">
{status === 'up'
? 'Up'
@@ -229,6 +302,15 @@ export default function HomePage() {
? 'Not configured'
: 'Unknown'}
</span>
<button
type="button"
className="system-test"
onClick={() => void testService(name)}
disabled={testing}
>
{testing ? 'Testing...' : 'Test'}
</button>
</div>
</div>
)
})
@@ -239,11 +321,12 @@ export default function HomePage() {
<h2>{role === 'admin' ? 'All requests' : 'My recent requests'}</h2>
{authReady && (
<label className="recent-filter">
<span>Show last</span>
<span>Show</span>
<select
value={recentDays}
onChange={(event) => setRecentDays(Number(event.target.value))}
>
<option value={0}>All</option>
<option value={30}>30 days</option>
<option value={60}>60 days</option>
<option value={90}>90 days</option>
@@ -290,6 +373,7 @@ export default function HomePage() {
<span className="recent-meta">
{item.statusLabel ? item.statusLabel : 'Status not available yet'} · Request{' '}
{item.id}
{item.createdAt ? ` · ${formatRequestTime(item.createdAt)}` : ''}
</span>
</span>
</button>
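The per-service Test buttons above post to /status/services/{slug}/test and only read a status string and an optional message from the reply; the backend handler is outside this diff. Below is a sketch of that contract, plus the fallback labels the nested ternary in testService produces, written as a lookup table.

// Status values and response shape the home page handles (inferred from testService above).
type ServiceStatus = 'up' | 'down' | 'degraded' | 'not_configured' | 'unknown'
type ServiceTestResponse = { status?: ServiceStatus; message?: string }

// Equivalent of the nested ternary: default label per status when no message is returned.
const DEFAULT_TEST_LABELS: Record<ServiceStatus, string> = {
  up: 'API OK',
  down: 'API unreachable',
  degraded: 'Health warnings',
  not_configured: 'Not configured',
  unknown: 'Unknown',
}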

View File

@@ -17,9 +17,9 @@ const NAV_GROUPS = [
{
title: 'Requests',
items: [
{ href: '/admin/requests', label: 'Request syncing' },
{ href: '/admin/artwork', label: 'Artwork' },
{ href: '/admin/cache', label: 'Cache' },
{ href: '/admin/requests', label: 'Request sync' },
{ href: '/admin/requests-all', label: 'All requests' },
{ href: '/admin/cache', label: 'Cache Control' },
],
},
{

View File

@@ -0,0 +1,234 @@
'use client'
import { useEffect, useState } from 'react'
import { useParams, useRouter } from 'next/navigation'
import { authFetch, clearToken, getApiBase, getToken } from '../../lib/auth'
import AdminShell from '../../ui/AdminShell'
type UserStats = {
total: number
ready: number
pending: number
approved: number
working: number
partial: number
declined: number
in_progress: number
last_request_at?: string | null
}
type AdminUser = {
id?: number
username: string
role: string
auth_provider?: string | null
last_login_at?: string | null
is_blocked?: boolean
jellyseerr_user_id?: number | null
}
const formatDateTime = (value?: string | null) => {
if (!value) return 'Never'
const date = new Date(value)
if (Number.isNaN(date.valueOf())) return value
return date.toLocaleString()
}
const normalizeStats = (stats: any): UserStats => ({
total: Number(stats?.total ?? 0),
ready: Number(stats?.ready ?? 0),
pending: Number(stats?.pending ?? 0),
approved: Number(stats?.approved ?? 0),
working: Number(stats?.working ?? 0),
partial: Number(stats?.partial ?? 0),
declined: Number(stats?.declined ?? 0),
in_progress: Number(stats?.in_progress ?? 0),
last_request_at: stats?.last_request_at ?? null,
})
export default function UserDetailPage() {
const params = useParams()
const router = useRouter()
const idParam = Array.isArray(params?.id) ? params.id[0] : params?.id
const [user, setUser] = useState<AdminUser | null>(null)
const [stats, setStats] = useState<UserStats | null>(null)
const [error, setError] = useState<string | null>(null)
const [loading, setLoading] = useState(true)
const loadUser = async () => {
if (!idParam) return
try {
const baseUrl = getApiBase()
const response = await authFetch(
`${baseUrl}/admin/users/id/${encodeURIComponent(idParam)}`
)
if (!response.ok) {
if (response.status === 401) {
clearToken()
router.push('/login')
return
}
if (response.status === 403) {
router.push('/')
return
}
if (response.status === 404) {
setError('User not found.')
return
}
throw new Error('Could not load user.')
}
const data = await response.json()
setUser(data?.user ?? null)
setStats(normalizeStats(data?.stats))
setError(null)
} catch (err) {
console.error(err)
setError('Could not load user.')
} finally {
setLoading(false)
}
}
const toggleUserBlock = async (blocked: boolean) => {
if (!user) return
try {
const baseUrl = getApiBase()
const response = await authFetch(
`${baseUrl}/admin/users/${encodeURIComponent(user.username)}/${blocked ? 'block' : 'unblock'}`,
{ method: 'POST' }
)
if (!response.ok) {
throw new Error('Update failed')
}
await loadUser()
} catch (err) {
console.error(err)
setError('Could not update user access.')
}
}
const updateUserRole = async (role: string) => {
if (!user) return
try {
const baseUrl = getApiBase()
const response = await authFetch(
`${baseUrl}/admin/users/${encodeURIComponent(user.username)}/role`,
{
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ role }),
}
)
if (!response.ok) {
throw new Error('Update failed')
}
await loadUser()
} catch (err) {
console.error(err)
setError('Could not update user role.')
}
}
useEffect(() => {
if (!getToken()) {
router.push('/login')
return
}
void loadUser()
}, [router, idParam])
if (loading) {
return <main className="card">Loading user...</main>
}
return (
<AdminShell
title={user?.username || 'User'}
subtitle="User overview and request stats."
actions={
<button type="button" onClick={() => router.push('/users')}>
Back to users
</button>
}
>
<section className="admin-section">
{error && <div className="error-banner">{error}</div>}
{!user ? (
<div className="status-banner">No user data found.</div>
) : (
<>
<div className="user-detail-card">
<div className="user-detail-header">
<div>
<strong>{user.username}</strong>
<div className="user-detail-meta">
<span className="meta">Jellyseerr ID: {user.jellyseerr_user_id ?? user.id ?? 'Unknown'}</span>
<span className="meta">Role: {user.role}</span>
<span className="meta">Login type: {user.auth_provider || 'local'}</span>
<span className="meta">Last login: {formatDateTime(user.last_login_at)}</span>
</div>
</div>
<div className="user-actions">
<label className="toggle">
<input
type="checkbox"
checked={user.role === 'admin'}
onChange={(event) => updateUserRole(event.target.checked ? 'admin' : 'user')}
/>
<span>Make admin</span>
</label>
<button
type="button"
className="ghost-button"
onClick={() => toggleUserBlock(!user.is_blocked)}
>
{user.is_blocked ? 'Allow access' : 'Block access'}
</button>
</div>
</div>
<div className="user-detail-grid">
<div>
<span className="label">Total</span>
<span className="value">{stats?.total ?? 0}</span>
</div>
<div>
<span className="label">Ready</span>
<span className="value">{stats?.ready ?? 0}</span>
</div>
<div>
<span className="label">Pending</span>
<span className="value">{stats?.pending ?? 0}</span>
</div>
<div>
<span className="label">Approved</span>
<span className="value">{stats?.approved ?? 0}</span>
</div>
<div>
<span className="label">Working</span>
<span className="value">{stats?.working ?? 0}</span>
</div>
<div>
<span className="label">Partial</span>
<span className="value">{stats?.partial ?? 0}</span>
</div>
<div>
<span className="label">Declined</span>
<span className="value">{stats?.declined ?? 0}</span>
</div>
<div>
<span className="label">In progress</span>
<span className="value">{stats?.in_progress ?? 0}</span>
</div>
<div>
<span className="label">Last request</span>
<span className="value">{formatDateTime(stats?.last_request_at)}</span>
</div>
</div>
</div>
</>
)}
</section>
</AdminShell>
)
}
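For reference, the detail page drives three admin endpoints; their handlers are not shown in this diff, so the helper below is only a sketch that restates the paths and payloads used by loadUser, toggleUserBlock, and updateUserRole above.

// Thin typed wrapper over the endpoints used on this page (sketch; error handling omitted,
// and authFetch is approximated as a fetch-compatible function).
const adminUserApi = (baseUrl: string, authFetch: typeof fetch) => ({
  // GET /admin/users/id/{id} -> { user, stats }
  load: (id: string | number) =>
    authFetch(`${baseUrl}/admin/users/id/${encodeURIComponent(String(id))}`),
  // POST /admin/users/{username}/block or /unblock
  setBlocked: (username: string, blocked: boolean) =>
    authFetch(
      `${baseUrl}/admin/users/${encodeURIComponent(username)}/${blocked ? 'block' : 'unblock'}`,
      { method: 'POST' }
    ),
  // POST /admin/users/{username}/role with body { role }
  setRole: (username: string, role: 'admin' | 'user') =>
    authFetch(`${baseUrl}/admin/users/${encodeURIComponent(username)}/role`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ role }),
    }),
})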

View File

@@ -2,15 +2,30 @@
import { useEffect, useState } from 'react'
import { useRouter } from 'next/navigation'
import Link from 'next/link'
import { authFetch, clearToken, getApiBase, getToken } from '../lib/auth'
import AdminShell from '../ui/AdminShell'
type AdminUser = {
id: number
username: string
role: string
authProvider?: string | null
lastLoginAt?: string | null
isBlocked?: boolean
stats?: UserStats
}
type UserStats = {
total: number
ready: number
pending: number
approved: number
working: number
partial: number
declined: number
in_progress: number
last_request_at?: string | null
}
const formatLastLogin = (value?: string | null) => {
@@ -20,16 +35,50 @@ const formatLastLogin = (value?: string | null) => {
return date.toLocaleString()
}
const formatLastRequest = (value?: string | null) => {
if (!value) return '—'
const date = new Date(value)
if (Number.isNaN(date.valueOf())) return value
return date.toLocaleString()
}
const emptyStats: UserStats = {
total: 0,
ready: 0,
pending: 0,
approved: 0,
working: 0,
partial: 0,
declined: 0,
in_progress: 0,
last_request_at: null,
}
const normalizeStats = (stats: any): UserStats => ({
total: Number(stats?.total ?? 0),
ready: Number(stats?.ready ?? 0),
pending: Number(stats?.pending ?? 0),
approved: Number(stats?.approved ?? 0),
working: Number(stats?.working ?? 0),
partial: Number(stats?.partial ?? 0),
declined: Number(stats?.declined ?? 0),
in_progress: Number(stats?.in_progress ?? 0),
last_request_at: stats?.last_request_at ?? null,
})
export default function UsersPage() {
const router = useRouter()
const [users, setUsers] = useState<AdminUser[]>([])
const [error, setError] = useState<string | null>(null)
const [loading, setLoading] = useState(true)
const [jellyseerrSyncStatus, setJellyseerrSyncStatus] = useState<string | null>(null)
const [jellyseerrSyncBusy, setJellyseerrSyncBusy] = useState(false)
const [jellyseerrResyncBusy, setJellyseerrResyncBusy] = useState(false)
const loadUsers = async () => {
try {
const baseUrl = getApiBase()
const response = await authFetch(`${baseUrl}/admin/users`)
const response = await authFetch(`${baseUrl}/admin/users/summary`)
if (!response.ok) {
if (response.status === 401) {
clearToken()
@@ -51,6 +100,8 @@ export default function UsersPage() {
authProvider: user.auth_provider ?? 'local',
lastLoginAt: user.last_login_at ?? null,
isBlocked: Boolean(user.is_blocked),
id: Number(user.id ?? 0),
stats: normalizeStats(user.stats ?? emptyStats),
}))
)
} else {
@@ -103,6 +154,59 @@ export default function UsersPage() {
}
}
const syncJellyseerrUsers = async () => {
setJellyseerrSyncStatus(null)
setJellyseerrSyncBusy(true)
try {
const baseUrl = getApiBase()
const response = await authFetch(`${baseUrl}/admin/jellyseerr/users/sync`, {
method: 'POST',
})
if (!response.ok) {
const text = await response.text()
throw new Error(text || 'Sync failed')
}
const data = await response.json()
setJellyseerrSyncStatus(
`Matched ${data?.matched ?? 0} users. Skipped ${data?.skipped ?? 0}.`
)
await loadUsers()
} catch (err) {
console.error(err)
setJellyseerrSyncStatus('Could not sync Jellyseerr users.')
} finally {
setJellyseerrSyncBusy(false)
}
}
const resyncJellyseerrUsers = async () => {
const confirmed = window.confirm(
'This will remove all non-admin users and re-import from Jellyseerr. Continue?'
)
if (!confirmed) return
setJellyseerrSyncStatus(null)
setJellyseerrResyncBusy(true)
try {
const baseUrl = getApiBase()
const response = await authFetch(`${baseUrl}/admin/jellyseerr/users/resync`, {
method: 'POST',
})
if (!response.ok) {
const text = await response.text()
throw new Error(text || 'Resync failed')
}
const data = await response.json()
setJellyseerrSyncStatus(
`Re-imported ${data?.imported ?? 0} users. Cleared ${data?.cleared ?? 0}.`
)
await loadUsers()
} catch (err) {
console.error(err)
setJellyseerrSyncStatus('Could not resync Jellyseerr users.')
} finally {
setJellyseerrResyncBusy(false)
}
}
useEffect(() => {
if (!getToken()) {
@@ -121,47 +225,67 @@ export default function UsersPage() {
title="Users"
subtitle="Manage who can use Magent."
actions={
<>
<button type="button" onClick={loadUsers}>
Reload list
</button>
<button type="button" onClick={syncJellyseerrUsers} disabled={jellyseerrSyncBusy}>
{jellyseerrSyncBusy ? 'Syncing Jellyseerr users...' : 'Sync Jellyseerr users'}
</button>
<button type="button" onClick={resyncJellyseerrUsers} disabled={jellyseerrResyncBusy}>
{jellyseerrResyncBusy ? 'Resyncing Jellyseerr users...' : 'Resync Jellyseerr users'}
</button>
</>
}
>
<section className="admin-section">
{error && <div className="error-banner">{error}</div>}
{jellyseerrSyncStatus && <div className="status-banner">{jellyseerrSyncStatus}</div>}
{users.length === 0 ? (
<div className="status-banner">No users found yet.</div>
) : (
<div className="admin-grid">
<div className="user-grid">
{users.map((user) => (
<div key={user.username} className="summary-card user-card">
<Link
key={user.username}
className="user-grid-card"
href={`/users/${user.id}`}
>
<div className="user-grid-header">
<div>
<strong>{user.username}</strong>
<div className="user-meta">
<span className="meta">Role: {user.role}</span>
<span className="meta">Login type: {user.authProvider || 'local'}</span>
<span className="user-grid-meta">{user.role}</span>
</div>
<span className={`user-grid-pill ${user.isBlocked ? 'is-blocked' : ''}`}>
{user.isBlocked ? 'Blocked' : 'Active'}
</span>
</div>
<div className="user-grid-stats">
<div>
<span className="label">Total</span>
<span className="value">{user.stats?.total ?? 0}</span>
</div>
<div>
<span className="label">Ready</span>
<span className="value">{user.stats?.ready ?? 0}</span>
</div>
<div>
<span className="label">Pending</span>
<span className="value">{user.stats?.pending ?? 0}</span>
</div>
<div>
<span className="label">In progress</span>
<span className="value">{user.stats?.in_progress ?? 0}</span>
</div>
</div>
<div className="user-grid-footer">
<span className="meta">Login: {user.authProvider || 'local'}</span>
<span className="meta">Last login: {formatLastLogin(user.lastLoginAt)}</span>
<span className="meta">
Last request: {formatLastRequest(user.stats?.last_request_at)}
</span>
</div>
</div>
<div className="user-actions">
<label className="toggle">
<input
type="checkbox"
checked={user.role === 'admin'}
onChange={(event) =>
updateUserRole(user.username, event.target.checked ? 'admin' : 'user')
}
/>
<span>Make admin</span>
</label>
<button
type="button"
className="ghost-button"
onClick={() => toggleUserBlock(user.username, !user.isBlocked)}
>
{user.isBlocked ? 'Allow access' : 'Block access'}
</button>
</div>
</div>
</Link>
))}
</div>
)}
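The users list now reads GET /admin/users/summary and the two Jellyseerr sync endpoints; none of their handlers appear in this diff. The sketch below lists only the fields actually consumed above (snake_case keys as read in loadUsers; the exact envelope around the user list is not visible in this hunk, so it is not assumed).

// Per-user record consumed by loadUsers (only fields read above are listed).
type AdminUserSummary = {
  id?: number
  username: string
  role: string
  auth_provider?: string | null
  last_login_at?: string | null
  is_blocked?: boolean
  stats?: {
    total?: number
    ready?: number
    pending?: number
    approved?: number
    working?: number
    partial?: number
    declined?: number
    in_progress?: number
    last_request_at?: string | null
  }
}

// Counts that feed the status banner after each sync action.
type JellyseerrSyncResult = { matched?: number; skipped?: number } // POST /admin/jellyseerr/users/sync
type JellyseerrResyncResult = { imported?: number; cleared?: number } // POST /admin/jellyseerr/users/resync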

View File

@@ -1,7 +1,7 @@
{
"name": "magent-frontend",
"private": true,
"version": "0.1.0",
"version": "0202261541",
"scripts": {
"dev": "next dev",
"build": "next build",