Initial commit

This commit is contained in:
2026-01-22 22:49:57 +13:00
commit fe43a81175
67 changed files with 9408 additions and 0 deletions

4
backend/.dockerignore Normal file
View File

@@ -0,0 +1,4 @@
__pycache__/
*.pyc
.venv/
.env

15
backend/Dockerfile Normal file
View File

@@ -0,0 +1,15 @@
# Slim Python base keeps the final image small.
FROM python:3.12-slim
WORKDIR /app
# Don't write .pyc files; keep stdout/stderr unbuffered so container logs stream live.
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1
# Install dependencies in their own layer so code edits don't bust the pip cache.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY app ./app
EXPOSE 8000
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

64
backend/app/ai/triage.py Normal file
View File

@@ -0,0 +1,64 @@
from ..models import NormalizedState, TriageRecommendation, TriageResult, Snapshot
def triage_snapshot(snapshot: Snapshot) -> TriageResult:
    """Map a request snapshot onto a triage verdict plus suggested actions.

    The three recognized blocker states each carry a tailored recommendation;
    any other state falls back to a low-confidence diagnostics suggestion.
    """
    # Per-state playbook: (root_cause, summary, confidence, recommendation).
    # A snapshot has exactly one state, so at most one entry applies.
    playbook = {
        NormalizedState.requested: (
            "approval",
            "The request is waiting for approval in Jellyseerr.",
            0.6,
            TriageRecommendation(
                action_id="wait_for_approval",
                title="Ask an admin to approve the request",
                reason="Jellyseerr has not marked this request as approved.",
                risk="low",
            ),
        ),
        NormalizedState.needs_add: (
            "not_added",
            "The request is approved but not added to Sonarr/Radarr yet.",
            0.7,
            TriageRecommendation(
                action_id="readd_to_arr",
                title="Add it to the library queue",
                reason="Sonarr/Radarr has not created the entry for this request.",
                risk="medium",
            ),
        ),
        NormalizedState.added_to_arr: (
            "search",
            "The item is in Sonarr/Radarr but has not been downloaded yet.",
            0.55,
            TriageRecommendation(
                action_id="search",
                title="Re-run search",
                reason="A fresh search can locate new releases.",
                risk="low",
            ),
        ),
    }
    entry = playbook.get(snapshot.state)
    if entry is None:
        # Unknown / unhandled state: suggest collecting diagnostics instead.
        return TriageResult(
            summary="No clear blocker detected yet.",
            confidence=0.2,
            root_cause="unknown",
            recommendations=[
                TriageRecommendation(
                    action_id="diagnostics",
                    title="Generate diagnostics bundle",
                    reason="Collect service status and recent errors for review.",
                    risk="low",
                )
            ],
        )
    root_cause, summary, confidence, recommendation = entry
    return TriageResult(
        summary=summary,
        confidence=confidence,
        root_cause=root_cause,
        recommendations=[recommendation],
    )

38
backend/app/auth.py Normal file
View File

@@ -0,0 +1,38 @@
from typing import Dict, Any
from fastapi import Depends, HTTPException, status
from fastapi.security import OAuth2PasswordBearer
from .db import get_user_by_username
from .security import safe_decode_token, TokenError
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/auth/login")
def get_current_user(token: str = Depends(oauth2_scheme)) -> Dict[str, Any]:
    """FastAPI dependency: resolve the bearer token to the current user.

    Returns a minimal dict (username, role, auth_provider) for route handlers.
    Raises 401 for invalid tokens / unknown users and 403 for blocked users.
    """
    # RFC 6750 (and FastAPI's OAuth2 guide) call for a WWW-Authenticate
    # challenge header on 401 responses so clients know bearer auth is expected.
    challenge = {"WWW-Authenticate": "Bearer"}
    try:
        payload = safe_decode_token(token)
    except TokenError as exc:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid token",
            headers=challenge,
        ) from exc
    username = payload.get("sub")
    if not username:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid token subject",
            headers=challenge,
        )
    user = get_user_by_username(username)
    if not user:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="User not found",
            headers=challenge,
        )
    if user.get("is_blocked"):
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User is blocked")
    return {
        "username": user["username"],
        "role": user["role"],
        "auth_provider": user.get("auth_provider", "local"),
    }
def require_admin(user: Dict[str, Any] = Depends(get_current_user)) -> Dict[str, Any]:
    """FastAPI dependency that only passes through admin users (403 otherwise)."""
    if user.get("role") == "admin":
        return user
    raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Admin access required")

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,32 @@
from typing import Any, Dict, Optional
import httpx
class ApiClient:
    """Minimal async JSON client shared by the *arr/Jellyseerr integrations.

    A ``None`` (or empty) base URL marks the service as unconfigured; every
    request helper then short-circuits to ``None`` instead of raising.
    """

    def __init__(self, base_url: Optional[str], api_key: Optional[str] = None):
        # Normalize away any trailing slash so request paths can start with "/".
        self.base_url = base_url.rstrip("/") if base_url else None
        self.api_key = api_key

    def configured(self) -> bool:
        """True when a base URL is known."""
        return self.base_url is not None

    def headers(self) -> Dict[str, str]:
        """Default auth headers; *arr services authenticate via X-Api-Key."""
        if self.api_key:
            return {"X-Api-Key": self.api_key}
        return {}

    async def get(self, path: str, params: Optional[Dict[str, Any]] = None) -> Optional[Any]:
        """GET ``path`` and return the decoded JSON body (None if unconfigured)."""
        if self.base_url is None:
            return None
        async with httpx.AsyncClient(timeout=10.0) as client:
            resp = await client.get(
                f"{self.base_url}{path}", headers=self.headers(), params=params
            )
            resp.raise_for_status()
            return resp.json()

    async def post(self, path: str, payload: Optional[Dict[str, Any]] = None) -> Optional[Any]:
        """POST a JSON body to ``path`` and return the decoded response."""
        if self.base_url is None:
            return None
        async with httpx.AsyncClient(timeout=10.0) as client:
            resp = await client.post(
                f"{self.base_url}{path}", headers=self.headers(), json=payload
            )
            resp.raise_for_status()
            return resp.json()

View File

@@ -0,0 +1,60 @@
from typing import Any, Dict, Optional
import httpx
from .base import ApiClient
class JellyfinClient(ApiClient):
    """Client for the Jellyfin media-server REST API.

    Jellyfin authenticates via the ``X-Emby-Token`` header rather than the
    ``X-Api-Key`` header the base client uses.
    """

    def __init__(self, base_url: Optional[str], api_key: Optional[str]):
        super().__init__(base_url, api_key)

    def configured(self) -> bool:
        """Jellyfin needs both a base URL and an API token to be usable."""
        return bool(self.base_url and self.api_key)

    def _token_headers(self) -> Dict[str, str]:
        # Jellyfin-specific auth header; empty when no token is set.
        return {"X-Emby-Token": self.api_key} if self.api_key else {}

    async def get_users(self) -> Optional[Dict[str, Any]]:
        """List Jellyfin users, or None when no base URL is configured."""
        if not self.base_url:
            return None
        async with httpx.AsyncClient(timeout=10.0) as client:
            resp = await client.get(f"{self.base_url}/Users", headers=self._token_headers())
            resp.raise_for_status()
            return resp.json()

    async def authenticate_by_name(self, username: str, password: str) -> Optional[Dict[str, Any]]:
        """Validate user credentials against /Users/AuthenticateByName."""
        if not self.base_url:
            return None
        body = {"Username": username, "Pw": password}
        async with httpx.AsyncClient(timeout=10.0) as client:
            resp = await client.post(
                f"{self.base_url}/Users/AuthenticateByName",
                headers=self._token_headers(),
                json=body,
            )
            resp.raise_for_status()
            return resp.json()

    async def search_items(
        self, term: str, item_types: Optional[list[str]] = None, limit: int = 20
    ) -> Optional[Dict[str, Any]]:
        """Search the library recursively; returns Jellyfin's Items envelope."""
        if not self.configured():
            return None
        query = {
            "SearchTerm": term,
            "IncludeItemTypes": ",".join(item_types or []),
            "Recursive": "true",
            "Limit": limit,
        }
        async with httpx.AsyncClient(timeout=10.0) as client:
            resp = await client.get(
                f"{self.base_url}/Items", headers=self._token_headers(), params=query
            )
            resp.raise_for_status()
            return resp.json()

    async def get_system_info(self) -> Optional[Dict[str, Any]]:
        """Fetch /System/Info (requires both URL and token)."""
        if not self.configured():
            return None
        async with httpx.AsyncClient(timeout=10.0) as client:
            resp = await client.get(
                f"{self.base_url}/System/Info", headers=self._token_headers()
            )
            resp.raise_for_status()
            return resp.json()

View File

@@ -0,0 +1,37 @@
from typing import Any, Dict, Optional
from .base import ApiClient
class JellyseerrClient(ApiClient):
    """Thin wrapper over the Jellyseerr v1 REST API."""

    async def get_status(self) -> Optional[Dict[str, Any]]:
        """Server status / version info."""
        return await self.get("/api/v1/status")

    async def get_request(self, request_id: str) -> Optional[Dict[str, Any]]:
        """Fetch a single media request by id."""
        return await self.get(f"/api/v1/request/{request_id}")

    async def get_recent_requests(self, take: int = 10, skip: int = 0) -> Optional[Dict[str, Any]]:
        """Page through requests using take/skip paging."""
        return await self.get("/api/v1/request", params={"take": take, "skip": skip})

    async def get_media(self, media_id: int) -> Optional[Dict[str, Any]]:
        """Fetch a media record by Jellyseerr media id."""
        return await self.get(f"/api/v1/media/{media_id}")

    async def get_movie(self, tmdb_id: int) -> Optional[Dict[str, Any]]:
        """Movie details keyed by TMDB id."""
        return await self.get(f"/api/v1/movie/{tmdb_id}")

    async def get_tv(self, tmdb_id: int) -> Optional[Dict[str, Any]]:
        """TV-show details keyed by TMDB id."""
        return await self.get(f"/api/v1/tv/{tmdb_id}")

    async def search(self, query: str, page: int = 1) -> Optional[Dict[str, Any]]:
        """Full-text search across movies / TV / people."""
        return await self.get("/api/v1/search", params={"query": query, "page": page})

View File

@@ -0,0 +1,10 @@
from typing import Any, Dict, Optional
from .base import ApiClient
class ProwlarrClient(ApiClient):
    """Thin wrapper over the Prowlarr v1 REST API (indexer manager)."""

    async def get_health(self) -> Optional[Dict[str, Any]]:
        """Health-check issues reported by Prowlarr."""
        return await self.get("/api/v1/health")

    async def search(self, query: str) -> Optional[Any]:
        """Run a manual release search across configured indexers."""
        return await self.get("/api/v1/search", params={"query": query})

View File

@@ -0,0 +1,69 @@
from typing import Any, Dict, Optional
import httpx
from .base import ApiClient
class QBittorrentClient(ApiClient):
    """Client for qBittorrent's Web API v2.

    qBittorrent uses cookie-based sessions, so every helper opens a fresh
    HTTP client, logs in (storing the SID cookie on that client), then
    performs its request.
    """

    def __init__(self, base_url: Optional[str], username: Optional[str], password: Optional[str]):
        # No API key — qBittorrent authenticates with username/password.
        super().__init__(base_url, None)
        self.username = username
        self.password = password

    def configured(self) -> bool:
        """URL, username and password are all required."""
        return bool(self.base_url and self.username and self.password)

    async def _login(self, client: httpx.AsyncClient) -> None:
        """Authenticate ``client``; raises RuntimeError when unconfigured or rejected."""
        if not self.configured():
            raise RuntimeError("qBittorrent not configured")
        resp = await client.post(
            f"{self.base_url}/api/v2/auth/login",
            data={"username": self.username, "password": self.password},
            # qBittorrent enforces a Referer/Origin check on login.
            headers={"Referer": self.base_url},
        )
        resp.raise_for_status()
        # The endpoint answers 200 with a literal "Ok." / "Fails." body.
        if resp.text.strip().lower() != "ok.":
            raise RuntimeError("qBittorrent login failed")

    async def _get(self, path: str, params: Optional[Dict[str, Any]] = None) -> Optional[Any]:
        """Login + GET, returning the JSON body (None when unconfigured)."""
        if not self.base_url:
            return None
        async with httpx.AsyncClient(timeout=10.0) as client:
            await self._login(client)
            resp = await client.get(f"{self.base_url}{path}", params=params)
            resp.raise_for_status()
            return resp.json()

    async def _get_text(self, path: str, params: Optional[Dict[str, Any]] = None) -> Optional[str]:
        """Login + GET, returning the stripped text body."""
        if not self.base_url:
            return None
        async with httpx.AsyncClient(timeout=10.0) as client:
            await self._login(client)
            resp = await client.get(f"{self.base_url}{path}", params=params)
            resp.raise_for_status()
            return resp.text.strip()

    async def _post_form(self, path: str, data: Dict[str, Any]) -> None:
        """Login + form-encoded POST; response body is discarded."""
        if not self.base_url:
            return None
        async with httpx.AsyncClient(timeout=10.0) as client:
            await self._login(client)
            resp = await client.post(f"{self.base_url}{path}", data=data)
            resp.raise_for_status()

    async def get_torrents(self) -> Optional[Any]:
        """All torrents known to the client."""
        return await self._get("/api/v2/torrents/info")

    async def get_torrents_by_hashes(self, hashes: str) -> Optional[Any]:
        """Torrents filtered by a pipe-separated hash list."""
        return await self._get("/api/v2/torrents/info", params={"hashes": hashes})

    async def get_app_version(self) -> Optional[Any]:
        """qBittorrent application version string."""
        return await self._get_text("/api/v2/app/version")

    async def resume_torrents(self, hashes: str) -> None:
        """Resume the given torrents.

        Newer qBittorrent renamed /torrents/resume to /torrents/start, so a
        404 from the legacy endpoint triggers one retry against the new path.
        """
        try:
            await self._post_form("/api/v2/torrents/resume", data={"hashes": hashes})
        except httpx.HTTPStatusError as exc:
            if exc.response is None or exc.response.status_code != 404:
                raise
            await self._post_form("/api/v2/torrents/start", data={"hashes": hashes})

View File

@@ -0,0 +1,45 @@
from typing import Any, Dict, Optional
from .base import ApiClient
class RadarrClient(ApiClient):
    """Thin wrapper over Radarr's v3 API (movies)."""

    async def get_system_status(self) -> Optional[Dict[str, Any]]:
        """Radarr version / health endpoint."""
        return await self.get("/api/v3/system/status")

    async def get_movie_by_tmdb_id(self, tmdb_id: int) -> Optional[Dict[str, Any]]:
        """Look up a library movie by its TMDB id."""
        return await self.get("/api/v3/movie", params={"tmdbId": tmdb_id})

    async def get_movies(self) -> Optional[Dict[str, Any]]:
        """Every movie in the Radarr library."""
        return await self.get("/api/v3/movie")

    async def get_root_folders(self) -> Optional[Dict[str, Any]]:
        """Configured root folders."""
        return await self.get("/api/v3/rootfolder")

    async def get_quality_profiles(self) -> Optional[Dict[str, Any]]:
        """Configured quality profiles."""
        return await self.get("/api/v3/qualityprofile")

    async def get_queue(self, movie_id: int) -> Optional[Dict[str, Any]]:
        """Download-queue entries for one movie."""
        return await self.get("/api/v3/queue", params={"movieId": movie_id})

    async def search(self, movie_id: int) -> Optional[Dict[str, Any]]:
        """Trigger an indexer search for one movie."""
        command = {"name": "MoviesSearch", "movieIds": [movie_id]}
        return await self.post("/api/v3/command", payload=command)

    async def add_movie(
        self,
        tmdb_id: int,
        quality_profile_id: int,
        root_folder: str,
        monitored: bool = True,
        search_for_movie: bool = True,
    ) -> Optional[Dict[str, Any]]:
        """Add a movie to Radarr, optionally starting an immediate search."""
        body = {
            "tmdbId": tmdb_id,
            "qualityProfileId": quality_profile_id,
            "rootFolderPath": root_folder,
            "monitored": monitored,
            "addOptions": {"searchForMovie": search_for_movie},
        }
        return await self.post("/api/v3/movie", payload=body)

    async def grab_release(self, guid: str, indexer_id: int) -> Optional[Dict[str, Any]]:
        """Force-grab a specific release by guid + indexer id."""
        return await self.post("/api/v3/release", payload={"guid": guid, "indexerId": indexer_id})

View File

@@ -0,0 +1,52 @@
from typing import Any, Dict, Optional
from .base import ApiClient
class SonarrClient(ApiClient):
    """Thin wrapper over Sonarr's v3 API (TV series)."""

    async def get_system_status(self) -> Optional[Dict[str, Any]]:
        """Sonarr version / health endpoint."""
        return await self.get("/api/v3/system/status")

    async def get_series_by_tvdb_id(self, tvdb_id: int) -> Optional[Dict[str, Any]]:
        """Look up a library series by its TVDB id."""
        return await self.get("/api/v3/series", params={"tvdbId": tvdb_id})

    async def get_root_folders(self) -> Optional[Dict[str, Any]]:
        """Configured root folders."""
        return await self.get("/api/v3/rootfolder")

    async def get_quality_profiles(self) -> Optional[Dict[str, Any]]:
        """Configured quality profiles."""
        return await self.get("/api/v3/qualityprofile")

    async def get_queue(self, series_id: int) -> Optional[Dict[str, Any]]:
        """Download-queue entries for one series."""
        return await self.get("/api/v3/queue", params={"seriesId": series_id})

    async def get_episodes(self, series_id: int) -> Optional[Dict[str, Any]]:
        """All episodes tracked for one series."""
        return await self.get("/api/v3/episode", params={"seriesId": series_id})

    async def search(self, series_id: int) -> Optional[Dict[str, Any]]:
        """Trigger an indexer search for a whole series."""
        command = {"name": "SeriesSearch", "seriesId": series_id}
        return await self.post("/api/v3/command", payload=command)

    async def search_episodes(self, episode_ids: list[int]) -> Optional[Dict[str, Any]]:
        """Trigger an indexer search for specific episodes."""
        command = {"name": "EpisodeSearch", "episodeIds": episode_ids}
        return await self.post("/api/v3/command", payload=command)

    async def add_series(
        self,
        tvdb_id: int,
        quality_profile_id: int,
        root_folder: str,
        monitored: bool = True,
        title: Optional[str] = None,
        search_missing: bool = True,
    ) -> Optional[Dict[str, Any]]:
        """Add a series to Sonarr (season folders on), optionally searching missing episodes."""
        body: Dict[str, Any] = {
            "tvdbId": tvdb_id,
            "qualityProfileId": quality_profile_id,
            "rootFolderPath": root_folder,
            "monitored": monitored,
            "seasonFolder": True,
            "addOptions": {"searchForMissingEpisodes": search_missing},
        }
        if title:
            body["title"] = title
        return await self.post("/api/v3/series", payload=body)

    async def grab_release(self, guid: str, indexer_id: int) -> Optional[Dict[str, Any]]:
        """Force-grab a specific release by guid + indexer id."""
        return await self.post("/api/v3/release", payload={"guid": guid, "indexerId": indexer_id})

105
backend/app/config.py Normal file
View File

@@ -0,0 +1,105 @@
from typing import Optional
from pydantic import AliasChoices, Field
from pydantic_settings import BaseSettings, SettingsConfigDict
class Settings(BaseSettings):
    """Application configuration, loaded from environment variables.

    Defaults are development-friendly; deployments are expected to override
    at least JWT_SECRET and ADMIN_PASSWORD. Many fields accept multiple
    env-var spellings via AliasChoices (e.g. SONARR_URL or SONARR_BASE_URL).
    """

    model_config = SettingsConfigDict(env_prefix="")

    # --- Core app ---
    app_name: str = "Magent"
    cors_allow_origin: str = "http://localhost:3000"
    sqlite_path: str = Field(default="data/magent.db", validation_alias=AliasChoices("SQLITE_PATH"))
    # NOTE(review): default JWT secret / admin password are placeholders —
    # must be overridden outside local development.
    jwt_secret: str = Field(default="change-me", validation_alias=AliasChoices("JWT_SECRET"))
    jwt_exp_minutes: int = Field(default=720, validation_alias=AliasChoices("JWT_EXP_MINUTES"))
    admin_username: str = Field(default="admin", validation_alias=AliasChoices("ADMIN_USERNAME"))
    admin_password: str = Field(default="adminadmin", validation_alias=AliasChoices("ADMIN_PASSWORD"))
    log_level: str = Field(default="INFO", validation_alias=AliasChoices("LOG_LEVEL"))
    log_file: str = Field(default="data/magent.log", validation_alias=AliasChoices("LOG_FILE"))

    # --- Requests-cache sync & housekeeping ---
    requests_sync_ttl_minutes: int = Field(
        default=1440, validation_alias=AliasChoices("REQUESTS_SYNC_TTL_MINUTES")
    )
    requests_poll_interval_seconds: int = Field(
        default=300, validation_alias=AliasChoices("REQUESTS_POLL_INTERVAL_SECONDS")
    )
    requests_delta_sync_interval_minutes: int = Field(
        default=5, validation_alias=AliasChoices("REQUESTS_DELTA_SYNC_INTERVAL_MINUTES")
    )
    # Times are "HH:MM" strings — presumably local server time; confirm with scheduler.
    requests_full_sync_time: str = Field(
        default="00:00", validation_alias=AliasChoices("REQUESTS_FULL_SYNC_TIME")
    )
    requests_cleanup_time: str = Field(
        default="02:00", validation_alias=AliasChoices("REQUESTS_CLEANUP_TIME")
    )
    requests_cleanup_days: int = Field(
        default=90, validation_alias=AliasChoices("REQUESTS_CLEANUP_DAYS")
    )
    requests_data_source: str = Field(
        default="prefer_cache", validation_alias=AliasChoices("REQUESTS_DATA_SOURCE")
    )
    artwork_cache_mode: str = Field(
        default="remote", validation_alias=AliasChoices("ARTWORK_CACHE_MODE")
    )

    # --- Jellyseerr ---
    jellyseerr_base_url: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("JELLYSEERR_URL", "JELLYSEERR_BASE_URL")
    )
    jellyseerr_api_key: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("JELLYSEERR_API_KEY", "JELLYSEERR_KEY")
    )

    # --- Jellyfin ---
    jellyfin_base_url: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("JELLYFIN_URL", "JELLYFIN_BASE_URL")
    )
    jellyfin_api_key: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("JELLYFIN_API_KEY", "JELLYFIN_KEY")
    )
    jellyfin_public_url: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("JELLYFIN_PUBLIC_URL")
    )
    jellyfin_sync_to_arr: bool = Field(
        default=True, validation_alias=AliasChoices("JELLYFIN_SYNC_TO_ARR")
    )

    # --- Sonarr ---
    sonarr_base_url: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("SONARR_URL", "SONARR_BASE_URL")
    )
    sonarr_api_key: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("SONARR_API_KEY", "SONARR_KEY")
    )
    sonarr_quality_profile_id: Optional[int] = Field(
        default=None, validation_alias=AliasChoices("SONARR_QUALITY_PROFILE_ID")
    )
    sonarr_root_folder: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("SONARR_ROOT_FOLDER")
    )

    # --- Radarr ---
    radarr_base_url: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("RADARR_URL", "RADARR_BASE_URL")
    )
    radarr_api_key: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("RADARR_API_KEY", "RADARR_KEY")
    )
    radarr_quality_profile_id: Optional[int] = Field(
        default=None, validation_alias=AliasChoices("RADARR_QUALITY_PROFILE_ID")
    )
    radarr_root_folder: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("RADARR_ROOT_FOLDER")
    )

    # --- Prowlarr ---
    prowlarr_base_url: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("PROWLARR_URL", "PROWLARR_BASE_URL")
    )
    prowlarr_api_key: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("PROWLARR_API_KEY", "PROWLARR_KEY")
    )

    # --- qBittorrent ---
    qbittorrent_base_url: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("QBIT_URL", "QBITTORRENT_URL", "QBITTORRENT_BASE_URL")
    )
    qbittorrent_username: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("QBIT_USERNAME", "QBITTORRENT_USERNAME")
    )
    qbittorrent_password: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("QBIT_PASSWORD", "QBITTORRENT_PASSWORD")
    )


# Module-level singleton read by the rest of the app.
settings = Settings()

761
backend/app/db.py Normal file
View File

@@ -0,0 +1,761 @@
import json
import os
import sqlite3
import logging
from datetime import datetime, timezone, timedelta
from typing import Any, Dict, Optional
from .config import settings
from .models import Snapshot
from .security import hash_password, verify_password
logger = logging.getLogger(__name__)
def _db_path() -> str:
    """Absolute path to the SQLite file, creating its parent directory."""
    configured = settings.sqlite_path or "data/magent.db"
    resolved = configured if os.path.isabs(configured) else os.path.join(os.getcwd(), configured)
    os.makedirs(os.path.dirname(resolved), exist_ok=True)
    return resolved
def _connect() -> sqlite3.Connection:
    """Open a fresh connection; callers use it as a context manager."""
    path = _db_path()
    return sqlite3.connect(path)
def init_db() -> None:
    """Create all tables/indexes, run lightweight migrations, and seed the admin user.

    Safe to call on every startup: DDL uses IF NOT EXISTS, and the ALTER TABLE
    migrations for pre-existing databases (created before these columns were in
    the CREATE TABLE) are tolerated via the "ignore duplicate column" pattern.
    """
    with _connect() as conn:
        # Append-only history of normalized request states.
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS snapshots (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                request_id TEXT NOT NULL,
                state TEXT NOT NULL,
                state_reason TEXT,
                created_at TEXT NOT NULL,
                payload_json TEXT NOT NULL
            )
            """
        )
        # Audit log of actions taken against requests.
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS actions (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                request_id TEXT NOT NULL,
                action_id TEXT NOT NULL,
                label TEXT NOT NULL,
                status TEXT NOT NULL,
                message TEXT,
                created_at TEXT NOT NULL
            )
            """
        )
        # Local user accounts (auth_provider marks local vs external origins).
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS users (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                username TEXT NOT NULL UNIQUE,
                password_hash TEXT NOT NULL,
                role TEXT NOT NULL,
                auth_provider TEXT NOT NULL DEFAULT 'local',
                created_at TEXT NOT NULL,
                last_login_at TEXT,
                is_blocked INTEGER NOT NULL DEFAULT 0
            )
            """
        )
        # Key/value runtime overrides (see get_setting/set_setting).
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS settings (
                key TEXT PRIMARY KEY,
                value TEXT,
                updated_at TEXT NOT NULL
            )
            """
        )
        # Local cache of Jellyseerr requests; payload_json keeps the full record.
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS requests_cache (
                request_id INTEGER PRIMARY KEY,
                media_id INTEGER,
                media_type TEXT,
                status INTEGER,
                title TEXT,
                year INTEGER,
                requested_by TEXT,
                requested_by_norm TEXT,
                created_at TEXT,
                updated_at TEXT,
                payload_json TEXT NOT NULL
            )
            """
        )
        conn.execute(
            """
            CREATE INDEX IF NOT EXISTS idx_requests_cache_created_at
            ON requests_cache (created_at)
            """
        )
        conn.execute(
            """
            CREATE INDEX IF NOT EXISTS idx_requests_cache_requested_by_norm
            ON requests_cache (requested_by_norm)
            """
        )
        # Migrations for databases created before these columns existed; the
        # OperationalError ("duplicate column name") means already migrated.
        try:
            conn.execute("ALTER TABLE users ADD COLUMN last_login_at TEXT")
        except sqlite3.OperationalError:
            pass
        try:
            conn.execute("ALTER TABLE users ADD COLUMN is_blocked INTEGER NOT NULL DEFAULT 0")
        except sqlite3.OperationalError:
            pass
        try:
            conn.execute("ALTER TABLE users ADD COLUMN auth_provider TEXT NOT NULL DEFAULT 'local'")
        except sqlite3.OperationalError:
            pass
    # Data migrations run after schema is in place (each opens its own connection).
    _backfill_auth_providers()
    ensure_admin_user()
def save_snapshot(snapshot: Snapshot) -> None:
    """Append one snapshot row; the full model is stored as a JSON payload."""
    row = (
        snapshot.request_id,
        snapshot.state.value,
        snapshot.state_reason,
        datetime.now(timezone.utc).isoformat(),
        json.dumps(snapshot.model_dump(), ensure_ascii=True),
    )
    with _connect() as conn:
        conn.execute(
            "INSERT INTO snapshots (request_id, state, state_reason, created_at, payload_json) "
            "VALUES (?, ?, ?, ?, ?)",
            row,
        )
def save_action(
    request_id: str,
    action_id: str,
    label: str,
    status: str,
    message: Optional[str] = None,
) -> None:
    """Record one executed action in the audit log with a UTC timestamp."""
    row = (
        request_id,
        action_id,
        label,
        status,
        message,
        datetime.now(timezone.utc).isoformat(),
    )
    with _connect() as conn:
        conn.execute(
            "INSERT INTO actions (request_id, action_id, label, status, message, created_at) "
            "VALUES (?, ?, ?, ?, ?, ?)",
            row,
        )
def get_recent_snapshots(request_id: str, limit: int = 10) -> list[dict[str, Any]]:
    """Most recent snapshots for a request, newest first; payload is decoded JSON."""
    sql = (
        "SELECT request_id, state, state_reason, created_at, payload_json "
        "FROM snapshots WHERE request_id = ? ORDER BY id DESC LIMIT ?"
    )
    with _connect() as conn:
        rows = conn.execute(sql, (request_id, limit)).fetchall()
    return [
        {
            "request_id": req_id,
            "state": state,
            "state_reason": reason,
            "created_at": created,
            "payload": json.loads(payload),
        }
        for req_id, state, reason, created, payload in rows
    ]
def get_recent_actions(request_id: str, limit: int = 10) -> list[dict[str, Any]]:
    """Most recent audit-log entries for a request, newest first."""
    sql = (
        "SELECT request_id, action_id, label, status, message, created_at "
        "FROM actions WHERE request_id = ? ORDER BY id DESC LIMIT ?"
    )
    with _connect() as conn:
        rows = conn.execute(sql, (request_id, limit)).fetchall()
    columns = ("request_id", "action_id", "label", "status", "message", "created_at")
    return [dict(zip(columns, row)) for row in rows]
def ensure_admin_user() -> None:
    """Create the bootstrap admin account from settings if it does not exist yet."""
    username, password = settings.admin_username, settings.admin_password
    if not username or not password:
        return  # admin bootstrap disabled
    if get_user_by_username(username) is None:
        create_user(username, password, role="admin")
def create_user(username: str, password: str, role: str = "user", auth_provider: str = "local") -> None:
    """Insert a new user row; the username UNIQUE constraint raises on duplicates."""
    row = (
        username,
        hash_password(password),
        role,
        auth_provider,
        datetime.now(timezone.utc).isoformat(),
    )
    with _connect() as conn:
        conn.execute(
            "INSERT INTO users (username, password_hash, role, auth_provider, created_at) "
            "VALUES (?, ?, ?, ?, ?)",
            row,
        )
def create_user_if_missing(
    username: str, password: str, role: str = "user", auth_provider: str = "local"
) -> bool:
    """Insert a user unless the username already exists.

    Returns True when a row was actually inserted (INSERT OR IGNORE reports
    0 affected rows on a duplicate).
    """
    row = (
        username,
        hash_password(password),
        role,
        auth_provider,
        datetime.now(timezone.utc).isoformat(),
    )
    with _connect() as conn:
        cursor = conn.execute(
            "INSERT OR IGNORE INTO users (username, password_hash, role, auth_provider, created_at) "
            "VALUES (?, ?, ?, ?, ?)",
            row,
        )
        return cursor.rowcount > 0
def get_user_by_username(username: str) -> Optional[Dict[str, Any]]:
    """Load one user row as a dict (is_blocked coerced to bool), or None."""
    columns = (
        "id", "username", "password_hash", "role",
        "auth_provider", "created_at", "last_login_at", "is_blocked",
    )
    with _connect() as conn:
        row = conn.execute(
            f"SELECT {', '.join(columns)} FROM users WHERE username = ?",
            (username,),
        ).fetchone()
    if row is None:
        return None
    user = dict(zip(columns, row))
    user["is_blocked"] = bool(user["is_blocked"])
    return user
def get_all_users() -> list[Dict[str, Any]]:
    """All users (no password hashes) ordered case-insensitively by username."""
    columns = (
        "id", "username", "role", "auth_provider",
        "created_at", "last_login_at", "is_blocked",
    )
    with _connect() as conn:
        rows = conn.execute(
            f"SELECT {', '.join(columns)} FROM users ORDER BY username COLLATE NOCASE"
        ).fetchall()
    users: list[Dict[str, Any]] = []
    for row in rows:
        record = dict(zip(columns, row))
        record["is_blocked"] = bool(record["is_blocked"])
        users.append(record)
    return users
def set_last_login(username: str) -> None:
    """Stamp the user's last successful login with the current UTC time."""
    now_iso = datetime.now(timezone.utc).isoformat()
    with _connect() as conn:
        conn.execute(
            "UPDATE users SET last_login_at = ? WHERE username = ?",
            (now_iso, username),
        )
def set_user_blocked(username: str, blocked: bool) -> None:
    """Set or clear the user's blocked flag (stored as 0/1)."""
    flag = int(blocked)
    with _connect() as conn:
        conn.execute(
            "UPDATE users SET is_blocked = ? WHERE username = ?",
            (flag, username),
        )
def set_user_role(username: str, role: str) -> None:
    """Update the user's role (e.g. 'user' or 'admin'); no-op for unknown users."""
    with _connect() as conn:
        conn.execute(
            "UPDATE users SET role = ? WHERE username = ?",
            (role, username),
        )
def verify_user_password(username: str, password: str) -> Optional[Dict[str, Any]]:
    """Return the user dict when the password matches; None on unknown user or mismatch."""
    user = get_user_by_username(username)
    if user and verify_password(password, user["password_hash"]):
        return user
    return None
def set_user_password(username: str, password: str) -> None:
    """Replace the user's password hash; no-op for unknown users."""
    hashed = hash_password(password)
    with _connect() as conn:
        conn.execute(
            "UPDATE users SET password_hash = ? WHERE username = ?",
            (hashed, username),
        )
def _backfill_auth_providers() -> None:
    """One-time migration: infer auth_provider for legacy user rows.

    Rows still marked 'local' are probed by verifying their stored hash
    against the literal passwords "jellyfin-user" / "jellyseerr-user" —
    presumably the placeholder passwords assigned when users were created
    via external (Jellyfin/Jellyseerr) login before auth_provider existed;
    TODO confirm against the login handlers.
    """
    with _connect() as conn:
        rows = conn.execute(
            """
            SELECT username, password_hash, auth_provider
            FROM users
            """
        ).fetchall()
    # Collect (new_provider, username) pairs needing an update.
    updates: list[tuple[str, str]] = []
    for row in rows:
        username, password_hash, auth_provider = row
        provider = auth_provider or "local"
        if provider == "local":
            # Probe the stored hash against the known placeholder passwords.
            if verify_password("jellyfin-user", password_hash):
                provider = "jellyfin"
            elif verify_password("jellyseerr-user", password_hash):
                provider = "jellyseerr"
        if provider != auth_provider:
            updates.append((provider, username))
    if not updates:
        return
    # Second connection: writes happen after all reads are done.
    with _connect() as conn:
        conn.executemany(
            """
            UPDATE users SET auth_provider = ? WHERE username = ?
            """,
            updates,
        )
def upsert_request_cache(
    request_id: int,
    media_id: Optional[int],
    media_type: Optional[str],
    status: Optional[int],
    title: Optional[str],
    year: Optional[int],
    requested_by: Optional[str],
    requested_by_norm: Optional[str],  # normalized requester name used for filtering
    created_at: Optional[str],
    updated_at: Optional[str],
    payload_json: str,  # full upstream record, pre-serialized JSON
) -> None:
    """Insert or fully overwrite one row of the Jellyseerr request cache.

    Uses SQLite UPSERT (ON CONFLICT ... DO UPDATE) keyed on request_id so the
    row always reflects the latest sync; every column is replaced.
    """
    with _connect() as conn:
        conn.execute(
            """
            INSERT INTO requests_cache (
                request_id,
                media_id,
                media_type,
                status,
                title,
                year,
                requested_by,
                requested_by_norm,
                created_at,
                updated_at,
                payload_json
            )
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            ON CONFLICT(request_id) DO UPDATE SET
                media_id = excluded.media_id,
                media_type = excluded.media_type,
                status = excluded.status,
                title = excluded.title,
                year = excluded.year,
                requested_by = excluded.requested_by,
                requested_by_norm = excluded.requested_by_norm,
                created_at = excluded.created_at,
                updated_at = excluded.updated_at,
                payload_json = excluded.payload_json
            """,
            (
                request_id,
                media_id,
                media_type,
                status,
                title,
                year,
                requested_by,
                requested_by_norm,
                created_at,
                updated_at,
                payload_json,
            ),
        )
    logger.debug(
        "requests_cache upsert: request_id=%s media_id=%s status=%s updated_at=%s",
        request_id,
        media_id,
        status,
        updated_at,
    )
def get_request_cache_last_updated() -> Optional[str]:
    """Newest updated_at value across the cache, or None when the table is empty."""
    with _connect() as conn:
        row = conn.execute("SELECT MAX(updated_at) FROM requests_cache").fetchone()
    # MAX() yields a single-row result; its value is NULL for an empty table.
    return row[0] if row else None
def get_request_cache_by_id(request_id: int) -> Optional[Dict[str, Any]]:
    """Lightweight cache probe: returns {request_id, updated_at}, or None on a miss."""
    with _connect() as conn:
        row = conn.execute(
            "SELECT request_id, updated_at FROM requests_cache WHERE request_id = ?",
            (request_id,),
        ).fetchone()
    if row is None:
        logger.debug("requests_cache miss: request_id=%s", request_id)
        return None
    logger.debug("requests_cache hit: request_id=%s updated_at=%s", row[0], row[1])
    return {"request_id": row[0], "updated_at": row[1]}
def get_request_cache_payload(request_id: int) -> Optional[Dict[str, Any]]:
    """Decode the cached upstream payload for one request; None on miss or bad JSON."""
    with _connect() as conn:
        row = conn.execute(
            "SELECT payload_json FROM requests_cache WHERE request_id = ?",
            (request_id,),
        ).fetchone()
    raw = row[0] if row else None
    if not raw:
        logger.debug("requests_cache payload miss: request_id=%s", request_id)
        return None
    try:
        decoded = json.loads(raw)
    except json.JSONDecodeError:
        # Corrupt cache rows are treated as misses rather than raised.
        logger.warning("requests_cache payload invalid json: request_id=%s", request_id)
        return None
    logger.debug("requests_cache payload hit: request_id=%s", request_id)
    return decoded
def get_cached_requests(
    limit: int,
    offset: int,
    requested_by_norm: Optional[str] = None,
    since_iso: Optional[str] = None,
) -> list[Dict[str, Any]]:
    """Page through cached requests, newest first, with optional filters.

    ``requested_by_norm`` scopes to one requester; ``since_iso`` keeps rows
    created at/after the given ISO timestamp.
    """
    columns = (
        "request_id", "media_id", "media_type", "status",
        "title", "year", "requested_by", "created_at",
    )
    clauses: list[str] = []
    args: list[Any] = []
    if requested_by_norm:
        clauses.append("requested_by_norm = ?")
        args.append(requested_by_norm)
    if since_iso:
        clauses.append("created_at >= ?")
        args.append(since_iso)
    where = f" WHERE {' AND '.join(clauses)}" if clauses else ""
    sql = (
        f"SELECT {', '.join(columns)} FROM requests_cache{where} "
        "ORDER BY created_at DESC, request_id DESC LIMIT ? OFFSET ?"
    )
    args.extend([limit, offset])
    with _connect() as conn:
        rows = conn.execute(sql, tuple(args)).fetchall()
    logger.debug(
        "requests_cache list: count=%s requested_by_norm=%s since_iso=%s",
        len(rows),
        requested_by_norm,
        since_iso,
    )
    return [dict(zip(columns, row)) for row in rows]
def get_request_cache_overview(limit: int = 50) -> list[Dict[str, Any]]:
    """Most recently updated cache rows for admin views (limit clamped to 1..200)."""
    clamped = min(max(limit, 1), 200)
    columns = (
        "request_id", "media_id", "media_type", "status", "title",
        "year", "requested_by", "created_at", "updated_at",
    )
    with _connect() as conn:
        rows = conn.execute(
            f"SELECT {', '.join(columns)} FROM requests_cache "
            "ORDER BY updated_at DESC, request_id DESC LIMIT ?",
            (clamped,),
        ).fetchall()
    return [dict(zip(columns, row)) for row in rows]
def get_request_cache_count() -> int:
    """Total number of cached request rows."""
    with _connect() as conn:
        (count,) = conn.execute("SELECT COUNT(*) FROM requests_cache").fetchone()
    return int(count or 0)
def prune_duplicate_requests_cache() -> int:
    """Delete duplicate cache rows, keeping one row per (media_id, requester).

    For each media_id + normalized-requester group the row with the highest
    request_id (presumably the most recent request — confirm against the sync
    code) is kept; rows with a NULL media_id are never touched. Returns the
    number of deleted rows.
    """
    with _connect() as conn:
        cursor = conn.execute(
            """
            DELETE FROM requests_cache
            WHERE media_id IS NOT NULL
              AND request_id NOT IN (
                  SELECT MAX(request_id)
                  FROM requests_cache
                  WHERE media_id IS NOT NULL
                  GROUP BY media_id, COALESCE(requested_by_norm, '')
              )
            """
        )
        return cursor.rowcount
def get_request_cache_payloads(limit: int = 200, offset: int = 0) -> list[Dict[str, Any]]:
    """Page through raw cached payloads in ascending request-id order.

    ``limit`` is clamped to 1..1000 and ``offset`` to >= 0; undecodable or
    empty payloads yield ``payload=None`` rather than raising.
    """
    page = max(1, min(limit, 1000))
    start = max(0, offset)
    with _connect() as conn:
        rows = conn.execute(
            "SELECT request_id, payload_json FROM requests_cache "
            "ORDER BY request_id ASC LIMIT ? OFFSET ?",
            (page, start),
        ).fetchall()
    out: list[Dict[str, Any]] = []
    for req_id, raw in rows:
        try:
            decoded = json.loads(raw) if raw else None
        except json.JSONDecodeError:
            decoded = None  # tolerate corrupt rows instead of failing the page
        out.append({"request_id": req_id, "payload": decoded})
    return out
def get_cached_requests_since(since_iso: str) -> list[Dict[str, Any]]:
    """Return cached requests created at or after *since_iso*, newest first."""
    with _connect() as conn:
        rows = conn.execute(
            """
            SELECT request_id, media_id, media_type, status, title, year, requested_by, requested_by_norm, created_at
            FROM requests_cache
            WHERE created_at >= ?
            ORDER BY created_at DESC, request_id DESC
            """,
            (since_iso,),
        ).fetchall()
    keys = (
        "request_id",
        "media_id",
        "media_type",
        "status",
        "title",
        "year",
        "requested_by",
        "requested_by_norm",
        "created_at",
    )
    return [dict(zip(keys, row)) for row in rows]
def get_cached_request_by_media_id(
    media_id: int, requested_by_norm: Optional[str] = None
) -> Optional[Dict[str, Any]]:
    """Look up the newest cached request for *media_id*.

    When requested_by_norm is given, the match is additionally restricted to
    that (normalized) requester. Returns {"request_id", "status"} or None.
    """
    query = """
    SELECT request_id, status
    FROM requests_cache
    WHERE media_id = ?
    """
    bind_values: list[Any] = [media_id]
    if requested_by_norm:
        query += " AND requested_by_norm = ?"
        bind_values.append(requested_by_norm)
    query += " ORDER BY created_at DESC, request_id DESC LIMIT 1"
    with _connect() as conn:
        match = conn.execute(query, tuple(bind_values)).fetchone()
    if match is None:
        return None
    request_id, request_status = match
    return {"request_id": request_id, "status": request_status}
def get_setting(key: str) -> Optional[str]:
    """Return the stored settings value for *key*, or None when absent."""
    with _connect() as conn:
        row = conn.execute(
            """
            SELECT value FROM settings WHERE key = ?
            """,
            (key,),
        ).fetchone()
    return row[0] if row else None
def set_setting(key: str, value: Optional[str]) -> None:
    """Insert or update a settings row, stamping updated_at with UTC now."""
    stamp = datetime.now(timezone.utc).isoformat()
    with _connect() as conn:
        conn.execute(
            """
            INSERT INTO settings (key, value, updated_at)
            VALUES (?, ?, ?)
            ON CONFLICT(key) DO UPDATE SET value = excluded.value, updated_at = excluded.updated_at
            """,
            (key, value, stamp),
        )
def delete_setting(key: str) -> None:
    """Remove the settings row for *key*; a missing key is a no-op."""
    with _connect() as conn:
        conn.execute(
            """
            DELETE FROM settings WHERE key = ?
            """,
            (key,),
        )
def get_settings_overrides() -> Dict[str, str]:
    """Return all settings rows as a {key: value} dict, skipping falsy keys."""
    with _connect() as conn:
        rows = conn.execute(
            """
            SELECT key, value FROM settings
            """
        ).fetchall()
    return {key: value for key, value in rows if key}
def run_integrity_check() -> str:
    """Run PRAGMA integrity_check; returns its first result or "unknown"."""
    with _connect() as conn:
        row = conn.execute("PRAGMA integrity_check").fetchone()
    return str(row[0]) if row else "unknown"
def vacuum_db() -> None:
    """Rebuild the SQLite database file (VACUUM) to reclaim free pages."""
    with _connect() as conn:
        conn.execute("VACUUM")
def clear_requests_cache() -> int:
    """Delete every row in requests_cache and return how many were removed."""
    with _connect() as conn:
        removed = conn.execute("DELETE FROM requests_cache")
    return removed.rowcount
def clear_history() -> Dict[str, int]:
    """Wipe the actions and snapshots tables; returns per-table delete counts."""
    with _connect() as conn:
        deleted_actions = conn.execute("DELETE FROM actions").rowcount
        deleted_snapshots = conn.execute("DELETE FROM snapshots").rowcount
    return {"actions": deleted_actions, "snapshots": deleted_snapshots}
def cleanup_history(days: int) -> Dict[str, int]:
    """Delete actions/snapshots older than *days* days.

    A non-positive *days* is a no-op that returns zero counts.
    """
    if days <= 0:
        return {"actions": 0, "snapshots": 0}
    threshold = (datetime.now(timezone.utc) - timedelta(days=days)).isoformat()
    with _connect() as conn:
        removed_actions = conn.execute(
            "DELETE FROM actions WHERE created_at < ?",
            (threshold,),
        ).rowcount
        removed_snapshots = conn.execute(
            "DELETE FROM snapshots WHERE created_at < ?",
            (threshold,),
        ).rowcount
    return {"actions": removed_actions, "snapshots": removed_snapshots}

View File

@@ -0,0 +1,41 @@
import logging
import os
from logging.handlers import RotatingFileHandler
from typing import Optional
def configure_logging(log_level: Optional[str], log_file: Optional[str]) -> None:
    """(Re)configure the root and uvicorn loggers.

    Installs a stream handler plus, when *log_file* is set, a rotating file
    handler (2 MB x 3 backups, UTF-8). Any handlers already on the root
    logger are removed first so repeated calls don't duplicate output.
    Unknown level names fall back to INFO.
    """
    level = getattr(logging, (log_level or "INFO").upper(), logging.INFO)

    new_handlers: list[logging.Handler] = [logging.StreamHandler()]
    if log_file:
        path = log_file if os.path.isabs(log_file) else os.path.join(os.getcwd(), log_file)
        os.makedirs(os.path.dirname(path), exist_ok=True)
        new_handlers.append(
            RotatingFileHandler(path, maxBytes=2_000_000, backupCount=3, encoding="utf-8")
        )

    formatter = logging.Formatter(
        fmt="%(asctime)s | %(levelname)s | %(name)s | %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )
    root = logging.getLogger()
    for stale in list(root.handlers):
        root.removeHandler(stale)
    for handler in new_handlers:
        handler.setFormatter(formatter)
        root.addHandler(handler)
    root.setLevel(level)
    # Keep uvicorn's loggers in lockstep with the application level.
    for name in ("uvicorn", "uvicorn.error", "uvicorn.access"):
        logging.getLogger(name).setLevel(level)

56
backend/app/main.py Normal file
View File

@@ -0,0 +1,56 @@
import asyncio
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from .config import settings
from .db import init_db
from .routers.requests import (
router as requests_router,
startup_warmup_requests_cache,
run_requests_delta_loop,
run_daily_requests_full_sync,
run_daily_db_cleanup,
)
from .routers.auth import router as auth_router
from .routers.admin import router as admin_router
from .routers.images import router as images_router
from .routers.branding import router as branding_router
from .routers.status import router as status_router
from .services.jellyfin_sync import run_daily_jellyfin_sync
from .logging_config import configure_logging
from .runtime import get_runtime_settings
# FastAPI application instance; the title comes from the configured app name.
app = FastAPI(title=settings.app_name)
# CORS: allow exactly the one configured frontend origin, with credentials
# (cookies / Authorization headers) and any method/header.
app.add_middleware(
    CORSMiddleware,
    allow_origins=[settings.cors_allow_origin],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
@app.get("/health")
async def health() -> dict:
    """Liveness probe: always reports an OK status."""
    return dict(status="ok")
# Strong references to background tasks: asyncio.create_task only keeps a
# weak reference, so tasks with no other reference can be garbage-collected
# before they finish.
_background_tasks: set[asyncio.Task] = set()


def _spawn(coro) -> None:
    """Schedule *coro* as a task and retain it until it completes."""
    task = asyncio.create_task(coro)
    _background_tasks.add(task)
    task.add_done_callback(_background_tasks.discard)


@app.on_event("startup")
async def startup() -> None:
    """Initialize the database and logging, then launch the background jobs."""
    init_db()
    runtime = get_runtime_settings()
    configure_logging(runtime.log_level, runtime.log_file)
    _spawn(run_daily_jellyfin_sync())
    _spawn(startup_warmup_requests_cache())
    _spawn(run_requests_delta_loop())
    _spawn(run_daily_requests_full_sync())
    _spawn(run_daily_db_cleanup())
# Mount all feature routers on the application root.
app.include_router(requests_router)
app.include_router(auth_router)
app.include_router(admin_router)
app.include_router(images_router)
app.include_router(branding_router)
app.include_router(status_router)

65
backend/app/models.py Normal file
View File

@@ -0,0 +1,65 @@
from enum import Enum
from typing import Any, Dict, List, Optional
from pydantic import BaseModel, Field
class RequestType(str, Enum):
    """Kind of media a request refers to."""
    movie = "movie"
    tv = "tv"
    unknown = "unknown"  # fallback when the upstream type cannot be determined
class NormalizedState(str, Enum):
    """Normalized lifecycle state of a request across the media pipeline."""
    requested = "REQUESTED"  # waiting for approval
    approved = "APPROVED"
    needs_add = "NEEDS_ADD"  # approved but not yet created in Sonarr/Radarr
    added_to_arr = "ADDED_TO_ARR"
    searching = "SEARCHING"
    grabbed = "GRABBED"
    downloading = "DOWNLOADING"
    importing = "IMPORTING"
    completed = "COMPLETED"
    failed = "FAILED"
    available = "AVAILABLE"
    unknown = "UNKNOWN"  # state could not be determined
class TimelineHop(BaseModel):
    """One step of a request's journey through a single service."""
    service: str
    status: str
    details: Dict[str, Any] = Field(default_factory=dict)
    timestamp: Optional[str] = None  # ISO timestamp when known
class ActionOption(BaseModel):
    """An action a user may take on a request, with its risk level."""
    id: str
    label: str
    risk: str  # e.g. "low" / "medium" as used elsewhere in this module's consumers
    requires_confirmation: bool = True
class Snapshot(BaseModel):
    """Point-in-time view of a single media request and its pipeline status."""
    request_id: str
    title: str
    year: Optional[int] = None
    request_type: RequestType = RequestType.unknown
    state: NormalizedState = NormalizedState.unknown
    # Optional human-readable explanation for the current state.
    state_reason: Optional[str] = None
    # Per-service history of the request.
    timeline: List[TimelineHop] = Field(default_factory=list)
    # Actions currently offered for this request.
    actions: List[ActionOption] = Field(default_factory=list)
    artwork: Dict[str, Any] = Field(default_factory=dict)
    # Unprocessed upstream payload data.
    raw: Dict[str, Any] = Field(default_factory=dict)
class TriageRecommendation(BaseModel):
    """A single suggested action produced by triage, with its rationale."""
    action_id: str
    title: str
    reason: str
    risk: str
class TriageResult(BaseModel):
    """Outcome of triaging a snapshot: diagnosis plus suggested actions."""
    summary: str
    confidence: float  # 0.0-1.0 confidence in the diagnosis
    root_cause: str
    recommendations: List[TriageRecommendation]

View File

@@ -0,0 +1,367 @@
from typing import Any, Dict, List
import os
from fastapi import APIRouter, HTTPException, Depends, UploadFile, File
from ..auth import require_admin
from ..config import settings as env_settings
from ..db import (
delete_setting,
get_all_users,
get_request_cache_overview,
get_settings_overrides,
get_user_by_username,
set_setting,
set_user_blocked,
set_user_password,
set_user_role,
run_integrity_check,
vacuum_db,
clear_requests_cache,
clear_history,
cleanup_history,
)
from ..runtime import get_runtime_settings
from ..clients.sonarr import SonarrClient
from ..clients.radarr import RadarrClient
from ..clients.jellyfin import JellyfinClient
from ..clients.jellyseerr import JellyseerrClient
from ..services.jellyfin_sync import sync_jellyfin_users
import logging
from ..logging_config import configure_logging
from ..routers import requests as requests_router
from ..routers.branding import save_branding_image
# Every /admin route requires an authenticated admin (router-level dependency).
router = APIRouter(prefix="/admin", tags=["admin"], dependencies=[Depends(require_admin)])
logger = logging.getLogger(__name__)
# Settings whose values must never be echoed back to the client.
SENSITIVE_KEYS = {
    "jellyseerr_api_key",
    "jellyfin_api_key",
    "sonarr_api_key",
    "radarr_api_key",
    "prowlarr_api_key",
    "qbittorrent_password",
}
# The complete set of keys accepted by GET/PUT /admin/settings.
SETTING_KEYS: List[str] = [
    "jellyseerr_base_url",
    "jellyseerr_api_key",
    "jellyfin_base_url",
    "jellyfin_api_key",
    "jellyfin_public_url",
    "jellyfin_sync_to_arr",
    "artwork_cache_mode",
    "sonarr_base_url",
    "sonarr_api_key",
    "sonarr_quality_profile_id",
    "sonarr_root_folder",
    "radarr_base_url",
    "radarr_api_key",
    "radarr_quality_profile_id",
    "radarr_root_folder",
    "prowlarr_base_url",
    "prowlarr_api_key",
    "qbittorrent_base_url",
    "qbittorrent_username",
    "qbittorrent_password",
    "log_level",
    "log_file",
    "requests_sync_ttl_minutes",
    "requests_poll_interval_seconds",
    "requests_delta_sync_interval_minutes",
    "requests_full_sync_time",
    "requests_cleanup_time",
    "requests_cleanup_days",
    "requests_data_source",
]
def _normalize_root_folders(folders: Any) -> List[Dict[str, Any]]:
if not isinstance(folders, list):
return []
results = []
for folder in folders:
if not isinstance(folder, dict):
continue
folder_id = folder.get("id")
path = folder.get("path")
if folder_id is None or path is None:
continue
results.append({"id": folder_id, "path": path, "label": path})
return results
def _normalize_quality_profiles(profiles: Any) -> List[Dict[str, Any]]:
if not isinstance(profiles, list):
return []
results = []
for profile in profiles:
if not isinstance(profile, dict):
continue
profile_id = profile.get("id")
name = profile.get("name")
if profile_id is None or name is None:
continue
results.append({"id": profile_id, "name": name, "label": name})
return results
@router.get("/settings")
async def list_settings() -> Dict[str, Any]:
    """Describe every known setting: value (masked when sensitive), origin, set-ness."""
    overrides = get_settings_overrides()
    described = []
    for key in SETTING_KEYS:
        from_db = key in overrides
        value = overrides[key] if from_db else getattr(env_settings, key)
        has_value = value is not None and str(value).strip() != ""
        is_sensitive = key in SENSITIVE_KEYS
        if from_db:
            source = "db"
        elif has_value:
            source = "env"
        else:
            source = "unset"
        described.append(
            {
                "key": key,
                "value": None if is_sensitive else value,
                "isSet": has_value,
                "source": source,
                "sensitive": is_sensitive,
            }
        )
    return {"settings": described}
@router.put("/settings")
async def update_settings(payload: Dict[str, Any]) -> Dict[str, Any]:
    """Apply setting overrides from the request body.

    Per key: None is ignored, an empty/whitespace string deletes the stored
    override (reverting to the env default), anything else is stored as its
    string form. Unknown keys are rejected with HTTP 400. Logging is
    reconfigured whenever log_level/log_file change — including when they
    are cleared (previously the delete path skipped the reconfigure).
    """
    updates = 0
    touched_logging = False
    for key, value in payload.items():
        if key not in SETTING_KEYS:
            raise HTTPException(status_code=400, detail=f"Unknown setting: {key}")
        if value is None:
            continue
        if isinstance(value, str) and value.strip() == "":
            delete_setting(key)
        else:
            set_setting(key, str(value))
        updates += 1
        if key in {"log_level", "log_file"}:
            # Fix: flag on both the set and delete paths so clearing a log
            # override actually reverts the live logging configuration.
            touched_logging = True
    if touched_logging:
        runtime = get_runtime_settings()
        configure_logging(runtime.log_level, runtime.log_file)
    return {"status": "ok", "updated": updates}
@router.get("/sonarr/options")
async def sonarr_options() -> Dict[str, Any]:
    """Expose Sonarr root folders and quality profiles for settings dropdowns."""
    runtime = get_runtime_settings()
    client = SonarrClient(runtime.sonarr_base_url, runtime.sonarr_api_key)
    if not client.configured():
        raise HTTPException(status_code=400, detail="Sonarr not configured")
    return {
        "rootFolders": _normalize_root_folders(await client.get_root_folders()),
        "qualityProfiles": _normalize_quality_profiles(await client.get_quality_profiles()),
    }
@router.get("/radarr/options")
async def radarr_options() -> Dict[str, Any]:
    """Expose Radarr root folders and quality profiles for settings dropdowns."""
    runtime = get_runtime_settings()
    client = RadarrClient(runtime.radarr_base_url, runtime.radarr_api_key)
    if not client.configured():
        raise HTTPException(status_code=400, detail="Radarr not configured")
    return {
        "rootFolders": _normalize_root_folders(await client.get_root_folders()),
        "qualityProfiles": _normalize_quality_profiles(await client.get_quality_profiles()),
    }
@router.get("/jellyfin/users")
async def jellyfin_users() -> Dict[str, Any]:
    """List Jellyfin users (id, name, password flag, last login)."""
    runtime = get_runtime_settings()
    client = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key)
    if not client.configured():
        raise HTTPException(status_code=400, detail="Jellyfin not configured")
    users = await client.get_users()
    if not isinstance(users, list):
        return {"users": []}
    summarized = [
        {
            "id": entry.get("Id"),
            "name": entry.get("Name"),
            "hasPassword": entry.get("HasPassword"),
            "lastLoginDate": entry.get("LastLoginDate"),
        }
        for entry in users
        if isinstance(entry, dict)
    ]
    return {"users": summarized}
@router.post("/jellyfin/users/sync")
async def jellyfin_users_sync() -> Dict[str, Any]:
    """Import Jellyfin users into the local user table; reports how many were new."""
    return {"status": "ok", "imported": await sync_jellyfin_users()}
@router.post("/requests/sync")
async def requests_sync() -> Dict[str, Any]:
    """Kick off a full Jellyseerr requests sync and return its initial state."""
    runtime = get_runtime_settings()
    if not JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key).configured():
        raise HTTPException(status_code=400, detail="Jellyseerr not configured")
    state = await requests_router.start_requests_sync(
        runtime.jellyseerr_base_url, runtime.jellyseerr_api_key
    )
    logger.info("Admin triggered requests sync: status=%s", state.get("status"))
    return {"status": "ok", "sync": state}
@router.post("/requests/sync/delta")
async def requests_sync_delta() -> Dict[str, Any]:
    """Kick off an incremental (delta) Jellyseerr requests sync."""
    runtime = get_runtime_settings()
    if not JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key).configured():
        raise HTTPException(status_code=400, detail="Jellyseerr not configured")
    state = await requests_router.start_requests_delta_sync(
        runtime.jellyseerr_base_url, runtime.jellyseerr_api_key
    )
    logger.info("Admin triggered delta requests sync: status=%s", state.get("status"))
    return {"status": "ok", "sync": state}
@router.post("/requests/artwork/prefetch")
async def requests_artwork_prefetch() -> Dict[str, Any]:
    """Start prefetching artwork for cached requests; returns the job state."""
    runtime = get_runtime_settings()
    prefetch_state = await requests_router.start_artwork_prefetch(
        runtime.jellyseerr_base_url, runtime.jellyseerr_api_key
    )
    logger.info("Admin triggered artwork prefetch: status=%s", prefetch_state.get("status"))
    return {"status": "ok", "prefetch": prefetch_state}
@router.get("/requests/artwork/status")
async def requests_artwork_status() -> Dict[str, Any]:
    """Report the current artwork prefetch job state."""
    prefetch_state = requests_router.get_artwork_prefetch_state()
    return {"status": "ok", "prefetch": prefetch_state}
@router.get("/requests/sync/status")
async def requests_sync_status() -> Dict[str, Any]:
    """Report the current requests sync job state."""
    sync_state = requests_router.get_requests_sync_state()
    return {"status": "ok", "sync": sync_state}
@router.get("/logs")
async def read_logs(lines: int = 200) -> Dict[str, Any]:
    """Return the last *lines* lines (clamped to 1..1000) of the log file."""
    runtime = get_runtime_settings()
    log_file = runtime.log_file
    if not log_file:
        raise HTTPException(status_code=400, detail="Log file not configured")
    if not os.path.isabs(log_file):
        log_file = os.path.join(os.getcwd(), log_file)
    if not os.path.exists(log_file):
        raise HTTPException(status_code=404, detail="Log file not found")
    wanted = min(max(lines, 1), 1000)
    from collections import deque

    # deque with maxlen keeps only the trailing lines without loading the file twice.
    with open(log_file, "r", encoding="utf-8", errors="replace") as handle:
        tail = deque(handle, maxlen=wanted)
    return {"lines": list(tail)}
@router.get("/requests/cache")
async def requests_cache(limit: int = 50) -> Dict[str, Any]:
    """Peek at the newest rows of the requests cache table."""
    rows = get_request_cache_overview(limit)
    return {"rows": rows}
@router.post("/branding/logo")
async def upload_branding_logo(file: UploadFile = File(...)) -> Dict[str, Any]:
    """Accept an uploaded image and store it as the branding logo/favicon."""
    result = await save_branding_image(file)
    return result
@router.post("/maintenance/repair")
async def repair_database() -> Dict[str, Any]:
    """Run an integrity check, then VACUUM the database."""
    integrity = run_integrity_check()
    vacuum_db()
    logger.info("Database repair executed: integrity_check=%s", integrity)
    return {"status": "ok", "integrity": integrity}
@router.post("/maintenance/flush")
async def flush_database() -> Dict[str, Any]:
    """Drop all cached requests and history, and forget the last-sync marker."""
    requests_cleared = clear_requests_cache()
    history_cleared = clear_history()
    delete_setting("requests_sync_last_at")
    logger.warning(
        "Database flush executed: requests_cache=%s history=%s", requests_cleared, history_cleared
    )
    return {"status": "ok", "requestsCleared": requests_cleared, "historyCleared": history_cleared}
@router.post("/maintenance/cleanup")
async def cleanup_database(days: int = 90) -> Dict[str, Any]:
    """Delete history entries older than *days* days."""
    cleared = cleanup_history(days)
    logger.info("Database cleanup executed: days=%s result=%s", days, cleared)
    return {"status": "ok", "days": days, "cleared": cleared}
@router.post("/maintenance/logs/clear")
async def clear_logs() -> Dict[str, Any]:
    """Truncate the configured log file to zero bytes."""
    runtime = get_runtime_settings()
    target = runtime.log_file
    if not target:
        raise HTTPException(status_code=400, detail="Log file not configured")
    if not os.path.isabs(target):
        target = os.path.join(os.getcwd(), target)
    try:
        os.makedirs(os.path.dirname(target), exist_ok=True)
        # Opening for write truncates the file in place.
        open(target, "w", encoding="utf-8").close()
    except OSError as exc:
        raise HTTPException(status_code=500, detail=str(exc)) from exc
    logger.info("Log file cleared")
    return {"status": "ok"}
@router.get("/users")
async def list_users() -> Dict[str, Any]:
    """Return every user record known to the application."""
    return {"users": get_all_users()}
@router.post("/users/{username}/block")
async def block_user(username: str) -> Dict[str, Any]:
    """Prevent *username* from signing in."""
    outcome = {"status": "ok", "username": username, "blocked": True}
    set_user_blocked(username, True)
    return outcome
@router.post("/users/{username}/unblock")
async def unblock_user(username: str) -> Dict[str, Any]:
    """Re-allow *username* to sign in."""
    outcome = {"status": "ok", "username": username, "blocked": False}
    set_user_blocked(username, False)
    return outcome
@router.post("/users/{username}/role")
async def update_user_role(username: str, payload: Dict[str, Any]) -> Dict[str, Any]:
    """Set a user's role to either "admin" or "user"."""
    allowed_roles = {"admin", "user"}
    role = payload.get("role")
    if role not in allowed_roles:
        raise HTTPException(status_code=400, detail="Invalid role")
    set_user_role(username, role)
    return {"status": "ok", "username": username, "role": role}
@router.post("/users/{username}/password")
async def update_user_password(username: str, payload: Dict[str, Any]) -> Dict[str, Any]:
    """Admin reset of a local user's password (min 8 chars after trimming)."""
    candidate = payload.get("password") if isinstance(payload, dict) else None
    if not isinstance(candidate, str) or len(candidate.strip()) < 8:
        raise HTTPException(status_code=400, detail="Password must be at least 8 characters.")
    account = get_user_by_username(username)
    if not account:
        raise HTTPException(status_code=404, detail="User not found")
    if account.get("auth_provider") != "local":
        raise HTTPException(
            status_code=400, detail="Password changes are only available for local users."
        )
    set_user_password(username, candidate.strip())
    return {"status": "ok", "username": username}

114
backend/app/routers/auth.py Normal file
View File

@@ -0,0 +1,114 @@
from fastapi import APIRouter, HTTPException, status, Depends
from fastapi.security import OAuth2PasswordRequestForm
from ..db import (
verify_user_password,
create_user_if_missing,
set_last_login,
get_user_by_username,
set_user_password,
)
from ..runtime import get_runtime_settings
from ..clients.jellyfin import JellyfinClient
from ..clients.jellyseerr import JellyseerrClient
from ..security import create_access_token
from ..auth import get_current_user
# Authentication endpoints; /auth/me and /auth/password require a valid token.
router = APIRouter(prefix="/auth", tags=["auth"])
@router.post("/login")
async def login(form_data: OAuth2PasswordRequestForm = Depends()) -> dict:
    """Local username/password login; returns a bearer token on success."""
    account = verify_user_password(form_data.username, form_data.password)
    if not account:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid credentials")
    if account.get("is_blocked"):
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User is blocked")
    token = create_access_token(account["username"], account["role"])
    set_last_login(account["username"])
    return {
        "access_token": token,
        "token_type": "bearer",
        "user": {"username": account["username"], "role": account["role"]},
    }
@router.post("/jellyfin/login")
async def jellyfin_login(form_data: OAuth2PasswordRequestForm = Depends()) -> dict:
    """Authenticate against Jellyfin, auto-provisioning a local 'user' account."""
    runtime = get_runtime_settings()
    client = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key)
    if not client.configured():
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Jellyfin not configured")
    try:
        response = await client.authenticate_by_name(form_data.username, form_data.password)
    except Exception as exc:
        raise HTTPException(status_code=status.HTTP_502_BAD_GATEWAY, detail=str(exc)) from exc
    if not isinstance(response, dict) or not response.get("User"):
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid Jellyfin credentials")
    create_user_if_missing(form_data.username, "jellyfin-user", role="user", auth_provider="jellyfin")
    account = get_user_by_username(form_data.username)
    if account and account.get("is_blocked"):
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User is blocked")
    # Best-effort: mirror the full Jellyfin user list into the local table.
    try:
        remote_users = await client.get_users()
        if isinstance(remote_users, list):
            for remote in remote_users:
                if not isinstance(remote, dict):
                    continue
                remote_name = remote.get("Name")
                if isinstance(remote_name, str) and remote_name:
                    create_user_if_missing(remote_name, "jellyfin-user", role="user", auth_provider="jellyfin")
    except Exception:
        pass  # the login itself must not fail because the mirror sync did
    token = create_access_token(form_data.username, "user")
    set_last_login(form_data.username)
    return {"access_token": token, "token_type": "bearer", "user": {"username": form_data.username, "role": "user"}}
@router.post("/jellyseerr/login")
async def jellyseerr_login(form_data: OAuth2PasswordRequestForm = Depends()) -> dict:
    """Authenticate against Jellyseerr and issue a local bearer token."""
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if not client.configured():
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Jellyseerr not configured")
    credentials = {"email": form_data.username, "password": form_data.password}
    try:
        response = await client.post("/api/v1/auth/login", payload=credentials)
    except Exception as exc:
        raise HTTPException(status_code=status.HTTP_502_BAD_GATEWAY, detail=str(exc)) from exc
    if not isinstance(response, dict):
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid Jellyseerr credentials")
    create_user_if_missing(form_data.username, "jellyseerr-user", role="user", auth_provider="jellyseerr")
    account = get_user_by_username(form_data.username)
    if account and account.get("is_blocked"):
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User is blocked")
    token = create_access_token(form_data.username, "user")
    set_last_login(form_data.username)
    return {"access_token": token, "token_type": "bearer", "user": {"username": form_data.username, "role": "user"}}
@router.get("/me")
async def me(current_user: dict = Depends(get_current_user)) -> dict:
    """Return the authenticated caller's user record."""
    return current_user
@router.post("/password")
async def change_password(payload: dict, current_user: dict = Depends(get_current_user)) -> dict:
    """Let a local user rotate their own password after proving the current one."""
    if current_user.get("auth_provider") != "local":
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Password changes are only available for local users.",
        )
    if isinstance(payload, dict):
        old_password = payload.get("current_password")
        new_password = payload.get("new_password")
    else:
        old_password = new_password = None
    if not isinstance(old_password, str) or not isinstance(new_password, str):
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid payload")
    if len(new_password.strip()) < 8:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST, detail="Password must be at least 8 characters."
        )
    if not verify_user_password(current_user["username"], old_password):
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Current password is incorrect")
    set_user_password(current_user["username"], new_password.strip())
    return {"status": "ok"}

View File

@@ -0,0 +1,64 @@
import os
from io import BytesIO
from typing import Any, Dict
from fastapi import APIRouter, HTTPException, UploadFile, File
from fastapi.responses import FileResponse
from PIL import Image
router = APIRouter(prefix="/branding", tags=["branding"])
# Uploaded branding assets live under ./data/branding relative to the working dir.
_BRANDING_DIR = os.path.join(os.getcwd(), "data", "branding")
_LOGO_PATH = os.path.join(_BRANDING_DIR, "logo.png")
_FAVICON_PATH = os.path.join(_BRANDING_DIR, "favicon.ico")
def _ensure_branding_dir() -> None:
    """Create the branding data directory if it does not exist yet."""
    os.makedirs(_BRANDING_DIR, exist_ok=True)
def _resize_image(image: Image.Image, max_size: int = 300) -> Image.Image:
    """Return *image* converted to RGBA and bounded to max_size on each side."""
    rgba = image.convert("RGBA")
    rgba.thumbnail((max_size, max_size))  # thumbnail preserves aspect ratio, in place
    return rgba
@router.get("/logo.png")
async def branding_logo() -> FileResponse:
    """Serve the uploaded logo with a short client-side cache."""
    if not os.path.exists(_LOGO_PATH):
        raise HTTPException(status_code=404, detail="Logo not found")
    return FileResponse(
        _LOGO_PATH,
        media_type="image/png",
        headers={"Cache-Control": "public, max-age=300"},
    )
@router.get("/favicon.ico")
async def branding_favicon() -> FileResponse:
    """Serve the generated favicon with a short client-side cache."""
    if not os.path.exists(_FAVICON_PATH):
        raise HTTPException(status_code=404, detail="Favicon not found")
    return FileResponse(
        _FAVICON_PATH,
        media_type="image/x-icon",
        headers={"Cache-Control": "public, max-age=300"},
    )
async def save_branding_image(file: UploadFile) -> Dict[str, Any]:
    """Validate an uploaded image and persist it as logo.png plus favicon.ico.

    Raises HTTP 400 for non-image content types, empty bodies, or data that
    Pillow cannot open. Returns the stored logo's dimensions.
    """
    content_type = file.content_type or ""
    if not content_type.startswith("image/"):
        raise HTTPException(status_code=400, detail="Please upload an image file.")
    raw = await file.read()
    if not raw:
        raise HTTPException(status_code=400, detail="Uploaded file is empty.")
    try:
        uploaded = Image.open(BytesIO(raw))
    except OSError as exc:
        raise HTTPException(status_code=400, detail="Image file could not be read.") from exc
    _ensure_branding_dir()
    logo = _resize_image(uploaded, 300)
    logo.save(_LOGO_PATH, format="PNG")
    favicon = logo.copy()
    favicon.thumbnail((64, 64))
    try:
        favicon.save(_FAVICON_PATH, format="ICO", sizes=[(32, 32), (64, 64)])
    except OSError:
        # Fall back to default ICO sizes when the explicit size list fails.
        favicon.save(_FAVICON_PATH, format="ICO")
    return {"status": "ok", "width": logo.width, "height": logo.height}

View File

@@ -0,0 +1,82 @@
import os
import re
import mimetypes
from fastapi import APIRouter, HTTPException, Response
from fastapi.responses import FileResponse, RedirectResponse
import httpx
from ..runtime import get_runtime_settings
router = APIRouter(prefix="/images", tags=["images"])
# TMDB image CDN base and the size buckets this proxy is willing to serve.
_TMDB_BASE = "https://image.tmdb.org/t/p"
_ALLOWED_SIZES = {"w92", "w154", "w185", "w342", "w500", "w780", "original"}
def _safe_filename(path: str) -> str:
trimmed = path.strip("/")
trimmed = trimmed.replace("/", "_")
safe = re.sub(r"[^A-Za-z0-9_.-]", "_", trimmed)
return safe or "image"
async def cache_tmdb_image(path: str, size: str = "w342") -> bool:
    """Download one TMDB image into the local artwork cache.

    Returns True when the file exists locally afterwards, False when the
    path/size is invalid or cache mode is disabled. HTTP errors propagate.
    """
    if not path or "://" in path or ".." in path:
        return False
    normalized = path if path.startswith("/") else f"/{path}"
    if size not in _ALLOWED_SIZES:
        return False
    runtime = get_runtime_settings()
    if (runtime.artwork_cache_mode or "remote").lower() != "cache":
        return False
    cache_dir = os.path.join(os.getcwd(), "data", "artwork", "tmdb", size)
    os.makedirs(cache_dir, exist_ok=True)
    file_path = os.path.join(cache_dir, _safe_filename(normalized))
    if os.path.exists(file_path):
        return True  # already cached; skip the network round-trip
    async with httpx.AsyncClient(timeout=20) as client:
        response = await client.get(f"{_TMDB_BASE}/{size}{normalized}")
        response.raise_for_status()
        body = response.content
    with open(file_path, "wb") as handle:
        handle.write(body)
    return True
@router.get("/tmdb")
async def tmdb_image(path: str, size: str = "w342"):
    """Serve a TMDB image, either via redirect or through the local cache.

    In "remote" mode the client is redirected to the TMDB CDN; in "cache"
    mode the image is fetched once into ./data/artwork and served locally.
    """
    if not path or "://" in path or ".." in path:
        raise HTTPException(status_code=400, detail="Invalid image path")
    normalized = path if path.startswith("/") else f"/{path}"
    if size not in _ALLOWED_SIZES:
        raise HTTPException(status_code=400, detail="Invalid size")
    runtime = get_runtime_settings()
    upstream_url = f"{_TMDB_BASE}/{size}{normalized}"
    if (runtime.artwork_cache_mode or "remote").lower() != "cache":
        return RedirectResponse(url=upstream_url)
    cache_dir = os.path.join(os.getcwd(), "data", "artwork", "tmdb", size)
    os.makedirs(cache_dir, exist_ok=True)
    file_path = os.path.join(cache_dir, _safe_filename(normalized))
    headers = {"Cache-Control": "public, max-age=86400"}

    def _serve_cached() -> FileResponse:
        media_type = mimetypes.guess_type(file_path)[0] or "image/jpeg"
        return FileResponse(file_path, media_type=media_type, headers=headers)

    if os.path.exists(file_path):
        return _serve_cached()
    try:
        await cache_tmdb_image(normalized, size)
        if os.path.exists(file_path):
            return _serve_cached()
        raise HTTPException(status_code=502, detail="Image cache failed")
    except httpx.HTTPError as exc:
        raise HTTPException(status_code=502, detail=f"Image fetch failed: {exc}") from exc

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,95 @@
from typing import Any, Dict
import httpx
from fastapi import APIRouter, Depends
from ..auth import get_current_user
from ..runtime import get_runtime_settings
from ..clients.jellyseerr import JellyseerrClient
from ..clients.sonarr import SonarrClient
from ..clients.radarr import RadarrClient
from ..clients.prowlarr import ProwlarrClient
from ..clients.qbittorrent import QBittorrentClient
from ..clients.jellyfin import JellyfinClient
# All /status routes require an authenticated user (router-level dependency).
router = APIRouter(prefix="/status", tags=["status"], dependencies=[Depends(get_current_user)])
async def _check(name: str, configured: bool, func) -> Dict[str, Any]:
if not configured:
return {"name": name, "status": "not_configured"}
try:
result = await func()
return {"name": name, "status": "up", "detail": result}
except httpx.HTTPError as exc:
return {"name": name, "status": "down", "message": str(exc)}
except Exception as exc:
return {"name": name, "status": "down", "message": str(exc)}
@router.get("/services")
async def services_status() -> Dict[str, Any]:
    """Check every integrated service and roll the results into one overall status."""
    runtime = get_runtime_settings()
    jellyseerr = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    sonarr = SonarrClient(runtime.sonarr_base_url, runtime.sonarr_api_key)
    radarr = RadarrClient(runtime.radarr_base_url, runtime.radarr_api_key)
    prowlarr = ProwlarrClient(runtime.prowlarr_base_url, runtime.prowlarr_api_key)
    qbittorrent = QBittorrentClient(
        runtime.qbittorrent_base_url, runtime.qbittorrent_username, runtime.qbittorrent_password
    )
    jellyfin = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key)

    services = [
        await _check(
            "Jellyseerr",
            jellyseerr.configured(),
            lambda: jellyseerr.get_recent_requests(take=1, skip=0),
        ),
        await _check("Sonarr", sonarr.configured(), sonarr.get_system_status),
        await _check("Radarr", radarr.configured(), radarr.get_system_status),
    ]
    prowlarr_status = await _check("Prowlarr", prowlarr.configured(), prowlarr.get_health)
    if prowlarr_status.get("status") == "up":
        # A non-empty health payload means Prowlarr is up but complaining.
        health = prowlarr_status.get("detail")
        if isinstance(health, list) and health:
            prowlarr_status["status"] = "degraded"
            prowlarr_status["message"] = "Health warnings"
    services.append(prowlarr_status)
    services.append(await _check("qBittorrent", qbittorrent.configured(), qbittorrent.get_app_version))
    services.append(await _check("Jellyfin", jellyfin.configured(), jellyfin.get_system_info))

    statuses = {entry.get("status") for entry in services}
    if "down" in statuses:
        overall = "down"
    elif statuses & {"degraded", "not_configured"}:
        overall = "degraded"
    else:
        overall = "up"
    return {"overall": overall, "services": services}

36
backend/app/runtime.py Normal file
View File

@@ -0,0 +1,36 @@
from .config import settings
from .db import get_settings_overrides
# Settings stored as strings in the DB that must be coerced to int at load time.
_INT_FIELDS = {
    "sonarr_quality_profile_id",
    "radarr_quality_profile_id",
    "jwt_exp_minutes",
    "requests_sync_ttl_minutes",
    "requests_poll_interval_seconds",
    "requests_delta_sync_interval_minutes",
    "requests_cleanup_days",
}
# Settings stored as strings that must be coerced to bool at load time.
_BOOL_FIELDS = {
    "jellyfin_sync_to_arr",
}
def get_runtime_settings():
    """Return the env-based settings overlaid with DB-stored overrides.

    Integer and boolean fields are coerced from their stored string form;
    values that fail integer coercion are silently ignored.
    """
    overrides = get_settings_overrides()
    coerced = {}
    for key, raw in overrides.items():
        if raw is None:
            continue
        if key in _INT_FIELDS:
            try:
                coerced[key] = int(raw)
            except (TypeError, ValueError):
                pass  # keep the env default when the stored value is not numeric
        elif key in _BOOL_FIELDS:
            if isinstance(raw, bool):
                coerced[key] = raw
            else:
                coerced[key] = str(raw).strip().lower() in {"1", "true", "yes", "on"}
        else:
            coerced[key] = raw
    return settings.model_copy(update=coerced)

40
backend/app/security.py Normal file
View File

@@ -0,0 +1,40 @@
from datetime import datetime, timedelta, timezone
from typing import Any, Dict, Optional
from jose import JWTError, jwt
from passlib.context import CryptContext
from .config import settings
# Shared passlib context configured for PBKDF2-SHA256 password hashing;
# "deprecated=auto" lets passlib flag hashes from retired schemes.
_pwd_context = CryptContext(schemes=["pbkdf2_sha256"], deprecated="auto")
# JWT signing algorithm used by create_access_token / decode_token.
_ALGORITHM = "HS256"
def hash_password(password: str) -> str:
    """Hash *password* using the module's PBKDF2-SHA256 context."""
    digest = _pwd_context.hash(password)
    return digest
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Return True when *plain_password* matches *hashed_password*."""
    matches = _pwd_context.verify(plain_password, hashed_password)
    return matches
def create_access_token(subject: str, role: str, expires_minutes: Optional[int] = None) -> str:
    """Create a signed HS256 JWT for *subject* carrying a *role* claim.

    Args:
        subject: value stored in the ``sub`` claim.
        role: value stored in the ``role`` claim.
        expires_minutes: token lifetime; when ``None``, falls back to
            ``settings.jwt_exp_minutes``.

    Returns:
        The encoded JWT string.
    """
    # Explicit None check: the previous `expires_minutes or ...` silently
    # replaced an intentional 0 with the configured default.
    if expires_minutes is None:
        minutes = settings.jwt_exp_minutes
    else:
        minutes = expires_minutes
    expires = datetime.now(timezone.utc) + timedelta(minutes=minutes)
    payload: Dict[str, Any] = {"sub": subject, "role": role, "exp": expires}
    return jwt.encode(payload, settings.jwt_secret, algorithm=_ALGORITHM)
def decode_token(token: str) -> Dict[str, Any]:
    """Decode and validate *token*; lets jose's JWTError propagate."""
    claims = jwt.decode(token, settings.jwt_secret, algorithms=[_ALGORITHM])
    return claims
class TokenError(Exception):
    """Raised by safe_decode_token when a JWT fails to decode/validate."""
    pass
def safe_decode_token(token: str) -> Dict[str, Any]:
    """Decode *token*, translating any jose JWTError into TokenError."""
    try:
        claims = decode_token(token)
    except JWTError as exc:
        raise TokenError("Invalid token") from exc
    return claims

View File

@@ -0,0 +1,58 @@
import logging
from fastapi import HTTPException
from ..clients.jellyfin import JellyfinClient
from ..db import create_user_if_missing
from ..runtime import get_runtime_settings
logger = logging.getLogger(__name__)
async def sync_jellyfin_users() -> int:
    """Import Jellyfin accounts as local users.

    Returns the number of users newly created. Raises HTTPException(400)
    when Jellyfin is not configured; a non-list users payload yields 0.
    """
    runtime = get_runtime_settings()
    client = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key)
    if not client.configured():
        raise HTTPException(status_code=400, detail="Jellyfin not configured")
    users = await client.get_users()
    if not isinstance(users, list):
        return 0
    created = 0
    for entry in users:
        name = entry.get("Name") if isinstance(entry, dict) else None
        if not name:
            continue
        # Placeholder password; provider marks these as Jellyfin-backed accounts.
        was_created = create_user_if_missing(
            name, "jellyfin-user", role="user", auth_provider="jellyfin"
        )
        if was_created:
            created += 1
    return created
async def run_daily_jellyfin_sync() -> None:
    """Background task: run one Jellyfin user sync just after each midnight, forever."""
    while True:
        await _sleep_seconds(_seconds_until_midnight())
        try:
            count = await sync_jellyfin_users()
        except HTTPException as exc:
            # Not configured: skip this cycle rather than crash the loop.
            logger.warning("Jellyfin daily sync skipped: %s", exc.detail)
        except Exception:
            logger.exception("Jellyfin daily sync failed")
        else:
            logger.info("Jellyfin daily sync complete: imported=%s", count)
def _seconds_until_midnight() -> float:
    """Return seconds from now until the next local midnight (never negative)."""
    from datetime import datetime, timedelta

    now = datetime.now()
    upcoming_midnight = (now + timedelta(days=1)).replace(
        hour=0, minute=0, second=0, microsecond=0
    )
    remaining = (upcoming_midnight - now).total_seconds()
    return remaining if remaining > 0.0 else 0.0
async def _sleep_seconds(delay: float) -> None:
    """Await *delay* seconds; thin wrapper over asyncio.sleep.

    asyncio is imported locally, matching _seconds_until_midnight's
    function-scope import style in this module.
    """
    import asyncio
    await asyncio.sleep(delay)

View File

@@ -0,0 +1,596 @@
from typing import Any, Dict, List, Optional
import asyncio
import logging
from datetime import datetime, timezone
from urllib.parse import quote
from ..clients.jellyseerr import JellyseerrClient
from ..clients.jellyfin import JellyfinClient
from ..clients.sonarr import SonarrClient
from ..clients.radarr import RadarrClient
from ..clients.prowlarr import ProwlarrClient
from ..clients.qbittorrent import QBittorrentClient
from ..runtime import get_runtime_settings
from ..db import save_snapshot, get_request_cache_payload
from ..models import ActionOption, NormalizedState, RequestType, Snapshot, TimelineHop
# Human-friendly labels for Jellyseerr request status codes.
STATUS_LABELS = {
    1: "Waiting for approval",
    2: "Approved",
    3: "Declined",
    4: "Ready to watch",
    5: "Working on it",
    6: "Partially ready",
}


def _status_label(value: Any) -> str:
    """Translate a Jellyseerr status code into display text.

    Non-numeric input yields "Unknown"; unmapped codes yield "Status <n>".
    """
    try:
        code = int(value)
    except (TypeError, ValueError):
        return "Unknown"
    return STATUS_LABELS.get(code, f"Status {code}")
def _pick_first(value: Any) -> Optional[Dict[str, Any]]:
    """Return *value* if it is a dict, the first element of a non-empty list, else None."""
    if isinstance(value, dict):
        return value
    if isinstance(value, list) and value:
        return value[0]
    return None
def _queue_records(queue: Any) -> List[Dict[str, Any]]:
    """Normalize a Sonarr/Radarr queue payload to a plain list of records.

    Accepts either a bare list or a dict with a "records" list; anything
    else produces an empty list.
    """
    if isinstance(queue, list):
        return queue
    if isinstance(queue, dict):
        inner = queue.get("records")
        if isinstance(inner, list):
            return inner
    return []
def _filter_queue(queue: Any, item_id: Optional[int], request_type: RequestType) -> Any:
    """Restrict a queue payload to records for *item_id*, preserving its shape.

    Without an id (or without records) the payload passes through untouched.
    Dict payloads keep their other keys, with "records" replaced and
    "totalRecords" recomputed; list payloads become the filtered list.
    """
    if not item_id:
        return queue
    records = _queue_records(queue)
    if not records:
        return queue
    id_key = "seriesId" if request_type == RequestType.tv else "movieId"
    matching = [entry for entry in records if entry.get(id_key) == item_id]
    if isinstance(queue, dict):
        reshaped = dict(queue)
        reshaped["records"] = matching
        reshaped["totalRecords"] = len(matching)
        return reshaped
    return matching
def _download_ids(records: List[Dict[str, Any]]) -> List[str]:
    """Collect non-empty string download ids from queue *records*.

    Both "downloadId" and "download_id" spellings are honored.
    """
    return [
        did
        for record in records
        if isinstance(did := (record.get("downloadId") or record.get("download_id")), str) and did
    ]
def _missing_episode_numbers_by_season(episodes: Any) -> Dict[int, List[int]]:
    """Group the numbers of monitored, already-aired, file-less episodes by season.

    Episodes lacking an "episodeNumber" int fall back to
    "absoluteEpisodeNumber"; entries with an unparseable air date are kept
    (treated as already aired). Returns {season: sorted unique numbers}.
    """
    if not isinstance(episodes, list):
        return {}
    cutoff = datetime.now(timezone.utc)
    by_season: Dict[int, List[int]] = {}
    for entry in episodes:
        if not isinstance(entry, dict):
            continue
        if not entry.get("monitored", True) or entry.get("hasFile"):
            continue
        air_raw = entry.get("airDateUtc")
        if isinstance(air_raw, str):
            try:
                aired = datetime.fromisoformat(air_raw.replace("Z", "+00:00"))
            except ValueError:
                aired = None
            # Skip episodes that have not aired yet.
            if aired is not None and aired > cutoff:
                continue
        season = entry.get("seasonNumber")
        number = entry.get("episodeNumber")
        if not isinstance(number, int):
            number = entry.get("absoluteEpisodeNumber")
        if isinstance(season, int) and isinstance(number, int):
            by_season.setdefault(season, []).append(number)
    return {season: sorted(set(nums)) for season, nums in by_season.items()}
def _summarize_qbit(torrents: List[Dict[str, Any]]) -> Dict[str, Any]:
    """Reduce a qBittorrent torrent list to one coarse state plus a message.

    Priority: downloading > paused > completed/seeding > idle. Torrent
    "state" strings are compared case-insensitively.
    """
    if not torrents:
        return {"state": "idle", "message": "0 active downloads."}
    buckets = {
        "downloading": {"downloading", "stalleddl", "queueddl", "checkingdl", "forceddl"},
        "paused": {"pauseddl", "pausedup"},
        "completed": {"uploading", "stalledup", "queuedup", "checkingup", "forcedup", "stoppedup"},
    }
    counts = {name: 0 for name in buckets}
    for torrent in torrents:
        state = str(torrent.get("state", "")).lower()
        for name, members in buckets.items():
            if state in members:
                counts[name] += 1
    if counts["downloading"]:
        return {
            "state": "downloading",
            "message": f"Downloading ({counts['downloading']} active).",
        }
    if counts["paused"]:
        return {
            "state": "paused",
            "message": f"Paused ({counts['paused']} paused).",
        }
    if counts["completed"]:
        return {
            "state": "completed",
            "message": f"Completed/seeding ({counts['completed']} seeding).",
        }
    return {
        "state": "idle",
        "message": "0 active downloads.",
    }
def _artwork_url(path: Optional[str], size: str, cache_mode: str) -> Optional[str]:
    """Build a TMDB artwork URL for *path* at *size*.

    In "cache" mode the URL points at the local /images/tmdb proxy;
    otherwise it points directly at image.tmdb.org. Empty/None paths
    yield None.
    """
    if not path:
        return None
    normalized = path if path.startswith("/") else f"/{path}"
    if cache_mode == "cache":
        return f"/images/tmdb?path={quote(normalized)}&size={size}"
    return f"https://image.tmdb.org/t/p/{size}{normalized}"
async def build_snapshot(request_id: str) -> Snapshot:
    """Build a cross-service status Snapshot for one Jellyseerr request.

    Walks Jellyseerr -> Sonarr/Radarr -> Prowlarr -> Jellyfin -> qBittorrent,
    appending a TimelineHop per service, then derives a single normalized
    state and reason, suggested follow-up actions, artwork URLs, and a raw
    debug payload. The finished snapshot is persisted via save_snapshot
    before being returned.

    Args:
        request_id: Jellyseerr request id as a string; purely numeric ids
            can be served from the local request cache.

    Returns:
        The populated Snapshot (also saved to the DB).
    """
    timeline = []
    runtime = get_runtime_settings()
    # One client per upstream service; any of them may be unconfigured.
    jellyseerr = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    jellyfin = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key)
    sonarr = SonarrClient(runtime.sonarr_base_url, runtime.sonarr_api_key)
    radarr = RadarrClient(runtime.radarr_base_url, runtime.radarr_api_key)
    prowlarr = ProwlarrClient(runtime.prowlarr_base_url, runtime.prowlarr_api_key)
    qbittorrent = QBittorrentClient(
        runtime.qbittorrent_base_url,
        runtime.qbittorrent_username,
        runtime.qbittorrent_password,
    )
    snapshot = Snapshot(
        request_id=request_id,
        title="Unknown",
        state=NormalizedState.unknown,
        state_reason="Awaiting configuration",
    )
    # Try the local request cache first unless configured to always hit Jellyseerr.
    cached_request = None
    mode = (runtime.requests_data_source or "prefer_cache").lower()
    if mode != "always_js" and request_id.isdigit():
        cached_request = get_request_cache_payload(int(request_id))
        if cached_request is not None:
            logging.getLogger(__name__).debug(
                "snapshot cache hit: request_id=%s mode=%s", request_id, mode
            )
        else:
            logging.getLogger(__name__).debug(
                "snapshot cache miss: request_id=%s mode=%s", request_id, mode
            )
    # No Jellyseerr config and no cached copy: nothing to build from.
    if not jellyseerr.configured() and not cached_request:
        timeline.append(TimelineHop(service="Jellyseerr", status="not_configured"))
        timeline.append(TimelineHop(service="Sonarr/Radarr", status="not_configured"))
        timeline.append(TimelineHop(service="Prowlarr", status="not_configured"))
        timeline.append(TimelineHop(service="qBittorrent", status="not_configured"))
        snapshot.timeline = timeline
        return snapshot
    jelly_request = cached_request
    # Fetch live data when the cache missed or "always_js" forces a live read.
    if (jelly_request is None or mode == "always_js") and jellyseerr.configured():
        try:
            jelly_request = await jellyseerr.get_request(request_id)
            logging.getLogger(__name__).debug(
                "snapshot jellyseerr fetch: request_id=%s mode=%s", request_id, mode
            )
        except Exception as exc:
            timeline.append(TimelineHop(service="Jellyseerr", status="error", details={"error": str(exc)}))
            snapshot.timeline = timeline
            snapshot.state = NormalizedState.failed
            snapshot.state_reason = "Failed to reach Jellyseerr"
            return snapshot
    if not jelly_request:
        timeline.append(TimelineHop(service="Jellyseerr", status="not_found"))
        snapshot.timeline = timeline
        snapshot.state = NormalizedState.unknown
        snapshot.state_reason = "Request not found in Jellyseerr"
        return snapshot
    # Basic identity: title/year/type straight from the request payload.
    jelly_status = jelly_request.get("status", "unknown")
    jelly_status_label = _status_label(jelly_status)
    jelly_type = jelly_request.get("type") or "unknown"
    snapshot.title = jelly_request.get("media", {}).get("title", "Unknown")
    snapshot.year = jelly_request.get("media", {}).get("year")
    snapshot.request_type = RequestType(jelly_type) if jelly_type in {"movie", "tv"} else RequestType.unknown
    media = jelly_request.get("media", {}) if isinstance(jelly_request, dict) else {}
    poster_path = None
    backdrop_path = None
    if isinstance(media, dict):
        # Payloads vary between camelCase and snake_case keys.
        poster_path = media.get("posterPath") or media.get("poster_path")
        backdrop_path = media.get("backdropPath") or media.get("backdrop_path")
    # Enrich missing title/year/artwork from TMDB details, best effort.
    if snapshot.title in {None, "", "Unknown"} and jellyseerr.configured():
        tmdb_id = jelly_request.get("media", {}).get("tmdbId")
        if tmdb_id:
            try:
                if snapshot.request_type == RequestType.movie:
                    details = await jellyseerr.get_movie(int(tmdb_id))
                    if isinstance(details, dict):
                        snapshot.title = details.get("title") or snapshot.title
                        release_date = details.get("releaseDate")
                        snapshot.year = int(release_date[:4]) if release_date else snapshot.year
                        poster_path = poster_path or details.get("posterPath") or details.get("poster_path")
                        backdrop_path = (
                            backdrop_path
                            or details.get("backdropPath")
                            or details.get("backdrop_path")
                        )
                elif snapshot.request_type == RequestType.tv:
                    details = await jellyseerr.get_tv(int(tmdb_id))
                    if isinstance(details, dict):
                        snapshot.title = details.get("name") or details.get("title") or snapshot.title
                        first_air = details.get("firstAirDate")
                        snapshot.year = int(first_air[:4]) if first_air else snapshot.year
                        poster_path = poster_path or details.get("posterPath") or details.get("poster_path")
                        backdrop_path = (
                            backdrop_path
                            or details.get("backdropPath")
                            or details.get("backdrop_path")
                        )
            except Exception:
                # Enrichment is cosmetic; never let it break the snapshot.
                pass
    cache_mode = (runtime.artwork_cache_mode or "remote").lower()
    snapshot.artwork = {
        "poster_path": poster_path,
        "backdrop_path": backdrop_path,
        "poster_url": _artwork_url(poster_path, "w342", cache_mode),
        "backdrop_url": _artwork_url(backdrop_path, "w780", cache_mode),
    }
    timeline.append(
        TimelineHop(
            service="Jellyseerr",
            status=jelly_status_label,
            details={
                # Requester identity: first non-empty of several possible fields.
                "requestedBy": jelly_request.get("requestedBy", {}).get("displayName")
                or jelly_request.get("requestedBy", {}).get("username")
                or jelly_request.get("requestedBy", {}).get("jellyfinUsername")
                or jelly_request.get("requestedBy", {}).get("email"),
                "createdAt": jelly_request.get("createdAt"),
                "updatedAt": jelly_request.get("updatedAt"),
                "approved": jelly_request.get("isApproved"),
                "statusCode": jelly_status,
            },
        )
    )
    # --- Sonarr/Radarr: locate the tracked item, its queue, and file status. ---
    arr_state = None
    arr_details: Dict[str, Any] = {}
    arr_item = None
    arr_queue = None
    media_status = jelly_request.get("media", {}).get("status")
    try:
        media_status_code = int(media_status) if media_status is not None else None
    except (TypeError, ValueError):
        media_status_code = None
    if snapshot.request_type == RequestType.tv:
        tvdb_id = jelly_request.get("media", {}).get("tvdbId")
        if tvdb_id:
            try:
                series = await sonarr.get_series_by_tvdb_id(int(tvdb_id))
                arr_item = _pick_first(series)
                arr_details["series"] = arr_item
                arr_state = "added" if arr_item else "missing"
                if arr_item:
                    # A series counts as "available" only when every episode has a file.
                    stats = arr_item.get("statistics") if isinstance(arr_item, dict) else None
                    if isinstance(stats, dict):
                        file_count = stats.get("episodeFileCount")
                        total_count = (
                            stats.get("totalEpisodeCount")
                            if isinstance(stats.get("totalEpisodeCount"), int)
                            else stats.get("episodeCount")
                        )
                        if (
                            isinstance(file_count, int)
                            and isinstance(total_count, int)
                            and total_count > 0
                            and file_count >= total_count
                        ):
                            arr_state = "available"
                if arr_item and isinstance(arr_item.get("id"), int):
                    series_id = int(arr_item["id"])
                    arr_queue = await sonarr.get_queue(series_id)
                    arr_queue = _filter_queue(arr_queue, series_id, RequestType.tv)
                    arr_details["queue"] = arr_queue
                    episodes = await sonarr.get_episodes(series_id)
                    missing_by_season = _missing_episode_numbers_by_season(episodes)
                    if missing_by_season:
                        arr_details["missingEpisodes"] = missing_by_season
            except Exception as exc:
                arr_state = "error"
                arr_details["error"] = str(exc)
    elif snapshot.request_type == RequestType.movie:
        tmdb_id = jelly_request.get("media", {}).get("tmdbId")
        if tmdb_id:
            try:
                movie = await radarr.get_movie_by_tmdb_id(int(tmdb_id))
                arr_item = _pick_first(movie)
                if not arr_item:
                    # Fallback: scan the full movie list by tmdbId, then title(+year).
                    title_hint = (
                        jelly_request.get("media", {}).get("title")
                        or jelly_request.get("title")
                        or snapshot.title
                    )
                    year_hint = (
                        jelly_request.get("media", {}).get("year")
                        or jelly_request.get("year")
                        or snapshot.year
                    )
                    try:
                        all_movies = await radarr.get_movies()
                    except Exception:
                        all_movies = None
                    if isinstance(all_movies, list):
                        for candidate in all_movies:
                            if not isinstance(candidate, dict):
                                continue
                            if tmdb_id and candidate.get("tmdbId") == int(tmdb_id):
                                arr_item = candidate
                                break
                            if title_hint and candidate.get("title") == title_hint:
                                if not year_hint or candidate.get("year") == year_hint:
                                    arr_item = candidate
                                    break
                arr_details["movie"] = arr_item
                if arr_item:
                    if arr_item.get("hasFile"):
                        arr_state = "available"
                    elif arr_item.get("isAvailable"):
                        arr_state = "searching"
                    else:
                        arr_state = "added"
                else:
                    arr_state = "missing"
                if arr_item and isinstance(arr_item.get("id"), int):
                    arr_queue = await radarr.get_queue(int(arr_item["id"]))
                    arr_queue = _filter_queue(arr_queue, int(arr_item["id"]), RequestType.movie)
                    arr_details["queue"] = arr_queue
            except Exception as exc:
                arr_state = "error"
                arr_details["error"] = str(exc)
    if arr_state is None:
        arr_state = "unknown"
    # Trust Jellyseerr's media status (4=available, 6=partial) over "missing".
    if arr_state == "missing" and media_status_code in {4}:
        arr_state = "available"
    elif arr_state == "missing" and media_status_code in {6}:
        arr_state = "added"
    timeline.append(TimelineHop(service="Sonarr/Radarr", status=arr_state, details=arr_details))
    # --- Prowlarr: surface indexer health warnings. ---
    try:
        prowlarr_health = await prowlarr.get_health()
        if isinstance(prowlarr_health, list) and len(prowlarr_health) > 0:
            timeline.append(TimelineHop(service="Prowlarr", status="issues", details={"health": prowlarr_health}))
        else:
            timeline.append(TimelineHop(service="Prowlarr", status="ok"))
    except Exception as exc:
        timeline.append(TimelineHop(service="Prowlarr", status="error", details={"error": str(exc)}))
    # --- Jellyfin: check whether the title already exists in the library. ---
    jellyfin_available = False
    jellyfin_item = None
    if jellyfin.configured() and snapshot.title:
        types = ["Movie"] if snapshot.request_type == RequestType.movie else ["Series"]
        try:
            search = await jellyfin.search_items(snapshot.title, types)
        except Exception:
            search = None
        if isinstance(search, dict):
            items = search.get("Items") or search.get("items") or []
            for item in items:
                if not isinstance(item, dict):
                    continue
                name = item.get("Name") or item.get("title")
                year = item.get("ProductionYear") or item.get("Year")
                # Case-insensitive title match; year must agree when both known.
                if name and name.strip().lower() == (snapshot.title or "").strip().lower():
                    if snapshot.year and year and int(year) != int(snapshot.year):
                        continue
                    jellyfin_available = True
                    jellyfin_item = item
                    break
    # Optional backfill: item exists in Jellyfin but isn't tracked in *arr —
    # add it unmonitored (no search) so the libraries agree. Best effort.
    if jellyfin_available and arr_state == "missing" and runtime.jellyfin_sync_to_arr:
        arr_details["note"] = "Found in Jellyfin but not tracked in Sonarr/Radarr."
        if snapshot.request_type == RequestType.movie:
            if runtime.radarr_quality_profile_id and runtime.radarr_root_folder:
                radarr_client = RadarrClient(runtime.radarr_base_url, runtime.radarr_api_key)
                if radarr_client.configured():
                    root_folder = await _resolve_root_folder_path(
                        radarr_client, runtime.radarr_root_folder, "Radarr"
                    )
                    tmdb_id = jelly_request.get("media", {}).get("tmdbId")
                    if tmdb_id:
                        try:
                            await radarr_client.add_movie(
                                int(tmdb_id),
                                runtime.radarr_quality_profile_id,
                                root_folder,
                                monitored=False,
                                search_for_movie=False,
                            )
                        except Exception:
                            pass
        if snapshot.request_type == RequestType.tv:
            if runtime.sonarr_quality_profile_id and runtime.sonarr_root_folder:
                sonarr_client = SonarrClient(runtime.sonarr_base_url, runtime.sonarr_api_key)
                if sonarr_client.configured():
                    root_folder = await _resolve_root_folder_path(
                        sonarr_client, runtime.sonarr_root_folder, "Sonarr"
                    )
                    tvdb_id = jelly_request.get("media", {}).get("tvdbId")
                    if tvdb_id:
                        try:
                            await sonarr_client.add_series(
                                int(tvdb_id),
                                runtime.sonarr_quality_profile_id,
                                root_folder,
                                monitored=False,
                                search_missing=False,
                            )
                        except Exception:
                            pass
    # --- qBittorrent: summarize torrents tied to this item's queue records. ---
    qbit_state = None
    qbit_message = None
    try:
        download_ids = _download_ids(_queue_records(arr_queue))
        torrent_list: List[Dict[str, Any]] = []
        if download_ids and qbittorrent.configured():
            torrents = await qbittorrent.get_torrents_by_hashes("|".join(download_ids))
            torrent_list = torrents if isinstance(torrents, list) else []
        summary = _summarize_qbit(torrent_list)
        qbit_state = summary.get("state")
        qbit_message = summary.get("message")
        timeline.append(
            TimelineHop(
                service="qBittorrent",
                status=summary["state"],
                details={
                    "summary": summary["message"],
                    "torrents": torrent_list,
                },
            )
        )
    except Exception as exc:
        timeline.append(TimelineHop(service="qBittorrent", status="error", details={"error": str(exc)}))
    # --- Derive the single normalized state; later checks override earlier ones. ---
    status_code = None
    try:
        status_code = int(jelly_status)
    except (TypeError, ValueError):
        status_code = None
    # Status codes 2/4/5/6 imply the request passed approval (see STATUS_LABELS).
    derived_approved = bool(jelly_request.get("isApproved")) or status_code in {2, 4, 5, 6}
    if derived_approved:
        snapshot.state = NormalizedState.approved
        snapshot.state_reason = "Approved and queued for processing."
    else:
        snapshot.state = NormalizedState.requested
        snapshot.state_reason = "Waiting for approval before we can search."
    queue_records = _queue_records(arr_queue)
    if qbit_state in {"downloading", "paused"}:
        snapshot.state = NormalizedState.downloading
        snapshot.state_reason = "Downloading in qBittorrent."
        if qbit_message:
            snapshot.state_reason = qbit_message
    elif qbit_state == "completed":
        if arr_state == "available":
            snapshot.state = NormalizedState.completed
            snapshot.state_reason = "In your library and ready to watch."
        else:
            snapshot.state = NormalizedState.importing
            snapshot.state_reason = "Download finished. Waiting for library import."
    elif queue_records:
        if arr_state == "missing":
            snapshot.state_reason = "Queue shows a download, but qBittorrent has no active torrent."
        else:
            snapshot.state_reason = "Waiting for download to start in qBittorrent."
    elif arr_state == "missing" and derived_approved:
        snapshot.state = NormalizedState.needs_add
        snapshot.state_reason = "Approved, but not added to the library yet."
    elif arr_state == "searching":
        snapshot.state = NormalizedState.searching
        snapshot.state_reason = "Searching for a matching release."
    elif arr_state == "available":
        snapshot.state = NormalizedState.completed
        snapshot.state_reason = "In your library and ready to watch."
    elif arr_state == "added" and snapshot.state == NormalizedState.approved:
        snapshot.state = NormalizedState.added_to_arr
        snapshot.state_reason = "Item is present in Sonarr/Radarr"
    # Presence in Jellyfin wins unless a download/import is in flight.
    if jellyfin_available and snapshot.state not in {
        NormalizedState.downloading,
        NormalizedState.importing,
    }:
        snapshot.state = NormalizedState.completed
        snapshot.state_reason = "Ready to watch in Jellyfin."
    snapshot.timeline = timeline
    # --- Suggested follow-up actions based on the derived state. ---
    actions: List[ActionOption] = []
    if arr_state == "missing":
        actions.append(
            ActionOption(
                id="readd_to_arr",
                label="Add to the library queue (Sonarr/Radarr)",
                risk="medium",
            )
        )
    elif arr_item and arr_state != "available":
        actions.append(
            ActionOption(
                id="search",
                label="Search again for releases",
                risk="low",
            )
        )
    download_ids = _download_ids(_queue_records(arr_queue))
    if download_ids and qbittorrent.configured():
        actions.append(
            ActionOption(
                id="resume_torrent",
                label="Resume the download",
                risk="low",
            )
        )
    snapshot.actions = actions
    # Deep link into Jellyfin's search UI when the item should be watchable.
    jellyfin_link = None
    if runtime.jellyfin_public_url and snapshot.state in {
        NormalizedState.available,
        NormalizedState.completed,
    }:
        base_url = runtime.jellyfin_public_url.rstrip("/")
        query = quote(snapshot.title or "")
        jellyfin_link = f"{base_url}/web/index.html#!/search?query={query}"
    # Raw payloads kept for debugging/inspection in the UI.
    snapshot.raw = {
        "jellyseerr": jelly_request,
        "arr": {
            "item": arr_item,
            "queue": arr_queue,
        },
        "jellyfin": {
            "publicUrl": runtime.jellyfin_public_url,
            "available": snapshot.state in {
                NormalizedState.available,
                NormalizedState.completed,
            },
            "link": jellyfin_link,
            "item": jellyfin_item,
        },
    }
    # save_snapshot is synchronous DB work; keep it off the event loop.
    await asyncio.to_thread(save_snapshot, snapshot)
    return snapshot

9
backend/requirements.txt Normal file
View File

@@ -0,0 +1,9 @@
fastapi==0.115.0
uvicorn==0.30.6
httpx==0.27.2
pydantic==2.9.2
pydantic-settings==2.5.2
python-jose[cryptography]==3.3.0
passlib==1.7.4
python-multipart==0.0.9
Pillow==10.4.0