commit fe43a81175c2859543cb6b0c876309d05c2b5c19 Author: Rephl3x Date: Thu Jan 22 22:49:57 2026 +1300 Initial commit diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..9779c24 --- /dev/null +++ b/.gitignore @@ -0,0 +1,8 @@ +.env +.venv/ +data/ +backend/__pycache__/ +backend/.pytest_cache/ +frontend/node_modules/ +frontend/.next/ +*.log diff --git a/README.md b/README.md new file mode 100644 index 0000000..a0cfd18 --- /dev/null +++ b/README.md @@ -0,0 +1,75 @@ +# Magent + +AI-powered request timeline for Jellyseerr + Arr stack. + +## Backend (FastAPI) + +```bash +cd backend +python -m venv .venv +.\.venv\Scripts\Activate.ps1 +pip install -r requirements.txt +uvicorn app.main:app --reload --port 8000 +``` + +Environment variables (sample): + +```bash +$env:JELLYSEERR_URL="http://localhost:5055" +$env:JELLYSEERR_API_KEY="..." +$env:SONARR_URL="http://localhost:8989" +$env:SONARR_API_KEY="..." +$env:SONARR_QUALITY_PROFILE_ID="1" +$env:SONARR_ROOT_FOLDER="/tv" +$env:RADARR_URL="http://localhost:7878" +$env:RADARR_API_KEY="..." +$env:RADARR_QUALITY_PROFILE_ID="1" +$env:RADARR_ROOT_FOLDER="/movies" +$env:PROWLARR_URL="http://localhost:9696" +$env:PROWLARR_API_KEY="..." +$env:QBIT_URL="http://localhost:8080" +$env:QBIT_USERNAME="..." +$env:QBIT_PASSWORD="..." +$env:SQLITE_PATH="data/magent.db" +$env:JWT_SECRET="change-me" +$env:JWT_EXP_MINUTES="720" +$env:ADMIN_USERNAME="admin" +$env:ADMIN_PASSWORD="adminadmin" +``` + +## Frontend (Next.js) + +```bash +cd frontend +npm install +npm run dev +``` + +Open http://localhost:3000 + +Admin panel: http://localhost:3000/admin + +Login uses the admin credentials above (or any other user you create in SQLite). + +## Docker (Testing) + +```bash +docker compose up --build +``` + +Backend: http://localhost:8000 +Frontend: http://localhost:3000 + +## Public Hosting Notes + +The frontend now proxies `/api/*` to the backend container. 
Set: + +- `NEXT_PUBLIC_API_BASE=/api` (browser uses same-origin) +- `BACKEND_INTERNAL_URL=http://backend:8000` (container-to-container) + +If you prefer the browser to call the backend directly, set `NEXT_PUBLIC_API_BASE` to your public backend URL and ensure CORS is configured. + +## History endpoints + +- `GET /requests/{id}/history?limit=10` recent snapshots +- `GET /requests/{id}/actions?limit=10` recent action logs diff --git a/backend/.dockerignore b/backend/.dockerignore new file mode 100644 index 0000000..bc4f72a --- /dev/null +++ b/backend/.dockerignore @@ -0,0 +1,4 @@ +__pycache__/ +*.pyc +.venv/ +.env diff --git a/backend/Dockerfile b/backend/Dockerfile new file mode 100644 index 0000000..93712a6 --- /dev/null +++ b/backend/Dockerfile @@ -0,0 +1,15 @@ +FROM python:3.12-slim + +WORKDIR /app + +ENV PYTHONDONTWRITEBYTECODE=1 \ + PYTHONUNBUFFERED=1 + +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY app ./app + +EXPOSE 8000 + +CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"] diff --git a/backend/app/__pycache__/config.cpython-312.pyc b/backend/app/__pycache__/config.cpython-312.pyc new file mode 100644 index 0000000..9a4aae5 Binary files /dev/null and b/backend/app/__pycache__/config.cpython-312.pyc differ diff --git a/backend/app/__pycache__/main.cpython-312.pyc b/backend/app/__pycache__/main.cpython-312.pyc new file mode 100644 index 0000000..f94f61a Binary files /dev/null and b/backend/app/__pycache__/main.cpython-312.pyc differ diff --git a/backend/app/__pycache__/models.cpython-312.pyc b/backend/app/__pycache__/models.cpython-312.pyc new file mode 100644 index 0000000..1d54524 Binary files /dev/null and b/backend/app/__pycache__/models.cpython-312.pyc differ diff --git a/backend/app/ai/__pycache__/triage.cpython-312.pyc b/backend/app/ai/__pycache__/triage.cpython-312.pyc new file mode 100644 index 0000000..fcc20f4 Binary files /dev/null and b/backend/app/ai/__pycache__/triage.cpython-312.pyc 
from ..models import NormalizedState, TriageRecommendation, TriageResult, Snapshot


def triage_snapshot(snapshot: Snapshot) -> TriageResult:
    """Turn a request snapshot into a human-readable triage verdict.

    Inspects the snapshot's normalized pipeline state and returns a
    ``TriageResult`` carrying a one-line summary, a suspected root cause,
    a confidence score, and at least one actionable recommendation.
    """
    state = snapshot.state

    if state == NormalizedState.requested:
        # Nothing downstream can happen until an admin approves the request.
        return TriageResult(
            summary="The request is waiting for approval in Jellyseerr.",
            confidence=0.6,
            root_cause="approval",
            recommendations=[
                TriageRecommendation(
                    action_id="wait_for_approval",
                    title="Ask an admin to approve the request",
                    reason="Jellyseerr has not marked this request as approved.",
                    risk="low",
                )
            ],
        )

    if state == NormalizedState.needs_add:
        # Approved in Jellyseerr but no Sonarr/Radarr entry exists yet.
        return TriageResult(
            summary="The request is approved but not added to Sonarr/Radarr yet.",
            confidence=0.7,
            root_cause="not_added",
            recommendations=[
                TriageRecommendation(
                    action_id="readd_to_arr",
                    title="Add it to the library queue",
                    reason="Sonarr/Radarr has not created the entry for this request.",
                    risk="medium",
                )
            ],
        )

    if state == NormalizedState.added_to_arr:
        # The entry exists but nothing has downloaded; a fresh search is
        # the lowest-risk next step.
        return TriageResult(
            summary="The item is in Sonarr/Radarr but has not been downloaded yet.",
            confidence=0.55,
            root_cause="search",
            recommendations=[
                TriageRecommendation(
                    action_id="search",
                    title="Re-run search",
                    reason="A fresh search can locate new releases.",
                    risk="low",
                )
            ],
        )

    # Any other state: no known blocker, so fall back to collecting diagnostics.
    return TriageResult(
        summary="No clear blocker detected yet.",
        confidence=0.2,
        root_cause="unknown",
        recommendations=[
            TriageRecommendation(
                action_id="diagnostics",
                title="Generate diagnostics bundle",
                reason="Collect service status and recent errors for review.",
                risk="low",
            )
        ],
    )
from typing import Any, Dict, Optional
import httpx


class ApiClient:
    """Small async JSON client shared by the Jellyseerr/*Arr wrappers.

    Holds a normalized base URL plus an optional API key and exposes thin
    GET/POST helpers. When no base URL is configured every call degrades
    to returning ``None`` so callers can treat the service as absent.
    """

    def __init__(self, base_url: Optional[str], api_key: Optional[str] = None):
        # Strip trailing slashes so path concatenation never yields "//".
        if base_url:
            self.base_url = base_url.rstrip("/")
        else:
            self.base_url = None
        self.api_key = api_key

    def configured(self) -> bool:
        """True when a base URL is set (subclasses may tighten this check)."""
        return bool(self.base_url)

    def headers(self) -> Dict[str, str]:
        """Default auth headers; empty when no API key is present."""
        if not self.api_key:
            return {}
        return {"X-Api-Key": self.api_key}

    async def get(self, path: str, params: Optional[Dict[str, Any]] = None) -> Optional[Any]:
        """GET ``base_url + path`` and decode the JSON body.

        Returns ``None`` without touching the network when unconfigured;
        raises ``httpx.HTTPStatusError`` on non-2xx responses.
        """
        if not self.base_url:
            return None
        # A fresh client per call keeps the helpers stateless; each request
        # is capped at 10 seconds.
        async with httpx.AsyncClient(timeout=10.0) as http:
            reply = await http.get(
                f"{self.base_url}{path}", headers=self.headers(), params=params
            )
            reply.raise_for_status()
            return reply.json()

    async def post(self, path: str, payload: Optional[Dict[str, Any]] = None) -> Optional[Any]:
        """POST ``payload`` as JSON to ``base_url + path``.

        Returns ``None`` without touching the network when unconfigured;
        raises ``httpx.HTTPStatusError`` on non-2xx responses.
        """
        if not self.base_url:
            return None
        async with httpx.AsyncClient(timeout=10.0) as http:
            reply = await http.post(
                f"{self.base_url}{path}", headers=self.headers(), json=payload
            )
            reply.raise_for_status()
            return reply.json()
from typing import Any, Dict, Optional
from .base import ApiClient


class JellyseerrClient(ApiClient):
    """Thin typed wrapper over the Jellyseerr v1 HTTP API."""

    async def get_status(self) -> Optional[Dict[str, Any]]:
        """Fetch server status/version information."""
        return await self.get("/api/v1/status")

    async def get_request(self, request_id: str) -> Optional[Dict[str, Any]]:
        """Fetch a single media request by its Jellyseerr id."""
        return await self.get(f"/api/v1/request/{request_id}")

    async def get_recent_requests(self, take: int = 10, skip: int = 0) -> Optional[Dict[str, Any]]:
        """Fetch a page of requests using take/skip pagination."""
        paging = {"take": take, "skip": skip}
        return await self.get("/api/v1/request", params=paging)

    async def get_media(self, media_id: int) -> Optional[Dict[str, Any]]:
        """Fetch a media record by its internal Jellyseerr id."""
        return await self.get(f"/api/v1/media/{media_id}")

    async def get_movie(self, tmdb_id: int) -> Optional[Dict[str, Any]]:
        """Fetch movie details by TMDB id."""
        return await self.get(f"/api/v1/movie/{tmdb_id}")

    async def get_tv(self, tmdb_id: int) -> Optional[Dict[str, Any]]:
        """Fetch TV-show details by TMDB id."""
        return await self.get(f"/api/v1/tv/{tmdb_id}")

    async def search(self, query: str, page: int = 1) -> Optional[Dict[str, Any]]:
        """Run a combined movie/TV/person search."""
        return await self.get("/api/v1/search", params={"query": query, "page": page})
from typing import Any, Dict, List, Optional
from .base import ApiClient


class RadarrClient(ApiClient):
    """Thin typed wrapper over the Radarr v3 HTTP API.

    Fix vs. original: the collection endpoints (``/movie``,
    ``/movie?tmdbId=``, ``/rootfolder``, ``/qualityprofile``) return JSON
    arrays, but were annotated ``Optional[Dict[str, Any]]``. The return
    annotations now match the actual payload shapes; runtime behavior is
    unchanged.
    """

    async def get_system_status(self) -> Optional[Dict[str, Any]]:
        """Fetch Radarr version and system information (single object)."""
        return await self.get("/api/v3/system/status")

    async def get_movie_by_tmdb_id(self, tmdb_id: int) -> Optional[List[Dict[str, Any]]]:
        """Look up library entries for a TMDB id.

        Radarr returns a JSON array — empty when the movie is not in the
        library — not a single object; callers should index/iterate.
        """
        return await self.get("/api/v3/movie", params={"tmdbId": tmdb_id})

    async def get_movies(self) -> Optional[List[Dict[str, Any]]]:
        """Fetch every movie in the library (JSON array)."""
        return await self.get("/api/v3/movie")

    async def get_root_folders(self) -> Optional[List[Dict[str, Any]]]:
        """Fetch the configured root folders (JSON array)."""
        return await self.get("/api/v3/rootfolder")

    async def get_quality_profiles(self) -> Optional[List[Dict[str, Any]]]:
        """Fetch the configured quality profiles (JSON array)."""
        return await self.get("/api/v3/qualityprofile")

    async def get_queue(self, movie_id: int) -> Optional[Dict[str, Any]]:
        """Fetch the (paged) download queue filtered to one movie."""
        return await self.get("/api/v3/queue", params={"movieId": movie_id})

    async def search(self, movie_id: int) -> Optional[Dict[str, Any]]:
        """Trigger a ``MoviesSearch`` command for a single movie."""
        return await self.post(
            "/api/v3/command", payload={"name": "MoviesSearch", "movieIds": [movie_id]}
        )

    async def add_movie(
        self,
        tmdb_id: int,
        quality_profile_id: int,
        root_folder: str,
        monitored: bool = True,
        search_for_movie: bool = True,
    ) -> Optional[Dict[str, Any]]:
        """Add a movie to Radarr, optionally kicking off an immediate search.

        NOTE(review): Radarr's POST /movie typically also expects ``title``
        (and year) in the payload; confirm the server resolves these from
        ``tmdbId`` alone in the deployed Radarr version.
        """
        payload = {
            "tmdbId": tmdb_id,
            "qualityProfileId": quality_profile_id,
            "rootFolderPath": root_folder,
            "monitored": monitored,
            "addOptions": {"searchForMovie": search_for_movie},
        }
        return await self.post("/api/v3/movie", payload=payload)

    async def grab_release(self, guid: str, indexer_id: int) -> Optional[Dict[str, Any]]:
        """Force-grab a specific release previously returned by a release search."""
        return await self.post(
            "/api/v3/release", payload={"guid": guid, "indexerId": indexer_id}
        )
Any]]: + return await self.get("/api/v3/system/status") + + async def get_series_by_tvdb_id(self, tvdb_id: int) -> Optional[Dict[str, Any]]: + return await self.get("/api/v3/series", params={"tvdbId": tvdb_id}) + + async def get_root_folders(self) -> Optional[Dict[str, Any]]: + return await self.get("/api/v3/rootfolder") + + async def get_quality_profiles(self) -> Optional[Dict[str, Any]]: + return await self.get("/api/v3/qualityprofile") + + async def get_queue(self, series_id: int) -> Optional[Dict[str, Any]]: + return await self.get("/api/v3/queue", params={"seriesId": series_id}) + + async def get_episodes(self, series_id: int) -> Optional[Dict[str, Any]]: + return await self.get("/api/v3/episode", params={"seriesId": series_id}) + + async def search(self, series_id: int) -> Optional[Dict[str, Any]]: + return await self.post("/api/v3/command", payload={"name": "SeriesSearch", "seriesId": series_id}) + + async def search_episodes(self, episode_ids: list[int]) -> Optional[Dict[str, Any]]: + return await self.post("/api/v3/command", payload={"name": "EpisodeSearch", "episodeIds": episode_ids}) + + async def add_series( + self, + tvdb_id: int, + quality_profile_id: int, + root_folder: str, + monitored: bool = True, + title: Optional[str] = None, + search_missing: bool = True, + ) -> Optional[Dict[str, Any]]: + payload = { + "tvdbId": tvdb_id, + "qualityProfileId": quality_profile_id, + "rootFolderPath": root_folder, + "monitored": monitored, + "seasonFolder": True, + "addOptions": {"searchForMissingEpisodes": search_missing}, + } + if title: + payload["title"] = title + return await self.post("/api/v3/series", payload=payload) + + async def grab_release(self, guid: str, indexer_id: int) -> Optional[Dict[str, Any]]: + return await self.post("/api/v3/release", payload={"guid": guid, "indexerId": indexer_id}) diff --git a/backend/app/config.py b/backend/app/config.py new file mode 100644 index 0000000..c501fc8 --- /dev/null +++ b/backend/app/config.py @@ -0,0 +1,105 
class Settings(BaseSettings):
    """Application configuration resolved from environment variables.

    Each field lists its accepted env-var names via ``AliasChoices`` so both
    short (``QBIT_URL``) and long (``QBITTORRENT_BASE_URL``) spellings work.
    All integration fields default to ``None`` == "not configured".
    """

    model_config = SettingsConfigDict(env_prefix="")

    # --- core application ---
    app_name: str = "Magent"
    cors_allow_origin: str = "http://localhost:3000"
    sqlite_path: str = Field(default="data/magent.db", validation_alias=AliasChoices("SQLITE_PATH"))
    jwt_secret: str = Field(default="change-me", validation_alias=AliasChoices("JWT_SECRET"))
    jwt_exp_minutes: int = Field(default=720, validation_alias=AliasChoices("JWT_EXP_MINUTES"))
    admin_username: str = Field(default="admin", validation_alias=AliasChoices("ADMIN_USERNAME"))
    admin_password: str = Field(default="adminadmin", validation_alias=AliasChoices("ADMIN_PASSWORD"))
    log_level: str = Field(default="INFO", validation_alias=AliasChoices("LOG_LEVEL"))
    log_file: str = Field(default="data/magent.log", validation_alias=AliasChoices("LOG_FILE"))

    # --- request-cache sync scheduling ---
    requests_sync_ttl_minutes: int = Field(
        default=1440, validation_alias=AliasChoices("REQUESTS_SYNC_TTL_MINUTES")
    )
    requests_poll_interval_seconds: int = Field(
        default=300, validation_alias=AliasChoices("REQUESTS_POLL_INTERVAL_SECONDS")
    )
    requests_delta_sync_interval_minutes: int = Field(
        default=5, validation_alias=AliasChoices("REQUESTS_DELTA_SYNC_INTERVAL_MINUTES")
    )
    requests_full_sync_time: str = Field(
        default="00:00", validation_alias=AliasChoices("REQUESTS_FULL_SYNC_TIME")
    )
    requests_cleanup_time: str = Field(
        default="02:00", validation_alias=AliasChoices("REQUESTS_CLEANUP_TIME")
    )
    requests_cleanup_days: int = Field(
        default=90, validation_alias=AliasChoices("REQUESTS_CLEANUP_DAYS")
    )
    requests_data_source: str = Field(
        default="prefer_cache", validation_alias=AliasChoices("REQUESTS_DATA_SOURCE")
    )
    artwork_cache_mode: str = Field(
        default="remote", validation_alias=AliasChoices("ARTWORK_CACHE_MODE")
    )

    # --- Jellyseerr / Jellyfin ---
    jellyseerr_base_url: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("JELLYSEERR_URL", "JELLYSEERR_BASE_URL")
    )
    jellyseerr_api_key: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("JELLYSEERR_API_KEY", "JELLYSEERR_KEY")
    )
    jellyfin_base_url: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("JELLYFIN_URL", "JELLYFIN_BASE_URL")
    )
    jellyfin_api_key: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("JELLYFIN_API_KEY", "JELLYFIN_KEY")
    )
    jellyfin_public_url: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("JELLYFIN_PUBLIC_URL")
    )
    jellyfin_sync_to_arr: bool = Field(
        default=True, validation_alias=AliasChoices("JELLYFIN_SYNC_TO_ARR")
    )

    # --- Sonarr ---
    sonarr_base_url: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("SONARR_URL", "SONARR_BASE_URL")
    )
    sonarr_api_key: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("SONARR_API_KEY", "SONARR_KEY")
    )
    sonarr_quality_profile_id: Optional[int] = Field(
        default=None, validation_alias=AliasChoices("SONARR_QUALITY_PROFILE_ID")
    )
    sonarr_root_folder: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("SONARR_ROOT_FOLDER")
    )

    # --- Radarr ---
    radarr_base_url: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("RADARR_URL", "RADARR_BASE_URL")
    )
    radarr_api_key: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("RADARR_API_KEY", "RADARR_KEY")
    )
    radarr_quality_profile_id: Optional[int] = Field(
        default=None, validation_alias=AliasChoices("RADARR_QUALITY_PROFILE_ID")
    )
    radarr_root_folder: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("RADARR_ROOT_FOLDER")
    )

    # --- Prowlarr / qBittorrent ---
    prowlarr_base_url: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("PROWLARR_URL", "PROWLARR_BASE_URL")
    )
    prowlarr_api_key: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("PROWLARR_API_KEY", "PROWLARR_KEY")
    )
    qbittorrent_base_url: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("QBIT_URL", "QBITTORRENT_URL", "QBITTORRENT_BASE_URL")
    )
    qbittorrent_username: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("QBIT_USERNAME", "QBITTORRENT_USERNAME")
    )
    qbittorrent_password: Optional[str] = Field(
        default=None, validation_alias=AliasChoices("QBIT_PASSWORD", "QBITTORRENT_PASSWORD")
    )


# Module-level singleton; importers use ``from .config import settings``.
settings = Settings()
def _db_path() -> str:
    """Resolve the configured SQLite file to an absolute path, creating its directory."""
    path = settings.sqlite_path or "data/magent.db"
    if not os.path.isabs(path):
        path = os.path.join(os.getcwd(), path)
    os.makedirs(os.path.dirname(path), exist_ok=True)
    return path


def _connect() -> sqlite3.Connection:
    """Open a fresh connection to the application database."""
    return sqlite3.connect(_db_path())


def init_db() -> None:
    """Create tables and indexes, apply ad-hoc column migrations, seed the admin user."""
    ddl_statements = (
        """
        CREATE TABLE IF NOT EXISTS snapshots (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            request_id TEXT NOT NULL,
            state TEXT NOT NULL,
            state_reason TEXT,
            created_at TEXT NOT NULL,
            payload_json TEXT NOT NULL
        )
        """,
        """
        CREATE TABLE IF NOT EXISTS actions (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            request_id TEXT NOT NULL,
            action_id TEXT NOT NULL,
            label TEXT NOT NULL,
            status TEXT NOT NULL,
            message TEXT,
            created_at TEXT NOT NULL
        )
        """,
        """
        CREATE TABLE IF NOT EXISTS users (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            username TEXT NOT NULL UNIQUE,
            password_hash TEXT NOT NULL,
            role TEXT NOT NULL,
            auth_provider TEXT NOT NULL DEFAULT 'local',
            created_at TEXT NOT NULL,
            last_login_at TEXT,
            is_blocked INTEGER NOT NULL DEFAULT 0
        )
        """,
        """
        CREATE TABLE IF NOT EXISTS settings (
            key TEXT PRIMARY KEY,
            value TEXT,
            updated_at TEXT NOT NULL
        )
        """,
        """
        CREATE TABLE IF NOT EXISTS requests_cache (
            request_id INTEGER PRIMARY KEY,
            media_id INTEGER,
            media_type TEXT,
            status INTEGER,
            title TEXT,
            year INTEGER,
            requested_by TEXT,
            requested_by_norm TEXT,
            created_at TEXT,
            updated_at TEXT,
            payload_json TEXT NOT NULL
        )
        """,
        """
        CREATE INDEX IF NOT EXISTS idx_requests_cache_created_at
        ON requests_cache (created_at)
        """,
        """
        CREATE INDEX IF NOT EXISTS idx_requests_cache_requested_by_norm
        ON requests_cache (requested_by_norm)
        """,
    )
    # Columns added after the initial schema shipped; ALTER on an existing
    # column raises OperationalError, which we treat as "already migrated".
    migrations = (
        "ALTER TABLE users ADD COLUMN last_login_at TEXT",
        "ALTER TABLE users ADD COLUMN is_blocked INTEGER NOT NULL DEFAULT 0",
        "ALTER TABLE users ADD COLUMN auth_provider TEXT NOT NULL DEFAULT 'local'",
    )
    with _connect() as conn:
        for statement in ddl_statements:
            conn.execute(statement)
        for migration in migrations:
            try:
                conn.execute(migration)
            except sqlite3.OperationalError:
                pass
        _backfill_auth_providers()
        ensure_admin_user()


def save_snapshot(snapshot: Snapshot) -> None:
    """Append one immutable state snapshot row for a request."""
    row = (
        snapshot.request_id,
        snapshot.state.value,
        snapshot.state_reason,
        datetime.now(timezone.utc).isoformat(),
        json.dumps(snapshot.model_dump(), ensure_ascii=True),
    )
    with _connect() as conn:
        conn.execute(
            """
            INSERT INTO snapshots (request_id, state, state_reason, created_at, payload_json)
            VALUES (?, ?, ?, ?, ?)
            """,
            row,
        )


def save_action(
    request_id: str,
    action_id: str,
    label: str,
    status: str,
    message: Optional[str] = None,
) -> None:
    """Append one action-log row for a request."""
    stamp = datetime.now(timezone.utc).isoformat()
    with _connect() as conn:
        conn.execute(
            """
            INSERT INTO actions (request_id, action_id, label, status, message, created_at)
            VALUES (?, ?, ?, ?, ?, ?)
            """,
            (request_id, action_id, label, status, message, stamp),
        )


def get_recent_snapshots(request_id: str, limit: int = 10) -> list[dict[str, Any]]:
    """Return up to *limit* snapshots for a request, newest first, payloads decoded."""
    with _connect() as conn:
        rows = conn.execute(
            """
            SELECT request_id, state, state_reason, created_at, payload_json
            FROM snapshots
            WHERE request_id = ?
            ORDER BY id DESC
            LIMIT ?
            """,
            (request_id, limit),
        ).fetchall()
    return [
        {
            "request_id": rid,
            "state": state,
            "state_reason": reason,
            "created_at": created,
            "payload": json.loads(payload),
        }
        for rid, state, reason, created, payload in rows
    ]


def get_recent_actions(request_id: str, limit: int = 10) -> list[dict[str, Any]]:
    """Return up to *limit* action-log rows for a request, newest first."""
    with _connect() as conn:
        rows = conn.execute(
            """
            SELECT request_id, action_id, label, status, message, created_at
            FROM actions
            WHERE request_id = ?
            ORDER BY id DESC
            LIMIT ?
            """,
            (request_id, limit),
        ).fetchall()
    return [
        {
            "request_id": rid,
            "action_id": aid,
            "label": label,
            "status": status,
            "message": message,
            "created_at": created,
        }
        for rid, aid, label, status, message, created in rows
    ]
+ """, + (request_id, limit), + ).fetchall() + results = [] + for row in rows: + results.append( + { + "request_id": row[0], + "action_id": row[1], + "label": row[2], + "status": row[3], + "message": row[4], + "created_at": row[5], + } + ) + return results + + +def ensure_admin_user() -> None: + if not settings.admin_username or not settings.admin_password: + return + existing = get_user_by_username(settings.admin_username) + if existing: + return + create_user(settings.admin_username, settings.admin_password, role="admin") + + +def create_user(username: str, password: str, role: str = "user", auth_provider: str = "local") -> None: + created_at = datetime.now(timezone.utc).isoformat() + password_hash = hash_password(password) + with _connect() as conn: + conn.execute( + """ + INSERT INTO users (username, password_hash, role, auth_provider, created_at) + VALUES (?, ?, ?, ?, ?) + """, + (username, password_hash, role, auth_provider, created_at), + ) + + +def create_user_if_missing( + username: str, password: str, role: str = "user", auth_provider: str = "local" +) -> bool: + created_at = datetime.now(timezone.utc).isoformat() + password_hash = hash_password(password) + with _connect() as conn: + cursor = conn.execute( + """ + INSERT OR IGNORE INTO users (username, password_hash, role, auth_provider, created_at) + VALUES (?, ?, ?, ?, ?) + """, + (username, password_hash, role, auth_provider, created_at), + ) + return cursor.rowcount > 0 + + +def get_user_by_username(username: str) -> Optional[Dict[str, Any]]: + with _connect() as conn: + row = conn.execute( + """ + SELECT id, username, password_hash, role, auth_provider, created_at, last_login_at, is_blocked + FROM users + WHERE username = ? 
+ """, + (username,), + ).fetchone() + if not row: + return None + return { + "id": row[0], + "username": row[1], + "password_hash": row[2], + "role": row[3], + "auth_provider": row[4], + "created_at": row[5], + "last_login_at": row[6], + "is_blocked": bool(row[7]), + } + + +def get_all_users() -> list[Dict[str, Any]]: + with _connect() as conn: + rows = conn.execute( + """ + SELECT id, username, role, auth_provider, created_at, last_login_at, is_blocked + FROM users + ORDER BY username COLLATE NOCASE + """ + ).fetchall() + results: list[Dict[str, Any]] = [] + for row in rows: + results.append( + { + "id": row[0], + "username": row[1], + "role": row[2], + "auth_provider": row[3], + "created_at": row[4], + "last_login_at": row[5], + "is_blocked": bool(row[6]), + } + ) + return results + + +def set_last_login(username: str) -> None: + timestamp = datetime.now(timezone.utc).isoformat() + with _connect() as conn: + conn.execute( + """ + UPDATE users SET last_login_at = ? WHERE username = ? + """, + (timestamp, username), + ) + + +def set_user_blocked(username: str, blocked: bool) -> None: + with _connect() as conn: + conn.execute( + """ + UPDATE users SET is_blocked = ? WHERE username = ? + """, + (1 if blocked else 0, username), + ) + + +def set_user_role(username: str, role: str) -> None: + with _connect() as conn: + conn.execute( + """ + UPDATE users SET role = ? WHERE username = ? + """, + (role, username), + ) + + +def verify_user_password(username: str, password: str) -> Optional[Dict[str, Any]]: + user = get_user_by_username(username) + if not user: + return None + if not verify_password(password, user["password_hash"]): + return None + return user + + +def set_user_password(username: str, password: str) -> None: + password_hash = hash_password(password) + with _connect() as conn: + conn.execute( + """ + UPDATE users SET password_hash = ? WHERE username = ? 
+ """, + (password_hash, username), + ) + + +def _backfill_auth_providers() -> None: + with _connect() as conn: + rows = conn.execute( + """ + SELECT username, password_hash, auth_provider + FROM users + """ + ).fetchall() + updates: list[tuple[str, str]] = [] + for row in rows: + username, password_hash, auth_provider = row + provider = auth_provider or "local" + if provider == "local": + if verify_password("jellyfin-user", password_hash): + provider = "jellyfin" + elif verify_password("jellyseerr-user", password_hash): + provider = "jellyseerr" + if provider != auth_provider: + updates.append((provider, username)) + if not updates: + return + with _connect() as conn: + conn.executemany( + """ + UPDATE users SET auth_provider = ? WHERE username = ? + """, + updates, + ) + + +def upsert_request_cache( + request_id: int, + media_id: Optional[int], + media_type: Optional[str], + status: Optional[int], + title: Optional[str], + year: Optional[int], + requested_by: Optional[str], + requested_by_norm: Optional[str], + created_at: Optional[str], + updated_at: Optional[str], + payload_json: str, +) -> None: + with _connect() as conn: + conn.execute( + """ + INSERT INTO requests_cache ( + request_id, + media_id, + media_type, + status, + title, + year, + requested_by, + requested_by_norm, + created_at, + updated_at, + payload_json + ) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
+ ON CONFLICT(request_id) DO UPDATE SET + media_id = excluded.media_id, + media_type = excluded.media_type, + status = excluded.status, + title = excluded.title, + year = excluded.year, + requested_by = excluded.requested_by, + requested_by_norm = excluded.requested_by_norm, + created_at = excluded.created_at, + updated_at = excluded.updated_at, + payload_json = excluded.payload_json + """, + ( + request_id, + media_id, + media_type, + status, + title, + year, + requested_by, + requested_by_norm, + created_at, + updated_at, + payload_json, + ), + ) + logger.debug( + "requests_cache upsert: request_id=%s media_id=%s status=%s updated_at=%s", + request_id, + media_id, + status, + updated_at, + ) + + +def get_request_cache_last_updated() -> Optional[str]: + with _connect() as conn: + row = conn.execute( + """ + SELECT MAX(updated_at) FROM requests_cache + """ + ).fetchone() + if not row: + return None + return row[0] + + +def get_request_cache_by_id(request_id: int) -> Optional[Dict[str, Any]]: + with _connect() as conn: + row = conn.execute( + """ + SELECT request_id, updated_at + FROM requests_cache + WHERE request_id = ? + """, + (request_id,), + ).fetchone() + if not row: + logger.debug("requests_cache miss: request_id=%s", request_id) + return None + logger.debug("requests_cache hit: request_id=%s updated_at=%s", row[0], row[1]) + return {"request_id": row[0], "updated_at": row[1]} + + +def get_request_cache_payload(request_id: int) -> Optional[Dict[str, Any]]: + with _connect() as conn: + row = conn.execute( + """ + SELECT payload_json + FROM requests_cache + WHERE request_id = ? 
+ """, + (request_id,), + ).fetchone() + if not row or not row[0]: + logger.debug("requests_cache payload miss: request_id=%s", request_id) + return None + try: + payload = json.loads(row[0]) + logger.debug("requests_cache payload hit: request_id=%s", request_id) + return payload + except json.JSONDecodeError: + logger.warning("requests_cache payload invalid json: request_id=%s", request_id) + return None + + +def get_cached_requests( + limit: int, + offset: int, + requested_by_norm: Optional[str] = None, + since_iso: Optional[str] = None, +) -> list[Dict[str, Any]]: + query = """ + SELECT request_id, media_id, media_type, status, title, year, requested_by, created_at + FROM requests_cache + """ + params: list[Any] = [] + conditions = [] + if requested_by_norm: + conditions.append("requested_by_norm = ?") + params.append(requested_by_norm) + if since_iso: + conditions.append("created_at >= ?") + params.append(since_iso) + if conditions: + query += " WHERE " + " AND ".join(conditions) + query += " ORDER BY created_at DESC, request_id DESC LIMIT ? OFFSET ?" + params.extend([limit, offset]) + with _connect() as conn: + rows = conn.execute(query, tuple(params)).fetchall() + logger.debug( + "requests_cache list: count=%s requested_by_norm=%s since_iso=%s", + len(rows), + requested_by_norm, + since_iso, + ) + results: list[Dict[str, Any]] = [] + for row in rows: + results.append( + { + "request_id": row[0], + "media_id": row[1], + "media_type": row[2], + "status": row[3], + "title": row[4], + "year": row[5], + "requested_by": row[6], + "created_at": row[7], + } + ) + return results + + +def get_request_cache_overview(limit: int = 50) -> list[Dict[str, Any]]: + limit = max(1, min(limit, 200)) + with _connect() as conn: + rows = conn.execute( + """ + SELECT request_id, media_id, media_type, status, title, year, requested_by, created_at, updated_at + FROM requests_cache + ORDER BY updated_at DESC, request_id DESC + LIMIT ? 
+ """, + (limit,), + ).fetchall() + results: list[Dict[str, Any]] = [] + for row in rows: + results.append( + { + "request_id": row[0], + "media_id": row[1], + "media_type": row[2], + "status": row[3], + "title": row[4], + "year": row[5], + "requested_by": row[6], + "created_at": row[7], + "updated_at": row[8], + } + ) + return results + + +def get_request_cache_count() -> int: + with _connect() as conn: + row = conn.execute("SELECT COUNT(*) FROM requests_cache").fetchone() + return int(row[0] or 0) + + +def prune_duplicate_requests_cache() -> int: + with _connect() as conn: + cursor = conn.execute( + """ + DELETE FROM requests_cache + WHERE media_id IS NOT NULL + AND request_id NOT IN ( + SELECT MAX(request_id) + FROM requests_cache + WHERE media_id IS NOT NULL + GROUP BY media_id, COALESCE(requested_by_norm, '') + ) + """ + ) + return cursor.rowcount + + +def get_request_cache_payloads(limit: int = 200, offset: int = 0) -> list[Dict[str, Any]]: + limit = max(1, min(limit, 1000)) + offset = max(0, offset) + with _connect() as conn: + rows = conn.execute( + """ + SELECT request_id, payload_json + FROM requests_cache + ORDER BY request_id ASC + LIMIT ? OFFSET ? + """, + (limit, offset), + ).fetchall() + results: list[Dict[str, Any]] = [] + for row in rows: + payload = None + if row[1]: + try: + payload = json.loads(row[1]) + except json.JSONDecodeError: + payload = None + results.append({"request_id": row[0], "payload": payload}) + return results + + +def get_cached_requests_since(since_iso: str) -> list[Dict[str, Any]]: + with _connect() as conn: + rows = conn.execute( + """ + SELECT request_id, media_id, media_type, status, title, year, requested_by, requested_by_norm, created_at + FROM requests_cache + WHERE created_at >= ? 
def get_cached_request_by_media_id(
    media_id: int, requested_by_norm: Optional[str] = None
) -> Optional[Dict[str, Any]]:
    """Find the newest cached request for a media id (optionally one requester's)."""
    query = """
    SELECT request_id, status
    FROM requests_cache
    WHERE media_id = ?
    """
    params: list[Any] = [media_id]
    if requested_by_norm:
        query += " AND requested_by_norm = ?"
        params.append(requested_by_norm)
    query += " ORDER BY created_at DESC, request_id DESC LIMIT 1"
    with _connect() as conn:
        row = conn.execute(query, tuple(params)).fetchone()
    if row is None:
        return None
    return {"request_id": row[0], "status": row[1]}


def get_setting(key: str) -> Optional[str]:
    """Read one settings-override value; None when the key has no override."""
    with _connect() as conn:
        row = conn.execute(
            """
            SELECT value FROM settings WHERE key = ?
            """,
            (key,),
        ).fetchone()
    return row[0] if row else None


def set_setting(key: str, value: Optional[str]) -> None:
    """Upsert a settings-override value with a fresh updated_at stamp."""
    with _connect() as conn:
        conn.execute(
            """
            INSERT INTO settings (key, value, updated_at)
            VALUES (?, ?, ?)
            ON CONFLICT(key) DO UPDATE SET value = excluded.value, updated_at = excluded.updated_at
            """,
            (key, value, datetime.now(timezone.utc).isoformat()),
        )


def delete_setting(key: str) -> None:
    """Remove a settings override, reverting the key to its env default."""
    with _connect() as conn:
        conn.execute(
            """
            DELETE FROM settings WHERE key = ?
            """,
            (key,),
        )


def get_settings_overrides() -> Dict[str, str]:
    """Return every stored override as {key: value}; falsy keys are skipped."""
    with _connect() as conn:
        rows = conn.execute(
            """
            SELECT key, value FROM settings
            """
        ).fetchall()
    return {key: value for key, value in rows if key}


def run_integrity_check() -> str:
    """Run PRAGMA integrity_check and return its verdict ('ok' when healthy)."""
    with _connect() as conn:
        row = conn.execute("PRAGMA integrity_check").fetchone()
    return str(row[0]) if row else "unknown"


def vacuum_db() -> None:
    """Compact the database file."""
    with _connect() as conn:
        conn.execute("VACUUM")


def clear_requests_cache() -> int:
    """Delete every cached request row; returns the count removed."""
    with _connect() as conn:
        return conn.execute("DELETE FROM requests_cache").rowcount


def clear_history() -> Dict[str, int]:
    """Delete all action and snapshot history; returns per-table counts."""
    with _connect() as conn:
        removed_actions = conn.execute("DELETE FROM actions").rowcount
        removed_snapshots = conn.execute("DELETE FROM snapshots").rowcount
    return {"actions": removed_actions, "snapshots": removed_snapshots}


def cleanup_history(days: int) -> Dict[str, int]:
    """Delete history older than *days*; non-positive *days* is a no-op."""
    if days <= 0:
        return {"actions": 0, "snapshots": 0}
    cutoff = (datetime.now(timezone.utc) - timedelta(days=days)).isoformat()
    with _connect() as conn:
        removed_actions = conn.execute(
            "DELETE FROM actions WHERE created_at < ?",
            (cutoff,),
        ).rowcount
        removed_snapshots = conn.execute(
            "DELETE FROM snapshots WHERE created_at < ?",
            (cutoff,),
        ).rowcount
    return {"actions": removed_actions, "snapshots": removed_snapshots}
def configure_logging(log_level: Optional[str], log_file: Optional[str]) -> None:
    """(Re)configure root logging with a console handler and optional rotating file.

    Called at startup and again whenever the admin changes log settings, so it
    must fully replace the previous configuration.

    Args:
        log_level: Level name (e.g. "DEBUG"); unknown/None falls back to INFO.
        log_file: Path for the rotating log file (2 MB x 3 backups); relative
            paths resolve against the current working directory. None/empty
            disables file logging.
    """
    level_name = (log_level or "INFO").upper()
    level = getattr(logging, level_name, logging.INFO)

    handlers: list[logging.Handler] = []
    stream_handler = logging.StreamHandler()
    handlers.append(stream_handler)

    if log_file:
        log_path = log_file
        if not os.path.isabs(log_path):
            log_path = os.path.join(os.getcwd(), log_path)
        os.makedirs(os.path.dirname(log_path), exist_ok=True)
        file_handler = RotatingFileHandler(
            log_path, maxBytes=2_000_000, backupCount=3, encoding="utf-8"
        )
        handlers.append(file_handler)

    formatter = logging.Formatter(
        fmt="%(asctime)s | %(levelname)s | %(name)s | %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )
    for handler in handlers:
        handler.setFormatter(formatter)

    root = logging.getLogger()
    for handler in list(root.handlers):
        root.removeHandler(handler)
        # BUGFIX: close detached handlers; otherwise each reconfiguration
        # (admin log-settings change) leaks the previous rotating-file
        # descriptor and can keep writing to a stale file.
        try:
            handler.close()
        except Exception:  # noqa: BLE001 - never let cleanup break logging setup
            pass
    for handler in handlers:
        root.addHandler(handler)
    root.setLevel(level)

    # Keep uvicorn's loggers aligned with the application level.
    logging.getLogger("uvicorn").setLevel(level)
    logging.getLogger("uvicorn.error").setLevel(level)
    logging.getLogger("uvicorn.access").setLevel(level)
+@app.get("/health") +async def health() -> dict: + return {"status": "ok"} + +@app.on_event("startup") +async def startup() -> None: + init_db() + runtime = get_runtime_settings() + configure_logging(runtime.log_level, runtime.log_file) + asyncio.create_task(run_daily_jellyfin_sync()) + asyncio.create_task(startup_warmup_requests_cache()) + asyncio.create_task(run_requests_delta_loop()) + asyncio.create_task(run_daily_requests_full_sync()) + asyncio.create_task(run_daily_db_cleanup()) + + +app.include_router(requests_router) +app.include_router(auth_router) +app.include_router(admin_router) +app.include_router(images_router) +app.include_router(branding_router) +app.include_router(status_router) diff --git a/backend/app/models.py b/backend/app/models.py new file mode 100644 index 0000000..94329dd --- /dev/null +++ b/backend/app/models.py @@ -0,0 +1,65 @@ +from enum import Enum +from typing import Any, Dict, List, Optional +from pydantic import BaseModel, Field + + +class RequestType(str, Enum): + movie = "movie" + tv = "tv" + unknown = "unknown" + + +class NormalizedState(str, Enum): + requested = "REQUESTED" + approved = "APPROVED" + needs_add = "NEEDS_ADD" + added_to_arr = "ADDED_TO_ARR" + searching = "SEARCHING" + grabbed = "GRABBED" + downloading = "DOWNLOADING" + importing = "IMPORTING" + completed = "COMPLETED" + failed = "FAILED" + available = "AVAILABLE" + unknown = "UNKNOWN" + + +class TimelineHop(BaseModel): + service: str + status: str + details: Dict[str, Any] = Field(default_factory=dict) + timestamp: Optional[str] = None + + +class ActionOption(BaseModel): + id: str + label: str + risk: str + requires_confirmation: bool = True + + +class Snapshot(BaseModel): + request_id: str + title: str + year: Optional[int] = None + request_type: RequestType = RequestType.unknown + state: NormalizedState = NormalizedState.unknown + state_reason: Optional[str] = None + timeline: List[TimelineHop] = Field(default_factory=list) + actions: List[ActionOption] = 
class RequestType(str, Enum):
    """Kind of media a request refers to."""

    movie = "movie"
    tv = "tv"
    unknown = "unknown"


class NormalizedState(str, Enum):
    """Unified lifecycle state across Jellyseerr, the *arr apps and the downloader."""

    requested = "REQUESTED"
    approved = "APPROVED"
    needs_add = "NEEDS_ADD"
    added_to_arr = "ADDED_TO_ARR"
    searching = "SEARCHING"
    grabbed = "GRABBED"
    downloading = "DOWNLOADING"
    importing = "IMPORTING"
    completed = "COMPLETED"
    failed = "FAILED"
    available = "AVAILABLE"
    unknown = "UNKNOWN"


class TimelineHop(BaseModel):
    """One step of a request's journey through a service."""

    service: str
    status: str
    details: Dict[str, Any] = Field(default_factory=dict)
    timestamp: Optional[str] = None


class ActionOption(BaseModel):
    """A remediation action the UI can offer for a request."""

    id: str
    label: str
    risk: str
    requires_confirmation: bool = True


class Snapshot(BaseModel):
    """Point-in-time view of a request: state, timeline, actions and raw payloads."""

    request_id: str
    title: str
    year: Optional[int] = None
    request_type: RequestType = RequestType.unknown
    state: NormalizedState = NormalizedState.unknown
    state_reason: Optional[str] = None
    timeline: List[TimelineHop] = Field(default_factory=list)
    actions: List[ActionOption] = Field(default_factory=list)
    artwork: Dict[str, Any] = Field(default_factory=dict)
    raw: Dict[str, Any] = Field(default_factory=dict)


class TriageRecommendation(BaseModel):
    """A single AI-suggested remediation with its rationale and risk."""

    action_id: str
    title: str
    reason: str
    risk: str


class TriageResult(BaseModel):
    """Outcome of AI triage: summary, confidence, diagnosed cause, suggestions."""

    summary: str
    confidence: float
    root_cause: str
    recommendations: List[TriageRecommendation]
"sonarr_api_key", + "radarr_api_key", + "prowlarr_api_key", + "qbittorrent_password", +} + +SETTING_KEYS: List[str] = [ + "jellyseerr_base_url", + "jellyseerr_api_key", + "jellyfin_base_url", + "jellyfin_api_key", + "jellyfin_public_url", + "jellyfin_sync_to_arr", + "artwork_cache_mode", + "sonarr_base_url", + "sonarr_api_key", + "sonarr_quality_profile_id", + "sonarr_root_folder", + "radarr_base_url", + "radarr_api_key", + "radarr_quality_profile_id", + "radarr_root_folder", + "prowlarr_base_url", + "prowlarr_api_key", + "qbittorrent_base_url", + "qbittorrent_username", + "qbittorrent_password", + "log_level", + "log_file", + "requests_sync_ttl_minutes", + "requests_poll_interval_seconds", + "requests_delta_sync_interval_minutes", + "requests_full_sync_time", + "requests_cleanup_time", + "requests_cleanup_days", + "requests_data_source", +] + +def _normalize_root_folders(folders: Any) -> List[Dict[str, Any]]: + if not isinstance(folders, list): + return [] + results = [] + for folder in folders: + if not isinstance(folder, dict): + continue + folder_id = folder.get("id") + path = folder.get("path") + if folder_id is None or path is None: + continue + results.append({"id": folder_id, "path": path, "label": path}) + return results + + +def _normalize_quality_profiles(profiles: Any) -> List[Dict[str, Any]]: + if not isinstance(profiles, list): + return [] + results = [] + for profile in profiles: + if not isinstance(profile, dict): + continue + profile_id = profile.get("id") + name = profile.get("name") + if profile_id is None or name is None: + continue + results.append({"id": profile_id, "name": name, "label": name}) + return results + + +@router.get("/settings") +async def list_settings() -> Dict[str, Any]: + overrides = get_settings_overrides() + results = [] + for key in SETTING_KEYS: + override_present = key in overrides + value = overrides.get(key) if override_present else getattr(env_settings, key) + is_set = value is not None and str(value).strip() != "" + 
@router.get("/settings")
async def list_settings() -> Dict[str, Any]:
    """Report every known setting with its effective value and source.

    Sensitive values (API keys, passwords) are never echoed back; only their
    presence is reported via ``isSet``.
    """
    overrides = get_settings_overrides()
    results = []
    for key in SETTING_KEYS:
        from_db = key in overrides
        value = overrides[key] if from_db else getattr(env_settings, key)
        is_set = value is not None and str(value).strip() != ""
        sensitive = key in SENSITIVE_KEYS
        if from_db:
            source = "db"
        elif is_set:
            source = "env"
        else:
            source = "unset"
        results.append(
            {
                "key": key,
                "value": None if sensitive else value,
                "isSet": is_set,
                "source": source,
                "sensitive": sensitive,
            }
        )
    return {"settings": results}


@router.put("/settings")
async def update_settings(payload: Dict[str, Any]) -> Dict[str, Any]:
    """Persist setting overrides.

    Semantics per key: None is ignored (unchanged), an empty/whitespace string
    deletes the override (revert to env), anything else is stored as a string.
    Logging is re-applied immediately when log settings change.
    """
    updates = 0
    touched_logging = False
    for key, value in payload.items():
        if key not in SETTING_KEYS:
            raise HTTPException(status_code=400, detail=f"Unknown setting: {key}")
        if value is None:
            continue
        if isinstance(value, str) and value.strip() == "":
            delete_setting(key)
        else:
            set_setting(key, str(value))
        updates += 1
        if key in {"log_level", "log_file"}:
            touched_logging = True
    if touched_logging:
        runtime = get_runtime_settings()
        configure_logging(runtime.log_level, runtime.log_file)
    return {"status": "ok", "updated": updates}


@router.get("/sonarr/options")
async def sonarr_options() -> Dict[str, Any]:
    """Expose Sonarr root folders and quality profiles for admin dropdowns."""
    runtime = get_runtime_settings()
    client = SonarrClient(runtime.sonarr_base_url, runtime.sonarr_api_key)
    if not client.configured():
        raise HTTPException(status_code=400, detail="Sonarr not configured")
    return {
        "rootFolders": _normalize_root_folders(await client.get_root_folders()),
        "qualityProfiles": _normalize_quality_profiles(await client.get_quality_profiles()),
    }


@router.get("/radarr/options")
async def radarr_options() -> Dict[str, Any]:
    """Expose Radarr root folders and quality profiles for admin dropdowns."""
    runtime = get_runtime_settings()
    client = RadarrClient(runtime.radarr_base_url, runtime.radarr_api_key)
    if not client.configured():
        raise HTTPException(status_code=400, detail="Radarr not configured")
    return {
        "rootFolders": _normalize_root_folders(await client.get_root_folders()),
        "qualityProfiles": _normalize_quality_profiles(await client.get_quality_profiles()),
    }
@router.get("/jellyfin/users")
async def jellyfin_users() -> Dict[str, Any]:
    """List Jellyfin users (id, name, password flag, last login) from the configured server."""
    runtime = get_runtime_settings()
    client = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key)
    if not client.configured():
        raise HTTPException(status_code=400, detail="Jellyfin not configured")
    users = await client.get_users()
    rows: list = []
    if isinstance(users, list):
        rows = [
            {
                "id": entry.get("Id"),
                "name": entry.get("Name"),
                "hasPassword": entry.get("HasPassword"),
                "lastLoginDate": entry.get("LastLoginDate"),
            }
            for entry in users
            if isinstance(entry, dict)
        ]
    return {"users": rows}


@router.post("/jellyfin/users/sync")
async def jellyfin_users_sync() -> Dict[str, Any]:
    """Import Jellyfin users into the local user table; returns the import count."""
    imported = await sync_jellyfin_users()
    return {"status": "ok", "imported": imported}


@router.post("/requests/sync")
async def requests_sync() -> Dict[str, Any]:
    """Kick off a full Jellyseerr request sync and return the sync state."""
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if not client.configured():
        raise HTTPException(status_code=400, detail="Jellyseerr not configured")
    state = await requests_router.start_requests_sync(
        runtime.jellyseerr_base_url, runtime.jellyseerr_api_key
    )
    logger.info("Admin triggered requests sync: status=%s", state.get("status"))
    return {"status": "ok", "sync": state}


@router.post("/requests/sync/delta")
async def requests_sync_delta() -> Dict[str, Any]:
    """Kick off an incremental Jellyseerr request sync and return the sync state."""
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if not client.configured():
        raise HTTPException(status_code=400, detail="Jellyseerr not configured")
    state = await requests_router.start_requests_delta_sync(
        runtime.jellyseerr_base_url, runtime.jellyseerr_api_key
    )
    logger.info("Admin triggered delta requests sync: status=%s", state.get("status"))
    return {"status": "ok", "sync": state}
@router.post("/requests/artwork/prefetch")
async def requests_artwork_prefetch() -> Dict[str, Any]:
    """Start the background artwork prefetch job in the requests router."""
    runtime = get_runtime_settings()
    state = await requests_router.start_artwork_prefetch(
        runtime.jellyseerr_base_url, runtime.jellyseerr_api_key
    )
    logger.info("Admin triggered artwork prefetch: status=%s", state.get("status"))
    return {"status": "ok", "prefetch": state}


@router.get("/requests/artwork/status")
async def requests_artwork_status() -> Dict[str, Any]:
    """Report the current artwork-prefetch job state."""
    return {"status": "ok", "prefetch": requests_router.get_artwork_prefetch_state()}


@router.get("/requests/sync/status")
async def requests_sync_status() -> Dict[str, Any]:
    """Report the current request-sync job state."""
    return {"status": "ok", "sync": requests_router.get_requests_sync_state()}


@router.get("/logs")
async def read_logs(lines: int = 200) -> Dict[str, Any]:
    """Return the last *lines* lines (clamped to 1..1000) of the configured log file."""
    runtime = get_runtime_settings()
    log_file = runtime.log_file
    if not log_file:
        raise HTTPException(status_code=400, detail="Log file not configured")
    if not os.path.isabs(log_file):
        # Relative paths are resolved against the process working directory.
        log_file = os.path.join(os.getcwd(), log_file)
    if not os.path.exists(log_file):
        raise HTTPException(status_code=404, detail="Log file not found")
    lines = max(1, min(lines, 1000))
    from collections import deque

    # deque(handle, maxlen=n) keeps only the trailing n lines in O(file) time.
    with open(log_file, "r", encoding="utf-8", errors="replace") as handle:
        tail = deque(handle, maxlen=lines)
    return {"lines": list(tail)}


@router.get("/requests/cache")
async def requests_cache(limit: int = 50) -> Dict[str, Any]:
    """Return an overview of the cached-request rows for the admin UI."""
    return {"rows": get_request_cache_overview(limit)}


@router.post("/branding/logo")
async def upload_branding_logo(file: UploadFile = File(...)) -> Dict[str, Any]:
    """Store an uploaded image as the site logo/favicon (see branding router)."""
    return await save_branding_image(file)


@router.post("/maintenance/repair")
async def repair_database() -> Dict[str, Any]:
    """Run SQLite integrity_check and VACUUM; returns the integrity result."""
    result = run_integrity_check()
    vacuum_db()
    logger.info("Database repair executed: integrity_check=%s", result)
    return {"status": "ok", "integrity": result}


@router.post("/maintenance/flush")
async def flush_database() -> Dict[str, Any]:
    """Destructive: clear the request cache and history and forget the last-sync marker."""
    cleared = clear_requests_cache()
    history = clear_history()
    delete_setting("requests_sync_last_at")
    logger.warning("Database flush executed: requests_cache=%s history=%s", cleared, history)
    return {"status": "ok", "requestsCleared": cleared, "historyCleared": history}


@router.post("/maintenance/cleanup")
async def cleanup_database(days: int = 90) -> Dict[str, Any]:
    """Delete history older than *days* days (default 90)."""
    result = cleanup_history(days)
    logger.info("Database cleanup executed: days=%s result=%s", days, result)
    return {"status": "ok", "days": days, "cleared": result}


@router.post("/maintenance/logs/clear")
async def clear_logs() -> Dict[str, Any]:
    """Truncate the configured log file (creating its directory if needed)."""
    runtime = get_runtime_settings()
    log_file = runtime.log_file
    if not log_file:
        raise HTTPException(status_code=400, detail="Log file not configured")
    if not os.path.isabs(log_file):
        log_file = os.path.join(os.getcwd(), log_file)
    try:
        os.makedirs(os.path.dirname(log_file), exist_ok=True)
        # Opening in "w" mode truncates the file.
        with open(log_file, "w", encoding="utf-8"):
            pass
    except OSError as exc:
        raise HTTPException(status_code=500, detail=str(exc)) from exc
    logger.info("Log file cleared")
    return {"status": "ok"}


@router.get("/users")
async def list_users() -> Dict[str, Any]:
    """Return every local user row."""
    users = get_all_users()
    return {"users": users}


@router.post("/users/{username}/block")
async def block_user(username: str) -> Dict[str, Any]:
    """Mark a user as blocked (login will be refused)."""
    set_user_blocked(username, True)
    return {"status": "ok", "username": username, "blocked": True}


@router.post("/users/{username}/unblock")
async def unblock_user(username: str) -> Dict[str, Any]:
    """Clear a user's blocked flag."""
    set_user_blocked(username, False)
    return {"status": "ok", "username": username, "blocked": False}


@router.post("/users/{username}/role")
async def update_user_role(username: str, payload: Dict[str, Any]) -> Dict[str, Any]:
    """Set a user's role; only "admin" and "user" are accepted."""
    role = payload.get("role")
    if role not in {"admin", "user"}:
        raise HTTPException(status_code=400, detail="Invalid role")
    set_user_role(username, role)
    return {"status": "ok", "username": username, "role": role}


@router.post("/users/{username}/password")
async def update_user_password(username: str, payload: Dict[str, Any]) -> Dict[str, Any]:
    """Admin password reset for a *local* user; minimum 8 characters after trimming."""
    new_password = payload.get("password") if isinstance(payload, dict) else None
    if not isinstance(new_password, str) or len(new_password.strip()) < 8:
        raise HTTPException(status_code=400, detail="Password must be at least 8 characters.")
    user = get_user_by_username(username)
    if not user:
        raise HTTPException(status_code=404, detail="User not found")
    if user.get("auth_provider") != "local":
        # External (jellyfin/jellyseerr) accounts authenticate upstream.
        raise HTTPException(
            status_code=400, detail="Password changes are only available for local users."
        )
    set_user_password(username, new_password.strip())
    return {"status": "ok", "username": username}


# --- file boundary: backend/app/routers/auth.py (new file in this commit) ---
from fastapi import APIRouter, HTTPException, status, Depends
from fastapi.security import OAuth2PasswordRequestForm

from ..db import (
    verify_user_password,
    create_user_if_missing,
    set_last_login,
    get_user_by_username,
    set_user_password,
)
from ..runtime import get_runtime_settings
from ..clients.jellyfin import JellyfinClient
from ..clients.jellyseerr import JellyseerrClient
from ..security import create_access_token
from ..auth import get_current_user

router = APIRouter(prefix="/auth", tags=["auth"])


@router.post("/login")
async def login(form_data: OAuth2PasswordRequestForm = Depends()) -> dict:
    """Local-account login: verify credentials, refuse blocked users, issue a JWT."""
    user = verify_user_password(form_data.username, form_data.password)
    if not user:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid credentials")
    if user.get("is_blocked"):
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User is blocked")
    token = create_access_token(user["username"], user["role"])
set_last_login(user["username"]) + return { + "access_token": token, + "token_type": "bearer", + "user": {"username": user["username"], "role": user["role"]}, + } + + +@router.post("/jellyfin/login") +async def jellyfin_login(form_data: OAuth2PasswordRequestForm = Depends()) -> dict: + runtime = get_runtime_settings() + client = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key) + if not client.configured(): + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Jellyfin not configured") + try: + response = await client.authenticate_by_name(form_data.username, form_data.password) + except Exception as exc: + raise HTTPException(status_code=status.HTTP_502_BAD_GATEWAY, detail=str(exc)) from exc + if not isinstance(response, dict) or not response.get("User"): + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid Jellyfin credentials") + create_user_if_missing(form_data.username, "jellyfin-user", role="user", auth_provider="jellyfin") + user = get_user_by_username(form_data.username) + if user and user.get("is_blocked"): + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User is blocked") + try: + users = await client.get_users() + if isinstance(users, list): + for user in users: + if not isinstance(user, dict): + continue + name = user.get("Name") + if isinstance(name, str) and name: + create_user_if_missing(name, "jellyfin-user", role="user", auth_provider="jellyfin") + except Exception: + pass + token = create_access_token(form_data.username, "user") + set_last_login(form_data.username) + return {"access_token": token, "token_type": "bearer", "user": {"username": form_data.username, "role": "user"}} + + +@router.post("/jellyseerr/login") +async def jellyseerr_login(form_data: OAuth2PasswordRequestForm = Depends()) -> dict: + runtime = get_runtime_settings() + client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key) + if not client.configured(): + raise 
HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Jellyseerr not configured") + payload = {"email": form_data.username, "password": form_data.password} + try: + response = await client.post("/api/v1/auth/login", payload=payload) + except Exception as exc: + raise HTTPException(status_code=status.HTTP_502_BAD_GATEWAY, detail=str(exc)) from exc + if not isinstance(response, dict): + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid Jellyseerr credentials") + create_user_if_missing(form_data.username, "jellyseerr-user", role="user", auth_provider="jellyseerr") + user = get_user_by_username(form_data.username) + if user and user.get("is_blocked"): + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User is blocked") + token = create_access_token(form_data.username, "user") + set_last_login(form_data.username) + return {"access_token": token, "token_type": "bearer", "user": {"username": form_data.username, "role": "user"}} + + +@router.get("/me") +async def me(current_user: dict = Depends(get_current_user)) -> dict: + return current_user + + +@router.post("/password") +async def change_password(payload: dict, current_user: dict = Depends(get_current_user)) -> dict: + if current_user.get("auth_provider") != "local": + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Password changes are only available for local users.", + ) + current_password = payload.get("current_password") if isinstance(payload, dict) else None + new_password = payload.get("new_password") if isinstance(payload, dict) else None + if not isinstance(current_password, str) or not isinstance(new_password, str): + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid payload") + if len(new_password.strip()) < 8: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, detail="Password must be at least 8 characters." 
        )
    user = verify_user_password(current_user["username"], current_password)
    if not user:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Current password is incorrect")
    set_user_password(current_user["username"], new_password.strip())
    return {"status": "ok"}


# --- file boundary: backend/app/routers/branding.py (new file in this commit) ---
import os
from io import BytesIO
from typing import Any, Dict

from fastapi import APIRouter, HTTPException, UploadFile, File
from fastapi.responses import FileResponse
from PIL import Image

router = APIRouter(prefix="/branding", tags=["branding"])

# Branding assets live under ./data/branding relative to the working directory.
_BRANDING_DIR = os.path.join(os.getcwd(), "data", "branding")
_LOGO_PATH = os.path.join(_BRANDING_DIR, "logo.png")
_FAVICON_PATH = os.path.join(_BRANDING_DIR, "favicon.ico")


def _ensure_branding_dir() -> None:
    """Create the branding directory if it does not exist."""
    os.makedirs(_BRANDING_DIR, exist_ok=True)


def _resize_image(image: Image.Image, max_size: int = 300) -> Image.Image:
    """Convert to RGBA and shrink in place to fit within max_size (aspect kept)."""
    image = image.convert("RGBA")
    image.thumbnail((max_size, max_size))
    return image


@router.get("/logo.png")
async def branding_logo() -> FileResponse:
    """Serve the uploaded logo with a short (5 min) public cache header."""
    if not os.path.exists(_LOGO_PATH):
        raise HTTPException(status_code=404, detail="Logo not found")
    headers = {"Cache-Control": "public, max-age=300"}
    return FileResponse(_LOGO_PATH, media_type="image/png", headers=headers)


@router.get("/favicon.ico")
async def branding_favicon() -> FileResponse:
    """Serve the derived favicon with a short (5 min) public cache header."""
    if not os.path.exists(_FAVICON_PATH):
        raise HTTPException(status_code=404, detail="Favicon not found")
    headers = {"Cache-Control": "public, max-age=300"}
    return FileResponse(_FAVICON_PATH, media_type="image/x-icon", headers=headers)


async def save_branding_image(file: UploadFile) -> Dict[str, Any]:
    """Validate an uploaded image, store it as the logo PNG, and derive a favicon ICO.

    Returns the stored logo dimensions. Raises 400 for non-image uploads,
    empty bodies, or unreadable image data.
    """
    if not file.content_type or not file.content_type.startswith("image/"):
        raise HTTPException(status_code=400, detail="Please upload an image file.")
    content = await file.read()
    if not content:
        raise HTTPException(status_code=400, detail="Uploaded file is empty.")
    try:
        image = Image.open(BytesIO(content))
    except OSError as exc:
        # PIL's UnidentifiedImageError subclasses OSError.
        raise HTTPException(status_code=400, detail="Image file could not be read.") from exc

    _ensure_branding_dir()
    image = _resize_image(image, 300)
    image.save(_LOGO_PATH, format="PNG")

    favicon = image.copy()
    favicon.thumbnail((64, 64))
    try:
        favicon.save(_FAVICON_PATH, format="ICO", sizes=[(32, 32), (64, 64)])
    except OSError:
        # Fall back to default ICO sizes if the explicit size list fails.
        favicon.save(_FAVICON_PATH, format="ICO")

    return {"status": "ok", "width": image.width, "height": image.height}


# --- file boundary: backend/app/routers/images.py (new file in this commit) ---
import os
import re
import mimetypes
from fastapi import APIRouter, HTTPException, Response
from fastapi.responses import FileResponse, RedirectResponse
import httpx

from ..runtime import get_runtime_settings

router = APIRouter(prefix="/images", tags=["images"])

_TMDB_BASE = "https://image.tmdb.org/t/p"
# Only these TMDB size buckets are accepted from clients.
_ALLOWED_SIZES = {"w92", "w154", "w185", "w342", "w500", "w780", "original"}


def _safe_filename(path: str) -> str:
    """Flatten a TMDB path into a single sanitized filename component."""
    trimmed = path.strip("/")
    trimmed = trimmed.replace("/", "_")
    safe = re.sub(r"[^A-Za-z0-9_.-]", "_", trimmed)
    return safe or "image"


async def cache_tmdb_image(path: str, size: str = "w342") -> bool:
    """Download a TMDB image into the local cache; True when the file is cached.

    Returns False (without fetching) for invalid paths/sizes or when artwork
    cache mode is not "cache". Propagates httpx errors from the download.
    """
    # Reject absolute URLs and traversal attempts before touching the filesystem.
    if not path or "://" in path or ".." in path:
        return False
    if not path.startswith("/"):
        path = f"/{path}"
    if size not in _ALLOWED_SIZES:
        return False

    runtime = get_runtime_settings()
    cache_mode = (runtime.artwork_cache_mode or "remote").lower()
    if cache_mode != "cache":
        return False

    cache_dir = os.path.join(os.getcwd(), "data", "artwork", "tmdb", size)
    os.makedirs(cache_dir, exist_ok=True)
    file_path = os.path.join(cache_dir, _safe_filename(path))
    if os.path.exists(file_path):
        return True

    url = f"{_TMDB_BASE}/{size}{path}"
    async with httpx.AsyncClient(timeout=20) as client:
        response = await client.get(url)
        response.raise_for_status()
        content = response.content
    with open(file_path, "wb") as handle:
        handle.write(content)
    return True


@router.get("/tmdb")
async def tmdb_image(path: str, size: str = "w342"):
    """Serve a TMDB image: redirect to TMDB in remote mode, else serve/populate the local cache."""
    if not path or "://" in path or ".." in path:
        raise HTTPException(status_code=400, detail="Invalid image path")
    if not path.startswith("/"):
        path = f"/{path}"
    if size not in _ALLOWED_SIZES:
        raise HTTPException(status_code=400, detail="Invalid size")

    runtime = get_runtime_settings()
    cache_mode = (runtime.artwork_cache_mode or "remote").lower()
    url = f"{_TMDB_BASE}/{size}{path}"
    if cache_mode != "cache":
        return RedirectResponse(url=url)

    cache_dir = os.path.join(os.getcwd(), "data", "artwork", "tmdb", size)
    os.makedirs(cache_dir, exist_ok=True)
    file_path = os.path.join(cache_dir, _safe_filename(path))
    headers = {"Cache-Control": "public, max-age=86400"}
    if os.path.exists(file_path):
        media_type = mimetypes.guess_type(file_path)[0] or "image/jpeg"
        return FileResponse(file_path, media_type=media_type, headers=headers)

    try:
        await cache_tmdb_image(path, size)
        if os.path.exists(file_path):
            media_type = mimetypes.guess_type(file_path)[0] or "image/jpeg"
            return FileResponse(file_path, media_type=media_type, headers=headers)
        raise HTTPException(status_code=502, detail="Image cache failed")
    except httpx.HTTPError as exc:
        # Network/HTTP failure while populating the cache → 502 to the client.
        raise HTTPException(status_code=502, detail=f"Image fetch failed: {exc}") from exc


# --- file boundary: backend/app/routers/requests.py (new file in this commit) ---
from typing import Any, Dict, List, Optional, Tuple
import asyncio
import httpx
import json
import logging
import time
from urllib.parse import quote
from datetime import datetime, timezone, timedelta
from fastapi import APIRouter, HTTPException, Depends

from ..clients.jellyseerr import JellyseerrClient
from ..clients.qbittorrent import QBittorrentClient
from ..clients.radarr import RadarrClient
from ..clients.sonarr import SonarrClient
from ..clients.prowlarr import ProwlarrClient
from ..ai.triage import triage_snapshot
from ..auth import get_current_user
from ..runtime import get_runtime_settings
from .images import cache_tmdb_image
from ..db import (
    save_action,
    get_recent_actions,
    get_recent_snapshots,
    get_cached_requests,
    get_cached_requests_since,
    get_cached_request_by_media_id,
    get_request_cache_by_id,
    get_request_cache_payload,
    get_request_cache_last_updated,
    get_request_cache_count,
    get_request_cache_payloads,
    prune_duplicate_requests_cache,
    upsert_request_cache,
    get_setting,
    set_setting,
    cleanup_history,
)
from ..models import Snapshot, TriageResult, RequestType
from ..services.snapshot import build_snapshot

# Every endpoint on this router requires an authenticated user.
router = APIRouter(prefix="/requests", tags=["requests"], dependencies=[Depends(get_current_user)])

# In-process TTL cache for per-request detail payloads (10 minutes).
CACHE_TTL_SECONDS = 600
_detail_cache: Dict[str, Tuple[float, Dict[str, Any]]] = {}
REQUEST_CACHE_TTL_SECONDS = 600
logger = logging.getLogger(__name__)
# Shared mutable state describing the current/last sync run; read by status endpoints.
_sync_state: Dict[str, Any] = {
    "status": "idle",
    "stored": 0,
    "total": None,
    "skip": 0,
    "message": None,
    "started_at": None,
    "finished_at": None,
}
_sync_task: Optional[asyncio.Task] = None
# Settings key recording when the last successful sync finished.
_sync_last_key = "requests_sync_last_at"
RECENT_CACHE_MAX_DAYS = 180
RECENT_CACHE_TTL_SECONDS = 300
_recent_cache: Dict[str, Any] = {"items": [], "updated_at": None}
# Shared mutable state for the artwork prefetch job; read by status endpoints.
_artwork_prefetch_state: Dict[str, Any] = {
    "status": "idle",
    "processed": 0,
    "total": 0,
    "message": "",
    "started_at": None,
    "finished_at": None,
}
_artwork_prefetch_task: Optional[asyncio.Task] = None

# Human-readable labels for Jellyseerr numeric request statuses.
STATUS_LABELS = {
    1: "Waiting for approval",
    2: "Approved",
    3: "Declined",
    4: "Ready to watch",
    5: "Working on it",
    6: "Partially ready",
}


def _cache_get(key: str) -> Optional[Dict[str, Any]]:
    """Return the cached detail payload for *key*, evicting and returning None if expired."""
    entry = _detail_cache.get(key)
    if entry is None:
        return None
    expires_at, payload = entry
    if time.time() > expires_at:
        _detail_cache.pop(key, None)
        return None
    return payload


def _cache_set(key: str, payload: Dict[str, Any]) -> None:
    """Store *payload* under *key* with the standard TTL."""
    _detail_cache[key] = (time.time() + CACHE_TTL_SECONDS, payload)


def _status_label(value: Any) -> str:
    """Map a numeric request status to its label; non-ints become "Unknown"."""
    if not isinstance(value, int):
        return "Unknown"
    return STATUS_LABELS.get(value, f"Status {value}")


def _normalize_username(value: Any) -> Optional[str]:
    """Lower-cased, stripped username, or None for non-strings/empty strings."""
    if isinstance(value, str):
        normalized = value.strip().lower()
        if normalized:
            return normalized
    return None


def _request_matches_user(request_data: Any, username: str) -> bool:
    """True when the request's requester fields resolve to *username*.

    Comparison is case-insensitive; email-shaped candidates match on their
    local part (text before "@").
    """
    target = _normalize_username(username)
    if not target:
        return False

    requested_by = None
    if isinstance(request_data, dict):
        requested_by = request_data.get("requestedBy") or request_data.get("requestedByUser")
        if requested_by is None:
            requested_by = request_data.get("requestedByName") or request_data.get("requestedByUsername")

    if isinstance(requested_by, dict):
        candidates = [
            requested_by.get("username"),
            requested_by.get("displayName"),
            requested_by.get("name"),
            requested_by.get("email"),
        ]
    else:
        candidates = [requested_by]

    for candidate in candidates:
        normalized = _normalize_username(candidate)
        if not normalized:
            continue
        local_part = normalized.split("@", 1)[0] if "@" in normalized else normalized
        if local_part == target:
            return True
    return False


def _normalize_requested_by(request_data: Any) -> Optional[str]:
    """Normalized requester identity from a request payload, or None.

    Dict requesters are probed in order username/displayName/name/email;
    email values are reduced to their local part.
    """
    if not isinstance(request_data, dict):
        return None
    requested_by = request_data.get("requestedBy")
    if isinstance(requested_by, dict):
        for field in ("username", "displayName", "name", "email"):
            normalized = _normalize_username(requested_by.get(field))
            if normalized and "@" in normalized:
                normalized = normalized.split("@", 1)[0]
            if normalized:
                return normalized
    normalized = _normalize_username(requested_by)
    if normalized and "@" in normalized:
        normalized = normalized.split("@", 1)[0]
    return normalized


def _format_upstream_error(service: str, exc: "httpx.HTTPStatusError") -> str:
    """Compact one-line description of an upstream HTTP error for API details/logs."""
    response = exc.response
    status = "unknown" if response is None else response.status_code
    body = ""
    if response is not None:
        try:
            body = json.dumps(response.json(), ensure_ascii=True)
        except ValueError:
            body = response.text
        body = body.strip() if body else ""
    suffix = f": {body}" if body else "."
    return f"{service} error {status}{suffix}"
def _request_display_name(request_data: Any) -> Optional[str]:
    """Best human-readable requester name from a request payload, or None."""
    if not isinstance(request_data, dict):
        return None
    who = request_data.get("requestedBy")
    if isinstance(who, dict):
        candidates = tuple(who.get(field) for field in ("displayName", "username", "name", "email"))
    else:
        candidates = (who,)
    for value in candidates:
        if isinstance(value, str) and value.strip():
            return value.strip()
    return None


def _parse_request_payload(item: Dict[str, Any]) -> Dict[str, Any]:
    """Flatten a Jellyseerr request item into the request-cache row shape.

    Prefers fields nested under "media", falling back to top-level keys.
    """
    media = item.get("media") or {}
    created = item.get("createdAt") or item.get("addedAt") or item.get("updatedAt")
    return {
        "request_id": item.get("id"),
        "media_id": media.get("id") or item.get("mediaId"),
        "media_type": media.get("mediaType") or item.get("type"),
        "tmdb_id": media.get("tmdbId") or item.get("tmdbId"),
        "status": item.get("status"),
        "title": media.get("title") or media.get("name") or item.get("title") or item.get("name"),
        "year": media.get("year") or item.get("year"),
        "requested_by": _request_display_name(item),
        "requested_by_norm": _normalize_requested_by(item),
        "created_at": created,
        "updated_at": item.get("updatedAt") or created,
    }


def _extract_artwork_paths(item: Dict[str, Any]) -> tuple[Optional[str], Optional[str]]:
    """(poster, backdrop) TMDB paths from a payload; checks camelCase and snake_case, media first."""
    media = item.get("media")
    if not isinstance(media, dict):
        media = {}
    poster = (
        media.get("posterPath")
        or media.get("poster_path")
        or item.get("posterPath")
        or item.get("poster_path")
    )
    backdrop = (
        media.get("backdropPath")
        or media.get("backdrop_path")
        or item.get("backdropPath")
        or item.get("backdrop_path")
    )
    return poster, backdrop
+ +async def _get_request_details(client: JellyseerrClient, request_id: int) -> Optional[Dict[str, Any]]: + cache_key = f"request:{request_id}" + cached = _cache_get(cache_key) + if isinstance(cached, dict): + return cached + try: + fetched = await client.get_request(str(request_id)) + except httpx.HTTPStatusError: + return None + if isinstance(fetched, dict): + _cache_set(cache_key, fetched) + return fetched + return None + + +async def _hydrate_title_from_tmdb( + client: JellyseerrClient, media_type: Optional[str], tmdb_id: Optional[int] +) -> tuple[Optional[str], Optional[int]]: + if not tmdb_id or not media_type: + return None, None + try: + if media_type == "movie": + details = await client.get_movie(int(tmdb_id)) + if isinstance(details, dict): + title = details.get("title") + release_date = details.get("releaseDate") + year = int(release_date[:4]) if release_date else None + return title, year + if media_type == "tv": + details = await client.get_tv(int(tmdb_id)) + if isinstance(details, dict): + title = details.get("name") or details.get("title") + first_air = details.get("firstAirDate") + year = int(first_air[:4]) if first_air else None + return title, year + except httpx.HTTPStatusError: + return None, None + return None, None + + +async def _hydrate_artwork_from_tmdb( + client: JellyseerrClient, media_type: Optional[str], tmdb_id: Optional[int] +) -> tuple[Optional[str], Optional[str]]: + if not tmdb_id or not media_type: + return None, None + try: + if media_type == "movie": + details = await client.get_movie(int(tmdb_id)) + if isinstance(details, dict): + return ( + details.get("posterPath") or details.get("poster_path"), + details.get("backdropPath") or details.get("backdrop_path"), + ) + if media_type == "tv": + details = await client.get_tv(int(tmdb_id)) + if isinstance(details, dict): + return ( + details.get("posterPath") or details.get("poster_path"), + details.get("backdropPath") or details.get("backdrop_path"), + ) + except 
httpx.HTTPStatusError: + return None, None + return None, None + + +def _artwork_url(path: Optional[str], size: str, cache_mode: str) -> Optional[str]: + if not path: + return None + if not path.startswith("/"): + path = f"/{path}" + if cache_mode == "cache": + return f"/images/tmdb?path={quote(path)}&size={size}" + return f"https://image.tmdb.org/t/p/{size}{path}" + + +def _cache_is_stale(last_updated: Optional[str]) -> bool: + if not last_updated: + return True + runtime = get_runtime_settings() + ttl_seconds = max(60, int(runtime.requests_sync_ttl_minutes or 1440) * 60) + try: + parsed = datetime.fromisoformat(last_updated.replace("Z", "+00:00")) + now = datetime.now(timezone.utc) + return (now - parsed).total_seconds() > ttl_seconds + except ValueError: + return True + + +def _parse_time(value: Optional[str], fallback_hour: int, fallback_minute: int) -> tuple[int, int]: + if isinstance(value, str) and ":" in value: + parts = value.strip().split(":") + if len(parts) == 2: + try: + hour = int(parts[0]) + minute = int(parts[1]) + if 0 <= hour <= 23 and 0 <= minute <= 59: + return hour, minute + except ValueError: + pass + return fallback_hour, fallback_minute + + +def _seconds_until(hour: int, minute: int) -> int: + now = datetime.now(timezone.utc).astimezone() + target = now.replace(hour=hour, minute=minute, second=0, microsecond=0) + if target <= now: + target = target + timedelta(days=1) + return int((target - now).total_seconds()) + + +async def _sync_all_requests(client: JellyseerrClient) -> int: + take = 50 + skip = 0 + stored = 0 + cache_mode = (get_runtime_settings().artwork_cache_mode or "remote").lower() + logger.info("Jellyseerr sync starting: take=%s", take) + _sync_state.update( + { + "status": "running", + "stored": 0, + "total": None, + "skip": 0, + "message": "Starting sync", + "started_at": datetime.now(timezone.utc).isoformat(), + "finished_at": None, + } + ) + while True: + try: + response = await client.get_recent_requests(take=take, skip=skip) 
+ except httpx.HTTPError as exc: + logger.warning("Jellyseerr sync failed at skip=%s: %s", skip, exc) + _sync_state.update({"status": "failed", "message": f"Sync failed: {exc}"}) + break + if not isinstance(response, dict): + logger.warning("Jellyseerr sync stopped: non-dict response at skip=%s", skip) + _sync_state.update({"status": "failed", "message": "Invalid response"}) + break + if _sync_state["total"] is None: + page_info = response.get("pageInfo") or {} + total = ( + page_info.get("totalResults") + or page_info.get("total") + or response.get("totalResults") + or response.get("total") + ) + if isinstance(total, int): + _sync_state["total"] = total + items = response.get("results") or [] + if not isinstance(items, list) or not items: + logger.info("Jellyseerr sync completed: no more results at skip=%s", skip) + break + for item in items: + if not isinstance(item, dict): + continue + payload = _parse_request_payload(item) + request_id = payload.get("request_id") + if isinstance(request_id, int): + if not payload.get("title") or not payload.get("media_id"): + logger.debug("Jellyseerr sync hydrate request_id=%s", request_id) + details = await _get_request_details(client, request_id) + if isinstance(details, dict): + payload = _parse_request_payload(details) + item = details + poster_path, backdrop_path = _extract_artwork_paths(item) + if cache_mode == "cache" and not (poster_path or backdrop_path): + details = await _get_request_details(client, request_id) + if isinstance(details, dict): + item = details + payload = _parse_request_payload(details) + if not payload.get("title") and payload.get("tmdb_id"): + hydrated_title, hydrated_year = await _hydrate_title_from_tmdb( + client, payload.get("media_type"), payload.get("tmdb_id") + ) + if hydrated_title: + payload["title"] = hydrated_title + if hydrated_year: + payload["year"] = hydrated_year + if not isinstance(payload.get("request_id"), int): + continue + payload_json = json.dumps(item, ensure_ascii=True) + 
            # --- tail of _sync_all_requests (definition starts above this chunk) ---
            # Persist the parsed request into the local SQLite-backed cache.
                upsert_request_cache(
                    request_id=payload.get("request_id"),
                    media_id=payload.get("media_id"),
                    media_type=payload.get("media_type"),
                    status=payload.get("status"),
                    title=payload.get("title"),
                    year=payload.get("year"),
                    requested_by=payload.get("requested_by"),
                    requested_by_norm=payload.get("requested_by_norm"),
                    created_at=payload.get("created_at"),
                    updated_at=payload.get("updated_at"),
                    payload_json=payload_json,
                )
                stored += 1
                _sync_state["stored"] = stored
            # A short page means Jellyseerr has no more results: stop paging.
            if len(items) < take:
                logger.info("Jellyseerr sync completed: stored=%s", stored)
                break
            skip += take
            _sync_state["skip"] = skip
            _sync_state["message"] = f"Synced {stored} requests"
            logger.info("Jellyseerr sync progress: stored=%s skip=%s", stored, skip)
        # Publish final progress, remember the sync time, and rebuild the
        # in-memory "recent" cache from the DB.
        _sync_state.update(
            {
                "status": "completed",
                "stored": stored,
                "message": f"Sync complete: {stored} requests",
                "finished_at": datetime.now(timezone.utc).isoformat(),
            }
        )
        set_setting(_sync_last_key, datetime.now(timezone.utc).isoformat())
        _refresh_recent_cache_from_db()
        return stored


async def _sync_delta_requests(client: JellyseerrClient) -> int:
    """Incrementally sync recent Jellyseerr requests into the local cache.

    Pages through ``get_recent_requests`` and upserts only rows whose
    ``updated_at`` differs from the cached copy. Progress is published via the
    module-level ``_sync_state`` dict. Returns the number of rows stored.
    """
    take = 50  # page size
    skip = 0
    stored = 0
    unchanged_pages = 0  # consecutive pages with no changed rows (early-stop heuristic)
    cache_mode = (get_runtime_settings().artwork_cache_mode or "remote").lower()
    logger.info("Jellyseerr delta sync starting: take=%s", take)
    _sync_state.update(
        {
            "status": "running",
            "stored": 0,
            "total": None,
            "skip": 0,
            "message": "Starting delta sync",
            "started_at": datetime.now(timezone.utc).isoformat(),
            "finished_at": None,
        }
    )
    while True:
        try:
            response = await client.get_recent_requests(take=take, skip=skip)
        except httpx.HTTPError as exc:
            logger.warning("Jellyseerr delta sync failed at skip=%s: %s", skip, exc)
            _sync_state.update({"status": "failed", "message": f"Delta sync failed: {exc}"})
            break
        if not isinstance(response, dict):
            logger.warning("Jellyseerr delta sync stopped: non-dict response at skip=%s", skip)
            _sync_state.update({"status": "failed",
                               "message": "Invalid response"})
            break
        items = response.get("results") or []
        if not isinstance(items, list) or not items:
            logger.info("Jellyseerr delta sync completed: no more results at skip=%s", skip)
            break
        page_changed = False
        for item in items:
            if not isinstance(item, dict):
                continue
            payload = _parse_request_payload(item)
            request_id = payload.get("request_id")
            if isinstance(request_id, int):
                cached = get_request_cache_by_id(request_id)
                incoming_updated = payload.get("updated_at")
                # Skip rows that have not changed since the last sync.
                if cached and incoming_updated and cached.get("updated_at") == incoming_updated:
                    continue
                # List payloads can be thin; fetch full details when key
                # fields are missing.
                if not payload.get("title") or not payload.get("media_id"):
                    details = await _get_request_details(client, request_id)
                    if isinstance(details, dict):
                        payload = _parse_request_payload(details)
                        item = details
                poster_path, backdrop_path = _extract_artwork_paths(item)
                # In "cache" artwork mode we need artwork paths in the stored
                # payload; fetch details if the list item lacks them.
                if cache_mode == "cache" and not (poster_path or backdrop_path):
                    details = await _get_request_details(client, request_id)
                    if isinstance(details, dict):
                        payload = _parse_request_payload(details)
                        item = details
            # Last-resort title hydration straight from TMDB metadata.
            if not payload.get("title") and payload.get("tmdb_id"):
                hydrated_title, hydrated_year = await _hydrate_title_from_tmdb(
                    client, payload.get("media_type"), payload.get("tmdb_id")
                )
                if hydrated_title:
                    payload["title"] = hydrated_title
                if hydrated_year:
                    payload["year"] = hydrated_year
            if not isinstance(payload.get("request_id"), int):
                continue
            payload_json = json.dumps(item, ensure_ascii=True)
            upsert_request_cache(
                request_id=payload.get("request_id"),
                media_id=payload.get("media_id"),
                media_type=payload.get("media_type"),
                status=payload.get("status"),
                title=payload.get("title"),
                year=payload.get("year"),
                requested_by=payload.get("requested_by"),
                requested_by_norm=payload.get("requested_by_norm"),
                created_at=payload.get("created_at"),
                updated_at=payload.get("updated_at"),
                payload_json=payload_json,
            )
            stored += 1
            page_changed = True
            _sync_state["stored"] = stored
        # Two consecutive unchanged pages ends the delta walk early.
        if not page_changed:
            unchanged_pages += 1
        else:
            unchanged_pages = 0
        if len(items) < take or unchanged_pages >= 2:
            logger.info("Jellyseerr delta sync completed: stored=%s", stored)
            break
        skip += take
        _sync_state["skip"] = skip
        _sync_state["message"] = f"Delta synced {stored} requests"
        logger.info("Jellyseerr delta sync progress: stored=%s skip=%s", skip, stored) if False else logger.info("Jellyseerr delta sync progress: stored=%s skip=%s", stored, skip)
    deduped = prune_duplicate_requests_cache()
    if deduped:
        logger.info("Jellyseerr delta sync removed duplicate rows: %s", deduped)
    _sync_state.update(
        {
            "status": "completed",
            "stored": stored,
            "message": f"Delta sync complete: {stored} updated",
            "finished_at": datetime.now(timezone.utc).isoformat(),
        }
    )
    set_setting(_sync_last_key, datetime.now(timezone.utc).isoformat())
    _refresh_recent_cache_from_db()
    return stored


async def _prefetch_artwork_cache(client: JellyseerrClient) -> None:
    """Walk the request cache and download/caches TMDB artwork locally.

    Only runs when the runtime artwork mode is ``cache``; otherwise marks the
    prefetch state as failed and returns. Progress is published via the
    module-level ``_artwork_prefetch_state`` dict.
    """
    runtime = get_runtime_settings()
    cache_mode = (runtime.artwork_cache_mode or "remote").lower()
    if cache_mode != "cache":
        _artwork_prefetch_state.update(
            {
                "status": "failed",
                "message": "Artwork cache mode is not set to cache.",
                "finished_at": datetime.now(timezone.utc).isoformat(),
            }
        )
        return

    total = get_request_cache_count()
    _artwork_prefetch_state.update(
        {
            "status": "running",
            "processed": 0,
            "total": total,
            "message": "Starting artwork prefetch",
            "started_at": datetime.now(timezone.utc).isoformat(),
            "finished_at": None,
        }
    )
    offset = 0
    limit = 200  # DB page size for payload batches
    processed = 0
    while True:
        batch = get_request_cache_payloads(limit=limit, offset=offset)
        if not batch:
            break
        for row in batch:
            payload = row.get("payload")
            if not isinstance(payload, dict):
                processed += 1
                continue
            poster_path, backdrop_path = _extract_artwork_paths(payload)
            # Payload lacks artwork: try to hydrate paths from TMDB and write
            # the enriched payload back to the cache.
            if not (poster_path or backdrop_path) and client.configured():
                media = payload.get("media") or {}
                tmdb_id = media.get("tmdbId") or payload.get("tmdbId")
                media_type = media.get("mediaType") or payload.get("type")
                if tmdb_id and media_type:
                    hydrated_poster, hydrated_backdrop = await _hydrate_artwork_from_tmdb(
                        client, media_type, tmdb_id
                    )
                    poster_path = poster_path or hydrated_poster
                    backdrop_path = backdrop_path or hydrated_backdrop
                    if hydrated_poster or hydrated_backdrop:
                        media = dict(media) if isinstance(media, dict) else {}
                        if hydrated_poster:
                            media["posterPath"] = hydrated_poster
                        if hydrated_backdrop:
                            media["backdropPath"] = hydrated_backdrop
                        payload["media"] = media
                        parsed = _parse_request_payload(payload)
                        request_id = parsed.get("request_id")
                        if isinstance(request_id, int):
                            upsert_request_cache(
                                request_id=request_id,
                                media_id=parsed.get("media_id"),
                                media_type=parsed.get("media_type"),
                                status=parsed.get("status"),
                                title=parsed.get("title"),
                                year=parsed.get("year"),
                                requested_by=parsed.get("requested_by"),
                                requested_by_norm=parsed.get("requested_by_norm"),
                                created_at=parsed.get("created_at"),
                                updated_at=parsed.get("updated_at"),
                                payload_json=json.dumps(payload, ensure_ascii=True),
                            )
            # Best-effort image downloads; network failures are ignored.
            if poster_path:
                try:
                    await cache_tmdb_image(poster_path, "w185")
                    await cache_tmdb_image(poster_path, "w342")
                except httpx.HTTPError:
                    pass
            if backdrop_path:
                try:
                    await cache_tmdb_image(backdrop_path, "w780")
                except httpx.HTTPError:
                    pass
            processed += 1
            if processed % 25 == 0:
                _artwork_prefetch_state.update(
                    {"processed": processed, "message": f"Cached artwork for {processed} requests"}
                )
        offset += limit

    _artwork_prefetch_state.update(
        {
            "status": "completed",
            "processed": processed,
            "message": f"Artwork cached for {processed} requests",
            "finished_at": datetime.now(timezone.utc).isoformat(),
        }
    )


async def start_artwork_prefetch(base_url: Optional[str], api_key: Optional[str]) -> Dict[str, Any]:
    """Kick off a background artwork prefetch task (idempotent).

    Returns a copy of the current prefetch state; if a prefetch task is
    already running, that state is returned unchanged.
    """
    global _artwork_prefetch_task
    if _artwork_prefetch_task and not _artwork_prefetch_task.done():
        return dict(_artwork_prefetch_state)
    client = JellyseerrClient(base_url, api_key)
    _artwork_prefetch_state.update(
        {
            "status": "running",
            "processed": 0,
            "total": get_request_cache_count(),
            "message": "Starting artwork prefetch",
            "started_at": datetime.now(timezone.utc).isoformat(),
            "finished_at": None,
        }
    )

    async def _runner() -> None:
        # Wraps the prefetch so unexpected errors mark the state failed
        # instead of dying silently in the task.
        try:
            await _prefetch_artwork_cache(client)
        except Exception:
            logger.exception("Artwork prefetch failed")
            _artwork_prefetch_state.update(
                {
                    "status": "failed",
                    "message": "Artwork prefetch failed.",
                    "finished_at": datetime.now(timezone.utc).isoformat(),
                }
            )

    _artwork_prefetch_task = asyncio.create_task(_runner())
    return dict(_artwork_prefetch_state)


def get_artwork_prefetch_state() -> Dict[str, Any]:
    """Return a snapshot copy of the artwork prefetch state."""
    return dict(_artwork_prefetch_state)


async def _ensure_requests_cache(client: JellyseerrClient) -> None:
    """Run a full sync if the request cache is stale or empty."""
    last_sync = get_setting(_sync_last_key)
    last_updated = last_sync or get_request_cache_last_updated()
    if _cache_is_stale(last_updated):
        logger.info("Requests cache stale or empty, starting sync.")
        await _sync_all_requests(client)
    else:
        logger.debug("Requests cache fresh: last_sync=%s", last_updated)


def _refresh_recent_cache_from_db() -> None:
    """Rebuild the in-memory recent-requests cache from the database."""
    since_iso = (datetime.now(timezone.utc) - timedelta(days=RECENT_CACHE_MAX_DAYS)).isoformat()
    items = get_cached_requests_since(since_iso)
    _recent_cache["items"] = items
    _recent_cache["updated_at"] = datetime.now(timezone.utc).isoformat()


def _recent_cache_stale() -> bool:
    """True when the recent cache is missing, unparsable, or past its TTL."""
    updated_at = _recent_cache.get("updated_at")
    if not updated_at:
        return True
    try:
        parsed = datetime.fromisoformat(updated_at)
    except ValueError:
        return True
    return (datetime.now(timezone.utc) - parsed).total_seconds() > RECENT_CACHE_TTL_SECONDS


def _get_recent_from_cache(
    requested_by_norm: Optional[str],
    limit: int,
    offset: int,
    since_iso: Optional[str],
) -> List[Dict[str, Any]]:
    """Filter the in-memory recent cache by requester/date, then paginate."""
    items = _recent_cache.get("items") or []
    results = []
    for item in items:
        if requested_by_norm and item.get("requested_by_norm") != requested_by_norm:
            continue
        # ISO-8601 strings compare chronologically as plain strings here.
        if since_iso and item.get("created_at") and item["created_at"] < since_iso:
            continue
        results.append(item)
    return results[offset : offset + limit]


async def startup_warmup_requests_cache() -> None:
    """Best-effort cache warmup at application startup; HTTP errors only log."""
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if not client.configured():
        return
    try:
        await _ensure_requests_cache(client)
    except httpx.HTTPError as exc:
        logger.warning("Requests warmup skipped: %s", exc)
        return
    _refresh_recent_cache_from_db()


async def run_requests_poll_loop() -> None:
    """Background loop: keep the requests cache fresh (min interval 60s)."""
    while True:
        runtime = get_runtime_settings()
        interval = max(60, int(runtime.requests_poll_interval_seconds or 300))
        client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
        if client.configured():
            try:
                await _ensure_requests_cache(client)
            except httpx.HTTPError as exc:
                logger.debug("Requests poll skipped: %s", exc)
        await asyncio.sleep(interval)


async def run_requests_delta_loop() -> None:
    """Background loop: periodic delta sync, skipped while a sync task runs."""
    while True:
        runtime = get_runtime_settings()
        interval = max(60, int(runtime.requests_delta_sync_interval_minutes or 5) * 60)
        client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
        if client.configured():
            if _sync_task and not _sync_task.done():
                logger.debug("Delta sync skipped: another sync is running.")
            else:
                try:
                    await _sync_delta_requests(client)
                except httpx.HTTPError as exc:
                    logger.debug("Delta sync skipped: %s", exc)
        await asyncio.sleep(interval)


async def run_daily_requests_full_sync() -> None:
    """Background loop: full sync once a day at the configured wall-clock time."""
    while True:
        runtime = get_runtime_settings()
        hour, minute = _parse_time(runtime.requests_full_sync_time, 0, 0)
        await asyncio.sleep(_seconds_until(hour, minute))
        # Re-read settings after the (potentially long) sleep.
        runtime = get_runtime_settings()
        client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
        if not client.configured():
            logger.info("Daily full sync skipped: Jellyseerr not configured.")
            continue
        if _sync_task and not _sync_task.done():
            logger.info("Daily full sync skipped: another sync is running.")
            continue
        try:
            await _sync_all_requests(client)
        except httpx.HTTPError as exc:
            logger.warning("Daily full sync failed: %s", exc)


async def run_daily_db_cleanup() -> None:
    """Background loop: prune history rows daily at the configured time."""
    while True:
        runtime = get_runtime_settings()
        hour, minute = _parse_time(runtime.requests_cleanup_time, 2, 0)
        await asyncio.sleep(_seconds_until(hour, minute))
        runtime = get_runtime_settings()
        result = cleanup_history(int(runtime.requests_cleanup_days or 90))
        logger.info("Daily cleanup complete: %s", result)


async def start_requests_sync(base_url: Optional[str], api_key: Optional[str]) -> Dict[str, Any]:
    """Start a full Jellyseerr sync as a background task (idempotent).

    Returns a copy of the current sync state; if a sync is already running
    (or Jellyseerr is unconfigured) no new task is created.
    """
    global _sync_task
    if _sync_task and not _sync_task.done():
        return dict(_sync_state)
    if not base_url:
        _sync_state.update({"status": "failed", "message": "Jellyseerr not configured"})
        return dict(_sync_state)
    client = JellyseerrClient(base_url, api_key)
    _sync_state.update(
        {
            "status": "running",
            "stored": 0,
            "total": None,
            "skip": 0,
            "message": "Starting sync",
            "started_at": datetime.now(timezone.utc).isoformat(),
            "finished_at": None,
        }
    )

    async def _runner() -> None:
        try:
            await _sync_all_requests(client)
        except Exception as exc:
            logger.exception("Jellyseerr sync failed")
            _sync_state.update(
                {
                    "status": "failed",
                    "message": f"Sync failed: {exc}",
                    "finished_at": datetime.now(timezone.utc).isoformat(),
                }
            )

    _sync_task = asyncio.create_task(_runner())
    return dict(_sync_state)


async def start_requests_delta_sync(base_url: Optional[str], api_key: Optional[str]) -> Dict[str, Any]:
    """Start a delta sync as a background task (same contract as full sync)."""
    global _sync_task
    if _sync_task and not _sync_task.done():
        return dict(_sync_state)
    if not base_url:
        _sync_state.update({"status": "failed", "message": "Jellyseerr not configured"})
        return dict(_sync_state)
    client = JellyseerrClient(base_url, api_key)
    _sync_state.update(
        {
            "status": "running",
            "stored": 0,
            "total": None,
            "skip": 0,
            "message": "Starting delta sync",
            "started_at": datetime.now(timezone.utc).isoformat(),
            "finished_at": None,
        }
    )

    async def _runner() -> None:
        try:
            await _sync_delta_requests(client)
        except Exception as exc:
            logger.exception("Jellyseerr delta sync failed")
            _sync_state.update(
                {
                    "status": "failed",
                    "message": f"Delta sync failed: {exc}",
                    "finished_at": datetime.now(timezone.utc).isoformat(),
                }
            )

    _sync_task = asyncio.create_task(_runner())
    return dict(_sync_state)


def get_requests_sync_state() -> Dict[str, Any]:
    """Return a snapshot copy of the sync state."""
    return dict(_sync_state)


async def _ensure_request_access(
    client: JellyseerrClient, request_id: int, user: Dict[str, str]
) -> None:
    """Raise 403 unless *user* is admin or owns the given request.

    Checks the local cache first (unless data source is ``always_js``),
    falling back to a live Jellyseerr lookup on cache miss.
    """
    if user.get("role") == "admin":
        return
    runtime = get_runtime_settings()
    mode = (runtime.requests_data_source or "prefer_cache").lower()
    cached = get_request_cache_payload(request_id)
    if mode != "always_js" and cached is not None:
        logger.debug("access cache hit: request_id=%s mode=%s", request_id, mode)
        if _request_matches_user(cached, user.get("username", "")):
            return
        raise HTTPException(status_code=403, detail="Request not accessible for this user")
    logger.debug("access cache miss: request_id=%s mode=%s", request_id, mode)
    details = await _get_request_details(client, request_id)
    if details is None or not _request_matches_user(details, user.get("username", "")):
        raise HTTPException(status_code=403, detail="Request not accessible for this user")


def _build_recent_map(response: Dict[str, Any]) -> Dict[int, Dict[str, Any]]:
    """Map mediaId -> {requestId, status, statusLabel} from a results page."""
    mapping: Dict[int, Dict[str, Any]] = {}
    for item in response.get("results", []):
        media = item.get("media") or {}
        media_id = media.get("id") or item.get("mediaId")
        request_id = item.get("id")
        status = item.get("status")
        if isinstance(media_id, int) and isinstance(request_id, int):
            mapping[media_id] = {
                "requestId": request_id,
                "status": status,
                "statusLabel": _status_label(status),
            }
    return mapping


def _queue_records(queue: Any) -> List[Dict[str, Any]]:
    """Normalize an *arr queue payload (dict-with-records or bare list) to a list."""
    if isinstance(queue, dict):
        records = queue.get("records")
        if isinstance(records, list):
            return records
    if isinstance(queue, list):
        return queue
    return []


def _download_ids(records: List[Dict[str, Any]]) -> List[str]:
    """Collect non-empty downloadId strings from queue records."""
    ids = []
    for record in records:
        download_id = record.get("downloadId") or record.get("download_id")
        if isinstance(download_id, str) and download_id:
            ids.append(download_id)
    return ids


def _normalize_categories(categories: Any) -> List[str]:
    """Lower-case the ``name`` of each category dict; ignore anything else."""
    names = []
    if isinstance(categories, list):
        for cat in categories:
            if isinstance(cat, dict):
                name = cat.get("name")
                if isinstance(name, str):
                    names.append(name.lower())
    return names


def _filter_prowlarr_results(results: Any, request_type: RequestType) -> List[Dict[str, Any]]:
    """Keep Prowlarr releases matching the request's media type.

    Movies must have a category containing "movies"; TV must have one that
    starts with "tv" or contains "tv/". Results are trimmed to a stable
    field subset, sorted by seeders descending, and capped at 10.
    """
    if not isinstance(results, list):
        return []
    keep = []
    for item in results:
        if not isinstance(item, dict):
            continue
        categories = _normalize_categories(item.get("categories"))
        if request_type == RequestType.movie:
            if not any("movies" in name for name in categories):
                continue
        elif request_type == RequestType.tv:
            if not any(name.startswith("tv") or "tv/" in name for name in categories):
                continue
        keep.append(
            {
                "title": item.get("title"),
                "indexer": item.get("indexer"),
                "indexerId": item.get("indexerId"),
                "guid": item.get("guid"),
                "size": item.get("size"),
                "seeders": item.get("seeders"),
                "leechers": item.get("leechers"),
                "publishDate": item.get("publishDate"),
                "infoUrl": item.get("infoUrl"),
                "downloadUrl": item.get("downloadUrl"),
                "protocol": item.get("protocol"),
            }
        )
    keep.sort(key=lambda item: (item.get("seeders") or 0),
              reverse=True)
    return keep[:10]


def _missing_episode_ids_by_season(episodes: Any) -> Dict[int, List[int]]:
    """Group ids of monitored, file-less episodes by season number."""
    if not isinstance(episodes, list):
        return {}
    grouped: Dict[int, List[int]] = {}
    for episode in episodes:
        if not isinstance(episode, dict):
            continue
        # Unmonitored or already-downloaded episodes are not "missing".
        if not episode.get("monitored", True):
            continue
        if episode.get("hasFile"):
            continue
        season_number = episode.get("seasonNumber")
        episode_id = episode.get("id")
        if isinstance(season_number, int) and isinstance(episode_id, int):
            grouped.setdefault(season_number, []).append(episode_id)
    return grouped


async def _resolve_root_folder_path(client: Any, root_folder: str, service_name: str) -> str:
    """Resolve a configured root folder to a path.

    A purely numeric value is treated as a root-folder *id* and looked up via
    the *arr client; anything else is assumed to already be a path. Raises
    HTTP 400 when an id cannot be resolved.
    """
    if root_folder.isdigit():
        folders = await client.get_root_folders()
        if isinstance(folders, list):
            for folder in folders:
                if folder.get("id") == int(root_folder):
                    path = folder.get("path")
                    if isinstance(path, str) and path:
                        return path
        raise HTTPException(status_code=400, detail=f"{service_name} root folder id {root_folder} not found")
    return root_folder


@router.get("/{request_id}/snapshot", response_model=Snapshot)
async def get_snapshot(request_id: str, user: Dict[str, str] = Depends(get_current_user)) -> Snapshot:
    """Return the live status snapshot for a request (owner or admin only).

    NOTE(review): ``int(request_id)`` raises ValueError (-> HTTP 500) for a
    non-numeric path segment — confirm whether a 4xx is preferred.
    """
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if client.configured():
        await _ensure_request_access(client, int(request_id), user)
    return await build_snapshot(request_id)


@router.get("/recent")
async def recent_requests(
    take: int = 6,
    skip: int = 0,
    days: int = 90,
    user: Dict[str, str] = Depends(get_current_user),
) -> dict:
    """List recent requests for the dashboard.

    Admins see everything; other users only their own rows. Titles/years and
    artwork are hydrated lazily from cached payloads, request details, or
    TMDB depending on the ``requests_data_source`` mode.
    """
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if not client.configured():
        raise HTTPException(status_code=400, detail="Jellyseerr not configured")

    try:
        await _ensure_requests_cache(client)
    except httpx.HTTPStatusError as exc:
        raise HTTPException(status_code=502, detail=str(exc)) from exc

    username_norm = _normalize_username(user.get("username", ""))
    requested_by = None if user.get("role") == "admin" else username_norm
    since_iso = None
    if days > 0:
        since_iso = (datetime.now(timezone.utc) - timedelta(days=days)).isoformat()
    if _recent_cache_stale():
        _refresh_recent_cache_from_db()
    rows = _get_recent_from_cache(requested_by, take, skip, since_iso)
    cache_mode = (runtime.artwork_cache_mode or "remote").lower()
    mode = (runtime.requests_data_source or "prefer_cache").lower()
    # Hydration policy derived from the data-source mode.
    allow_remote = mode == "always_js"
    allow_title_hydrate = mode == "prefer_cache"
    allow_artwork_hydrate = allow_remote or allow_title_hydrate
    results = []
    for row in rows:
        status = row.get("status")
        title = row.get("title")
        # Placeholder titles like "Request 123" are treated as missing.
        title_is_placeholder = (
            isinstance(title, str)
            and row.get("request_id") is not None
            and title.strip().lower() == f"request {row.get('request_id')}"
        )
        year = row.get("year")
        details = None
        if row.get("request_id") and mode != "always_js":
            cached_payload = get_request_cache_payload(int(row["request_id"]))
            if isinstance(cached_payload, dict):
                details = cached_payload
        if (not title or title_is_placeholder) and row.get("request_id"):
            if details is None and (allow_remote or allow_title_hydrate):
                details = await _get_request_details(client, int(row["request_id"]))
            if isinstance(details, dict):
                payload = _parse_request_payload(details)
                title = payload.get("title") or title
                year = payload.get("year") or year
                if not title and payload.get("tmdb_id") and (allow_remote or allow_title_hydrate):
                    hydrated_title, hydrated_year = await _hydrate_title_from_tmdb(
                        client, payload.get("media_type"), payload.get("tmdb_id")
                    )
                    if hydrated_title:
                        title = hydrated_title
                    if hydrated_year:
                        year = hydrated_year
                if allow_remote and isinstance(payload.get("request_id"), int):
                    upsert_request_cache(
                        request_id=payload.get("request_id"),
                        media_id=payload.get("media_id"),
                        media_type=payload.get("media_type"),
                        status=payload.get("status"),
                        title=title or payload.get("title"),
                        year=year or payload.get("year"),
                        requested_by=payload.get("requested_by"),
                        requested_by_norm=payload.get("requested_by_norm"),
                        created_at=payload.get("created_at"),
                        updated_at=payload.get("updated_at"),
                        payload_json=json.dumps(details, ensure_ascii=True),
                    )
                row["title"] = title
                row["year"] = year
                row["media_type"] = payload.get("media_type") or row.get("media_type")
                # NOTE(review): row["status"] is refreshed here but the local
                # `status` used in the response below is not — confirm intended.
                row["status"] = payload.get("status") or row.get("status")
        if details is None and row.get("request_id") and allow_remote:
            details = await _get_request_details(client, int(row["request_id"]))

        poster_path = None
        backdrop_path = None
        if isinstance(details, dict):
            media = details.get("media") or {}
            if isinstance(media, dict):
                poster_path = media.get("posterPath") or media.get("poster_path")
                backdrop_path = media.get("backdropPath") or media.get("backdrop_path")
                tmdb_id = media.get("tmdbId") or details.get("tmdbId")
            else:
                tmdb_id = details.get("tmdbId")
            media_type = media.get("mediaType") if isinstance(media, dict) else None
            media_type = media_type or details.get("type") or row.get("media_type")
            if not poster_path and tmdb_id and allow_artwork_hydrate:
                hydrated_poster, hydrated_backdrop = await _hydrate_artwork_from_tmdb(
                    client, media_type, tmdb_id
                )
                poster_path = poster_path or hydrated_poster
                backdrop_path = backdrop_path or hydrated_backdrop
                # Persist hydrated artwork paths back into the cached payload.
                if (hydrated_poster or hydrated_backdrop) and isinstance(details, dict):
                    media = dict(media) if isinstance(media, dict) else {}
                    if hydrated_poster:
                        media["posterPath"] = hydrated_poster
                    if hydrated_backdrop:
                        media["backdropPath"] = hydrated_backdrop
                    details["media"] = media
                    payload = _parse_request_payload(details)
                    if isinstance(payload.get("request_id"), int):
                        upsert_request_cache(
                            request_id=payload.get("request_id"),
                            media_id=payload.get("media_id"),
                            media_type=payload.get("media_type"),
                            status=payload.get("status"),
                            title=payload.get("title"),
                            year=payload.get("year"),
                            requested_by=payload.get("requested_by"),
                            requested_by_norm=payload.get("requested_by_norm"),
                            created_at=payload.get("created_at"),
                            updated_at=payload.get("updated_at"),
                            payload_json=json.dumps(details, ensure_ascii=True),
                        )
        results.append(
            {
                "id": row.get("request_id"),
                "title": title,
                "year": year,
                "type": row.get("media_type"),
                "status": status,
                "statusLabel": _status_label(status),
                "mediaId": row.get("media_id"),
                "artwork": {
                    "poster_url": _artwork_url(poster_path, "w185", cache_mode),
                    "backdrop_url": _artwork_url(backdrop_path, "w780", cache_mode),
                },
            }
        )

    return {"results": results}


@router.get("/search")
async def search_requests(
    query: str, page: int = 1, user: Dict[str, str] = Depends(get_current_user)
) -> dict:
    """Search Jellyseerr and annotate hits with any known request/status.

    Non-admin users only see results tied to their own requests.
    """
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if not client.configured():
        raise HTTPException(status_code=400, detail="Jellyseerr not configured")

    try:
        response = await client.search(query=query, page=page)
    except httpx.HTTPStatusError as exc:
        raise HTTPException(status_code=502, detail=str(exc)) from exc

    if not isinstance(response, dict):
        return {"results": []}

    try:
        await _ensure_requests_cache(client)
    except httpx.HTTPStatusError:
        pass

    results = []
    for item in response.get("results", []):
        media_type = item.get("mediaType")
        title = item.get("title") or item.get("name")
        year = None
        # NOTE(review): assumes ISO "YYYY-..." dates; int(...[:4]) raises on
        # malformed values. firstAirDate deliberately overrides releaseDate.
        if item.get("releaseDate"):
            year = int(item["releaseDate"][:4])
        if item.get("firstAirDate"):
            year = int(item["firstAirDate"][:4])

        request_id = None
        status = None
        status_label = None
        media_info = item.get("mediaInfo") or {}
        media_info_id = media_info.get("id")
        requests = media_info.get("requests")
        if isinstance(requests, list) and requests:
            request_id = requests[0].get("id")
            status = requests[0].get("status")
            status_label = _status_label(status)
        elif isinstance(media_info_id, int):
            # Fall back to the local cache keyed by media id.
            username_norm = _normalize_username(user.get("username", ""))
            requested_by = None if user.get("role") == "admin" else username_norm
            cached = get_cached_request_by_media_id(media_info_id, requested_by_norm=requested_by)
            if cached:
                request_id = cached.get("request_id")
                status = cached.get("status")
                status_label = _status_label(status)

        # Ownership filter for non-admin users.
        if user.get("role") != "admin":
            if isinstance(request_id, int):
                details = await _get_request_details(client, request_id)
                if not _request_matches_user(details, user.get("username", "")):
                    continue
            else:
                continue

        results.append(
            {
                "title": title,
                "year": year,
                "type": media_type,
                "tmdbId": item.get("id"),
                "requestId": request_id,
                "status": status,
                "statusLabel": status_label,
            }
        )

    return {"results": results}


@router.post("/{request_id}/ai/triage", response_model=TriageResult)
async def ai_triage(request_id: str, user: Dict[str, str] = Depends(get_current_user)) -> TriageResult:
    """Run AI triage over the request's current snapshot."""
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if client.configured():
        await _ensure_request_access(client, int(request_id), user)
    snapshot = await build_snapshot(request_id)
    return triage_snapshot(snapshot)


@router.post("/{request_id}/actions/search")
async def action_search(request_id: str, user: Dict[str, str] = Depends(get_current_user)) -> dict:
    """Re-trigger an indexer search in Sonarr/Radarr and list Prowlarr releases."""
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if client.configured():
        await _ensure_request_access(client, int(request_id), user)
    snapshot = await build_snapshot(request_id)
    arr_item = snapshot.raw.get("arr", {}).get("item")
    if not isinstance(arr_item, dict):
        raise HTTPException(status_code=404,
                            detail="Item not found in Sonarr/Radarr")

    # Optional manual-release listing via Prowlarr (best effort).
    prowlarr_results: List[Dict[str, Any]] = []
    prowlarr = ProwlarrClient(runtime.prowlarr_base_url, runtime.prowlarr_api_key)
    if prowlarr.configured():
        query = snapshot.title
        if snapshot.year:
            query = f"{query} {snapshot.year}"
        try:
            results = await prowlarr.search(query=query)
            prowlarr_results = _filter_prowlarr_results(results, snapshot.request_type)
        except httpx.HTTPStatusError:
            prowlarr_results = []

    if snapshot.request_type.value == "tv":
        client = SonarrClient(runtime.sonarr_base_url, runtime.sonarr_api_key)
        if not client.configured():
            raise HTTPException(status_code=400, detail="Sonarr not configured")
        episodes = await client.get_episodes(int(arr_item["id"]))
        missing_by_season = _missing_episode_ids_by_season(episodes)
        if not missing_by_season:
            return {
                "status": "ok",
                "message": "No missing monitored episodes found",
                "searched": [],
                "releases": prowlarr_results,
            }
        responses = []
        # Issue one episode-search command per season with missing episodes.
        for season_number in sorted(missing_by_season.keys()):
            episode_ids = missing_by_season[season_number]
            if episode_ids:
                response = await client.search_episodes(episode_ids)
                responses.append(
                    {"season": season_number, "episodeCount": len(episode_ids), "response": response}
                )
        result = {"status": "ok", "searched": responses, "releases": prowlarr_results}
        await asyncio.to_thread(
            save_action,
            request_id,
            "search",
            "Re-run search in Sonarr/Radarr",
            "ok",
            f"Found {len(prowlarr_results)} releases.",
        )
        return result
    elif snapshot.request_type.value == "movie":
        client = RadarrClient(runtime.radarr_base_url, runtime.radarr_api_key)
        if not client.configured():
            raise HTTPException(status_code=400, detail="Radarr not configured")
        response = await client.search(int(arr_item["id"]))
        result = {"status": "ok", "response": response, "releases": prowlarr_results}
        await asyncio.to_thread(
            save_action,
            request_id,
            "search",
            "Re-run search in Sonarr/Radarr",
            "ok",
            f"Found {len(prowlarr_results)} releases.",
        )
        return result
    else:
        raise HTTPException(status_code=400, detail="Unknown request type")


@router.post("/{request_id}/actions/qbit/resume")
async def action_resume(request_id: str, user: Dict[str, str] = Depends(get_current_user)) -> dict:
    """Force-resume the request's torrents in qBittorrent when stalled."""
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if client.configured():
        await _ensure_request_access(client, int(request_id), user)
    snapshot = await build_snapshot(request_id)
    queue = snapshot.raw.get("arr", {}).get("queue")
    download_ids = _download_ids(_queue_records(queue))
    if not download_ids:
        message = "Nothing to force resume."
        await asyncio.to_thread(
            save_action, request_id, "resume_torrent", "Resume torrent", "ok", message
        )
        return {"status": "ok", "message": message}

    runtime = get_runtime_settings()
    client = QBittorrentClient(
        runtime.qbittorrent_base_url,
        runtime.qbittorrent_username,
        runtime.qbittorrent_password,
    )
    if not client.configured():
        raise HTTPException(status_code=400, detail="qBittorrent not configured")

    try:
        torrents = await client.get_torrents_by_hashes("|".join(download_ids))
        torrent_list = torrents if isinstance(torrents, list) else []
        # qBittorrent state names that indicate an active/queued download.
        downloading_states = {"downloading", "stalleddl", "queueddl", "checkingdl", "forceddl"}
        if torrent_list and all(
            str(t.get("state", "")).lower() in downloading_states for t in torrent_list
        ):
            message = "No need to force resume. Already downloading."
            await asyncio.to_thread(
                save_action, request_id, "resume_torrent", "Resume torrent", "ok", message
            )
            return {"status": "ok", "message": message}
        await client.resume_torrents("|".join(download_ids))
    except httpx.HTTPStatusError as exc:
        raise HTTPException(status_code=502, detail=str(exc)) from exc
    message = "Resume sent to qBittorrent."
    await asyncio.to_thread(
        save_action, request_id, "resume_torrent", "Resume torrent", "ok", message
    )
    return {"status": "ok", "resumed": download_ids, "message": message}


@router.post("/{request_id}/actions/readd")
async def action_readd(request_id: str, user: Dict[str, str] = Depends(get_current_user)) -> dict:
    """Re-add the requested title to Sonarr (TV) or Radarr (movie).

    No-op with an informative message when the title already exists in the
    target *arr; failures are logged via save_action and surfaced as 502.
    """
    runtime = get_runtime_settings()
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if client.configured():
        await _ensure_request_access(client, int(request_id), user)
    snapshot = await build_snapshot(request_id)
    jelly = snapshot.raw.get("jellyseerr") or {}
    media = jelly.get("media") or {}

    if snapshot.request_type.value == "tv":
        tvdb_id = media.get("tvdbId")
        if not tvdb_id:
            raise HTTPException(status_code=400, detail="Missing tvdbId for series")
        title = snapshot.title
        # Snapshot title may be a placeholder; fall back to payload fields.
        if title in {None, "", "Unknown"}:
            title = (
                media.get("name")
                or media.get("title")
                or jelly.get("title")
                or jelly.get("name")
            )
        if not runtime.sonarr_quality_profile_id or not runtime.sonarr_root_folder:
            raise HTTPException(status_code=400, detail="Sonarr profile/root not configured")
        client = SonarrClient(runtime.sonarr_base_url, runtime.sonarr_api_key)
        if not client.configured():
            raise HTTPException(status_code=400, detail="Sonarr not configured")
        try:
            existing = await client.get_series_by_tvdb_id(int(tvdb_id))
        except httpx.HTTPStatusError as exc:
            detail = _format_upstream_error("Sonarr", exc)
            await asyncio.to_thread(
                save_action, request_id, "readd_to_arr", "Re-add to Sonarr/Radarr", "failed", detail
            )
            raise HTTPException(status_code=502, detail=detail) from exc
        if isinstance(existing, list) and existing:
            series_id = existing[0].get("id")
            message = f"Already in Sonarr (seriesId {series_id})."
            await asyncio.to_thread(
                save_action, request_id, "readd_to_arr", "Re-add to Sonarr/Radarr", "ok", message
            )
            return {"status": "ok", "message": message, "seriesId": series_id}
        root_folder = await _resolve_root_folder_path(client, runtime.sonarr_root_folder, "Sonarr")
        try:
            response = await client.add_series(
                int(tvdb_id), runtime.sonarr_quality_profile_id, root_folder, title=title
            )
        except httpx.HTTPStatusError as exc:
            detail = _format_upstream_error("Sonarr", exc)
            await asyncio.to_thread(
                save_action, request_id, "readd_to_arr", "Re-add to Sonarr/Radarr", "failed", detail
            )
            raise HTTPException(status_code=502, detail=detail) from exc
        await asyncio.to_thread(
            save_action,
            request_id,
            "readd_to_arr",
            "Re-add to Sonarr/Radarr",
            "ok",
            f"Re-added in Sonarr to {root_folder}.",
        )
        return {"status": "ok", "response": response, "rootFolder": root_folder}

    if snapshot.request_type.value == "movie":
        tmdb_id = media.get("tmdbId")
        if not tmdb_id:
            raise HTTPException(status_code=400, detail="Missing tmdbId for movie")
        if not runtime.radarr_quality_profile_id or not runtime.radarr_root_folder:
            raise HTTPException(status_code=400, detail="Radarr profile/root not configured")
        client = RadarrClient(runtime.radarr_base_url, runtime.radarr_api_key)
        if not client.configured():
            raise HTTPException(status_code=400, detail="Radarr not configured")
        try:
            existing = await client.get_movie_by_tmdb_id(int(tmdb_id))
        except httpx.HTTPStatusError as exc:
            detail = _format_upstream_error("Radarr", exc)
            await asyncio.to_thread(
                save_action, request_id, "readd_to_arr", "Re-add to Sonarr/Radarr", "failed", detail
            )
            raise HTTPException(status_code=502, detail=detail) from exc
        if isinstance(existing, list) and existing:
            movie_id = existing[0].get("id")
            message = f"Already in Radarr (movieId {movie_id})."
+ await asyncio.to_thread( + save_action, request_id, "readd_to_arr", "Re-add to Sonarr/Radarr", "ok", message + ) + return {"status": "ok", "message": message, "movieId": movie_id} + root_folder = await _resolve_root_folder_path(client, runtime.radarr_root_folder, "Radarr") + try: + response = await client.add_movie( + int(tmdb_id), runtime.radarr_quality_profile_id, root_folder + ) + except httpx.HTTPStatusError as exc: + detail = _format_upstream_error("Radarr", exc) + await asyncio.to_thread( + save_action, request_id, "readd_to_arr", "Re-add to Sonarr/Radarr", "failed", detail + ) + raise HTTPException(status_code=502, detail=detail) from exc + await asyncio.to_thread( + save_action, + request_id, + "readd_to_arr", + "Re-add to Sonarr/Radarr", + "ok", + f"Re-added in Radarr to {root_folder}.", + ) + return {"status": "ok", "response": response, "rootFolder": root_folder} + + raise HTTPException(status_code=400, detail="Unknown request type") + + +@router.get("/{request_id}/history") +async def request_history( + request_id: str, limit: int = 10, user: Dict[str, str] = Depends(get_current_user) +) -> dict: + runtime = get_runtime_settings() + client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key) + if client.configured(): + await _ensure_request_access(client, int(request_id), user) + snapshots = await asyncio.to_thread(get_recent_snapshots, request_id, limit) + return {"snapshots": snapshots} + + +@router.get("/{request_id}/actions") +async def request_actions( + request_id: str, limit: int = 10, user: Dict[str, str] = Depends(get_current_user) +) -> dict: + runtime = get_runtime_settings() + client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key) + if client.configured(): + await _ensure_request_access(client, int(request_id), user) + actions = await asyncio.to_thread(get_recent_actions, request_id, limit) + return {"actions": actions} + + +@router.post("/{request_id}/actions/grab") +async def 
@router.post("/{request_id}/actions/grab")
async def action_grab(
    request_id: str, payload: Dict[str, Any], user: Dict[str, str] = Depends(get_current_user)
) -> dict:
    """Manually grab a specific indexer release for this request.

    ``payload`` must contain ``guid`` and ``indexerId``; the grab is routed to
    Sonarr for TV requests and to Radarr for movie requests.  The outcome is
    persisted via ``save_action``, matching the other action endpoints.

    Raises:
        HTTPException: 400 when payload fields are missing, the target Arr is
            not configured, or the request type is unknown; 502 when the Arr
            service rejects the grab.
    """
    runtime = get_runtime_settings()
    jellyseerr = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    if jellyseerr.configured():
        # Per-user access is only enforceable when Jellyseerr is configured.
        await _ensure_request_access(jellyseerr, int(request_id), user)
    snapshot = await build_snapshot(request_id)
    guid = payload.get("guid")
    indexer_id = payload.get("indexerId")
    if not guid or not indexer_id:
        raise HTTPException(status_code=400, detail="Missing guid or indexerId")

    # Fix: the original re-fetched runtime settings here; one lookup suffices.
    if snapshot.request_type.value == "tv":
        client = SonarrClient(runtime.sonarr_base_url, runtime.sonarr_api_key)
        if not client.configured():
            raise HTTPException(status_code=400, detail="Sonarr not configured")
        try:
            response = await client.grab_release(str(guid), int(indexer_id))
        except httpx.HTTPStatusError as exc:
            # Consistency fix: format the upstream error and record the failed
            # attempt, as action_readd does.
            detail = _format_upstream_error("Sonarr", exc)
            await asyncio.to_thread(
                save_action, request_id, "grab", "Grab release", "failed", detail
            )
            raise HTTPException(status_code=502, detail=detail) from exc
        await asyncio.to_thread(
            save_action, request_id, "grab", "Grab release", "ok", "Grab sent to Sonarr."
        )
        return {"status": "ok", "response": response}
    if snapshot.request_type.value == "movie":
        client = RadarrClient(runtime.radarr_base_url, runtime.radarr_api_key)
        if not client.configured():
            raise HTTPException(status_code=400, detail="Radarr not configured")
        try:
            response = await client.grab_release(str(guid), int(indexer_id))
        except httpx.HTTPStatusError as exc:
            detail = _format_upstream_error("Radarr", exc)
            await asyncio.to_thread(
                save_action, request_id, "grab", "Grab release", "failed", detail
            )
            raise HTTPException(status_code=502, detail=detail) from exc
        await asyncio.to_thread(
            save_action, request_id, "grab", "Grab release", "ok", "Grab sent to Radarr."
        )
        return {"status": "ok", "response": response}

    raise HTTPException(status_code=400, detail="Unknown request type")


async def _check(name: str, configured: bool, func) -> Dict[str, Any]:
    """Probe one downstream service and report its health.

    Returns a dict with ``name`` plus a ``status`` of ``not_configured``,
    ``up`` (with the probe result under ``detail``) or ``down`` (with the
    error text under ``message``).
    """
    if not configured:
        return {"name": name, "status": "not_configured"}
    try:
        result = await func()
    except Exception as exc:
        # The original had separate httpx.HTTPError / Exception handlers with
        # identical bodies; one broad handler is equivalent — a status probe
        # must never crash the endpoint.
        return {"name": name, "status": "down", "message": str(exc)}
    return {"name": name, "status": "up", "detail": result}
@router.get("/services")
async def services_status() -> Dict[str, Any]:
    """Report per-service health and an aggregated overall status.

    Probes Jellyseerr, Sonarr, Radarr, Prowlarr, qBittorrent and Jellyfin.
    ``overall`` is "down" if any service is down, "degraded" if any is
    degraded or not configured, otherwise "up".
    """
    runtime = get_runtime_settings()
    jellyseerr = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    sonarr = SonarrClient(runtime.sonarr_base_url, runtime.sonarr_api_key)
    radarr = RadarrClient(runtime.radarr_base_url, runtime.radarr_api_key)
    prowlarr = ProwlarrClient(runtime.prowlarr_base_url, runtime.prowlarr_api_key)
    qbittorrent = QBittorrentClient(
        runtime.qbittorrent_base_url, runtime.qbittorrent_username, runtime.qbittorrent_password
    )
    jellyfin = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key)

    # Probe the straightforward services first, in the same order as before.
    probes = [
        ("Jellyseerr", jellyseerr.configured(), lambda: jellyseerr.get_recent_requests(take=1, skip=0)),
        ("Sonarr", sonarr.configured(), sonarr.get_system_status),
        ("Radarr", radarr.configured(), radarr.get_system_status),
    ]
    services = [await _check(name, ready, probe) for name, ready, probe in probes]

    # Prowlarr is special-cased: a reachable instance with health warnings is
    # downgraded to "degraded".
    prowlarr_status = await _check("Prowlarr", prowlarr.configured(), prowlarr.get_health)
    if prowlarr_status.get("status") == "up":
        warnings = prowlarr_status.get("detail")
        if isinstance(warnings, list) and warnings:
            prowlarr_status["status"] = "degraded"
            prowlarr_status["message"] = "Health warnings"
    services.append(prowlarr_status)

    services.append(await _check("qBittorrent", qbittorrent.configured(), qbittorrent.get_app_version))
    services.append(await _check("Jellyfin", jellyfin.configured(), jellyfin.get_system_info))

    statuses = {entry.get("status") for entry in services}
    if "down" in statuses:
        overall = "down"
    elif statuses & {"degraded", "not_configured"}:
        overall = "degraded"
    else:
        overall = "up"

    return {"overall": overall, "services": services}


# Override keys coerced to int before being applied.
_INT_FIELDS = {
    "sonarr_quality_profile_id",
    "radarr_quality_profile_id",
    "jwt_exp_minutes",
    "requests_sync_ttl_minutes",
    "requests_poll_interval_seconds",
    "requests_delta_sync_interval_minutes",
    "requests_cleanup_days",
}
# Override keys coerced to bool before being applied.
_BOOL_FIELDS = {
    "jellyfin_sync_to_arr",
}


def get_runtime_settings():
    """Return the static config with persisted DB overrides layered on top.

    Int-typed overrides that fail to parse are silently skipped; bool-typed
    overrides accept real booleans or truthy strings ("1", "true", "yes",
    "on", case-insensitive).  ``None`` overrides are ignored.
    """
    update = {}
    for key, value in get_settings_overrides().items():
        if value is None:
            continue
        if key in _INT_FIELDS:
            try:
                update[key] = int(value)
            except (TypeError, ValueError):
                # Unparseable override: keep the configured default.
                pass
        elif key in _BOOL_FIELDS:
            if isinstance(value, bool):
                update[key] = value
            else:
                update[key] = str(value).strip().lower() in {"1", "true", "yes", "on"}
        else:
            update[key] = value
    return settings.model_copy(update=update)
# Password hashing context (PBKDF2-SHA256).
_pwd_context = CryptContext(schemes=["pbkdf2_sha256"], deprecated="auto")
# Symmetric JWT signing algorithm, keyed by settings.jwt_secret.
_ALGORITHM = "HS256"


def hash_password(password: str) -> str:
    """Hash a plaintext password for storage."""
    return _pwd_context.hash(password)


def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Check a plaintext password against a stored hash."""
    return _pwd_context.verify(plain_password, hashed_password)


def create_access_token(subject: str, role: str, expires_minutes: Optional[int] = None) -> str:
    """Create a signed JWT carrying ``sub`` and ``role`` claims.

    Args:
        subject: Value for the ``sub`` claim.
        role: Value for the ``role`` claim.
        expires_minutes: Token lifetime in minutes; defaults to
            ``settings.jwt_exp_minutes`` when omitted.
    """
    # Fix: the previous `expires_minutes or settings.jwt_exp_minutes` treated
    # an explicit 0 as "not provided"; an explicit None check honours 0.
    minutes = settings.jwt_exp_minutes if expires_minutes is None else expires_minutes
    expires = datetime.now(timezone.utc) + timedelta(minutes=minutes)
    payload: Dict[str, Any] = {"sub": subject, "role": role, "exp": expires}
    return jwt.encode(payload, settings.jwt_secret, algorithm=_ALGORITHM)


def decode_token(token: str) -> Dict[str, Any]:
    """Decode and verify a JWT; propagates jose's JWTError on failure."""
    return jwt.decode(token, settings.jwt_secret, algorithms=[_ALGORITHM])


class TokenError(Exception):
    """Raised by safe_decode_token for any invalid or expired token."""


def safe_decode_token(token: str) -> Dict[str, Any]:
    """Decode a JWT, translating jose errors into the app's TokenError."""
    try:
        return decode_token(token)
    except JWTError as exc:
        raise TokenError("Invalid token") from exc
logger = logging.getLogger(__name__)


async def sync_jellyfin_users() -> int:
    """Import Jellyfin accounts as local users.

    Returns the number of users actually created.  Raises HTTPException(400)
    when Jellyfin is not configured.
    """
    runtime = get_runtime_settings()
    client = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key)
    if not client.configured():
        raise HTTPException(status_code=400, detail="Jellyfin not configured")
    users = await client.get_users()
    if not isinstance(users, list):
        # Defensive: an unexpected payload shape imports nothing.
        return 0
    created = 0
    for entry in users:
        if not isinstance(entry, dict):
            continue
        username = entry.get("Name")
        if not username:
            continue
        if create_user_if_missing(username, "jellyfin-user", role="user", auth_provider="jellyfin"):
            created += 1
    return created


async def run_daily_jellyfin_sync() -> None:
    """Background loop: sync Jellyfin users at every local midnight, forever."""
    while True:
        await _sleep_seconds(_seconds_until_midnight())
        try:
            created = await sync_jellyfin_users()
            logger.info("Jellyfin daily sync complete: imported=%s", created)
        except HTTPException as exc:
            # Not configured — skip this run rather than crash the task.
            logger.warning("Jellyfin daily sync skipped: %s", exc.detail)
        except Exception:
            logger.exception("Jellyfin daily sync failed")


def _seconds_until_midnight() -> float:
    """Seconds from now until the next local midnight (never negative)."""
    from datetime import datetime, timedelta

    now = datetime.now()
    upcoming = datetime.combine((now + timedelta(days=1)).date(), datetime.min.time())
    return max((upcoming - now).total_seconds(), 0.0)


async def _sleep_seconds(delay: float) -> None:
    """Awaitable wrapper around asyncio.sleep."""
    import asyncio

    await asyncio.sleep(delay)
import quote + +from ..clients.jellyseerr import JellyseerrClient +from ..clients.jellyfin import JellyfinClient +from ..clients.sonarr import SonarrClient +from ..clients.radarr import RadarrClient +from ..clients.prowlarr import ProwlarrClient +from ..clients.qbittorrent import QBittorrentClient +from ..runtime import get_runtime_settings +from ..db import save_snapshot, get_request_cache_payload +from ..models import ActionOption, NormalizedState, RequestType, Snapshot, TimelineHop + + +STATUS_LABELS = { + 1: "Waiting for approval", + 2: "Approved", + 3: "Declined", + 4: "Ready to watch", + 5: "Working on it", + 6: "Partially ready", +} + + +def _status_label(value: Any) -> str: + try: + numeric = int(value) + return STATUS_LABELS.get(numeric, f"Status {numeric}") + except (TypeError, ValueError): + return "Unknown" + + +def _pick_first(value: Any) -> Optional[Dict[str, Any]]: + if isinstance(value, list): + return value[0] if value else None + if isinstance(value, dict): + return value + return None + + +def _queue_records(queue: Any) -> List[Dict[str, Any]]: + if isinstance(queue, dict): + records = queue.get("records") + if isinstance(records, list): + return records + if isinstance(queue, list): + return queue + return [] + + +def _filter_queue(queue: Any, item_id: Optional[int], request_type: RequestType) -> Any: + if not item_id: + return queue + records = _queue_records(queue) + if not records: + return queue + key = "seriesId" if request_type == RequestType.tv else "movieId" + filtered = [record for record in records if record.get(key) == item_id] + if isinstance(queue, dict): + filtered_queue = dict(queue) + filtered_queue["records"] = filtered + filtered_queue["totalRecords"] = len(filtered) + return filtered_queue + return filtered + + +def _download_ids(records: List[Dict[str, Any]]) -> List[str]: + ids = [] + for record in records: + download_id = record.get("downloadId") or record.get("download_id") + if isinstance(download_id, str) and 
download_id: + ids.append(download_id) + return ids + + +def _missing_episode_numbers_by_season(episodes: Any) -> Dict[int, List[int]]: + if not isinstance(episodes, list): + return {} + grouped: Dict[int, List[int]] = {} + now = datetime.now(timezone.utc) + for episode in episodes: + if not isinstance(episode, dict): + continue + if not episode.get("monitored", True): + continue + if episode.get("hasFile"): + continue + air_date = episode.get("airDateUtc") + if isinstance(air_date, str): + try: + aired_at = datetime.fromisoformat(air_date.replace("Z", "+00:00")) + except ValueError: + aired_at = None + if aired_at and aired_at > now: + continue + season_number = episode.get("seasonNumber") + episode_number = episode.get("episodeNumber") + if not isinstance(episode_number, int): + episode_number = episode.get("absoluteEpisodeNumber") + if isinstance(season_number, int) and isinstance(episode_number, int): + grouped.setdefault(season_number, []).append(episode_number) + for season_number in list(grouped.keys()): + grouped[season_number] = sorted(set(grouped[season_number])) + return grouped + + +def _summarize_qbit(torrents: List[Dict[str, Any]]) -> Dict[str, Any]: + if not torrents: + return {"state": "idle", "message": "0 active downloads."} + + downloading_states = {"downloading", "stalleddl", "queueddl", "checkingdl", "forceddl"} + paused_states = {"pauseddl", "pausedup"} + completed_states = {"uploading", "stalledup", "queuedup", "checkingup", "forcedup", "stoppedup"} + + downloading = [t for t in torrents if str(t.get("state", "")).lower() in downloading_states] + paused = [t for t in torrents if str(t.get("state", "")).lower() in paused_states] + completed = [t for t in torrents if str(t.get("state", "")).lower() in completed_states] + + if downloading: + return { + "state": "downloading", + "message": f"Downloading ({len(downloading)} active).", + } + if paused: + return { + "state": "paused", + "message": f"Paused ({len(paused)} paused).", + } + if 
def _summarize_qbit(torrents: List[Dict[str, Any]]) -> Dict[str, Any]:
    """Collapse a list of qBittorrent torrent dicts into one {state, message}.

    Priority when mixed states are present: downloading > paused > completed,
    otherwise "idle". State names are matched case-insensitively against the
    qBittorrent WebUI "state" field values.
    """
    if not torrents:
        return {"state": "idle", "message": "0 active downloads."}

    downloading_states = {"downloading", "stalleddl", "queueddl", "checkingdl", "forceddl"}
    paused_states = {"pauseddl", "pausedup"}
    completed_states = {"uploading", "stalledup", "queuedup", "checkingup", "forcedup", "stoppedup"}

    downloading = [t for t in torrents if str(t.get("state", "")).lower() in downloading_states]
    paused = [t for t in torrents if str(t.get("state", "")).lower() in paused_states]
    completed = [t for t in torrents if str(t.get("state", "")).lower() in completed_states]

    if downloading:
        return {
            "state": "downloading",
            "message": f"Downloading ({len(downloading)} active).",
        }
    if paused:
        return {
            "state": "paused",
            "message": f"Paused ({len(paused)} paused).",
        }
    if completed:
        return {
            "state": "completed",
            "message": f"Completed/seeding ({len(completed)} seeding).",
        }

    # Unrecognized states fall through to idle.
    return {
        "state": "idle",
        "message": "0 active downloads.",
    }


def _artwork_url(path: Optional[str], size: str, cache_mode: str) -> Optional[str]:
    """Build a poster/backdrop URL for a TMDB image path.

    cache_mode "cache" routes through the local /images/tmdb proxy;
    anything else returns a direct image.tmdb.org URL. Returns None
    when there is no path.
    """
    if not path:
        return None
    if not path.startswith("/"):
        path = f"/{path}"
    if cache_mode == "cache":
        return f"/images/tmdb?path={quote(path)}&size={size}"
    return f"https://image.tmdb.org/t/p/{size}{path}"


async def build_snapshot(request_id: str) -> Snapshot:
    """Assemble a full status Snapshot for one Jellyseerr request.

    Pipeline, in order:
      1. Load the request from the local cache or Jellyseerr (per the
         "requests_data_source" runtime mode).
      2. Resolve title/year/artwork, falling back to Jellyseerr TMDB lookups.
      3. Query Sonarr/Radarr for library state and the download queue.
      4. Check Prowlarr health and Jellyfin availability.
      5. Summarize qBittorrent activity for the queued download hashes.
      6. Derive a NormalizedState + reason, suggested actions, and raw data.

    The snapshot is persisted via save_snapshot (off the event loop) before
    being returned. Individual service failures are recorded as "error"
    timeline hops rather than raised, except a Jellyseerr fetch failure,
    which short-circuits with state=failed.
    """
    timeline: List[TimelineHop] = []
    runtime = get_runtime_settings()

    # One client per upstream service; each may be unconfigured.
    jellyseerr = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    jellyfin = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key)
    sonarr = SonarrClient(runtime.sonarr_base_url, runtime.sonarr_api_key)
    radarr = RadarrClient(runtime.radarr_base_url, runtime.radarr_api_key)
    prowlarr = ProwlarrClient(runtime.prowlarr_base_url, runtime.prowlarr_api_key)
    qbittorrent = QBittorrentClient(
        runtime.qbittorrent_base_url,
        runtime.qbittorrent_username,
        runtime.qbittorrent_password,
    )

    # Default snapshot; fields are overwritten as information is gathered.
    snapshot = Snapshot(
        request_id=request_id,
        title="Unknown",
        state=NormalizedState.unknown,
        state_reason="Awaiting configuration",
    )

    # --- 1. Request payload: cache first unless mode forces Jellyseerr. ---
    cached_request = None
    mode = (runtime.requests_data_source or "prefer_cache").lower()
    if mode != "always_js" and request_id.isdigit():
        cached_request = get_request_cache_payload(int(request_id))
        if cached_request is not None:
            logging.getLogger(__name__).debug(
                "snapshot cache hit: request_id=%s mode=%s", request_id, mode
            )
        else:
            logging.getLogger(__name__).debug(
                "snapshot cache miss: request_id=%s mode=%s", request_id, mode
            )

    # Nothing to work with at all: mark every hop as not configured.
    if not jellyseerr.configured() and not cached_request:
        timeline.append(TimelineHop(service="Jellyseerr", status="not_configured"))
        timeline.append(TimelineHop(service="Sonarr/Radarr", status="not_configured"))
        timeline.append(TimelineHop(service="Prowlarr", status="not_configured"))
        timeline.append(TimelineHop(service="qBittorrent", status="not_configured"))
        snapshot.timeline = timeline
        return snapshot

    jelly_request = cached_request
    # "always_js" refreshes from Jellyseerr even when the cache had a hit.
    if (jelly_request is None or mode == "always_js") and jellyseerr.configured():
        try:
            jelly_request = await jellyseerr.get_request(request_id)
            logging.getLogger(__name__).debug(
                "snapshot jellyseerr fetch: request_id=%s mode=%s", request_id, mode
            )
        except Exception as exc:
            # Jellyseerr unreachable: this is the only hard-fail path.
            timeline.append(TimelineHop(service="Jellyseerr", status="error", details={"error": str(exc)}))
            snapshot.timeline = timeline
            snapshot.state = NormalizedState.failed
            snapshot.state_reason = "Failed to reach Jellyseerr"
            return snapshot

    if not jelly_request:
        timeline.append(TimelineHop(service="Jellyseerr", status="not_found"))
        snapshot.timeline = timeline
        snapshot.state = NormalizedState.unknown
        snapshot.state_reason = "Request not found in Jellyseerr"
        return snapshot

    # --- 2. Title / year / type / artwork from the request's media block. ---
    jelly_status = jelly_request.get("status", "unknown")
    jelly_status_label = _status_label(jelly_status)
    jelly_type = jelly_request.get("type") or "unknown"
    snapshot.title = jelly_request.get("media", {}).get("title", "Unknown")
    snapshot.year = jelly_request.get("media", {}).get("year")
    snapshot.request_type = RequestType(jelly_type) if jelly_type in {"movie", "tv"} else RequestType.unknown
    media = jelly_request.get("media", {}) if isinstance(jelly_request, dict) else {}
    poster_path = None
    backdrop_path = None
    if isinstance(media, dict):
        # Both camelCase and snake_case variants appear in payloads.
        poster_path = media.get("posterPath") or media.get("poster_path")
        backdrop_path = media.get("backdropPath") or media.get("backdrop_path")

    # Fallback: resolve title/year/artwork from Jellyseerr's TMDB detail
    # endpoints when the request payload did not carry a usable title.
    if snapshot.title in {None, "", "Unknown"} and jellyseerr.configured():
        tmdb_id = jelly_request.get("media", {}).get("tmdbId")
        if tmdb_id:
            try:
                if snapshot.request_type == RequestType.movie:
                    details = await jellyseerr.get_movie(int(tmdb_id))
                    if isinstance(details, dict):
                        snapshot.title = details.get("title") or snapshot.title
                        release_date = details.get("releaseDate")
                        # Year is the leading YYYY of the ISO release date.
                        snapshot.year = int(release_date[:4]) if release_date else snapshot.year
                        poster_path = poster_path or details.get("posterPath") or details.get("poster_path")
                        backdrop_path = (
                            backdrop_path
                            or details.get("backdropPath")
                            or details.get("backdrop_path")
                        )
                elif snapshot.request_type == RequestType.tv:
                    details = await jellyseerr.get_tv(int(tmdb_id))
                    if isinstance(details, dict):
                        snapshot.title = details.get("name") or details.get("title") or snapshot.title
                        first_air = details.get("firstAirDate")
                        snapshot.year = int(first_air[:4]) if first_air else snapshot.year
                        poster_path = poster_path or details.get("posterPath") or details.get("poster_path")
                        backdrop_path = (
                            backdrop_path
                            or details.get("backdropPath")
                            or details.get("backdrop_path")
                        )
            except Exception:
                # Best-effort enrichment only; keep whatever we already have.
                pass

    cache_mode = (runtime.artwork_cache_mode or "remote").lower()
    snapshot.artwork = {
        "poster_path": poster_path,
        "backdrop_path": backdrop_path,
        "poster_url": _artwork_url(poster_path, "w342", cache_mode),
        "backdrop_url": _artwork_url(backdrop_path, "w780", cache_mode),
    }

    timeline.append(
        TimelineHop(
            service="Jellyseerr",
            status=jelly_status_label,
            details={
                # First non-empty identity field for the requesting user.
                "requestedBy": jelly_request.get("requestedBy", {}).get("displayName")
                or jelly_request.get("requestedBy", {}).get("username")
                or jelly_request.get("requestedBy", {}).get("jellyfinUsername")
                or jelly_request.get("requestedBy", {}).get("email"),
                "createdAt": jelly_request.get("createdAt"),
                "updatedAt": jelly_request.get("updatedAt"),
                "approved": jelly_request.get("isApproved"),
                "statusCode": jelly_status,
            },
        )
    )

    # --- 3. Sonarr (tv) / Radarr (movie) library + queue state. ---
    arr_state = None
    arr_details: Dict[str, Any] = {}
    arr_item = None
    arr_queue = None
    media_status = jelly_request.get("media", {}).get("status")
    try:
        media_status_code = int(media_status) if media_status is not None else None
    except (TypeError, ValueError):
        media_status_code = None
    if snapshot.request_type == RequestType.tv:
        tvdb_id = jelly_request.get("media", {}).get("tvdbId")
        if tvdb_id:
            try:
                series = await sonarr.get_series_by_tvdb_id(int(tvdb_id))
                arr_item = _pick_first(series)
                arr_details["series"] = arr_item
                arr_state = "added" if arr_item else "missing"
                if arr_item:
                    stats = arr_item.get("statistics") if isinstance(arr_item, dict) else None
                    if isinstance(stats, dict):
                        file_count = stats.get("episodeFileCount")
                        # Prefer totalEpisodeCount; fall back to episodeCount.
                        total_count = (
                            stats.get("totalEpisodeCount")
                            if isinstance(stats.get("totalEpisodeCount"), int)
                            else stats.get("episodeCount")
                        )
                        # Every known episode has a file -> fully available.
                        if (
                            isinstance(file_count, int)
                            and isinstance(total_count, int)
                            and total_count > 0
                            and file_count >= total_count
                        ):
                            arr_state = "available"
                if arr_item and isinstance(arr_item.get("id"), int):
                    series_id = int(arr_item["id"])
                    arr_queue = await sonarr.get_queue(series_id)
                    arr_queue = _filter_queue(arr_queue, series_id, RequestType.tv)
                    arr_details["queue"] = arr_queue
                    episodes = await sonarr.get_episodes(series_id)
                    missing_by_season = _missing_episode_numbers_by_season(episodes)
                    if missing_by_season:
                        arr_details["missingEpisodes"] = missing_by_season
            except Exception as exc:
                arr_state = "error"
                arr_details["error"] = str(exc)
    elif snapshot.request_type == RequestType.movie:
        tmdb_id = jelly_request.get("media", {}).get("tmdbId")
        if tmdb_id:
            try:
                movie = await radarr.get_movie_by_tmdb_id(int(tmdb_id))
                arr_item = _pick_first(movie)
                if not arr_item:
                    # TMDB lookup missed; fall back to scanning the full
                    # movie list by tmdbId, then by title (+ optional year).
                    title_hint = (
                        jelly_request.get("media", {}).get("title")
                        or jelly_request.get("title")
                        or snapshot.title
                    )
                    year_hint = (
                        jelly_request.get("media", {}).get("year")
                        or jelly_request.get("year")
                        or snapshot.year
                    )
                    try:
                        all_movies = await radarr.get_movies()
                    except Exception:
                        all_movies = None
                    if isinstance(all_movies, list):
                        for candidate in all_movies:
                            if not isinstance(candidate, dict):
                                continue
                            if tmdb_id and candidate.get("tmdbId") == int(tmdb_id):
                                arr_item = candidate
                                break
                            if title_hint and candidate.get("title") == title_hint:
                                if not year_hint or candidate.get("year") == year_hint:
                                    arr_item = candidate
                                    break
                arr_details["movie"] = arr_item
                if arr_item:
                    if arr_item.get("hasFile"):
                        arr_state = "available"
                    elif arr_item.get("isAvailable"):
                        # Released but no file yet -> Radarr should be searching.
                        arr_state = "searching"
                    else:
                        arr_state = "added"
                else:
                    arr_state = "missing"
                if arr_item and isinstance(arr_item.get("id"), int):
                    arr_queue = await radarr.get_queue(int(arr_item["id"]))
                    arr_queue = _filter_queue(arr_queue, int(arr_item["id"]), RequestType.movie)
                    arr_details["queue"] = arr_queue
            except Exception as exc:
                arr_state = "error"
                arr_details["error"] = str(exc)

    if arr_state is None:
        arr_state = "unknown"
    # Trust Jellyseerr's media status when the Arr lookup found nothing:
    # 4 = available, 6 = partially available (see STATUS_LABELS).
    if arr_state == "missing" and media_status_code in {4}:
        arr_state = "available"
    elif arr_state == "missing" and media_status_code in {6}:
        arr_state = "added"

    timeline.append(TimelineHop(service="Sonarr/Radarr", status=arr_state, details=arr_details))

    # --- 4a. Prowlarr health check (any entries = issues). ---
    try:
        prowlarr_health = await prowlarr.get_health()
        if isinstance(prowlarr_health, list) and len(prowlarr_health) > 0:
            timeline.append(TimelineHop(service="Prowlarr", status="issues", details={"health": prowlarr_health}))
        else:
            timeline.append(TimelineHop(service="Prowlarr", status="ok"))
    except Exception as exc:
        timeline.append(TimelineHop(service="Prowlarr", status="error", details={"error": str(exc)}))

    # --- 4b. Jellyfin availability: case-insensitive title match, with a
    # year check when both sides have one. ---
    jellyfin_available = False
    jellyfin_item = None
    if jellyfin.configured() and snapshot.title:
        types = ["Movie"] if snapshot.request_type == RequestType.movie else ["Series"]
        try:
            search = await jellyfin.search_items(snapshot.title, types)
        except Exception:
            search = None
        if isinstance(search, dict):
            items = search.get("Items") or search.get("items") or []
            for item in items:
                if not isinstance(item, dict):
                    continue
                name = item.get("Name") or item.get("title")
                year = item.get("ProductionYear") or item.get("Year")
                if name and name.strip().lower() == (snapshot.title or "").strip().lower():
                    if snapshot.year and year and int(year) != int(snapshot.year):
                        continue
                    jellyfin_available = True
                    jellyfin_item = item
                    break

    # Optional reconciliation: the item exists in Jellyfin but is not
    # tracked in the Arr app; add it unmonitored (no search triggered).
    if jellyfin_available and arr_state == "missing" and runtime.jellyfin_sync_to_arr:
        arr_details["note"] = "Found in Jellyfin but not tracked in Sonarr/Radarr."
        if snapshot.request_type == RequestType.movie:
            if runtime.radarr_quality_profile_id and runtime.radarr_root_folder:
                radarr_client = RadarrClient(runtime.radarr_base_url, runtime.radarr_api_key)
                if radarr_client.configured():
                    root_folder = await _resolve_root_folder_path(
                        radarr_client, runtime.radarr_root_folder, "Radarr"
                    )
                    tmdb_id = jelly_request.get("media", {}).get("tmdbId")
                    if tmdb_id:
                        try:
                            await radarr_client.add_movie(
                                int(tmdb_id),
                                runtime.radarr_quality_profile_id,
                                root_folder,
                                monitored=False,
                                search_for_movie=False,
                            )
                        except Exception:
                            # Best-effort; failure to add is non-fatal here.
                            pass
        if snapshot.request_type == RequestType.tv:
            if runtime.sonarr_quality_profile_id and runtime.sonarr_root_folder:
                sonarr_client = SonarrClient(runtime.sonarr_base_url, runtime.sonarr_api_key)
                if sonarr_client.configured():
                    root_folder = await _resolve_root_folder_path(
                        sonarr_client, runtime.sonarr_root_folder, "Sonarr"
                    )
                    tvdb_id = jelly_request.get("media", {}).get("tvdbId")
                    if tvdb_id:
                        try:
                            await sonarr_client.add_series(
                                int(tvdb_id),
                                runtime.sonarr_quality_profile_id,
                                root_folder,
                                monitored=False,
                                search_missing=False,
                            )
                        except Exception:
                            pass

    # --- 5. qBittorrent summary for the download hashes in the Arr queue. ---
    qbit_state = None
    qbit_message = None
    try:
        download_ids = _download_ids(_queue_records(arr_queue))
        torrent_list: List[Dict[str, Any]] = []
        if download_ids and qbittorrent.configured():
            # qBittorrent takes a pipe-separated hash filter.
            torrents = await qbittorrent.get_torrents_by_hashes("|".join(download_ids))
            torrent_list = torrents if isinstance(torrents, list) else []
        summary = _summarize_qbit(torrent_list)
        qbit_state = summary.get("state")
        qbit_message = summary.get("message")
        timeline.append(
            TimelineHop(
                service="qBittorrent",
                status=summary["state"],
                details={
                    "summary": summary["message"],
                    "torrents": torrent_list,
                },
            )
        )
    except Exception as exc:
        timeline.append(TimelineHop(service="qBittorrent", status="error", details={"error": str(exc)}))

    # --- 6. Derive the normalized state, most-specific signal last wins. ---
    status_code = None
    try:
        status_code = int(jelly_status)
    except (TypeError, ValueError):
        status_code = None

    # Codes 2/4/5/6 all imply the request passed approval.
    derived_approved = bool(jelly_request.get("isApproved")) or status_code in {2, 4, 5, 6}

    if derived_approved:
        snapshot.state = NormalizedState.approved
        snapshot.state_reason = "Approved and queued for processing."
    else:
        snapshot.state = NormalizedState.requested
        snapshot.state_reason = "Waiting for approval before we can search."

    queue_records = _queue_records(arr_queue)
    if qbit_state in {"downloading", "paused"}:
        snapshot.state = NormalizedState.downloading
        snapshot.state_reason = "Downloading in qBittorrent."
        if qbit_message:
            snapshot.state_reason = qbit_message
    elif qbit_state == "completed":
        if arr_state == "available":
            snapshot.state = NormalizedState.completed
            snapshot.state_reason = "In your library and ready to watch."
        else:
            snapshot.state = NormalizedState.importing
            snapshot.state_reason = "Download finished. Waiting for library import."
    elif queue_records:
        # Arr queue has entries but qBittorrent shows nothing active.
        if arr_state == "missing":
            snapshot.state_reason = "Queue shows a download, but qBittorrent has no active torrent."
        else:
            snapshot.state_reason = "Waiting for download to start in qBittorrent."
    elif arr_state == "missing" and derived_approved:
        snapshot.state = NormalizedState.needs_add
        snapshot.state_reason = "Approved, but not added to the library yet."
    elif arr_state == "searching":
        snapshot.state = NormalizedState.searching
        snapshot.state_reason = "Searching for a matching release."
    elif arr_state == "available":
        snapshot.state = NormalizedState.completed
        snapshot.state_reason = "In your library and ready to watch."
    elif arr_state == "added" and snapshot.state == NormalizedState.approved:
        snapshot.state = NormalizedState.added_to_arr
        snapshot.state_reason = "Item is present in Sonarr/Radarr"

    # Jellyfin presence overrides everything except an in-flight download.
    if jellyfin_available and snapshot.state not in {
        NormalizedState.downloading,
        NormalizedState.importing,
    }:
        snapshot.state = NormalizedState.completed
        snapshot.state_reason = "Ready to watch in Jellyfin."

    snapshot.timeline = timeline
    # Suggested remediation actions for the UI, keyed by what went wrong.
    actions: List[ActionOption] = []
    if arr_state == "missing":
        actions.append(
            ActionOption(
                id="readd_to_arr",
                label="Add to the library queue (Sonarr/Radarr)",
                risk="medium",
            )
        )
    elif arr_item and arr_state != "available":
        actions.append(
            ActionOption(
                id="search",
                label="Search again for releases",
                risk="low",
            )
        )

    download_ids = _download_ids(_queue_records(arr_queue))
    if download_ids and qbittorrent.configured():
        actions.append(
            ActionOption(
                id="resume_torrent",
                label="Resume the download",
                risk="low",
            )
        )

    snapshot.actions = actions
    # Deep link into the Jellyfin web UI search for completed items.
    # NOTE(review): NormalizedState.available is checked here but never
    # assigned in this function -- presumably set elsewhere; confirm.
    jellyfin_link = None
    if runtime.jellyfin_public_url and snapshot.state in {
        NormalizedState.available,
        NormalizedState.completed,
    }:
        base_url = runtime.jellyfin_public_url.rstrip("/")
        query = quote(snapshot.title or "")
        jellyfin_link = f"{base_url}/web/index.html#!/search?query={query}"
    # Raw payloads retained for debugging / the detail view.
    snapshot.raw = {
        "jellyseerr": jelly_request,
        "arr": {
            "item": arr_item,
            "queue": arr_queue,
        },
        "jellyfin": {
            "publicUrl": runtime.jellyfin_public_url,
            "available": snapshot.state in {
                NormalizedState.available,
                NormalizedState.completed,
            },
            "link": jellyfin_link,
            "item": jellyfin_item,
        },
    }

    # save_snapshot is synchronous (SQLite); run it off the event loop.
    await asyncio.to_thread(save_snapshot, snapshot)
    return snapshot
/dev/null +++ b/backend/requirements.txt @@ -0,0 +1,9 @@ +fastapi==0.115.0 +uvicorn==0.30.6 +httpx==0.27.2 +pydantic==2.9.2 +pydantic-settings==2.5.2 +python-jose[cryptography]==3.3.0 +passlib==1.7.4 +python-multipart==0.0.9 +Pillow==10.4.0 diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..aeb4a12 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,23 @@ +services: + backend: + build: + context: ./backend + dockerfile: Dockerfile + env_file: + - ./.env + ports: + - "8000:8000" + volumes: + - ./data:/app/data + + frontend: + build: + context: ./frontend + dockerfile: Dockerfile + environment: + - NEXT_PUBLIC_API_BASE=/api + - BACKEND_INTERNAL_URL=http://backend:8000 + ports: + - "3000:3000" + depends_on: + - backend diff --git a/frontend/.dockerignore b/frontend/.dockerignore new file mode 100644 index 0000000..b2e279a --- /dev/null +++ b/frontend/.dockerignore @@ -0,0 +1,3 @@ +node_modules/ +.next/ +.env diff --git a/frontend/Dockerfile b/frontend/Dockerfile new file mode 100644 index 0000000..899f83d --- /dev/null +++ b/frontend/Dockerfile @@ -0,0 +1,31 @@ +FROM node:20-alpine AS builder + +WORKDIR /app + +ENV NEXT_TELEMETRY_DISABLED=1 + +COPY package.json ./ +RUN npm install + +COPY app ./app +COPY next-env.d.ts ./next-env.d.ts +COPY next.config.js ./next.config.js +COPY tsconfig.json ./tsconfig.json + +RUN npm run build + +FROM node:20-alpine + +WORKDIR /app + +ENV NEXT_TELEMETRY_DISABLED=1 \ + NODE_ENV=production + +COPY --from=builder /app/.next ./.next +COPY --from=builder /app/node_modules ./node_modules +COPY --from=builder /app/package.json ./package.json +COPY --from=builder /app/next.config.js ./next.config.js + +EXPOSE 3000 + +CMD ["npm", "run", "start"] diff --git a/frontend/app/admin/SettingsPage.tsx b/frontend/app/admin/SettingsPage.tsx new file mode 100644 index 0000000..2db337d --- /dev/null +++ b/frontend/app/admin/SettingsPage.tsx @@ -0,0 +1,1265 @@ +'use client' + +import { useEffect, useMemo, useState } from 
'react' +import { useRouter } from 'next/navigation' +import { authFetch, clearToken, getApiBase, getToken } from '../lib/auth' +import AdminShell from '../ui/AdminShell' + +type AdminSetting = { + key: string + value: string | null + isSet: boolean + source: string + sensitive: boolean +} + +type ServiceOptions = { + rootFolders: { id: number; path: string; label: string }[] + qualityProfiles: { id: number; name: string; label: string }[] +} + +const SECTION_LABELS: Record = { + jellyseerr: 'Jellyseerr', + jellyfin: 'Jellyfin', + artwork: 'Artwork', + cache: 'Cache', + sonarr: 'Sonarr', + radarr: 'Radarr', + prowlarr: 'Prowlarr', + qbittorrent: 'qBittorrent', + log: 'Activity log', + requests: 'Request syncing', +} + +const BOOL_SETTINGS = new Set(['jellyfin_sync_to_arr']) + +const SECTION_DESCRIPTIONS: Record = { + jellyseerr: 'Connect the request system where users submit content.', + jellyfin: 'Control Jellyfin login and availability checks.', + artwork: 'Configure how posters and artwork are loaded.', + cache: 'Manage saved request data and offline artwork.', + sonarr: 'TV automation settings.', + radarr: 'Movie automation settings.', + prowlarr: 'Indexer search settings.', + qbittorrent: 'Downloader connection settings.', + requests: 'Sync and refresh cadence for requests.', + log: 'Activity log for troubleshooting.', +} + +const SETTINGS_SECTION_MAP: Record = { + jellyseerr: 'jellyseerr', + jellyfin: 'jellyfin', + artwork: 'artwork', + sonarr: 'sonarr', + radarr: 'radarr', + prowlarr: 'prowlarr', + qbittorrent: 'qbittorrent', + requests: 'requests', + cache: null, + logs: 'log', + maintenance: null, +} + +const labelFromKey = (key: string) => + key + .replaceAll('_', ' ') + .replace('base url', 'URL') + .replace('api key', 'API key') + .replace('quality profile id', 'Quality profile ID') + .replace('root folder', 'Root folder') + .replace('qbittorrent', 'qBittorrent') + .replace('requests sync ttl minutes', 'Refresh saved requests if older than (minutes)') + 
.replace('requests poll interval seconds', 'Background refresh check (seconds)') + .replace('requests delta sync interval minutes', 'Check for new or updated requests every (minutes)') + .replace('requests full sync time', 'Full refresh time (24h)') + .replace('requests cleanup time', 'Clean up old history time (24h)') + .replace('requests cleanup days', 'Remove history older than (days)') + .replace('requests data source', 'Where requests are loaded from') + .replace('jellyfin public url', 'Jellyfin public URL') + .replace('jellyfin sync to arr', 'Sync Jellyfin to Sonarr/Radarr') + .replace('artwork cache mode', 'Artwork cache mode') + +type SettingsPageProps = { + section: string +} + +export default function SettingsPage({ section }: SettingsPageProps) { + const router = useRouter() + const [settings, setSettings] = useState([]) + const [formValues, setFormValues] = useState>({}) + const [status, setStatus] = useState(null) + const [loading, setLoading] = useState(true) + const [sonarrOptions, setSonarrOptions] = useState(null) + const [radarrOptions, setRadarrOptions] = useState(null) + const [sonarrError, setSonarrError] = useState(null) + const [radarrError, setRadarrError] = useState(null) + const [jellyfinSyncStatus, setJellyfinSyncStatus] = useState(null) + const [requestsSyncStatus, setRequestsSyncStatus] = useState(null) + const [artworkPrefetchStatus, setArtworkPrefetchStatus] = useState(null) + const [logsStatus, setLogsStatus] = useState(null) + const [logsLines, setLogsLines] = useState([]) + const [logsCount, setLogsCount] = useState(200) + const [cacheRows, setCacheRows] = useState([]) + const [cacheCount, setCacheCount] = useState(50) + const [cacheStatus, setCacheStatus] = useState(null) + const [requestsSync, setRequestsSync] = useState(null) + const [artworkPrefetch, setArtworkPrefetch] = useState(null) + const [maintenanceStatus, setMaintenanceStatus] = useState(null) + const [maintenanceBusy, setMaintenanceBusy] = useState(false) + + const 
loadSettings = async () => { + const baseUrl = getApiBase() + const response = await authFetch(`${baseUrl}/admin/settings`) + if (!response.ok) { + if (response.status === 401) { + clearToken() + router.push('/login') + return + } + if (response.status === 403) { + router.push('/') + return + } + throw new Error('Failed to load settings') + } + const data = await response.json() + const fetched = Array.isArray(data?.settings) ? data.settings : [] + setSettings(fetched) + const initialValues: Record = {} + for (const setting of fetched) { + if (!setting.sensitive && setting.value) { + if (BOOL_SETTINGS.has(setting.key)) { + initialValues[setting.key] = String(setting.value).toLowerCase() + } else { + initialValues[setting.key] = setting.value + } + } else { + initialValues[setting.key] = '' + } + } + setFormValues(initialValues) + setStatus(null) + } + + const loadArtworkPrefetchStatus = async () => { + try { + const baseUrl = getApiBase() + const response = await authFetch(`${baseUrl}/admin/requests/artwork/status`) + if (!response.ok) { + return + } + const data = await response.json() + setArtworkPrefetch(data?.prefetch ?? null) + } catch (err) { + console.error(err) + } + } + + + const loadOptions = async (service: 'sonarr' | 'radarr') => { + try { + const baseUrl = getApiBase() + const response = await authFetch(`${baseUrl}/admin/${service}/options`) + if (!response.ok) { + throw new Error('Options unavailable') + } + const data = await response.json() + if (service === 'sonarr') { + setSonarrOptions({ + rootFolders: Array.isArray(data?.rootFolders) ? data.rootFolders : [], + qualityProfiles: Array.isArray(data?.qualityProfiles) ? data.qualityProfiles : [], + }) + setSonarrError(null) + } else { + setRadarrOptions({ + rootFolders: Array.isArray(data?.rootFolders) ? data.rootFolders : [], + qualityProfiles: Array.isArray(data?.qualityProfiles) ? 
data.qualityProfiles : [], + }) + setRadarrError(null) + } + } catch (err) { + console.error(err) + if (service === 'sonarr') { + setSonarrError('Could not load Sonarr options.') + } else { + setRadarrError('Could not load Radarr options.') + } + } + } + + useEffect(() => { + const load = async () => { + if (!getToken()) { + router.push('/login') + return + } + try { + await loadSettings() + if (section === 'artwork') { + await loadArtworkPrefetchStatus() + } + } catch (err) { + console.error(err) + setStatus('Could not load admin settings.') + } finally { + setLoading(false) + } + } + + load() + if (section === 'sonarr') { + void loadOptions('sonarr') + } + if (section === 'radarr') { + void loadOptions('radarr') + } + }, [router, section]) + + const groupedSettings = useMemo(() => { + const groups: Record = {} + for (const setting of settings) { + const section = setting.key.split('_')[0] ?? 'other' + if (!groups[section]) groups[section] = [] + groups[section].push(setting) + } + return groups + }, [settings]) + + const settingsSection = SETTINGS_SECTION_MAP[section] ?? null + const visibleSections = settingsSection ? [settingsSection] : [] + const isCacheSection = section === 'cache' + const cacheSettingKeys = new Set([ + 'requests_sync_ttl_minutes', + 'requests_data_source', + 'artwork_cache_mode', + ]) + const cacheSettings = settings.filter((setting) => cacheSettingKeys.has(setting.key)) + const settingsSections = isCacheSection + ? [{ key: 'cache', title: 'Cache settings', items: cacheSettings }] + : visibleSections.map((sectionKey) => ({ + key: sectionKey, + title: SECTION_LABELS[sectionKey] ?? sectionKey, + items: + sectionKey === 'requests' || sectionKey === 'artwork' + ? (groupedSettings[sectionKey] ?? []).filter( + (setting) => !cacheSettingKeys.has(setting.key) + ) + : groupedSettings[sectionKey] ?? 
[], + })) + const showLogs = section === 'logs' + const showMaintenance = section === 'maintenance' + const showRequestsExtras = section === 'requests' + const showArtworkExtras = section === 'artwork' + const showCacheExtras = section === 'cache' + const shouldRenderSection = (sectionGroup: { key: string; items?: AdminSetting[] }) => { + if (sectionGroup.items && sectionGroup.items.length > 0) return true + if (showArtworkExtras && sectionGroup.key === 'artwork') return true + if (showCacheExtras && sectionGroup.key === 'cache') return true + if (showRequestsExtras && sectionGroup.key === 'requests') return true + return false + } + + const settingDescriptions: Record = { + jellyseerr_base_url: 'Base URL for your Jellyseerr server.', + jellyseerr_api_key: 'API key used to read requests and status.', + jellyfin_base_url: 'Local Jellyfin server URL for logins and lookups.', + jellyfin_api_key: 'Admin API key for syncing users and availability.', + jellyfin_public_url: 'Public Jellyfin URL used for the “Open in Jellyfin” button.', + jellyfin_sync_to_arr: 'Auto-add items to Sonarr/Radarr when they already exist in Jellyfin.', + artwork_cache_mode: 'Choose whether posters are cached locally or loaded from the web.', + sonarr_base_url: 'Sonarr server URL for TV tracking.', + sonarr_api_key: 'API key for Sonarr.', + sonarr_quality_profile_id: 'Quality profile used when adding TV shows.', + sonarr_root_folder: 'Root folder where Sonarr stores TV shows.', + radarr_base_url: 'Radarr server URL for movies.', + radarr_api_key: 'API key for Radarr.', + radarr_quality_profile_id: 'Quality profile used when adding movies.', + radarr_root_folder: 'Root folder where Radarr stores movies.', + prowlarr_base_url: 'Prowlarr server URL for indexer searches.', + prowlarr_api_key: 'API key for Prowlarr.', + qbittorrent_base_url: 'qBittorrent server URL for download status.', + qbittorrent_username: 'qBittorrent login username.', + qbittorrent_password: 'qBittorrent login password.', + 
requests_sync_ttl_minutes: 'How long saved requests stay fresh before a refresh is needed.', + requests_poll_interval_seconds: 'How often the background checker runs.', + requests_delta_sync_interval_minutes: 'How often we check for new or updated requests.', + requests_full_sync_time: 'Daily time to refresh the full request list.', + requests_cleanup_time: 'Daily time to trim old history.', + requests_cleanup_days: 'History older than this is removed during cleanup.', + requests_data_source: 'Pick where Magent should read requests from.', + log_level: 'How much detail is written to the activity log.', + log_file: 'Where the activity log is stored.', + } + + const buildSelectOptions = ( + currentValue: string, + options: { id: number; label: string; path?: string }[], + includePath: boolean + ) => { + const optionValues = new Set(options.map((option) => String(option.id))) + const list = options.map((option) => ( + + )) + if (currentValue && !optionValues.has(currentValue)) { + list.unshift( + + ) + } + return list + } + + const submit = async (event: React.FormEvent) => { + event.preventDefault() + setStatus(null) + const payload: Record = {} + const formData = new FormData(event.currentTarget) + for (const setting of settings) { + const rawValue = formData.get(setting.key) + if (typeof rawValue !== 'string') { + continue + } + const value = rawValue.trim() + if (value === '') { + continue + } + payload[setting.key] = value + } + try { + const baseUrl = getApiBase() + const response = await authFetch(`${baseUrl}/admin/settings`, { + method: 'PUT', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(payload), + }) + if (!response.ok) { + const text = await response.text() + throw new Error(text || 'Update failed') + } + setStatus('Settings saved. New values take effect immediately.') + await loadSettings() + } catch (err) { + console.error(err) + const message = + err instanceof Error && err.message + ? 
err.message.replace(/^\\{\"detail\":\"|\"\\}$/g, '') + : 'Could not save settings.' + setStatus(message) + } + } + + const syncJellyfinUsers = async () => { + setJellyfinSyncStatus(null) + try { + const baseUrl = getApiBase() + const response = await authFetch(`${baseUrl}/admin/jellyfin/users/sync`, { + method: 'POST', + }) + if (!response.ok) { + const text = await response.text() + throw new Error(text || 'Sync failed') + } + const data = await response.json() + setJellyfinSyncStatus(`Imported ${data?.imported ?? 0} users from Jellyfin.`) + } catch (err) { + console.error(err) + const message = + err instanceof Error && err.message + ? err.message.replace(/^\\{\"detail\":\"|\"\\}$/g, '') + : 'Could not import Jellyfin users.' + setJellyfinSyncStatus(message) + } + } + + const syncRequests = async () => { + setRequestsSyncStatus(null) + try { + const baseUrl = getApiBase() + const response = await authFetch(`${baseUrl}/admin/requests/sync`, { + method: 'POST', + }) + if (!response.ok) { + const text = await response.text() + throw new Error(text || 'Sync failed') + } + const data = await response.json() + setRequestsSync(data?.sync ?? null) + setRequestsSyncStatus('Sync started.') + } catch (err) { + console.error(err) + const message = + err instanceof Error && err.message + ? err.message.replace(/^\\{\"detail\":\"|\"\\}$/g, '') + : 'Could not sync requests.' + setRequestsSyncStatus(message) + } + } + + const syncRequestsDelta = async () => { + setRequestsSyncStatus(null) + try { + const baseUrl = getApiBase() + const response = await authFetch(`${baseUrl}/admin/requests/sync/delta`, { + method: 'POST', + }) + if (!response.ok) { + const text = await response.text() + throw new Error(text || 'Delta sync failed') + } + const data = await response.json() + setRequestsSync(data?.sync ?? null) + setRequestsSyncStatus('Delta sync started.') + } catch (err) { + console.error(err) + const message = + err instanceof Error && err.message + ? 
err.message.replace(/^\\{\"detail\":\"|\"\\}$/g, '') + : 'Could not run delta sync.' + setRequestsSyncStatus(message) + } + } + + const prefetchArtwork = async () => { + setArtworkPrefetchStatus(null) + try { + const baseUrl = getApiBase() + const response = await authFetch(`${baseUrl}/admin/requests/artwork/prefetch`, { + method: 'POST', + }) + if (!response.ok) { + const text = await response.text() + throw new Error(text || 'Artwork prefetch failed') + } + const data = await response.json() + setArtworkPrefetch(data?.prefetch ?? null) + setArtworkPrefetchStatus('Artwork caching started.') + } catch (err) { + console.error(err) + const message = + err instanceof Error && err.message + ? err.message.replace(/^\\{\"detail\":\"|\"\\}$/g, '') + : 'Could not cache artwork.' + setArtworkPrefetchStatus(message) + } + } + + useEffect(() => { + if (!artworkPrefetch || artworkPrefetch.status !== 'running') { + return + } + let active = true + const timer = setInterval(async () => { + try { + const baseUrl = getApiBase() + const response = await authFetch(`${baseUrl}/admin/requests/artwork/status`) + if (!response.ok) { + return + } + const data = await response.json() + if (!active) return + setArtworkPrefetch(data?.prefetch ?? 
null) + if (data?.prefetch?.status && data.prefetch.status !== 'running') { + setArtworkPrefetchStatus(data.prefetch.message || 'Artwork caching complete.') + } + } catch (err) { + console.error(err) + } + }, 2000) + return () => { + active = false + clearInterval(timer) + } + }, [artworkPrefetch?.status]) + + useEffect(() => { + if (!artworkPrefetch || artworkPrefetch.status === 'running') { + return + } + const timer = setTimeout(() => { + setArtworkPrefetch(null) + }, 5000) + return () => clearTimeout(timer) + }, [artworkPrefetch?.status]) + + useEffect(() => { + if (!requestsSync || requestsSync.status !== 'running') { + return + } + let active = true + const timer = setInterval(async () => { + try { + const baseUrl = getApiBase() + const response = await authFetch(`${baseUrl}/admin/requests/sync/status`) + if (!response.ok) { + return + } + const data = await response.json() + if (!active) return + setRequestsSync(data?.sync ?? null) + if (data?.sync?.status && data.sync.status !== 'running') { + setRequestsSyncStatus(data.sync.message || 'Sync complete.') + } + } catch (err) { + console.error(err) + } + }, 2000) + return () => { + active = false + clearInterval(timer) + } + }, [requestsSync?.status]) + + useEffect(() => { + if (!requestsSync || requestsSync.status === 'running') { + return + } + const timer = setTimeout(() => { + setRequestsSync(null) + }, 5000) + return () => clearTimeout(timer) + }, [requestsSync?.status]) + + const loadLogs = async () => { + setLogsStatus(null) + try { + const baseUrl = getApiBase() + const response = await authFetch( + `${baseUrl}/admin/logs?lines=${encodeURIComponent(String(logsCount))}` + ) + if (!response.ok) { + const text = await response.text() + throw new Error(text || 'Log fetch failed') + } + const data = await response.json() + if (Array.isArray(data?.lines)) { + setLogsLines(data.lines) + } else { + setLogsLines([]) + } + } catch (err) { + console.error(err) + const message = + err instanceof Error && 
err.message + ? err.message.replace(/^\\{\"detail\":\"|\"\\}$/g, '') + : 'Could not load logs.' + setLogsStatus(message) + } + } + + useEffect(() => { + if (!showLogs) { + return + } + void loadLogs() + const timer = setInterval(() => { + void loadLogs() + }, 5000) + return () => clearInterval(timer) + }, [logsCount, showLogs]) + + const loadCache = async () => { + setCacheStatus(null) + try { + const baseUrl = getApiBase() + const response = await authFetch( + `${baseUrl}/admin/requests/cache?limit=${encodeURIComponent(String(cacheCount))}` + ) + if (!response.ok) { + const text = await response.text() + throw new Error(text || 'Cache fetch failed') + } + const data = await response.json() + if (Array.isArray(data?.rows)) { + setCacheRows(data.rows) + } else { + setCacheRows([]) + } + } catch (err) { + console.error(err) + const message = + err instanceof Error && err.message + ? err.message.replace(/^\\{\"detail\":\"|\"\\}$/g, '') + : 'Could not load cache.' + setCacheStatus(message) + } + } + + const runRepair = async () => { + setMaintenanceStatus(null) + try { + const baseUrl = getApiBase() + const response = await authFetch(`${baseUrl}/admin/maintenance/repair`, { method: 'POST' }) + if (!response.ok) { + const text = await response.text() + throw new Error(text || 'Repair failed') + } + const data = await response.json() + setMaintenanceStatus(`Integrity check: ${data?.integrity ?? 'unknown'}. Vacuum complete.`) + } catch (err) { + console.error(err) + setMaintenanceStatus('Database repair failed.') + } + } + + const runCleanup = async () => { + setMaintenanceStatus(null) + try { + const baseUrl = getApiBase() + const response = await authFetch(`${baseUrl}/admin/maintenance/cleanup?days=90`, { + method: 'POST', + }) + if (!response.ok) { + const text = await response.text() + throw new Error(text || 'Cleanup failed') + } + const data = await response.json() + setMaintenanceStatus( + `Cleaned history older than ${data?.days ?? 
90} days.` + ) + } catch (err) { + console.error(err) + setMaintenanceStatus('Cleanup failed.') + } + } + + const runFlushAndResync = async () => { + setMaintenanceStatus(null) + setMaintenanceBusy(true) + if (typeof window !== 'undefined') { + const ok = window.confirm( + 'This will clear cached requests and history, then re-sync from Jellyseerr. Continue?' + ) + if (!ok) { + setMaintenanceBusy(false) + return + } + } + try { + const baseUrl = getApiBase() + setMaintenanceStatus('Flushing database...') + const flushResponse = await authFetch(`${baseUrl}/admin/maintenance/flush`, { + method: 'POST', + }) + if (!flushResponse.ok) { + const text = await flushResponse.text() + throw new Error(text || 'Flush failed') + } + setMaintenanceStatus('Database flushed. Starting re-sync...') + await syncRequests() + setMaintenanceStatus('Database flushed. Re-sync running now.') + } catch (err) { + console.error(err) + setMaintenanceStatus('Flush + resync failed.') + } finally { + setMaintenanceBusy(false) + } + } + + const clearLogFile = async () => { + setMaintenanceStatus(null) + try { + const baseUrl = getApiBase() + const response = await authFetch(`${baseUrl}/admin/maintenance/logs/clear`, { + method: 'POST', + }) + if (!response.ok) { + const text = await response.text() + throw new Error(text || 'Clear logs failed') + } + setMaintenanceStatus('Log file cleared.') + setLogsLines([]) + } catch (err) { + console.error(err) + setMaintenanceStatus('Clearing logs failed.') + } + } + + if (loading) { + return
Loading admin settings...
+ } + + return ( + router.push('/admin')}> + Back to settings + + } + > + {settingsSections.length > 0 ? ( +
+ {settingsSections + .filter(shouldRenderSection) + .map((sectionGroup) => ( +
+
+

{sectionGroup.title}

+ {sectionGroup.key === 'sonarr' && ( + + )} + {sectionGroup.key === 'radarr' && ( + + )} + {sectionGroup.key === 'jellyfin' && ( + + )} + {(showArtworkExtras && sectionGroup.key === 'artwork') || + (showCacheExtras && sectionGroup.key === 'cache') ? ( + + ) : null} + {showRequestsExtras && sectionGroup.key === 'requests' && ( +
+ + +
+ )} +
+ {SECTION_DESCRIPTIONS[sectionGroup.key] && ( +

{SECTION_DESCRIPTIONS[sectionGroup.key]}

+ )} + {sectionGroup.key === 'sonarr' && sonarrError && ( +
{sonarrError}
+ )} + {sectionGroup.key === 'radarr' && radarrError && ( +
{radarrError}
+ )} + {sectionGroup.key === 'jellyfin' && jellyfinSyncStatus && ( +
{jellyfinSyncStatus}
+ )} + {((showArtworkExtras && sectionGroup.key === 'artwork') || + (showCacheExtras && sectionGroup.key === 'cache')) && + artworkPrefetchStatus && ( +
{artworkPrefetchStatus}
+ )} + {showRequestsExtras && sectionGroup.key === 'requests' && requestsSyncStatus && ( +
{requestsSyncStatus}
+ )} + {((showArtworkExtras && sectionGroup.key === 'artwork') || + (showCacheExtras && sectionGroup.key === 'cache')) && + artworkPrefetch && ( +
+
+ Status: {artworkPrefetch.status} + + {artworkPrefetch.processed ?? 0} + {artworkPrefetch.total ? ` / ${artworkPrefetch.total}` : ''} cached + +
+
+
+
+ {artworkPrefetch.message &&
{artworkPrefetch.message}
} +
+ )} + {showRequestsExtras && sectionGroup.key === 'requests' && requestsSync && ( +
+
+ Status: {requestsSync.status} + + {requestsSync.stored ?? 0} + {requestsSync.total ? ` / ${requestsSync.total}` : ''} synced + +
+
+
+
+ {requestsSync.message &&
{requestsSync.message}
} +
+ )} +
+ {sectionGroup.items.map((setting) => { + const value = formValues[setting.key] ?? '' + const helperText = settingDescriptions[setting.key] + const isSonarrProfile = setting.key === 'sonarr_quality_profile_id' + const isSonarrRoot = setting.key === 'sonarr_root_folder' + const isRadarrProfile = setting.key === 'radarr_quality_profile_id' + const isRadarrRoot = setting.key === 'radarr_root_folder' + const isBoolSetting = BOOL_SETTINGS.has(setting.key) + if (isBoolSetting) { + return ( + + ) + } + if (isSonarrProfile && sonarrOptions) { + return ( + + ) + } + if (isSonarrRoot && sonarrOptions) { + return ( + + ) + } + if (isRadarrProfile && radarrOptions) { + return ( + + ) + } + if (isRadarrRoot && radarrOptions) { + return ( + + ) + } + if (setting.key === 'log_level') { + return ( + + ) + } + if (setting.key === 'artwork_cache_mode') { + return ( + + ) + } + if ( + setting.key === 'requests_full_sync_time' || + setting.key === 'requests_cleanup_time' + ) { + return ( + + ) + } + if ( + setting.key === 'requests_delta_sync_interval_minutes' || + setting.key === 'requests_cleanup_days' + ) { + return ( + + ) + } + if (setting.key === 'requests_data_source') { + return ( + + ) + } + return ( + + ) + })} +
+
+ ))} + {status &&
{status}
} +
+ +
+
+ ) : ( +
+ No settings to show here yet. Try the Cache page for artwork and saved-request controls. +
+ )} + {showLogs && ( +
+
+

Activity log

+
+ + +
+
+ {logsStatus &&
{logsStatus}
} +
{logsLines.join('')}
+
+ )} + {showCacheExtras && ( +
+
+

Saved requests (cache)

+
+ + +
+
+ {cacheStatus &&
{cacheStatus}
} +
+
+ Request + Title + Type + Status + Last update +
+ {cacheRows.length === 0 ? ( +
No saved requests loaded yet.
+ ) : ( + cacheRows.map((row) => ( +
+ #{row.request_id} + {row.title || 'Untitled'} + {row.media_type || 'unknown'} + {row.status ?? 'n/a'} + {row.updated_at || row.created_at || 'n/a'} +
+ )) + )} +
+
+ )} + {showMaintenance && ( +
+
+

Maintenance

+
+
+ Emergency tools. Use with care: flush will clear saved requests and history. +
+ {maintenanceStatus &&
{maintenanceStatus}
} +
+ + + + +
+
+ )} + {showRequestsExtras && ( +
+
+

Scheduled tasks

+
+
+ Automated jobs keep requests and housekeeping up to date. +
+
+
+

Quick request check

+

+ Every {formValues.requests_delta_sync_interval_minutes || '5'} minutes, checks for + new or updated requests. +

+
+
+

Full daily refresh

+

+ Every day at {formValues.requests_full_sync_time || '00:00'}, refreshes the entire + requests list. +

+
+
+

History cleanup

+

+ Every day at {formValues.requests_cleanup_time || '02:00'}, removes history older + than {formValues.requests_cleanup_days || '90'} days. +

+
+
+
+ )} +
+ ) +} diff --git a/frontend/app/admin/[section]/page.tsx b/frontend/app/admin/[section]/page.tsx new file mode 100644 index 0000000..de5832f --- /dev/null +++ b/frontend/app/admin/[section]/page.tsx @@ -0,0 +1,27 @@ +import { notFound } from 'next/navigation' +import SettingsPage from '../SettingsPage' + +const ALLOWED_SECTIONS = new Set([ + 'jellyseerr', + 'jellyfin', + 'artwork', + 'sonarr', + 'radarr', + 'prowlarr', + 'qbittorrent', + 'requests', + 'cache', + 'logs', + 'maintenance', +]) + +type PageProps = { + params: { section: string } +} + +export default function AdminSectionPage({ params }: PageProps) { + if (!ALLOWED_SECTIONS.has(params.section)) { + notFound() + } + return +} diff --git a/frontend/app/admin/page.tsx b/frontend/app/admin/page.tsx new file mode 100644 index 0000000..30f1888 --- /dev/null +++ b/frontend/app/admin/page.tsx @@ -0,0 +1,26 @@ +'use client' + +import { useRouter } from 'next/navigation' +import AdminShell from '../ui/AdminShell' + +export default function AdminLandingPage() { + const router = useRouter() + + return ( + router.push('/')}> + Back to requests + + } + > +
+
+ Pick a section from the left. Each page explains what it does and how it helps. +
+
+
+ ) +} diff --git a/frontend/app/globals.css b/frontend/app/globals.css new file mode 100644 index 0000000..23820c5 --- /dev/null +++ b/frontend/app/globals.css @@ -0,0 +1,1482 @@ +@import url('https://fonts.googleapis.com/css2?family=Space+Grotesk:wght@400;600;700&family=JetBrains+Mono:wght@400;600&display=swap'); + +:root { + color-scheme: light; + --ink: #0f1117; + --ink-muted: #3f4656; + --paper: #f0f4ff; + --paper-strong: #ffffff; + --accent: #ff6b2b; + --accent-2: #1c6bff; + --accent-3: #11d6c6; + --border: rgba(15, 17, 23, 0.12); + --shadow: rgba(15, 17, 23, 0.18); + --glow: 0 0 18px rgba(28, 107, 255, 0.25); + --input-bg: rgba(15, 17, 23, 0.04); + --input-ink: var(--ink); + --error-bg: rgba(255, 107, 43, 0.12); + --error-ink: #6b2c17; +} + +[data-theme='dark'] { + color-scheme: dark; + --ink: #e9ecf5; + --ink-muted: #9aa3b8; + --paper: #0b0f18; + --paper-strong: #111827; + --accent: #ff6b2b; + --accent-2: #3b82f6; + --accent-3: #22f6e3; + --border: rgba(255, 255, 255, 0.08); + --shadow: rgba(0, 0, 0, 0.6); + --glow: 0 0 22px rgba(59, 130, 246, 0.45); + --input-bg: rgba(255, 255, 255, 0.08); + --input-ink: var(--ink); + --error-bg: rgba(255, 107, 43, 0.18); + --error-ink: #ffd3bf; +} + +* { + box-sizing: border-box; + margin: 0; + padding: 0; +} + +body { + font-family: "Space Grotesk", "Segoe UI", sans-serif; + background: radial-gradient(circle at top, rgba(17, 33, 74, 0.9) 0%, rgba(8, 12, 22, 1) 55%, #05070d 100%); + color: var(--ink); + min-height: 100vh; + transition: background 0.4s ease, color 0.4s ease; +} + +[data-theme='light'] body { + background: radial-gradient(circle at top, #f7faff 0%, #eef2ff 45%, #e3edff 100%); +} + +.page { + max-width: 1100px; + margin: 0 auto; + padding: 40px 24px 80px; + display: grid; + gap: 32px; +} + +.header { + display: grid; + grid-template-columns: 1fr auto; + grid-template-rows: auto auto; + align-items: center; + gap: 12px 16px; +} + +.header-left { + grid-column: 1 / 2; + grid-row: 1 / 2; + display: 
inline-flex; + align-items: center; + gap: 14px; +} + +.brand-link { + display: inline-flex; + align-items: center; + gap: 14px; + color: inherit; + text-decoration: none; +} + +.brand-link:hover .brand { + color: var(--ink); +} + +.brand-stack { + display: grid; + gap: 4px; +} + +.header-right { + grid-column: 2 / 3; + grid-row: 1 / 2; + display: inline-flex; + align-items: center; + justify-content: flex-end; + gap: 12px; +} + +.header-nav { + grid-column: 1 / -1; + grid-row: 2 / 3; + display: flex; + justify-content: flex-end; +} + +.brand { + font-size: 32px; + letter-spacing: 0.02em; + font-weight: 700; + text-transform: uppercase; +} + +.tagline { + color: var(--ink-muted); + font-size: 16px; +} + +.header-actions { + display: flex; + gap: 16px; + font-size: 14px; + align-items: center; + justify-content: flex-end; + flex-wrap: wrap; +} + +.header-actions a { + color: var(--ink); + text-decoration: none; + padding: 6px 12px; + border-radius: 999px; + background: rgba(255, 255, 255, 0.08); + border: 1px solid var(--border); + backdrop-filter: blur(8px); + display: inline-flex; + align-items: center; + justify-content: center; + text-align: center; +} + +.header-actions .header-link { + color: var(--ink); + text-decoration: none; + padding: 6px 12px; + border-radius: 999px; + background: rgba(255, 255, 255, 0.08); + border: 1px solid var(--border); + backdrop-filter: blur(8px); + font-size: 14px; + box-shadow: none; + display: inline-flex; + align-items: center; + justify-content: center; + text-align: center; +} + +.signed-in { + font-size: 12px; + text-transform: uppercase; + letter-spacing: 0.08em; + color: var(--ink-muted); + padding: 6px 10px; + border-radius: 999px; + border: 1px dashed var(--border); + background: transparent; + cursor: pointer; +} + +.signed-in-menu { + position: relative; + display: inline-flex; + align-items: center; +} + +.signed-in-dropdown { + position: absolute; + top: calc(100% + 8px); + right: 0; + min-width: 180px; + 
background: rgba(14, 20, 32, 0.95); + border: 1px solid var(--border); + border-radius: 12px; + padding: 8px; + box-shadow: 0 12px 26px var(--shadow); + z-index: 20; +} + +.signed-in-dropdown a { + display: block; + padding: 8px 12px; + border-radius: 10px; + color: var(--ink); + text-decoration: none; + text-align: center; +} + +.signed-in-dropdown a:hover { + background: rgba(255, 255, 255, 0.08); +} + +.theme-toggle { + width: 40px; + height: 40px; + padding: 0; + border-radius: 50%; + background: linear-gradient(120deg, rgba(28, 107, 255, 0.2), rgba(34, 246, 227, 0.2)); + border: 1px solid var(--border); + color: var(--ink); + display: inline-flex; + align-items: center; + justify-content: center; + box-shadow: var(--glow); +} + +.theme-toggle svg { + width: 20px; + height: 20px; + fill: none; + stroke: currentColor; + stroke-width: 1.6; + stroke-linecap: round; + stroke-linejoin: round; +} + +.card { + background: var(--paper-strong); + border-radius: 24px; + padding: 32px; + box-shadow: 0 18px 40px var(--shadow); + display: grid; + gap: 24px; + animation: rise 0.5s ease-out; + border: 1px solid var(--border); +} + +.layout-grid { + display: grid; + grid-template-columns: minmax(0, 1.2fr) minmax(0, 0.8fr); + gap: 28px; + align-items: start; +} + +.side-panel { + position: sticky; + top: 24px; + align-self: start; +} + +.find-panel { + display: grid; + gap: 20px; +} + +.find-header { + display: grid; + gap: 8px; +} + +.find-controls { + display: grid; + gap: 16px; +} + +.centerpiece { + padding: 8px 0 4px; +} + +.centerpiece .recent-grid button { + padding: 14px 18px; + border-radius: 18px; +} + +h1 { + font-size: 36px; +} + +h2 { + font-size: 22px; +} + +h3 { + font-size: 18px; +} + +.lede { + font-size: 18px; + color: var(--ink-muted); +} + +.search { + display: grid; + grid-template-columns: 1fr auto; + gap: 14px; +} + +.search-row button { + align-self: stretch; +} + +input { + padding: 14px 16px; + border-radius: 16px; + border: 1px solid var(--border); + 
font-size: 16px; + background: var(--input-bg); + color: var(--input-ink); +} + +input::placeholder { + color: var(--ink-muted); +} + +select { + padding: 14px 16px; + border-radius: 16px; + border: 1px solid var(--border); + font-size: 16px; + background: var(--input-bg); + color: var(--input-ink); +} + +select option { + background: var(--paper-strong); + color: var(--ink); +} + +button { + padding: 12px 18px; + border-radius: 999px; + border: none; + background: linear-gradient(120deg, var(--accent), var(--accent-2)); + color: #fff; + font-size: 15px; + cursor: pointer; + display: inline-flex; + align-items: center; + justify-content: center; + gap: 10px; + box-shadow: var(--glow); + text-align: center; +} + +button span { + font-size: 12px; + text-transform: uppercase; + opacity: 0.8; + text-align: center; +} + +.filters { + display: grid; + gap: 12px; +} + +.filters-compact { + grid-template-columns: repeat(auto-fit, minmax(200px, 1fr)); + align-items: start; + padding: 12px; + border-radius: 18px; + border: 1px solid var(--border); + background: rgba(255, 255, 255, 0.04); +} + +.filter { + display: grid; + gap: 8px; + font-size: 14px; + color: var(--ink-muted); +} + +.pill-group { + display: flex; + flex-wrap: wrap; + gap: 8px; +} + +.pill-group button { + background: rgba(28, 107, 255, 0.15); + color: var(--ink); + padding: 8px 14px; + font-size: 13px; +} + +.recent-grid { + display: grid; + gap: 10px; +} + +.recent-grid button { + justify-content: center; + background: rgba(255, 255, 255, 0.08); + color: var(--ink); + border: 1px solid var(--border); +} + +.recent-card { + display: flex; + align-items: center; + gap: 12px; + text-align: left; +} + +.recent-poster { + width: 46px; + height: 68px; + border-radius: 10px; + object-fit: cover; + border: 1px solid var(--border); + box-shadow: 0 8px 18px var(--shadow); + flex-shrink: 0; +} + +.recent-info { + display: grid; + gap: 4px; +} + +.recent-title { + font-weight: 600; +} + +.recent-meta { + color: 
var(--ink-muted); + font-size: 13px; +} + +.request-header { + display: flex; + flex-wrap: wrap; + justify-content: space-between; + gap: 16px; +} + +.request-header-main { + display: flex; + align-items: center; + gap: 16px; +} + +.request-poster { + width: 90px; + height: 135px; + border-radius: 14px; + object-fit: cover; + border: 1px solid var(--border); + box-shadow: 0 12px 26px var(--shadow); +} + +.brand-preview { + margin-top: 12px; + max-width: 300px; + max-height: 300px; + width: 100%; + height: auto; + border-radius: 16px; + border: 1px solid var(--border); + background: rgba(255, 255, 255, 0.05); + box-shadow: 0 10px 24px var(--shadow); +} + +.brand-logo { + display: block; + max-width: 100%; + height: auto; + object-fit: contain; +} + +.brand-logo--header { + width: 100px; + height: 100px; + border-radius: 0; + border: none; + background: transparent; + box-shadow: none; +} + +.brand-logo--login { + width: 180px; + height: 180px; + margin: 40px auto 16px; + border-radius: 0; + border: none; + background: transparent; + box-shadow: none; +} + +.meta { + color: var(--ink-muted); + margin-top: 4px; +} + +.state { + display: grid; + gap: 6px; + padding: 12px 16px; + background: rgba(255, 255, 255, 0.08); + border-radius: 16px; +} + +.status-box { + background: rgba(11, 15, 24, 0.7); + border-radius: 20px; + padding: 20px; + display: grid; + gap: 16px; + border: 1px solid var(--border); + box-shadow: var(--glow); +} + +.status-box h2 { + font-size: 18px; +} + +.status-box p { + color: var(--ink-muted); +} + +.status-text { + font-size: 20px; + color: var(--ink); + font-weight: 600; +} + +.timeline { + display: grid; + gap: 20px; + position: relative; + padding-left: 20px; +} + +.timeline::before { + content: ""; + position: absolute; + left: 10px; + top: 0; + bottom: 0; + width: 2px; + background: linear-gradient(180deg, var(--accent-2), transparent); +} + +.timeline-item { + display: grid; + grid-template-columns: 20px 1fr; + gap: 12px; +} + 
+.timeline-marker { + width: 10px; + height: 10px; + border-radius: 50%; + background: var(--accent-3); + box-shadow: 0 0 10px rgba(34, 246, 227, 0.6); + margin-top: 6px; +} + +.timeline-card { + background: rgba(255, 255, 255, 0.06); + border-radius: 16px; + padding: 16px; + display: grid; + gap: 12px; + border: 1px solid var(--border); +} + +.timeline-card pre { + background: rgba(255, 255, 255, 0.08); + padding: 12px; + border-radius: 12px; + overflow-x: auto; + font-size: 12px; + font-family: "JetBrains Mono", "Consolas", monospace; +} + +.timeline-sublist { + display: grid; + gap: 8px; +} + +.timeline-sublist ul { + list-style: none; + display: grid; + gap: 6px; +} + +.timeline-sublist li { + display: flex; + justify-content: space-between; + gap: 12px; + font-size: 14px; + color: var(--ink-muted); +} + +.timeline-title { + display: flex; + justify-content: space-between; + align-items: center; + text-transform: uppercase; + font-size: 12px; + letter-spacing: 0.08em; +} + +.summary { + display: grid; + grid-template-columns: repeat(2, minmax(0, 1fr)); + gap: 16px; +} + +.summary-card { + background: rgba(255, 255, 255, 0.08); + border-radius: 18px; + padding: 18px; + display: grid; + gap: 10px; + border: 1px solid var(--border); +} + +.user-card { + display: flex; + justify-content: space-between; + align-items: center; + gap: 16px; +} + +.user-actions { + display: grid; + gap: 8px; + justify-items: end; +} + +.toggle { + display: inline-flex; + gap: 8px; + align-items: center; + font-size: 13px; + color: var(--ink); +} + +.toggle input[type='checkbox'] { + width: 16px; + height: 16px; +} + +.summary-card ul { + list-style: disc; + padding-left: 18px; + color: var(--ink-muted); +} + +.summary-card p { + color: var(--ink-muted); +} + +.summary-card .helper { + font-size: 13px; + line-height: 1.4; + color: #6a5b4c; +} + +.details-toggle { + display: flex; + justify-content: flex-end; +} + +.details-toggle button { + background: rgba(255, 255, 255, 0.08); + 
color: var(--ink); + border: 1px solid var(--border); +} + +.actions { + display: grid; + gap: 12px; +} + +.action-grid { + display: grid; + gap: 10px; +} + +.action-message { + padding: 10px 14px; + border-radius: 12px; + background: rgba(255, 255, 255, 0.08); + color: var(--ink-muted); + font-size: 14px; +} + +.action-grid button { + background: linear-gradient(120deg, rgba(59, 130, 246, 0.8), rgba(34, 246, 227, 0.7)); +} + +.history { + display: grid; + gap: 12px; +} + +.history-grid { + display: grid; + grid-template-columns: repeat(2, minmax(0, 1fr)); + gap: 16px; +} + +.history-grid ul { + list-style: none; + display: grid; + gap: 8px; + color: var(--ink-muted); +} + +.history-grid li { + display: grid; + gap: 4px; +} + +.history-grid li span:last-child { + font-size: 13px; + color: #6a5b4c; +} + +.modal-backdrop { + position: fixed; + inset: 0; + background: rgba(27, 28, 30, 0.45); + display: grid; + place-items: center; + z-index: 40; + padding: 24px; +} + +.modal-card { + background: var(--paper-strong); + border-radius: 20px; + padding: 24px; + box-shadow: 0 20px 45px var(--shadow); + display: grid; + gap: 12px; + max-width: 420px; + width: 100%; +} + +.modal-card button { + justify-self: start; + background: linear-gradient(120deg, var(--accent), var(--accent-2)); +} + +.auth-card { + max-width: 520px; + margin: 0 auto; +} + +.auth-form { + display: grid; + gap: 16px; +} + +.auth-actions { + display: grid; + gap: 10px; +} + +.ghost-button { + background: rgba(255, 255, 255, 0.08); + color: var(--ink); + border: 1px solid var(--border); + box-shadow: none; + display: inline-flex; + align-items: center; + justify-content: center; + gap: 10px; + padding: 12px 18px; + border-radius: 999px; + font-size: 15px; + text-decoration: none; + text-align: center; +} + +.auth-form label { + display: grid; + gap: 8px; + font-size: 14px; + color: var(--ink-muted); + text-align: center; +} + +.error-banner { + padding: 10px 14px; + border-radius: 12px; + background: 
var(--error-bg); + color: var(--error-ink); + font-size: 14px; + border: 1px solid var(--border); +} + +.admin-card { + gap: 32px; +} + +.admin-shell { + display: grid; + grid-template-columns: minmax(210px, 240px) minmax(0, 1fr); + gap: 24px; + align-items: start; +} + +.admin-shell-nav { + position: sticky; + top: 24px; +} + +.admin-sidebar { + display: grid; + gap: 16px; + padding: 16px; + border-radius: 18px; + border: 1px solid var(--border); + background: rgba(255, 255, 255, 0.06); +} + +.admin-sidebar-title { + font-size: 12px; + text-transform: uppercase; + letter-spacing: 0.12em; + color: var(--ink-muted); +} + +.admin-nav-group { + display: grid; + gap: 8px; +} + +.admin-nav-title { + font-size: 12px; + text-transform: uppercase; + letter-spacing: 0.08em; + color: var(--ink-muted); +} + +.admin-nav-links { + display: grid; + gap: 8px; +} + +.admin-nav-links a { + color: var(--ink); + text-decoration: none; + padding: 8px 12px; + border-radius: 12px; + border: 1px solid transparent; + background: rgba(255, 255, 255, 0.04); + font-size: 14px; +} + +.admin-nav-links a.is-active { + border-color: rgba(59, 130, 246, 0.4); + background: rgba(59, 130, 246, 0.18); + color: var(--ink); +} + +.admin-header { + display: flex; + justify-content: space-between; + gap: 16px; + align-items: center; + flex-wrap: wrap; +} + +.admin-form { + display: grid; + gap: 24px; +} + +.admin-section { + display: grid; + gap: 12px; +} + +.section-header { + display: flex; + justify-content: space-between; + align-items: center; + gap: 12px; + flex-wrap: wrap; +} + +.section-subtitle { + color: var(--ink-muted); + font-size: 13px; + margin-top: -6px; +} + +.sync-actions { + display: inline-flex; + gap: 10px; + flex-wrap: wrap; +} + +.section-header button { + background: rgba(255, 255, 255, 0.08); + color: var(--ink); +} + +.admin-grid { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(240px, 1fr)); + gap: 16px; +} + +.admin-grid label { + display: grid; + gap: 8px; + 
font-size: 14px; + color: var(--ink-muted); + text-align: center; +} + +.admin-grid label[data-helper]::after { + content: attr(data-helper); + font-size: 12px; + color: var(--ink-muted); + line-height: 1.4; +} + +.label-row { + display: flex; + justify-content: space-between; + gap: 12px; + text-align: center; +} + +.status-banner { + padding: 12px 16px; + border-radius: 12px; + background: rgba(255, 255, 255, 0.08); + color: var(--ink); + font-size: 14px; + border: 1px solid var(--border); +} + +.recent-header { + display: flex; + justify-content: space-between; + align-items: center; + gap: 12px; + flex-wrap: wrap; +} + +.recent-filter { + display: inline-flex; + gap: 8px; + align-items: center; + font-size: 13px; + color: var(--ink-muted); +} + +.recent-filter select { + padding: 8px 12px; + font-size: 13px; +} + +.admin-actions { + display: flex; + justify-content: flex-end; +} + +.settings-nav { + display: flex; + gap: 16px; + flex-wrap: wrap; + padding: 12px 16px; + border-radius: 16px; + border: 1px solid var(--border); + background: rgba(255, 255, 255, 0.06); +} + +.settings-group { + display: grid; + gap: 6px; + min-width: 180px; +} + +.settings-title { + font-size: 12px; + text-transform: uppercase; + letter-spacing: 0.08em; + color: var(--ink-muted); +} + +.settings-links { + display: flex; + gap: 8px; + flex-wrap: wrap; +} + +.settings-links a { + color: var(--ink); + text-decoration: none; + padding: 6px 12px; + border-radius: 999px; + border: 1px solid var(--border); + background: rgba(255, 255, 255, 0.08); + font-size: 13px; +} + +.log-actions { + display: inline-flex; + gap: 12px; + align-items: center; +} + +.log-viewer { + background: rgba(255, 255, 255, 0.08); + border: 1px solid var(--border); + border-radius: 12px; + padding: 16px; + max-height: 320px; + overflow: auto; + font-size: 12px; + line-height: 1.4; + color: var(--ink); +} + +.cache-table { + display: grid; + gap: 8px; +} + +.cache-row { + display: grid; + grid-template-columns: 90px 
minmax(0, 1.6fr) 120px 90px 180px; + gap: 12px; + padding: 10px 12px; + border-radius: 12px; + border: 1px solid var(--border); + background: rgba(255, 255, 255, 0.06); + font-size: 13px; + color: var(--ink); +} + +.cache-row span { + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; +} + +.cache-head { + background: rgba(255, 255, 255, 0.12); + font-weight: 600; + text-transform: uppercase; + font-size: 11px; + letter-spacing: 0.08em; +} + +.maintenance-grid { + display: grid; + gap: 12px; + grid-template-columns: repeat(auto-fit, minmax(200px, 1fr)); +} + +.schedule-grid { + display: grid; + gap: 12px; + grid-template-columns: repeat(auto-fit, minmax(220px, 1fr)); +} + +.schedule-card { + background: rgba(255, 255, 255, 0.08); + border-radius: 16px; + padding: 16px; + border: 1px solid var(--border); + display: grid; + gap: 6px; +} + +.schedule-card h3 { + font-size: 16px; +} + +.schedule-card p { + color: var(--ink-muted); + font-size: 14px; +} + +.danger-button { + background: linear-gradient(120deg, #ff3b30, #ff8a3d); + color: #fff; +} + +.sync-progress { + display: grid; + gap: 8px; + padding: 12px 16px; + border-radius: 12px; + background: rgba(255, 255, 255, 0.08); + border: 1px solid var(--border); +} + +.sync-meta { + display: flex; + justify-content: space-between; + gap: 12px; + font-size: 13px; + color: var(--ink-muted); +} + +.progress { + position: relative; + height: 10px; + border-radius: 999px; + background: rgba(255, 255, 255, 0.08); + overflow: hidden; + border: 1px solid var(--border); +} + +.progress-fill { + height: 100%; + background: linear-gradient(120deg, var(--accent-2), var(--accent-3)); + transition: width 0.3s ease; +} + +.progress-indeterminate .progress-fill { + position: absolute; + animation: progress-indeterminate 1.6s ease-in-out infinite; +} + +.progress-complete .progress-fill { + animation: none; +} + +.system-status { + border-radius: 16px; + border: 1px solid var(--border); + background: rgba(255, 255, 
255, 0.08); + padding: 16px; + margin-bottom: 16px; +} + +.system-header { + display: flex; + align-items: center; + justify-content: space-between; + gap: 12px; + margin-bottom: 12px; +} + +.system-header h2 { + font-size: 18px; +} + +.system-pill { + padding: 6px 12px; + border-radius: 999px; + font-size: 12px; + letter-spacing: 0.02em; + background: rgba(255, 255, 255, 0.08); + border: 1px solid var(--border); + color: var(--ink-muted); +} + +.system-pill-up { + color: #0b3d2e; + background: rgba(61, 220, 151, 0.2); + border-color: rgba(61, 220, 151, 0.4); +} + +[data-theme='dark'] .system-pill-up { + color: #e9fff6; + background: rgba(61, 220, 151, 0.28); + border-color: rgba(61, 220, 151, 0.5); +} + +.system-pill-down { + color: #4a0c0c; + background: rgba(255, 59, 48, 0.2); + border-color: rgba(255, 59, 48, 0.4); +} + +.system-pill-degraded { + color: #3e2b00; + background: rgba(255, 200, 87, 0.22); + border-color: rgba(255, 200, 87, 0.4); +} + +.system-list { + display: grid; + gap: 8px; + grid-template-columns: repeat(auto-fit, minmax(180px, 1fr)); +} + +.system-item { + display: flex; + align-items: center; + gap: 10px; + padding: 10px 12px; + border-radius: 12px; + background: rgba(255, 255, 255, 0.04); + border: 1px solid var(--border); + font-size: 13px; +} + +.system-dot { + width: 10px; + height: 10px; + border-radius: 999px; + background: #6b6b6b; + box-shadow: 0 0 0 3px rgba(255, 255, 255, 0.06); +} + +.system-up .system-dot { + background: #3ddc97; + box-shadow: 0 0 10px rgba(61, 220, 151, 0.6); +} + +.system-down .system-dot { + background: #ff3b30; + box-shadow: 0 0 10px rgba(255, 59, 48, 0.6); +} + +.system-degraded .system-dot, +.system-not_configured .system-dot { + background: #ffc857; + box-shadow: 0 0 10px rgba(255, 200, 87, 0.5); +} + +.system-name { + font-weight: 600; +} + +.system-state { + margin-left: auto; + color: var(--ink-muted); +} + +.pipeline-map { + border-radius: 16px; + border: 1px solid var(--border); + background: 
rgba(255, 255, 255, 0.06); + padding: 16px; + margin-bottom: 16px; +} + +.pipeline-map h2 { + font-size: 18px; + margin-bottom: 12px; +} + +.pipeline-steps { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(120px, 1fr)); + gap: 10px; +} + +.pipeline-step { + display: flex; + align-items: center; + gap: 8px; + padding: 10px 12px; + border-radius: 999px; + border: 1px solid var(--border); + background: rgba(255, 255, 255, 0.04); + color: var(--ink-muted); + font-size: 13px; +} + +.pipeline-dot { + width: 10px; + height: 10px; + border-radius: 999px; + background: #6b6b6b; + box-shadow: 0 0 0 3px rgba(255, 255, 255, 0.06); +} + +.pipeline-step.is-complete { + color: var(--ink); +} + +.pipeline-step.is-complete .pipeline-dot { + background: #3ddc97; + box-shadow: 0 0 10px rgba(61, 220, 151, 0.4); +} + +.pipeline-step.is-active { + color: var(--ink); + border-color: rgba(61, 220, 151, 0.5); + background: rgba(61, 220, 151, 0.12); +} + +.pipeline-step.is-active .pipeline-dot { + background: #3ddc97; + box-shadow: 0 0 14px rgba(61, 220, 151, 0.8); +} + +.pipeline-hint { + margin-top: 10px; + color: var(--ink-muted); + font-size: 13px; +} + +.timeline-item.is-active .timeline-marker { + background: #3ddc97; + box-shadow: 0 0 12px rgba(61, 220, 151, 0.8); +} + +@keyframes progress-indeterminate { + 0% { + transform: translateX(-50%); + } + 50% { + transform: translateX(120%); + } + 100% { + transform: translateX(-50%); + } +} + +@keyframes rise { + from { + opacity: 0; + transform: translateY(12px); + } + to { + opacity: 1; + transform: translateY(0); + } +} + +@media (max-width: 720px) { + .header { + grid-template-columns: 1fr; + grid-template-rows: auto auto auto; + align-items: flex-start; + } + + .header-right { + grid-column: 1 / -1; + justify-content: flex-start; + } + + .header-nav { + justify-content: flex-start; + } + + .summary { + grid-template-columns: 1fr; + } + + .history-grid { + grid-template-columns: 1fr; + } + + .layout-grid { + 
grid-template-columns: 1fr; + } + + .side-panel { + position: static; + } + + .admin-shell { + grid-template-columns: 1fr; + } + + .admin-shell-nav { + position: static; + } + + .search { + grid-template-columns: 1fr; + } + + .card { + padding: 24px; + } + + .cache-row { + grid-template-columns: 1fr; + } +} + +/* Loading spinner */ +.loading-center { + display: flex; + align-items: center; + justify-content: center; + gap: 12px; + padding: 28px 0; +} + +.spinner { + width: 44px; + height: 44px; + border-radius: 50%; + border: 4px solid rgba(255, 255, 255, 0.12); + border-top-color: var(--accent-2); + box-shadow: 0 6px 18px rgba(28, 107, 255, 0.12); + animation: spin 0.9s linear infinite; +} + +.loading-text { + font-size: 16px; + color: var(--ink-muted); +} + +@keyframes spin { + to { + transform: rotate(360deg); + } +} + +/* How it works */ +.how-page { + display: grid; + gap: 28px; +} + +.how-hero { + display: grid; + gap: 10px; +} + +.eyebrow { + text-transform: uppercase; + letter-spacing: 0.18em; + font-size: 12px; + color: var(--ink-muted); +} + +.how-grid { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(220px, 1fr)); + gap: 18px; +} + +.how-card { + background: var(--paper-strong); + border: 1px solid var(--border); + padding: 16px; + border-radius: 16px; + box-shadow: 0 16px 40px rgba(0, 0, 0, 0.08); + display: grid; + gap: 8px; +} + +.how-title { + color: var(--accent-3); + font-weight: 600; +} + +.how-flow { + display: grid; + gap: 12px; +} + +.how-steps { + list-style: none; + display: grid; + gap: 10px; + padding: 0; +} + +.how-steps li { + padding: 12px 14px; + border-radius: 12px; + background: rgba(255, 255, 255, 0.06); + border: 1px solid var(--border); +} + +.how-callout { + border-left: 4px solid var(--accent); + padding: 16px 18px; + background: rgba(255, 255, 255, 0.06); + border-radius: 12px; + display: grid; + gap: 8px; +} diff --git a/frontend/app/how-it-works/page.tsx b/frontend/app/how-it-works/page.tsx new file mode 
100644 index 0000000..ca12c14 --- /dev/null +++ b/frontend/app/how-it-works/page.tsx @@ -0,0 +1,84 @@ +'use client' + +export default function HowItWorksPage() { + return ( +
+
+

How this works

+

Your request, step by step

+

+ Think of Magent as a status tracker. It checks a few helper apps that do different jobs, + then tells you where your request is stuck and what you can safely try next. +

+
+ +
+
+

Jellyseerr

+

The request box

+

+ This is where you ask for a movie or show. It records your request and keeps track of + approvals. +

+
+
+

Sonarr / Radarr

+

The librarian

+

+ These apps add the item to the library, decide what quality to grab, and look for the + files that match your request. +

+
+
+

Prowlarr

+

The search helper

+

+ This one checks your torrent sources and reports back what it found, or if nothing is + available yet. +

+
+
+

qBittorrent

+

The downloader

+

+ If a file is found, this app downloads it. Magent can tell if it is actively + downloading, stalled, or finished. +

+
+
+ +
+

The pipeline in plain English

+
    +
  1. + You request a title in Jellyseerr. +
  2. +
  3. + Sonarr/Radarr adds it to the library list and asks Prowlarr to search. +
  4. +
  5. + Prowlarr looks for sources and sends results back. +
  6. +
  7. + qBittorrent downloads the best match. +
  8. +
  9. + Sonarr/Radarr imports it into your library. +
  10. +
  11. + Jellyfin shows it when it is ready to watch. +
  12. +
+
+ +
+

Why Magent sometimes says "waiting"

+

+ If the search helper cannot find a match yet, Magent will say there is nothing to grab. + This does not mean something is broken. It usually means the release is not available + yet or your search sources do not have it. +

+
+
+ ) +} diff --git a/frontend/app/layout.tsx b/frontend/app/layout.tsx new file mode 100644 index 0000000..e87695a --- /dev/null +++ b/frontend/app/layout.tsx @@ -0,0 +1,43 @@ +import './globals.css' +import type { ReactNode } from 'react' +import HeaderActions from './ui/HeaderActions' +import HeaderIdentity from './ui/HeaderIdentity' +import ThemeToggle from './ui/ThemeToggle' +import BrandingFavicon from './ui/BrandingFavicon' +import BrandingLogo from './ui/BrandingLogo' + +export const metadata = { + title: 'Magent', + description: 'Request timeline and AI triage for media requests', +} + +export default function RootLayout({ children }: { children: ReactNode }) { + return ( + + + +
+
+ +
+ + +
+
+ +
+
+ {children} +
+ + + ) +} diff --git a/frontend/app/lib/auth.ts b/frontend/app/lib/auth.ts new file mode 100644 index 0000000..4e9bc55 --- /dev/null +++ b/frontend/app/lib/auth.ts @@ -0,0 +1,25 @@ +export const getApiBase = () => process.env.NEXT_PUBLIC_API_BASE ?? '/api' + +export const getToken = () => { + if (typeof window === 'undefined') return null + return window.localStorage.getItem('magent_token') +} + +export const setToken = (token: string) => { + if (typeof window === 'undefined') return + window.localStorage.setItem('magent_token', token) +} + +export const clearToken = () => { + if (typeof window === 'undefined') return + window.localStorage.removeItem('magent_token') +} + +export const authFetch = (input: RequestInfo | URL, init?: RequestInit) => { + const token = getToken() + const headers = new Headers(init?.headers || {}) + if (token) { + headers.set('Authorization', `Bearer ${token}`) + } + return fetch(input, { ...init, headers }) +} diff --git a/frontend/app/login/page.tsx b/frontend/app/login/page.tsx new file mode 100644 index 0000000..44564a6 --- /dev/null +++ b/frontend/app/login/page.tsx @@ -0,0 +1,91 @@ +'use client' + +import { useRouter } from 'next/navigation' +import { useState } from 'react' +import { getApiBase, setToken, clearToken } from '../lib/auth' +import BrandingLogo from '../ui/BrandingLogo' + +export default function LoginPage() { + const router = useRouter() + const [username, setUsername] = useState('') + const [password, setPassword] = useState('') + const [error, setError] = useState(null) + const [loading, setLoading] = useState(false) + + const submit = async (event: React.FormEvent, mode: 'local' | 'jellyfin') => { + event.preventDefault() + setError(null) + setLoading(true) + try { + clearToken() + const baseUrl = getApiBase() + const endpoint = mode === 'jellyfin' ? 
'/auth/jellyfin/login' : '/auth/login' + const body = new URLSearchParams({ username, password }) + const response = await fetch(`${baseUrl}${endpoint}`, { + method: 'POST', + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + body, + }) + if (!response.ok) { + throw new Error('Login failed') + } + const data = await response.json() + if (data?.access_token) { + setToken(data.access_token) + if (typeof window !== 'undefined') { + window.location.href = '/' + return + } + router.push('/') + return + } + throw new Error('Login failed') + } catch (err) { + console.error(err) + setError('Invalid username or password.') + } finally { + setLoading(false) + } + } + + return ( +
+ +

Sign in

+

Use your Jellyfin account, or sign in with Magent instead.

+
submit(event, 'jellyfin')} className="auth-form"> + + + {error &&
{error}
} +
+ +
+ +
+
+ ) +} diff --git a/frontend/app/page.tsx b/frontend/app/page.tsx new file mode 100644 index 0000000..ac85ce3 --- /dev/null +++ b/frontend/app/page.tsx @@ -0,0 +1,372 @@ +'use client' + +import { useRouter } from 'next/navigation' +import { useEffect, useState } from 'react' +import { authFetch, getApiBase, getToken, clearToken } from './lib/auth' + +export default function HomePage() { + const router = useRouter() + const [query, setQuery] = useState('') + const [recent, setRecent] = useState< + { + id: number + title: string + year?: number + statusLabel?: string + artwork?: { poster_url?: string } + }[] + >([]) + const [recentError, setRecentError] = useState(null) + const [recentLoading, setRecentLoading] = useState(false) + const [searchResults, setSearchResults] = useState< + { title: string; year?: number; type?: string; requestId?: number; statusLabel?: string }[] + >([]) + const [searchError, setSearchError] = useState(null) + const [role, setRole] = useState(null) + const [recentDays, setRecentDays] = useState(90) + const [authReady, setAuthReady] = useState(false) + const [servicesStatus, setServicesStatus] = useState< + { overall: string; services: { name: string; status: string; message?: string }[] } | null + >(null) + const [servicesLoading, setServicesLoading] = useState(false) + const [servicesError, setServicesError] = useState(null) + + const submit = (event: React.FormEvent) => { + event.preventDefault() + const trimmed = query.trim() + if (!trimmed) return + if (/^\d+$/.test(trimmed)) { + router.push(`/requests/${encodeURIComponent(trimmed)}`) + return + } + void runSearch(trimmed) + } + + useEffect(() => { + if (!getToken()) { + router.push('/login') + return + } + const load = async () => { + setRecentLoading(true) + setRecentError(null) + try { + const baseUrl = getApiBase() + const meResponse = await authFetch(`${baseUrl}/auth/me`) + if (!meResponse.ok) { + if (meResponse.status === 401) { + clearToken() + router.push('/login') + return + } 
+ throw new Error(`Auth failed: ${meResponse.status}`) + } + const me = await meResponse.json() + const userRole = me?.role ?? null + setRole(userRole) + setAuthReady(true) + const take = userRole === 'admin' ? 50 : 6 + const response = await authFetch( + `${baseUrl}/requests/recent?take=${take}&days=${recentDays}` + ) + if (!response.ok) { + if (response.status === 401) { + clearToken() + router.push('/login') + return + } + throw new Error(`Recent requests failed: ${response.status}`) + } + const data = await response.json() + if (Array.isArray(data?.results)) { + setRecent( + data.results + .filter((item: any) => item?.id) + .map((item: any) => { + const id = item.id + const rawTitle = item.title + const placeholder = + typeof rawTitle === 'string' && + rawTitle.trim().toLowerCase() === `request ${id}` + return { + id, + title: !rawTitle || placeholder ? `Request #${id}` : rawTitle, + year: item.year, + statusLabel: item.statusLabel, + artwork: item.artwork, + } + }) + ) + } + } catch (error) { + console.error(error) + setRecentError('Recent requests are not available right now.') + } finally { + setRecentLoading(false) + } + } + + load() + }, [recentDays]) + + useEffect(() => { + if (!authReady) { + return + } + const load = async () => { + setServicesLoading(true) + setServicesError(null) + try { + const baseUrl = getApiBase() + const response = await authFetch(`${baseUrl}/status/services`) + if (!response.ok) { + if (response.status === 401) { + clearToken() + router.push('/login') + return + } + throw new Error(`Service status failed: ${response.status}`) + } + const data = await response.json() + setServicesStatus(data) + } catch (error) { + console.error(error) + setServicesError('Service status is not available right now.') + } finally { + setServicesLoading(false) + } + } + + load() + const timer = setInterval(load, 30000) + return () => clearInterval(timer) + }, [authReady, router]) + + const runSearch = async (term: string) => { + try { + const baseUrl 
= getApiBase() + const response = await authFetch(`${baseUrl}/requests/search?query=${encodeURIComponent(term)}`) + if (!response.ok) { + if (response.status === 401) { + clearToken() + router.push('/login') + return + } + throw new Error(`Search failed: ${response.status}`) + } + const data = await response.json() + if (Array.isArray(data?.results)) { + setSearchResults( + data.results.map((item: any) => ({ + title: item.title, + year: item.year, + type: item.type, + requestId: item.requestId, + statusLabel: item.statusLabel, + })) + ) + setSearchError(null) + } + } catch (error) { + console.error(error) + setSearchError('Search failed. Try a request ID instead.') + setSearchResults([]) + } + } + + const resolveArtworkUrl = (url?: string | null) => { + if (!url) return null + return url.startsWith('http') ? url : `${getApiBase()}${url}` + } + + return ( +
+
+
+
+
+

System status

+ + {servicesLoading + ? 'Checking services...' + : servicesError + ? 'Status not available yet' + : servicesStatus?.overall === 'up' + ? 'Services are up and running' + : servicesStatus?.overall === 'down' + ? 'Something is down' + : 'Some services need attention'} + +
+
+ {(() => { + const order = [ + 'Jellyseerr', + 'Sonarr', + 'Radarr', + 'Prowlarr', + 'qBittorrent', + 'Jellyfin', + ] + const items = servicesStatus?.services ?? [] + return order.map((name) => { + const item = items.find((entry) => entry.name === name) + const status = item?.status ?? 'unknown' + return ( +
+ + {name} + + {status === 'up' + ? 'Up' + : status === 'down' + ? 'Down' + : status === 'degraded' + ? 'Needs attention' + : status === 'not_configured' + ? 'Not configured' + : 'Unknown'} + +
+ ) + }) + })()} +
+
+
+

{role === 'admin' ? 'All requests' : 'My recent requests'}

+ {authReady && ( + + )} +
+
+ {recentLoading ? ( +
+ + ) : recentError ? ( + + ) : recent.length === 0 ? ( + + ) : ( + recent.map((item) => ( + + )) + )} +
+
+ +
+
+ ) +} diff --git a/frontend/app/profile/page.tsx b/frontend/app/profile/page.tsx new file mode 100644 index 0000000..f1beac7 --- /dev/null +++ b/frontend/app/profile/page.tsx @@ -0,0 +1,126 @@ +'use client' + +import { useEffect, useState } from 'react' +import { useRouter } from 'next/navigation' +import { authFetch, clearToken, getApiBase, getToken } from '../lib/auth' + +type ProfileInfo = { + username: string + role: string + auth_provider: string +} + +export default function ProfilePage() { + const router = useRouter() + const [profile, setProfile] = useState(null) + const [currentPassword, setCurrentPassword] = useState('') + const [newPassword, setNewPassword] = useState('') + const [status, setStatus] = useState(null) + const [loading, setLoading] = useState(true) + + useEffect(() => { + if (!getToken()) { + router.push('/login') + return + } + const load = async () => { + try { + const baseUrl = getApiBase() + const response = await authFetch(`${baseUrl}/auth/me`) + if (!response.ok) { + clearToken() + router.push('/login') + return + } + const data = await response.json() + setProfile({ + username: data?.username ?? 'Unknown', + role: data?.role ?? 'user', + auth_provider: data?.auth_provider ?? 
'local', + }) + } catch (err) { + console.error(err) + setStatus('Could not load your profile.') + } finally { + setLoading(false) + } + } + void load() + }, [router]) + + const submit = async (event: React.FormEvent) => { + event.preventDefault() + setStatus(null) + if (!currentPassword || !newPassword) { + setStatus('Enter your current password and a new password.') + return + } + try { + const baseUrl = getApiBase() + const response = await authFetch(`${baseUrl}/auth/password`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + current_password: currentPassword, + new_password: newPassword, + }), + }) + if (!response.ok) { + const text = await response.text() + throw new Error(text || 'Update failed') + } + setCurrentPassword('') + setNewPassword('') + setStatus('Password updated.') + } catch (err) { + console.error(err) + setStatus('Could not update password. Check your current password.') + } + } + + if (loading) { + return
Loading profile...
+ } + + return ( +
+

My profile

+ {profile && ( +
+ Signed in as {profile.username} ({profile.role}). Login type:{' '} + {profile.auth_provider}. +
+ )} + {profile?.auth_provider !== 'local' ? ( +
+ Password changes are only available for local Magent accounts. +
+ ) : ( +
+ + + {status &&
{status}
} +
+ +
+
+ )} +
+ ) +} diff --git a/frontend/app/requests/[id]/page.tsx b/frontend/app/requests/[id]/page.tsx new file mode 100644 index 0000000..3966108 --- /dev/null +++ b/frontend/app/requests/[id]/page.tsx @@ -0,0 +1,666 @@ +'use client' + +import { useEffect, useState } from 'react' +import { useRouter } from 'next/navigation' +import { authFetch, clearToken, getApiBase, getToken } from '../../lib/auth' + +type TimelineHop = { + service: string + status: string + details?: Record + timestamp?: string +} + +type Snapshot = { + request_id: string + title: string + year?: number + request_type: string + state: string + state_reason?: string + timeline: TimelineHop[] + actions: { id: string; label: string; risk: string; requires_confirmation: boolean }[] + artwork?: { poster_url?: string; backdrop_url?: string } + raw?: Record +} + +type ReleaseOption = { + title?: string + indexer?: string + indexerId?: number + guid?: string + size?: number + seeders?: number + leechers?: number + protocol?: string + infoUrl?: string +} + +type SnapshotHistory = { + request_id: string + state: string + state_reason?: string + created_at: string +} + +type ActionHistory = { + request_id: string + action_id: string + label: string + status: string + message?: string + created_at: string +} + +const percentFromTorrent = (torrent: Record) => { + const progress = Number(torrent.progress) + if (!Number.isNaN(progress) && progress >= 0 && progress <= 1) { + return Math.round(progress * 100) + } + const size = Number(torrent.size) + const left = Number(torrent.amount_left) + if (!Number.isNaN(size) && size > 0 && !Number.isNaN(left)) { + return Math.round(((size - left) / size) * 100) + } + return null +} + +const formatBytes = (value?: number) => { + if (!value || Number.isNaN(value)) return 'n/a' + const units = ['B', 'KB', 'MB', 'GB', 'TB'] + let size = value + let idx = 0 + while (size >= 1024 && idx < units.length - 1) { + size /= 1024 + idx += 1 + } + return `${size.toFixed(1)} ${units[idx]}` +} 
+ +type SeasonStat = { + seasonNumber: number + available: number + missing: number +} + +const seasonStatsFromSeries = (series: Record): SeasonStat[] => { + const dateValue = series?.previousAiring ?? series?.firstAired + const airedAt = dateValue ? new Date(dateValue) : null + if (!airedAt || Number.isNaN(airedAt.valueOf()) || airedAt > new Date()) { + return [] + } + const seasons = Array.isArray(series?.seasons) ? series.seasons : [] + return seasons + .filter((season: Record) => season?.monitored === true) + .map((season: Record) => { + const stats = season.statistics + const available = stats && typeof stats === 'object' ? Number(stats.episodeFileCount) : NaN + const aired = stats && typeof stats === 'object' ? Number(stats.episodeCount) : NaN + const fallbackTotal = + stats && typeof stats === 'object' ? Number(stats.totalEpisodeCount) : NaN + const total = !Number.isNaN(aired) && aired > 0 ? aired : fallbackTotal + const seasonDateValue = stats?.previousAiring ?? stats?.firstAired ?? null + const seasonAiredAt = seasonDateValue ? 
new Date(seasonDateValue) : null + if ( + !Number.isNaN(available) && + !Number.isNaN(total) && + total > 0 && + (!seasonAiredAt || Number.isNaN(seasonAiredAt.valueOf()) || seasonAiredAt <= new Date()) + ) { + return { + seasonNumber: season.seasonNumber, + available, + missing: Math.max(0, total - available), + } + } + return null + }) + .filter((season): season is SeasonStat => season !== null && season.missing > 0) +} + +const friendlyState = (value: string) => { + const map: Record = { + REQUESTED: 'Waiting for approval', + APPROVED: 'Approved and queued', + NEEDS_ADD: 'Needs adding to the library', + ADDED_TO_ARR: 'Added to the library queue', + SEARCHING: 'Searching for releases', + GRABBED: 'Download queued', + DOWNLOADING: 'Downloading', + IMPORTING: 'Adding to your library', + COMPLETED: 'Ready to watch', + AVAILABLE: 'Ready to watch', + FAILED: 'Needs attention', + UNKNOWN: 'Status not available yet', + } + return map[value] ?? value.replaceAll('_', ' ').toLowerCase() +} + +const friendlyTimelineStatus = (service: string, status: string) => { + if (service === 'Jellyseerr') { + const map: Record = { + Pending: 'Waiting for approval', + Approved: 'Approved', + Declined: 'Declined', + Available: 'Ready to watch', + Processing: 'Working on it', + 'Partially Available': 'Partially ready', + 'Waiting for approval': 'Waiting for approval', + 'Working on it': 'Working on it', + 'Partially ready': 'Partially ready', + 'Ready to watch': 'Ready to watch', + } + return map[status] ?? status + } + if (service === 'Sonarr/Radarr') { + const map: Record = { + missing: 'Not added yet', + added: 'Added to the library queue', + searching: 'Searching for releases', + available: 'Ready to watch', + error: 'Needs attention', + unknown: 'Checking…', + } + return map[status] ?? 
status + } + if (service === 'Prowlarr') { + const map: Record = { + ok: 'Search sources OK', + issues: 'Search sources need attention', + error: 'Search sources unavailable', + } + return map[status] ?? status + } + if (service === 'qBittorrent') { + const map: Record = { + downloading: 'Downloading', + paused: 'Paused', + completed: 'Content downloaded', + idle: 'No active downloads', + error: 'Downloader error', + } + return map[status] ?? status + } + if (service === 'Jellyfin') { + const map: Record = { + available: 'Ready to watch', + missing: 'Not in Jellyfin yet', + error: 'Jellyfin unavailable', + } + return map[status] ?? status + } + return status +} + +export default function RequestTimelinePage({ params }: { params: { id: string } }) { + const router = useRouter() + const [snapshot, setSnapshot] = useState(null) + const [loading, setLoading] = useState(true) + const [showDetails, setShowDetails] = useState(false) + const [actionMessage, setActionMessage] = useState(null) + const [releaseOptions, setReleaseOptions] = useState([]) + const [searchRan, setSearchRan] = useState(false) + const [modalMessage, setModalMessage] = useState(null) + const [historySnapshots, setHistorySnapshots] = useState([]) + const [historyActions, setHistoryActions] = useState([]) + + useEffect(() => { + const load = async () => { + try { + if (!getToken()) { + router.push('/login') + return + } + const baseUrl = getApiBase() + const [snapshotResponse, historyResponse, actionsResponse] = await Promise.all([ + authFetch(`${baseUrl}/requests/${params.id}/snapshot`), + authFetch(`${baseUrl}/requests/${params.id}/history?limit=5`), + authFetch(`${baseUrl}/requests/${params.id}/actions?limit=5`), + ]) + + if (snapshotResponse.status === 401) { + clearToken() + router.push('/login') + return + } + const snapshotData = await snapshotResponse.json() + setSnapshot(snapshotData) + setReleaseOptions([]) + setSearchRan(false) + setModalMessage(null) + + if (historyResponse.ok) { + const 
historyData = await historyResponse.json() + if (Array.isArray(historyData.snapshots)) { + setHistorySnapshots(historyData.snapshots) + } + } + if (actionsResponse.ok) { + const actionsData = await actionsResponse.json() + if (Array.isArray(actionsData.actions)) { + setHistoryActions(actionsData.actions) + } + } + } catch (error) { + console.error(error) + } finally { + setLoading(false) + } + } + + load() + }, [params.id]) + + if (loading) { + return ( +
+
+ +
+ ) + } + + if (!snapshot) { + return
Could not load that request.
+ } + + const summary = + snapshot.state_reason ?? + `This request is currently ${snapshot.state.replaceAll('_', ' ').toLowerCase()}.` + + const downloadHop = snapshot.timeline.find((hop) => hop.service === 'qBittorrent') + const downloadState = downloadHop?.details?.summary ?? downloadHop?.status ?? 'Unknown' + const jellyfinAvailable = Boolean(snapshot.raw?.jellyfin?.available) + const pipelineSteps = [ + { key: 'Jellyseerr', label: 'Jellyseerr' }, + { key: 'Sonarr/Radarr', label: 'Library queue' }, + { key: 'Prowlarr', label: 'Search' }, + { key: 'qBittorrent', label: 'Download' }, + { key: 'Jellyfin', label: 'Jellyfin' }, + ] + const stageFromState = (state: string) => { + if (jellyfinAvailable || state === 'COMPLETED' || state === 'AVAILABLE') return 4 + if (state === 'DOWNLOADING' || state === 'IMPORTING') return 3 + if (state === 'GRABBED') return 2 + if (state === 'SEARCHING' || state === 'ADDED_TO_ARR' || state === 'NEEDS_ADD') return 1 + if (state === 'APPROVED' || state === 'REQUESTED') return 0 + return 1 + } + const activeStage = stageFromState(snapshot.state) + const extendedTimeline: TimelineHop[] = [ + ...snapshot.timeline, + { + service: 'Jellyfin', + status: jellyfinAvailable ? 'available' : 'missing', + details: snapshot.raw?.jellyfin ?? {}, + }, + ] + const jellyfinLink = snapshot.raw?.jellyfin?.link + const posterUrl = snapshot.artwork?.poster_url + const resolvedPoster = + posterUrl && posterUrl.startsWith('http') ? posterUrl : posterUrl ? `${getApiBase()}${posterUrl}` : null + + return ( +
+
+
+ {resolvedPoster && ( + {`${snapshot.title} + )} +
+

{snapshot.title}

+
{snapshot.request_type.toUpperCase()} {snapshot.year ?? ''}
+
+
+ {jellyfinAvailable && jellyfinLink && ( + + Open in Jellyfin + + )} +
+ +
+
+

Status

+

{friendlyState(snapshot.state)}

+
+
+

What this means

+

{summary}

+
+ {(actionMessage || (searchRan && releaseOptions.length === 0)) && ( +
+

Last action

+ {actionMessage &&

{actionMessage}

} + {searchRan && releaseOptions.length === 0 && ( +

Nothing to grab yet. We did not find a match on your torrent providers.

+ )} +
+ )} +
+

Current download state

+

{downloadState}

+
+
+

Next step

+

+ {snapshot.actions.length === 0 + ? 'Nothing to do right now.' + : snapshot.actions[0].label} +

+

+ Use the buttons below if you want to run a safe retry or a fix. +

+
+
+ +
+ +
+ +
+

Pipeline location

+
+ {pipelineSteps.map((step, index) => ( +
+
+ {step.label} +
+ ))} +
+

The glowing light shows where your request is right now.

+
+ +
+ {extendedTimeline.map((hop, index) => ( +
+
+
+
+ {hop.service} + {friendlyTimelineStatus(hop.service, hop.status)} +
+ {hop.service === 'Sonarr/Radarr' && hop.details?.series && (() => { + const seasons = seasonStatsFromSeries(hop.details.series) + if (seasons.length === 0) { + return
Up to date
+ } + return ( +
+
Seasons available vs missing
+
    + {seasons.map((season) => ( +
  • + Season {season.seasonNumber} + {season.available} available / {season.missing} missing +
  • + ))} +
+
+ ) + })()} + {hop.service === 'Sonarr/Radarr' && hop.details?.missingEpisodes && ( +
+
Missing episodes
+
    + {Object.entries(hop.details.missingEpisodes as Record).map( + ([seasonNumber, episodes]) => ( +
  • + Season {seasonNumber} + + {episodes.length + ? episodes.map((ep) => `E${ep}`).join(', ') + : 'Episode numbers unavailable'} + +
  • + ) + )} +
+
+ )} + {hop.service === 'Sonarr/Radarr' && hop.details?.note && ( +
{hop.details.note}
+ )} + {hop.service === 'qBittorrent' && + Array.isArray(hop.details?.torrents) && + hop.details.torrents.length > 0 && ( +
+
Downloads in qBittorrent
+
    + {hop.details.torrents.map((torrent: Record) => { + const percent = percentFromTorrent(torrent) + return ( +
  • + {torrent.name ?? 'Unknown item'} + {percent === null ? 'n/a' : `${percent}%`} +
  • + ) + })} +
+
+ )} + {showDetails && hop.details && ( +
{JSON.stringify(hop.details, null, 2)}
+ )} +
+
+ ))} +
+ +
+

Try a safe fix

+ {actionMessage &&
{actionMessage}
} +
+ {snapshot.actions.map((action) => ( + + ))} +
+ {releaseOptions.length > 0 && ( +
+
Download options found
+
    + {releaseOptions.map((release) => ( +
  • + + {release.title ?? 'Unknown option'}{' '} + {release.indexer ? `(${release.indexer})` : ''} + + {release.seeders ?? 0} seeders · {formatBytes(release.size)} + +
  • + ))} +
+
+ )} +
+ +
+

History

+
+
+

Recent status changes

+
    + {historySnapshots.length === 0 ? ( +
  • No history recorded yet.
  • + ) : ( + historySnapshots.map((entry) => ( +
  • + {entry.state.replaceAll('_', ' ')} + {entry.state_reason ?? 'No reason provided.'} +
  • + )) + )} +
+
+
+

Recent actions

+
    + {historyActions.length === 0 ? ( +
  • No actions recorded yet.
  • + ) : ( + historyActions.map((entry) => ( +
  • + {entry.label} + {entry.message ?? entry.status} +
  • + )) + )} +
+
+
+
+ + {modalMessage && ( +
+
+

Update

+

{modalMessage}

+ +
+
+ )} +
+ ) +} diff --git a/frontend/app/ui/AdminShell.tsx b/frontend/app/ui/AdminShell.tsx new file mode 100644 index 0000000..9aa240e --- /dev/null +++ b/frontend/app/ui/AdminShell.tsx @@ -0,0 +1,31 @@ +'use client' + +import type { ReactNode } from 'react' +import AdminSidebar from './AdminSidebar' + +type AdminShellProps = { + title: string + subtitle?: string + actions?: ReactNode + children: ReactNode +} + +export default function AdminShell({ title, subtitle, actions, children }: AdminShellProps) { + return ( +
+ +
+
+
+

{title}

+ {subtitle &&

{subtitle}

} +
+ {actions} +
+ {children} +
+
+ ) +} diff --git a/frontend/app/ui/AdminSidebar.tsx b/frontend/app/ui/AdminSidebar.tsx new file mode 100644 index 0000000..972ab8a --- /dev/null +++ b/frontend/app/ui/AdminSidebar.tsx @@ -0,0 +1,59 @@ +'use client' + +import { usePathname } from 'next/navigation' + +const NAV_GROUPS = [ + { + title: 'Services', + items: [ + { href: '/admin/jellyseerr', label: 'Jellyseerr' }, + { href: '/admin/jellyfin', label: 'Jellyfin' }, + { href: '/admin/sonarr', label: 'Sonarr' }, + { href: '/admin/radarr', label: 'Radarr' }, + { href: '/admin/prowlarr', label: 'Prowlarr' }, + { href: '/admin/qbittorrent', label: 'qBittorrent' }, + ], + }, + { + title: 'Requests', + items: [ + { href: '/admin/requests', label: 'Request syncing' }, + { href: '/admin/artwork', label: 'Artwork' }, + { href: '/admin/cache', label: 'Cache' }, + ], + }, + { + title: 'Admin', + items: [ + { href: '/users', label: 'Users' }, + { href: '/admin/logs', label: 'Activity log' }, + { href: '/admin/maintenance', label: 'Maintenance' }, + ], + }, +] + +export default function AdminSidebar() { + const pathname = usePathname() + return ( + + ) +} diff --git a/frontend/app/ui/BrandingFavicon.tsx b/frontend/app/ui/BrandingFavicon.tsx new file mode 100644 index 0000000..adf9e28 --- /dev/null +++ b/frontend/app/ui/BrandingFavicon.tsx @@ -0,0 +1,25 @@ +'use client' + +import { useEffect } from 'react' +import { getApiBase } from '../lib/auth' + +const STORAGE_KEY = 'branding_version' + +export default function BrandingFavicon() { + useEffect(() => { + const baseUrl = getApiBase() + const version = + (typeof window !== 'undefined' && window.localStorage.getItem(STORAGE_KEY)) || '' + const versionSuffix = version ? 
`?v=${encodeURIComponent(version)}` : '' + const href = `${baseUrl}/branding/favicon.ico${versionSuffix}` + let link = document.querySelector("link[rel='icon']") as HTMLLinkElement | null + if (!link) { + link = document.createElement('link') + link.rel = 'icon' + document.head.appendChild(link) + } + link.href = href + }, []) + + return null +} diff --git a/frontend/app/ui/BrandingLogo.tsx b/frontend/app/ui/BrandingLogo.tsx new file mode 100644 index 0000000..afc3148 --- /dev/null +++ b/frontend/app/ui/BrandingLogo.tsx @@ -0,0 +1,36 @@ +'use client' + +import { useEffect, useState } from 'react' +import { getApiBase } from '../lib/auth' + +const STORAGE_KEY = 'branding_version' + +type BrandingLogoProps = { + className?: string + alt?: string +} + +export default function BrandingLogo({ className, alt = 'Magent logo' }: BrandingLogoProps) { + const [src, setSrc] = useState(null) + + useEffect(() => { + const baseUrl = getApiBase() + const version = + (typeof window !== 'undefined' && window.localStorage.getItem(STORAGE_KEY)) || '' + const versionSuffix = version ? 
`?v=${encodeURIComponent(version)}` : '' + setSrc(`${baseUrl}/branding/logo.png${versionSuffix}`) + }, []) + + if (!src) { + return null + } + + return ( + {alt} setSrc(null)} + /> + ) +} diff --git a/frontend/app/ui/HeaderActions.tsx b/frontend/app/ui/HeaderActions.tsx new file mode 100644 index 0000000..2b915c7 --- /dev/null +++ b/frontend/app/ui/HeaderActions.tsx @@ -0,0 +1,58 @@ +'use client' + +import { useEffect, useState } from 'react' +import { authFetch, clearToken, getApiBase, getToken } from '../lib/auth' + +export default function HeaderActions() { + const [signedIn, setSignedIn] = useState(false) + const [role, setRole] = useState(null) + + useEffect(() => { + const token = getToken() + setSignedIn(Boolean(token)) + if (!token) { + return + } + const load = async () => { + try { + const baseUrl = getApiBase() + const response = await authFetch(`${baseUrl}/auth/me`) + if (!response.ok) { + clearToken() + setSignedIn(false) + setRole(null) + return + } + const data = await response.json() + setRole(data?.role ?? null) + } catch (err) { + console.error(err) + } + } + void load() + }, []) + + const signOut = () => { + clearToken() + setSignedIn(false) + if (typeof window !== 'undefined') { + window.location.href = '/login' + } + } + + return ( +
+ Requests + How it works + {signedIn && My profile} + {role === 'admin' && Settings} + {signedIn ? ( + + ) : ( + Sign in + )} +
+ ) +} diff --git a/frontend/app/ui/HeaderIdentity.tsx b/frontend/app/ui/HeaderIdentity.tsx new file mode 100644 index 0000000..cbb8054 --- /dev/null +++ b/frontend/app/ui/HeaderIdentity.tsx @@ -0,0 +1,53 @@ +'use client' + +import { useEffect, useState } from 'react' +import { authFetch, clearToken, getApiBase, getToken } from '../lib/auth' + +export default function HeaderIdentity() { + const [identity, setIdentity] = useState(null) + const [open, setOpen] = useState(false) + + useEffect(() => { + const token = getToken() + if (!token) { + setIdentity(null) + return + } + const load = async () => { + try { + const baseUrl = getApiBase() + const response = await authFetch(`${baseUrl}/auth/me`) + if (!response.ok) { + clearToken() + setIdentity(null) + return + } + const data = await response.json() + if (data?.username) { + setIdentity(`${data.username}${data.role ? ` (${data.role})` : ''}`) + } + } catch (err) { + console.error(err) + setIdentity(null) + } + } + void load() + }, []) + + if (!identity) { + return null + } + + return ( +
+ + {open && ( + + )} +
+ ) +} diff --git a/frontend/app/ui/ThemeToggle.tsx b/frontend/app/ui/ThemeToggle.tsx new file mode 100644 index 0000000..2b8ad8b --- /dev/null +++ b/frontend/app/ui/ThemeToggle.tsx @@ -0,0 +1,59 @@ +'use client' + +import { useEffect, useState } from 'react' + +const STORAGE_KEY = 'magent_theme' + +const getPreferredTheme = () => { + if (typeof window === 'undefined') return 'dark' + const stored = window.localStorage.getItem(STORAGE_KEY) + if (stored === 'light' || stored === 'dark') { + return stored + } + return window.matchMedia('(prefers-color-scheme: dark)').matches ? 'dark' : 'light' +} + +const applyTheme = (theme: string) => { + if (typeof document === 'undefined') return + document.documentElement.setAttribute('data-theme', theme) +} + +export default function ThemeToggle() { + const [theme, setTheme] = useState<'light' | 'dark'>('dark') + + useEffect(() => { + const preferred = getPreferredTheme() + setTheme(preferred) + applyTheme(preferred) + }, []) + + const toggle = () => { + const next = theme === 'dark' ? 
'light' : 'dark' + setTheme(next) + applyTheme(next) + if (typeof window !== 'undefined') { + window.localStorage.setItem(STORAGE_KEY, next) + } + } + + return ( + + ) +} diff --git a/frontend/app/users/page.tsx b/frontend/app/users/page.tsx new file mode 100644 index 0000000..d0a8bc7 --- /dev/null +++ b/frontend/app/users/page.tsx @@ -0,0 +1,228 @@ +'use client' + +import { useEffect, useState } from 'react' +import { useRouter } from 'next/navigation' +import { authFetch, clearToken, getApiBase, getToken } from '../lib/auth' +import AdminShell from '../ui/AdminShell' + +type AdminUser = { + username: string + role: string + authProvider?: string | null + lastLoginAt?: string | null + isBlocked?: boolean +} + +const formatLastLogin = (value?: string | null) => { + if (!value) return 'Never' + const date = new Date(value) + if (Number.isNaN(date.valueOf())) return value + return date.toLocaleString() +} + +export default function UsersPage() { + const router = useRouter() + const [users, setUsers] = useState([]) + const [error, setError] = useState(null) + const [loading, setLoading] = useState(true) + const [passwordInputs, setPasswordInputs] = useState>({}) + const [passwordStatus, setPasswordStatus] = useState>({}) + + const loadUsers = async () => { + try { + const baseUrl = getApiBase() + const response = await authFetch(`${baseUrl}/admin/users`) + if (!response.ok) { + if (response.status === 401) { + clearToken() + router.push('/login') + return + } + if (response.status === 403) { + router.push('/') + return + } + throw new Error('Could not load users.') + } + const data = await response.json() + if (Array.isArray(data?.users)) { + setUsers( + data.users.map((user: any) => ({ + username: user.username ?? 'Unknown', + role: user.role ?? 'user', + authProvider: user.auth_provider ?? 'local', + lastLoginAt: user.last_login_at ?? 
null, + isBlocked: Boolean(user.is_blocked), + })) + ) + } else { + setUsers([]) + } + setError(null) + } catch (err) { + console.error(err) + setError('Could not load user list.') + } finally { + setLoading(false) + } + } + + const toggleUserBlock = async (username: string, blocked: boolean) => { + try { + const baseUrl = getApiBase() + const response = await authFetch( + `${baseUrl}/admin/users/${encodeURIComponent(username)}/${blocked ? 'block' : 'unblock'}`, + { method: 'POST' } + ) + if (!response.ok) { + throw new Error('Update failed') + } + await loadUsers() + } catch (err) { + console.error(err) + setError('Could not update user access.') + } + } + + const updateUserRole = async (username: string, role: string) => { + try { + const baseUrl = getApiBase() + const response = await authFetch( + `${baseUrl}/admin/users/${encodeURIComponent(username)}/role`, + { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ role }), + } + ) + if (!response.ok) { + throw new Error('Update failed') + } + await loadUsers() + } catch (err) { + console.error(err) + setError('Could not update user role.') + } + } + + const updateUserPassword = async (username: string) => { + const newPassword = passwordInputs[username] || '' + if (!newPassword || newPassword.length < 8) { + setPasswordStatus((current) => ({ + ...current, + [username]: 'Password must be at least 8 characters.', + })) + return + } + try { + const baseUrl = getApiBase() + const response = await authFetch( + `${baseUrl}/admin/users/${encodeURIComponent(username)}/password`, + { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ password: newPassword }), + } + ) + if (!response.ok) { + const text = await response.text() + throw new Error(text || 'Update failed') + } + setPasswordInputs((current) => ({ ...current, [username]: '' })) + setPasswordStatus((current) => ({ + ...current, + [username]: 'Password updated.', + })) + } catch 
(err) { + console.error(err) + setPasswordStatus((current) => ({ + ...current, + [username]: 'Could not update password.', + })) + } + } + + useEffect(() => { + if (!getToken()) { + router.push('/login') + return + } + void loadUsers() + }, [router]) + + if (loading) { + return
Loading users...
+ } + + return ( + + Reload list + + } + > +
+ {error &&
{error}
} + {users.length === 0 ? ( +
No users found yet.
+ ) : ( +
+ {users.map((user) => ( +
+
+ {user.username} + Role: {user.role} + Login type: {user.authProvider || 'local'} + Last login: {formatLastLogin(user.lastLoginAt)} +
+
+ + +
+ {user.authProvider === 'local' && ( +
+ + setPasswordInputs((current) => ({ + ...current, + [user.username]: event.target.value, + })) + } + /> + +
+ )} + {passwordStatus[user.username] && ( +
{passwordStatus[user.username]}
+ )} +
+ ))} +
+ )} +
+
+ ) +} diff --git a/frontend/next-env.d.ts b/frontend/next-env.d.ts new file mode 100644 index 0000000..6080add --- /dev/null +++ b/frontend/next-env.d.ts @@ -0,0 +1,2 @@ +/// +/// diff --git a/frontend/next.config.js b/frontend/next.config.js new file mode 100644 index 0000000..c5e687e --- /dev/null +++ b/frontend/next.config.js @@ -0,0 +1,15 @@ +const backendUrl = process.env.BACKEND_INTERNAL_URL || 'http://backend:8000' + +/** @type {import('next').NextConfig} */ +const nextConfig = { + async rewrites() { + return [ + { + source: '/api/:path*', + destination: `${backendUrl}/:path*`, + }, + ] + }, +} + +module.exports = nextConfig diff --git a/frontend/package.json b/frontend/package.json new file mode 100644 index 0000000..8e03c28 --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,22 @@ +{ + "name": "magent-frontend", + "private": true, + "version": "0.1.0", + "scripts": { + "dev": "next dev", + "build": "next build", + "start": "next start", + "lint": "next lint" + }, + "dependencies": { + "next": "14.2.5", + "react": "18.3.1", + "react-dom": "18.3.1" + }, + "devDependencies": { + "typescript": "5.5.4", + "@types/node": "20.14.10", + "@types/react": "18.3.3", + "@types/react-dom": "18.3.0" + } +} diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json new file mode 100644 index 0000000..d15de4f --- /dev/null +++ b/frontend/tsconfig.json @@ -0,0 +1,19 @@ +{ + "compilerOptions": { + "target": "ES2019", + "lib": ["dom", "dom.iterable", "esnext"], + "allowJs": false, + "skipLibCheck": true, + "strict": true, + "noEmit": true, + "esModuleInterop": true, + "module": "esnext", + "moduleResolution": "bundler", + "resolveJsonModule": true, + "isolatedModules": true, + "jsx": "preserve", + "incremental": true + }, + "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx"], + "exclude": ["node_modules"] +}