Finalize diagnostics, logging controls, and email test support
This commit is contained in:
@@ -1,4 +1,7 @@
# Build metadata baked into the backend at release time.
# NOTE(review): reconstructed from an unresolved diff — the span held both the
# old and new BUILD_NUMBER; the newer value is kept. The CHANGELOG literal was
# broken across a raw newline and mixed "\\n" with "\n"; normalized to "\n".
BUILD_NUMBER = "0103262231"
CHANGELOG = '2026-01-22\n- Initial commit\n- Ignore build artifacts\n- Update README\n- Update README with Docker-first guide\n\n2026-01-23\n- Fix cache titles via Seerr media lookup\n- Split search actions and improve download options\n- Fallback manual grab to qBittorrent\n- Hide header actions when signed out\n- Add feedback form and webhook\n- Fix cache titles and move feedback link\n- Show available status on landing when in Jellyfin\n- Add default branding assets when missing\n- Use bundled branding assets\n- Remove password fields from users page\n- Add Docker Hub compose override\n- Fix backend Dockerfile paths for root context\n- Copy public assets into frontend image\n- Use backend branding assets for logo and favicon\n\n2026-01-24\n- Route grabs through Sonarr/Radarr only\n- Document fix buttons in how-it-works\n- Clarify how-it-works steps and fixes\n- Map Prowlarr releases to Arr indexers for manual grab\n- Improve request handling and qBittorrent categories\n\n2026-01-25\n- Add site banner, build number, and changelog\n- Automate build number tagging and sync\n- Improve mobile header layout\n- Move account actions into avatar menu\n- Add user stats and activity tracking\n- Add Jellyfin login cache and admin-only stats\n- Tidy request sync controls\n- Seed branding logo from bundled assets\n- Serve bundled branding assets by default\n- Harden request cache titles and cache-only reads\n- Build 2501262041\n\n2026-01-26\n- Fix cache title hydration\n- Fix sync progress bar animation\n\n2026-01-27\n- Add cache control artwork stats\n- Improve cache stats performance (build 271261145)\n- Fix backend cache stats import (build 271261149)\n- Clarify request sync settings (build 271261159)\n- Bump build number to 271261202\n- Fix request titles in snapshots (build 271261219)\n- Fix snapshot title fallback (build 271261228)\n- Add cache load spinner (build 271261238)\n- Bump build number (build 271261322)\n- Add service test buttons (build 271261335)\n- Fallback to TMDB when artwork cache fails (build 271261524)\n- Hydrate missing artwork from Seerr (build 271261539)\n\n2026-01-29\n- release: 2901262036\n- release: 2901262044\n- release: 2901262102\n- Hardcode build number in backend\n- Bake build number and changelog\n- Update full changelog\n- Tidy full changelog\n- Build 2901262240: cache users\n\n2026-01-30\n- Merge backend and frontend into one container'
@@ -1,11 +1,16 @@
|
||||
from typing import Any, Dict, Optional
|
||||
import logging
|
||||
import time
|
||||
import httpx
|
||||
|
||||
from ..logging_config import sanitize_headers, sanitize_value
|
||||
|
||||
|
||||
class ApiClient:
    """Thin async HTTP client for API-key-protected services.

    All verbs funnel through :meth:`_request`, which adds timing and
    secret-sanitized debug logging around a one-shot ``httpx.AsyncClient``
    call. An unconfigured client (no base URL) short-circuits to ``None``
    instead of raising, so optional integrations degrade gracefully.
    """

    def __init__(self, base_url: Optional[str], api_key: Optional[str] = None):
        # Strip the trailing slash so f"{base_url}{path}" joins cleanly.
        self.base_url = base_url.rstrip("/") if base_url else None
        self.api_key = api_key
        self.logger = logging.getLogger(f"{__name__}.{self.__class__.__name__}")

    def configured(self) -> bool:
        """Return True when a base URL has been provided."""
        return bool(self.base_url)

    def headers(self) -> Dict[str, str]:
        """Auth headers for outbound calls; empty when no API key is set."""
        return {"X-Api-Key": self.api_key} if self.api_key else {}

    async def _request(
        self,
        method: str,
        path: str,
        *,
        params: Optional[Dict[str, Any]] = None,
        payload: Optional[Dict[str, Any]] = None,
    ) -> Optional[Any]:
        """Send one request and return the decoded JSON body.

        Returns ``None`` when the client is unconfigured or the response has
        an empty body. HTTP/transport errors are logged (with duration) and
        re-raised for the caller to handle.
        """
        if not self.base_url:
            self.logger.warning("client request skipped method=%s path=%s reason=not-configured", method, path)
            return None
        url = f"{self.base_url}{path}"
        started_at = time.perf_counter()
        self.logger.debug(
            "outbound request started method=%s url=%s params=%s payload=%s headers=%s",
            method,
            url,
            sanitize_value(params),
            sanitize_value(payload),
            sanitize_headers(self.headers()),
        )
        try:
            # Fresh client per call: simple and leak-free for low call volume.
            async with httpx.AsyncClient(timeout=10.0) as client:
                response = await client.request(
                    method,
                    url,
                    headers=self.headers(),
                    params=params,
                    json=payload,
                )
                response.raise_for_status()
            duration_ms = round((time.perf_counter() - started_at) * 1000, 2)
            self.logger.debug(
                "outbound request completed method=%s url=%s status=%s duration_ms=%s",
                method,
                url,
                response.status_code,
                duration_ms,
            )
            # Some endpoints (e.g. DELETE) legitimately return no body.
            if not response.content:
                return None
            return response.json()
        except Exception:
            duration_ms = round((time.perf_counter() - started_at) * 1000, 2)
            self.logger.exception(
                "outbound request failed method=%s url=%s duration_ms=%s",
                method,
                url,
                duration_ms,
            )
            raise

    async def get(self, path: str, params: Optional[Dict[str, Any]] = None) -> Optional[Any]:
        return await self._request("GET", path, params=params)

    async def post(self, path: str, payload: Optional[Dict[str, Any]] = None) -> Optional[Any]:
        return await self._request("POST", path, payload=payload)

    async def put(self, path: str, payload: Optional[Dict[str, Any]] = None) -> Optional[Any]:
        return await self._request("PUT", path, payload=payload)

    async def delete(self, path: str) -> Optional[Any]:
        return await self._request("DELETE", path)
|
||||
|
||||
@@ -25,6 +25,18 @@ class Settings(BaseSettings):
|
||||
admin_password: str = Field(default="adminadmin", validation_alias=AliasChoices("ADMIN_PASSWORD"))
|
||||
log_level: str = Field(default="INFO", validation_alias=AliasChoices("LOG_LEVEL"))
|
||||
log_file: str = Field(default="data/magent.log", validation_alias=AliasChoices("LOG_FILE"))
|
||||
log_file_max_bytes: int = Field(
|
||||
default=20_000_000, validation_alias=AliasChoices("LOG_FILE_MAX_BYTES")
|
||||
)
|
||||
log_file_backup_count: int = Field(
|
||||
default=10, validation_alias=AliasChoices("LOG_FILE_BACKUP_COUNT")
|
||||
)
|
||||
log_http_client_level: str = Field(
|
||||
default="INFO", validation_alias=AliasChoices("LOG_HTTP_CLIENT_LEVEL")
|
||||
)
|
||||
log_background_sync_level: str = Field(
|
||||
default="INFO", validation_alias=AliasChoices("LOG_BACKGROUND_SYNC_LEVEL")
|
||||
)
|
||||
requests_sync_ttl_minutes: int = Field(
|
||||
default=1440, validation_alias=AliasChoices("REQUESTS_SYNC_TTL_MINUTES")
|
||||
)
|
||||
|
||||
@@ -599,7 +599,20 @@ def create_user_if_missing(
|
||||
created_at if invited_by_code else None,
|
||||
),
|
||||
)
|
||||
return cursor.rowcount > 0
|
||||
created = cursor.rowcount > 0
|
||||
if created:
|
||||
logger.info(
|
||||
"user created-if-missing username=%s role=%s auth_provider=%s jellyseerr_user_id=%s profile_id=%s expires_at=%s",
|
||||
username,
|
||||
role,
|
||||
auth_provider,
|
||||
jellyseerr_user_id,
|
||||
profile_id,
|
||||
expires_at,
|
||||
)
|
||||
else:
|
||||
logger.debug("user create-if-missing skipped existing username=%s", username)
|
||||
return created
|
||||
|
||||
|
||||
def get_user_by_username(username: str) -> Optional[Dict[str, Any]]:
|
||||
@@ -809,6 +822,7 @@ def set_user_blocked(username: str, blocked: bool) -> None:
|
||||
""",
|
||||
(1 if blocked else 0, username),
|
||||
)
|
||||
logger.info("user blocked state updated username=%s blocked=%s", username, blocked)
|
||||
|
||||
|
||||
def delete_user_by_username(username: str) -> bool:
|
||||
@@ -819,7 +833,9 @@ def delete_user_by_username(username: str) -> bool:
|
||||
""",
|
||||
(username,),
|
||||
)
|
||||
return cursor.rowcount > 0
|
||||
deleted = cursor.rowcount > 0
|
||||
logger.warning("user delete username=%s deleted=%s", username, deleted)
|
||||
return deleted
|
||||
|
||||
|
||||
def delete_user_activity_by_username(username: str) -> int:
|
||||
@@ -855,6 +871,7 @@ def set_user_role(username: str, role: str) -> None:
|
||||
""",
|
||||
(role, username),
|
||||
)
|
||||
logger.info("user role updated username=%s role=%s", username, role)
|
||||
|
||||
|
||||
def set_user_auto_search_enabled(username: str, enabled: bool) -> None:
|
||||
@@ -865,6 +882,7 @@ def set_user_auto_search_enabled(username: str, enabled: bool) -> None:
|
||||
""",
|
||||
(1 if enabled else 0, username),
|
||||
)
|
||||
logger.info("user auto-search updated username=%s enabled=%s", username, enabled)
|
||||
|
||||
|
||||
def set_user_invite_management_enabled(username: str, enabled: bool) -> None:
|
||||
@@ -875,6 +893,7 @@ def set_user_invite_management_enabled(username: str, enabled: bool) -> None:
|
||||
""",
|
||||
(1 if enabled else 0, username),
|
||||
)
|
||||
logger.info("user invite-management updated username=%s enabled=%s", username, enabled)
|
||||
|
||||
|
||||
def set_auto_search_enabled_for_non_admin_users(enabled: bool) -> int:
|
||||
@@ -896,6 +915,11 @@ def set_invite_management_enabled_for_non_admin_users(enabled: bool) -> int:
|
||||
""",
|
||||
(1 if enabled else 0,),
|
||||
)
|
||||
logger.info(
|
||||
"bulk invite-management updated non_admin_users=%s enabled=%s",
|
||||
cursor.rowcount,
|
||||
enabled,
|
||||
)
|
||||
return cursor.rowcount
|
||||
|
||||
|
||||
@@ -907,6 +931,7 @@ def set_user_profile_id(username: str, profile_id: Optional[int]) -> None:
|
||||
""",
|
||||
(profile_id, username),
|
||||
)
|
||||
logger.info("user profile assignment updated username=%s profile_id=%s", username, profile_id)
|
||||
|
||||
|
||||
def set_user_expires_at(username: str, expires_at: Optional[str]) -> None:
|
||||
@@ -917,6 +942,7 @@ def set_user_expires_at(username: str, expires_at: Optional[str]) -> None:
|
||||
""",
|
||||
(expires_at, username),
|
||||
)
|
||||
logger.info("user expiry updated username=%s expires_at=%s", username, expires_at)
|
||||
|
||||
|
||||
def _row_to_user_profile(row: Any) -> Dict[str, Any]:
|
||||
@@ -1162,6 +1188,18 @@ def create_signup_invite(
|
||||
),
|
||||
)
|
||||
invite_id = int(cursor.lastrowid)
|
||||
logger.info(
|
||||
"signup invite created invite_id=%s code=%s role=%s profile_id=%s max_uses=%s enabled=%s expires_at=%s recipient_email=%s created_by=%s",
|
||||
invite_id,
|
||||
code,
|
||||
role,
|
||||
profile_id,
|
||||
max_uses,
|
||||
enabled,
|
||||
expires_at,
|
||||
recipient_email,
|
||||
created_by,
|
||||
)
|
||||
invite = get_signup_invite_by_id(invite_id)
|
||||
if not invite:
|
||||
raise RuntimeError("Invite creation failed")
|
||||
|
||||
@@ -1,10 +1,148 @@
|
||||
import contextvars
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from logging.handlers import RotatingFileHandler
|
||||
from typing import Optional
|
||||
from typing import Any, Mapping, Optional
|
||||
from urllib.parse import parse_qs
|
||||
|
||||
REQUEST_ID_CONTEXT: contextvars.ContextVar[str] = contextvars.ContextVar(
|
||||
"magent_request_id", default="-"
|
||||
)
|
||||
|
||||
_SENSITIVE_KEYWORDS = (
|
||||
"api_key",
|
||||
"authorization",
|
||||
"cert",
|
||||
"cookie",
|
||||
"jwt",
|
||||
"key",
|
||||
"pass",
|
||||
"password",
|
||||
"pem",
|
||||
"private",
|
||||
"secret",
|
||||
"session",
|
||||
"signature",
|
||||
"token",
|
||||
)
|
||||
_MAX_BODY_BYTES = 4096
|
||||
|
||||
|
||||
def configure_logging(log_level: Optional[str], log_file: Optional[str]) -> None:
|
||||
class RequestContextFilter(logging.Filter):
|
||||
def filter(self, record: logging.LogRecord) -> bool:
|
||||
record.request_id = REQUEST_ID_CONTEXT.get("-")
|
||||
return True
|
||||
|
||||
|
||||
def bind_request_id(request_id: str) -> contextvars.Token[str]:
|
||||
return REQUEST_ID_CONTEXT.set(request_id or "-")
|
||||
|
||||
|
||||
def reset_request_id(token: contextvars.Token[str]) -> None:
|
||||
REQUEST_ID_CONTEXT.reset(token)
|
||||
|
||||
|
||||
def current_request_id() -> str:
|
||||
return REQUEST_ID_CONTEXT.get("-")
|
||||
|
||||
|
||||
def _is_sensitive_key(key: str) -> bool:
|
||||
lowered = key.strip().lower()
|
||||
return any(marker in lowered for marker in _SENSITIVE_KEYWORDS)
|
||||
|
||||
|
||||
def _redact_scalar(value: Any) -> Any:
|
||||
if value is None or isinstance(value, (int, float, bool)):
|
||||
return value
|
||||
text = str(value)
|
||||
if len(text) <= 4:
|
||||
return "***"
|
||||
return f"{text[:2]}***{text[-2:]}"
|
||||
|
||||
|
||||
def sanitize_value(value: Any, *, key_hint: Optional[str] = None, depth: int = 0) -> Any:
|
||||
if key_hint and _is_sensitive_key(key_hint):
|
||||
return _redact_scalar(value)
|
||||
if value is None or isinstance(value, (bool, int, float)):
|
||||
return value
|
||||
if isinstance(value, bytes):
|
||||
return f"<bytes:{len(value)}>"
|
||||
if isinstance(value, str):
|
||||
return value if len(value) <= 512 else f"{value[:509]}..."
|
||||
if depth >= 3:
|
||||
return f"<{type(value).__name__}>"
|
||||
if isinstance(value, Mapping):
|
||||
return {
|
||||
str(key): sanitize_value(item, key_hint=str(key), depth=depth + 1)
|
||||
for key, item in value.items()
|
||||
}
|
||||
if isinstance(value, (list, tuple, set)):
|
||||
return [sanitize_value(item, depth=depth + 1) for item in list(value)[:20]]
|
||||
if hasattr(value, "model_dump"):
|
||||
try:
|
||||
return sanitize_value(value.model_dump(), depth=depth + 1)
|
||||
except Exception:
|
||||
return f"<{type(value).__name__}>"
|
||||
return str(value)
|
||||
|
||||
|
||||
def sanitize_headers(headers: Mapping[str, Any]) -> dict[str, Any]:
|
||||
return {
|
||||
str(key).lower(): sanitize_value(value, key_hint=str(key))
|
||||
for key, value in headers.items()
|
||||
}
|
||||
|
||||
|
||||
def summarize_http_body(body: bytes, content_type: Optional[str]) -> Any:
|
||||
if not body:
|
||||
return None
|
||||
normalized = (content_type or "").split(";")[0].strip().lower()
|
||||
if normalized == "application/json":
|
||||
preview = body[:_MAX_BODY_BYTES]
|
||||
try:
|
||||
payload = json.loads(preview.decode("utf-8"))
|
||||
summary = sanitize_value(payload)
|
||||
if len(body) > _MAX_BODY_BYTES:
|
||||
return {"truncated": True, "bytes": len(body), "payload": summary}
|
||||
return summary
|
||||
except Exception:
|
||||
pass
|
||||
if normalized == "application/x-www-form-urlencoded":
|
||||
try:
|
||||
parsed = parse_qs(body.decode("utf-8"), keep_blank_values=True)
|
||||
compact = {
|
||||
key: value[0] if len(value) == 1 else value
|
||||
for key, value in parsed.items()
|
||||
}
|
||||
return sanitize_value(compact)
|
||||
except Exception:
|
||||
pass
|
||||
if normalized.startswith("multipart/"):
|
||||
return {"content_type": normalized, "bytes": len(body)}
|
||||
preview = body[: min(len(body), 256)].decode("utf-8", errors="replace")
|
||||
return {
|
||||
"content_type": normalized or "unknown",
|
||||
"bytes": len(body),
|
||||
"preview": preview if len(body) <= 256 else f"{preview}...",
|
||||
}
|
||||
|
||||
|
||||
def _coerce_level(level_name: Optional[str], fallback: int) -> int:
|
||||
if not level_name:
|
||||
return fallback
|
||||
return getattr(logging, str(level_name).upper(), fallback)
|
||||
|
||||
|
||||
def configure_logging(
|
||||
log_level: Optional[str],
|
||||
log_file: Optional[str],
|
||||
*,
|
||||
log_file_max_bytes: int = 20_000_000,
|
||||
log_file_backup_count: int = 10,
|
||||
log_http_client_level: Optional[str] = "INFO",
|
||||
log_background_sync_level: Optional[str] = "INFO",
|
||||
) -> None:
|
||||
level_name = (log_level or "INFO").upper()
|
||||
level = getattr(logging, level_name, logging.INFO)
|
||||
|
||||
@@ -18,15 +156,20 @@ def configure_logging(log_level: Optional[str], log_file: Optional[str]) -> None
|
||||
log_path = os.path.join(os.getcwd(), log_path)
|
||||
os.makedirs(os.path.dirname(log_path), exist_ok=True)
|
||||
file_handler = RotatingFileHandler(
|
||||
log_path, maxBytes=2_000_000, backupCount=3, encoding="utf-8"
|
||||
log_path,
|
||||
maxBytes=max(1_000_000, int(log_file_max_bytes or 20_000_000)),
|
||||
backupCount=max(1, int(log_file_backup_count or 10)),
|
||||
encoding="utf-8",
|
||||
)
|
||||
handlers.append(file_handler)
|
||||
|
||||
context_filter = RequestContextFilter()
|
||||
formatter = logging.Formatter(
|
||||
fmt="%(asctime)s | %(levelname)s | %(name)s | %(message)s",
|
||||
fmt="%(asctime)s | %(levelname)s | %(name)s | request_id=%(request_id)s | %(message)s",
|
||||
datefmt="%Y-%m-%d %H:%M:%S",
|
||||
)
|
||||
for handler in handlers:
|
||||
handler.addFilter(context_filter)
|
||||
handler.setFormatter(formatter)
|
||||
|
||||
root = logging.getLogger()
|
||||
@@ -38,4 +181,10 @@ def configure_logging(log_level: Optional[str], log_file: Optional[str]) -> None
|
||||
|
||||
logging.getLogger("uvicorn").setLevel(level)
|
||||
logging.getLogger("uvicorn.error").setLevel(level)
|
||||
logging.getLogger("uvicorn.access").setLevel(level)
|
||||
logging.getLogger("uvicorn.access").setLevel(logging.WARNING)
|
||||
http_client_level = _coerce_level(log_http_client_level, logging.DEBUG)
|
||||
background_sync_level = _coerce_level(log_background_sync_level, logging.INFO)
|
||||
logging.getLogger("app.clients.base").setLevel(http_client_level)
|
||||
logging.getLogger("app.routers.requests").setLevel(background_sync_level)
|
||||
logging.getLogger("httpx").setLevel(logging.WARNING if level > logging.DEBUG else logging.INFO)
|
||||
logging.getLogger("httpcore").setLevel(logging.WARNING)
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import time
|
||||
import uuid
|
||||
from typing import Awaitable, Callable
|
||||
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
@@ -21,9 +25,19 @@ from .routers.feedback import router as feedback_router
|
||||
from .routers.site import router as site_router
|
||||
from .routers.events import router as events_router
|
||||
from .services.jellyfin_sync import run_daily_jellyfin_sync
|
||||
from .logging_config import configure_logging
|
||||
from .logging_config import (
|
||||
bind_request_id,
|
||||
configure_logging,
|
||||
reset_request_id,
|
||||
sanitize_headers,
|
||||
sanitize_value,
|
||||
summarize_http_body,
|
||||
)
|
||||
from .runtime import get_runtime_settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
_background_tasks: list[asyncio.Task[None]] = []
|
||||
|
||||
app = FastAPI(
|
||||
title=settings.app_name,
|
||||
docs_url="/docs" if settings.api_docs_enabled else None,
|
||||
@@ -41,8 +55,56 @@ app.add_middleware(
|
||||
|
||||
|
||||
@app.middleware("http")
|
||||
async def add_security_headers(request: Request, call_next):
|
||||
response = await call_next(request)
|
||||
async def log_requests_and_add_security_headers(request: Request, call_next):
|
||||
request_id = request.headers.get("X-Request-ID") or uuid.uuid4().hex[:12]
|
||||
token = bind_request_id(request_id)
|
||||
request.state.request_id = request_id
|
||||
started_at = time.perf_counter()
|
||||
body = await request.body()
|
||||
body_summary = summarize_http_body(body, request.headers.get("content-type"))
|
||||
|
||||
async def receive() -> dict:
|
||||
return {"type": "http.request", "body": body, "more_body": False}
|
||||
|
||||
request._receive = receive
|
||||
logger.info(
|
||||
"request started method=%s path=%s query=%s client=%s headers=%s body=%s",
|
||||
request.method,
|
||||
request.url.path,
|
||||
sanitize_value(dict(request.query_params)),
|
||||
request.client.host if request.client else "-",
|
||||
sanitize_headers(
|
||||
{
|
||||
key: value
|
||||
for key, value in request.headers.items()
|
||||
if key.lower()
|
||||
in {
|
||||
"content-type",
|
||||
"content-length",
|
||||
"user-agent",
|
||||
"x-forwarded-for",
|
||||
"x-forwarded-proto",
|
||||
"x-request-id",
|
||||
}
|
||||
}
|
||||
),
|
||||
body_summary,
|
||||
)
|
||||
try:
|
||||
response = await call_next(request)
|
||||
except Exception:
|
||||
duration_ms = round((time.perf_counter() - started_at) * 1000, 2)
|
||||
logger.exception(
|
||||
"request failed method=%s path=%s duration_ms=%s",
|
||||
request.method,
|
||||
request.url.path,
|
||||
duration_ms,
|
||||
)
|
||||
reset_request_id(token)
|
||||
raise
|
||||
|
||||
duration_ms = round((time.perf_counter() - started_at) * 1000, 2)
|
||||
response.headers.setdefault("X-Request-ID", request_id)
|
||||
response.headers.setdefault("X-Content-Type-Options", "nosniff")
|
||||
response.headers.setdefault("X-Frame-Options", "DENY")
|
||||
response.headers.setdefault("Referrer-Policy", "no-referrer")
|
||||
@@ -53,6 +115,21 @@ async def add_security_headers(request: Request, call_next):
|
||||
"Content-Security-Policy",
|
||||
"default-src 'none'; frame-ancestors 'none'; base-uri 'none'",
|
||||
)
|
||||
logger.info(
|
||||
"request completed method=%s path=%s status=%s duration_ms=%s response_headers=%s",
|
||||
request.method,
|
||||
request.url.path,
|
||||
response.status_code,
|
||||
duration_ms,
|
||||
sanitize_headers(
|
||||
{
|
||||
key: value
|
||||
for key, value in response.headers.items()
|
||||
if key.lower() in {"content-type", "content-length", "x-request-id"}
|
||||
}
|
||||
),
|
||||
)
|
||||
reset_request_id(token)
|
||||
return response
|
||||
|
||||
|
||||
@@ -60,16 +137,69 @@ async def add_security_headers(request: Request, call_next):
|
||||
async def health() -> dict:
    """Liveness probe: always reports the service as up."""
    status = {"status": "ok"}
    return status
|
||||
|
||||
|
||||
async def _run_background_task(
    name: str, coroutine_factory: Callable[[], Awaitable[None]]
) -> None:
    """Run one background coroutine under a task-scoped request id.

    Logs the full lifecycle (started / exited / cancelled / crashed) and
    re-raises cancellations and crashes so the task's outcome is visible.
    """
    token = bind_request_id(f"task-{name}")
    logger.info("background task started task=%s", name)
    try:
        await coroutine_factory()
    except asyncio.CancelledError:
        logger.info("background task cancelled task=%s", name)
        raise
    except Exception:
        logger.exception("background task crashed task=%s", name)
        raise
    else:
        # These loops are expected to run forever; a clean return is notable.
        logger.warning("background task exited task=%s", name)
    finally:
        reset_request_id(token)
||||
|
||||
|
||||
def _launch_background_task(name: str, coroutine_factory: Callable[[], Awaitable[None]]) -> None:
    """Schedule a named, supervised background task.

    The task object is retained in ``_background_tasks`` so it is never
    garbage-collected while still running.
    """
    supervised = _run_background_task(name, coroutine_factory)
    task = asyncio.create_task(supervised, name=f"magent:{name}")
    _background_tasks.append(task)
|
||||
|
||||
|
||||
@app.on_event("startup")
async def startup() -> None:
    """Configure logging, initialize the DB, apply runtime overrides, and
    launch the recurring background jobs (each launched exactly once).

    NOTE(review): reconstructed from an unresolved diff — the span contained
    both the old bare ``asyncio.create_task`` launches and their
    ``_launch_background_task`` replacements; keeping both would start every
    background loop twice. Only the supervised launches are kept.
    """
    # Bootstrap logging from static settings so startup itself is logged.
    configure_logging(
        settings.log_level,
        settings.log_file,
        log_file_max_bytes=settings.log_file_max_bytes,
        log_file_backup_count=settings.log_file_backup_count,
        log_http_client_level=settings.log_http_client_level,
        log_background_sync_level=settings.log_background_sync_level,
    )
    logger.info("startup begin app=%s build=%s", settings.app_name, settings.site_build_number)
    init_db()
    # Re-apply logging with DB-backed runtime overrides once the DB exists.
    runtime = get_runtime_settings()
    configure_logging(
        runtime.log_level,
        runtime.log_file,
        log_file_max_bytes=runtime.log_file_max_bytes,
        log_file_backup_count=runtime.log_file_backup_count,
        log_http_client_level=runtime.log_http_client_level,
        log_background_sync_level=runtime.log_background_sync_level,
    )
    logger.info(
        "runtime settings applied log_level=%s log_file=%s log_file_max_bytes=%s log_file_backup_count=%s log_http_client_level=%s log_background_sync_level=%s request_source=%s",
        runtime.log_level,
        runtime.log_file,
        runtime.log_file_max_bytes,
        runtime.log_file_backup_count,
        runtime.log_http_client_level,
        runtime.log_background_sync_level,
        runtime.requests_data_source,
    )
    # Supervised launches: lifecycle logging plus a retained task reference.
    _launch_background_task("jellyfin-sync", run_daily_jellyfin_sync)
    _launch_background_task("requests-warmup", startup_warmup_requests_cache)
    _launch_background_task("requests-delta-loop", run_requests_delta_loop)
    _launch_background_task("requests-full-sync", run_daily_requests_full_sync)
    _launch_background_task("db-cleanup", run_daily_db_cleanup)
    logger.info("startup complete")
|
||||
|
||||
|
||||
app.include_router(requests_router)
|
||||
|
||||
@@ -84,9 +84,11 @@ from ..services.invite_email import (
|
||||
get_invite_email_templates,
|
||||
reset_invite_email_template,
|
||||
save_invite_email_template,
|
||||
send_test_email,
|
||||
send_templated_email,
|
||||
smtp_email_config_ready,
|
||||
)
|
||||
from ..services.diagnostics import get_diagnostics_catalog, run_diagnostics
|
||||
import logging
|
||||
from ..logging_config import configure_logging
|
||||
from ..routers import requests as requests_router
|
||||
@@ -192,6 +194,10 @@ SETTING_KEYS: List[str] = [
|
||||
"qbittorrent_password",
|
||||
"log_level",
|
||||
"log_file",
|
||||
"log_file_max_bytes",
|
||||
"log_file_backup_count",
|
||||
"log_http_client_level",
|
||||
"log_background_sync_level",
|
||||
"requests_sync_ttl_minutes",
|
||||
"requests_poll_interval_seconds",
|
||||
"requests_delta_sync_interval_minutes",
|
||||
@@ -609,6 +615,7 @@ async def list_settings() -> Dict[str, Any]:
|
||||
async def update_settings(payload: Dict[str, Any]) -> Dict[str, Any]:
|
||||
updates = 0
|
||||
touched_logging = False
|
||||
changed_keys: List[str] = []
|
||||
for key, value in payload.items():
|
||||
if key not in SETTING_KEYS:
|
||||
raise HTTPException(status_code=400, detail=f"Unknown setting: {key}")
|
||||
@@ -617,6 +624,7 @@ async def update_settings(payload: Dict[str, Any]) -> Dict[str, Any]:
|
||||
if isinstance(value, str) and value.strip() == "":
|
||||
delete_setting(key)
|
||||
updates += 1
|
||||
changed_keys.append(key)
|
||||
continue
|
||||
value_to_store = str(value).strip() if isinstance(value, str) else str(value)
|
||||
if key in URL_SETTING_KEYS and value_to_store:
|
||||
@@ -627,14 +635,79 @@ async def update_settings(payload: Dict[str, Any]) -> Dict[str, Any]:
|
||||
raise HTTPException(status_code=400, detail=f"{friendly_key}: {exc}") from exc
|
||||
set_setting(key, value_to_store)
|
||||
updates += 1
|
||||
if key in {"log_level", "log_file"}:
|
||||
changed_keys.append(key)
|
||||
if key in {"log_level", "log_file", "log_file_max_bytes", "log_file_backup_count", "log_http_client_level", "log_background_sync_level"}:
|
||||
touched_logging = True
|
||||
if touched_logging:
|
||||
runtime = get_runtime_settings()
|
||||
configure_logging(runtime.log_level, runtime.log_file)
|
||||
configure_logging(
|
||||
runtime.log_level,
|
||||
runtime.log_file,
|
||||
log_file_max_bytes=runtime.log_file_max_bytes,
|
||||
log_file_backup_count=runtime.log_file_backup_count,
|
||||
log_http_client_level=runtime.log_http_client_level,
|
||||
log_background_sync_level=runtime.log_background_sync_level,
|
||||
)
|
||||
logger.info("Admin updated settings: count=%s keys=%s", updates, changed_keys)
|
||||
return {"status": "ok", "updated": updates}
|
||||
|
||||
|
||||
@router.post("/settings/test/email")
async def test_email_settings(request: Request) -> Dict[str, Any]:
    """Send a test email through the configured SMTP settings.

    An optional ``recipient_email`` may arrive as JSON or form data; any
    body-parsing failure is ignored and the default recipient is used
    (best-effort by design). SMTP failures map to HTTP 502.
    """
    recipient_email = None
    media_type = (request.headers.get("content-type") or "").split(";", 1)[0].strip().lower()
    try:
        if media_type == "application/json":
            body = await request.json()
            candidate = body.get("recipient_email") if isinstance(body, dict) else None
            if isinstance(candidate, str):
                recipient_email = candidate
        elif media_type in {
            "application/x-www-form-urlencoded",
            "multipart/form-data",
        }:
            form = await request.form()
            candidate = form.get("recipient_email")
            if isinstance(candidate, str):
                recipient_email = candidate
    except Exception:
        recipient_email = None
    try:
        result = await send_test_email(recipient_email=recipient_email)
    except RuntimeError as exc:
        raise HTTPException(status_code=502, detail=str(exc)) from exc
    logger.info("Admin triggered SMTP test: recipient=%s", result.get("recipient_email"))
    return {"status": "ok", **result}
|
||||
|
||||
|
||||
@router.get("/diagnostics")
async def diagnostics_catalog() -> Dict[str, Any]:
    """Return the catalog of available diagnostic checks."""
    catalog = get_diagnostics_catalog()
    return {"status": "ok", **catalog}
|
||||
|
||||
|
||||
@router.post("/diagnostics/run")
async def diagnostics_run(payload: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
    """Run diagnostics, optionally limited to specific checks.

    ``keys``: optional list of diagnostic keys (blank entries dropped);
    ``recipient_email``: optional address to send the report to. Invalid
    shapes map to HTTP 400.
    """
    keys: Optional[List[str]] = None
    recipient_email: Optional[str] = None
    if payload is not None:
        raw_keys = payload.get("keys")
        if raw_keys is not None:
            if not isinstance(raw_keys, list):
                raise HTTPException(status_code=400, detail="keys must be an array of diagnostic keys")
            keys = []
            for raw_key in raw_keys:
                if not isinstance(raw_key, str):
                    raise HTTPException(status_code=400, detail="Each diagnostic key must be a string")
                trimmed = raw_key.strip()
                if trimmed:
                    keys.append(trimmed)
        raw_recipient = payload.get("recipient_email")
        if raw_recipient is not None:
            if not isinstance(raw_recipient, str):
                raise HTTPException(status_code=400, detail="recipient_email must be a string")
            recipient_email = raw_recipient.strip() or None
    report = await run_diagnostics(keys, recipient_email=recipient_email)
    return {"status": "ok", **report}
|
||||
|
||||
|
||||
@router.get("/sonarr/options")
|
||||
async def sonarr_options() -> Dict[str, Any]:
|
||||
runtime = get_runtime_settings()
|
||||
@@ -1061,12 +1134,14 @@ async def get_user_summary_by_id(user_id: int) -> Dict[str, Any]:
|
||||
@router.post("/users/{username}/block")
async def block_user(username: str) -> Dict[str, Any]:
    """Block *username* from signing in; audit-logged at WARNING."""
    set_user_blocked(username, True)
    logger.warning("Admin blocked user: username=%s", username)
    result = {"status": "ok", "username": username, "blocked": True}
    return result
|
||||
|
||||
|
||||
@router.post("/users/{username}/unblock")
async def unblock_user(username: str) -> Dict[str, Any]:
    """Re-enable sign-in for *username*; audit-logged at INFO."""
    set_user_blocked(username, False)
    logger.info("Admin unblocked user: username=%s", username)
    result = {"status": "ok", "username": username, "blocked": False}
    return result
|
||||
|
||||
|
||||
@@ -1173,6 +1248,17 @@ async def user_system_action(username: str, payload: Dict[str, Any]) -> Dict[str
|
||||
for system in (result.get("jellyfin"), result.get("jellyseerr"), result.get("email"))
|
||||
):
|
||||
result["status"] = "partial"
|
||||
logger.info(
|
||||
"Admin system action completed: username=%s action=%s overall=%s local=%s jellyfin=%s jellyseerr=%s invites=%s email=%s",
|
||||
username,
|
||||
action,
|
||||
result.get("status"),
|
||||
result.get("local", {}).get("status"),
|
||||
result.get("jellyfin", {}).get("status"),
|
||||
result.get("jellyseerr", {}).get("status"),
|
||||
result.get("invites", {}).get("status"),
|
||||
result.get("email", {}).get("status"),
|
||||
)
|
||||
return result
|
||||
|
||||
|
||||
@@ -1450,6 +1536,15 @@ async def create_profile(payload: Dict[str, Any]) -> Dict[str, Any]:
|
||||
)
|
||||
except sqlite3.IntegrityError as exc:
|
||||
raise HTTPException(status_code=409, detail="A profile with that name already exists") from exc
|
||||
logger.info(
|
||||
"Admin created profile: profile_id=%s name=%s role=%s active=%s auto_search=%s expires_days=%s",
|
||||
profile.get("id"),
|
||||
profile.get("name"),
|
||||
profile.get("role"),
|
||||
profile.get("is_active"),
|
||||
profile.get("auto_search_enabled"),
|
||||
profile.get("account_expires_days"),
|
||||
)
|
||||
return {"status": "ok", "profile": profile}
|
||||
|
||||
|
||||
@@ -1487,6 +1582,15 @@ async def edit_profile(profile_id: int, payload: Dict[str, Any]) -> Dict[str, An
|
||||
raise HTTPException(status_code=409, detail="A profile with that name already exists") from exc
|
||||
if not profile:
|
||||
raise HTTPException(status_code=404, detail="Profile not found")
|
||||
logger.info(
|
||||
"Admin updated profile: profile_id=%s name=%s role=%s active=%s auto_search=%s expires_days=%s",
|
||||
profile.get("id"),
|
||||
profile.get("name"),
|
||||
profile.get("role"),
|
||||
profile.get("is_active"),
|
||||
profile.get("auto_search_enabled"),
|
||||
profile.get("account_expires_days"),
|
||||
)
|
||||
return {"status": "ok", "profile": profile}
|
||||
|
||||
|
||||
@@ -1498,6 +1602,7 @@ async def remove_profile(profile_id: int) -> Dict[str, Any]:
|
||||
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
||||
if not deleted:
|
||||
raise HTTPException(status_code=404, detail="Profile not found")
|
||||
logger.warning("Admin deleted profile: profile_id=%s", profile_id)
|
||||
return {"status": "ok", "deleted": True, "profile_id": profile_id}
|
||||
|
||||
|
||||
@@ -1561,6 +1666,7 @@ async def update_invite_policy(payload: Dict[str, Any]) -> Dict[str, Any]:
|
||||
master_invite_value = payload.get("master_invite_id")
|
||||
if master_invite_value in (None, "", 0, "0"):
|
||||
set_setting(SELF_SERVICE_INVITE_MASTER_ID_KEY, None)
|
||||
logger.info("Admin cleared invite policy master invite")
|
||||
return {"status": "ok", "policy": {"master_invite_id": None, "master_invite": None}}
|
||||
try:
|
||||
master_invite_id = int(master_invite_value)
|
||||
@@ -1572,6 +1678,7 @@ async def update_invite_policy(payload: Dict[str, Any]) -> Dict[str, Any]:
|
||||
if not invite:
|
||||
raise HTTPException(status_code=404, detail="Master invite not found")
|
||||
set_setting(SELF_SERVICE_INVITE_MASTER_ID_KEY, str(master_invite_id))
|
||||
logger.info("Admin updated invite policy: master_invite_id=%s", master_invite_id)
|
||||
return {
|
||||
"status": "ok",
|
||||
"policy": {
|
||||
@@ -1613,6 +1720,7 @@ async def update_invite_email_template_settings(template_key: str, payload: Dict
|
||||
body_text=body_text or "",
|
||||
body_html=body_html or "",
|
||||
)
|
||||
logger.info("Admin updated invite email template: template=%s", template_key)
|
||||
return {"status": "ok", "template": template}
|
||||
|
||||
|
||||
@@ -1621,6 +1729,7 @@ async def reset_invite_email_template_settings(template_key: str) -> Dict[str, A
|
||||
if template_key not in INVITE_EMAIL_TEMPLATE_KEYS:
|
||||
raise HTTPException(status_code=404, detail="Email template not found")
|
||||
template = reset_invite_email_template(template_key)
|
||||
logger.info("Admin reset invite email template: template=%s", template_key)
|
||||
return {"status": "ok", "template": template}
|
||||
|
||||
|
||||
@@ -1666,6 +1775,13 @@ async def send_invite_email(payload: Dict[str, Any]) -> Dict[str, Any]:
|
||||
)
|
||||
except Exception as exc:
|
||||
raise HTTPException(status_code=502, detail=str(exc)) from exc
|
||||
logger.info(
|
||||
"Admin sent invite email template: template=%s recipient=%s invite_id=%s username=%s",
|
||||
template_key,
|
||||
result.get("recipient_email"),
|
||||
invite.get("id") if invite else None,
|
||||
user.get("username") if user else None,
|
||||
)
|
||||
|
||||
return {
|
||||
"status": "ok",
|
||||
@@ -1725,6 +1841,18 @@ async def create_invite(payload: Dict[str, Any], current_user: Dict[str, Any] =
|
||||
)
|
||||
except Exception as exc:
|
||||
email_error = str(exc)
|
||||
logger.info(
|
||||
"Admin created invite: invite_id=%s code=%s label=%s profile_id=%s role=%s max_uses=%s enabled=%s recipient_email=%s send_email=%s",
|
||||
invite.get("id"),
|
||||
invite.get("code"),
|
||||
invite.get("label"),
|
||||
invite.get("profile_id"),
|
||||
invite.get("role"),
|
||||
invite.get("max_uses"),
|
||||
invite.get("enabled"),
|
||||
invite.get("recipient_email"),
|
||||
send_email,
|
||||
)
|
||||
return {
|
||||
"status": "partial" if email_error else "ok",
|
||||
"invite": invite,
|
||||
@@ -1785,6 +1913,18 @@ async def edit_invite(invite_id: int, payload: Dict[str, Any]) -> Dict[str, Any]
|
||||
)
|
||||
except Exception as exc:
|
||||
email_error = str(exc)
|
||||
logger.info(
|
||||
"Admin updated invite: invite_id=%s code=%s label=%s profile_id=%s role=%s max_uses=%s enabled=%s recipient_email=%s send_email=%s",
|
||||
invite.get("id"),
|
||||
invite.get("code"),
|
||||
invite.get("label"),
|
||||
invite.get("profile_id"),
|
||||
invite.get("role"),
|
||||
invite.get("max_uses"),
|
||||
invite.get("enabled"),
|
||||
invite.get("recipient_email"),
|
||||
send_email,
|
||||
)
|
||||
return {
|
||||
"status": "partial" if email_error else "ok",
|
||||
"invite": invite,
|
||||
@@ -1803,4 +1943,5 @@ async def remove_invite(invite_id: int) -> Dict[str, Any]:
|
||||
deleted = delete_signup_invite(invite_id)
|
||||
if not deleted:
|
||||
raise HTTPException(status_code=404, detail="Invite not found")
|
||||
logger.warning("Admin deleted invite: invite_id=%s", invite_id)
|
||||
return {"status": "ok", "deleted": True, "invite_id": invite_id}
|
||||
|
||||
@@ -115,6 +115,7 @@ def _record_login_failure(request: Request, username: str) -> None:
|
||||
_prune_attempts(user_bucket, now, window)
|
||||
ip_bucket.append(now)
|
||||
user_bucket.append(now)
|
||||
logger.warning("login failure recorded username=%s client=%s", user_key, ip_key)
|
||||
|
||||
|
||||
def _clear_login_failures(request: Request, username: str) -> None:
|
||||
@@ -148,6 +149,12 @@ def _enforce_login_rate_limit(request: Request, username: str) -> None:
|
||||
if retry_candidates:
|
||||
retry_after = max(retry_candidates)
|
||||
if exceeded:
|
||||
logger.warning(
|
||||
"login rate limit exceeded username=%s client=%s retry_after=%s",
|
||||
user_key,
|
||||
ip_key,
|
||||
retry_after,
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_429_TOO_MANY_REQUESTS,
|
||||
detail="Too many login attempts. Try again shortly.",
|
||||
@@ -474,6 +481,11 @@ def _master_invite_controlled_values(master_invite: dict) -> tuple[int | None, s
|
||||
@router.post("/login")
|
||||
async def login(request: Request, form_data: OAuth2PasswordRequestForm = Depends()) -> dict:
|
||||
_enforce_login_rate_limit(request, form_data.username)
|
||||
logger.info(
|
||||
"login attempt provider=local username=%s client=%s",
|
||||
_login_rate_key_user(form_data.username),
|
||||
_auth_client_ip(request),
|
||||
)
|
||||
# Provider placeholder passwords must never be accepted by the local-login endpoint.
|
||||
if form_data.password in {"jellyfin-user", "jellyseerr-user"}:
|
||||
_record_login_failure(request, form_data.username)
|
||||
@@ -483,6 +495,11 @@ async def login(request: Request, form_data: OAuth2PasswordRequestForm = Depends
|
||||
str(user.get("auth_provider") or "local").lower() != "local" for user in matching_users
|
||||
)
|
||||
if has_external_match:
|
||||
logger.warning(
|
||||
"login rejected provider=local username=%s reason=external-account client=%s",
|
||||
_login_rate_key_user(form_data.username),
|
||||
_auth_client_ip(request),
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail="This account uses external sign-in. Use the external sign-in option.",
|
||||
@@ -492,6 +509,11 @@ async def login(request: Request, form_data: OAuth2PasswordRequestForm = Depends
|
||||
_record_login_failure(request, form_data.username)
|
||||
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid credentials")
|
||||
if user.get("auth_provider") != "local":
|
||||
logger.warning(
|
||||
"login rejected provider=local username=%s reason=wrong-provider client=%s",
|
||||
_login_rate_key_user(form_data.username),
|
||||
_auth_client_ip(request),
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail="This account uses external sign-in. Use the external sign-in option.",
|
||||
@@ -500,6 +522,12 @@ async def login(request: Request, form_data: OAuth2PasswordRequestForm = Depends
|
||||
token = create_access_token(user["username"], user["role"])
|
||||
_clear_login_failures(request, form_data.username)
|
||||
set_last_login(user["username"])
|
||||
logger.info(
|
||||
"login success provider=local username=%s role=%s client=%s",
|
||||
user["username"],
|
||||
user["role"],
|
||||
_auth_client_ip(request),
|
||||
)
|
||||
return {
|
||||
"access_token": token,
|
||||
"token_type": "bearer",
|
||||
@@ -510,6 +538,11 @@ async def login(request: Request, form_data: OAuth2PasswordRequestForm = Depends
|
||||
@router.post("/jellyfin/login")
|
||||
async def jellyfin_login(request: Request, form_data: OAuth2PasswordRequestForm = Depends()) -> dict:
|
||||
_enforce_login_rate_limit(request, form_data.username)
|
||||
logger.info(
|
||||
"login attempt provider=jellyfin username=%s client=%s",
|
||||
_login_rate_key_user(form_data.username),
|
||||
_auth_client_ip(request),
|
||||
)
|
||||
runtime = get_runtime_settings()
|
||||
client = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key)
|
||||
if not client.configured():
|
||||
@@ -527,6 +560,11 @@ async def jellyfin_login(request: Request, form_data: OAuth2PasswordRequestForm
|
||||
token = create_access_token(canonical_username, "user")
|
||||
_clear_login_failures(request, username)
|
||||
set_last_login(canonical_username)
|
||||
logger.info(
|
||||
"login success provider=jellyfin username=%s source=cache client=%s",
|
||||
canonical_username,
|
||||
_auth_client_ip(request),
|
||||
)
|
||||
return {
|
||||
"access_token": token,
|
||||
"token_type": "bearer",
|
||||
@@ -535,6 +573,11 @@ async def jellyfin_login(request: Request, form_data: OAuth2PasswordRequestForm
|
||||
try:
|
||||
response = await client.authenticate_by_name(username, password)
|
||||
except Exception as exc:
|
||||
logger.exception(
|
||||
"login upstream error provider=jellyfin username=%s client=%s",
|
||||
_login_rate_key_user(username),
|
||||
_auth_client_ip(request),
|
||||
)
|
||||
raise HTTPException(status_code=status.HTTP_502_BAD_GATEWAY, detail=str(exc)) from exc
|
||||
if not isinstance(response, dict) or not response.get("User"):
|
||||
_record_login_failure(request, username)
|
||||
@@ -564,6 +607,12 @@ async def jellyfin_login(request: Request, form_data: OAuth2PasswordRequestForm
|
||||
token = create_access_token(canonical_username, "user")
|
||||
_clear_login_failures(request, username)
|
||||
set_last_login(canonical_username)
|
||||
logger.info(
|
||||
"login success provider=jellyfin username=%s linked_seerr_id=%s client=%s",
|
||||
canonical_username,
|
||||
get_user_by_username(canonical_username).get("jellyseerr_user_id") if get_user_by_username(canonical_username) else None,
|
||||
_auth_client_ip(request),
|
||||
)
|
||||
return {
|
||||
"access_token": token,
|
||||
"token_type": "bearer",
|
||||
@@ -575,6 +624,11 @@ async def jellyfin_login(request: Request, form_data: OAuth2PasswordRequestForm
|
||||
@router.post("/jellyseerr/login")
|
||||
async def jellyseerr_login(request: Request, form_data: OAuth2PasswordRequestForm = Depends()) -> dict:
|
||||
_enforce_login_rate_limit(request, form_data.username)
|
||||
logger.info(
|
||||
"login attempt provider=seerr username=%s client=%s",
|
||||
_login_rate_key_user(form_data.username),
|
||||
_auth_client_ip(request),
|
||||
)
|
||||
runtime = get_runtime_settings()
|
||||
client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
|
||||
if not client.configured():
|
||||
@@ -582,6 +636,11 @@ async def jellyseerr_login(request: Request, form_data: OAuth2PasswordRequestFor
|
||||
try:
|
||||
response = await client.login_local(form_data.username, form_data.password)
|
||||
except Exception as exc:
|
||||
logger.exception(
|
||||
"login upstream error provider=seerr username=%s client=%s",
|
||||
_login_rate_key_user(form_data.username),
|
||||
_auth_client_ip(request),
|
||||
)
|
||||
raise HTTPException(status_code=status.HTTP_502_BAD_GATEWAY, detail=str(exc)) from exc
|
||||
if not isinstance(response, dict):
|
||||
_record_login_failure(request, form_data.username)
|
||||
@@ -605,6 +664,12 @@ async def jellyseerr_login(request: Request, form_data: OAuth2PasswordRequestFor
|
||||
token = create_access_token(canonical_username, "user")
|
||||
_clear_login_failures(request, form_data.username)
|
||||
set_last_login(canonical_username)
|
||||
logger.info(
|
||||
"login success provider=seerr username=%s seerr_user_id=%s client=%s",
|
||||
canonical_username,
|
||||
jellyseerr_user_id,
|
||||
_auth_client_ip(request),
|
||||
)
|
||||
return {
|
||||
"access_token": token,
|
||||
"token_type": "bearer",
|
||||
@@ -663,6 +728,11 @@ async def signup(payload: dict) -> dict:
|
||||
)
|
||||
if get_user_by_username(username):
|
||||
raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail="User already exists")
|
||||
logger.info(
|
||||
"signup attempt username=%s invite_code=%s",
|
||||
username,
|
||||
invite_code,
|
||||
)
|
||||
|
||||
invite = get_signup_invite_by_code(invite_code)
|
||||
if not invite:
|
||||
@@ -709,6 +779,7 @@ async def signup(payload: dict) -> dict:
|
||||
|
||||
jellyfin_client = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key)
|
||||
if jellyfin_client.configured():
|
||||
logger.info("signup provisioning jellyfin username=%s", username)
|
||||
auth_provider = "jellyfin"
|
||||
local_password_value = "jellyfin-user"
|
||||
try:
|
||||
@@ -788,6 +859,14 @@ async def signup(payload: dict) -> dict:
|
||||
_assert_user_can_login(created_user)
|
||||
token = create_access_token(username, role)
|
||||
set_last_login(username)
|
||||
logger.info(
|
||||
"signup success username=%s role=%s auth_provider=%s profile_id=%s invite_code=%s",
|
||||
username,
|
||||
role,
|
||||
created_user.get("auth_provider") if created_user else auth_provider,
|
||||
created_user.get("profile_id") if created_user else None,
|
||||
invite.get("code"),
|
||||
)
|
||||
return {
|
||||
"access_token": token,
|
||||
"token_type": "bearer",
|
||||
|
||||
@@ -596,7 +596,7 @@ async def _sync_all_requests(client: JellyseerrClient) -> int:
|
||||
skip += take
|
||||
_sync_state["skip"] = skip
|
||||
_sync_state["message"] = f"Synced {stored} requests"
|
||||
logger.info("Seerr sync progress: stored=%s skip=%s", stored, skip)
|
||||
logger.debug("Seerr sync progress: stored=%s skip=%s", stored, skip)
|
||||
_sync_state.update(
|
||||
{
|
||||
"status": "completed",
|
||||
@@ -719,7 +719,7 @@ async def _sync_delta_requests(client: JellyseerrClient) -> int:
|
||||
skip += take
|
||||
_sync_state["skip"] = skip
|
||||
_sync_state["message"] = f"Delta synced {stored} requests"
|
||||
logger.info("Seerr delta sync progress: stored=%s skip=%s", stored, skip)
|
||||
logger.debug("Seerr delta sync progress: stored=%s skip=%s", stored, skip)
|
||||
deduped = prune_duplicate_requests_cache()
|
||||
if deduped:
|
||||
logger.info("Seerr delta sync removed duplicate rows: %s", deduped)
|
||||
|
||||
@@ -7,6 +7,8 @@ _INT_FIELDS = {
|
||||
"sonarr_quality_profile_id",
|
||||
"radarr_quality_profile_id",
|
||||
"jwt_exp_minutes",
|
||||
"log_file_max_bytes",
|
||||
"log_file_backup_count",
|
||||
"requests_sync_ttl_minutes",
|
||||
"requests_poll_interval_seconds",
|
||||
"requests_delta_sync_interval_minutes",
|
||||
|
||||
696
backend/app/services/diagnostics.py
Normal file
696
backend/app/services/diagnostics.py
Normal file
@@ -0,0 +1,696 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timezone
|
||||
from time import perf_counter
|
||||
from typing import Any, Awaitable, Callable, Dict, List, Optional, Sequence
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import httpx
|
||||
|
||||
from ..clients.jellyfin import JellyfinClient
|
||||
from ..clients.jellyseerr import JellyseerrClient
|
||||
from ..clients.prowlarr import ProwlarrClient
|
||||
from ..clients.qbittorrent import QBittorrentClient
|
||||
from ..clients.radarr import RadarrClient
|
||||
from ..clients.sonarr import SonarrClient
|
||||
from ..config import settings as env_settings
|
||||
from ..db import run_integrity_check
|
||||
from ..runtime import get_runtime_settings
|
||||
from .invite_email import send_test_email, smtp_email_config_ready
|
||||
|
||||
|
||||
DiagnosticRunner = Callable[[], Awaitable[Dict[str, Any]]]
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class DiagnosticCheck:
    """Immutable descriptor for one entry in the diagnostics catalog."""

    key: str  # stable identifier clients use to select this check
    label: str  # human-readable display name
    category: str  # grouping bucket for the catalog — presumably UI grouping; confirm against frontend
    description: str  # short explanation of what the check verifies
    live_safe: bool  # NOTE(review): likely "safe to run against live services" — confirm with catalog usage
    configured: bool  # whether the underlying integration has the settings it needs
    config_detail: str  # "ok" or a message explaining what configuration is missing
    target: Optional[str]  # host[:port] the check talks to, when applicable
    runner: DiagnosticRunner  # async no-arg callable that performs the check and returns a result dict
|
||||
|
||||
|
||||
def _now_iso() -> str:
|
||||
return datetime.now(timezone.utc).isoformat()
|
||||
|
||||
|
||||
def _clean_text(value: Any, fallback: str = "") -> str:
|
||||
if value is None:
|
||||
return fallback
|
||||
if isinstance(value, str):
|
||||
trimmed = value.strip()
|
||||
return trimmed if trimmed else fallback
|
||||
return str(value)
|
||||
|
||||
|
||||
def _url_target(url: Optional[str]) -> Optional[str]:
    """Reduce a URL to a display target of the form "host" or "host:port"."""
    raw = _clean_text(url)
    if not raw:
        return None
    try:
        parsed = urlparse(raw)
    except Exception:
        # Unparseable input: fall back to showing the raw string.
        return raw
    host = parsed.hostname or parsed.netloc or raw
    return f"{host}:{parsed.port}" if parsed.port else host
|
||||
|
||||
|
||||
def _host_port_target(host: Optional[str], port: Optional[int]) -> Optional[str]:
    """Join *host* and an optional *port* as "host[:port]"; None without a host."""
    name = _clean_text(host)
    if not name:
        return None
    return name if port is None else f"{name}:{port}"
|
||||
|
||||
|
||||
def _http_error_detail(exc: Exception) -> str:
    """Produce a human-readable detail string for an HTTP(ish) failure.

    For ``httpx.HTTPStatusError`` the status code is included, plus the
    response body when one can be read; any other exception is stringified.
    """
    if not isinstance(exc, httpx.HTTPStatusError):
        return str(exc)
    response = exc.response
    try:
        body = response.text.strip()
    except Exception:
        # Reading the body is best-effort; fall back to status-only detail.
        body = ""
    prefix = f"HTTP {response.status_code}"
    return f"{prefix}: {body}" if body else prefix
|
||||
|
||||
|
||||
def _config_status(detail: str) -> str:
|
||||
lowered = detail.lower()
|
||||
if "disabled" in lowered:
|
||||
return "disabled"
|
||||
return "not_configured"
|
||||
|
||||
|
||||
def _discord_config_ready(runtime) -> tuple[bool, str]:
    """Whether Discord notifications are enabled and a webhook URL is set."""
    enabled = runtime.magent_notify_enabled and runtime.magent_notify_discord_enabled
    if not enabled:
        return False, "Discord notifications are disabled."
    webhook = _clean_text(runtime.magent_notify_discord_webhook_url) or _clean_text(runtime.discord_webhook_url)
    if not webhook:
        return False, "Discord webhook URL is required."
    return True, "ok"
|
||||
|
||||
|
||||
def _telegram_config_ready(runtime) -> tuple[bool, str]:
    """Whether Telegram notifications are enabled and fully configured."""
    enabled = runtime.magent_notify_enabled and runtime.magent_notify_telegram_enabled
    if not enabled:
        return False, "Telegram notifications are disabled."
    token = _clean_text(runtime.magent_notify_telegram_bot_token)
    chat = _clean_text(runtime.magent_notify_telegram_chat_id)
    if token and chat:
        return True, "ok"
    return False, "Telegram bot token and chat ID are required."
|
||||
|
||||
|
||||
def _webhook_config_ready(runtime) -> tuple[bool, str]:
    """Whether the generic webhook channel is enabled and has a target URL."""
    enabled = runtime.magent_notify_enabled and runtime.magent_notify_webhook_enabled
    if not enabled:
        return False, "Generic webhook notifications are disabled."
    if not _clean_text(runtime.magent_notify_webhook_url):
        return False, "Generic webhook URL is required."
    return True, "ok"
|
||||
|
||||
|
||||
def _push_config_ready(runtime) -> tuple[bool, str]:
    """Validate push-notification settings for the selected provider.

    Each provider has its own required fields; Telegram and Discord relays
    delegate to their dedicated readiness checks.
    """
    if not (runtime.magent_notify_enabled and runtime.magent_notify_push_enabled):
        return False, "Push notifications are disabled."
    provider = _clean_text(runtime.magent_notify_push_provider, "ntfy").lower()
    if provider == "ntfy":
        if _clean_text(runtime.magent_notify_push_base_url) and _clean_text(runtime.magent_notify_push_topic):
            return True, "ok"
        return False, "ntfy requires a base URL and topic."
    if provider == "gotify":
        if _clean_text(runtime.magent_notify_push_base_url) and _clean_text(runtime.magent_notify_push_token):
            return True, "ok"
        return False, "Gotify requires a base URL and app token."
    if provider == "pushover":
        if _clean_text(runtime.magent_notify_push_token) and _clean_text(runtime.magent_notify_push_user_key):
            return True, "ok"
        return False, "Pushover requires an application token and user key."
    if provider == "webhook":
        if _clean_text(runtime.magent_notify_push_base_url):
            return True, "ok"
        return False, "Webhook relay requires a target URL."
    if provider == "telegram":
        return _telegram_config_ready(runtime)
    if provider == "discord":
        return _discord_config_ready(runtime)
    return False, f"Unsupported push provider: {provider or 'unknown'}"
|
||||
|
||||
|
||||
def _summary_from_results(results: Sequence[Dict[str, Any]]) -> Dict[str, int]:
|
||||
summary = {
|
||||
"total": len(results),
|
||||
"up": 0,
|
||||
"down": 0,
|
||||
"degraded": 0,
|
||||
"not_configured": 0,
|
||||
"disabled": 0,
|
||||
}
|
||||
for result in results:
|
||||
status = str(result.get("status") or "").strip().lower()
|
||||
if status in summary:
|
||||
summary[status] += 1
|
||||
return summary
|
||||
|
||||
|
||||
async def _run_http_json_get(
    url: str,
    *,
    headers: Optional[Dict[str, str]] = None,
    params: Optional[Dict[str, Any]] = None,
) -> Dict[str, Any]:
    """GET *url*, raise on non-2xx, and return the JSON body under "response"."""
    async with httpx.AsyncClient(timeout=10.0, follow_redirects=True) as client:
        response = await client.get(url, headers=headers, params=params)
        response.raise_for_status()
        return {"response": response.json()}
|
||||
|
||||
|
||||
async def _run_http_text_get(url: str) -> Dict[str, Any]:
    """GET *url*, raise on non-2xx, and return the text body plus a status message."""
    async with httpx.AsyncClient(timeout=10.0, follow_redirects=True) as client:
        response = await client.get(url)
        response.raise_for_status()
        return {"response": response.text, "message": f"HTTP {response.status_code}"}
|
||||
|
||||
|
||||
async def _run_http_post(
    url: str,
    *,
    json_payload: Optional[Dict[str, Any]] = None,
    data_payload: Any = None,
    params: Optional[Dict[str, Any]] = None,
    headers: Optional[Dict[str, str]] = None,
) -> Dict[str, Any]:
    """POST to *url* and normalise the reply into "message"/"response" keys.

    JSON bodies are decoded when the content-type says so; anything else is
    returned as stripped text. An empty body yields only the status message.
    """
    async with httpx.AsyncClient(timeout=15.0, follow_redirects=True) as client:
        response = await client.post(url, json=json_payload, data=data_payload, params=params, headers=headers)
        response.raise_for_status()
        message = f"HTTP {response.status_code}"
        if not response.content:
            return {"message": message}
        if "application/json" in response.headers.get("content-type", "").lower():
            try:
                return {"response": response.json(), "message": message}
            except Exception:
                # Mislabelled content-type: fall through to the text form.
                pass
        return {"response": response.text.strip(), "message": message}
|
||||
|
||||
|
||||
async def _run_database_check() -> Dict[str, Any]:
    """Run SQLite's integrity check off the event loop and grade the result."""
    integrity = await asyncio.to_thread(run_integrity_check)
    return {
        "status": "up" if integrity == "ok" else "degraded",
        "message": f"SQLite integrity_check returned {integrity}",
        "detail": integrity,
    }
|
||||
|
||||
|
||||
async def _run_magent_api_check(runtime) -> Dict[str, Any]:
    """Probe this application's own /health endpoint and report its build."""
    base_url = _clean_text(runtime.magent_api_url) or f"http://127.0.0.1:{int(runtime.magent_api_port or 8000)}"
    result = await _run_http_json_get(f"{base_url.rstrip('/')}/health")
    payload = result.get("response")
    build_number = payload.get("build") if isinstance(payload, dict) else None
    suffix = f" (build {build_number})" if build_number else ""
    return {"message": f"Health endpoint responded{suffix}", "detail": payload}
|
||||
|
||||
|
||||
async def _run_magent_web_check(runtime) -> Dict[str, Any]:
    """Fetch the web frontend's landing page and verify it looks like HTML."""
    base_url = _clean_text(runtime.magent_application_url) or f"http://127.0.0.1:{int(runtime.magent_application_port or 3000)}"
    result = await _run_http_text_get(base_url.rstrip("/"))
    body = result.get("response")
    if not (isinstance(body, str) and "<html" in body.lower()):
        return {"status": "degraded", "message": "Application responded with unexpected content"}
    return {"message": "Application page responded", "detail": "html"}
|
||||
|
||||
|
||||
async def _run_seerr_check(runtime) -> Dict[str, Any]:
    """Query Seerr's status endpoint and report its version when available."""
    client = JellyseerrClient(runtime.jellyseerr_base_url, runtime.jellyseerr_api_key)
    payload = await client.get_status()
    version = payload.get("version") if isinstance(payload, dict) else None
    return {
        "message": f"Seerr version {version}" if version else "Seerr responded",
        "detail": payload,
    }
|
||||
|
||||
|
||||
async def _run_sonarr_check(runtime) -> Dict[str, Any]:
    """Query Sonarr's system status and report its version when available."""
    client = SonarrClient(runtime.sonarr_base_url, runtime.sonarr_api_key)
    payload = await client.get_system_status()
    version = payload.get("version") if isinstance(payload, dict) else None
    return {
        "message": f"Sonarr version {version}" if version else "Sonarr responded",
        "detail": payload,
    }
|
||||
|
||||
|
||||
async def _run_radarr_check(runtime) -> Dict[str, Any]:
    """Query Radarr's system status and report its version when available."""
    client = RadarrClient(runtime.radarr_base_url, runtime.radarr_api_key)
    payload = await client.get_system_status()
    version = payload.get("version") if isinstance(payload, dict) else None
    return {
        "message": f"Radarr version {version}" if version else "Radarr responded",
        "detail": payload,
    }
|
||||
|
||||
|
||||
async def _run_prowlarr_check(runtime) -> Dict[str, Any]:
    """Query Prowlarr's health endpoint; a non-empty list counts as warnings."""
    client = ProwlarrClient(runtime.prowlarr_base_url, runtime.prowlarr_api_key)
    payload = await client.get_health()
    if not (isinstance(payload, list) and payload):
        return {"message": "Prowlarr reported healthy", "detail": payload}
    return {
        "status": "degraded",
        "message": f"Prowlarr health warnings: {len(payload)}",
        "detail": payload,
    }
|
||||
|
||||
|
||||
async def _run_qbittorrent_check(runtime) -> Dict[str, Any]:
    """Ask qBittorrent for its application version."""
    client = QBittorrentClient(
        runtime.qbittorrent_base_url,
        runtime.qbittorrent_username,
        runtime.qbittorrent_password,
    )
    version = await client.get_app_version()
    has_version = isinstance(version, str) and bool(version)
    return {
        "message": f"qBittorrent version {version}" if has_version else "qBittorrent responded",
        "detail": version,
    }
|
||||
|
||||
|
||||
async def _run_jellyfin_check(runtime) -> Dict[str, Any]:
    """Query Jellyfin's system info and report its version when available."""
    client = JellyfinClient(runtime.jellyfin_base_url, runtime.jellyfin_api_key)
    payload = await client.get_system_info()
    version = payload.get("Version") if isinstance(payload, dict) else None
    return {
        "message": f"Jellyfin version {version}" if version else "Jellyfin responded",
        "detail": payload,
    }
|
||||
|
||||
|
||||
async def _run_email_check(recipient_email: Optional[str] = None) -> Dict[str, Any]:
    """Send a test email through the configured SMTP settings."""
    result = await send_test_email(recipient_email=recipient_email)
    target = _clean_text(result.get("recipient_email"), "configured recipient")
    return {"message": f"Test email sent to {target}", "detail": result}
|
||||
|
||||
|
||||
async def _run_discord_check(runtime) -> Dict[str, Any]:
    """Post a diagnostics ping to the configured Discord webhook."""
    webhook_url = _clean_text(runtime.magent_notify_discord_webhook_url) or _clean_text(runtime.discord_webhook_url)
    ping = f"{env_settings.app_name} diagnostics ping\nBuild {env_settings.site_build_number or 'unknown'}"
    result = await _run_http_post(webhook_url, json_payload={"content": ping})
    return {"message": "Discord webhook accepted ping", "detail": result.get("response")}
|
||||
|
||||
|
||||
async def _run_telegram_check(runtime) -> Dict[str, Any]:
    """Send a diagnostics ping through the Telegram bot API."""
    bot_token = _clean_text(runtime.magent_notify_telegram_bot_token)
    chat_id = _clean_text(runtime.magent_notify_telegram_chat_id)
    text = f"{env_settings.app_name} diagnostics ping\nBuild {env_settings.site_build_number or 'unknown'}"
    result = await _run_http_post(
        f"https://api.telegram.org/bot{bot_token}/sendMessage",
        json_payload={"chat_id": chat_id, "text": text},
    )
    return {"message": "Telegram ping accepted", "detail": result.get("response")}
|
||||
|
||||
|
||||
async def _run_webhook_check(runtime) -> Dict[str, Any]:
    """Post a structured diagnostics ping to the generic webhook."""
    target = _clean_text(runtime.magent_notify_webhook_url)
    payload = {
        "type": "diagnostics.ping",
        "application": env_settings.app_name,
        "build": env_settings.site_build_number,
        "checked_at": _now_iso(),
    }
    result = await _run_http_post(target, json_payload=payload)
    return {"message": "Webhook accepted ping", "detail": result.get("response")}
|
||||
|
||||
|
||||
async def _run_push_check(runtime) -> Dict[str, Any]:
    """Send a live test push through the configured push provider.

    Supported providers: ntfy, gotify, pushover, webhook, telegram,
    discord. Raises RuntimeError for anything else.
    """
    provider = _clean_text(runtime.magent_notify_push_provider, "ntfy").lower()
    ping_line = f"{env_settings.app_name} diagnostics ping"
    build_line = f"Build {env_settings.site_build_number or 'unknown'}"
    two_line_body = f"{ping_line}\n{build_line}"

    if provider == "ntfy":
        # ntfy publishes by POSTing plain text to <base_url>/<topic>.
        root = _clean_text(runtime.magent_notify_push_base_url)
        topic = _clean_text(runtime.magent_notify_push_topic)
        outcome = await _run_http_post(
            f"{root.rstrip('/')}/{topic}",
            data_payload=two_line_body,
            headers={"Content-Type": "text/plain; charset=utf-8"},
        )
        return {"message": "ntfy push accepted", "detail": outcome.get("response")}

    if provider == "gotify":
        # Gotify authenticates via a token query parameter on /message.
        root = _clean_text(runtime.magent_notify_push_base_url)
        app_token = _clean_text(runtime.magent_notify_push_token)
        outcome = await _run_http_post(
            f"{root.rstrip('/')}/message",
            json_payload={"title": env_settings.app_name, "message": build_line, "priority": 5},
            params={"token": app_token},
        )
        return {"message": "Gotify push accepted", "detail": outcome.get("response")}

    if provider == "pushover":
        api_token = _clean_text(runtime.magent_notify_push_token)
        user = _clean_text(runtime.magent_notify_push_user_key)
        target_device = _clean_text(runtime.magent_notify_push_device)
        form = {
            "token": api_token,
            "user": user,
            "message": two_line_body,
            "title": env_settings.app_name,
        }
        if target_device:
            # Device is optional; omit the field entirely when unset.
            form["device"] = target_device
        outcome = await _run_http_post("https://api.pushover.net/1/messages.json", data_payload=form)
        return {"message": "Pushover push accepted", "detail": outcome.get("response")}

    if provider == "webhook":
        root = _clean_text(runtime.magent_notify_push_base_url)
        body = {
            "type": "diagnostics.push",
            "application": env_settings.app_name,
            "build": env_settings.site_build_number,
            "checked_at": _now_iso(),
        }
        outcome = await _run_http_post(root, json_payload=body)
        return {"message": "Push webhook accepted", "detail": outcome.get("response")}

    # Telegram and Discord reuse their dedicated diagnostic runners.
    if provider == "telegram":
        return await _run_telegram_check(runtime)

    if provider == "discord":
        return await _run_discord_check(runtime)

    raise RuntimeError(f"Unsupported push provider: {provider}")
|
||||
|
||||
|
||||
def _build_diagnostic_checks(recipient_email: Optional[str] = None) -> List[DiagnosticCheck]:
    """Build the full catalog of diagnostic checks from runtime settings.

    Args:
        recipient_email: Optional override recipient for the SMTP email
            check; other checks ignore it.

    Returns:
        The ordered list of DiagnosticCheck entries (application, media
        services, then notification channels). Entries are always present;
        whether a check can actually run is expressed via ``configured``
        and ``config_detail``.
    """
    runtime = get_runtime_settings()
    # Display targets for each service, derived from the configured URLs.
    seerr_target = _url_target(runtime.jellyseerr_base_url)
    jellyfin_target = _url_target(runtime.jellyfin_base_url)
    sonarr_target = _url_target(runtime.sonarr_base_url)
    radarr_target = _url_target(runtime.radarr_base_url)
    prowlarr_target = _url_target(runtime.prowlarr_base_url)
    qbittorrent_target = _url_target(runtime.qbittorrent_base_url)
    # App/API fall back to loopback host:port when no public URL is set.
    application_target = _url_target(runtime.magent_application_url) or _host_port_target("127.0.0.1", runtime.magent_application_port)
    api_target = _url_target(runtime.magent_api_url) or _host_port_target("127.0.0.1", runtime.magent_api_port)
    smtp_target = _host_port_target(runtime.magent_notify_email_smtp_host, runtime.magent_notify_email_smtp_port)
    # Discord: dedicated notify webhook first, legacy webhook as fallback.
    discord_target = _url_target(runtime.magent_notify_discord_webhook_url) or _url_target(runtime.discord_webhook_url)
    # Telegram always talks to api.telegram.org; only shown when a bot token exists.
    telegram_target = "api.telegram.org" if _clean_text(runtime.magent_notify_telegram_bot_token) else None
    webhook_target = _url_target(runtime.magent_notify_webhook_url)

    # Push target depends on the provider: hosted APIs have fixed hosts,
    # telegram/discord reuse those targets, everything else uses the
    # configured base URL (ntfy, gotify, generic webhook).
    push_provider = _clean_text(runtime.magent_notify_push_provider, "ntfy").lower()
    push_target = None
    if push_provider == "pushover":
        push_target = "api.pushover.net"
    elif push_provider == "telegram":
        push_target = telegram_target or "api.telegram.org"
    elif push_provider == "discord":
        push_target = discord_target or "discord.com"
    else:
        push_target = _url_target(runtime.magent_notify_push_base_url)

    # Readiness (configured?) plus a human-readable reason per channel.
    email_ready, email_detail = smtp_email_config_ready()
    discord_ready, discord_detail = _discord_config_ready(runtime)
    telegram_ready, telegram_detail = _telegram_config_ready(runtime)
    push_ready, push_detail = _push_config_ready(runtime)
    webhook_ready, webhook_detail = _webhook_config_ready(runtime)

    # NOTE: lambdas bind `runtime` (and `recipient_email`) as default
    # arguments so each runner captures the settings snapshot taken above.
    checks = [
        # --- Application checks (always considered configured) ---
        DiagnosticCheck(
            key="magent-web",
            label="Magent application",
            category="Application",
            description="Checks that the frontend application URL is responding.",
            live_safe=True,
            configured=True,
            config_detail="ok",
            target=application_target,
            runner=lambda runtime=runtime: _run_magent_web_check(runtime),
        ),
        DiagnosticCheck(
            key="magent-api",
            label="Magent API",
            category="Application",
            description="Checks the Magent API health endpoint.",
            live_safe=True,
            configured=True,
            config_detail="ok",
            target=api_target,
            runner=lambda runtime=runtime: _run_magent_api_check(runtime),
        ),
        DiagnosticCheck(
            key="database",
            label="SQLite database",
            category="Application",
            description="Runs SQLite integrity_check against the current Magent database.",
            live_safe=True,
            configured=True,
            config_detail="ok",
            target="sqlite",
            runner=_run_database_check,
        ),
        # --- Media services (configured only when URL + API key exist) ---
        DiagnosticCheck(
            key="seerr",
            label="Seerr",
            category="Media services",
            description="Checks Seerr API reachability and version.",
            live_safe=True,
            configured=bool(runtime.jellyseerr_base_url and runtime.jellyseerr_api_key),
            config_detail="Seerr URL and API key are required.",
            target=seerr_target,
            runner=lambda runtime=runtime: _run_seerr_check(runtime),
        ),
        DiagnosticCheck(
            key="jellyfin",
            label="Jellyfin",
            category="Media services",
            description="Checks Jellyfin system info with the configured API key.",
            live_safe=True,
            configured=bool(runtime.jellyfin_base_url and runtime.jellyfin_api_key),
            config_detail="Jellyfin URL and API key are required.",
            target=jellyfin_target,
            runner=lambda runtime=runtime: _run_jellyfin_check(runtime),
        ),
        DiagnosticCheck(
            key="sonarr",
            label="Sonarr",
            category="Media services",
            description="Checks Sonarr system status with the configured API key.",
            live_safe=True,
            configured=bool(runtime.sonarr_base_url and runtime.sonarr_api_key),
            config_detail="Sonarr URL and API key are required.",
            target=sonarr_target,
            runner=lambda runtime=runtime: _run_sonarr_check(runtime),
        ),
        DiagnosticCheck(
            key="radarr",
            label="Radarr",
            category="Media services",
            description="Checks Radarr system status with the configured API key.",
            live_safe=True,
            configured=bool(runtime.radarr_base_url and runtime.radarr_api_key),
            config_detail="Radarr URL and API key are required.",
            target=radarr_target,
            runner=lambda runtime=runtime: _run_radarr_check(runtime),
        ),
        DiagnosticCheck(
            key="prowlarr",
            label="Prowlarr",
            category="Media services",
            description="Checks Prowlarr health and flags warnings as degraded.",
            live_safe=True,
            configured=bool(runtime.prowlarr_base_url and runtime.prowlarr_api_key),
            config_detail="Prowlarr URL and API key are required.",
            target=prowlarr_target,
            runner=lambda runtime=runtime: _run_prowlarr_check(runtime),
        ),
        DiagnosticCheck(
            key="qbittorrent",
            label="qBittorrent",
            category="Media services",
            description="Checks qBittorrent login and app version.",
            live_safe=True,
            configured=bool(
                runtime.qbittorrent_base_url and runtime.qbittorrent_username and runtime.qbittorrent_password
            ),
            config_detail="qBittorrent URL, username, and password are required.",
            target=qbittorrent_target,
            runner=lambda runtime=runtime: _run_qbittorrent_check(runtime),
        ),
        # --- Notification channels (live_safe=False: they send real messages) ---
        DiagnosticCheck(
            key="email",
            label="SMTP email",
            category="Notifications",
            description="Sends a live test email using the configured SMTP provider.",
            live_safe=False,
            configured=email_ready,
            config_detail=email_detail,
            target=smtp_target,
            runner=lambda recipient_email=recipient_email: _run_email_check(recipient_email),
        ),
        DiagnosticCheck(
            key="discord",
            label="Discord webhook",
            category="Notifications",
            description="Posts a live test message to the configured Discord webhook.",
            live_safe=False,
            configured=discord_ready,
            config_detail=discord_detail,
            target=discord_target,
            runner=lambda runtime=runtime: _run_discord_check(runtime),
        ),
        DiagnosticCheck(
            key="telegram",
            label="Telegram",
            category="Notifications",
            description="Sends a live test message to the configured Telegram chat.",
            live_safe=False,
            configured=telegram_ready,
            config_detail=telegram_detail,
            target=telegram_target,
            runner=lambda runtime=runtime: _run_telegram_check(runtime),
        ),
        DiagnosticCheck(
            key="push",
            label="Push/mobile provider",
            category="Notifications",
            description="Sends a live test message through the configured push provider.",
            live_safe=False,
            configured=push_ready,
            config_detail=push_detail,
            target=push_target,
            runner=lambda runtime=runtime: _run_push_check(runtime),
        ),
        DiagnosticCheck(
            key="webhook",
            label="Generic webhook",
            category="Notifications",
            description="Posts a live test payload to the configured generic webhook.",
            live_safe=False,
            configured=webhook_ready,
            config_detail=webhook_detail,
            target=webhook_target,
            runner=lambda runtime=runtime: _run_webhook_check(runtime),
        ),
    ]
    return checks
|
||||
|
||||
|
||||
async def _execute_check(check: DiagnosticCheck) -> Dict[str, Any]:
    """Execute one diagnostic check and return its serialized result.

    The identity fields (key, label, category, description, target,
    live_safe) are always present. Unconfigured checks short-circuit with
    duration 0; configured checks run their async runner and report
    status/message/detail, with HTTP and generic failures mapped to
    status "down".

    Fix: the result dict was built three separate times (success, HTTP
    error, generic error), which made the branches easy to drift apart —
    the shared fields are now produced by one local builder.
    """

    def _result(**extra: Any) -> Dict[str, Any]:
        # Shared identity fields plus the branch-specific outcome fields.
        base: Dict[str, Any] = {
            "key": check.key,
            "label": check.label,
            "category": check.category,
            "description": check.description,
            "target": check.target,
            "live_safe": check.live_safe,
        }
        base.update(extra)
        return base

    if not check.configured:
        # Do not run anything; surface the configuration problem instead.
        return _result(
            configured=False,
            status=_config_status(check.config_detail),
            message=check.config_detail,
            checked_at=_now_iso(),
            duration_ms=0,
        )

    started = perf_counter()
    checked_at = _now_iso()
    try:
        payload = await check.runner()
        return _result(
            configured=True,
            status=_clean_text(payload.get("status"), "up"),
            message=_clean_text(payload.get("message"), "Check passed"),
            detail=payload.get("detail"),
            checked_at=checked_at,
            duration_ms=round((perf_counter() - started) * 1000, 1),
        )
    except httpx.HTTPError as exc:
        # Network-level failures get a condensed, human-readable message.
        return _result(
            configured=True,
            status="down",
            message=_http_error_detail(exc),
            checked_at=checked_at,
            duration_ms=round((perf_counter() - started) * 1000, 1),
        )
    except Exception as exc:
        # Any other runner failure is reported, never propagated, so one
        # broken check cannot abort the whole diagnostics run.
        return _result(
            configured=True,
            status="down",
            message=str(exc),
            checked_at=checked_at,
            duration_ms=round((perf_counter() - started) * 1000, 1),
        )
|
||||
|
||||
|
||||
def get_diagnostics_catalog() -> Dict[str, Any]:
    """Describe every diagnostic check without executing any of them.

    Returns:
        Dict with ``checks`` (per-check metadata including configuration
        status), ``categories`` (sorted unique category names), and a
        ``generated_at`` timestamp.
    """
    checks = _build_diagnostic_checks()
    # Idiom: list comprehension instead of the former manual append loop
    # (same order, same contents).
    items = [
        {
            "key": check.key,
            "label": check.label,
            "category": check.category,
            "description": check.description,
            "live_safe": check.live_safe,
            "target": check.target,
            "configured": check.configured,
            "config_status": "configured" if check.configured else _config_status(check.config_detail),
            "config_detail": "Ready to test." if check.configured else check.config_detail,
        }
        for check in checks
    ]
    categories = sorted({item["category"] for item in items})
    return {
        "checks": items,
        "categories": categories,
        "generated_at": _now_iso(),
    }
|
||||
|
||||
|
||||
async def run_diagnostics(keys: Optional[Sequence[str]] = None, recipient_email: Optional[str] = None) -> Dict[str, Any]:
    """Run diagnostic checks concurrently and summarise the results.

    Args:
        keys: Optional subset of check keys to run (case-insensitive);
            when empty or None, every check runs.
        recipient_email: Optional override recipient for the email check.
    """
    checks = _build_diagnostic_checks(recipient_email=recipient_email)
    wanted = {str(key).strip().lower() for key in (keys or []) if str(key).strip()}
    if wanted:
        checks = [candidate for candidate in checks if candidate.key.lower() in wanted]
    # All selected checks execute concurrently; each one handles its own
    # failures, so gather never raises here.
    results = await asyncio.gather(*(_execute_check(candidate) for candidate in checks))
    return {
        "results": results,
        "summary": _summary_from_results(results),
        "checked_at": _now_iso(),
    }
|
||||
@@ -394,6 +394,17 @@ def _send_email_sync(*, recipient_email: str, subject: str, body_text: str, body
|
||||
use_ssl = bool(runtime.magent_notify_email_use_ssl)
|
||||
if not host or not from_address:
|
||||
raise RuntimeError("SMTP email settings are incomplete.")
|
||||
logger.info(
|
||||
"smtp send started recipient=%s from=%s host=%s port=%s tls=%s ssl=%s auth=%s subject=%s",
|
||||
recipient_email,
|
||||
from_address,
|
||||
host,
|
||||
port,
|
||||
use_tls,
|
||||
use_ssl,
|
||||
bool(username and password),
|
||||
subject,
|
||||
)
|
||||
|
||||
message = EmailMessage()
|
||||
message["Subject"] = subject
|
||||
@@ -405,19 +416,26 @@ def _send_email_sync(*, recipient_email: str, subject: str, body_text: str, body
|
||||
|
||||
if use_ssl:
|
||||
with smtplib.SMTP_SSL(host, port, timeout=20) as smtp:
|
||||
logger.debug("smtp ssl connection opened host=%s port=%s", host, port)
|
||||
if username and password:
|
||||
smtp.login(username, password)
|
||||
logger.debug("smtp login succeeded host=%s username=%s", host, username)
|
||||
smtp.send_message(message)
|
||||
logger.info("smtp send accepted recipient=%s host=%s mode=ssl", recipient_email, host)
|
||||
return
|
||||
|
||||
with smtplib.SMTP(host, port, timeout=20) as smtp:
|
||||
logger.debug("smtp connection opened host=%s port=%s", host, port)
|
||||
smtp.ehlo()
|
||||
if use_tls:
|
||||
smtp.starttls()
|
||||
smtp.ehlo()
|
||||
logger.debug("smtp starttls negotiated host=%s port=%s", host, port)
|
||||
if username and password:
|
||||
smtp.login(username, password)
|
||||
logger.debug("smtp login succeeded host=%s username=%s", host, username)
|
||||
smtp.send_message(message)
|
||||
logger.info("smtp send accepted recipient=%s host=%s mode=plain", recipient_email, host)
|
||||
|
||||
|
||||
async def send_templated_email(
|
||||
@@ -461,3 +479,40 @@ async def send_templated_email(
|
||||
"recipient_email": resolved_email,
|
||||
"subject": rendered["subject"],
|
||||
}
|
||||
|
||||
|
||||
async def send_test_email(recipient_email: Optional[str] = None) -> Dict[str, str]:
    """Send a live SMTP test message and return recipient and subject.

    Raises:
        RuntimeError: if SMTP settings are incomplete or no valid
            recipient can be resolved.
    """
    ready, detail = smtp_email_config_ready()
    if not ready:
        raise RuntimeError(detail)

    runtime = get_runtime_settings()
    # Fall back to the configured from-address when no recipient is given.
    resolved_email = _normalize_email(recipient_email) or _normalize_email(
        runtime.magent_notify_email_from_address
    )
    if not resolved_email:
        raise RuntimeError("No valid recipient email is configured for the test message.")

    application_url = _normalize_display_text(runtime.magent_application_url, "Not configured")
    subject = f"{env_settings.app_name} email test"
    body_text = (
        f"This is a test email from {env_settings.app_name}.\n\n"
        f"Build: {BUILD_NUMBER}\n"
        f"Application URL: {application_url}\n"
    )
    # HTML variant escapes every interpolated value.
    body_html = (
        f"<h1>{html.escape(env_settings.app_name)} email test</h1>"
        f"<p>This is a test email from <strong>{html.escape(env_settings.app_name)}</strong>.</p>"
        f"<p><strong>Build:</strong> {html.escape(BUILD_NUMBER)}<br />"
        f"<strong>Application URL:</strong> {html.escape(application_url)}</p>"
    )

    # smtplib is blocking; run the send off the event loop.
    await asyncio.to_thread(
        _send_email_sync,
        recipient_email=resolved_email,
        subject=subject,
        body_text=body_text,
        body_html=body_html,
    )
    logger.info("SMTP test email sent: recipient=%s", resolved_email)
    return {"recipient_email": resolved_email, "subject": subject}
|
||||
|
||||
Reference in New Issue
Block a user