Build 2602261523: live updates, invite cleanup and nuclear resync
This commit is contained in:
@@ -1,5 +1,5 @@
|
||||
from datetime import datetime, timezone
|
||||
from typing import Dict, Any
|
||||
from typing import Dict, Any, Optional
|
||||
|
||||
from fastapi import Depends, HTTPException, status, Request
|
||||
from fastapi.security import OAuth2PasswordBearer
|
||||
@@ -38,7 +38,7 @@ def _extract_client_ip(request: Request) -> str:
|
||||
return "unknown"
|
||||
|
||||
|
||||
def get_current_user(token: str = Depends(oauth2_scheme), request: Request = None) -> Dict[str, Any]:
|
||||
def _load_current_user_from_token(token: str, request: Optional[Request] = None) -> Dict[str, Any]:
|
||||
try:
|
||||
payload = safe_decode_token(token)
|
||||
except TokenError as exc:
|
||||
@@ -73,7 +73,32 @@ def get_current_user(token: str = Depends(oauth2_scheme), request: Request = Non
|
||||
}
|
||||
|
||||
|
||||
def get_current_user(token: str = Depends(oauth2_scheme), request: Request = None) -> Dict[str, Any]:
    """Resolve the authenticated user for ordinary header-authenticated requests.

    FastAPI's ``oauth2_scheme`` dependency extracts the bearer token; decoding
    and user lookup are delegated to the shared helper.
    """
    resolved_user = _load_current_user_from_token(token, request)
    return resolved_user
|
||||
|
||||
|
||||
def get_current_user_event_stream(request: Request) -> Dict[str, Any]:
    """EventSource cannot send Authorization headers, so allow a query token here only."""
    # Prefer a standard Bearer header when the client is able to send one.
    token = None
    header_value = request.headers.get("authorization", "")
    if header_value.lower().startswith("bearer "):
        _, _, remainder = header_value.partition(" ")
        token = remainder.strip()
    # Fall back to ?access_token=... for EventSource clients.
    if not token:
        token = request.query_params.get("access_token")
    if not token:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Missing token")
    return _load_current_user_from_token(token, None)
|
||||
|
||||
|
||||
def require_admin(user: Dict[str, Any] = Depends(get_current_user)) -> Dict[str, Any]:
    """Dependency gate: pass the resolved user through only when they are an admin."""
    if user.get("role") == "admin":
        return user
    raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Admin access required")
|
||||
|
||||
|
||||
def require_admin_event_stream(
    user: Dict[str, Any] = Depends(get_current_user_event_stream),
) -> Dict[str, Any]:
    """Admin gate for SSE endpoints, where the token may arrive via query string."""
    if user.get("role") == "admin":
        return user
    raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Admin access required")
|
||||
|
||||
@@ -1,2 +1,2 @@
|
||||
BUILD_NUMBER = "2602261442"
|
||||
BUILD_NUMBER = "2602261523"
|
||||
CHANGELOG = '2026-01-22\\n- Initial commit\\n- Ignore build artifacts\\n- Update README\\n- Update README with Docker-first guide\\n\\n2026-01-23\\n- Fix cache titles via Jellyseerr media lookup\\n- Split search actions and improve download options\\n- Fallback manual grab to qBittorrent\\n- Hide header actions when signed out\\n- Add feedback form and webhook\\n- Fix cache titles and move feedback link\\n- Show available status on landing when in Jellyfin\\n- Add default branding assets when missing\\n- Use bundled branding assets\\n- Remove password fields from users page\\n- Add Docker Hub compose override\\n- Fix backend Dockerfile paths for root context\\n- Copy public assets into frontend image\\n- Use backend branding assets for logo and favicon\\n\\n2026-01-24\\n- Route grabs through Sonarr/Radarr only\\n- Document fix buttons in how-it-works\\n- Clarify how-it-works steps and fixes\\n- Map Prowlarr releases to Arr indexers for manual grab\\n- Improve request handling and qBittorrent categories\\n\\n2026-01-25\\n- Add site banner, build number, and changelog\\n- Automate build number tagging and sync\\n- Improve mobile header layout\\n- Move account actions into avatar menu\\n- Add user stats and activity tracking\\n- Add Jellyfin login cache and admin-only stats\\n- Tidy request sync controls\\n- Seed branding logo from bundled assets\\n- Serve bundled branding assets by default\\n- Harden request cache titles and cache-only reads\\n- Build 2501262041\\n\\n2026-01-26\\n- Fix cache title hydration\\n- Fix sync progress bar animation\\n\\n2026-01-27\\n- Add cache control artwork stats\\n- Improve cache stats performance (build 271261145)\\n- Fix backend cache stats import (build 271261149)\\n- Clarify request sync settings (build 271261159)\\n- Bump build number to 271261202\\n- Fix request titles in snapshots (build 271261219)\\n- Fix snapshot title fallback (build 271261228)\\n- Add cache load spinner (build 271261238)\\n- Bump build number (process 2) 
271261322\\n- Add service test buttons (build 271261335)\\n- Fallback to TMDB when artwork cache fails (build 271261524)\\n- Hydrate missing artwork from Jellyseerr (build 271261539)\\n\\n2026-01-29\\n- release: 2901262036\\n- release: 2901262044\\n- release: 2901262102\\n- Hardcode build number in backend\\n- Bake build number and changelog\\n- Update full changelog\\n- Tidy full changelog\\n- Build 2901262240: cache users\n\n2026-01-30\n- Merge backend and frontend into one container'
|
||||
|
||||
@@ -1991,6 +1991,29 @@ def clear_history() -> Dict[str, int]:
|
||||
return {"actions": actions, "snapshots": snapshots}
|
||||
|
||||
|
||||
def clear_user_objects_nuclear() -> Dict[str, int]:
    """Delete every non-admin user plus all invite and profile rows.

    Admin accounts are preserved, but their invite/profile references are
    nulled out first so the dependent rows can be deleted without dangling
    references. Runs inside a single connection context.

    Returns:
        Dict with the affected row counts: ``users``, ``invites``,
        ``profiles``, and ``adminsReset``.
    """
    with _connect() as conn:
        # Preserve admin accounts, but remove invite/profile references so profile rows can be deleted safely.
        admin_reset = conn.execute(
            """
            UPDATE users
            SET profile_id = NULL,
                invited_by_code = NULL,
                invited_at = NULL
            WHERE role = 'admin'
            """
        ).rowcount
        # Order matters: users go first, then the tables they referenced.
        users = conn.execute("DELETE FROM users WHERE role != 'admin'").rowcount
        invites = conn.execute("DELETE FROM signup_invites").rowcount
        profiles = conn.execute("DELETE FROM user_profiles").rowcount
        return {
            "users": users,
            "invites": invites,
            "profiles": profiles,
            "adminsReset": admin_reset,
        }
|
||||
|
||||
|
||||
def cleanup_history(days: int) -> Dict[str, int]:
|
||||
if days <= 0:
|
||||
return {"actions": 0, "snapshots": 0}
|
||||
|
||||
@@ -13,12 +13,13 @@ from .routers.requests import (
|
||||
run_daily_db_cleanup,
|
||||
)
|
||||
from .routers.auth import router as auth_router
|
||||
from .routers.admin import router as admin_router
|
||||
from .routers.admin import router as admin_router, events_router as admin_events_router
|
||||
from .routers.images import router as images_router
|
||||
from .routers.branding import router as branding_router
|
||||
from .routers.status import router as status_router
|
||||
from .routers.feedback import router as feedback_router
|
||||
from .routers.site import router as site_router
|
||||
from .routers.events import router as events_router
|
||||
from .services.jellyfin_sync import run_daily_jellyfin_sync
|
||||
from .logging_config import configure_logging
|
||||
from .runtime import get_runtime_settings
|
||||
@@ -53,8 +54,10 @@ async def startup() -> None:
|
||||
app.include_router(requests_router)
|
||||
app.include_router(auth_router)
|
||||
app.include_router(admin_router)
|
||||
app.include_router(admin_events_router)
|
||||
app.include_router(images_router)
|
||||
app.include_router(branding_router)
|
||||
app.include_router(status_router)
|
||||
app.include_router(feedback_router)
|
||||
app.include_router(site_router)
|
||||
app.include_router(events_router)
|
||||
|
||||
@@ -1,15 +1,18 @@
|
||||
from typing import Any, Dict, List, Optional
|
||||
from datetime import datetime, timedelta, timezone
|
||||
import asyncio
|
||||
import ipaddress
|
||||
import json
|
||||
import os
|
||||
import secrets
|
||||
import sqlite3
|
||||
import string
|
||||
from urllib.parse import urlparse, urlunparse
|
||||
|
||||
from fastapi import APIRouter, HTTPException, Depends, UploadFile, File
|
||||
from fastapi import APIRouter, HTTPException, Depends, UploadFile, File, Request
|
||||
from fastapi.responses import StreamingResponse
|
||||
|
||||
from ..auth import require_admin, get_current_user
|
||||
from ..auth import require_admin, get_current_user, require_admin_event_stream
|
||||
from ..config import settings as env_settings
|
||||
from ..db import (
|
||||
delete_setting,
|
||||
@@ -37,6 +40,7 @@ from ..db import (
|
||||
vacuum_db,
|
||||
clear_requests_cache,
|
||||
clear_history,
|
||||
clear_user_objects_nuclear,
|
||||
cleanup_history,
|
||||
update_request_cache_title,
|
||||
repair_request_cache_titles,
|
||||
@@ -65,6 +69,7 @@ from ..services.user_cache import (
|
||||
match_jellyseerr_user_id,
|
||||
save_jellyfin_users_cache,
|
||||
save_jellyseerr_users_cache,
|
||||
clear_user_import_caches,
|
||||
)
|
||||
import logging
|
||||
from ..logging_config import configure_logging
|
||||
@@ -72,6 +77,7 @@ from ..routers import requests as requests_router
|
||||
from ..routers.branding import save_branding_image
|
||||
|
||||
router = APIRouter(prefix="/admin", tags=["admin"], dependencies=[Depends(require_admin)])
|
||||
events_router = APIRouter(prefix="/admin/events", tags=["admin"])
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
SENSITIVE_KEYS = {
|
||||
@@ -130,6 +136,36 @@ SETTING_KEYS: List[str] = [
|
||||
"site_banner_tone",
|
||||
]
|
||||
|
||||
|
||||
def _admin_live_state_snapshot() -> Dict[str, Any]:
    """Collect the current admin dashboard live state as one SSE-ready payload."""
    snapshot: Dict[str, Any] = {"type": "admin_live_state"}
    snapshot["requestsSync"] = requests_router.get_requests_sync_state()
    snapshot["artworkPrefetch"] = requests_router.get_artwork_prefetch_state()
    return snapshot
|
||||
|
||||
|
||||
def _sse_encode(data: Dict[str, Any]) -> str:
|
||||
payload = json.dumps(data, ensure_ascii=True, separators=(",", ":"), default=str)
|
||||
return f"data: {payload}\n\n"
|
||||
|
||||
|
||||
def _read_log_tail_lines(lines: int) -> List[str]:
    """Return the last *lines* lines (clamped to 1..1000) of the configured log file.

    Raises:
        HTTPException: 400 when no log file is configured, 404 when it does not exist.
    """
    runtime = get_runtime_settings()
    log_file = runtime.log_file
    if not log_file:
        raise HTTPException(status_code=400, detail="Log file not configured")
    # Relative paths are resolved against the current working directory.
    if not os.path.isabs(log_file):
        log_file = os.path.join(os.getcwd(), log_file)
    if not os.path.exists(log_file):
        raise HTTPException(status_code=404, detail="Log file not found")
    lines = max(1, min(lines, 1000))
    from collections import deque

    # deque with maxlen keeps only the final N lines without loading the whole file into a list.
    with open(log_file, "r", encoding="utf-8", errors="replace") as handle:
        tail = deque(handle, maxlen=lines)
    return list(tail)
|
||||
|
||||
def _normalize_username(value: str) -> str:
|
||||
normalized = value.strip().lower()
|
||||
if "@" in normalized:
|
||||
@@ -608,22 +644,65 @@ async def requests_sync_status() -> Dict[str, Any]:
|
||||
return {"status": "ok", "sync": requests_router.get_requests_sync_state()}
|
||||
|
||||
|
||||
@events_router.get("/stream")
async def admin_events_stream(
    request: Request,
    include_logs: bool = False,
    log_lines: int = 200,
    _: Dict[str, Any] = Depends(require_admin_event_stream),
) -> StreamingResponse:
    """Admin SSE endpoint: stream live dashboard state, optionally with log tails.

    Polls roughly once per second; emits a snapshot only when the serialized
    state changed, and a comment ping when idle, until the client disconnects.

    Args:
        include_logs: when true, attach a log-tail payload refreshed every ~5s.
        log_lines: requested tail length (clamped to 1..1000 by the helper).
    """
    async def event_generator():
        # Advise client reconnect timing once per stream.
        yield "retry: 2000\n\n"
        last_snapshot: Optional[str] = None
        heartbeat_counter = 0
        # Start at the threshold so the first iteration refreshes logs immediately.
        log_refresh_counter = 5 if include_logs else 0
        latest_logs_payload: Optional[Dict[str, Any]] = None
        while True:
            if await request.is_disconnected():
                break
            snapshot_payload = _admin_live_state_snapshot()
            if include_logs:
                # Logs refresh every 5th iteration (~5s at the 1s poll cadence).
                log_refresh_counter += 1
                if log_refresh_counter >= 5:
                    log_refresh_counter = 0
                    try:
                        latest_logs_payload = {
                            "lines": _read_log_tail_lines(log_lines),
                            "count": max(1, min(int(log_lines or 200), 1000)),
                        }
                    except HTTPException as exc:
                        latest_logs_payload = {
                            "error": str(exc.detail) if exc.detail else "Could not read logs",
                        }
                    except Exception as exc:
                        latest_logs_payload = {"error": str(exc)}
                # Between refreshes, the last payload is re-attached unchanged.
                snapshot_payload["logs"] = latest_logs_payload

            snapshot = _sse_encode(snapshot_payload)
            if snapshot != last_snapshot:
                last_snapshot = snapshot
                yield snapshot
                heartbeat_counter = 0
            else:
                heartbeat_counter += 1
                # Keep the stream alive through proxies even when state is unchanged.
                if heartbeat_counter >= 15:
                    yield ": ping\n\n"
                    heartbeat_counter = 0
            await asyncio.sleep(1.0)

    # X-Accel-Buffering disables proxy buffering so events are delivered promptly.
    headers = {
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
        "X-Accel-Buffering": "no",
    }
    return StreamingResponse(event_generator(), media_type="text/event-stream", headers=headers)
|
||||
|
||||
|
||||
@router.get("/logs")
async def read_logs(lines: int = 200) -> Dict[str, Any]:
    """Return the last *lines* lines of the configured log file.

    Delegates to ``_read_log_tail_lines`` (shared with the admin SSE stream) so
    path resolution, clamping to 1..1000, and error handling live in one place.
    The span previously contained the inlined pre-refactor body followed by the
    delegating return, leaving the second return unreachable; only the
    delegation is kept.

    Raises:
        HTTPException: 400 when no log file is configured, 404 when it is missing.
    """
    return {"lines": _read_log_tail_lines(lines)}
|
||||
|
||||
|
||||
@router.get("/requests/cache")
|
||||
@@ -689,9 +768,23 @@ async def repair_database() -> Dict[str, Any]:
|
||||
async def flush_database() -> Dict[str, Any]:
    """Nuclear resync: wipe cached requests, history, user objects and import caches.

    Admin accounts survive (``clear_user_objects_nuclear`` preserves them); the
    last-sync marker is dropped so the next requests sync runs from scratch.
    The span previously contained the pre-refactor ``logger.warning``/``return``
    pair above the expanded versions, making the expanded logging and return
    unreachable; only the post-refactor statements are kept.

    Returns:
        Summary dict with per-subsystem clear counts.
    """
    cleared = clear_requests_cache()
    history = clear_history()
    user_objects = clear_user_objects_nuclear()
    user_caches = clear_user_import_caches()
    # Force the next requests sync to behave like a first-time full sync.
    delete_setting("requests_sync_last_at")
    logger.warning(
        "Database flush executed: requests_cache=%s history=%s user_objects=%s user_caches=%s",
        cleared,
        history,
        user_objects,
        user_caches,
    )
    return {
        "status": "ok",
        "requestsCleared": cleared,
        "historyCleared": history,
        "userObjectsCleared": user_objects,
        "userCachesCleared": user_caches,
    }
|
||||
|
||||
|
||||
@router.post("/maintenance/cleanup")
|
||||
|
||||
112
backend/app/routers/events.py
Normal file
112
backend/app/routers/events.py
Normal file
@@ -0,0 +1,112 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import time
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, Request
|
||||
from fastapi.responses import StreamingResponse
|
||||
|
||||
from ..auth import get_current_user_event_stream
|
||||
from . import requests as requests_router
|
||||
from .status import services_status
|
||||
|
||||
router = APIRouter(prefix="/events", tags=["events"])
|
||||
|
||||
|
||||
def _sse_json(payload: Dict[str, Any]) -> str:
|
||||
return f"data: {json.dumps(payload, ensure_ascii=True, separators=(',', ':'), default=str)}\n\n"
|
||||
|
||||
|
||||
@router.get("/stream")
async def events_stream(
    request: Request,
    recent_days: int = 90,
    user: Dict[str, Any] = Depends(get_current_user_event_stream),
) -> StreamingResponse:
    """Home-page SSE stream: recent requests plus service status.

    Recent requests refresh every ~15s and service status every ~30s (polled on
    a 1s loop); each is re-emitted only when its serialized payload changed.
    Admins receive a larger recent window (50 items vs 6).
    """
    # Clamp the lookback window to 0..3650 days.
    recent_days = max(0, min(int(recent_days or 90), 3650))
    recent_take = 50 if user.get("role") == "admin" else 6

    async def event_generator():
        # Advise client reconnect timing once per stream.
        yield "retry: 2000\n\n"
        last_recent_signature: Optional[str] = None
        last_services_signature: Optional[str] = None
        # Zero deadlines mean both sections fire on the first iteration.
        next_recent_at = 0.0
        next_services_at = 0.0
        heartbeat_counter = 0

        while True:
            if await request.is_disconnected():
                break

            now = time.monotonic()
            sent_any = False

            if now >= next_recent_at:
                next_recent_at = now + 15.0
                try:
                    recent_payload = await requests_router.recent_requests(
                        take=recent_take,
                        skip=0,
                        days=recent_days,
                        user=user,
                    )
                    # Defensive: only trust dict payloads with a list of results.
                    results = recent_payload.get("results") if isinstance(recent_payload, dict) else []
                    payload = {
                        "type": "home_recent",
                        "ts": datetime.now(timezone.utc).isoformat(),
                        "days": recent_days,
                        "results": results if isinstance(results, list) else [],
                    }
                except Exception as exc:
                    # Errors are surfaced to the client rather than killing the stream.
                    payload = {
                        "type": "home_recent",
                        "ts": datetime.now(timezone.utc).isoformat(),
                        "days": recent_days,
                        "error": str(exc),
                    }
                # Suppress duplicate frames by comparing serialized payloads.
                signature = json.dumps(payload, ensure_ascii=True, separators=(",", ":"), default=str)
                if signature != last_recent_signature:
                    last_recent_signature = signature
                    yield _sse_json(payload)
                    sent_any = True

            if now >= next_services_at:
                next_services_at = now + 30.0
                try:
                    status_payload = await services_status()
                    payload = {
                        "type": "home_services",
                        "ts": datetime.now(timezone.utc).isoformat(),
                        "status": status_payload,
                    }
                except Exception as exc:
                    payload = {
                        "type": "home_services",
                        "ts": datetime.now(timezone.utc).isoformat(),
                        "error": str(exc),
                    }
                signature = json.dumps(payload, ensure_ascii=True, separators=(",", ":"), default=str)
                if signature != last_services_signature:
                    last_services_signature = signature
                    yield _sse_json(payload)
                    sent_any = True

            if sent_any:
                heartbeat_counter = 0
            else:
                heartbeat_counter += 1
                # Keep idle connections alive through proxies (~15s cadence).
                if heartbeat_counter >= 15:
                    yield ": ping\n\n"
                    heartbeat_counter = 0

            await asyncio.sleep(1.0)

    # X-Accel-Buffering disables proxy buffering so events arrive promptly.
    headers = {
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
        "X-Accel-Buffering": "no",
    }
    return StreamingResponse(event_generator(), media_type="text/event-stream", headers=headers)
|
||||
@@ -3,7 +3,7 @@ import logging
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from ..db import get_setting, set_setting
|
||||
from ..db import get_setting, set_setting, delete_setting
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -142,3 +142,17 @@ def save_jellyfin_users_cache(users: List[Dict[str, Any]]) -> List[Dict[str, Any
|
||||
|
||||
def get_cached_jellyfin_users(max_age_minutes: int = 1440) -> Optional[List[Dict[str, Any]]]:
    """Return cached Jellyfin users, or None when the cache exceeds *max_age_minutes*."""
    cache_key, cached_at_key = JELLYFIN_CACHE_KEY, JELLYFIN_CACHE_AT_KEY
    return _load_cached_users(cache_key, cached_at_key, max_age_minutes)
|
||||
|
||||
|
||||
def clear_user_import_caches() -> Dict[str, int]:
    """Drop the Jellyseerr and Jellyfin user-import caches (payloads and timestamps)."""
    cache_keys = (
        JELLYSEERR_CACHE_KEY,
        JELLYSEERR_CACHE_AT_KEY,
        JELLYFIN_CACHE_KEY,
        JELLYFIN_CACHE_AT_KEY,
    )
    cleared = 0
    for cache_key in cache_keys:
        delete_setting(cache_key)
        cleared += 1
    logger.debug("Cleared user import cache keys: %s", cleared)
    return {"settingsKeysCleared": cleared}
|
||||
|
||||
Reference in New Issue
Block a user