Magent/backend/app/services/user_cache.py
import json
import logging
from datetime import datetime, timezone, timedelta
from typing import Any, Dict, List, Optional

from ..db import get_setting, set_setting

logger = logging.getLogger(__name__)

JELLYSEERR_CACHE_KEY = "jellyseerr_users_cache"
JELLYSEERR_CACHE_AT_KEY = "jellyseerr_users_cached_at"
JELLYFIN_CACHE_KEY = "jellyfin_users_cache"
JELLYFIN_CACHE_AT_KEY = "jellyfin_users_cached_at"


def _now_iso() -> str:
    return datetime.now(timezone.utc).isoformat()


def _parse_iso(value: Optional[str]) -> Optional[datetime]:
    if not value:
        return None
    try:
        parsed = datetime.fromisoformat(value)
    except ValueError:
        return None
    if parsed.tzinfo is None:
        # Treat naive timestamps as UTC so age comparisons stay consistent.
        parsed = parsed.replace(tzinfo=timezone.utc)
    return parsed


def _cache_is_fresh(cached_at: Optional[str], max_age_minutes: int) -> bool:
    parsed = _parse_iso(cached_at)
    if not parsed:
        return False
    age = datetime.now(timezone.utc) - parsed
    return age <= timedelta(minutes=max_age_minutes)


def _load_cached_users(
    cache_key: str, cache_at_key: str, max_age_minutes: int
) -> Optional[List[Dict[str, Any]]]:
    cached_at = get_setting(cache_at_key)
    if not _cache_is_fresh(cached_at, max_age_minutes):
        return None
    raw = get_setting(cache_key)
    if not raw:
        return None
    try:
        data = json.loads(raw)
    except (TypeError, json.JSONDecodeError):
        return None
    if isinstance(data, list):
        return [item for item in data if isinstance(item, dict)]
    return None


def _save_cached_users(cache_key: str, cache_at_key: str, users: List[Dict[str, Any]]) -> None:
    payload = json.dumps(users, ensure_ascii=True)
    set_setting(cache_key, payload)
    set_setting(cache_at_key, _now_iso())


def _normalized_handles(value: Any) -> List[str]:
    if not isinstance(value, str):
        return []
    normalized = value.strip().lower()
    if not normalized:
        return []
    handles = [normalized]
    if "@" in normalized:
        # For email-like values, also match on the local part before the "@".
        handles.append(normalized.split("@", 1)[0])
    return list(dict.fromkeys(handles))


def build_jellyseerr_candidate_map(users: List[Dict[str, Any]]) -> Dict[str, int]:
    candidate_to_id: Dict[str, int] = {}
    for user in users:
        if not isinstance(user, dict):
            continue
        user_id = user.get("id") or user.get("userId") or user.get("Id")
        try:
            user_id = int(user_id)
        except (TypeError, ValueError):
            continue
        for key in ("username", "email", "displayName", "name"):
            for handle in _normalized_handles(user.get(key)):
                candidate_to_id.setdefault(handle, user_id)
    return candidate_to_id


def match_jellyseerr_user_id(
    username: str, candidate_map: Dict[str, int]
) -> Optional[int]:
    for handle in _normalized_handles(username):
        matched = candidate_map.get(handle)
        if matched is not None:
            return matched
    return None
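

# Illustrative sketch (sample data, not from the real services): given cached users
# [{"id": 7, "username": "Alice", "email": "alice@example.com"}],
# build_jellyseerr_candidate_map() produces {"alice": 7, "alice@example.com": 7},
# so match_jellyseerr_user_id("Alice@example.com", candidate_map) returns 7.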


def save_jellyseerr_users_cache(users: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    normalized: List[Dict[str, Any]] = []
    for user in users:
        if not isinstance(user, dict):
            continue
        normalized.append(
            {
                "id": user.get("id") or user.get("userId") or user.get("Id"),
                "email": user.get("email"),
                "username": user.get("username"),
                "displayName": user.get("displayName"),
                "name": user.get("name"),
            }
        )
    _save_cached_users(JELLYSEERR_CACHE_KEY, JELLYSEERR_CACHE_AT_KEY, normalized)
    logger.debug("Cached Jellyseerr users: %s", len(normalized))
    return normalized


def get_cached_jellyseerr_users(max_age_minutes: int = 1440) -> Optional[List[Dict[str, Any]]]:
    return _load_cached_users(JELLYSEERR_CACHE_KEY, JELLYSEERR_CACHE_AT_KEY, max_age_minutes)


def save_jellyfin_users_cache(users: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    normalized: List[Dict[str, Any]] = []
    for user in users:
        if not isinstance(user, dict):
            continue
        normalized.append(
            {
                "id": user.get("Id"),
                "name": user.get("Name"),
                "hasPassword": user.get("HasPassword"),
                "lastLoginDate": user.get("LastLoginDate"),
            }
        )
    _save_cached_users(JELLYFIN_CACHE_KEY, JELLYFIN_CACHE_AT_KEY, normalized)
    logger.debug("Cached Jellyfin users: %s", len(normalized))
    return normalized


def get_cached_jellyfin_users(max_age_minutes: int = 1440) -> Optional[List[Dict[str, Any]]]:
    return _load_cached_users(JELLYFIN_CACHE_KEY, JELLYFIN_CACHE_AT_KEY, max_age_minutes)
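

# Example usage (a minimal sketch; `fetch_jellyseerr_users` stands in for a
# hypothetical Jellyseerr API client call and is not part of this module):
#
#     users = get_cached_jellyseerr_users(max_age_minutes=60)
#     if users is None:
#         users = save_jellyseerr_users_cache(fetch_jellyseerr_users())
#     candidate_map = build_jellyseerr_candidate_map(users)
#     jellyseerr_id = match_jellyseerr_user_id("alice@example.com", candidate_map)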