release: 2901262036
@@ -113,6 +113,10 @@ def _normalize_username(value: Any) -> Optional[str]:
     if not isinstance(value, str):
         return None
     normalized = value.strip().lower()
+    if not normalized:
+        return None
+    if "@" in normalized:
+        normalized = normalized.split("@", 1)[0]
     return normalized if normalized else None
 
 
@@ -164,6 +168,21 @@ def _normalize_requested_by(request_data: Any) -> Optional[str]:
         normalized = normalized.split("@", 1)[0]
     return normalized
 
+def _extract_requested_by_id(request_data: Any) -> Optional[int]:
+    if not isinstance(request_data, dict):
+        return None
+    requested_by = request_data.get("requestedBy") or request_data.get("requestedByUser")
+    if isinstance(requested_by, dict):
+        for key in ("id", "userId", "Id"):
+            value = requested_by.get(key)
+            if value is None:
+                continue
+            try:
+                return int(value)
+            except (TypeError, ValueError):
+                continue
+    return None
+
 
 def _format_upstream_error(service: str, exc: httpx.HTTPStatusError) -> str:
     response = exc.response
@@ -206,6 +225,7 @@ def _parse_request_payload(item: Dict[str, Any]) -> Dict[str, Any]:
     updated_at = item.get("updatedAt") or created_at
     requested_by = _request_display_name(item)
     requested_by_norm = _normalize_requested_by(item)
+    requested_by_id = _extract_requested_by_id(item)
     return {
         "request_id": item.get("id"),
         "media_id": media_id,
@@ -216,6 +236,7 @@ def _parse_request_payload(item: Dict[str, Any]) -> Dict[str, Any]:
         "year": year,
         "requested_by": requested_by,
         "requested_by_norm": requested_by_norm,
+        "requested_by_id": requested_by_id,
         "created_at": created_at,
         "updated_at": updated_at,
     }
@@ -577,6 +598,7 @@ async def _sync_all_requests(client: JellyseerrClient) -> int:
             year=payload.get("year"),
             requested_by=payload.get("requested_by"),
             requested_by_norm=payload.get("requested_by_norm"),
+            requested_by_id=payload.get("requested_by_id"),
             created_at=payload.get("created_at"),
             updated_at=payload.get("updated_at"),
             payload_json=payload_json,
@@ -714,6 +736,7 @@ async def _sync_delta_requests(client: JellyseerrClient) -> int:
             year=payload.get("year"),
             requested_by=payload.get("requested_by"),
             requested_by_norm=payload.get("requested_by_norm"),
+            requested_by_id=payload.get("requested_by_id"),
             created_at=payload.get("created_at"),
             updated_at=payload.get("updated_at"),
             payload_json=payload_json,
@@ -843,6 +866,7 @@ async def _prefetch_artwork_cache(
             year=parsed.get("year"),
             requested_by=parsed.get("requested_by"),
             requested_by_norm=parsed.get("requested_by_norm"),
+            requested_by_id=parsed.get("requested_by_id"),
             created_at=parsed.get("created_at"),
             updated_at=parsed.get("updated_at"),
             payload_json=json.dumps(payload, ensure_ascii=True),
@@ -985,19 +1009,39 @@ def _recent_cache_stale() -> bool:
     return (datetime.now(timezone.utc) - parsed).total_seconds() > RECENT_CACHE_TTL_SECONDS
 
 
+def _parse_iso_datetime(value: Optional[str]) -> Optional[datetime]:
+    if not value:
+        return None
+    try:
+        parsed = datetime.fromisoformat(value.replace("Z", "+00:00"))
+    except ValueError:
+        return None
+    if parsed.tzinfo is None:
+        return parsed.replace(tzinfo=timezone.utc)
+    return parsed
+
+
 def _get_recent_from_cache(
     requested_by_norm: Optional[str],
+    requested_by_id: Optional[int],
     limit: int,
     offset: int,
     since_iso: Optional[str],
 ) -> List[Dict[str, Any]]:
     items = _recent_cache.get("items") or []
     results = []
+    since_dt = _parse_iso_datetime(since_iso)
     for item in items:
-        if requested_by_norm and item.get("requested_by_norm") != requested_by_norm:
-            continue
-        if since_iso and item.get("created_at") and item["created_at"] < since_iso:
+        if requested_by_id is not None:
+            if item.get("requested_by_id") != requested_by_id:
+                continue
+        elif requested_by_norm and item.get("requested_by_norm") != requested_by_norm:
             continue
+        if since_dt:
+            candidate = item.get("created_at") or item.get("updated_at")
+            item_dt = _parse_iso_datetime(candidate)
+            if not item_dt or item_dt < since_dt:
+                continue
         results.append(item)
     return results[offset : offset + limit]
 
@@ -1455,13 +1499,15 @@ async def recent_requests(
         raise HTTPException(status_code=502, detail=str(exc)) from exc
 
     username_norm = _normalize_username(user.get("username", ""))
+    requested_by_id = user.get("jellyseerr_user_id")
     requested_by = None if user.get("role") == "admin" else username_norm
+    requested_by_id = None if user.get("role") == "admin" else requested_by_id
     since_iso = None
     if days > 0:
         since_iso = (datetime.now(timezone.utc) - timedelta(days=days)).isoformat()
     if _recent_cache_stale():
         _refresh_recent_cache_from_db()
-    rows = _get_recent_from_cache(requested_by, take, skip, since_iso)
+    rows = _get_recent_from_cache(requested_by, requested_by_id, take, skip, since_iso)
     cache_mode = (runtime.artwork_cache_mode or "remote").lower()
     allow_title_hydrate = False
     allow_artwork_hydrate = client.configured()
@@ -1537,6 +1583,7 @@ async def recent_requests(
             year=year or payload.get("year"),
             requested_by=payload.get("requested_by"),
             requested_by_norm=payload.get("requested_by_norm"),
+            requested_by_id=payload.get("requested_by_id"),
             created_at=payload.get("created_at"),
             updated_at=payload.get("updated_at"),
             payload_json=json.dumps(details, ensure_ascii=True),
@@ -1584,6 +1631,7 @@ async def recent_requests(
             year=payload.get("year"),
             requested_by=payload.get("requested_by"),
             requested_by_norm=payload.get("requested_by_norm"),
+            requested_by_id=payload.get("requested_by_id"),
             created_at=payload.get("created_at"),
             updated_at=payload.get("updated_at"),
             payload_json=json.dumps(details, ensure_ascii=True),
@@ -1656,8 +1704,14 @@ async def search_requests(
         status_label = _status_label(status)
     elif isinstance(media_info_id, int):
         username_norm = _normalize_username(user.get("username", ""))
+        requested_by_id = user.get("jellyseerr_user_id")
         requested_by = None if user.get("role") == "admin" else username_norm
-        cached = get_cached_request_by_media_id(media_info_id, requested_by_norm=requested_by)
+        requested_by_id = None if user.get("role") == "admin" else requested_by_id
+        cached = get_cached_request_by_media_id(
+            media_info_id,
+            requested_by_norm=requested_by,
+            requested_by_id=requested_by_id,
+        )
         if cached:
             request_id = cached.get("request_id")
             status = cached.get("status")
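
For reference, a runnable standalone sketch of the two helpers this change leans on, copied from the hunks above; the sample payloads in the demo are assumptions and are not taken from the commit:

from typing import Any, Optional


def _normalize_username(value: Any) -> Optional[str]:
    # Lower-case, trim, and strip an email domain so "  Jane@Example.com  " -> "jane".
    if not isinstance(value, str):
        return None
    normalized = value.strip().lower()
    if not normalized:
        return None
    if "@" in normalized:
        normalized = normalized.split("@", 1)[0]
    return normalized if normalized else None


def _extract_requested_by_id(request_data: Any) -> Optional[int]:
    # Pull a numeric requester id out of "requestedBy" or "requestedByUser", if any.
    if not isinstance(request_data, dict):
        return None
    requested_by = request_data.get("requestedBy") or request_data.get("requestedByUser")
    if isinstance(requested_by, dict):
        for key in ("id", "userId", "Id"):
            value = requested_by.get(key)
            if value is None:
                continue
            try:
                return int(value)
            except (TypeError, ValueError):
                continue
    return None


if __name__ == "__main__":
    print(_normalize_username("  Jane@Example.com  "))                    # jane
    # Hypothetical request items; real Jellyseerr payloads carry more fields.
    print(_extract_requested_by_id({"requestedBy": {"id": "7"}}))         # 7
    print(_extract_requested_by_id({"requestedByUser": {"userId": 12}}))  # 12
    print(_extract_requested_by_id({"requestedBy": {"id": "n/a"}}))       # None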
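
And a minimal sketch of the reworked recent-requests cache filter, with _recent_cache stubbed out since its real shape is not shown in this commit; it illustrates that a numeric requested_by_id, when present, takes precedence over the normalized username, and that the "since" cutoff is now compared as timezone-aware datetimes instead of raw ISO strings:

from datetime import datetime, timezone
from typing import Any, Dict, List, Optional

# Stand-in for the module-level cache; the commit only shows that it exposes
# an "items" list of parsed request payloads.
_recent_cache: Dict[str, Any] = {
    "items": [
        {"request_id": 1, "requested_by_norm": "jane", "requested_by_id": 7,
         "created_at": "2024-05-01T12:00:00Z"},
        {"request_id": 2, "requested_by_norm": "jane", "requested_by_id": None,
         "created_at": "2024-05-03T12:00:00Z"},
        {"request_id": 3, "requested_by_norm": "bob", "requested_by_id": 9,
         "created_at": "2024-04-01T12:00:00Z"},
    ]
}


def _parse_iso_datetime(value: Optional[str]) -> Optional[datetime]:
    if not value:
        return None
    try:
        parsed = datetime.fromisoformat(value.replace("Z", "+00:00"))
    except ValueError:
        return None
    if parsed.tzinfo is None:
        return parsed.replace(tzinfo=timezone.utc)
    return parsed


def _get_recent_from_cache(
    requested_by_norm: Optional[str],
    requested_by_id: Optional[int],
    limit: int,
    offset: int,
    since_iso: Optional[str],
) -> List[Dict[str, Any]]:
    items = _recent_cache.get("items") or []
    results = []
    since_dt = _parse_iso_datetime(since_iso)
    for item in items:
        # The numeric requester id wins when available; the normalized
        # username is only a fallback filter.
        if requested_by_id is not None:
            if item.get("requested_by_id") != requested_by_id:
                continue
        elif requested_by_norm and item.get("requested_by_norm") != requested_by_norm:
            continue
        if since_dt:
            candidate = item.get("created_at") or item.get("updated_at")
            item_dt = _parse_iso_datetime(candidate)
            if not item_dt or item_dt < since_dt:
                continue
        results.append(item)
    return results[offset : offset + limit]


if __name__ == "__main__":
    # Filtering by id matches only request 1; the username fallback matches 1 and 2.
    print([r["request_id"] for r in _get_recent_from_cache("jane", 7, 10, 0, None)])     # [1]
    print([r["request_id"] for r in _get_recent_from_cache("jane", None, 10, 0, None)])  # [1, 2]
    # The since cutoff drops anything older than the given ISO timestamp.
    since = "2024-05-02T00:00:00Z"
    print([r["request_id"] for r in _get_recent_from_cache(None, None, 10, 0, since)])   # [2]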