Redis caching patterns for FastAPI: cache-aside, distributed locks, sessions, rate limiting, pub/sub, and semantic (vector) caching with pgvector. Use for: response caching, session storage, rate limiting, real-time events, leaderboards. Triggers: cache, Redis, caching, invalidation, session, rate limit, pub/sub, TTL, lock.
import json
from redis.asyncio import Redis
from fastapi import Depends
async def get_cached_or_fetch(
    key: str,
    fetch_fn,
    redis: Redis = Depends(get_redis),
    ttl: int = 300,
):
    """Cache-aside: return the JSON payload at *key*, computing it on a miss.

    Args:
        key: Redis key holding the JSON-serialized value.
        fetch_fn: zero-argument async callable that produces the value
            when the key is absent.
        redis: async Redis client. NOTE: the ``Depends`` default only
            resolves when this function is itself wired as a FastAPI
            dependency; direct callers must pass a client explicitly.
        ttl: expiry in seconds applied to newly cached values.

    Returns:
        The deserialized cached value, or the freshly fetched value.
    """
    cached = await redis.get(key)
    # Explicit None check: a bare truthiness test would misread falsy
    # payloads (e.g. empty bytes) as a cache miss and refetch every call.
    if cached is not None:
        return json.loads(cached)
    value = await fetch_fn()
    # default=str silently stringifies non-JSON types (datetime, UUID);
    # callers that need round-trip fidelity should serialize upstream
    # (e.g. Pydantic model_dump_json) before caching.
    await redis.setex(key, ttl, json.dumps(value, default=str))
    return value
import uuid
from contextlib import asynccontextmanager
from redis.asyncio import Redis
@asynccontextmanager
async def redis_lock(redis: Redis, lock_key: str, timeout: int = 10):
    """Hold a best-effort distributed lock for the duration of the block.

    Raises RuntimeError immediately when the lock is already held, letting
    callers back off instead of stampeding an expensive recomputation.
    """
    owner = str(uuid.uuid4())
    # SET NX EX: succeeds only when the key is absent, with auto-expiry
    # so a crashed holder cannot wedge the lock forever.
    if not await redis.set(lock_key, owner, nx=True, ex=timeout):
        raise RuntimeError(f"Could not acquire lock: {lock_key}")
    try:
        yield
    finally:
        # Compare-and-delete in Lua so we never free a lock that expired
        # and was since re-acquired by another owner.
        release = "if redis.call('get', KEYS[1]) == ARGV[1] then return redis.call('del', KEYS[1]) else return 0 end"
        await redis.eval(release, 1, lock_key, owner)
Best practices:
- Prefer `model.model_dump_json()` over `json.dumps(model)` — unserializable fields (datetime, UUID) silently corrupt the payload unless `default=str` is supplied.
- Add TTL jitter (`ttl + random.randint(0, 30)`) to prevent mass simultaneous expiry of keys cached at the same time.
- Create a single `redis.asyncio.Redis` instance at startup with `max_connections=20`; never create a new connection per request.

See also: `../database/SKILL.md`, `../monitoring/SKILL.md`, `references/redis-patterns.md`, `references/semantic-cache.md`