Redis is the Swiss Army knife of backend infrastructure: cache, session store, rate limiter, pub/sub broker, leaderboard engine, and job queue. Claude Code generates Redis patterns that are production-ready — proper TTLs, atomic operations via Lua scripts, and connection pooling — rather than toy examples that break under load.
This guide covers Redis with Claude Code: caching, sessions, rate limiting, pub/sub, and sorted set patterns.
Redis Client Setup
Set up Redis in this Node.js project.
I need: connection pooling, reconnect logic, and separate
connections for subscribe/publish operations.
// src/lib/redis.ts
import { Redis } from 'ioredis';
/**
 * Build a configured ioredis client, tagged with `name` for log messages.
 *
 * Connection details come from REDIS_HOST/PORT/PASSWORD/DB env vars.
 * Failed commands retry up to 3 times; reconnects back off linearly
 * (50ms per attempt) capped at 2 seconds.
 */
function createRedisClient(name: string): Redis {
  const host = process.env.REDIS_HOST ?? 'localhost';
  const port = parseInt(process.env.REDIS_PORT ?? '6379');
  const db = parseInt(process.env.REDIS_DB ?? '0');

  const client = new Redis({
    host,
    port,
    password: process.env.REDIS_PASSWORD,
    db,
    // Connection settings
    maxRetriesPerRequest: 3,
    enableReadyCheck: true,
    // Reconnect with backoff: 50ms per attempt, capped at 2s
    retryStrategy: (times) => Math.min(times * 50, 2000),
    // Timeout settings
    connectTimeout: 10_000,
    commandTimeout: 5_000,
    lazyConnect: false,
  });

  client
    .on('connect', () => console.log(`Redis ${name}: connected`))
    .on('error', (err) => console.error(`Redis ${name} error:`, err))
    .on('close', () => console.warn(`Redis ${name}: connection closed`));

  return client;
}
// Separate clients for different use cases
// (pub/sub clients can't run regular commands while subscribed)
export const redis = createRedisClient('main');           // caching, sessions, rate limiting, etc.
export const redisPub = createRedisClient('publisher');   // PUBLISH only
export const redisSub = createRedisClient('subscriber');  // SUBSCRIBE/PSUBSCRIBE only
CLAUDE.md for Redis Projects
## Redis Usage
- Connection: redis singleton in src/lib/redis.ts
- Separate clients for pub/sub (redisPub, redisSub) — subscribed clients can't run commands
- Key naming: {service}:{resource}:{id} (e.g., "auth:session:abc123")
- TTLs required on all cache keys — never cache permanently unless explicitly designed
- Use SET with NX and EX (SET key value EX ttl NX) for distributed locks — atomic acquire with a TTL, never manual GET+SET
- Lua scripts for atomic multi-step operations
- Rate limiting: token bucket in src/lib/rateLimiter.ts
Caching Patterns
Cache the product catalog API response.
Cache for 5 minutes, but invalidate immediately when any product is updated.
// src/lib/cache.ts
import { redis } from './redis';
const CATALOG_KEY = 'catalog:products:all';
const CATALOG_TTL = 300; // 5 minutes

/** Read the cached catalog from Redis; returns null on a cache miss. */
export async function getCachedCatalog(): Promise<Product[] | null> {
  const raw = await redis.get(CATALOG_KEY);
  if (!raw) return null;
  return JSON.parse(raw);
}
/** Store the serialized catalog under the standard key with the 5-minute TTL. */
export async function setCatalogCache(products: Product[]): Promise<void> {
  const payload = JSON.stringify(products);
  await redis.setex(CATALOG_KEY, CATALOG_TTL, payload);
}
/** Drop the cached catalog immediately (call on any product update). */
export async function invalidateCatalogCache(): Promise<void> {
  await redis.del(CATALOG_KEY);
}
// Usage pattern: cache-aside — serve from Redis when possible,
// otherwise query the database and repopulate the cache.
async function getCatalog(): Promise<Product[]> {
  const hit = await getCachedCatalog();
  if (hit !== null) return hit;

  const products = await db.query('SELECT * FROM products WHERE active = true');
  await setCatalogCache(products);
  return products;
}
Cache-Aside with Stampede Protection
Multiple requests hit the cache simultaneously when it expires.
This causes a "thundering herd" — all of them hit the database at once.
// Cache-aside with stampede protection:
//  1. Probabilistic early refresh — within the last 20% of the TTL, requests
//     may kick off a background refresh so the key rarely expires cold.
//  2. Distributed lock on cache miss — only one instance rebuilds the cache;
//     the rest wait briefly and re-check instead of all hitting the database.
async function getCatalogWithStampedeProtection(): Promise<Product[]> {
  const cached = await redis.get(CATALOG_KEY);
  if (cached) {
    const ttl = await redis.ttl(CATALOG_KEY);
    // Only consider a refresh inside the final 20% of the TTL, with the
    // probability rising to 1 as the key nears expiry. (The original
    // `Math.random() > (ttl / TTL) * 0.8` fired ~20% of the time even on a
    // freshly-set key, causing needless DB load.)
    const refreshWindow = CATALOG_TTL * 0.2;
    if (ttl > 0 && ttl < refreshWindow && Math.random() > ttl / refreshWindow) {
      // Refresh in background — don't block current request
      refreshCatalogCache().catch(console.error);
    }
    return JSON.parse(cached);
  }

  // Cache miss — take a short-lived lock so only one instance rebuilds.
  const lockKey = `${CATALOG_KEY}:lock`;
  const lockAcquired = await redis.set(lockKey, '1', 'EX', 10, 'NX'); // 10s lock

  if (!lockAcquired) {
    // Another instance is refreshing — wait briefly and retry from cache.
    await new Promise((resolve) => setTimeout(resolve, 100));
    const retryCache = await redis.get(CATALOG_KEY);
    if (retryCache) return JSON.parse(retryCache);
    // Fall through: the lock holder is slow or died. Query the DB ourselves,
    // but never touch a lock we did not acquire.
  }

  try {
    const products = await db.query('SELECT * FROM products WHERE active = true');
    await redis.setex(CATALOG_KEY, CATALOG_TTL, JSON.stringify(products));
    return products;
  } finally {
    // Release the lock only if this instance owns it. (The original deleted
    // the lock unconditionally, clobbering another instance's lock on the
    // fallback path above.)
    if (lockAcquired) {
      await redis.del(lockKey);
    }
  }
}
Rate Limiting
Implement rate limiting for the API.
Rules: 100 requests/minute for authenticated users, 20/minute for anonymous.
Return 429 with a Retry-After header.
// Sliding window rate limiter using Lua script (atomic)
//
// KEYS[1] = rate-limit key (a sorted set of request timestamps, scored by
// epoch ms). ARGV = [limit, window in ms, current epoch ms].
// Returns {1, 0} when allowed, or {0, retry_after_seconds} when limited
// (retry_after = time until the oldest request in the window expires).
// The stored member is `now .. math.random()` so two requests landing in
// the same millisecond still count as distinct entries; EXPIRE keeps idle
// keys from accumulating forever.
const RATE_LIMIT_SCRIPT = `
local key = KEYS[1]
local limit = tonumber(ARGV[1])
local window = tonumber(ARGV[2])
local now = tonumber(ARGV[3])
local window_start = now - window
-- Remove expired entries
redis.call('ZREMRANGEBYSCORE', key, 0, window_start)
-- Count requests in window
local count = redis.call('ZCARD', key)
if count >= limit then
-- Rate limited — return time until oldest request expires
local oldest = redis.call('ZRANGE', key, 0, 0, 'WITHSCORES')
local retry_after = math.ceil((oldest[2] + window - now) / 1000)
return {0, retry_after}
end
-- Record this request
redis.call('ZADD', key, now, now .. math.random())
redis.call('EXPIRE', key, math.ceil(window / 1000))
return {1, 0}
`;
/**
 * Atomically check and record one request for `identifier`.
 *
 * @param identifier - rate-limit subject, e.g. "user:123" or "ip:1.2.3.4"
 * @param limit      - maximum requests allowed per window
 * @param windowMs   - sliding window length in milliseconds
 * @returns allowed flag plus, when denied, seconds until a slot frees up
 */
export async function checkRateLimit(
  identifier: string,
  limit: number,
  windowMs: number,
): Promise<{ allowed: boolean; retryAfter?: number }> {
  const result = (await redis.eval(
    RATE_LIMIT_SCRIPT,
    1,
    `rate:${identifier}`,
    limit,
    windowMs,
    Date.now(),
  )) as [number, number];

  const [allowedFlag, retrySeconds] = result;
  const allowed = allowedFlag === 1;
  return {
    allowed,
    retryAfter: allowed ? undefined : retrySeconds,
  };
}
// Express/Next.js middleware
//
// Authenticated users are keyed by user id and get `options.limit`
// (default 100/window); anonymous requests are keyed by IP and capped at 20.
// On denial we return 429 with a Retry-After header.
export function rateLimitMiddleware(options?: { limit?: number; window?: number }) {
  return async (req: Request, res: Response, next: Next) => {
    const userId = req.user?.id;
    const identifier = userId ? `user:${userId}` : `ip:${req.ip}`;
    const limit = userId ? (options?.limit ?? 100) : 20;

    let allowed = true;
    let retryAfter: number | undefined;
    try {
      ({ allowed, retryAfter } = await checkRateLimit(
        identifier, limit, options?.window ?? 60_000
      ));
    } catch (err) {
      // Fail open: a Redis outage should degrade rate limiting, not take the
      // API down. (The original let the rejection escape, which Express 4
      // treats as an unhandled promise rejection — the request hangs.)
      console.error('Rate limiter unavailable, allowing request:', err);
    }

    if (!allowed) {
      res.set('Retry-After', String(retryAfter));
      return res.status(429).json({
        error: 'Too many requests',
        retryAfter,
      });
    }

    // Add rate limit headers
    res.set('X-RateLimit-Limit', String(limit));
    next();
  };
}
Session Storage
Store user sessions in Redis instead of JWTs.
Sessions should expire after 24 hours of inactivity (sliding expiry).
// src/lib/session.ts
import { redis } from './redis';
import { randomBytes } from 'crypto';
const SESSION_TTL = 86400; // 24 hours (sliding — refreshed on every access)
const SESSION_PREFIX = 'session:';
// Shape of the JSON payload stored under `session:{id}`.
interface Session {
userId: string;
email: string;
role: string;
createdAt: number;     // epoch ms, set once at login
lastActiveAt: number;  // epoch ms, updated on access
}
/**
 * Create a Redis-backed session for `user` and return its opaque id.
 * The 256-bit random id is the only credential; the key expires after
 * SESSION_TTL seconds unless refreshed by getSession.
 */
export async function createSession(user: { id: string; email: string; role: string }): Promise<string> {
  const sessionId = randomBytes(32).toString('hex');
  const now = Date.now();

  const session: Session = {
    userId: user.id,
    email: user.email,
    role: user.role,
    createdAt: now,
    lastActiveAt: now,
  };

  const key = `${SESSION_PREFIX}${sessionId}`;
  await redis.setex(key, SESSION_TTL, JSON.stringify(session));
  return sessionId;
}
/**
 * Fetch a session and refresh its TTL (sliding 24h expiry).
 *
 * Fix: the original bumped `lastActiveAt` only on the in-memory copy, so the
 * stored value never moved past creation time. We now write the updated
 * session back (fire-and-forget) with KEEPTTL so the GETEX-refreshed expiry
 * is preserved and the request is not blocked on the extra round trip.
 */
export async function getSession(sessionId: string): Promise<Session | null> {
  const key = `${SESSION_PREFIX}${sessionId}`;
  const data = await redis.getex(
    key,
    'EX', SESSION_TTL, // Refresh TTL on access (sliding expiry)
  );
  if (!data) return null;

  const session = JSON.parse(data) as Session;
  session.lastActiveAt = Date.now();

  // Persist the activity timestamp without disturbing the TTL set above.
  redis.set(key, JSON.stringify(session), 'KEEPTTL').catch(console.error);

  return session;
}
/** Remove a single session (e.g., on logout). Missing keys are a no-op. */
export async function deleteSession(sessionId: string): Promise<void> {
  const key = `${SESSION_PREFIX}${sessionId}`;
  await redis.del(key);
}
// Delete all sessions for a user (e.g., after password change)
//
// Uses SCAN (non-blocking, unlike KEYS) and batches the per-key reads into
// one MGET and the deletions into one DEL per SCAN page — the original
// issued one GET (and possibly one DEL) round trip per session key.
// NOTE(review): still O(total sessions); a secondary index set per user
// would make this O(user's sessions), at the cost of extra bookkeeping.
export async function deleteAllUserSessions(userId: string): Promise<void> {
  const pattern = `${SESSION_PREFIX}*`;
  let cursor = '0';
  do {
    const [nextCursor, keys] = await redis.scan(cursor, 'MATCH', pattern, 'COUNT', 100);
    cursor = nextCursor;
    if (keys.length === 0) continue;

    const payloads = await redis.mget(...keys);
    const toDelete = keys.filter((_key, i) => {
      const data = payloads[i];
      if (!data) return false; // key expired between SCAN and MGET
      try {
        return (JSON.parse(data) as Session).userId === userId;
      } catch {
        return false; // skip corrupt entries instead of aborting the sweep
      }
    });

    if (toDelete.length > 0) {
      await redis.del(...toDelete);
    }
  } while (cursor !== '0');
}
Sorted Sets for Leaderboards
Build a real-time leaderboard for a game.
Display: top 100 players, current user's rank, and nearby players.
const LEADERBOARD_KEY = 'leaderboard:game:global';

// Upsert a player's score. ZADD overwrites any previous score ("set"
// semantics); switch to ZINCRBY if scores should accumulate instead.
async function updateScore(userId: string, score: number): Promise<void> {
  await redis.zadd(LEADERBOARD_KEY, score, userId);
}
// Get top N players (highest score first).
//
// Fix: Redis returns ZSET scores as strings and stores them as doubles —
// parseInt would silently truncate "1050.5" to 1050, so parse with Number.
async function getTopPlayers(n: number): Promise<Array<{ userId: string; score: number; rank: number }>> {
  // ZREVRANGE returns highest scores first; WITHSCORES interleaves
  // [member, score, member, score, ...].
  const results = await redis.zrevrange(LEADERBOARD_KEY, 0, n - 1, 'WITHSCORES');
  const players: Array<{ userId: string; score: number; rank: number }> = [];
  for (let i = 0; i < results.length; i += 2) {
    players.push({
      userId: results[i],
      score: Number(results[i + 1]),
      rank: i / 2 + 1, // i is always even, so this is an exact integer
    });
  }
  return players;
}
// Get user's rank and nearby players (up to 2 above and 2 below).
//
// Fix: scores are returned as strings and may be doubles — parse with
// Number rather than parseInt, which silently truncates fractional scores.
async function getUserContext(userId: string): Promise<{
  rank: number;
  score: number;
  nearby: Array<{ userId: string; score: number; rank: number }>;
}> {
  // Pipeline the two lookups into a single round trip.
  const pipeline = redis.pipeline();
  pipeline.zrevrank(LEADERBOARD_KEY, userId); // 0-indexed rank, null if absent
  pipeline.zscore(LEADERBOARD_KEY, userId);   // score as string, null if absent
  const [[, rank], [, score]] = await pipeline.exec() as any[];

  // User is not on the leaderboard at all.
  if (rank === null) {
    return { rank: -1, score: 0, nearby: [] };
  }

  // Window of up to 5 entries centered on the user (clamped at the top).
  const start = Math.max(0, rank - 2);
  const end = rank + 2;
  const nearbyResults = await redis.zrevrange(LEADERBOARD_KEY, start, end, 'WITHSCORES');

  const nearby: Array<{ userId: string; score: number; rank: number }> = [];
  for (let i = 0; i < nearbyResults.length; i += 2) {
    nearby.push({
      userId: nearbyResults[i],
      score: Number(nearbyResults[i + 1]),
      rank: start + i / 2 + 1, // i is always even
    });
  }

  // Convert 0-indexed ZREVRANK to a 1-indexed display rank.
  return { rank: rank + 1, score: Number(score), nearby };
}
For real-time features using Redis pub/sub alongside WebSockets, see the WebSockets guide. For Redis pub/sub scaling in microservices, see the microservices guide. The Claude Skills 360 bundle includes Redis skill sets for caching patterns, rate limiting implementations, and distributed locking. Start with the free tier to try Redis pattern generation for your project.