Upstash provides serverless Redis and Kafka with HTTP-based SDKs that work at the edge — Redis.fromEnv() auto-reads UPSTASH_REDIS_REST_URL and UPSTASH_REDIS_REST_TOKEN. await redis.set("key", value, { ex: 60 }) stores with TTL. await redis.get("key") retrieves. await redis.hset("hash", { field: value }) and redis.hgetall("hash") for hash maps. Rate limiting: new Ratelimit({ redis, limiter: Ratelimit.slidingWindow(10, "10 s") }) and const { success, limit, remaining } = await ratelimit.limit(identifier). QStash message queue: const client = new Client({ token }), client.publishJSON({ url: webhookUrl, body: payload, delay: 60 }) schedules delivery. client.schedules.create({ cron: "0 9 * * *", destination: url, body: {} }) creates recurring jobs. Kafka: const kafka = new Kafka({ url, username, password }), const producer = kafka.producer(), await producer.produce("topic", { value: JSON.stringify(event) }), kafka.consumer({ groupId }).consume({ instanceId, topics, autoOffsetReset: "latest" }, handler). Pipeline: const p = redis.pipeline(); p.set("a", 1); p.incr("counter"); await p.exec(). Claude Code generates Upstash Redis caching, serverless rate limiting, and QStash background job queues.
CLAUDE.md for Upstash
## Upstash Stack
- Version: @upstash/redis >= 1.34, @upstash/ratelimit >= 2.x, @upstash/qstash >= 2.x
- Redis init: const redis = Redis.fromEnv() — needs UPSTASH_REDIS_REST_URL + UPSTASH_REDIS_REST_TOKEN
- Get/set: await redis.set("key", value, { ex: 300 }); const val = await redis.get<MyType>("key")
- Hash: await redis.hset("user:123", { name, email }); const user = await redis.hgetall("user:123")
- Rate limit: const rl = new Ratelimit({ redis, limiter: Ratelimit.slidingWindow(100, "60 s"), prefix: "@app" })
- Rate check: const { success, remaining, reset } = await rl.limit(userId)
- QStash publish: await qstash.publishJSON({ url: WEBHOOK, body: job, retries: 3 })
- All ops are HTTP, work in Vercel Edge Functions / Cloudflare Workers / Node.js
Upstash Redis Client
// lib/upstash/redis.ts — Upstash Redis helpers with type safety
import { Redis } from "@upstash/redis"
// Shared module-wide client. fromEnv() reads UPSTASH_REDIS_REST_URL and
// UPSTASH_REDIS_REST_TOKEN from the environment (HTTP transport, edge-safe).
export const redis = Redis.fromEnv()
/**
 * Read-through cache: return the value stored at `key` if present,
 * otherwise run `fetcher`, store its result with a TTL, and return it.
 *
 * @param key        Cache key.
 * @param fetcher    Producer invoked only on a cache miss.
 * @param ttlSeconds Expiry for a freshly cached value (default 300 s).
 */
export async function cached<T>(
  key: string,
  fetcher: () => Promise<T>,
  ttlSeconds = 300,
): Promise<T> {
  const hit = await redis.get<T>(key)
  if (hit !== null) {
    return hit
  }
  const fresh = await fetcher()
  await redis.set(key, fresh, { ex: ttlSeconds })
  return fresh
}
/**
 * Delete cache entries by exact key or glob pattern (e.g. "user:*").
 *
 * @param pattern Exact key, or a pattern containing "*" for a SCAN-based sweep.
 * @returns Number of keys deleted.
 */
export async function invalidate(pattern: string): Promise<number> {
  // Fast path: no wildcard means a single-key delete.
  if (!pattern.includes("*")) {
    return redis.del(pattern)
  }
  // Scan and delete matching keys. SCAN cursors are 64-bit values; keep the
  // cursor as a string — the original Number(nextCursor) could lose precision
  // on cursors above Number.MAX_SAFE_INTEGER and loop or terminate early.
  let cursor = "0"
  let deleted = 0
  do {
    const [nextCursor, keys] = await redis.scan(cursor, { match: pattern, count: 100 })
    cursor = String(nextCursor)
    if (keys.length > 0) {
      await redis.del(...keys)
      deleted += keys.length
    }
  } while (cursor !== "0")
  return deleted
}
// ── Session store ─────────────────────────────────────────────────────────
// Shape of the per-session hash stored under `session:<sessionId>`.
export type SessionData = {
  userId: string
  email: string
  role: string
  // Application-level expiry as a Unix timestamp in ms; checked by getSession
  // in addition to the Redis key TTL set by setSession.
  expiresAt: number
}
/**
 * Persist a session as a Redis hash with a key TTL.
 *
 * Uses a pipeline so HSET and EXPIRE travel in a single request: the previous
 * two sequential calls could leave a session hash with no TTL (a key leak) if
 * the process failed between them.
 *
 * @param ttlSeconds Key TTL, default 86400 (24 h).
 */
export async function setSession(sessionId: string, data: SessionData, ttlSeconds = 86400): Promise<void> {
  const key = `session:${sessionId}`
  const p = redis.pipeline()
  p.hset(key, data)
  p.expire(key, ttlSeconds)
  await p.exec()
}
/**
 * Load a session hash and enforce its application-level expiry.
 * Returns null when the session is missing; a stale session is deleted
 * and also reported as null.
 */
export async function getSession(sessionId: string): Promise<SessionData | null> {
  const key = `session:${sessionId}`
  const session = await redis.hgetall<SessionData>(key)
  if (!session) return null
  const isStale = Date.now() > session.expiresAt
  if (isStale) {
    await redis.del(key)
    return null
  }
  return session
}
/** Remove a session hash immediately. */
export async function deleteSession(sessionId: string): Promise<void> {
  const key = `session:${sessionId}`
  await redis.del(key)
}
// ── Leaderboard (sorted set) ──────────────────────────────────────────────
/** Upsert a member's score on a leaderboard sorted set. */
export async function addScore(board: string, userId: string, score: number): Promise<void> {
  const entry = { score, member: userId }
  await redis.zadd(board, entry)
}
/**
 * Top-N leaderboard entries, highest score first, with 1-based ranks.
 *
 * NOTE(review): assumes `zrange` with `withScores` returns a flat
 * [member, score, member, score, …] array, as the original did — confirm
 * against the installed @upstash/redis version.
 */
export async function getLeaderboard(
  board: string,
  limit = 10,
): Promise<Array<{ userId: string; score: number; rank: number }>> {
  const flat = await redis.zrange(board, 0, limit - 1, {
    rev: true,
    withScores: true,
  })
  const pairCount = Math.floor(flat.length / 2)
  return Array.from({ length: pairCount }, (_, idx) => ({
    userId: flat[idx * 2] as string,
    score: Number(flat[idx * 2 + 1]),
    rank: idx + 1,
  }))
}
/**
 * A single member's 1-based rank (descending by score) and score,
 * or null when the member is not on the board.
 */
export async function getUserRank(
  board: string,
  userId: string,
): Promise<{ rank: number; score: number } | null> {
  // Fetch rank and score concurrently — they are independent reads.
  const rankPromise = redis.zrevrank(board, userId)
  const scorePromise = redis.zscore(board, userId)
  const [rank, score] = await Promise.all([rankPromise, scorePromise])
  if (rank === null || score === null) return null
  // ZREVRANK is 0-based; callers see ranks starting at 1.
  return { rank: rank + 1, score: Number(score) }
}
// ── Atomic pipeline ───────────────────────────────────────────────────────
/**
 * Increment a counter and return its post-increment value. When `ttlSeconds`
 * is given, the TTL refresh rides the same pipelined request as the INCR.
 */
export async function incrementCounter(
  key: string,
  ttlSeconds?: number,
): Promise<number> {
  const pipeline = redis.pipeline()
  pipeline.incr(key)
  if (ttlSeconds) {
    pipeline.expire(key, ttlSeconds)
  }
  const [count] = await pipeline.exec()
  return count as number
}
Rate Limiting Middleware
// lib/upstash/ratelimit.ts — serverless rate limiting with Upstash
import { Ratelimit } from "@upstash/ratelimit"
import { Redis } from "@upstash/redis"
// Client backing the limiter state; reads UPSTASH_REDIS_REST_* env vars.
const redis = Redis.fromEnv()
// Pre-built limiters for different use cases. Each `prefix` namespaces the
// limiter's Redis keys so the three limiters never collide.
export const limiters = {
  // API endpoint: 100 req/min per user
  api: new Ratelimit({
    redis,
    limiter: Ratelimit.slidingWindow(100, "60 s"),
    prefix: "@api",
    analytics: true, // Records to Upstash console
  }),
  // Auth endpoints: 5 attempts per 15 min per IP (stricter)
  auth: new Ratelimit({
    redis,
    limiter: Ratelimit.fixedWindow(5, "15 m"),
    prefix: "@auth",
  }),
  // AI generation: 10 req/hour per user (token bucket, capacity 10)
  ai: new Ratelimit({
    redis,
    limiter: Ratelimit.tokenBucket(10, "1 h", 10),
    prefix: "@ai",
  }),
}
// Normalized result returned by checkRateLimit for all limiter types.
export type RateLimitResult = {
  success: boolean
  limit: number
  remaining: number
  reset: number // Unix timestamp in ms
  retryAfter?: number // seconds to wait; only set when success is false
}
/**
 * Run one of the named limiters against an identifier (user id, IP, …).
 * When the request is rejected, `retryAfter` carries the whole number of
 * seconds until the window resets (suitable for a Retry-After header).
 */
export async function checkRateLimit(
  limiterName: keyof typeof limiters,
  identifier: string,
): Promise<RateLimitResult> {
  const outcome = await limiters[limiterName].limit(identifier)
  const result: RateLimitResult = {
    success: outcome.success,
    limit: outcome.limit,
    remaining: outcome.remaining,
    reset: outcome.reset,
  }
  if (!outcome.success) {
    result.retryAfter = Math.ceil((outcome.reset - Date.now()) / 1000)
  }
  return result
}
// Next.js middleware usage:
// import { NextResponse } from "next/server"
// export async function middleware(req: NextRequest) {
// const ip = req.ip ?? req.headers.get("x-forwarded-for") ?? "anonymous"
// const { success, retryAfter } = await checkRateLimit("api", ip)
// if (!success) {
// return NextResponse.json({ error: "Rate limit exceeded" }, {
// status: 429,
// headers: { "Retry-After": String(retryAfter) },
// })
// }
// return NextResponse.next()
// }
QStash Background Jobs
// lib/upstash/qstash.ts — durable background job queue with QStash
import { Client as QStashClient, Receiver } from "@upstash/qstash"
// QStash publisher; QSTASH_TOKEN must be set (non-null assertion).
export const qstash = new QStashClient({
  token: process.env.QSTASH_TOKEN!,
})
// Public base URL of this deployment — every job webhook is routed under it.
const WEBHOOK_BASE = process.env.NEXT_PUBLIC_APP_URL!
// ── Publish jobs ───────────────────────────────────────────────────────────
/**
 * Queue an email job for delivery to the /api/jobs/email webhook.
 * QStash retries a failed delivery up to 3 times.
 *
 * @returns The QStash message id for tracking.
 */
export async function enqueueEmail(payload: {
  to: string
  subject: string
  template: string
  data: Record<string, unknown>
}): Promise<string> {
  const response = await qstash.publishJSON({
    url: `${WEBHOOK_BASE}/api/jobs/email`,
    body: payload,
    retries: 3,
    delay: 0,
  })
  return response.messageId
}
/**
 * Publish a report-generation job to the /api/jobs/report webhook,
 * optionally delayed by `delaySeconds`. The delay field is omitted
 * entirely when zero, matching QStash's "deliver now" default.
 *
 * @returns The QStash message id.
 */
export async function scheduleReport(
  reportType: string,
  userId: string,
  delaySeconds = 0,
): Promise<string> {
  const base = {
    url: `${WEBHOOK_BASE}/api/jobs/report`,
    body: { reportType, userId },
    retries: 2,
  }
  const published = await qstash.publishJSON(
    delaySeconds > 0 ? { ...base, delay: delaySeconds } : base,
  )
  return published.messageId
}
/**
 * Register a cron-triggered QStash schedule that calls `endpoint` on this app.
 * When `body` is supplied it is serialized once and sent as JSON on every run.
 *
 * NOTE(review): `name` is currently unused — kept for call-site readability
 * and interface stability.
 *
 * @returns The schedule id (needed to pause or delete the schedule later).
 */
export async function createRecurringJob(
  name: string,
  endpoint: string,
  cron: string,
  body?: Record<string, unknown>,
): Promise<string> {
  const destination = `${WEBHOOK_BASE}${endpoint}`
  const payload = body ? JSON.stringify(body) : undefined
  const created = await qstash.schedules.create({
    cron,
    destination,
    body: payload,
    headers: payload ? { "Content-Type": "application/json" } : undefined,
  })
  return created.scheduleId
}
// ── Verify incoming QStash requests ──────────────────────────────────────
// Validates request signatures against either of the two signing keys
// exposed by QStash (current and next).
export const receiver = new Receiver({
  currentSigningKey: process.env.QSTASH_CURRENT_SIGNING_KEY!,
  nextSigningKey: process.env.QSTASH_NEXT_SIGNING_KEY!,
})
/**
 * Verify the Upstash-Signature header on an incoming QStash webhook.
 *
 * Reads the body from a CLONE of the request so callers can still consume
 * the original body afterwards (e.g. `await req.json()` in the route
 * handler, as in the usage example below) — the previous implementation
 * consumed the body here, making any later read throw.
 *
 * @returns true when the signature verifies, false otherwise.
 */
export async function verifyQStashRequest(req: Request): Promise<boolean> {
  const signature = req.headers.get("upstash-signature")
  if (!signature) return false
  // clone() leaves the original request's body stream readable.
  const body = await req.clone().text()
  try {
    await receiver.verify({ signature, body })
    return true
  } catch {
    return false
  }
}
// app/api/jobs/email/route.ts usage:
// export async function POST(req: Request) {
// if (!await verifyQStashRequest(req)) return new Response("Unauthorized", { status: 401 })
// const { to, subject, template, data } = await req.json()
// await sendEmail({ to, subject, template, data })
// return new Response("OK")
// }
For the Redis Cloud (Redis Inc.) alternative when needing a traditional persistent Redis instance with full Redis module support (RedisSearch, RedisJSON, RedisGraph, RedisTimeSeries) and sub-millisecond latency from a long-running server — Redis Cloud is a fully managed persistent Redis while Upstash’s serverless HTTP model is uniquely suited for edge runtimes and functions that can’t hold TCP connections, see the Redis guide. For the Vercel KV alternative when already deploying on Vercel and wanting a Redis-backed store with zero additional configuration through the Vercel dashboard — Vercel KV is powered by Upstash under the hood with slightly fewer features exposed, so Upstash directly gives you more control (QStash, Kafka, analytics) at the same underlying service, see the Vercel KV guide. The Claude Skills 360 bundle includes Upstash skill sets covering Redis caching, rate limiting, and QStash job queues. Start with the free tier to try serverless Redis.