The Vercel AI SDK unifies AI providers — Claude, OpenAI, Gemini, Mistral — behind a single interface. streamText streams tokens as they arrive. generateObject returns structured data validated by a Zod schema. useChat wires a React chat UI with streaming and tool-call support in 20 lines. The tool-calls API enables multi-step agentic workflows where the model calls your functions and gets results. Claude Code generates AI SDK route handlers, React chat components, structured data extraction, and the multi-tool agent patterns for production AI applications.
CLAUDE.md for AI SDK Projects
## AI SDK Stack
- SDK: ai (Vercel AI SDK) >= 4.0
- Provider: @ai-sdk/anthropic (Claude), @ai-sdk/openai as fallback
- Framework: Next.js 15 App Router with route handlers
- Streaming: streamText for chat, generateObject for extraction
- Tools: maxSteps=5 for agentic loops (prevent infinite recursion)
- Model: claude-sonnet-4-6 as default (balance speed/quality)
- Error handling: AISDKError.isInstance() type guard for provider errors
- Rate limits: implement retry with exponential backoff
Basic Chat Route Handler
// app/api/chat/route.ts
import { anthropic } from '@ai-sdk/anthropic'
import { streamText, Message } from 'ai'
import { z } from 'zod'
export const maxDuration = 60 // Vercel serverless function timeout
// POST /api/chat — streams an assistant reply for the submitted conversation.
// Expects a JSON body of { messages, systemPrompt? } from the useChat client.
export async function POST(req: Request) {
  const body: { messages: Message[]; systemPrompt?: string } = await req.json()

  // Fall back to a generic system prompt so the route works without client config.
  const system = body.systemPrompt ?? 'You are a helpful assistant.'

  const stream = streamText({
    model: anthropic('claude-sonnet-4-6'),
    system,
    messages: body.messages,
    maxTokens: 4096,
    temperature: 0.7,
  })

  // Data-stream response format consumed by the useChat hook.
  return stream.toDataStreamResponse()
}
// app/chat/page.tsx — client component with useChat hook
'use client'
import { useChat } from 'ai/react'
import { useRef, useEffect } from 'react'
// Chat page: streaming conversation UI backed by /api/chat via the useChat hook.
export default function ChatPage() {
  const { messages, input, handleInputChange, handleSubmit, isLoading, error } = useChat({
    api: '/api/chat',
    initialMessages: [
      { id: '0', role: 'assistant', content: 'Hello! How can I help you today?' }
    ],
    onError: (error) => console.error('Chat error:', error),
  })

  // Invisible anchor kept below the last message so new tokens stay in view.
  const bottomAnchor = useRef<HTMLDivElement>(null)
  useEffect(() => {
    bottomAnchor.current?.scrollIntoView({ behavior: 'smooth' })
  }, [messages])

  // Per-role styling helpers keep the JSX below flat.
  const alignFor = (role: string) => (role === 'user' ? 'justify-end' : 'justify-start')
  const bubbleFor = (role: string) =>
    role === 'user' ? 'bg-blue-600 text-white' : 'bg-gray-100 text-gray-900'

  return (
    <div className="flex flex-col h-screen max-w-2xl mx-auto p-4">
      <div className="flex-1 overflow-y-auto space-y-4 pb-4">
        {messages.map(message => (
          <div key={message.id} className={`flex ${alignFor(message.role)}`}>
            <div className={`max-w-[80%] rounded-lg p-3 ${bubbleFor(message.role)}`}>
              {message.content}
            </div>
          </div>
        ))}
        {isLoading && (
          <div className="flex justify-start">
            <div className="bg-gray-100 rounded-lg p-3 text-gray-500 text-sm">
              Thinking...
            </div>
          </div>
        )}
        {error && (
          <div className="text-red-500 text-sm text-center">{error.message}</div>
        )}
        <div ref={bottomAnchor} />
      </div>
      <form onSubmit={handleSubmit} className="flex gap-2 pt-4 border-t">
        <input
          value={input}
          onChange={handleInputChange}
          placeholder="Type a message..."
          className="flex-1 px-4 py-2 border rounded-lg focus:outline-none focus:ring-2 focus:ring-blue-500"
          disabled={isLoading}
        />
        <button
          type="submit"
          disabled={isLoading || !input.trim()}
          className="px-6 py-2 bg-blue-600 text-white rounded-lg disabled:opacity-50"
        >
          Send
        </button>
      </form>
    </div>
  )
}
generateObject for Structured Extraction
// app/api/extract/route.ts
import { anthropic } from '@ai-sdk/anthropic'
import { generateObject } from 'ai'
import { z } from 'zod'
// Shape of a single extracted line item. The .describe() annotations are
// forwarded to the model as field-level extraction guidance.
const OrderLineItemSchema = z.object({
  product: z.string().describe('Product name'),
  quantity: z.number().int().positive(),
  unitPrice: z.number().positive().describe('Price per unit in dollars'),
  currency: z.enum(['USD', 'EUR', 'GBP']).default('USD'),
})

// Full extraction payload: line items plus optional order-level metadata.
const OrderExtractionSchema = z.object({
  orders: z.array(OrderLineItemSchema),
  customerName: z.string().optional(),
  deliveryAddress: z.string().optional(),
  requestedDeliveryDate: z.string().optional().describe('ISO date string if mentioned'),
  specialInstructions: z.string().optional(),
  estimatedTotal: z.number().optional(),
})
// POST /api/extract — turns free-form order text into schema-validated JSON.
export async function POST(req: Request) {
  const payload: { text: string } = await req.json()

  // generateObject validates the model output against OrderExtractionSchema
  // before returning, so `object` is fully typed and checked.
  const extraction = await generateObject({
    model: anthropic('claude-sonnet-4-6'),
    schema: OrderExtractionSchema,
    prompt: `Extract order information from this text. Be precise and only include information explicitly mentioned.\n\n${payload.text}`,
  })

  return Response.json(extraction.object)
}
// Streaming version with useObject hook on client
// app/api/extract-stream/route.ts
import { anthropic } from '@ai-sdk/anthropic'
import { streamObject } from 'ai'
import { z } from 'zod'
// One position in the candidate's work history.
const ExperienceEntrySchema = z.object({
  company: z.string(),
  role: z.string(),
  years: z.number(),
  highlights: z.array(z.string()),
})

// Target shape for streamed resume parsing (consumed by useObject on the client).
const ResumeSchema = z.object({
  name: z.string(),
  email: z.string().email(),
  skills: z.array(z.string()),
  experience: z.array(ExperienceEntrySchema),
})
// POST /api/extract-stream — streams partial ResumeSchema objects as the
// model generates them, for incremental rendering with useObject.
export async function POST(req: Request) {
  const { resumeText } = await req.json()

  const parsing = streamObject({
    model: anthropic('claude-sonnet-4-6'),
    schema: ResumeSchema,
    prompt: `Parse this resume:\n${resumeText}`,
  })

  return parsing.toTextStreamResponse()
}
// Client: partial streaming with useObject
'use client'
import { experimental_useObject as useObject } from 'ai/react'
import { ResumeSchema } from '@/lib/schemas'
export function ResumeParser() {
const { object, submit, isLoading } = useObject({
api: '/api/extract-stream',
schema: ResumeSchema,
})
return (
<div>
<button onClick={() => submit({ resumeText: '...' })}>Parse Resume</button>
{/* Renders partial data as it streams in */}
{object?.name && <h2>{object.name}</h2>}
{object?.skills && (
<ul>{object.skills.map(s => <li key={s}>{s}</li>)}</ul>
)}
</div>
)
}
Tool Calls and Agentic Loops
// app/api/agent/route.ts — multi-step tool calling
import { anthropic } from '@ai-sdk/anthropic'
import { streamText, tool } from 'ai'
import { z } from 'zod'
import { db } from '@/lib/db'
import { searchKnowledgeBase } from '@/lib/search'
// POST /api/agent — multi-step tool-calling loop for order support.
// The model may invoke tools for up to maxSteps rounds before it must
// produce a final text answer.
export async function POST(req: Request) {
  const { messages } = await req.json()
  const result = streamText({
    model: anthropic('claude-sonnet-4-6'),
    system: `You are an order management assistant. Use the available tools to help customers.
Always verify order existence before providing information.`,
    messages,
    maxSteps: 5, // Max tool call rounds before forcing text response
    tools: {
      lookupOrder: tool({
        description: 'Look up an order by ID. Returns order status, items, and tracking info.',
        parameters: z.object({
          orderId: z.string().describe('The order ID (format: ord_xxx)'),
        }),
        execute: async ({ orderId }) => {
          const order = await db.orders.findById(orderId)
          // Return a structured not-found result instead of throwing: an
          // exception inside execute aborts the whole stream, whereas a
          // result lets the model explain the problem to the customer
          // (and matches the system prompt's "verify order existence").
          if (!order) {
            return { error: `Order ${orderId} not found` }
          }
          return {
            id: order.id,
            status: order.status,
            createdAt: order.createdAt,
            items: order.items.map(i => ({
              product: i.productName,
              quantity: i.quantity,
              price: i.totalCents / 100, // stored in cents; exposed in dollars
            })),
            trackingNumber: order.trackingNumber,
            estimatedDelivery: order.estimatedDelivery,
          }
        },
      }),
      searchFAQ: tool({
        description: 'Search the knowledge base for answers to common questions.',
        parameters: z.object({
          query: z.string().describe('The customer question or topic to search for'),
        }),
        execute: async ({ query }) => {
          const results = await searchKnowledgeBase(query, { limit: 3 })
          return results.map(r => ({
            title: r.title,
            content: r.content,
            relevanceScore: r.score,
          }))
        },
      }),
      initiateReturn: tool({
        description: 'Initiate a return request for an order item.',
        parameters: z.object({
          orderId: z.string(),
          itemId: z.string(),
          reason: z.enum(['defective', 'wrong_item', 'not_as_described', 'changed_mind']),
          notes: z.string().optional(),
        }),
        execute: async ({ orderId, itemId, reason, notes }) => {
          const returnRequest = await db.returns.create({
            orderId, itemId, reason, notes,
          })
          return {
            returnId: returnRequest.id,
            status: 'initiated',
            prepaidLabelUrl: returnRequest.labelUrl,
            message: `Return initiated. Print your prepaid label at: ${returnRequest.labelUrl}`,
          }
        },
      }),
    },
    onStepFinish: ({ toolCalls, finishReason }) => {
      // Log tool usage for analytics
      console.log('Step completed:', { toolCalls: toolCalls.length, finishReason })
    },
  })
  return result.toDataStreamResponse()
}
// Client: display tool call results inline
'use client'
import { useChat } from 'ai/react'
import type { Message, ToolInvocation } from 'ai'
// Renders a tool invocation inline in the chat transcript.
// Completed lookupOrder calls get a rich order card; other completed calls
// get a generic confirmation; in-flight calls show a progress hint.
function ToolCallDisplay({ toolInvocation }: { toolInvocation: ToolInvocation }) {
  if (toolInvocation.state === 'result') {
    if (toolInvocation.toolName === 'lookupOrder') {
      const order = toolInvocation.result
      return (
        <div className="bg-gray-50 rounded p-3 text-sm border">
          <div className="font-medium">Order {order.id}</div>
          <div className="text-gray-600">Status: {order.status}</div>
          {order.trackingNumber && (
            <div>Tracking: {order.trackingNumber}</div>
          )}
        </div>
      )
    }
    // Previously fell through to the "Using tool..." progress line even after
    // the result arrived, leaving searchFAQ/initiateReturn stuck in a
    // perpetual in-progress state; show a completed state instead.
    return <div className="text-gray-400 text-xs">Used tool: {toolInvocation.toolName}</div>
  }
  return <div className="text-gray-400 text-xs">Using tool: {toolInvocation.toolName}...</div>
}
// Minimal agent chat UI: renders each message's text plus any inline
// tool-call results via ToolCallDisplay.
export function AgentChat() {
  const chat = useChat({
    api: '/api/agent',
  })
  const { messages, input, handleInputChange, handleSubmit, isLoading } = chat
  return (
    <div>
      {messages.map(message => (
        <div key={message.id}>
          <div>{message.content}</div>
          {message.toolInvocations?.map((invocation, index) => (
            <ToolCallDisplay key={index} toolInvocation={invocation} />
          ))}
        </div>
      ))}
      <form onSubmit={handleSubmit}>
        <input value={input} onChange={handleInputChange} />
        <button type="submit" disabled={isLoading}>Send</button>
      </form>
    </div>
  )
}
Provider Switching and Fallback
// lib/ai-client.ts — unified client with fallback
import { anthropic } from '@ai-sdk/anthropic'
import { openai } from '@ai-sdk/openai'
import { streamText } from 'ai'
type Provider = 'anthropic' | 'openai'
// Resolve a Provider name to a concrete model instance.
// Defaults to Anthropic, matching the project-wide preference.
function getModel(provider: Provider = 'anthropic') {
  const factories = {
    anthropic: () => anthropic('claude-sonnet-4-6'),
    openai: () => openai('gpt-4o'),
  } as const
  return factories[provider]()
}
/**
 * Stream a chat completion from the selected provider, preferring Anthropic.
 *
 * NOTE(review): streamText returns immediately and surfaces provider errors
 * on the stream itself rather than throwing here, so this catch only covers
 * synchronous setup failures — a streaming-time Anthropic outage will NOT
 * trigger the OpenAI fallback. Confirm against the AI SDK error-handling
 * docs; a robust fallback likely needs onError / stream inspection instead.
 *
 * @param messages Conversation history in AI SDK message format.
 * @param options  Optional provider selection and sampling temperature.
 */
export async function streamAIResponse(
  messages: any[],
  options: { provider?: Provider; temperature?: number } = {}
) {
  const { provider = 'anthropic', temperature = 0.7 } = options
  try {
    return streamText({
      model: getModel(provider),
      messages,
      temperature,
    })
  } catch (error) {
    // Fallback to OpenAI if Anthropic is unavailable.
    if (provider === 'anthropic') {
      // Include the original error so the root cause isn't silently swallowed.
      console.warn('Anthropic unavailable, falling back to OpenAI:', error)
      return streamText({
        model: getModel('openai'),
        messages,
        temperature,
      })
    }
    throw error
  }
}
For the Next.js App Router patterns that host AI SDK route handlers, see the Next.js App Router guide for server components and route handler patterns. For the Anthropic SDK used directly for Claude integrations outside of Next.js, the Anthropic SDK guide covers direct API usage and advanced Claude features. The Claude Skills 360 bundle includes AI SDK skill sets covering tool calls, streaming, and multi-step agentic patterns. Start with the free tier to try AI chat component generation.