File uploads touch security (malicious files), performance (large files blocking your server), and cost (bandwidth). The pattern that solves all three: the browser uploads directly to S3 using a presigned URL generated by your API, so large files never pass through your server. Claude Code generates the presigned URL flow, chunked upload handling, and the post-upload processing pipeline.
Presigned URL Upload Flow
Implement direct S3 uploads from the browser.
Server generates a presigned PUT URL. Browser uploads directly to S3.
After upload, server processes the file (validate, thumbnail, virus scan).
Server: Generate Upload URL
// api/upload/presign.ts
import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
import { randomUUID } from 'crypto';
import { z } from 'zod';
const s3 = new S3Client({ region: process.env.AWS_REGION });

// Whitelist of accepted content types. Declared `as const` so the literal
// types survive and z.enum() can take the tuple directly — no unsafe
// `as [string, ...string[]]` cast needed.
const ALLOWED_MIME_TYPES = ['image/jpeg', 'image/png', 'image/webp', 'application/pdf'] as const;
const MAX_FILE_SIZE = 50 * 1024 * 1024; // 50MB — keep in sync with the client-side dropzone limit

// Request-body contract for the presign endpoint. Rejects unknown MIME types
// and oversized files before any S3 work happens.
const presignSchema = z.object({
  fileName: z.string().min(1).max(255),
  mimeType: z.enum(ALLOWED_MIME_TYPES),
  fileSize: z.number().positive().max(MAX_FILE_SIZE),
});
/**
 * POST /api/upload/presign
 * Validates the requested upload, pre-registers it in the DB, and returns a
 * short-lived presigned PUT URL that the browser uploads to directly —
 * the file bytes never pass through this server.
 */
export async function POST(request: Request) {
  const session = await getSession(request);
  // Reject unauthenticated callers before doing any S3 or DB work.
  // NOTE(review): assumes getSession returns a falsy value when there is no
  // session — confirm against its implementation.
  if (!session) {
    return Response.json({ error: 'Unauthorized' }, { status: 401 });
  }

  const body = presignSchema.parse(await request.json());

  // Derive the extension from the *validated* MIME type, not the
  // client-supplied file name — otherwise "evil.exe" sent with
  // mimeType "image/jpeg" produces a key ending in ".exe".
  const EXTENSION_BY_MIME: Record<string, string> = {
    'image/jpeg': 'jpg',
    'image/png': 'png',
    'image/webp': 'webp',
    'application/pdf': 'pdf',
  };
  const extension = EXTENSION_BY_MIME[body.mimeType] ?? 'bin';

  // Unguessable key, namespaced per user.
  const key = `uploads/${session.userId}/${randomUUID()}.${extension}`;

  const command = new PutObjectCommand({
    Bucket: process.env.UPLOAD_BUCKET,
    Key: key,
    ContentType: body.mimeType,
    ContentLength: body.fileSize, // signed — the client's Content-Length must match
    // Tag for lifecycle policy — move to Glacier if not processed in 7 days.
    // NOTE(review): Tagging and Metadata become signed x-amz-* headers on the
    // presigned PUT; the browser must send those headers or S3 rejects the
    // signature. The client below only sets Content-Type — confirm this, or
    // hoist these values into query parameters / move them to processing.
    Tagging: `status=pending&userId=${session.userId}`,
    Metadata: {
      'original-name': encodeURIComponent(body.fileName),
      'user-id': session.userId,
      'uploaded-at': new Date().toISOString(),
    },
  });

  const uploadUrl = await getSignedUrl(s3, command, {
    expiresIn: 300, // 5 minute expiry — enough for a 50MB upload on slow links
  });

  // Pre-register the upload so /api/upload/process can verify existence,
  // ownership, and status before touching the object.
  const uploadId = randomUUID();
  await db('uploads').insert({
    id: uploadId,
    user_id: session.userId,
    s3_key: key,
    original_filename: body.fileName,
    mime_type: body.mimeType,
    file_size: body.fileSize,
    status: 'pending',
  });

  return Response.json({ uploadId, uploadUrl, key });
}
Client: Upload UI
// components/FileUploader.tsx
import { useState, useCallback } from 'react';
import { useDropzone } from 'react-dropzone';
/** Tracks a single file upload through its client-side lifecycle. */
type UploadState = {
  /** Current phase: idle → uploading → processing → done (or error). */
  status: 'idle' | 'uploading' | 'processing' | 'done' | 'error';
  /** Upload progress, 0–100, from the XHR upload progress events. */
  progress: number;
  /** CDN URL of the processed file — set once status is 'done'. */
  fileUrl?: string;
  /** Human-readable failure message — set once status is 'error'. */
  error?: string;
};
export function FileUploader({ onUploadComplete }: { onUploadComplete: (url: string) => void }) {
const [state, setState] = useState<UploadState>({ status: 'idle', progress: 0 });
const uploadFile = useCallback(async (file: File) => {
setState({ status: 'uploading', progress: 0 });
try {
// 1. Get presigned URL from server
const presignResponse = await fetch('/api/upload/presign', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
fileName: file.name,
mimeType: file.type,
fileSize: file.size,
}),
});
const { uploadId, uploadUrl } = await presignResponse.json();
// 2. Upload directly to S3 with progress tracking
await new Promise<void>((resolve, reject) => {
const xhr = new XMLHttpRequest();
xhr.upload.onprogress = (event) => {
if (event.lengthComputable) {
setState(prev => ({
...prev,
progress: Math.round((event.loaded / event.total) * 100),
}));
}
};
xhr.onload = () => {
if (xhr.status >= 200 && xhr.status < 300) resolve();
else reject(new Error(`Upload failed: ${xhr.status}`));
};
xhr.onerror = () => reject(new Error('Network error'));
xhr.open('PUT', uploadUrl);
xhr.setRequestHeader('Content-Type', file.type);
xhr.send(file);
});
// 3. Notify server to process the uploaded file
setState({ status: 'processing', progress: 100 });
const processResponse = await fetch('/api/upload/process', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ uploadId }),
});
const { fileUrl } = await processResponse.json();
setState({ status: 'done', progress: 100, fileUrl });
onUploadComplete(fileUrl);
} catch (error) {
setState({ status: 'error', progress: 0, error: (error as Error).message });
}
}, [onUploadComplete]);
const { getRootProps, getInputProps, isDragActive } = useDropzone({
onDrop: ([file]) => { if (file) uploadFile(file); },
accept: { 'image/*': ['.jpg', '.jpeg', '.png', '.webp'], 'application/pdf': ['.pdf'] },
maxSize: 50 * 1024 * 1024,
multiple: false,
});
return (
<div>
{state.status === 'idle' && (
<div
{...getRootProps()}
className={`dropzone ${isDragActive ? 'active' : ''}`}
role="button"
aria-label="Upload file — click or drag and drop"
>
<input {...getInputProps()} />
<p>{isDragActive ? 'Drop file here' : 'Drag file here or click to select'}</p>
<p className="hint">Images up to 50MB, PDF up to 50MB</p>
</div>
)}
{(state.status === 'uploading' || state.status === 'processing') && (
<div role="progressbar" aria-valuenow={state.progress} aria-valuemin={0} aria-valuemax={100}>
<div className="progress-bar" style={{ width: `${state.progress}%` }} />
<p>{state.status === 'uploading' ? `Uploading... ${state.progress}%` : 'Processing...'}</p>
</div>
)}
{state.status === 'error' && (
<p role="alert" className="error">{state.error}</p>
)}
</div>
);
}
Server: Post-Upload Processing
// api/upload/process.ts
import { S3Client, GetObjectCommand, PutObjectCommand } from '@aws-sdk/client-s3';
import sharp from 'sharp';

// The original snippet used `s3` without ever instantiating it in this file —
// it would not compile. Create the client once at module scope.
const s3 = new S3Client({ region: process.env.AWS_REGION });

/**
 * POST /api/upload/process
 * Called by the client after the direct-to-S3 upload completes. Validates the
 * object's real content, generates derived assets (thumbnail + optimized
 * original for images), copies results to the public CDN bucket, and marks
 * the upload row processed.
 */
export async function POST(request: Request) {
  const { uploadId } = await request.json();

  const upload = await db('uploads').where('id', uploadId).first();
  if (!upload) return Response.json({ error: 'Upload not found' }, { status: 404 });

  // Guard against double-processing (client retries, replayed requests).
  if (upload.status !== 'pending') {
    return Response.json({ error: 'Upload already processed' }, { status: 409 });
  }
  // NOTE(review): no ownership check here — anyone who learns an uploadId can
  // trigger processing. Verify the session user matches upload.user_id.

  // Download the raw object from the private upload bucket.
  const getCommand = new GetObjectCommand({ Bucket: process.env.UPLOAD_BUCKET, Key: upload.s3_key });
  const s3Object = await s3.send(getCommand);
  const buffer = Buffer.from(await s3Object.Body!.transformToByteArray());

  // --- Images: validate, thumbnail, optimize ---
  if (upload.mime_type.startsWith('image/')) {
    // Validate it's actually an image (not a renamed executable) by asking
    // sharp to decode the real bytes.
    const metadata = await sharp(buffer).metadata();
    if (!metadata.format) {
      await db('uploads').where('id', uploadId).update({ status: 'rejected', rejection_reason: 'invalid_image' });
      return Response.json({ error: 'Invalid image file' }, { status: 400 });
    }

    // Generate a 400x400 thumbnail; 'attention' crop centers on the most
    // visually salient region.
    const thumbnail = await sharp(buffer)
      .resize(400, 400, { fit: 'cover', position: 'attention' })
      .webp({ quality: 80 })
      .toBuffer();
    const thumbnailKey = upload.s3_key.replace(/\.[^.]+$/, '_thumb.webp');
    await s3.send(new PutObjectCommand({
      Bucket: process.env.ASSETS_BUCKET, // Public CDN bucket
      Key: thumbnailKey,
      Body: thumbnail,
      ContentType: 'image/webp',
      CacheControl: 'public, max-age=31536000, immutable', // key is content-unique, safe to cache forever
    }));

    // Also optimize the original: cap dimensions, never upscale.
    const optimized = await sharp(buffer)
      .resize(2000, 2000, { fit: 'inside', withoutEnlargement: true })
      .webp({ quality: 85 })
      .toBuffer();
    const optimizedKey = upload.s3_key.replace(/\.[^.]+$/, '.webp');
    await s3.send(new PutObjectCommand({
      Bucket: process.env.ASSETS_BUCKET,
      Key: optimizedKey,
      Body: optimized,
      ContentType: 'image/webp',
      CacheControl: 'public, max-age=31536000, immutable',
    }));

    const cdnUrl = `${process.env.CDN_URL}/${optimizedKey}`;
    const thumbUrl = `${process.env.CDN_URL}/${thumbnailKey}`;
    await db('uploads').where('id', uploadId).update({
      status: 'processed',
      cdn_url: cdnUrl,
      thumbnail_url: thumbUrl,
      processed_at: new Date(),
    });
    return Response.json({ fileUrl: cdnUrl, thumbnailUrl: thumbUrl });
  }

  // --- PDFs: validate magic bytes, then publish to the CDN bucket ---
  // Images get a sharp decode check above; give PDFs the equivalent so a
  // renamed executable can't land on the public CDN with a .pdf name.
  if (!buffer.subarray(0, 5).toString('latin1').startsWith('%PDF-')) {
    await db('uploads').where('id', uploadId).update({ status: 'rejected', rejection_reason: 'invalid_pdf' });
    return Response.json({ error: 'Invalid PDF file' }, { status: 400 });
  }

  const cdnKey = `pdfs/${upload.s3_key.split('/').pop()}`;
  await s3.send(new PutObjectCommand({
    Bucket: process.env.ASSETS_BUCKET,
    Key: cdnKey,
    Body: buffer,
    ContentType: 'application/pdf',
    // Force download rather than inline rendering; preserve the original name.
    ContentDisposition: `attachment; filename="${encodeURIComponent(upload.original_filename)}"`,
  }));

  const cdnUrl = `${process.env.CDN_URL}/${cdnKey}`;
  await db('uploads').where('id', uploadId).update({ status: 'processed', cdn_url: cdnUrl });
  return Response.json({ fileUrl: cdnUrl });
}
For multipart uploads — required by S3 for files over 5GB and recommended for large files generally — see the AWS serverless guide for S3 multipart upload patterns. For the Cloudflare R2 alternative to S3 without egress fees, see the Cloudflare Workers guide. The Claude Skills 360 bundle includes file upload skill sets for direct S3 upload, image pipelines, and CDN delivery. Start with the free tier to try file upload scaffolding.