Pino is a low-overhead Node.js logger — it serializes JSON to stdout with minimal CPU impact using a dedicated worker thread for I/O. pino({ level: "info" }) creates the root logger. logger.child({ requestId }) creates a child logger that merges additional context into every log line. pino-http middleware adds per-request child loggers to req.log. redact strips sensitive fields from JSON output before writing. pino-pretty transforms JSON to human-readable colorized output in development. Async transport with pino.transport moves I/O to a worker to keep the main thread unblocked. Correlation IDs from HTTP headers (x-request-id, traceparent) integrate with OpenTelemetry distributed traces. pino.multistream fans log output to multiple destinations simultaneously. Claude Code generates Pino logger configurations, pino-http middleware setups, redact rules, correlation ID patterns, and the async transport configurations for production Node.js applications.
CLAUDE.md for Pino
## Pino Stack
- Version: pino >= 9.0, pino-http >= 10.0
- Root logger: pino({ level, name, base: { service, version }, redact: [...paths] })
- Child: logger.child({ requestId, userId }) — merges context onto every line
- HTTP: pino-http({ logger, customProps: (req) => ({ userId: req.user?.id }) })
- Levels: trace < debug < info < warn < error < fatal — set via LOG_LEVEL env var
- Redact: ["req.headers.authorization", "*.password", "body.card.number"]
- Dev: pino-pretty — pipe output: node server.js | pino-pretty
- Async transport: pino.transport({ target: "pino/file", options: { destination: 1 } })
Logger Configuration
// lib/logger.ts — root Pino logger
import pino from "pino"

const isDev = process.env.NODE_ENV !== "production"

/**
 * Root logger for the service. Every log line carries the base fields,
 * passes through the redact rules, and uses the standard serializers.
 * The domain loggers exported below are children of this instance, so
 * configuration applies to them automatically.
 */
export const logger = pino({
  // Explicit LOG_LEVEL wins; otherwise debug in dev, info in prod.
  level: process.env.LOG_LEVEL ?? (isDev ? "debug" : "info"),
  // Base fields included in all log lines
  base: {
    service: process.env.SERVICE_NAME ?? "api",
    version: process.env.npm_package_version ?? "unknown",
    env: process.env.NODE_ENV ?? "development",
  },
  // Redact PII and secrets from all log output before anything is written.
  // "*.key" paths match the key one level deep under any top-level key.
  redact: {
    paths: [
      "req.headers.authorization",
      "req.headers.cookie",
      "*.password",
      "*.passwordHash",
      "*.token",
      "*.accessToken",
      "*.refreshToken",
      "*.cardNumber",
      "*.cvv",
      "*.ssn",
    ],
    censor: "[REDACTED]",
  },
  // Standard serializers for Error / request / response objects.
  serializers: {
    err: pino.stdSerializers.err,
    req: pino.stdSerializers.req,
    res: pino.stdSerializers.res,
  },
  // Pretty-print in development, raw JSON in production
  transport: isDev
    ? {
        target: "pino-pretty",
        options: {
          colorize: true,
          translateTime: "SYS:standard",
          ignore: "pid,hostname,service,env",
          singleLine: false,
        },
      }
    : undefined,
  // ISO-8601 timestamps, NOT epoch milliseconds — stdTimeFunctions.isoTime
  // emits e.g. "time":"2024-01-01T12:00:00.000Z" (the original comment
  // claiming milliseconds was wrong; that would be stdTimeFunctions.epochTime).
  timestamp: pino.stdTimeFunctions.isoTime,
})

// Convenience child loggers for specific domains — each line they emit
// includes the { domain } field on top of the root base fields.
export const dbLogger = logger.child({ domain: "db" })
export const authLogger = logger.child({ domain: "auth" })
export const queueLogger = logger.child({ domain: "queue" })
Express / Hono Middleware
// middleware/request-logger.ts — Express
import { randomUUID } from "crypto"
import pinoHttp from "pino-http"
import { logger } from "../lib/logger"
import type { Request, Response } from "express"

/**
 * Per-request logging middleware. Attaches a child logger to req.log and
 * emits one completion line per request with correlation and user context.
 */
export const requestLogger = pinoHttp({
  logger,
  // Extra fields merged into each request's log lines. A request id is
  // taken from the inbound header when present, otherwise generated.
  customProps: (req: Request) => {
    const headers = req.headers
    return {
      requestId: headers["x-request-id"] ?? randomUUID(),
      userId: (req as any).user?.id,
      userAgent: headers["user-agent"],
    }
  },
  // Severity by outcome: error/5xx → error, 4xx → warn, everything else → info.
  customLogLevel: (_req: Request, res: Response, err?: Error) => {
    if (err || res.statusCode >= 500) return "error"
    return res.statusCode >= 400 ? "warn" : "info"
  },
  // One-line summary for successful requests.
  customSuccessMessage: (req: Request, res: Response) => {
    return `${req.method} ${req.url} ${res.statusCode}`
  },
  // One-line summary for failed requests, including the error message.
  customErrorMessage: (req: Request, res: Response, err: Error) => {
    return `${req.method} ${req.url} ${res.statusCode} — ${err.message}`
  },
  // Strip credentials from the serialized request headers.
  redact: ["req.headers.authorization", "req.headers.cookie"],
})
// Hono middleware equivalent
import { logger as pinoLogger } from "../lib/logger"
import type { Context } from "hono"

/**
 * Per-request logging middleware for Hono.
 *
 * Creates a request-scoped child logger keyed by x-request-id (generated
 * when the header is absent), stores it on the context, and logs request
 * start and completion with duration.
 *
 * Fix: `await next()` is now wrapped so a throwing downstream handler
 * still produces a log line. Previously a failed request logged
 * "Request started" and then nothing — no status, no duration, no error.
 * The error is logged with context and rethrown so Hono's own error
 * handling still runs.
 */
export async function requestLoggerMiddleware(c: Context, next: () => Promise<void>) {
  const start = Date.now()
  const requestId = c.req.header("x-request-id") ?? crypto.randomUUID()
  const reqLogger = pinoLogger.child({ requestId })
  c.set("logger" as any, reqLogger)
  reqLogger.info({
    msg: "Request started",
    method: c.req.method,
    url: c.req.url,
  })
  try {
    await next()
  } catch (err) {
    // Log the failure with duration, then rethrow for Hono's error handler.
    reqLogger.error({
      err,
      msg: "Request failed",
      method: c.req.method,
      url: c.req.url,
      ms: Date.now() - start,
    })
    throw err
  }
  const ms = Date.now() - start
  reqLogger.info({
    msg: "Request completed",
    method: c.req.method,
    url: c.req.url,
    status: c.res.status,
    ms,
  })
}
Usage Patterns
// services/order-service.ts — domain logging with child loggers
import { logger } from "../lib/logger"
import { db } from "../db"

const orderLogger = logger.child({ service: "orders" })

/**
 * Create an order for a customer after verifying that every line item
 * exists and has sufficient stock. All log lines carry the customer id
 * and item count via a request-scoped child logger; failures are logged
 * with context and rethrown to the caller.
 */
export async function createOrder(
  customerId: string,
  items: { productId: string; quantity: number }[]
) {
  const log = orderLogger.child({ customerId, itemCount: items.length })
  log.info("Creating order")
  try {
    // Validate each line item sequentially against current inventory.
    for (const { productId, quantity } of items) {
      const product = await db.products.findById(productId)
      if (!product) {
        log.warn({ productId }, "Product not found")
        throw new Error(`Product ${productId} not found`)
      }
      if (product.stock < quantity) {
        log.warn(
          { productId, requested: quantity, available: product.stock },
          "Insufficient stock"
        )
        throw new Error(`Insufficient stock for ${product.name}`)
      }
    }
    // All items validated — persist and log the identifiers for correlation.
    const order = await db.orders.create({ customerId, items })
    log.info({ orderId: order.id, totalCents: order.totalCents }, "Order created")
    return order
  } catch (err) {
    log.error({ err }, "Failed to create order")
    throw err
  }
}
/**
 * Update an order's status. Logs success or failure with the order id
 * (via a child logger) and the attempted status; errors are rethrown.
 */
export async function updateOrderStatus(orderId: string, status: string) {
  const log = orderLogger.child({ orderId })
  try {
    const order = await db.orders.update(orderId, { status })
    log.info({ status }, "Order status updated")
    return order
  } catch (err) {
    log.error({ err, status }, "Failed to update order status")
    throw err
  }
}
Async Transport and Multi-Destination
// lib/logger-production.ts — async transport for production
import pino from "pino"

/**
 * Multi-destination production logger: stdout + stderr + rotated file.
 *
 * Fix: the root level now defaults to "debug", the lowest level of any
 * stream below. The root level filters records BEFORE multistream fans
 * them out, so with the previous default of "info" the debug-level file
 * stream could never receive debug lines. Per-stream `level` entries do
 * the per-destination filtering; LOG_LEVEL still overrides everything.
 */
export const logger = pino(
  {
    // Must be <= the lowest stream level or those streams are starved.
    level: process.env.LOG_LEVEL ?? "debug",
    base: { service: process.env.SERVICE_NAME },
    redact: ["*.password", "*.token", "req.headers.authorization"],
    timestamp: pino.stdTimeFunctions.isoTime,
  },
  pino.multistream([
    // stdout for container log collection (k8s, Docker) — info and above.
    { stream: process.stdout, level: "info" },
    // Errors are additionally duplicated to stderr.
    { stream: process.stderr, level: "error" },
    // Async file transport (non-blocking I/O in worker thread) — receives
    // everything down to debug.
    {
      stream: pino.transport({
        target: "pino-roll",
        options: {
          file: "/var/log/app/app.log",
          frequency: "daily",
          mkdir: true,
          size: "50m", // Rotate at 50MB
          limit: { count: 7 }, // Keep 7 days
        },
      }),
      level: "debug",
    },
  ])
)
Correlation IDs with OpenTelemetry
// lib/logger-otel.ts — inject trace context into logs
import pino from "pino"
import { trace, context } from "@opentelemetry/api"

/**
 * Mixin invoked on every log call: merges the active OpenTelemetry span's
 * identifiers into the log line so logs can be joined to traces.
 * Returns an empty object when no span is active.
 */
function otelMixin() {
  const activeSpan = trace.getActiveSpan()
  if (!activeSpan) return {}
  const ctx = activeSpan.spanContext()
  return {
    traceId: ctx.traceId,
    spanId: ctx.spanId,
    traceFlags: ctx.traceFlags,
  }
}

export const logger = pino({
  level: process.env.LOG_LEVEL ?? "info",
  mixin: otelMixin,
  // Every log line now carries { traceId, spanId, traceFlags } for log/trace
  // correlation in Datadog, Grafana Loki, or any structured log system.
})
For the Winston logging alternative when a more feature-rich logger with built-in transports for HTTP, files, and streams and a larger ecosystem of third-party Winston transports is preferred over Pino’s performance-first approach, the Express middleware patterns cover Winston setup. For the OpenTelemetry SDK for traces, metrics, and logs in a unified observability pipeline — where structured logs should be correlated with distributed traces automatically, see the OpenTelemetry guide for the full instrumentation setup. The Claude Skills 360 bundle includes Pino skill sets covering structured logging, redaction, and correlation IDs. Start with the free tier to try Pino configuration generation.