Claude Code for Background Jobs: Workers, Queues, and Long-Running Processes — Claude Skills 360 Blog
Blog / Development / Claude Code for Background Jobs: Workers, Queues, and Long-Running Processes
Development

Claude Code for Background Jobs: Workers, Queues, and Long-Running Processes

Published: August 7, 2026
Read time: 8 minutes
By: Claude Skills 360

Background jobs execute work outside of request/response cycles — sending emails, processing uploads, generating reports, syncing with third-party APIs. The reliability requirements are different from HTTP handlers: jobs must survive server restarts, handle partial failures gracefully, and avoid running the same job twice. Claude Code generates the worker infrastructure, retry policies, and idempotency patterns for production job systems.

BullMQ Job Queues

Set up BullMQ queues for our application.
Queues needed: emails (high priority), image processing (medium), reports (low).
Retry failed jobs with backoff. Dead letter queue for jobs that exhaust retries.

Queue Setup

// queues/index.ts
import { Queue, QueueEvents } from 'bullmq';
import { Redis } from 'ioredis';

// One shared Redis connection for every queue in this process.
const connection = new Redis(process.env.REDIS_URL!, {
  maxRetriesPerRequest: null, // BullMQ requires this to be null
  enableReadyCheck: false,
});

// Transactional email: retried aggressively with exponential backoff.
// (Per-job priority is set at enqueue time; queues themselves are equal.)
export const emailQueue = new Queue('emails', {
  connection,
  defaultJobOptions: {
    attempts: 5,
    backoff: { type: 'exponential', delay: 2000 },
    removeOnComplete: { count: 100 }, // retain the 100 most recent successes
    removeOnFail: { count: 500 },     // retain the 500 most recent failures for debugging
  },
});

// Image processing: fewer attempts, longer initial backoff.
export const imageQueue = new Queue('images', {
  connection,
  defaultJobOptions: {
    attempts: 3,
    backoff: { type: 'exponential', delay: 5000 },
    removeOnComplete: { count: 50 },
    removeOnFail: { count: 100 },
  },
});

// Reports: cheap to re-run, so only two attempts a minute apart.
export const reportQueue = new Queue('reports', {
  connection,
  defaultJobOptions: {
    attempts: 2,
    backoff: { type: 'fixed', delay: 60000 }, // 1 min between retries
    removeOnComplete: { count: 20 },
    removeOnFail: false, // never prune failed report jobs
  },
});

// Dead letter queue — terminal parking spot for jobs that exhausted all retries.
export const deadLetterQueue = new Queue('dead-letter', { connection });

Workers

// workers/emailWorker.ts
import { Worker, Job } from 'bullmq';
import { sendEmail } from '../email';
import { deadLetterQueue } from '../queues';

interface EmailJob {
  to: string;
  templateId: string;
  data: Record<string, unknown>;
  idempotencyKey: string; // Prevent duplicate sends
}

const worker = new Worker<EmailJob>(
  'emails',
  async (job: Job<EmailJob>) => {
    const { to, templateId, data, idempotencyKey } = job.data;

    // Idempotency: check if already sent
    const alreadySent = await db('sent_emails').where('idempotency_key', idempotencyKey).first();
    if (alreadySent) {
      job.log(`Skipping duplicate email for key: ${idempotencyKey}`);
      return { skipped: true };
    }

    job.log(`Sending ${templateId} to ${to}`);
    await sendEmail(to, templateId, data);

    // Record delivery
    await db('sent_emails').insert({
      to,
      template_id: templateId,
      idempotency_key: idempotencyKey,
      sent_at: new Date(),
    });

    job.updateProgress(100);
    return { delivered: true };
  },
  {
    connection: redis,
    concurrency: 10,           // Process 10 emails simultaneously
    limiter: {
      max: 100,                // Rate limit: max 100 jobs per period
      duration: 60000,         // ...per minute
    },
  },
);

// Handle exhausted jobs — move to dead letter queue
worker.on('failed', async (job, error) => {
  if (job && job.attemptsMade >= (job.opts.attempts ?? 1)) {
    await deadLetterQueue.add(
      'failed-email',
      { originalJob: job.data, error: error.message, failedAt: new Date() },
      { jobId: `dead:${job.id}` }
    );
    console.error(`Job ${job.id} moved to dead letter queue:`, error.message);
  }
});

worker.on('error', (error) => {
  console.error('Worker error:', error);
});
// workers/imageWorker.ts
interface ImageJob {
  uploadId: string;
  s3Key: string;
  operations: Array<'thumbnail' | 'webp' | 'blur_faces'>;
}

const imageWorker = new Worker<ImageJob>(
  'images',
  async (job: Job<ImageJob>) => {
    const { uploadId, s3Key, operations } = job.data;

    await db('uploads').where('id', uploadId).update({ status: 'processing' });

    for (let i = 0; i < operations.length; i++) {
      const op = operations[i];
      job.log(`Processing operation: ${op}`);

      await processOperation(s3Key, op);

      // Report progress: each operation is worth equal weight
      await job.updateProgress(Math.round(((i + 1) / operations.length) * 100));
    }

    await db('uploads').where('id', uploadId).update({ status: 'done' });
    return { uploadId, processed: operations };
  },
  { connection: redis, concurrency: 3 }, // Image processing is CPU-intensive
);

Enqueueing Jobs

// In your application code:

// High-priority email. jobId doubles as a dedupe key: BullMQ will not add
// a second job with the same id while one is queued or running.
await emailQueue.add('welcome', {
  to: user.email,
  templateId: 'welcome',
  data: { userName: user.name },
  idempotencyKey: `welcome:${user.id}`,
}, {
  priority: 1,       // 1 = highest priority
  delay: 0,          // Send immediately
  jobId: `welcome:${user.id}`, // Deduplicate — won't add if already queued/running
});

// Delayed email (send in 1 hour)
await emailQueue.add('onboarding-followup', {
  to: user.email,
  templateId: 'onboarding-day-1',
  data: { userName: user.name },
  idempotencyKey: `onboarding-d1:${user.id}`,
}, {
  delay: 60 * 60 * 1000, // 1 hour in ms
  jobId: `onboarding-d1:${user.id}`,
});

// Repeating job (daily report).
// FIX: modern BullMQ (v3+) renamed the cron expression option from
// `cron` to `pattern`; `cron` is the legacy spelling.
await reportQueue.add('daily-revenue', {}, {
  repeat: { pattern: '0 9 * * *' }, // Every day at 9am
  jobId: 'daily-revenue-report',    // Single repeating job
});

Job Health Dashboard

// api/admin/jobs.ts — monitor queue health
import { QueueEvents, Queue } from 'bullmq';

/**
 * Snapshot the per-state job counts (waiting/active/completed/failed/delayed)
 * for every monitored queue. Counts for each queue are fetched in parallel,
 * and all queues are queried concurrently.
 */
export async function getQueueStats() {
  const monitored = [emailQueue, imageQueue, reportQueue, deadLetterQueue];

  // Collect the five state counters for a single queue concurrently.
  const collect = async (queue) => {
    const [waiting, active, completed, failed, delayed] = await Promise.all([
      queue.getWaitingCount(),
      queue.getActiveCount(),
      queue.getCompletedCount(),
      queue.getFailedCount(),
      queue.getDelayedCount(),
    ]);

    return { name: queue.name, waiting, active, completed, failed, delayed };
  };

  return Promise.all(monitored.map(collect));
}

// Retry dead letter jobs
/**
 * Re-enqueue a dead-lettered job onto its original queue, then remove it
 * from the DLQ (in that order, so a failure here cannot lose the job).
 *
 * @throws Error when no DLQ job exists for the given id.
 */
export async function retryDeadLetter(jobId: string) {
  const job = await deadLetterQueue.getJob(jobId);
  if (!job) throw new Error('Job not found');

  // Dead-letter entries are named `failed-<original>` by the workers'
  // 'failed' handlers; strip the prefix to recover the original job name.
  const originalName = job.name.replace(/^failed-/, '');

  // NOTE(review): the email worker enqueues 'failed-email' but its source
  // queue is named 'emails' (plural) — confirm getQueueByName maps the
  // singular form back to the correct queue.
  const originalQueue = getQueueByName(originalName);

  // FIX: re-add the ORIGINAL payload under the original job name; the
  // original code re-used `job.name` ('failed-email'), so the retried job
  // would have carried the dead-letter wrapper name.
  await originalQueue.add(originalName, job.data.originalJob);

  await job.remove();
}

For the Redis infrastructure that BullMQ depends on, see the Redis guide. For scheduled recurring jobs (cron-based) rather than event-driven queues, see the scheduled jobs guide. The Claude Skills 360 bundle includes background job skill sets for BullMQ, priority queues, and dead letter patterns. Start with the free tier to try job worker scaffolding.

Put these ideas into practice

Claude Skills 360 gives you production-ready skills for everything in this article — and 2,350+ more. Start free or go all-in.

Back to Blog

Get 360 skills free