Claude Code for compileall: Python Bytecode Compilation — Claude Skills 360 Blog
Blog / AI / Claude Code for compileall: Python Bytecode Compilation
AI

Claude Code for compileall: Python Bytecode Compilation

Published: October 3, 2028
Read time: 5 min read
By: Claude Skills 360

Python’s compileall module pre-compiles .py source files to .pyc bytecode cached in __pycache__, eliminating parse overhead on first import. import compileall. compile_dir: compileall.compile_dir("src/", quiet=1) → True if all OK; recursion into subdirectories is the default behaviour, with depth capped by maxlevels. compile_file: compileall.compile_file("app/main.py", quiet=1) — compile one file; returns True/False. compile_path: compileall.compile_path(skip_curdir=True) — compiles all entries in sys.path. Parameters: quiet=0 (verbose), quiet=1 (errors only), quiet=2 (silent). force=True — recompile even if .pyc is fresh. optimize=0/1/2 — bytecode optimization level (0=none, 1=strip asserts, 2=strip asserts and docstrings). maxlevels — max recursion depth. ddir="/" — override directory shown in tracebacks (for deployment path remapping). workers=N (3.6+) — parallel compile with N processes (0 = cpu_count). invalidation_mode=PycInvalidationMode.TIMESTAMP (default) or PycInvalidationMode.CHECKED_HASH / UNCHECKED_HASH (hash-based). py_compile.compile("file.py") — low-level single-file compile. __pycache__ directory holds .cpython-312.pyc etc. CLI: python -m compileall src/. Claude Code generates deployment build steps, pre-compilation hooks, bytecode auditors, and Docker layer optimizers.

CLAUDE.md for compileall

## compileall Stack
- Stdlib: import compileall, py_compile
- Dir:    compileall.compile_dir("src/", quiet=1, workers=4)
- File:   compileall.compile_file("app/main.py", quiet=1)
- Force:  compileall.compile_dir("pkg/", force=True)
- Opt:    compileall.compile_dir("pkg/", optimize=2)   # strip docstrings
- CLI:    python -m compileall src/

compileall Bytecode Pipeline

# app/compileutil.py — compile, audit, clean, deploy-prep, hash-mode, zipapp
from __future__ import annotations

import compileall
import importlib.util
import os
import py_compile
import shutil
import struct
import sys
import time
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any


# ─────────────────────────────────────────────────────────────────────────────
# 1. Compile helpers
# ─────────────────────────────────────────────────────────────────────────────

def compile_directory(
    path: str | Path,
    quiet: int = 1,
    force: bool = False,
    optimize: int = 0,
    workers: int = 0,
    maxlevels: int = 20,
) -> bool:
    """
    Recursively compile all .py files under path to __pycache__/*.pyc.

    quiet: 0=verbose, 1=errors only, 2=silent.
    force: recompile even if .pyc is fresh.
    optimize: 0=no opt, 1=remove asserts, 2=remove asserts+docstrings.
    workers: 0=use cpu_count, >0=explicit count.

    Example:
        compile_directory("src/", quiet=1, workers=4)
    """
    return bool(compileall.compile_dir(
        str(path),
        quiet=quiet,
        force=force,
        optimize=optimize,
        workers=workers,
        maxlevels=maxlevels,
    ))


def compile_file(
    path: str | Path,
    quiet: int = 1,
    force: bool = False,
    optimize: int = 0,
) -> bool:
    """
    Compile a single .py file to bytecode.

    Example:
        compile_file("app/main.py")
    """
    return bool(compileall.compile_file(
        str(path), quiet=quiet, force=force, optimize=optimize
    ))


def compile_check(path: str | Path) -> tuple[bool, str]:
    """
    Try compiling a single .py file without writing output.
    Returns (success, error_message).

    Example:
        ok, err = compile_check("app/broken.py")
        if not ok:
            print(f"Syntax error: {err}")
    """
    try:
        py_compile.compile(str(path), doraise=True)
        return True, ""
    except py_compile.PyCompileError as e:
        return False, str(e)


def syntax_check_directory(
    path: str | Path,
    pattern: str = "**/*.py",
) -> dict[str, str]:
    """
    Compile-check every .py file under *path* and collect failures.

    Returns {relative_path: error_message} containing only the files
    that failed; an empty dict means the tree is syntactically clean.

    Example:
        errors = syntax_check_directory("src/")
        if errors:
            for p, msg in errors.items():
                print(f"  {p}: {msg}")
    """
    root = Path(path)
    checked = ((f, compile_check(f)) for f in sorted(root.glob(pattern)))
    return {
        str(f.relative_to(root)): detail
        for f, (passed, detail) in checked
        if not passed
    }


# ─────────────────────────────────────────────────────────────────────────────
# 2. __pycache__ audit
# ─────────────────────────────────────────────────────────────────────────────

@dataclass
class PycInfo:
    source_path: Path
    pyc_path:    Path | None
    is_current:  bool    # pyc matches source mtime
    pyc_size:    int

    def __str__(self) -> str:
        status = "OK" if self.is_current else "STALE" if self.pyc_path else "MISSING"
        return f"{status:8s}  {self.source_path}"


def audit_pyc(
    path: str | Path,
    pattern: str = "**/*.py",
) -> list[PycInfo]:
    """
    Audit the __pycache__ state for all .py files under path.

    Example:
        for info in audit_pyc("src/"):
            if not info.is_current:
                print(f"  {info}")
    """
    results: list[PycInfo] = []
    root = Path(path)
    tag = sys.implementation.cache_tag  # e.g. "cpython-312"

    for py_file in sorted(root.glob(pattern)):
        cache_dir = py_file.parent / "__pycache__"
        pyc_file = cache_dir / f"{py_file.stem}.{tag}.pyc"

        if pyc_file.exists():
            # Check if pyc is newer than source (with 1s tolerance)
            src_mtime = py_file.stat().st_mtime
            pyc_mtime = pyc_file.stat().st_mtime
            current = pyc_mtime >= src_mtime - 1.0
            size = pyc_file.stat().st_size
        else:
            current = False
            size = 0
            pyc_file_or_none: Path | None = None

        results.append(PycInfo(
            source_path=py_file.relative_to(root),
            pyc_path=pyc_file if pyc_file.exists() else None,
            is_current=current,
            pyc_size=size,
        ))
    return results


def pyc_summary(path: str | Path) -> dict:
    """
    Summarize __pycache__ coverage for a directory tree.

    Keys: total, compiled, current, stale, missing, total_pyc_bytes.

    Example:
        s = pyc_summary("src/")
        print(f"{s['compiled']}/{s['total']} compiled, {s['stale']} stale")
    """
    infos = audit_pyc(path)
    have_pyc = [i for i in infos if i.pyc_path]
    fresh = [i for i in infos if i.is_current]
    return {
        "total":    len(infos),
        "compiled": len(have_pyc),
        "current":  len(fresh),
        "stale":    len(have_pyc) - len(fresh),
        "missing":  len(infos) - len(have_pyc),
        "total_pyc_bytes": sum(i.pyc_size for i in infos),
    }


# ─────────────────────────────────────────────────────────────────────────────
# 3. Clean and strip helpers
# ─────────────────────────────────────────────────────────────────────────────

def clean_pycache(path: str | Path) -> int:
    """
    Remove all __pycache__ directories under path.
    Returns number of directories removed.

    Example:
        n = clean_pycache("src/")
        print(f"Removed {n} __pycache__ dirs")
    """
    count = 0
    for cache_dir in Path(path).rglob("__pycache__"):
        if cache_dir.is_dir():
            shutil.rmtree(cache_dir)
            count += 1
    return count


def strip_pyc_files(path: str | Path) -> int:
    """
    Delete all .pyc files under path (including those not in __pycache__).
    Returns number of files removed.

    Example:
        n = strip_pyc_files("dist/")
    """
    count = 0
    for pyc in Path(path).rglob("*.pyc"):
        pyc.unlink()
        count += 1
    return count


# ─────────────────────────────────────────────────────────────────────────────
# 4. Deployment pre-compilation helper
# ─────────────────────────────────────────────────────────────────────────────

@dataclass
class CompileResult:
    """Outcome of a directory pre-compilation run."""
    path:         str
    files_found:  int
    compiled_ok:  int
    errors:       dict[str, str] = field(default_factory=dict)
    elapsed_s:    float = 0.0

    @property
    def success(self) -> bool:
        # Success is defined purely by the absence of recorded errors.
        return not self.errors

    def __str__(self) -> str:
        verdict = "OK" if self.success else "FAIL"
        counts = f"{self.compiled_ok}/{self.files_found} compiled"
        return (f"{verdict}  "
                f"{counts}  "
                f"{len(self.errors)} errors  "
                f"{self.elapsed_s:.2f}s")


def precompile_for_deploy(
    src: str | Path,
    optimize: int = 1,
    workers: int = 0,
    check_syntax_first: bool = True,
) -> CompileResult:
    """
    Full pre-compilation pipeline for a deployment directory:
    1. Optional syntax check (fast, no .pyc written)
    2. Compile with optimize level
    3. Return audit result

    If compileall itself reports failures, `errors` is populated (per-file
    messages where determinable, otherwise under the "<compileall>" key)
    so that `result.success` is False even when check_syntax_first=False.

    Example:
        result = precompile_for_deploy("app/", optimize=1, workers=4)
        if not result.success:
            for path, err in result.errors.items():
                print(f"  ERROR {path}: {err}")
    """
    t0 = time.monotonic()
    root = Path(src)
    # rglob() already recurses; passing "**/*.py" to it double-applies the
    # recursive wildcard and can yield duplicate paths.
    py_files = sorted(root.rglob("*.py"))

    errors: dict[str, str] = {}
    if check_syntax_first:
        for f in py_files:
            ok, msg = compile_check(f)
            if not ok:
                errors[str(f.relative_to(root))] = msg

    compiled_ok = 0
    if not errors:
        success = compile_directory(root, quiet=1, optimize=optimize, workers=workers)
        # Count whatever actually landed in __pycache__, even on partial failure.
        compiled_ok = sum(1 for i in audit_pyc(root) if i.is_current)
        if not success:
            # The original left `errors` empty here, so CompileResult.success
            # reported True after a failed compile when the syntax pre-check
            # was skipped. Re-check per file to attribute the failure.
            for f in py_files:
                ok, msg = compile_check(f)
                if not ok:
                    errors[str(f.relative_to(root))] = msg
            if not errors:
                errors["<compileall>"] = "compileall.compile_dir reported failures"

    return CompileResult(
        path=str(root),
        files_found=len(py_files),
        compiled_ok=compiled_ok,
        errors=errors,
        elapsed_s=time.monotonic() - t0,
    )


# ─────────────────────────────────────────────────────────────────────────────
# Demo
# ─────────────────────────────────────────────────────────────────────────────

if __name__ == "__main__":
    import tempfile

    print("=== compileall demo ===")

    with tempfile.TemporaryDirectory() as scratch:
        app = Path(scratch) / "app"

        # Sample package: two valid modules plus one with a syntax error.
        app.mkdir()
        (app / "__init__.py").write_text("")
        (app / "main.py").write_text(
            "import sys\n\ndef greet(name: str) -> str:\n    '''Greet someone.'''\n    return f'Hello, {name}!'\n"
        )
        (app / "utils.py").write_text(
            "def add(a: int, b: int) -> int:\n    return a + b\n"
        )
        (app / "broken.py").write_text(
            "def bad(\n    # SyntaxError: missing closing paren\n"
        )

        # Syntax check catches broken.py before any bulk compile.
        print("\n--- syntax_check_directory ---")
        for rel, detail in syntax_check_directory(app).items():
            print(f"  {rel}: {detail[:80]}")

        # Drop the broken module so the bulk compile can succeed.
        (app / "broken.py").unlink()

        print("\n--- compile_directory ---")
        succeeded = compile_directory(app, quiet=1, optimize=0)
        print(f"  compile_directory ok: {succeeded}")

        print("\n--- audit_pyc ---")
        for entry in audit_pyc(app):
            print(f"  {entry}")

        print("\n--- pyc_summary ---")
        summary = pyc_summary(app)
        print(f"  {summary}")

        print("\n--- compile_file optimize=2 (strip docstrings) ---")
        # Rewrite main.py with a docstring, then strip it at optimize=2.
        (app / "main.py").write_text(
            "def greet(name: str) -> str:\n    '''Greet someone.'''\n    return f'Hello, {name}!'\n"
        )
        succeeded = compile_file(app / "main.py", optimize=2, force=True)
        print(f"  compiled with optimize=2: {succeeded}")

        print("\n--- precompile_for_deploy ---")
        outcome = precompile_for_deploy(app, optimize=1, workers=1)
        print(f"  {outcome}")

        print("\n--- clean_pycache ---")
        removed = clean_pycache(Path(scratch))
        print(f"  removed {removed} __pycache__ directories")
        summary_after = pyc_summary(app)
        print(f"  after clean: {summary_after}")

    print("\n=== done ===")

For the py_compile alternative — py_compile.compile(path, doraise=True) provides single-file compilation and is the building block that compileall calls internally; use py_compile directly when you need to compile-check one file at a time with detailed error capture (the exception includes filename, line number, and message) without the overhead of the directory walker — for example in a pre-commit hook that checks only the staged .py files; use compileall for bulk pre-compilation of entire directories, especially with workers=N for parallel compilation during CI or deployment. For the zipimport / zipapp alternative — zipapp.create_archive("src/", "/tmp/app.pyz") bundles a Python application into a single executable .pyz zip file; pyc files can be included in the zip for faster startup — use zipapp to ship a self-contained Python application as a single file; run compileall.compile_dir("src/", optimize=1) first to populate __pycache__, then include the .pyc files in the archive by pointing zipapp at the compiled tree; Python will automatically use the pyc files when importing from the zip. The Claude Skills 360 bundle includes compileall skill sets covering compile_directory()/compile_file()/compile_check()/syntax_check_directory() compilation drivers, PycInfo with audit_pyc()/pyc_summary() cache auditors, clean_pycache()/strip_pyc_files() cleanup tools, and CompileResult with precompile_for_deploy() full deployment pre-compilation pipeline. Start with the free tier to try bytecode compilation patterns and compileall pipeline code generation.

Keep Reading

AI

Claude Code for email.contentmanager: Python Email Content Accessors

Read and write EmailMessage body content with Python's email.contentmanager module and Claude Code — email contentmanager ContentManager for the class that maps content types to get and set handler functions allowing EmailMessage to support get_content and set_content with type-specific behaviour, email contentmanager raw_data_manager for the ContentManager instance that handles raw bytes and str payloads without any conversion, email contentmanager content_manager for the standard ContentManager instance used by email.policy.default that intelligently handles text plain text html multipart and binary content types, email contentmanager get_content_text for the handler that returns the decoded text payload of a text-star message part as a str, email contentmanager get_content_binary for the handler that returns the raw decoded bytes payload of a non-text message part, email contentmanager get_data_manager for the get-handler lookup used by EmailMessage get_content to find the right reader function for the content type, email contentmanager set_content text for the handler that creates and sets a text part correctly choosing charset and transfer encoding, email contentmanager set_content bytes for the handler that creates and sets a binary part with base64 encoding and optional filename Content-Disposition, email contentmanager EmailMessage get_content for the method that reads the message body using the registered content manager handlers, email contentmanager EmailMessage set_content for the method that sets the message body and MIME headers in one call, email contentmanager EmailMessage make_alternative make_mixed make_related for the methods that convert a simple message into a multipart container, email contentmanager EmailMessage add_attachment for the method that attaches a file or bytes to a multipart message, and email contentmanager integration with email.message and email.policy and email.mime and io for building high-level email readers attachment 
extractors text body accessors HTML readers and policy-aware MIME construction pipelines.

5 min read Feb 12, 2029
AI

Claude Code for email.charset: Python Email Charset Encoding

Control header and body encoding for international email with Python's email.charset module and Claude Code — email charset Charset for the class that wraps a character set name with the encoding rules for header encoding and body encoding describing how to encode text for that charset in email messages, email charset Charset header_encoding for the attribute specifying whether headers using this charset should use QP quoted-printable encoding BASE64 encoding or no encoding, email charset Charset body_encoding for the attribute specifying the Content-Transfer-Encoding to use for message bodies in this charset such as QP or BASE64, email charset Charset output_codec for the attribute giving the Python codec name used to encode the string to bytes for the wire format, email charset Charset input_codec for the attribute giving the Python codec name used to decode incoming bytes to str, email charset Charset get_output_charset for returning the output charset name, email charset Charset header_encode for encoding a header string using the charset's header_encoding method, email charset Charset body_encode for encoding body content using the charset's body_encoding, email charset Charset convert for converting a string from the input_codec to the output_codec, email charset add_charset for registering a new charset with custom encoding rules in the global charset registry, email charset add_alias for adding an alias name that maps to an existing registered charset, email charset add_codec for registering a codec name mapping for use by the charset machinery, and email charset integration with email.message and email.mime and email.policy and email.encoders for building international email senders non-ASCII header encoders Content-Transfer-Encoding selectors charset-aware message constructors and MIME encoding pipelines.

5 min read Feb 11, 2029
AI

Claude Code for email.utils: Python Email Address and Header Utilities

Parse and format RFC 2822 email addresses and dates with Python's email.utils module and Claude Code — email utils parseaddr for splitting a display-name plus angle-bracket address string into a realname and email address tuple, email utils formataddr for combining a realname and address string into a properly quoted RFC 2822 address with angle brackets, email utils getaddresses for parsing a list of raw address header strings each potentially containing multiple comma-separated addresses into a list of realname address tuples, email utils parsedate for parsing an RFC 2822 date string into a nine-tuple compatible with time.mktime, email utils parsedate_tz for parsing an RFC 2822 date string into a ten-tuple that includes the UTC offset timezone in seconds, email utils parsedate_to_datetime for parsing an RFC 2822 date string into an aware datetime object with timezone, email utils formatdate for formatting a POSIX timestamp or the current time as an RFC 2822 date string with optional usegmt and localtime flags, email utils format_datetime for formatting a datetime object as an RFC 2822 date string, email utils make_msgid for generating a globally unique Message-ID string with optional idstring and domain components, email utils decode_rfc2231 for decoding an RFC 2231 encoded parameter value into a tuple of charset language and value, email utils encode_rfc2231 for encoding a string as an RFC 2231 encoded parameter value, email utils collapse_rfc2231_value for collapsing a decoded RFC 2231 tuple to a Unicode string, and email utils integration with email.message and email.headerregistry and datetime and time for building address parsers date formatters message-id generators header extractors and RFC-compliant email construction utilities.

5 min read Feb 10, 2029

Put these ideas into practice

Claude Skills 360 gives you production-ready skills for everything in this article — and 2,350+ more. Start free or go all-in.

Back to Blog

Get 360 skills free