Python’s tempfile module creates temporary files and directories in a system-appropriate location, with automatic cleanup. import tempfile. Anonymous file: tempfile.TemporaryFile(mode='w+b', suffix=None, prefix=None, dir=None) — no directory entry, deleted on close; use as context manager. Named file: tempfile.NamedTemporaryFile(mode='w+b', delete=True, suffix=None, prefix=None, dir=None) — has a .name path attribute; delete=False leaves the file on disk after closing. Spooled: tempfile.SpooledTemporaryFile(max_size=0, mode='w+b') — buffers in RAM up to max_size bytes, then spills to disk; max_size=0 (the default) never rolls over automatically — data stays in memory until rollover() is called explicitly. Directory: tempfile.TemporaryDirectory(suffix=None, prefix=None, dir=None) — use as context manager or call .cleanup() manually; .name gives the path string. Low-level: fd, path = tempfile.mkstemp(suffix=None, prefix='tmp', dir=None) — returns open OS file descriptor + path; caller must close fd and delete file. path = tempfile.mkdtemp(suffix=None, prefix='tmp', dir=None) — caller must delete. Defaults: tempfile.gettempdir() → system temp dir (/tmp on Unix, %TEMP% on Windows); override with tempfile.tempdir = "/custom/tmp". Claude Code generates atomic write helpers, work-directory managers, upload staging areas, and test fixture factories.
CLAUDE.md for tempfile
## tempfile Stack
- Stdlib: import tempfile
- Anon: with tempfile.TemporaryFile() as f: f.write(data)
- Named: with tempfile.NamedTemporaryFile(suffix=".csv", delete=False) as f:
- path = f.name; f.write(data)
- Dir: with tempfile.TemporaryDirectory() as td:
- work = pathlib.Path(td) / "output.txt"
- Low: fd, path = tempfile.mkstemp(suffix=".db")
- os.close(fd) # close the fd; caller deletes file
- Note: NamedTemporaryFile(delete=False) must be manually removed
tempfile Temporary File Pipeline
# app/tempfileutil.py — atomic write, staged upload, work dir, fixtures
from __future__ import annotations
import io
import os
import shutil
import tempfile
from contextlib import contextmanager
from dataclasses import dataclass, field
from pathlib import Path
from typing import Generator, Iterator
# ─────────────────────────────────────────────────────────────────────────────
# 1. Atomic file writer
# ─────────────────────────────────────────────────────────────────────────────
def atomic_write(
dest: "str | Path",
data: bytes | str,
encoding: str = "utf-8",
) -> Path:
"""
Write data to dest atomically: write to a temp file in the same directory,
then rename. Avoids leaving a partial file if the process crashes mid-write.
Example:
atomic_write("/etc/myapp/config.json", json.dumps(cfg))
"""
dest = Path(dest)
dest.parent.mkdir(parents=True, exist_ok=True)
fd, tmp_path = tempfile.mkstemp(dir=dest.parent, prefix=".tmp_")
try:
with os.fdopen(fd, "wb") as f:
if isinstance(data, str):
f.write(data.encode(encoding))
else:
f.write(data)
os.replace(tmp_path, dest)
return dest
except Exception:
try:
os.unlink(tmp_path)
except OSError:
pass
raise
@contextmanager
def atomic_open(
    dest: "str | Path",
    mode: str = "w",
    encoding: str = "utf-8",
) -> Generator[io.TextIOWrapper, None, None]:
    """
    Yield a writable file whose contents land at *dest* atomically.

    The yielded handle actually points at a temp file created next to *dest*;
    on clean exit it is renamed over *dest* with os.replace(). If the body
    raises, the staging file is removed and *dest* is left untouched.

    Example:
        with atomic_open("/data/output.csv") as f:
            writer = csv.writer(f)
            writer.writerows(rows)
    """
    target = Path(dest)
    target.parent.mkdir(parents=True, exist_ok=True)
    fd, staging = tempfile.mkstemp(dir=target.parent, prefix=".tmp_")
    try:
        # Binary modes must not receive an encoding.
        handle = os.fdopen(fd, mode, encoding=None if "b" in mode else encoding)
        with handle:
            yield handle
        os.replace(staging, target)
    except Exception:
        try:
            os.unlink(staging)
        except OSError:
            pass
        raise
# ─────────────────────────────────────────────────────────────────────────────
# 2. Work directory context manager
# ─────────────────────────────────────────────────────────────────────────────
@dataclass
class WorkDir:
    """
    Self-cleaning temporary working directory.

    Entering the context creates the directory; leaving it removes the whole
    tree. The location is exposed as a pathlib.Path via ``.path``.

    Example:
        with WorkDir(prefix="build_") as wd:
            out = wd.path / "result.tar.gz"
            subprocess.run(["make", "-C", str(wd.path)])
        # directory is deleted here
    """
    prefix: str = "work_"
    suffix: str = ""
    base: "str | Path | None" = None
    path: Path = field(default=None, repr=True)  # type: ignore
    _td: tempfile.TemporaryDirectory = field(default=None, repr=False)  # type: ignore

    def __enter__(self) -> "WorkDir":
        # Directory creation is deferred until the context is entered.
        self._td = tempfile.TemporaryDirectory(
            prefix=self.prefix,
            suffix=self.suffix,
            dir=str(self.base) if self.base else None,
        )
        self.path = Path(self._td.name)
        return self

    def __exit__(self, *_: object) -> None:
        self._td.cleanup()

    def subdir(self, name: str) -> Path:
        """Ensure *name* exists as a directory under the work dir; return it."""
        target = self.path / name
        target.mkdir(parents=True, exist_ok=True)
        return target

    def file(self, name: str, content: "bytes | str | None" = None) -> Path:
        """Create file *name* under the work dir; write *content* or touch empty."""
        target = self.path / name
        target.parent.mkdir(parents=True, exist_ok=True)
        if content is None:
            target.touch()
        elif isinstance(content, str):
            target.write_text(content)
        else:
            target.write_bytes(content)
        return target
# ─────────────────────────────────────────────────────────────────────────────
# 3. Upload staging area
# ─────────────────────────────────────────────────────────────────────────────
@dataclass
class StagedUpload:
    """
    Buffer an upload in a NamedTemporaryFile, then commit() it to a destination.

    Lets you validate or transform the payload before it reaches its final
    path. Call ``commit(dest)`` to close the staging file and move it into
    place; until then the data lives only in the temp file. On an exception
    inside the with-block the staged temp file is deleted automatically.

    Example:
        with StagedUpload(suffix=".jpg") as staged:
            staged.write(request.body)
            validate_image(staged)
            staged.commit("/uploads/photo.jpg")
        # success: file is at staged.final_path
        # exception: temp file is deleted, final path untouched
    """
    suffix: str = ""
    max_bytes: int = 100 * 1024 * 1024  # 100 MB upload cap
    _bytes: int = field(default=0, repr=False)  # bytes accepted so far
    _tf: tempfile.NamedTemporaryFile = field(default=None, repr=False)  # type: ignore
    final_path: "Path | None" = field(default=None, repr=True)  # set by commit()

    def __enter__(self) -> "StagedUpload":
        # delete=False: the file must survive close() so commit() can move it.
        self._tf = tempfile.NamedTemporaryFile(
            suffix=self.suffix, delete=False
        )
        return self

    def write(self, data: bytes) -> None:
        """Append a chunk; raise ValueError if the size limit would be exceeded.

        BUG FIX: the counter used to be incremented before the limit check,
        so a rejected chunk permanently counted against the quota even though
        it was never written. Check first, then mutate.
        """
        if self._bytes + len(data) > self.max_bytes:
            raise ValueError(
                f"Upload exceeds {self.max_bytes} bytes limit"
            )
        self._bytes += len(data)
        self._tf.write(data)

    def flush(self) -> None:
        """Flush buffered bytes through to the OS."""
        self._tf.flush()

    def commit(self, dest: "str | Path") -> Path:
        """Close the staging file and move it to *dest*; return the Path."""
        dest = Path(dest)
        dest.parent.mkdir(parents=True, exist_ok=True)
        self._tf.close()
        # shutil.move copes with cross-filesystem moves, unlike os.replace.
        shutil.move(str(self._tf.name), dest)
        self.final_path = dest
        return dest

    def __exit__(self, exc_type: object, *_: object) -> None:
        if self._tf and not self._tf.closed:
            self._tf.close()
        if exc_type is not None and self.final_path is None:
            # Error before commit: remove the orphaned staging file.
            # (After commit the temp path no longer exists, so skip.)
            try:
                os.unlink(self._tf.name)
            except OSError:
                pass
# ─────────────────────────────────────────────────────────────────────────────
# 4. SpooledTemporaryFile for memory-efficient buffering
# ─────────────────────────────────────────────────────────────────────────────
def spool_and_process(
    chunks: Iterator[bytes],
    max_memory: int = 1024 * 1024,
) -> bytes:
    """
    Concatenate an iterable of byte chunks through a SpooledTemporaryFile.

    Data stays in RAM until the running total passes *max_memory* bytes, at
    which point the buffer transparently rolls over to a real disk file.
    Returns the complete payload as one bytes object.

    Example:
        result = spool_and_process(response.iter_content(8192), max_memory=4_000_000)
    """
    with tempfile.SpooledTemporaryFile(max_size=max_memory) as spool:
        # writelines() accepts any iterable of bytes-like chunks.
        spool.writelines(chunks)
        spool.seek(0)
        return spool.read()
# ─────────────────────────────────────────────────────────────────────────────
# 5. Test fixture helpers
# ─────────────────────────────────────────────────────────────────────────────
@contextmanager
def temp_text_file(
    content: str,
    suffix: str = ".txt",
    encoding: str = "utf-8",
) -> Generator[Path, None, None]:
    """
    Materialize *content* in a named temp file and yield its Path.

    The handle is closed before the yield (delete=False keeps the file on
    disk, so callers — and external processes — can reopen it by path); the
    file is removed unconditionally when the context exits.

    Example:
        with temp_text_file("col1,col2\\n1,2\\n") as path:
            df = csv.reader(path.open())
    """
    handle = tempfile.NamedTemporaryFile(
        mode="w", suffix=suffix, encoding=encoding, delete=False
    )
    try:
        with handle:
            handle.write(content)
        yield Path(handle.name)
    finally:
        Path(handle.name).unlink(missing_ok=True)
@contextmanager
def temp_binary_file(
    content: bytes,
    suffix: str = ".bin",
) -> Generator[Path, None, None]:
    """
    Materialize *content* in a named binary temp file and yield its Path.

    The handle is closed before the yield (delete=False keeps the file on
    disk so it can be reopened by path, including on Windows); the file is
    removed unconditionally when the context exits.

    Example:
        with temp_binary_file(png_bytes, suffix=".png") as path:
            img = Image.open(path)
    """
    handle = tempfile.NamedTemporaryFile(mode="wb", suffix=suffix, delete=False)
    try:
        with handle:
            handle.write(content)
        yield Path(handle.name)
    finally:
        Path(handle.name).unlink(missing_ok=True)
# ─────────────────────────────────────────────────────────────────────────────
# Demo
# ─────────────────────────────────────────────────────────────────────────────
if __name__ == "__main__":
    # Smoke-test / usage demo for every helper in this module.
    # Run directly: python app/tempfileutil.py
    import json
    print("=== tempfile demo ===")
    # ── gettempdir ────────────────────────────────────────────────────────────
    # System-wide temp root (/tmp on Unix, %TEMP% on Windows).
    print(f"\n gettempdir: {tempfile.gettempdir()}")
    # ── atomic_write ──────────────────────────────────────────────────────────
    print("\n--- atomic_write ---")
    with tempfile.TemporaryDirectory() as td:
        cfg = {"version": 1, "debug": True}
        dest = Path(td) / "config.json"
        # One-shot atomic write of a serialized payload.
        atomic_write(dest, json.dumps(cfg))
        print(f" written: {dest.read_text()}")
        # Streaming variant: the handle commits to dest on clean context exit.
        with atomic_open(dest) as f:
            json.dump({"version": 2}, f)
        print(f" overwritten: {dest.read_text()}")
    # ── WorkDir ───────────────────────────────────────────────────────────────
    print("\n--- WorkDir ---")
    with WorkDir(prefix="demo_") as wd:
        sub = wd.subdir("results")  # kept to demo subdir(); not used below
        fp = wd.file("results/out.txt", "hello tempfile")
        print(f" workdir: {wd.path.name}")
        print(f" file content: {fp.read_text()!r}")
        print(f" exists: {wd.path.exists()}")
    # wd.path is deleted once the with-block exits.
    print(f" cleaned up: {not wd.path.exists()}")
    # ── spool_and_process ─────────────────────────────────────────────────────
    print("\n--- SpooledTemporaryFile ---")
    chunks = [b"chunk-%02d " % i for i in range(10)]
    # max_memory=50 is smaller than the total payload, forcing a disk rollover.
    result = spool_and_process(iter(chunks), max_memory=50)
    print(f" spooled {len(result)} bytes: {result[:30]!r}...")
    # ── temp_text_file ────────────────────────────────────────────────────────
    print("\n--- temp_text_file ---")
    csv_data = "name,age\nAlice,30\nBob,25\n"
    with temp_text_file(csv_data, suffix=".csv") as path:
        lines = path.read_text().splitlines()
        print(f" path suffix: {path.suffix}")
        print(f" lines: {lines}")
    # The fixture removes the file on context exit.
    print(f" deleted: {not path.exists()}")
    # ── mkstemp low-level ─────────────────────────────────────────────────────
    print("\n--- mkstemp ---")
    # mkstemp hands back a raw OS fd: the caller owns both the fd and the file.
    fd, path_str = tempfile.mkstemp(suffix=".db", prefix="myapp_")
    p = Path(path_str)
    os.close(fd)
    print(f" created: {p.name} exists={p.exists()}")
    p.unlink()
    print(f" removed manually: {not p.exists()}")
    print("\n=== done ===")
For the pathlib.Path / open() alternative — pathlib.Path(dest).write_bytes(data) and open(path, "wb") write files directly — these are not atomic: a crash between open and close leaves a truncated file; use tempfile-based atomic write when partial writes would corrupt the destination (config files, databases, caches). For the io.BytesIO / io.StringIO alternative — in-memory file objects that never touch disk — use BytesIO/StringIO when the entire content fits comfortably in memory and you never need a filesystem path; use tempfile.SpooledTemporaryFile when size is unbounded or you need a .name attribute to pass to shell commands; use tempfile.TemporaryFile when you need disk persistence without a path, and NamedTemporaryFile when external processes must open the file by path. The Claude Skills 360 bundle includes tempfile skill sets covering atomic_write()/atomic_open() atomic filesystem writes, WorkDir temporary working directory context manager with subdir()/file(), StagedUpload streaming upload buffer with write()/flush()/commit(), spool_and_process() SpooledTemporaryFile chunk buffer, and temp_text_file()/temp_binary_file() test fixture helpers. Start with the free tier to try temporary file patterns and tempfile pipeline code generation.