aiofiles provides async-compatible file I/O for Python asyncio applications. pip install aiofiles. Read: async with aiofiles.open("f.txt") as f: text = await f.read(). Write: async with aiofiles.open("f.txt", "w") as f: await f.write("hello"). Readline: await f.readline(). Readlines: await f.readlines() → list. Iteration: async for line in f:. Seek: await f.seek(0). Tell: await f.tell(). Binary: aiofiles.open("img.png", "rb"). Append: aiofiles.open("log.txt", "a"). Truncate: await f.truncate(0). Parallel read: asyncio.gather(read(f1), read(f2)) — both run concurrently. aiofiles.os.path.exists(p) async path check. aiofiles.os.stat(p). aiofiles.os.remove(p). aiofiles.os.rename(src, dst). aiofiles.os.makedirs(p, exist_ok=True). aiofiles.os.listdir(p) → list. Tempfile: async with aiofiles.tempfile.NamedTemporaryFile() as tmp: await tmp.write(b"data"). TemporaryDirectory. Wrap: aiofiles.threadpool.wrap(sync_file_obj). JSON: json.loads(await f.read()). CSV: read then pass to io.StringIO. FastAPI: UploadFile.read() then aiofiles.open(path, "wb"). BufferedReader. Claude Code generates aiofiles async file utilities, parallel batch readers, and FastAPI upload/download handlers.
CLAUDE.md for aiofiles
## aiofiles Stack
- Version: aiofiles >= 23.2 | pip install aiofiles
- Read: async with aiofiles.open(path) as f: text = await f.read()
- Write: async with aiofiles.open(path, "w") as f: await f.write(text)
- Binary: aiofiles.open(path, "rb") / "wb" — same API as text mode
- Parallel: asyncio.gather(read_file(a), read_file(b)) — concurrent reads
- OS ops: aiofiles.os.path.exists() | .stat() | .remove() | .makedirs()
- Temp: async with aiofiles.tempfile.NamedTemporaryFile() as tmp: ...
aiofiles Async File I/O Pipeline
# app/files.py — aiofiles async file read/write, batch ops, and FastAPI helpers
from __future__ import annotations

import asyncio
import collections
import io
import json
import os
from pathlib import Path
from typing import Any, AsyncIterator

import aiofiles
import aiofiles.os
import aiofiles.os.path
import aiofiles.tempfile
# ─────────────────────────────────────────────────────────────────────────────
# 1. Core read/write helpers
# ─────────────────────────────────────────────────────────────────────────────
async def read_text(
    path: str | Path,
    encoding: str = "utf-8",
    errors: str = "replace",
) -> str:
    """Asynchronously load the full contents of a text file.

    Undecodable byte sequences are substituted (errors="replace") rather
    than raising UnicodeDecodeError.
    """
    async with aiofiles.open(str(path), encoding=encoding, errors=errors) as handle:
        content = await handle.read()
    return content
async def write_text(
    path: str | Path,
    content: str,
    encoding: str = "utf-8",
    mode: str = "w",
) -> int:
    """Write text to a file asynchronously.

    Args:
        path: Destination file path.
        content: Text to write.
        encoding: Text encoding (default UTF-8).
        mode: Open mode, e.g. "w" to overwrite or "a" to append.

    Returns:
        The number of characters written. (Text-mode ``write`` reports
        characters, not bytes — the original docstring said "bytes",
        which is wrong for multi-byte encodings.)
    """
    async with aiofiles.open(str(path), mode=mode, encoding=encoding) as f:
        return await f.write(content)
async def read_bytes(path: str | Path) -> bytes:
    """Asynchronously read the full contents of a binary file."""
    async with aiofiles.open(str(path), "rb") as handle:
        payload = await handle.read()
    return payload
async def write_bytes(path: str | Path, data: bytes, mode: str = "wb") -> int:
    """Asynchronously write *data* to *path*; returns the byte count written."""
    async with aiofiles.open(str(path), mode) as handle:
        written = await handle.write(data)
    return written
async def append_text(path: str | Path, line: str, encoding: str = "utf-8") -> None:
    """Append *line* to a text file, supplying a trailing newline if absent."""
    text = line if line.endswith("\n") else f"{line}\n"
    async with aiofiles.open(str(path), "a", encoding=encoding) as handle:
        await handle.write(text)
async def read_lines(
    path: str | Path,
    encoding: str = "utf-8",
    strip: bool = True,
) -> list[str]:
    """Read every line of a text file.

    With strip=True (the default) each line's trailing newline is removed;
    otherwise lines are returned exactly as read.
    """
    async with aiofiles.open(str(path), encoding=encoding) as handle:
        raw = await handle.readlines()
    if not strip:
        return raw
    return [line.rstrip("\n") for line in raw]
# ─────────────────────────────────────────────────────────────────────────────
# 2. JSON helpers
# ─────────────────────────────────────────────────────────────────────────────
async def read_json(path: str | Path) -> Any:
    """Load and parse a JSON document from *path*."""
    return json.loads(await read_text(path))
async def write_json(
    path: str | Path,
    data: Any,
    indent: int = 2,
    ensure_ascii: bool = False,
) -> None:
    """Serialize *data* as JSON and write it to *path*."""
    payload = json.dumps(data, indent=indent, ensure_ascii=ensure_ascii)
    await write_text(path, payload)
async def update_json(path: str | Path, update_fn) -> Any:
    """Read, transform, and rewrite a JSON file.

    A missing or unparsable file is treated as an empty dict.

    NOTE: the read-modify-write sequence is NOT atomic — the previous
    docstring claimed atomicity, but another coroutine can interleave
    between the read and the write and updates can be lost. Serialize
    callers with an asyncio.Lock if concurrent updates are possible.

    Args:
        path: JSON file to update.
        update_fn: Callable ``(data) -> new_data`` applied to the parsed value.

    Returns:
        The new data that was written back to the file.
    """
    try:
        data = await read_json(path)
    except (FileNotFoundError, json.JSONDecodeError):
        data = {}
    new_data = update_fn(data)
    await write_json(path, new_data)
    return new_data
# ─────────────────────────────────────────────────────────────────────────────
# 3. Batch / parallel operations
# ─────────────────────────────────────────────────────────────────────────────
async def read_many(
    paths: list[str | Path],
    encoding: str = "utf-8",
    concurrency: int = 20,
) -> list[str]:
    """
    Read several text files concurrently, preserving input order.

    A semaphore caps the number of simultaneously open file handles
    at *concurrency*.
    """
    gate = asyncio.Semaphore(concurrency)

    async def _bounded_read(target: str | Path) -> str:
        async with gate:
            return await read_text(target, encoding=encoding)

    tasks = [_bounded_read(target) for target in paths]
    return await asyncio.gather(*tasks)
async def write_many(
    files: dict[str | Path, str],
    encoding: str = "utf-8",
    concurrency: int = 20,
) -> None:
    """Write each path → content pair in *files* concurrently, bounded by *concurrency*."""
    gate = asyncio.Semaphore(concurrency)

    async def _bounded_write(target: str | Path, text: str) -> None:
        async with gate:
            await write_text(target, text, encoding=encoding)

    await asyncio.gather(*(_bounded_write(target, text) for target, text in files.items()))
async def copy_file(
    src: str | Path,
    dst: str | Path,
    chunk_size: int = 1024 * 64,
) -> None:
    """Copy *src* to *dst* asynchronously.

    Streams the content in fixed-size chunks so arbitrarily large files can
    be copied with O(chunk_size) memory — the previous implementation read
    the entire source file into memory first. The new *chunk_size* parameter
    defaults keep the call signature backward compatible.
    """
    async with aiofiles.open(str(src), "rb") as fin, aiofiles.open(str(dst), "wb") as fout:
        while chunk := await fin.read(chunk_size):
            await fout.write(chunk)
async def copy_many(
    pairs: list[tuple[str | Path, str | Path]],
    concurrency: int = 10,
) -> None:
    """Copy each (src, dst) pair concurrently, at most *concurrency* at a time."""
    gate = asyncio.Semaphore(concurrency)

    async def _bounded_copy(source: str | Path, target: str | Path) -> None:
        async with gate:
            await copy_file(source, target)

    await asyncio.gather(*(_bounded_copy(source, target) for source, target in pairs))
# ─────────────────────────────────────────────────────────────────────────────
# 4. Line-by-line streaming
# ─────────────────────────────────────────────────────────────────────────────
async def stream_lines(
    path: str | Path,
    encoding: str = "utf-8",
) -> AsyncIterator[str]:
    """
    Yield the file's lines one at a time, trailing newline removed.

    Only one line is held in memory at a time, so this suits very large
    files. Undecodable bytes are replaced rather than raising.

    Usage:
        async for line in stream_lines("big.log"):
            process(line)
    """
    async with aiofiles.open(str(path), encoding=encoding, errors="replace") as handle:
        async for raw in handle:
            yield raw.rstrip("\n")
async def tail(
    path: str | Path,
    n: int = 20,
    encoding: str = "utf-8",
) -> list[str]:
    """Return the last *n* lines of a text file.

    Streams the file and keeps only the newest *n* lines in a bounded
    deque, so memory use is O(n) instead of O(file size) as in the
    previous list-accumulating implementation. Also fixes the n=0 edge
    case: ``lines[-0:]`` used to return EVERY line; n <= 0 now returns [].
    """
    window: collections.deque[str] = collections.deque(maxlen=max(n, 0))
    async for line in stream_lines(path, encoding):
        window.append(line)
    return list(window)
# ─────────────────────────────────────────────────────────────────────────────
# 5. Async filesystem helpers
# ─────────────────────────────────────────────────────────────────────────────
async def exists(path: str | Path) -> bool:
    """Return True if *path* exists on disk (async stat via thread pool)."""
    return await aiofiles.os.path.exists(os.fspath(path))
async def file_size(path: str | Path) -> int:
    """Return the size of *path* in bytes, from an async stat call."""
    info = await aiofiles.os.stat(os.fspath(path))
    return info.st_size
async def remove(path: str | Path) -> None:
    """Delete the file at *path* asynchronously."""
    await aiofiles.os.remove(os.fspath(path))
async def rename(src: str | Path, dst: str | Path) -> None:
    """Move/rename *src* to *dst* asynchronously."""
    await aiofiles.os.rename(os.fspath(src), os.fspath(dst))
async def makedirs(path: str | Path, exist_ok: bool = True) -> None:
    """Recursively create directory *path*; by default no error if it already exists."""
    await aiofiles.os.makedirs(os.fspath(path), exist_ok=exist_ok)
async def listdir(path: str | Path) -> list[str]:
    """Return the entry names of directory *path* asynchronously."""
    return await aiofiles.os.listdir(os.fspath(path))
# ─────────────────────────────────────────────────────────────────────────────
# 6. Temp file helpers
# ─────────────────────────────────────────────────────────────────────────────
async def write_temp(
    data: bytes | str,
    suffix: str = "",
    prefix: str = "tmp_",
    delete: bool = True,
) -> str:
    """
    Write *data* to a named temporary file and return the file's path.

    Text vs binary mode is chosen from the type of *data*.

    WARNING: with the default delete=True the OS removes the file when the
    context manager exits — i.e. BEFORE this function returns — so the
    returned path no longer exists on disk and is useful only as a unique
    name. Pass delete=False to keep the file; the caller is then
    responsible for removing it.
    """
    mode = "wb" if isinstance(data, bytes) else "w"
    async with aiofiles.tempfile.NamedTemporaryFile(
        mode=mode,
        suffix=suffix,
        prefix=prefix,
        delete=delete,
    ) as tmp:
        await tmp.write(data)
        path = tmp.name
    return path
# ─────────────────────────────────────────────────────────────────────────────
# 7. FastAPI / aiohttp helpers
# ─────────────────────────────────────────────────────────────────────────────
async def save_upload(
    upload_file,
    destination: str | Path,
    chunk_size: int = 1024 * 64,
) -> int:
    """
    Persist a FastAPI UploadFile to *destination* in chunks.

    Returns the total number of bytes written.

    Usage (FastAPI):
        @app.post("/upload")
        async def upload(file: UploadFile = File(...)):
            size = await save_upload(file, f"uploads/{file.filename}")
            return {"bytes": size}
    """
    written = 0
    async with aiofiles.open(str(destination), "wb") as sink:
        while True:
            chunk = await upload_file.read(chunk_size)
            if not chunk:
                break
            await sink.write(chunk)
            written += len(chunk)
    return written
async def stream_file_response(path: str | Path, chunk_size: int = 1024 * 64):
    """
    Yield *path*'s contents in binary chunks — suitable as the body of a
    FastAPI StreamingResponse.

    Usage:
        @app.get("/download/(unknown)")
        async def download(filename: str):
            return StreamingResponse(
                stream_file_response(f"files/(unknown)"),
                media_type="application/octet-stream",
            )
    """
    async with aiofiles.open(str(path), "rb") as handle:
        while True:
            chunk = await handle.read(chunk_size)
            if not chunk:
                break
            yield chunk
# ─────────────────────────────────────────────────────────────────────────────
# 8. Async log writer
# ─────────────────────────────────────────────────────────────────────────────
class AsyncLogWriter:
    """
    Queue-backed async log writer.

    Log lines are pushed onto an asyncio.Queue and drained by a single
    background task that owns the open file handle, so callers never block
    the event loop on disk I/O and writes are serialized.

    Fix over the previous version: start() is now idempotent — calling it
    twice used to spawn a second worker task that one stop() sentinel could
    never terminate, leaking the extra task.

    Usage:
        writer = AsyncLogWriter("app.log")
        await writer.start()
        await writer.log("Starting up")
        ...
        await writer.stop()
    """
    def __init__(self, path: str | Path, encoding: str = "utf-8"):
        self._path = str(path)
        self._encoding = encoding
        # None is the shutdown sentinel; everything else is a newline-terminated line.
        self._queue: asyncio.Queue[str | None] = asyncio.Queue()
        self._task: asyncio.Task[None] | None = None

    async def start(self) -> None:
        """Launch the background writer task (no-op if already running)."""
        if self._task is None or self._task.done():
            self._task = asyncio.create_task(self._worker())

    async def stop(self) -> None:
        """Drain queued messages and stop the worker.

        Safe to call even if start() was never invoked.
        """
        await self._queue.put(None)  # sentinel
        if self._task is not None:
            await self._task
            self._task = None  # allow a later start() to restart cleanly

    async def log(self, message: str) -> None:
        """Enqueue one log line; a trailing newline is appended."""
        await self._queue.put(message + "\n")

    async def _worker(self) -> None:
        # Single consumer: holds the file open for the writer's lifetime;
        # exiting the context closes (and flushes) the file after the sentinel.
        async with aiofiles.open(self._path, "a", encoding=self._encoding) as f:
            while True:
                item = await self._queue.get()
                if item is None:
                    break
                await f.write(item)
# ─────────────────────────────────────────────────────────────────────────────
# Demo
# ─────────────────────────────────────────────────────────────────────────────
async def demo():
    """Exercise the helpers end-to-end inside a throwaway temp directory."""
    import tempfile

    with tempfile.TemporaryDirectory() as tmpdir:
        base = Path(tmpdir)

        print("=== Write + Read ===")
        await write_text(base / "hello.txt", "Hello, aiofiles!\nLine 2\nLine 3\n")
        text = await read_text(base / "hello.txt")
        print(f" Read: {text.strip()!r}")

        print("\n=== JSON round-trip ===")
        data = {"name": "MyApp", "version": "1.0.0", "debug": True}
        await write_json(base / "config.json", data)
        loaded = await read_json(base / "config.json")
        print(f" JSON: {loaded}")

        print("\n=== Parallel write + read ===")
        files = {}
        for i in range(5):
            files[base / f"f{i}.txt"] = f"Content of file {i}"
        await write_many(files)
        texts = await read_many(list(files.keys()))
        print(f" Read {len(texts)} files in parallel: {texts[:2]}")

        print("\n=== Stream lines ===")
        lines = [line async for line in stream_lines(base / "hello.txt")]
        print(f" Streamed {len(lines)} lines: {lines}")

        print("\n=== File size ===")
        size = await file_size(base / "config.json")
        print(f" config.json: {size} bytes")

        print("\n=== AsyncLogWriter ===")
        log_path = base / "app.log"
        writer = AsyncLogWriter(log_path)
        await writer.start()
        for i in range(5):
            await writer.log(f"Log line {i}")
        await writer.stop()
        log_lines = await read_lines(log_path)
        print(f" Log lines: {log_lines}")


if __name__ == "__main__":
    asyncio.run(demo())
For the asyncio.to_thread alternative — asyncio.to_thread(sync_fn) runs any synchronous I/O in a thread pool executor, which works for file operations that don’t have a native async API; aiofiles uses the same thread pool mechanism but provides a native-feeling async file object with await f.read(), async for line in f:, and await f.write(), which is more idiomatic and easier to test than wrapping every stdlib call with to_thread. For the anyio alternative — anyio’s anyio.open_file() is the equivalent for projects using anyio’s backend-agnostic async I/O (works with asyncio and trio); aiofiles is asyncio-specific but has wider adoption and a more complete API including aiofiles.os.* and aiofiles.tempfile. The Claude Skills 360 bundle includes aiofiles skill sets covering aiofiles.open() read/write/append/binary modes, read_text()/write_text()/read_bytes()/write_bytes() helpers, read_json()/write_json()/update_json(), read_many()/write_many() concurrent batch ops, stream_lines() async generator, tail() last-N-lines, aiofiles.os.path.exists/stat/remove/makedirs, write_temp() temporary file, save_upload() FastAPI upload, stream_file_response() FastAPI download, and AsyncLogWriter queue-based async logger. Start with the free tier to try async file I/O code generation.