Python’s json module handles JSON encoding and decoding. import json. dumps: json.dumps(obj) → str. loads: json.loads(s) → Python object. dump/load: json.dump(obj, f); obj = json.load(f). indent: json.dumps(obj, indent=2) — pretty-print. separators: json.dumps(obj, separators=(",",":")) — compact. sort_keys: json.dumps(obj, sort_keys=True). default: json.dumps(obj, default=str) — fallback serializer. JSONEncoder: class MyEncoder(json.JSONEncoder): def default(self, o): .... JSONDecoder: json.loads(s, cls=MyDecoder). object_hook: json.loads(s, object_hook=fn) — called on each dict. object_pairs_hook: json.loads(s, object_pairs_hook=OrderedDict). parse_float: json.loads(s, parse_float=Decimal) — custom float. parse_int: json.loads(s, parse_int=str) — preserve large ints. ensure_ascii: json.dumps(obj, ensure_ascii=False) — embed Unicode. allow_nan: json.dumps(obj, allow_nan=False) — strict IEEE. cls: json.dumps(obj, cls=MyEncoder). Streaming: readline + json.loads per line (JSON Lines). JSONDecodeError: except json.JSONDecodeError as e: e.msg, e.lineno, e.colno. json.JSONDecodeError is a subclass of ValueError. json.encoder.FLOAT_REPR — deprecated. json.loads("null") → None. json.loads("true") → True. Claude Code generates API serializers, config readers, event loggers, and data exchange helpers.
CLAUDE.md for json
## json Stack
- Stdlib: import json
- Encode: json.dumps(obj, indent=2, default=str) | json.dump(obj, f, indent=2)
- Decode: json.loads(s) | json.load(f)
- Custom: JSONEncoder.default() for types that aren't JSON-native (datetime, Decimal, dataclass)
- object_hook: for deserializing dicts into typed objects on load
- Errors: except json.JSONDecodeError as e — shows line/col for debugging
json Serialization Pipeline
# app/serialization.py — JSONEncoder, JSONDecoder, object_hook, streaming, validators
from __future__ import annotations
import dataclasses
import io
import json
import uuid
from dataclasses import dataclass, field, fields, is_dataclass
from datetime import date, datetime
from decimal import Decimal
from enum import Enum
from pathlib import Path
from typing import Any, Callable, Iterator, TypeVar
T = TypeVar("T")
# ─────────────────────────────────────────────────────────────────────────────
# 1. Extended JSON encoder
# ─────────────────────────────────────────────────────────────────────────────
class ExtendedEncoder(json.JSONEncoder):
    """
    JSON encoder covering common non-native types:
    datetime/date (ISO-8601 text), Decimal/Path/UUID (their str form),
    Enum (its .value), bytes (hex string), dataclass instances (field
    dict), and as a last resort any object exposing __dict__.
    Example:
        json.dumps(obj, cls=ExtendedEncoder)
        json.dumps(obj, cls=ExtendedEncoder, indent=2)
    """
    def default(self, obj: Any) -> Any:
        # datetime subclasses date; both serialize via isoformat(), so
        # a single combined check preserves the original dispatch order.
        if isinstance(obj, (datetime, date)):
            return obj.isoformat()
        # Types whose canonical JSON representation is str(obj).
        if isinstance(obj, (Decimal, Path, uuid.UUID)):
            return str(obj)
        if isinstance(obj, Enum):
            return obj.value
        if isinstance(obj, bytes):
            return obj.hex()
        # Instances only: is_dataclass() is also true for the class object.
        if is_dataclass(obj) and not isinstance(obj, type):
            return dataclasses.asdict(obj)
        # Duck-typed fallback: serialize arbitrary objects by attribute dict.
        if hasattr(obj, "__dict__"):
            return obj.__dict__
        return super().default(obj)
def dumps(obj: Any, **kwargs) -> str:
    """
    Serialize *obj* to a JSON string, defaulting the encoder class to
    ExtendedEncoder so datetime/Decimal/UUID values work out of the box.
    Example:
        s = dumps({"ts": datetime.now(), "amount": Decimal("9.99")})
    """
    # Respect an explicit cls= from the caller (even an explicit None).
    if "cls" not in kwargs:
        kwargs["cls"] = ExtendedEncoder
    return json.dumps(obj, **kwargs)
def dump(obj: Any, fp, **kwargs) -> None:
    """Serialize *obj* as JSON to the open file *fp*, defaulting to ExtendedEncoder."""
    if "cls" not in kwargs:
        kwargs["cls"] = ExtendedEncoder
    json.dump(obj, fp, **kwargs)
def pretty(obj: Any) -> str:
    """
    Render *obj* as human-readable JSON: 2-space indent, keys sorted.
    Example:
        print(pretty({"nested": {"key": [1, 2, 3]}}))
    """
    options = {"indent": 2, "sort_keys": True}
    return dumps(obj, **options)
def compact(obj: Any) -> str:
    """
    Render *obj* as the tightest possible JSON (no whitespace after
    separators) — the right shape for JSON Lines logging.
    Example:
        line = compact(event_dict)  # for JSON Lines logging
    """
    tight = (",", ":")
    return dumps(obj, separators=tight)
# ─────────────────────────────────────────────────────────────────────────────
# 2. Safe loading
# ─────────────────────────────────────────────────────────────────────────────
def loads(s: str | bytes, **kwargs) -> Any:
    """
    Deserialize a JSON string/bytes; on failure re-raise
    json.JSONDecodeError with line/column baked into the message.
    Example:
        data = loads('{"key": "value"}')
    """
    try:
        return json.loads(s, **kwargs)
    except json.JSONDecodeError as exc:
        # Same message format as before; `from None` hides the original
        # traceback since the re-raise carries all of its information.
        annotated = f"{exc.msg} (line {exc.lineno}, col {exc.colno})"
        raise json.JSONDecodeError(annotated, exc.doc, exc.pos) from None
def loads_safe(s: str | bytes, default: Any = None) -> Any:
    """
    Best-effort deserialization: return *default* instead of raising
    when *s* is not valid JSON (or not even a str/bytes).
    Example:
        data = loads_safe(response_text, default={})
    """
    try:
        parsed = json.loads(s)
    except (TypeError, ValueError):
        # json.JSONDecodeError subclasses ValueError, so both parse
        # failures and wrong input types land here.
        return default
    return parsed
def load_file(path: str | Path, default: Any = None) -> Any:
    """
    Load JSON from *path*; return *default* when the file is missing,
    unreadable, not valid UTF-8, or not valid JSON.

    Fixes: the original did not catch UnicodeDecodeError, so a file with
    invalid UTF-8 bytes raised instead of returning *default*; it also
    had an exists()/read race, replaced here with EAFP.

    Example:
        config = load_file("config.json", default={})
    """
    try:
        # Read directly: OSError covers missing files, permission
        # errors, etc., without a separate exists() check.
        text = Path(path).read_text(encoding="utf-8")
    except (OSError, UnicodeDecodeError):
        return default
    try:
        return json.loads(text)
    except json.JSONDecodeError:
        return default
def save_file(obj: Any, path: str | Path, indent: int = 2) -> Path:
    """
    Write *obj* as JSON to *path* atomically: serialize to a sibling
    .tmp file first, then rename over the target so readers never see
    a half-written file.  Parent directories are created as needed.
    Example:
        save_file(config_dict, "config.json")
    """
    target = Path(path)
    target.parent.mkdir(parents=True, exist_ok=True)
    scratch = target.with_suffix(target.suffix + ".tmp")
    payload = dumps(obj, indent=indent)
    scratch.write_text(payload, encoding="utf-8")
    # Path.replace is an atomic rename on POSIX filesystems.
    scratch.replace(target)
    return target
# ─────────────────────────────────────────────────────────────────────────────
# 3. Decimal-preserving loader
# ─────────────────────────────────────────────────────────────────────────────
def loads_decimal(s: str | bytes) -> Any:
    """
    Parse JSON with every float materialized as Decimal, so the exact
    decimal digits from the document survive (no binary-float rounding).
    Integers still parse as int.
    Example:
        data = loads_decimal('{"price": 9.99, "tax": 0.895}')
        type(data["price"])  # Decimal
    """
    return json.loads(s, parse_float=Decimal)
# ─────────────────────────────────────────────────────────────────────────────
# 4. Dataclass round-trip
# ─────────────────────────────────────────────────────────────────────────────
def dataclass_to_json(obj: Any, **kwargs) -> str:
    """
    Serialize a dataclass instance to JSON, recursing into nested
    dataclasses, lists, and dicts via dataclasses.asdict().
    Example:
        @dataclass
        class Point: x: float; y: float
        s = dataclass_to_json(Point(1.0, 2.0))
    """
    as_mapping = dataclasses.asdict(obj)
    return dumps(as_mapping, **kwargs)
def dataclass_from_json(cls: type[T], s: str | bytes | dict) -> T:
    """
    Build a flat dataclass from a JSON string/bytes or an already-parsed
    dict, silently dropping keys that are not fields of *cls*.
    Example:
        p = dataclass_from_json(Point, '{"x": 1.0, "y": 2.0, "extra": "ignored"}')
    """
    data = json.loads(s) if isinstance(s, (str, bytes)) else s
    accepted = {f.name for f in fields(cls)}
    return cls(**{key: val for key, val in data.items() if key in accepted})
# ─────────────────────────────────────────────────────────────────────────────
# 5. JSON Lines (JSONL) streaming
# ─────────────────────────────────────────────────────────────────────────────
def iter_jsonl(path: str | Path | io.IOBase) -> Iterator[Any]:
    """
    Lazily parse JSON Lines (one JSON document per line) from a file
    path or an already-open text/binary stream.

    Blank lines and '#' comment lines are skipped in BOTH modes, and
    parse failures re-raise json.JSONDecodeError with the 1-based line
    number prepended — the original applied both behaviors only in the
    file-path branch, making the stream branch inconsistent.

    Example:
        for record in iter_jsonl("events.jsonl"):
            process(record)
    """
    if isinstance(path, (str, Path)):
        with open(path, encoding="utf-8") as f:
            yield from _parse_jsonl_lines(f)
    else:
        yield from _parse_jsonl_lines(path)


def _parse_jsonl_lines(stream) -> Iterator[Any]:
    """Decode, filter, and parse each line of *stream*; helper for iter_jsonl."""
    for lineno, raw in enumerate(stream, 1):
        # Binary streams yield bytes; decode (UTF-8) before filtering.
        line = raw if isinstance(raw, str) else raw.decode()
        line = line.strip()
        if not line or line.startswith("#"):
            continue
        try:
            yield json.loads(line)
        except json.JSONDecodeError as exc:
            raise json.JSONDecodeError(
                f"Line {lineno}: {exc.msg}", exc.doc, exc.pos
            ) from None
def write_jsonl(records: Any, path: str | Path) -> int:
    """
    Stream an iterable of objects to *path* in JSON Lines format (one
    compact JSON document per line).  Parent directories are created.
    Returns the number of records written.
    Example:
        n = write_jsonl(events, "events.jsonl")
    """
    target = Path(path)
    target.parent.mkdir(parents=True, exist_ok=True)
    written = 0
    with open(target, "w", encoding="utf-8") as sink:
        # enumerate keeps the running count without a manual += 1.
        for written, record in enumerate(records, 1):
            sink.write(compact(record) + "\n")
    return written
# ─────────────────────────────────────────────────────────────────────────────
# 6. Deep merge / patch
# ─────────────────────────────────────────────────────────────────────────────
def deep_merge(base: dict, override: dict) -> dict:
    """
    Return a new dict combining *base* and *override*.  On key conflicts
    the override wins, except that two dict values are merged
    recursively.  Neither input is mutated.
    Example:
        base = {"a": 1, "b": {"x": 1, "y": 2}}
        override = {"b": {"y": 99, "z": 3}, "c": 4}
        result = {"a": 1, "b": {"x": 1, "y": 99, "z": 3}, "c": 4}
    """
    merged = dict(base)
    for key, incoming in override.items():
        current = merged.get(key)
        # Recurse only when both sides hold dicts; otherwise replace.
        if isinstance(current, dict) and isinstance(incoming, dict):
            merged[key] = deep_merge(current, incoming)
        else:
            merged[key] = incoming
    return merged
def json_patch(original: str | dict, patch: str | dict) -> dict:
    """
    Deep-merge *patch* into *original* and return the merged dict.
    Each argument may be a JSON string or an already-parsed dict
    (dict inputs are shallow-copied, not mutated).
    Example:
        patched = json_patch(base_config_json, override_json)
    """
    def as_dict(value: str | dict) -> dict:
        return json.loads(value) if isinstance(value, str) else dict(value)

    return deep_merge(as_dict(original), as_dict(patch))
# ─────────────────────────────────────────────────────────────────────────────
# Demo
# ─────────────────────────────────────────────────────────────────────────────
if __name__ == "__main__":
    import tempfile
    print("=== json demo ===")
    # 1. Extended types: a dataclass holding UUID/datetime/Decimal fields.
    @dataclass
    class Order:
        id: uuid.UUID
        created: datetime
        amount: Decimal
        tags: list[str] = field(default_factory=list)
    order = Order(
        id=uuid.UUID("550e8400-e29b-41d4-a716-446655440000"),
        created=datetime(2024, 1, 15, 10, 30),
        amount=Decimal("49.99"),
        tags=["premium", "annual"],
    )
    print("\n--- ExtendedEncoder ---")
    s = dumps(order)
    print(f" {s}")
    print("\n--- pretty ---")
    print(pretty({"id": str(order.id), "amount": str(order.amount), "tags": order.tags}))
    print("\n--- loads_safe ---")
    # Hoisted out of the f-strings: backslash escapes inside an f-string
    # expression are a SyntaxError before Python 3.12.
    good = loads_safe('{"k":1}')
    print(f" good: {good}")
    bad = loads_safe("not json", default={"error": True})
    print(f" bad: {bad}")
    print("\n--- loads_decimal ---")
    data = loads_decimal('{"price": 9.99}')
    print(f" type={type(data['price']).__name__} value={data['price']}")
    print("\n--- dataclass round-trip ---")
    @dataclass
    class Point:
        x: float
        y: float
    p = Point(3.0, 4.0)
    s2 = dataclass_to_json(p)
    p2 = dataclass_from_json(Point, s2)
    print(f" original: {p} serialized: {s2!r} restored: {p2}")
    print("\n--- JSON Lines ---")
    records = [{"id": i, "val": i**2} for i in range(5)]
    with tempfile.TemporaryDirectory() as td:
        path = f"{td}/data.jsonl"
        n = write_jsonl(records, path)
        print(f" wrote {n} records")
        loaded = list(iter_jsonl(path))
        print(f" read back: {loaded}")
    print("\n--- deep_merge ---")
    base = {"a": 1, "b": {"x": 1, "y": 2}}
    patch = {"b": {"y": 99, "z": 3}, "c": 4}
    merged = deep_merge(base, patch)
    print(f" merged: {merged}")
    print("\n--- save_file / load_file ---")
    with tempfile.TemporaryDirectory() as td:
        cfg_path = f"{td}/config.json"
        save_file({"host": "localhost", "port": 8080}, cfg_path)
        cfg = load_file(cfg_path)
        print(f" loaded: {cfg}")
        missing = load_file(f"{td}/nonexistent.json", default={})
        print(f" missing: {missing}")
    print("\n=== done ===")
For the orjson alternative — orjson (PyPI) is a Rust-backed JSON library that is 10–20x faster than stdlib json.dumps/json.loads, supports datetime, numpy, dataclasses, and UUID natively without a custom encoder, and produces bytes rather than str; stdlib json has zero external dependencies, is always available, and is fast enough for most use cases where JSON is not the throughput bottleneck — use orjson in hot paths (high-frequency API serialization, log pipelines) where benchmarks show json is a bottleneck, stdlib json for all other cases. For the msgspec alternative — msgspec provides JSON and MessagePack encoding/decoding with compile-time schema validation via Struct classes (similar to TypedDict but Rust-backed), 20–30x faster than stdlib json, and zero-copy decoding; stdlib json requires explicit JSONEncoder/object_hook for typed deserialization — use msgspec.Struct for high-performance typed JSON in microservices, stdlib json with dataclasses for all other code where external dependencies add friction. The Claude Skills 360 bundle includes json skill sets covering ExtendedEncoder (datetime/Decimal/UUID/Path/Enum/dataclass), dumps()/dump()/pretty()/compact() encoding helpers, loads()/loads_safe()/load_file()/save_file() decoding helpers, loads_decimal() precision float loading, dataclass_to_json()/dataclass_from_json() round-trip, iter_jsonl()/write_jsonl() JSON Lines streaming, and deep_merge()/json_patch() config merging. Start with the free tier to try JSON serialization and data exchange pipeline code generation.