Python’s operator module exposes built-in operators as callable functions. import operator. itemgetter: operator.itemgetter(0) → lambda x: x[0]; operator.itemgetter("name") → lambda d: d["name"]; multi-key: operator.itemgetter(1, 3) → tuple. attrgetter: operator.attrgetter("name") → lambda obj: obj.name; chained: attrgetter("address.city"). methodcaller: operator.methodcaller("upper") → lambda s: s.upper(); with args: methodcaller("replace", "a", "b"). add: operator.add(a, b) = a + b. sub/mul/truediv/floordiv/mod/pow. neg/pos/abs. eq/ne/lt/le/gt/ge. and_/or_/xor/not_/lshift/rshift. contains: operator.contains(seq, item). indexOf: operator.indexOf(seq, item). countOf: operator.countOf(seq, item). getitem/setitem/delitem. iadd/imul: in-place variants. truth: operator.truth(x) = bool(x). length_hint: operator.length_hint(obj). from operator import itemgetter, attrgetter, methodcaller. Key patterns: sorted(records, key=itemgetter("price")), max(objects, key=attrgetter("score")), list(map(methodcaller("strip"), lines)), functools.reduce(operator.add, numbers). Claude Code generates sort pipelines, reduce chains, comparison key factories, and functional data transformers.
CLAUDE.md for operator
## operator Stack
- Stdlib: import operator; from operator import itemgetter, attrgetter, methodcaller
- Sort by field: sorted(recs, key=itemgetter("field"))
- Sort by attr: sorted(objs, key=attrgetter("attr.nested"))
- Map method: list(map(methodcaller("strip"), lines))
- Reduce sum: functools.reduce(operator.add, numbers, 0)
- Comparison fn: operator.lt as a first-class callable
operator Functional Pipeline
# app/oputil.py — itemgetter, attrgetter, methodcaller, reduce, sort, compose
from __future__ import annotations
import functools
import operator
from collections.abc import Callable, Iterable
from dataclasses import dataclass
from operator import attrgetter, itemgetter, methodcaller
from typing import Any, TypeVar
T = TypeVar("T")
K = TypeVar("K")
# ─────────────────────────────────────────────────────────────────────────────
# 1. Sort / group helpers
# ─────────────────────────────────────────────────────────────────────────────
def sort_by(items: list[T], *keys: str, reverse: bool = False) -> list[T]:
    """
    Sort a list of dicts or objects by one or more keys/attributes.

    Dict items are keyed with ``itemgetter``; any other item type is keyed
    with ``attrgetter`` (dotted paths such as ``"address.city"`` work for
    attributes). Detection looks at the first item only, so the sequence
    should be homogeneous.

    Args:
        items: list of dicts or objects (never mutated; a new list is returned).
        *keys: field names (dict keys) or attribute names. With several keys
            the sort is lexicographic over the extracted tuple.
        reverse: sort descending when True.

    Returns:
        A new sorted list.

    Example:
        rows = [{"name": "Bob", "age": 30}, {"name": "Alice", "age": 25}]
        sort_by(rows, "age")                 # ascending by age
        sort_by(rows, "name", reverse=True)  # descending by name
    """
    if not items:
        # Nothing to inspect for dict-vs-object detection; return a copy.
        return items[:]
    if not keys:
        # No key given: rely on the items' natural ordering.
        return sorted(items, reverse=reverse)
    # itemgetter/attrgetter accept one or many keys directly; with several
    # keys they return a tuple, which sorts lexicographically — no need to
    # special-case the single-key call.
    factory = itemgetter if isinstance(items[0], dict) else attrgetter
    return sorted(items, key=factory(*keys), reverse=reverse)
def group_by(items: Iterable[T], key: str) -> dict[Any, list[T]]:
    """
    Bucket a sequence of dicts or objects by the value found at *key*.

    Dicts are indexed with ``item[key]``; anything else is read with
    ``getattr(item, key)``. Insertion order of first appearance is preserved.

    Example:
        records = [{"dept": "eng", "name": "Alice"}, {"dept": "eng", "name": "Bob"}]
        groups = group_by(records, "dept")
        groups["eng"]  # [{"dept":"eng","name":"Alice"}, ...]
    """
    buckets: dict[Any, list[T]] = {}
    for element in items:
        if isinstance(element, dict):
            label = element[key]
        else:
            label = getattr(element, key)
        if label not in buckets:
            buckets[label] = []
        buckets[label].append(element)
    return buckets
def pluck(items: Iterable[dict], key: str) -> list[Any]:
    """
    Project one field out of every dict in a sequence.

    Raises KeyError if any dict lacks *key*.

    Example:
        names = pluck(users, "name")
    """
    return list(map(itemgetter(key), items))
def pluck_attr(items: Iterable[Any], attr: str) -> list[Any]:
    """
    Project one attribute out of every object in a sequence.

    Raises AttributeError if any object lacks *attr*.

    Example:
        ids = pluck_attr(users, "id")
    """
    return list(map(attrgetter(attr), items))
# ─────────────────────────────────────────────────────────────────────────────
# 2. Reduce / aggregate helpers
# ─────────────────────────────────────────────────────────────────────────────
def sum_field(items: Iterable[dict], key: str) -> Any:
    """
    Fold operator.add over one field of a dict sequence, starting from 0.

    Uses reduce rather than sum() so any type supporting ``0 + value``
    semantics for the first element works the same way as before.

    Example:
        total = sum_field(line_items, "amount")
    """
    extract = itemgetter(key)
    return functools.reduce(operator.add, map(extract, items), 0)
def product_of(numbers: Iterable[Any], start: Any = 1) -> Any:
    """
    Multiply all values together via operator.mul, beginning at *start*.

    Example:
        product_of([1, 2, 3, 4])  # 24
    """
    result = start
    for factor in numbers:
        result = operator.mul(result, factor)
    return result
def running_sum(numbers: Iterable[Any]) -> list[Any]:
    """
    Cumulative sums via itertools.accumulate with operator.add.

    Example:
        running_sum([1, 2, 3, 4])  # [1, 3, 6, 10]
    """
    import itertools
    return [*itertools.accumulate(numbers, operator.add)]
def running_product(numbers: Iterable[Any]) -> list[Any]:
    """
    Cumulative products via itertools.accumulate with operator.mul.

    Example:
        running_product([1, 2, 3, 4])  # [1, 2, 6, 24]
    """
    import itertools
    return [*itertools.accumulate(numbers, operator.mul)]
# ─────────────────────────────────────────────────────────────────────────────
# 3. Comparators and predicates
# ─────────────────────────────────────────────────────────────────────────────
# Comparison table used by make_predicate. Both spellings are accepted:
# short mnemonic names ("eq", "lt", ...) and the matching symbolic forms
# ("==", "<", ...), so call sites can use whichever reads better.
_CMP_OPS: dict[str, Callable] = {
    "eq": operator.eq,
    "ne": operator.ne,
    "lt": operator.lt,
    "le": operator.le,
    "gt": operator.gt,
    "ge": operator.ge,
    "==": operator.eq,
    "!=": operator.ne,
    "<": operator.lt,
    "<=": operator.le,
    ">": operator.gt,
    ">=": operator.ge,
}


def make_predicate(key: str, op: str, value: Any) -> Callable[[dict], bool]:
    """
    Build a predicate for filtering dicts by a field comparison.

    Args:
        key: dict key whose value is compared.
        op: comparison name or symbol — any key of _CMP_OPS.
        value: right-hand operand of the comparison.

    Raises:
        ValueError: if *op* is not a known comparison.

    Example:
        is_adult = make_predicate("age", ">=", 18)
        is_active = make_predicate("status", "eq", "active")
        adults = list(filter(is_adult, users))
    """
    compare = _CMP_OPS.get(op)
    if compare is None:
        raise ValueError(f"Unknown operator {op!r}. Choices: {list(_CMP_OPS)}")
    extract = itemgetter(key)

    def predicate(item: dict) -> bool:
        # field value on the left, fixed value on the right
        return compare(extract(item), value)

    return predicate
def filter_by(items: Iterable[dict], **conditions: Any) -> list[dict]:
    """
    Keep only dicts where every field==value condition holds (exact equality).

    With no conditions, every item passes. Missing keys raise KeyError,
    matching an itemgetter-based lookup.

    Example:
        active_eng = filter_by(users, dept="eng", active=True)
    """
    wanted = conditions.items()
    matches: list[dict] = []
    for record in items:
        if all(record[field] == expected for field, expected in wanted):
            matches.append(record)
    return matches
def negate(fn: Callable[..., bool]) -> Callable[..., bool]:
    """
    Wrap *fn* so its result is passed through operator.not_ (always a bool).

    Example:
        not_empty = negate(operator.not_)  # double negation = truth
        non_active = negate(lambda u: u["active"])
    """
    def inverted(*args: Any, **kwargs: Any) -> bool:
        return operator.not_(fn(*args, **kwargs))

    return inverted
# ─────────────────────────────────────────────────────────────────────────────
# 4. Method / transform pipelines
# ─────────────────────────────────────────────────────────────────────────────
def apply_method(items: Iterable[Any], method: str, *args: Any, **kwargs: Any) -> list[Any]:
    """
    Invoke a named method (with optional arguments) on every item.

    Built on operator.methodcaller so the lookup happens once per call,
    not once per item.

    Example:
        stripped = apply_method(lines, "strip")
        upper = apply_method(words, "upper")
        replaced = apply_method(tags, "replace", "-", "_")
    """
    invoke = methodcaller(method, *args, **kwargs)
    return list(map(invoke, items))
def compose(*fns: Callable) -> Callable:
    """
    Compose functions right-to-left: compose(f, g)(x) == f(g(x)).

    With no functions, the result is the identity function.

    Example:
        normalize = compose(str.lower, str.strip)
        normalize("  Hello  ")  # "hello"
    """
    def composed(value: Any) -> Any:
        return functools.reduce(lambda acc, fn: fn(acc), reversed(fns), value)

    return composed
def pipe(value: Any, *fns: Callable) -> Any:
    """
    Thread *value* through each function left-to-right.

    With no functions, *value* is returned unchanged.

    Example:
        result = pipe("  Hello World  ", str.strip, str.lower, str.split)
        # ["hello", "world"]
    """
    current = value
    for step in fns:
        current = step(current)
    return current
# ─────────────────────────────────────────────────────────────────────────────
# 5. In-place and item mutation helpers
# ─────────────────────────────────────────────────────────────────────────────
def deep_get(obj: Any, *path: str | int) -> Any:
    """
    Walk a nested structure by a sequence of keys/indices via operator.getitem.

    With an empty path, *obj* itself is returned. Missing keys/indices
    propagate the container's own KeyError/IndexError.

    Example:
        deep_get(data, "users", 0, "name")  # data["users"][0]["name"]
    """
    current = obj
    for step in path:
        current = operator.getitem(current, step)
    return current
def deep_set(obj: Any, path: list[str | int], value: Any) -> None:
    """
    Set a nested value in a mutable structure by path using operator.setitem.

    Args:
        obj: root container (any structure supporting item access).
        path: keys/indices leading to the slot to assign; every element but
            the last is traversed with operator.getitem.
        value: the value stored at the final path element.

    Raises:
        ValueError: if *path* is empty (there is no slot to assign).
        KeyError / IndexError / TypeError: propagated from the traversal when
            an intermediate container lacks the requested key/index.

    Example:
        deep_set(config, ["db", "port"], 5432)
    """
    if not path:
        # An empty path would pass the reduce untouched and then die with an
        # obscure IndexError on path[-1]; fail loudly and clearly instead.
        raise ValueError("path must not be empty")
    # Walk to the parent container of the target slot, then assign into it.
    target = functools.reduce(operator.getitem, path[:-1], obj)
    operator.setitem(target, path[-1], value)
# ─────────────────────────────────────────────────────────────────────────────
# Demo
# ─────────────────────────────────────────────────────────────────────────────
if __name__ == "__main__":
    # Smoke-test / demo of every helper in this module.
    print("=== operator demo ===")
    records = [
        {"name": "Alice", "dept": "eng", "salary": 120_000, "active": True},
        {"name": "Bob", "dept": "mkt", "salary": 85_000, "active": True},
        {"name": "Carol", "dept": "eng", "salary": 140_000, "active": False},
        {"name": "Dave", "dept": "mkt", "salary": 92_000, "active": True},
        {"name": "Eve", "dept": "eng", "salary": 105_000, "active": True},
    ]
    print("\n--- sort_by ---")
    by_salary = sort_by(records, "salary", reverse=True)
    for r in by_salary[:3]:
        print(f" {r['name']:8s} ${r['salary']:,}")
    print("\n--- group_by ---")
    groups = group_by(records, "dept")
    for dept, members in sorted(groups.items()):
        print(f" {dept}: {[m['name'] for m in members]}")
    print("\n--- pluck ---")
    print(f" names: {pluck(records, 'name')}")
    print(f" salaries: {pluck(records, 'salary')}")
    print("\n--- sum_field / product_of ---")
    total = sum_field(records, "salary")
    print(f" total salary: ${total:,}")
    print(f" product_of([1,2,3,4,5]): {product_of([1,2,3,4,5])}")
    print("\n--- running_sum / running_product ---")
    print(f" running_sum([1,2,3,4]): {running_sum([1,2,3,4])}")
    print(f" running_product([1,2,3,4]): {running_product([1,2,3,4])}")
    print("\n--- filter_by / make_predicate ---")
    active_eng = filter_by(records, dept="eng", active=True)
    print(f" active eng: {[r['name'] for r in active_eng]}")
    high_earner = make_predicate("salary", ">=", 100_000)
    print(f" high earners: {[r['name'] for r in records if high_earner(r)]}")
    print("\n--- apply_method ---")
    lines = [" Hello ", " World ", " Python "]
    print(f" stripped: {apply_method(lines, 'strip')}")
    words = ["hello", "world"]
    print(f" upper: {apply_method(words, 'upper')}")
    print("\n--- compose / pipe ---")
    normalize = compose(str.lower, str.strip)
    print(f" normalize(' Hello '): {normalize(' Hello ')!r}")
    result = pipe(" Hello World ", str.strip, str.lower, str.split)
    print(f" pipe split: {result}")
    print("\n--- deep_get / deep_set ---")
    config = {"db": {"host": "localhost", "port": 5432}, "app": {"debug": False}}
    print(f" deep_get port: {deep_get(config, 'db', 'port')}")
    deep_set(config, ["app", "debug"], True)
    print(f" deep_set debug: {config['app']['debug']}")
    print("\n--- operator as first-class functions ---")
    # functools is already imported at module level; no local re-import needed.
    nums = [3, 1, 4, 1, 5, 9, 2, 6]
    total = functools.reduce(operator.add, nums, 0)
    product = functools.reduce(operator.mul, nums, 1)
    print(f" sum via reduce(operator.add): {total}")
    print(f" product via reduce(operator.mul): {product}")
    print(f" max via max(key=itemgetter(0)): {max([(3,'c'),(1,'a'),(2,'b')], key=itemgetter(0))}")
    print("\n=== done ===")
For the lambda alternative — anonymous lambda functions inline key extractors and comparators directly at the call site (sorted(rows, key=lambda r: r["age"])) and are always available without an import; operator.itemgetter/attrgetter/methodcaller are faster on CPython because they avoid a Python call frame for each invocation — use lambda for one-off expressions that involve computation beyond simple attribute or item access (e.g., lambda x: x["a"] + x["b"]); use operator functions when the accessor is purely structural (single key, chain of attributes) and performance matters in tight loops over large collections. For the functools alternative — functools.partial creates partially applied functions and pairs naturally with operator callables: functools.partial(operator.mul, 3) returns a “multiply by 3” function; functools.reduce iterates operator binary functions over sequences; the two modules are designed to work together — import both when building functional pipelines: operator for the leaf callables, functools.partial/functools.reduce/functools.lru_cache for higher-order coordination. The Claude Skills 360 bundle includes operator skill sets covering sort_by()/group_by()/pluck()/pluck_attr() sort and extraction helpers, sum_field()/product_of()/running_sum()/running_product() reduce aggregators, make_predicate()/filter_by()/negate() comparator factories, apply_method()/compose()/pipe() transform pipelines, and deep_get()/deep_set() nested item mutation. Start with the free tier to try functional operator patterns and operator pipeline code generation.