funcy is a practical functional programming library for Python. pip install funcy. Compose: from funcy import compose; fn = compose(str, abs, int) — right-to-left. rcompose: left-to-right. partial: from funcy import partial; double = partial(mul, 2). curry: from funcy import curry; @curry; def add(a,b): return a+b; add(1)(2). memoize: from funcy import memoize; @memoize; def fib(n): .... memoize_plain: no cache invalidation. group_by: from funcy import group_by; group_by(len, ["hi","hello","hey"]) → {2:["hi"],5:["hello"],3:["hey"]}. count_by: count_by(len, words) → {2:1,5:1,3:1}. distinct: from funcy import distinct; distinct([1,2,1,3]) → [1,2,3]. flatten: from funcy import flatten; flatten([[1,2],[3,4]]) → [1,2,3,4] (flattens all nesting levels). chunks: from funcy import chunks; chunks(2, [1,2,3,4]) → [[1,2],[3,4]]. first: from funcy import first; first([3,2,1]) → 3. last: last(seq). nth: nth(2, seq). take: from funcy import take; take(3, range(100)). drop: drop(2, seq). rest: rest(seq) — skip first. take_while: from funcy import take_while; take_while(lambda x: x<5, range(10)). Lazy map/filter: funcy's map and filter are extended lazy versions (lmap/lfilter return lists); the Python-2-era names imap and ifilter are not available in funcy 2.x. merge: from funcy import merge; merge(d1, d2) — shallow merge, later keys win (nested dicts are replaced, not merged). project: project({"a":1,"b":2}, ["a"]) → {"a":1}. omit: omit(d, ["key"]). select_keys: select_keys(fn, d). walk_values: from funcy import walk_values; walk_values(str.upper, d). retry: from funcy import retry; @retry(3, errors=RequestException). silent: from funcy import silent; silent(int)("bad") → None. Claude Code generates funcy data transformers, memoized fetchers, retry wrappers, and functional pipelines.
CLAUDE.md for funcy
## funcy Stack
- Version: funcy >= 2.0 | pip install funcy
- Compose: compose(f3, f2, f1)(x) → f3(f2(f1(x))) | rcompose(f1,f2,f3)
- Group: group_by(key_fn, seq) | count_by(key_fn, seq)
- Sequence: take(n, seq) | drop(n, seq) | chunks(n, seq) | flatten(nested)
- Dict: merge(a, b) | project(d, keys) | omit(d, keys) | walk_values(fn, d)
- Retry: @retry(3, errors=MyError) | @silent wraps exceptions → None
# funcy Functional Utilities Pipeline
# app/functional.py — funcy compose, group, sequence, dict, memoize, retry, predicates
from __future__ import annotations
import time
from typing import Any, Callable, Iterable, TypeVar
from funcy import (
autocurry,
chunks,
compose,
count_by,
curry,
distinct,
drop,
first,
flatten,
group_by,
ifilter,
imap,
last,
memoize,
merge,
nth,
omit,
partial,
project,
rcompose,
rest,
retry,
select_keys,
silent,
take,
take_while,
walk_keys,
walk_values,
)
T = TypeVar("T")
K = TypeVar("K")
V = TypeVar("V")
# ─────────────────────────────────────────────────────────────────────────────
# 1. Function composition
# ─────────────────────────────────────────────────────────────────────────────
def pipeline(*fns: Callable) -> Callable:
"""
Create a left-to-right function pipeline (rcompose).
Example:
clean = pipeline(str.strip, str.lower, str.title)
clean(" hello world ") # "Hello World"
to_int = pipeline(str.strip, int)
to_int(" 42 ") # 42
"""
return rcompose(*fns)
def transform(value: Any, *fns: Callable) -> Any:
"""
Apply a sequence of functions left-to-right to a value.
Example:
transform(" HELLO ", str.strip, str.lower) # "hello"
transform(4, lambda x: x**2, lambda x: x+1) # 17
"""
result = value
for fn in fns:
result = fn(result)
return result
def memoized(fn: Callable) -> Callable:
"""
Memoize a function using funcy's @memoize.
Results are cached indefinitely (LRU available via functools.lru_cache).
Example:
@memoized
def expensive(n):
time.sleep(0.1)
return n * n
"""
return memoize(fn)
# ─────────────────────────────────────────────────────────────────────────────
# 2. Collection grouping and counting
# ─────────────────────────────────────────────────────────────────────────────
def group_records(records: list[dict], key: str) -> dict[Any, list[dict]]:
"""
Group a list of dicts by a field value.
Example:
users = [{"dept": "eng", "name": "Alice"}, {"dept": "hr", "name": "Bob"}, ...]
by_dept = group_records(users, "dept")
# {"eng": [{"dept":"eng","name":"Alice"},...], "hr": [...]}
"""
return group_by(lambda r: r.get(key), records)
def count_records(records: list[dict], key: str) -> dict[Any, int]:
"""
Count records by a field value.
Example:
count_records(orders, "status")
# {"pending": 3, "shipped": 10, "returned": 1}
"""
return count_by(lambda r: r.get(key), records)
def group_by_fn(key_fn: Callable, seq: Iterable) -> dict:
"""
Group sequence items by a key function.
Example:
group_by_fn(len, ["hi", "hello", "hey"])
# {2: ["hi"], 5: ["hello"], 3: ["hey"]}
group_by_fn(lambda x: x % 3, range(9))
# {0: [0,3,6], 1: [1,4,7], 2: [2,5,8]}
"""
return group_by(key_fn, seq)
def frequencies(seq: Iterable) -> dict:
"""Count occurrences of each element."""
return count_by(lambda x: x, seq)
# ─────────────────────────────────────────────────────────────────────────────
# 3. Sequence utilities
# ─────────────────────────────────────────────────────────────────────────────
def batch(seq: Iterable[T], size: int) -> list[list[T]]:
"""
Split sequence into fixed-size batches.
Example:
batch(range(10), 3) # [[0,1,2], [3,4,5], [6,7,8], [9]]
"""
return list(chunks(size, seq))
def unique(seq: Iterable[T], key: Callable | None = None) -> list[T]:
"""
Remove duplicates while preserving order.
Example:
unique([3, 1, 4, 1, 5, 9, 2, 6, 5]) # [3, 1, 4, 5, 9, 2, 6]
unique(users, key=lambda u: u["email"]) # deduplicate by email
"""
return list(distinct(seq, key=key) if key else distinct(seq))
def flatten_list(nested: Iterable, depth: int = 1) -> list:
"""
Flatten a nested list structure.
depth=1 flattens one level; pass a large number for full flatten.
Example:
flatten_list([[1, 2], [3, [4, 5]]]) # [1, 2, 3, [4, 5]]
flatten_list([[1, 2], [3, [4, 5]]], depth=2) # [1, 2, 3, 4, 5]
"""
result = list(nested)
for _ in range(depth):
result = list(flatten(result))
return result
def window(seq: list[T], size: int, step: int = 1) -> list[list[T]]:
"""
Sliding window over a sequence.
Example:
window([1, 2, 3, 4, 5], 3) # [[1,2,3], [2,3,4], [3,4,5]]
window([1, 2, 3, 4, 5], 3, step=2) # [[1,2,3], [3,4,5]]
"""
return [seq[i:i + size] for i in range(0, len(seq) - size + 1, step)]
def page(seq: list[T], page_num: int, per_page: int = 20) -> list[T]:
"""
Return a single page from a list.
Example:
page(items, 2, per_page=10) # items[10:20]
"""
start = (page_num - 1) * per_page
return seq[start:start + per_page]
def safe_first(seq: Iterable[T], default: T | None = None) -> T | None:
"""Return first element or default."""
result = first(seq)
return result if result is not None else default
def safe_last(seq: list[T], default: T | None = None) -> T | None:
"""Return last element or default."""
result = last(seq)
return result if result is not None else default
# ─────────────────────────────────────────────────────────────────────────────
# 4. Dict operations
# ─────────────────────────────────────────────────────────────────────────────
def pick(d: dict, keys: list) -> dict:
"""
Extract only the specified keys from a dict.
Example:
pick({"a": 1, "b": 2, "c": 3}, ["a", "c"]) # {"a": 1, "c": 3}
"""
return project(d, keys)
def omit_keys(d: dict, keys: list) -> dict:
"""
Return dict without the specified keys.
Example:
omit_keys(user, ["password", "_id"])
"""
return omit(d, keys)
def deep_merge(*dicts: dict) -> dict:
"""
Merge multiple dicts, later values overwrite earlier (shallow).
Example:
deep_merge(defaults, overrides, local_overrides)
"""
result = {}
for d in dicts:
result = merge(result, d)
return result
def rename_keys(d: dict, mapping: dict[str, str]) -> dict:
"""
Rename dict keys.
Example:
rename_keys({"firstName": "Alice", "lastName": "Smith"},
{"firstName": "first_name", "lastName": "last_name"})
# {"first_name": "Alice", "last_name": "Smith"}
"""
return {mapping.get(k, k): v for k, v in d.items()}
def transform_values(d: dict, fn: Callable) -> dict:
"""Apply fn to all values in a dict."""
return walk_values(fn, d)
def transform_keys(d: dict, fn: Callable) -> dict:
"""Apply fn to all keys in a dict."""
return walk_keys(fn, d)
def filter_dict(d: dict, predicate: Callable) -> dict:
"""
Keep only keys that pass the predicate.
Example:
filter_dict({"a": 1, "b": None, "c": 3}, lambda k: d[k] is not None)
"""
return {k: v for k, v in d.items() if predicate(k)}
def filter_values(d: dict, predicate: Callable) -> dict:
"""
Keep only entries whose value passes the predicate.
Example:
filter_values({"a": 1, "b": None, "c": 3}, lambda v: v is not None)
# {"a": 1, "c": 3}
"""
return {k: v for k, v in d.items() if predicate(v)}
# ─────────────────────────────────────────────────────────────────────────────
# 5. Retry and error-silent wrappers
# ─────────────────────────────────────────────────────────────────────────────
def with_retry(
fn: Callable,
times: int = 3,
errors: type | tuple = Exception,
delay: float = 1.0,
) -> Callable:
"""
Wrap a function with retry logic.
delay: seconds to sleep between attempts.
Example:
safe_fetch = with_retry(requests.get, times=3, errors=ConnectionError, delay=2.0)
response = safe_fetch("https://api.example.com/data")
"""
@retry(times, errors=errors)
def wrapper(*args, **kwargs):
try:
return fn(*args, **kwargs)
except errors:
time.sleep(delay)
raise
return wrapper
def silent_call(fn: Callable, *args, default: Any = None, **kwargs) -> Any:
"""
Call fn, returning default instead of raising on any exception.
Example:
val = silent_call(int, "abc", default=0) # 0
val = silent_call(json.loads, "{bad}", default={}) # {}
"""
try:
return fn(*args, **kwargs)
except Exception:
return default
def safe_map(fn: Callable, seq: Iterable, skip_errors: bool = True) -> list:
"""
Map fn over seq, optionally skipping items that raise exceptions.
Example:
ints = safe_map(int, ["1", "2", "bad", "4"]) # [1, 2, 4]
"""
results = []
for item in seq:
try:
results.append(fn(item))
except Exception:
if not skip_errors:
raise
return results
# ─────────────────────────────────────────────────────────────────────────────
# Demo
# ─────────────────────────────────────────────────────────────────────────────
if __name__ == "__main__":
print("=== Function composition ===")
clean = pipeline(str.strip, str.lower)
print(f" clean(' HELLO '): {clean(' HELLO ')!r}")
print(f" transform(4, sq, +1): {transform(4, lambda x: x**2, lambda x: x+1)}")
print("\n=== Grouping ===")
words = ["hi", "hello", "hey", "bye", "world"]
print(f" group_by(len): {group_by_fn(len, words)}")
print(f" count_by(len): {count_by(len, words)}")
print(f" frequencies: {frequencies(['a','b','a','c','b','a'])}")
print("\n=== Sequences ===")
nums = list(range(10))
print(f" batch(range(10),3): {batch(range(10), 3)}")
print(f" unique([3,1,4,1,5]): {unique([3,1,4,1,5])}")
print(f" flatten([[1,2],[3,[4]]],2): {flatten_list([[1,2],[3,[4]]], 2)}")
print(f" window(range(5),3): {window(list(range(5)), 3)}")
print("\n=== Dict ops ===")
user = {"id": 1, "name": "Alice", "password": "secret", "email": "[email protected]"}
print(f" pick keys: {pick(user, ['id','name'])}")
print(f" omit_keys: {omit_keys(user, ['password'])}")
print(f" rename: {rename_keys({'firstName': 'Alice'}, {'firstName': 'first_name'})}")
print(f" filter_vals: {filter_values({'a':1,'b':None,'c':3}, lambda v: v is not None)}")
print("\n=== safe_map ===")
print(f" safe_map(int, ['1','bad','3']): {safe_map(int, ['1','bad','3'])}")
print("\n=== silent_call ===")
import json
print(f" silent_call(int, 'abc', default=0): {silent_call(int, 'abc', default=0)}")
print(f" silent_call(json.loads, '{{bad}}', default={}): {silent_call(json.loads, '{bad}', default={})}")
For the toolz alternative — toolz provides a similar set of functional utilities (compose, curry, merge, groupby, partition) with a focus on performance via C-accelerated cytoolz; funcy has a broader API including retry, silent, memoize_plain, walk_keys/walk_values, first/last/rest, and decorator utilities — use toolz/cytoolz when raw iteration performance is the priority, funcy when you want a broader set of higher-level utilities including error handling and decorators. For the itertools / more-itertools alternative — itertools and more-itertools are stdlib/extension packages for iterator operations; funcy wraps many of their patterns with named, self-documenting functions (chunks, flatten, take_while, distinct) and adds dict utilities, function composition, and retry/silent helpers that itertools does not cover — use itertools for zero-dependency core iteration, funcy when you also need dict ops, memoize, and function composition in the same import. The Claude Skills 360 bundle includes funcy skill sets covering pipeline()/rcompose(), transform(), memoized(), group_records()/count_records()/group_by_fn()/frequencies(), batch()/unique()/flatten_list()/window()/page(), safe_first()/safe_last(), pick()/omit_keys()/deep_merge()/rename_keys()/transform_values()/transform_keys()/filter_dict()/filter_values(), with_retry()/silent_call()/safe_map(). Start with the free tier to try functional programming utility code generation.