itertools provides composable, memory-efficient iterator building blocks. import itertools. chain: itertools.chain([1,2],[3,4]) → 1 2 3 4. chain.from_iterable: itertools.chain.from_iterable([[1,2],[3,4]]). islice: itertools.islice(it, start, stop, step) — lazy slice. count: itertools.count(start, step) — infinite counter. cycle: itertools.cycle([a,b,c]) — infinite repeat. repeat: itertools.repeat(val, n). accumulate: itertools.accumulate([1,2,3], operator.add) → 1 3 6. product: itertools.product("AB","12") → A1 A2 B1 B2. permutations: itertools.permutations([1,2,3], r=2). combinations: itertools.combinations([1,2,3], r=2). combinations_with_replacement: itertools.combinations_with_replacement("ABC", 2). groupby: for key, group in itertools.groupby(sorted_items, key=fn): — consecutive groups. compress: itertools.compress("ABCDE", [1,0,1,0,1]) → A C E. filterfalse: itertools.filterfalse(pred, it) — items where pred is False. takewhile: itertools.takewhile(pred, it) — stop at first False. dropwhile: itertools.dropwhile(pred, it) — drop items while pred is True, then yield the rest. starmap: itertools.starmap(fn, [(a1,b1),(a2,b2)]) — unpack each tuple. zip_longest: itertools.zip_longest(a, b, fillvalue=None). tee: a, b = itertools.tee(it, 2) — fork iterator. pairwise: itertools.pairwise([1,2,3,4]) → (1,2)(2,3)(3,4) (Python 3.10+). batched: itertools.batched(it, n) (Python 3.12+) — fixed-size chunks. Claude Code generates lazy ETL pipelines, combinatoric search spaces, sliding windows, and chunked data processors.
CLAUDE.md for itertools
## itertools Stack
- Stdlib: import itertools, operator
- Flatten: itertools.chain.from_iterable(nested) | list(itertools.chain(*lists))
- Chunk: more_itertools.chunked(it, n) | [lst[i:i+n] for i in range(0,len(lst),n)]
- Group: itertools.groupby(sorted(items, key=k), key=k) — MUST sort first
- Combine: itertools.product(*iterables) | combinations | permutations
- Lazy slice: itertools.islice(it, N) | itertools.takewhile/dropwhile(pred, it)
- Running: itertools.accumulate(it, fn, initial=val)
itertools Iterator Pipeline
# app/iters.py — chain, islice, groupby, product, accumulate, tee, pairwise, batched
from __future__ import annotations
import itertools
import operator
from typing import Any, Callable, Iterable, Iterator, TypeVar
# Generic type variables shared by the helpers below: T is an element type,
# K is a grouping-key type (see group_consecutive).
T = TypeVar("T")
K = TypeVar("K")
# ─────────────────────────────────────────────────────────────────────────────
# 1. Chaining and flattening
# ─────────────────────────────────────────────────────────────────────────────
def flatten(nested: Iterable[Iterable[T]]) -> Iterator[T]:
    """Lazily yield the elements of each inner iterable, one level deep.

    Example:
        list(flatten([[1,2],[3],[4,5]]))  # [1, 2, 3, 4, 5]
    """
    for inner in nested:
        yield from inner
def deep_flatten(obj: Any) -> Iterator:
    """Yield the leaf values of arbitrarily nested lists/tuples, left to right.

    Example:
        list(deep_flatten([1, [2, [3, 4]], 5]))  # [1, 2, 3, 4, 5]
    """
    for element in obj:
        if not isinstance(element, (list, tuple)):
            yield element  # leaf value
        else:
            yield from deep_flatten(element)  # descend into the nesting
def interleave(*iterables: Iterable[T]) -> Iterator[T]:
    """Alternate elements from the given iterables, stopping at the shortest.

    Example:
        list(interleave([1,3,5], [2,4,6]))  # [1, 2, 3, 4, 5, 6]
    """
    for bundle in zip(*iterables):
        yield from bundle
def roundrobin(*iterables: Iterable[T]) -> Iterator[T]:
    """Yield one item from each iterable in turn until every one is exhausted.

    Example:
        list(roundrobin("ABC", "D", "EF"))  # ['A', 'D', 'E', 'B', 'F', 'C']
    """
    # Classic itertools-docs recipe: cycle over the bound __next__ methods,
    # shrinking the cycle each time a source runs dry.
    alive = len(iterables)
    pullers = itertools.cycle(iter(src).__next__ for src in iterables)
    while alive:
        try:
            for pull in pullers:
                yield pull()
        except StopIteration:
            alive -= 1
            pullers = itertools.cycle(itertools.islice(pullers, alive))
# ─────────────────────────────────────────────────────────────────────────────
# 2. Slicing and windowing
# ─────────────────────────────────────────────────────────────────────────────
def take(n: int, iterable: Iterable[T]) -> list[T]:
    """Materialize the first *n* elements of *iterable* as a list.

    Example:
        take(3, itertools.count())  # [0, 1, 2]
    """
    head = itertools.islice(iterable, n)
    return [*head]
def drop(n: int, iterable: Iterable[T]) -> Iterator[T]:
    """Lazily skip the first *n* elements and yield everything after them.

    Example:
        list(drop(2, [1,2,3,4,5]))  # [3, 4, 5]
    """
    remainder = itertools.islice(iterable, n, None)
    return remainder
def chunked(iterable: Iterable[T], size: int) -> Iterator[tuple[T, ...]]:
    """
    Split *iterable* into fixed-size tuples (the final chunk may be smaller).

    Delegates to itertools.batched when available (Python 3.12+, C-accelerated);
    otherwise repeatedly pulls `size` items with itertools.islice.

    Args:
        iterable: source of elements, consumed lazily.
        size: maximum chunk length; must be >= 1.

    Raises:
        ValueError: if size < 1 (matches itertools.batched's contract).

    Example:
        list(chunked(range(7), 3))  # [(0,1,2), (3,4,5), (6,)]
    """
    if size < 1:
        raise ValueError("size must be >= 1")
    batched = getattr(itertools, "batched", None)
    if batched is not None:
        # Python 3.12+ fast path (the docstring promised this; now it's true).
        yield from batched(iterable, size)
        return
    it = iter(iterable)
    while chunk := tuple(itertools.islice(it, size)):
        yield chunk
def windows(iterable: Iterable[T], n: int) -> Iterator[tuple[T, ...]]:
    """Slide a length-*n* window across *iterable*, yielding overlapping tuples.

    Example:
        list(windows([1,2,3,4,5], 3))  # [(1,2,3),(2,3,4),(3,4,5)]
    """
    streams = list(itertools.tee(iterable, n))
    for offset, stream in enumerate(streams):
        # Stagger each tee'd copy by its position in the window.
        for _ in range(offset):
            next(stream, None)
    return zip(*streams)
def pairwise(iterable: Iterable[T]) -> Iterator[tuple[T, T]]:
    """Yield consecutive (a, b) pairs — backport of itertools.pairwise (3.10+).

    Example:
        list(pairwise([1,2,3,4]))  # [(1,2),(2,3),(3,4)]
    """
    iterator = iter(iterable)
    try:
        previous = next(iterator)
    except StopIteration:
        return  # fewer than one element: no pairs
    for current in iterator:
        yield (previous, current)
        previous = current
# ─────────────────────────────────────────────────────────────────────────────
# 3. Grouping and partitioning
# ─────────────────────────────────────────────────────────────────────────────
def group_consecutive(
iterable: Iterable[T],
key: Callable[[T], K] | None = None,
) -> Iterator[tuple[K, list[T]]]:
"""
Group consecutive equal-key elements (must sort first for full grouping).
Example:
items = ["apple","apricot","banana","blueberry","cherry"]
for letter, words in group_consecutive(items, key=lambda w: w[0]):
print(letter, list(words))
"""
for k, group in itertools.groupby(iterable, key=key):
yield k, list(group)
def partition(
    pred: Callable[[T], bool],
    iterable: Iterable[T],
) -> tuple[list[T], list[T]]:
    """Split *iterable* into (matching, non_matching) lists by *pred*.

    Example:
        evens, odds = partition(lambda x: x % 2 == 0, range(10))
    """
    first_pass, second_pass = itertools.tee(iterable)
    true_items = [item for item in first_pass if pred(item)]
    false_items = [item for item in second_pass if not pred(item)]
    return true_items, false_items
def unique_consecutive(iterable: Iterable[T]) -> Iterator[T]:
    """Collapse each run of equal adjacent elements to one value (like uniq).

    Example:
        list(unique_consecutive([1,1,2,3,3,3,2]))  # [1, 2, 3, 2]
    """
    # groupby yields (key, run); we only need the key of each run.
    return map(operator.itemgetter(0), itertools.groupby(iterable))
def split_at(
    iterable: Iterable[T],
    pred: Callable[[T], bool],
) -> Iterator[list[T]]:
    """Break *iterable* into lists, splitting on (and dropping) separator items.

    Example:
        list(split_at([1,2,0,3,4,0,5], lambda x: x == 0))
        # [[1, 2], [3, 4], [5]]
    """
    bucket: list[T] = []
    for element in iterable:
        if not pred(element):
            bucket.append(element)
            continue
        yield bucket  # everything since the previous separator
        bucket = []
    yield bucket  # trailing segment (may be empty)
# ─────────────────────────────────────────────────────────────────────────────
# 4. Accumulation and running computations
# ─────────────────────────────────────────────────────────────────────────────
def running_sum(values: Iterable[float]) -> Iterator[float]:
    """Yield the cumulative sum of *values*.

    Example:
        list(running_sum([1, 2, 3, 4]))  # [1, 3, 6, 10]
    """
    return itertools.accumulate(values)  # accumulate's default step is addition
def running_max(values: Iterable[T]) -> Iterator[T]:
    """Yield the largest element seen so far at each position.

    Example:
        list(running_max([3,1,4,1,5,9,2]))  # [3, 3, 4, 4, 5, 9, 9]
    """
    iterator = iter(values)
    try:
        best = next(iterator)
    except StopIteration:
        return  # empty input: nothing to yield
    yield best
    for item in iterator:
        if item > best:  # strict >, so ties keep the earlier value (like max)
            best = item
        yield best
def running_min(values: Iterable[T]) -> Iterator[T]:
    """Yield the smallest element seen so far at each position."""
    return itertools.accumulate(values, func=min)
def cumulative_product(values: Iterable[float]) -> Iterator[float]:
    """Yield the running product of *values*.

    Example:
        list(cumulative_product([1,2,3,4]))  # [1, 2, 6, 24]
    """
    return itertools.accumulate(values, lambda acc, value: acc * value)
# ─────────────────────────────────────────────────────────────────────────────
# 5. Combinatorics
# ─────────────────────────────────────────────────────────────────────────────
def grid_coords(rows: int, cols: int) -> Iterator[tuple[int, int]]:
    """Yield every (row, col) pair of a rows×cols grid in row-major order.

    Example:
        list(grid_coords(2, 3))  # [(0,0),(0,1),(0,2),(1,0),(1,1),(1,2)]
    """
    for row in range(rows):
        for col in range(cols):
            yield (row, col)
def cartesian(*iterables: Iterable[Any]) -> Iterator[tuple]:
    """Yield every tuple in the cartesian product of *iterables*.

    Example:
        list(cartesian([1,2], ["a","b"], [True]))  # [(1,'a',True),(1,'b',True),...]
    """
    # Materialize each input once (as itertools.product does), then expand
    # recursively with the rightmost pool varying fastest.
    pools = [tuple(pool) for pool in iterables]

    def _expand(index: int) -> Iterator[tuple]:
        if index == len(pools):
            yield ()
            return
        for head in pools[index]:
            for tail in _expand(index + 1):
                yield (head, *tail)

    return _expand(0)
def all_subsets(items: list[T]) -> Iterator[tuple[T, ...]]:
    """Yield every subset (the powerset) of *items*, smallest sizes first.

    Example:
        list(all_subsets([1,2,3]))
        # [(), (1,), (2,), (3,), (1,2), (1,3), (2,3), (1,2,3)]
    """
    sizes = range(len(items) + 1)
    return itertools.chain.from_iterable(
        itertools.combinations(items, size) for size in sizes
    )
def hyperparameter_grid(param_grid: dict[str, list[Any]]) -> Iterator[dict[str, Any]]:
    """Yield one dict per combination of the parameter values in *param_grid*.

    Example:
        for params in hyperparameter_grid({"lr":[0.1,0.01],"batch":[32,64]}):
            train_model(**params)
    """
    names = tuple(param_grid)  # iterating a dict yields its keys, in order
    for chosen in itertools.product(*param_grid.values()):
        yield dict(zip(names, chosen))
# ─────────────────────────────────────────────────────────────────────────────
# 6. Infinite iterators
# ─────────────────────────────────────────────────────────────────────────────
def integers_from(start: int = 0, step: int = 1) -> Iterator[int]:
    """Count upward from *start* forever, advancing by *step*.

    Example:
        list(itertools.islice(integers_from(1, 2), 5))  # [1,3,5,7,9]
    """
    value = start
    while True:
        yield value
        value += step
def cycle_values(values: Iterable[T]) -> Iterator[T]:
    """Cycle through *values* forever (first pass is cached, like itertools.cycle).

    Example:
        colors = cycle_values(["red","green","blue"])
        [next(colors) for _ in range(7)]
    """
    cache: list[T] = []
    for value in values:
        cache.append(value)
        yield value
    while cache:  # an empty source yields nothing, as itertools.cycle does
        yield from cache
def repeat_each(iterable: Iterable[T], n: int) -> Iterator[T]:
    """Yield each element of *iterable* repeated *n* times in a row.

    Example:
        list(repeat_each([1,2,3], 2))  # [1,1,2,2,3,3]
    """
    for element in iterable:
        for _ in range(n):
            yield element
# ─────────────────────────────────────────────────────────────────────────────
# Demo
# ─────────────────────────────────────────────────────────────────────────────
if __name__ == "__main__":
    # Smoke-test / showcase: exercises each helper defined above and prints
    # the results, so the module can be run directly as a demo.
    print("=== itertools demo ===")
    print("\n--- flatten ---")
    print(f" {list(flatten([[1,2],[3],[4,5]]))}")
    print("\n--- chunked ---")
    print(f" {list(chunked(range(7), 3))}")
    print("\n--- windows ---")
    print(f" {list(windows([1,2,3,4,5], 3))}")
    print("\n--- pairwise ---")
    print(f" {list(pairwise([1,2,3,4]))}")
    print("\n--- group_consecutive ---")
    # groupby only merges *adjacent* equal keys, so sort by the same key first.
    words = sorted(["apple","apricot","banana","blueberry","cherry"], key=lambda w: w[0])
    for letter, group in group_consecutive(words, key=lambda w: w[0]):
        print(f" {letter}: {group}")
    print("\n--- partition ---")
    evens, odds = partition(lambda x: x % 2 == 0, range(10))
    print(f" evens={evens} odds={odds}")
    print("\n--- unique_consecutive ---")
    print(f" {list(unique_consecutive([1,1,2,3,3,3,2]))}")
    print("\n--- split_at ---")
    print(f" {list(split_at([1,2,0,3,4,0,5], lambda x: x == 0))}")
    print("\n--- running_sum / running_max ---")
    vals = [3.0, 1.0, 4.0, 1.0, 5.0, 9.0]
    print(f" running_sum: {list(running_sum(vals))}")
    print(f" running_max: {list(running_max(vals))}")
    print("\n--- hyperparameter_grid ---")
    grid = {"lr": [0.1, 0.01], "batch": [32, 64]}
    combos = list(hyperparameter_grid(grid))
    print(f" {len(combos)} combos: {combos}")
    print("\n--- all_subsets ---")
    subsets = list(all_subsets([1,2,3]))
    print(f" {subsets}")
    print("\n--- roundrobin ---")
    print(f" {list(roundrobin('ABC','D','EF'))}")
    print("\n--- interleave ---")
    print(f" {list(interleave([1,3,5],[2,4,6]))}")
    print("\n--- repeat_each ---")
    print(f" {list(repeat_each([1,2,3], 2))}")
    print("\n=== done ===")
For the more-itertools alternative — more-itertools (PyPI) extends the stdlib with 80+ additional recipes including chunked, windowed, flatten, bucket, peekable, side_effect, spy, distribute, divide, split_before, split_after, and many more; Python’s stdlib itertools covers the fundamental lazy building blocks and common combinatorial functions — use more-itertools when you find yourself writing the same itertools recipes repeatedly or need the richer API (peekable, one, only, first), stdlib itertools when zero external dependencies are required or you are writing a library that should not impose transitive dependencies. For the toolz alternative — toolz provides higher-level iterable functions (curry, pipe, compose, frequencies, groupby, partition_all, mapcat) that complement itertools; where itertools gives composable primitives, toolz gives a complete functional-style data transformation toolkit including currying and function pipelines — use toolz when building data pipelines with a functional-programming style, stdlib itertools when you need lazy combinatorics (products, permutations) or infinite iterators that toolz does not provide. The Claude Skills 360 bundle includes itertools skill sets covering flatten()/deep_flatten()/interleave()/roundrobin() chaining, take()/drop()/chunked()/windows()/pairwise() slicing, group_consecutive()/partition()/unique_consecutive()/split_at() grouping, running_sum()/running_max()/cumulative_product() accumulation, hyperparameter_grid()/all_subsets()/cartesian() combinatorics, and repeat_each()/cycle_values() infinite iterators. Start with the free tier to try iterator composition and itertools pipeline code generation.