Functions in Python are objects. You can pass them around, store them in variables, return them from other functions, and modify them at runtime. This is the foundation for decorators, callbacks, and most of Python’s elegant patterns.
Function Basics
# Basic function
def add(a: int, b: int) -> int:
    return a + b

# Default arguments — used when the caller omits the argument
def greet(name: str, greeting: str = "Hello") -> str:
    return f"{greeting}, {name}!"

greet("Alice")         # "Hello, Alice!"
greet("Alice", "Hey")  # "Hey, Alice!"

# Keyword arguments (order doesn't matter)
greet(greeting="Howdy", name="Bob")  # "Howdy, Bob!"

*args and **kwargs
# *args — variable positional arguments (collected as a tuple)
def total(*nums):
    return sum(nums)

total(1, 2, 3)         # 6
total(10, 20, 30, 40)  # 100

# **kwargs — variable keyword arguments (collected as a dict)
def build_user(**kwargs):
    return kwargs

build_user(name="Alice", age=30, role="admin")
# {"name": "Alice", "age": 30, "role": "admin"}

# Combined — order matters: positional, *args, keyword-only, **kwargs
def api_call(method, url, *args, timeout=30, **headers):
    print(f"{method} {url}, timeout={timeout}")
    print(f"Headers: {headers}")

api_call("GET", "/users", timeout=10, Authorization="Bearer xxx")

# Unpacking into function calls
def add(a, b, c):
    return a + b + c

nums = [1, 2, 3]
add(*nums)  # 6 — unpacks list into positional args
config = {"a": 10, "b": 20, "c": 30}
add(**config)  # 60 — unpacks dict into keyword args

Keyword-Only and Positional-Only Parameters
# Keyword-only parameters (everything after the bare *)
def fetch(url, *, timeout=30, retries=3):
    """timeout and retries MUST be passed as keyword args."""
    ...

fetch("/api", timeout=10)  # OK
# fetch("/api", 10, 3)     # TypeError!

# Positional-only parameters (everything before the /) — Python 3.8+
def power(base, exp, /):
    """base and exp MUST be passed positionally."""
    return base ** exp

power(2, 10)  # OK
# power(base=2, exp=10)  # TypeError!

First-Class Functions
# Functions are plain objects — bind them to new names
def square(x):
    return x ** 2

f = square  # `f` and `square` now refer to the very same function object
f(5)  # 25

# Functions can be passed as arguments like any other value
def apply(func, value):
    return func(value)

apply(square, 5)        # 25
apply(len, "hello")     # 5
apply(str.upper, "hi")  # "HI"

# ...or stored inside data structures for table-driven dispatch
operations = {
    "add": lambda a, b: a + b,
    "sub": lambda a, b: a - b,
    "mul": lambda a, b: a * b,
}
operations["add"](10, 3)  # 13
operations["mul"](4, 5)   # 20

# ...and returned from other functions (the inner function closes over `factor`)
def multiplier(factor):
    def scale(x):
        return x * factor
    return scale

double = multiplier(2)
triple = multiplier(3)
double(5)  # 10
triple(5)  # 15

Lambda Functions
# Lambda — an anonymous, single-expression function
square = lambda x: x ** 2
add = lambda a, b: a + b

# Most useful as short inline callbacks
names = ["Charlie", "Alice", "Bob"]
sorted(names)                        # ["Alice", "Bob", "Charlie"]
sorted(names, key=lambda n: len(n))  # ["Bob", "Alice", "Charlie"]
sorted(names, key=lambda n: n[-1])   # sort by last character

# With map/filter
nums = [1, 2, 3, 4, 5]
list(map(lambda x: x ** 2, nums))         # [1, 4, 9, 16, 25]
list(filter(lambda x: x % 2 == 0, nums))  # [2, 4]

# But comprehensions are usually better:
[x ** 2 for x in nums]  # Same, more readable
[x for x in nums if x % 2 == 0]  # Same, more readable

Closures
A closure is a function that remembers variables from its enclosing scope, even after that scope has finished executing.
def counter(start=0):
    """Build an independent counter: each call returns start+1, start+2, ..."""
    count = start

    def bump():
        nonlocal count  # without this, `count += 1` would create a new local
        count += 1
        return count

    return bump

c = counter(10)
c()  # 11
c()  # 12
c()  # 13
# The `count` variable lives on inside the closure
# even though counter() has finished executing

Decorators
A decorator is a function that takes a function and returns a modified version of it.
import functools
import time
# Basic decorator
def timer(func):
    """Wrap `func` so every call prints its wall-clock duration."""
    @functools.wraps(func)  # Preserves original function's name and docstring
    def wrapper(*args, **kwargs):
        tick = time.perf_counter()
        result = func(*args, **kwargs)
        tock = time.perf_counter()
        print(f"{func.__name__} took {tock - tick:.4f}s")
        return result
    return wrapper

@timer
def slow_function():
    time.sleep(1)
    return "done"
slow_function()  # prints: "slow_function took 1.0012s", returns "done"

Decorator with Arguments
def retry(max_attempts=3, delay=1.0):
    """Decorator factory: retry the wrapped function up to `max_attempts`
    times, sleeping `delay` seconds between failed attempts, and re-raise
    the last exception when every attempt fails."""
    def wrap(func):
        @functools.wraps(func)
        def attempt_call(*args, **kwargs):
            failure = None
            for attempt in range(1, max_attempts + 1):
                try:
                    return func(*args, **kwargs)
                except Exception as exc:
                    failure = exc
                    print(f"Attempt {attempt}/{max_attempts} failed: {exc}")
                    if attempt < max_attempts:
                        time.sleep(delay)
            # All attempts exhausted — surface the most recent error.
            raise failure
        return attempt_call
    return wrap

@retry(max_attempts=5, delay=2.0)
def fetch_data(url):
    """Might fail due to network issues."""
    ...

Stacking Decorators
# Decorators apply bottom-up: the one closest to `def` wraps first.
@timer
@retry(max_attempts=3)
def fetch_data(url):
    ...

# This is equivalent to:
# fetch_data = timer(retry(max_attempts=3)(fetch_data))
# Inner decorator (retry) wraps first, outer (timer) wraps last

Class-Based Decorators
class CacheResult:
    """Class-based decorator that memoizes calls by their positional args."""

    def __init__(self, func):
        self.func = func
        self.cache = {}
        functools.update_wrapper(self, func)  # keep the wrapped name/docstring

    def __call__(self, *args):
        # EAFP: assume a cache hit, compute only on a miss.
        try:
            return self.cache[args]
        except KeyError:
            result = self.cache[args] = self.func(*args)
            return result

@CacheResult
def fibonacci(n):
    return n if n < 2 else fibonacci(n - 1) + fibonacci(n - 2)
fibonacci(100)  # Instant — results are cached

Built-in Decorators You Should Know
# @property — computed/validated attributes with plain attribute syntax
class Circle:
    def __init__(self, radius):
        self._radius = radius

    @property
    def radius(self):
        """Current radius, read via attribute access (no parentheses)."""
        return self._radius

    @radius.setter
    def radius(self, value):
        # Validation runs on every assignment to `c.radius`.
        if value < 0:
            raise ValueError("Radius must be non-negative")
        self._radius = value

    @property
    def area(self):
        """Area recomputed from the radius on each access."""
        return 3.14159 * self._radius ** 2

c = Circle(5)
c.radius       # 5 (no parentheses — looks like attribute)
c.area         # 78.53975
c.radius = 10  # Uses setter
# c.radius = -1  # ValueError
# @staticmethod / @classmethod
class User:
    _count = 0  # class-level state shared by every instance

    def __init__(self, name):
        self.name = name
        User._count += 1

    @classmethod
    def get_count(cls):
        """Read class-level state; the first argument is the class itself."""
        return cls._count

    @staticmethod
    def validate_name(name):
        """Pure helper namespaced on the class — touches no self or cls."""
        return len(name) >= 2
# @functools.lru_cache — built-in memoization (bounded to 128 entries)
@functools.lru_cache(maxsize=128)
def fibonacci(n):
    if n < 2:
        return n
    return fibonacci(n - 1) + fibonacci(n - 2)

Generators
A generator is a function that produces a sequence of values lazily — one at a time, on demand — instead of computing everything upfront.
# Regular function — materializes the entire list in memory up front
def get_squares(n):
    return [i ** 2 for i in range(n)]

# Generator — produces one value per request instead
def gen_squares(n):
    for i in range(n):
        yield i ** 2

# Usage is the same
for sq in gen_squares(1_000_000):
    if sq > 100:
        break
# Only the squares up through 11² were ever computed — the rest were never generated

How yield Works
def countdown(n):
    """Count down from n to 1 lazily, announcing start and finish."""
    print("Starting countdown")
    for remaining in range(n, 0, -1):
        yield remaining  # paused here until the caller asks for the next value
    print("Done!")

gen = countdown(3)
next(gen)  # prints "Starting countdown", returns 3
next(gen)  # returns 2
next(gen)  # returns 1
next(gen)  # prints "Done!", raises StopIteration

Generator Expressions
# List comprehension — creates the entire list in memory
squares = [x**2 for x in range(1_000_000)]  # 1M items in RAM

# Generator expression — lazy, almost zero memory
squares = (x**2 for x in range(1_000_000))  # Generator object

# Use generator expressions when you only iterate once
total = sum(x**2 for x in range(1_000_000))  # No intermediate list
any(x > 100 for x in range(1_000_000))  # Stops at first True

yield from — Delegating to Sub-generators
def flatten(nested):
    """Yield the non-list leaves of an arbitrarily nested list, left to right."""
    for element in nested:
        if not isinstance(element, list):
            yield element
        else:
            yield from flatten(element)  # delegate to the recursive sub-generator

list(flatten([1, [2, 3], [4, [5, 6]]]))
# [1, 2, 3, 4, 5, 6]

Real-World Generator: Reading Large Files
def read_large_csv(filepath):
    """Read a CSV file row by row without loading it all into memory.

    Yields one dict per data row, keyed by the fields of the header row.
    """
    import csv  # stdlib parser; local import keeps the snippet self-contained

    # newline="" is required by the csv module so line endings and embedded
    # newlines inside quoted fields are handled correctly.
    with open(filepath, newline="") as f:
        # csv.DictReader consumes the header itself and — unlike a naive
        # line.split(",") — correctly parses quoted fields containing commas.
        yield from csv.DictReader(f)
# Process a 10GB CSV with constant memory usage — only one row is ever
# resident at a time because read_large_csv is a generator.
for row in read_large_csv("massive_data.csv"):
    process(row)

Key Takeaways
- Functions are first-class objects — pass them, store them, return them.
- Use *args and **kwargs for flexible function signatures.
- @decorator is syntactic sugar for func = decorator(func).
- Always use @functools.wraps in decorators to preserve function metadata.
- Generators (yield) produce values lazily — essential for large datasets.
- Generator expressions (x for x in ...) are the lazy version of list comprehensions.
- Use @property to create computed attributes. Use @lru_cache for free memoization.
