Python Decorators Deep Dive — Beyond the Basics

March 2026 · 18 min read · Python, Metaprogramming, Advanced

You know @decorator syntax. But do you know how to build decorators that accept arguments? That work on both sync and async functions? That preserve type hints for your IDE? This guide takes you from "I kinda get decorators" to "I build production decorator libraries."

How Decorators Actually Work

A decorator is just a function that takes a function and returns a function. The @ syntax is sugar:

# These are identical:
@my_decorator
def greet():
    print("Hello")

# Is the same as:
def greet():
    print("Hello")
greet = my_decorator(greet)

Understanding this equivalence is the key to everything that follows.

The closure pattern

import functools

def simple_logger(fn):
    """Wrap *fn* so every call and its return value are printed."""
    @functools.wraps(fn)  # keep __name__, __doc__, __annotations__ intact
    def logged(*call_args, **call_kwargs):
        print(f"→ Calling {fn.__name__}({call_args}, {call_kwargs})")
        outcome = fn(*call_args, **call_kwargs)
        print(f"← {fn.__name__} returned {outcome!r}")
        return outcome
    return logged


@simple_logger
def add(a: int, b: int) -> int:
    """Add two numbers."""
    return a + b

# Note: positional args print as a tuple and kwargs as a dict,
# hence the nested parens in the log line below.
add(3, 4)
# → Calling add((3, 4), {})
# ← add returned 7

# Thanks to @functools.wraps:
print(add.__name__)  # "add" (not "wrapper")
print(add.__doc__)   # "Add two numbers."
🚨 Always use @functools.wraps(fn). Without it, the decorated function loses its name, docstring, and type annotations. This breaks help(), debugging, and documentation tools.

Parametrized Decorators (Decorators with Arguments)

This is where most tutorials lose people. When a decorator takes arguments, you need an extra layer of nesting:

import functools
import time


def retry(max_attempts: int = 3, delay: float = 1.0, exceptions: tuple = (Exception,)):
    """
    Retry decorator with configurable attempts, delay, and exception types.

    Waits grow with exponential backoff: delay, 2*delay, 4*delay, ...

    Args:
        max_attempts: total number of calls to attempt; must be >= 1.
        delay: base wait in seconds before the first retry.
        exceptions: exception types that trigger a retry; anything else
            propagates immediately.

    Raises:
        ValueError: at decoration time, if max_attempts < 1.

    Usage:
        @retry(max_attempts=5, delay=0.5)
        def flaky_call(): ...

        @retry()          # uses defaults
        def another(): ...
    """
    if max_attempts < 1:
        # Without this guard the loop below would never run and we would
        # execute `raise None` (an obscure TypeError) instead of a clear error.
        raise ValueError(f"max_attempts must be >= 1, got {max_attempts}")

    def decorator(fn):
        @functools.wraps(fn)
        def wrapper(*args, **kwargs):
            last_error = None
            for attempt in range(1, max_attempts + 1):
                try:
                    return fn(*args, **kwargs)
                except exceptions as e:
                    last_error = e
                    if attempt < max_attempts:
                        wait = delay * (2 ** (attempt - 1))  # exponential backoff
                        print(f"⚠ {fn.__name__} attempt {attempt}/{max_attempts} "
                              f"failed: {e}. Retrying in {wait:.1f}s...")
                        time.sleep(wait)
            # All attempts exhausted: re-raise the last captured error.
            raise last_error
        return wrapper
    return decorator


@retry(max_attempts=4, delay=0.5, exceptions=(ConnectionError, TimeoutError))
def fetch_data(url: str) -> dict:
    """Demo: fails ~70% of calls to show the retry/backoff behaviour."""
    import random
    if random.random() < 0.7:
        raise ConnectionError("Server unreachable")
    return {"status": "ok"}

The flexible decorator pattern

Want a decorator that works both with and without parentheses? (@deco and @deco(args))

import functools


def flexible_decorator(fn=None, *, prefix="LOG", include_args=True):
    """
    Logging decorator usable with or without parentheses:
    both @flexible_decorator and @flexible_decorator(prefix="DBG") work.
    """
    def apply(func):
        @functools.wraps(func)
        def logged(*call_args, **call_kwargs):
            detail = f"({call_args}, {call_kwargs})" if include_args else "()"
            print(f"[{prefix}] {func.__name__}{detail}")
            return func(*call_args, **call_kwargs)
        return logged

    # Bare usage (@flexible_decorator): fn is the decorated function itself.
    # Parenthesized usage (@flexible_decorator(...)): fn is None, so hand
    # back the real decorator for Python to apply.
    return apply(fn) if fn is not None else apply


@flexible_decorator
def task_a():
    # Decorated bare: defaults apply (prefix="LOG", include_args=True).
    return "done"

@flexible_decorator(prefix="DEBUG", include_args=False)
def task_b(x, y):
    # Decorated with arguments: custom prefix, args suppressed in the log.
    return x + y

task_a()       # [LOG] task_a((), {})
task_b(1, 2)   # [DEBUG] task_b()

Class-Based Decorators

For complex decorators with state, use a class with __call__:

import functools
import time
from collections import defaultdict


class RateLimiter:
    """
    Sliding-window rate limiter, applied as a decorator.

    Each call is bucketed under a key — derived via *key_fn* from the call
    arguments, or "global" when no key_fn is given. At most *calls*
    invocations are allowed per key within any *period*-second window.

    Usage:
        @RateLimiter(calls=5, period=60)
        def api_call(user_id: str): ...
    """

    def __init__(self, calls: int = 10, period: float = 60.0, key_fn=None):
        self.calls = calls
        self.period = period
        self.key_fn = key_fn  # derives the bucket key from the call arguments
        self._timestamps: dict[str, list[float]] = defaultdict(list)

    def __call__(self, fn):
        @functools.wraps(fn)
        def guarded(*args, **kwargs):
            # Pick the bucket this call counts against.
            bucket = self.key_fn(*args, **kwargs) if self.key_fn else "global"

            now = time.time()
            # Keep only timestamps still inside the window.
            recent = [
                stamp for stamp in self._timestamps[bucket]
                if now - stamp < self.period
            ]
            self._timestamps[bucket] = recent

            if len(recent) >= self.calls:
                retry_in = self.period - (now - recent[0])
                raise RuntimeError(
                    f"Rate limit exceeded for '{bucket}'. "
                    f"Try again in {retry_in:.1f}s"
                )

            # `recent` is the list stored in the dict, so this records the call.
            recent.append(now)
            return fn(*args, **kwargs)

        guarded.limiter = self  # expose for testing/reset
        return guarded


# NOTE: key_fn receives *all* of the decorated function's arguments, so it
# must accept the full signature. The original one-argument lambda
# (lambda user_id: user_id) raised "takes 1 positional argument but 2 were
# given" on the very first call.
@RateLimiter(calls=3, period=10, key_fn=lambda user_id, text: user_id)
def send_message(user_id: str, text: str):
    print(f"📨 {user_id}: {text}")

send_message("alice", "Hello!")     # ✓
send_message("alice", "How are you?")  # ✓
send_message("bob", "Hi!")          # ✓ (different key)
send_message("alice", "Again!")     # ✓
# send_message("alice", "Too many!") # ✗ RuntimeError: Rate limit exceeded

Async-Aware Decorators

Modern Python code mixes sync and async. Your decorators should handle both. See our async programming guide for more on asyncio.

import functools
import asyncio
import inspect
import time


def timing(fn):
    """
    Print how long each call to *fn* takes; supports both sync and
    async functions by picking the matching wrapper at decoration time.
    """
    if inspect.iscoroutinefunction(fn):
        @functools.wraps(fn)
        async def timed_async(*args, **kwargs):
            t0 = time.perf_counter()
            value = await fn(*args, **kwargs)
            print(f"⏱ {fn.__name__}: {time.perf_counter() - t0:.3f}s (async)")
            return value
        return timed_async

    @functools.wraps(fn)
    def timed(*args, **kwargs):
        t0 = time.perf_counter()
        value = fn(*args, **kwargs)
        print(f"⏱ {fn.__name__}: {time.perf_counter() - t0:.3f}s")
        return value
    return timed


@timing
def compute(n: int) -> int:
    # CPU-bound demo: timed by the sync wrapper.
    return sum(i * i for i in range(n))

@timing
async def fetch(url: str) -> str:
    # I/O-bound demo: the async wrapper awaits the coroutine, so the
    # measured time includes the simulated network delay.
    await asyncio.sleep(0.1)  # simulate network
    return f"data from {url}"

compute(1_000_000)  # ⏱ compute: 0.045s

# asyncio.run(fetch("https://api.example.com"))
# ⏱ fetch: 0.101s (async)

Validation Decorators

Enforce constraints on function inputs without cluttering the function body:

import functools
import inspect


def validate(**validators):
    """
    Check named arguments against predicate functions before calling.

    Each keyword maps a parameter name to a callable that returns truthy
    for acceptable values; a falsy result raises ValueError.

    Usage:
        @validate(age=lambda x: 0 < x < 150, name=lambda x: len(x) > 0)
        def create_user(name: str, age: int): ...
    """
    def decorator(fn):
        sig = inspect.signature(fn)  # resolved once, at decoration time

        @functools.wraps(fn)
        def checked(*args, **kwargs):
            # Map positional and keyword arguments onto parameter names.
            bound = sig.bind(*args, **kwargs)
            bound.apply_defaults()
            arguments = bound.arguments

            for name, predicate in validators.items():
                if name not in arguments:
                    continue
                candidate = arguments[name]
                if not predicate(candidate):
                    raise ValueError(
                        f"Validation failed for '{name}': "
                        f"got {candidate!r}"
                    )
            return fn(*args, **kwargs)
        return checked
    return decorator


def type_check(fn):
    """Enforce type annotations at runtime."""
    hints = fn.__annotations__
    sig = inspect.signature(fn)

    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        bound = sig.bind(*args, **kwargs)
        bound.apply_defaults()

        for name, value in bound.arguments.items():
            if name in hints and hints[name] is not inspect.Parameter.empty:
                expected = hints[name]
                if not isinstance(value, expected):
                    raise TypeError(
                        f"'{name}' expected {expected.__name__}, "
                        f"got {type(value).__name__}"
                    )

        result = fn(*args, **kwargs)

        if "return" in hints:
            expected = hints["return"]
            if not isinstance(result, expected):
                raise TypeError(
                    f"Return expected {expected.__name__}, "
                    f"got {type(result).__name__}"
                )
        return result
    return wrapper


@validate(
    age=lambda x: 0 < x < 150,
    email=lambda x: "@" in x and "." in x,
)
@type_check
def register(name: str, email: str, age: int) -> dict:
    """Demo: @validate (outermost) runs first, then @type_check."""
    return {"name": name, "email": email, "age": age}

register("Alice", "alice@example.com", 30)  # ✓
# register("Bob", "invalid", 25)            # ValueError: Validation failed
# register("Eve", "eve@test.com", "old")    # TypeError: 'age' expected int
# NOTE(review): with this stacking order, the third example's TypeError
# actually comes from the age validator's comparison (0 < "old"), not from
# @type_check — swap the decorator order to get the "'age' expected int"
# message. Verify before relying on the exact message.

Plugin Registry Decorator

Build extensible systems where plugins register themselves:

class PluginRegistry:
    """Auto-register plugins with decorators."""

    def __init__(self):
        self._plugins: dict[str, type] = {}

    def register(self, name: str = None):
        """Decorator to register a plugin class."""
        def decorator(cls):
            plugin_name = name or cls.__name__.lower()
            if plugin_name in self._plugins:
                raise ValueError(f"Plugin '{plugin_name}' already registered")
            self._plugins[plugin_name] = cls
            return cls
        return decorator

    def get(self, name: str):
        if name not in self._plugins:
            available = ", ".join(self._plugins.keys())
            raise KeyError(f"Unknown plugin: '{name}'. Available: {available}")
        return self._plugins[name]

    def create(self, name: str, **kwargs):
        cls = self.get(name)
        return cls(**kwargs)

    def list(self) -> list[str]:
        return list(self._plugins.keys())


# --- Usage ---
exporters = PluginRegistry()  # shared registry that the exporter classes below register into


@exporters.register("csv")
class CsvExporter:
    """Render a list of dicts as delimited text: one header row, then data rows."""

    def __init__(self, delimiter=","):
        self.delimiter = delimiter

    def export(self, data: list[dict]) -> str:
        if not data:
            return ""
        # Column order comes from the first row; missing keys render empty.
        columns = list(data[0].keys())
        join = self.delimiter.join
        rows = [join(columns)]
        rows.extend(
            join(str(record.get(col, "")) for col in columns)
            for record in data
        )
        return "\n".join(rows)


@exporters.register("json")
class JsonExporter:
    """Render a list of dicts as pretty-printed JSON."""

    def __init__(self, indent=2):
        self.indent = indent

    def export(self, data: list[dict]) -> str:
        import json  # local import, matching the original's lazy dependency
        serialized = json.dumps(data, indent=self.indent)
        return serialized


@exporters.register("markdown")
class MarkdownExporter:
    """Render a list of dicts as a Markdown table (no outer pipes)."""

    def export(self, data: list[dict]) -> str:
        if not data:
            return ""
        # Column order comes from the first row; missing keys render empty.
        columns = list(data[0].keys())
        header = " | ".join(columns)
        separator = " | ".join(["---"] * len(columns))
        body = [
            " | ".join(str(record.get(col, "")) for col in columns)
            for record in data
        ]
        return "\n".join([header, separator, *body])


# Dynamic dispatch
data = [{"name": "Alice", "score": 95}, {"name": "Bob", "score": 87}]

# Render the same dataset through every registered exporter.
for fmt in exporters.list():
    exporter = exporters.create(fmt)
    print(f"\n--- {fmt} ---")
    print(exporter.export(data))

Memoization with LRU Cache

Python's standard library includes production-grade memoization:

from functools import lru_cache, cache
import time


# @cache — unlimited cache (Python 3.9+), same as @lru_cache(maxsize=None)
@cache
def fibonacci(n: int) -> int:
    """Return the n-th Fibonacci number (memoized recursion)."""
    return n if n < 2 else fibonacci(n - 1) + fibonacci(n - 2)

start = time.perf_counter()
result = fibonacci(100)
print(f"fib(100) = {result} in {time.perf_counter() - start:.6f}s")
# fib(100) = 354224848179261915075 in 0.000042s  (timing is machine-dependent)

print(fibonacci.cache_info())
# CacheInfo(hits=98, misses=101, maxsize=None, currsize=101)


# @lru_cache — bounded cache with eviction
@lru_cache(maxsize=256)
def expensive_query(user_id: int, include_details: bool = False) -> dict:
    """Pretend to hit a database: sleep 10ms, then return a row dict."""
    time.sleep(0.01)  # simulated query latency
    row = {"id": user_id, "details": include_details}
    return row

# First call: slow (cache miss)
expensive_query(42, include_details=True)

# Second call: instant (cache hit)
# NOTE: lru_cache keys on the exact call shape — expensive_query(42, True)
# (positional) is a *separate* cache entry from this keyword call.
expensive_query(42, include_details=True)

# Clear cache when data changes
expensive_query.cache_clear()
⚠️ Cache gotcha: @lru_cache uses arguments as cache keys. Mutable arguments (lists, dicts) will raise TypeError. Convert to tuples or frozensets first, or build a custom cache like the one in our design patterns guide.

Decorator Stacking — Order Matters

# Decorators apply bottom-up (closest to function first):
@timing          # 3rd: wraps the retried+cached function
@retry(3)        # 2nd: wraps the cached function
@cache           # 1st: wraps the original function
def fetch_user(user_id: int):
    ...

# Equivalent to:
# fetch_user = timing(retry(3)(cache(fetch_user)))

# Execution order is top-down:
# 1. timing starts timer
# 2. retry catches errors
# 3. cache checks/stores result
# 4. original function runs (on cache miss)
# Note: on a cache hit the call still passes through the timing and retry
# wrappers (they are outermost) — only the original body is skipped.

Debugging Decorated Functions

# Problem: decorators hide the original function
# Problem: decorators hide the original function
@retry(max_attempts=3, delay=0.1)
def process(data):
    return data

# functools.wraps preserves metadata, but you might need the original:
print(process.__name__)      # "process" ✓
print(process.__wrapped__)   # <function process at 0x...> (the original!)

# Call the original (skip decorator):
# (fixed: the snippet previously referenced an undefined name `data` here)
process.__wrapped__({"key": "value"})

# In pytest, you can test the unwrapped function:
def test_process_logic():
    result = process.__wrapped__({"key": "value"})
    assert result == {"key": "value"}

Production Decorator Checklist

- Apply @functools.wraps to every wrapper so __name__, __doc__, annotations, and __wrapped__ survive.
- Decide sync vs. async at decoration time (inspect.iscoroutinefunction) if your codebase mixes both.
- Validate decorator arguments eagerly, at decoration time, so misuse fails fast and close to the source.
- Remember that key/extractor callbacks receive the decorated function's full argument list.
- Keep shared decorator state (counters, timestamps, caches) documented — and thread-safe if callers need it.
- Expose the original function via __wrapped__ for testing and debugging.

🚀 Production-ready decorators, utility functions, and automation scripts?

Get the AI Agent Toolkit →

Related Articles

Need advanced Python tools or custom decorators for your project? Reach out on Telegram →