Python Environment Variables & Configuration — Manage Secrets Like a Pro
Hardcoded secrets in source code are the #1 security mistake in Python projects. API keys in config.py, database passwords in settings.py, tokens pushed to GitHub — we've all seen it (or done it). This guide covers every level of configuration management, from basic os.environ to production secret managers.
The 12-Factor App Approach
The 12-factor methodology says: store config in environment variables. Why?
- Separation — config varies between environments (dev/staging/prod), code doesn't
- Security — env vars never end up in version control
- Portability — same Docker image runs anywhere, config comes from the environment
- Simplicity — no config file format wars (YAML vs TOML vs JSON vs INI)
Level 1: os.environ (stdlib)
Python's built-in approach. No dependencies, works everywhere.
import os
# Read with default
database_url = os.environ.get("DATABASE_URL", "sqlite:///local.db")
debug = os.environ.get("DEBUG", "false").lower() == "true"
port = int(os.environ.get("PORT", "8000"))
# Read required (raises KeyError if missing)
api_key = os.environ["API_KEY"]
# Safer: check and fail with a clear message
def require_env(name: str) -> str:
    """Return the value of a required environment variable.

    An unset variable and an empty string are both treated as missing,
    and produce a RuntimeError with guidance on where to set the value.
    """
    val = os.environ.get(name)
    if val:
        return val
    raise RuntimeError(
        f"Required environment variable '{name}' is not set. "
        f"Add it to your .env file or export it."
    )
secret_key = require_env("SECRET_KEY")
stripe_key = require_env("STRIPE_SECRET_KEY")
Level 2: python-dotenv
Load .env files into environment variables. The de facto standard for local development.
# Install
pip install python-dotenv
.env file
# .env — NEVER commit this file!
DATABASE_URL=postgresql://user:pass@localhost:5432/mydb
SECRET_KEY=your-secret-key-here
DEBUG=true
REDIS_URL=redis://localhost:6379/0
SMTP_HOST=smtp.gmail.com
SMTP_PORT=587
SMTP_USER=alerts@example.com
SMTP_PASSWORD=app-password-here
ALLOWED_HOSTS=localhost,127.0.0.1,example.com
Loading in Python
from dotenv import load_dotenv
import os
# Load .env file (looks in current directory and parents)
load_dotenv()
# Now os.environ has .env values
db_url = os.environ.get("DATABASE_URL")
debug = os.environ.get("DEBUG", "false").lower() == "true"
# Override existing env vars (useful for testing)
load_dotenv(override=True)
# Load a specific file
load_dotenv(".env.staging")
# Get values directly (without modifying os.environ)
from dotenv import dotenv_values
config = dotenv_values(".env")
print(config["DATABASE_URL"])
.gitignore (critical!)
# .gitignore
.env
.env.local
.env.*.local
*.pem
*.key
secrets/
.env.example (commit this)
# .env.example — Template for developers
# Copy to .env and fill in real values
DATABASE_URL=postgresql://user:password@localhost:5432/dbname
SECRET_KEY=generate-with-openssl-rand-hex-32
DEBUG=true
REDIS_URL=redis://localhost:6379/0
Level 3: Pydantic Settings (Recommended)
Type-safe, validated configuration with automatic env var loading. This is what you should use in any serious project, especially with FastAPI.
# Install
pip install pydantic-settings
# app/config.py
from pydantic_settings import BaseSettings, SettingsConfigDict
from pydantic import Field, field_validator
from functools import lru_cache
class Settings(BaseSettings):
    """Application settings — loaded from env vars automatically."""

    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        case_sensitive=False,  # DATABASE_URL or database_url both work
        extra="ignore",  # Ignore unknown env vars
    )

    # Database
    database_url: str = "sqlite:///./local.db"
    db_pool_size: int = 5
    db_max_overflow: int = 10

    # Security
    # secret_key has no default, so the app refuses to start without SECRET_KEY set.
    secret_key: str
    allowed_hosts: list[str] = ["localhost"]
    cors_origins: list[str] = ["http://localhost:3000"]

    # App
    debug: bool = False
    log_level: str = "INFO"
    port: int = 8000

    # External services
    redis_url: str = "redis://localhost:6379/0"
    smtp_host: str = ""
    smtp_port: int = 587
    smtp_user: str = ""
    smtp_password: str = ""

    # API keys (optional)
    stripe_secret_key: str = ""
    openai_api_key: str = ""

    @field_validator("log_level")
    @classmethod
    def validate_log_level(cls, v: str) -> str:
        """Normalize the level to upper case and reject unknown names."""
        valid = {"DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"}
        if v.upper() not in valid:
            raise ValueError(f"Invalid log level: {v}. Must be one of {valid}")
        return v.upper()

    @field_validator("allowed_hosts", "cors_origins", mode="before")
    @classmethod
    def split_comma_list(cls, v):
        """Turn 'a.com,b.com' into ['a.com', 'b.com']; pass lists through unchanged."""
        # mode="before" runs before pydantic's own parsing, so raw env strings arrive here.
        if isinstance(v, str):
            return [h.strip() for h in v.split(",")]
        return v

    @property
    def is_production(self) -> bool:
        """Treat any non-debug run as production."""
        return not self.debug

    @property
    def smtp_configured(self) -> bool:
        """True when both an SMTP host and user are set."""
        return bool(self.smtp_host and self.smtp_user)
# Singleton: parse env once, reuse everywhere
@lru_cache
def get_settings() -> Settings:
    """Return the process-wide Settings instance.

    lru_cache turns this into a lazy singleton: the environment is
    parsed on the first call and the same object is reused thereafter.
    """
    return Settings()
# Usage:
# from app.config import get_settings
# settings = get_settings()
# print(settings.database_url)
# print(settings.is_production)
- Auto-validates types — PORT=abc fails fast at startup with a clear validation error, instead of crashing mysteriously later when the value is first used
- Lists from comma strings — ALLOWED_HOSTS=a.com,b.com becomes ["a.com", "b.com"]
- Nested models — group related config with prefixes
- Works with FastAPI's dependency injection out of the box
Use with FastAPI
from fastapi import FastAPI, Depends
from app.config import Settings, get_settings
app = FastAPI()
@app.get("/info")
def info(settings: Settings = Depends(get_settings)):
    """Return non-sensitive runtime info; settings are injected by FastAPI via Depends."""
    return {
        "debug": settings.debug,
        "version": "1.0.0",
        "smtp_configured": settings.smtp_configured,
    }
Multi-environment setup
# Directory structure
project/
├── .env # Local dev (gitignored)
├── .env.example # Template (committed)
├── .env.test # Test overrides
├── config/
│ ├── .env.staging # Staging (gitignored or encrypted)
│ └── .env.production # Production (gitignored or encrypted)
└── app/
└── config.py
# Load the right file based on APP_ENV
import os
from pydantic_settings import BaseSettings, SettingsConfigDict
class Settings(BaseSettings):
    """Settings that load the env file selected by APP_ENV (default: development)."""

    # APP_ENV picks the file, e.g. APP_ENV=staging loads .env.staging.
    # NOTE: the f-string is evaluated once, when this class is defined —
    # changing APP_ENV afterwards has no effect in the same process.
    model_config = SettingsConfigDict(
        env_file=f".env.{os.getenv('APP_ENV', 'development')}",
        env_file_encoding="utf-8",
    )
    # ... fields ...
Level 4: Secret Managers (Production)
For production deployments, env vars in .env files aren't enough. Use a proper secret manager.
AWS Secrets Manager
import json
import boto3
from functools import lru_cache
@lru_cache
def get_secret(secret_name: str, region: str = "us-east-1") -> dict:
"""Fetch secret from AWS Secrets Manager (cached)."""
client = boto3.client("secretsmanager", region_name=region)
response = client.get_secret_value(SecretId=secret_name)
return json.loads(response["SecretString"])
# Usage
db_creds = get_secret("prod/database")
db_url = f"postgresql://{db_creds['username']}:{db_creds['password']}@{db_creds['host']}/{db_creds['dbname']}"
HashiCorp Vault
import hvac
def get_vault_secret(path: str, key: str) -> str:
    """Fetch a single value from HashiCorp Vault's KV v2 engine.

    Args:
        path: Secret path within the KV mount (e.g. "myapp/stripe").
        key: Key inside the secret's data payload to return.

    Raises:
        KeyError: if VAULT_ADDR/VAULT_TOKEN are not set in the
            environment, or ``key`` is absent from the secret.
    """
    # Local import so this snippet runs standalone — the surrounding
    # example only imports hvac, not os.
    import os

    client = hvac.Client(
        url=os.environ["VAULT_ADDR"],
        token=os.environ["VAULT_TOKEN"],
    )
    secret = client.secrets.kv.v2.read_secret_version(path=path)
    # KV v2 nests the payload one level deeper than v1: data -> data.
    return secret["data"]["data"][key]
# Usage
api_key = get_vault_secret("myapp/stripe", "secret_key")
SOPS (encrypted files in git)
# Encrypt secrets file with Mozilla SOPS
# Install: brew install sops
# Encrypt (uses AWS KMS, GCP KMS, or PGP)
sops --encrypt --in-place secrets.yaml
# Decrypt in Python
import subprocess
import yaml
def load_sops_file(path: str) -> dict:
    """Decrypt a SOPS-encrypted file and return the parsed YAML content.

    Shells out to the ``sops`` binary; raises CalledProcessError if
    decryption fails (check=True).
    """
    proc = subprocess.run(
        ["sops", "--decrypt", path],
        capture_output=True,
        text=True,
        check=True,
    )
    return yaml.safe_load(proc.stdout)
secrets = load_sops_file("secrets.yaml")
db_password = secrets["database"]["password"]
Level 5: Docker & Kubernetes
Docker Compose
# docker-compose.yml
services:
app:
build: .
env_file:
- .env # Base config
- .env.${APP_ENV:-dev} # Environment-specific overrides
environment:
- DATABASE_URL # Pass through from host
- SECRET_KEY=${SECRET_KEY:?SECRET_KEY is required} # Fail if missing
Kubernetes Secrets
# k8s/secret.yaml
apiVersion: v1
kind: Secret
metadata:
name: myapp-secrets
type: Opaque
stringData:
DATABASE_URL: postgresql://user:pass@db:5432/myapp
SECRET_KEY: your-production-secret
---
# k8s/deployment.yaml (excerpt)
spec:
containers:
- name: app
envFrom:
- secretRef:
name: myapp-secrets
- configMapRef:
name: myapp-config
For Docker deployment details, see our Docker guide.
Configuration Anti-Patterns
| ❌ Don't | ✅ Do |
|---|---|
| Hardcode secrets in code | Use env vars or secret manager |
| Commit .env to git | Commit .env.example, gitignore .env |
| Use string booleans without parsing | Use Pydantic Settings for type safety |
| Load config at import time | Use @lru_cache function |
| Different config systems per service | Standardize on one approach |
| Put secrets in Docker images | Inject at runtime via env or mounts |
| Log config values at startup | Log config keys, mask values |
| Share secrets across environments | Unique secrets per environment |
Safe Config Logging
import re
def mask_secret(value: str, visible: int = 4) -> str:
"""Mask a secret value, showing only last N chars."""
if len(value) <= visible:
return "****"
return "*" * (len(value) - visible) + value[-visible:]
def log_config(settings: dict, sensitive_keys: set[str] = None) -> dict:
"""Create a safe-to-log version of config."""
sensitive = sensitive_keys or {
"password", "secret", "token", "key", "api_key",
"private", "credential", "auth",
}
safe = {}
for k, v in settings.items():
if any(s in k.lower() for s in sensitive):
safe[k] = mask_secret(str(v)) if v else "(not set)"
else:
safe[k] = v
return safe
# Usage
import logging
logger = logging.getLogger(__name__)
config = {
"database_url": "postgresql://user:pass@localhost/db",
"secret_key": "super-secret-value-123",
"debug": True,
"stripe_api_key": "sk_live_abc123xyz",
}
safe_config = log_config(config)
logger.info(f"Config loaded: {safe_config}")
# Config loaded: {
# 'database_url': 'postgresql://user:pass@localhost/db',
# 'secret_key': '******************-123',
# 'debug': True,
# 'stripe_api_key': '*************3xyz'
# }
Complete Example: Multi-Environment App
# app/config.py — Production-ready configuration
from pydantic_settings import BaseSettings, SettingsConfigDict
from pydantic import Field, field_validator, SecretStr
from functools import lru_cache
from enum import Enum
import os
class Environment(str, Enum):
    """Deployment environments; the str mixin lets values compare equal to plain strings."""

    development = "development"
    staging = "staging"
    production = "production"
    testing = "testing"
class Settings(BaseSettings):
    """Production-ready application settings, loaded from the env file for APP_ENV."""

    # NOTE: env_file is chosen once, at class-definition time, from APP_ENV.
    model_config = SettingsConfigDict(
        env_file=f".env.{os.getenv('APP_ENV', 'development')}",
        env_file_encoding="utf-8",
        case_sensitive=False,
        extra="ignore",
    )

    # App
    app_env: Environment = Environment.development
    app_name: str = "MyApp"
    debug: bool = False
    port: int = 8000
    log_level: str = "INFO"

    # Database
    # database_url has no default — startup fails fast if DATABASE_URL is unset.
    database_url: str
    db_pool_size: int = Field(5, ge=1, le=50)
    db_echo: bool = False  # Log SQL queries

    # Security — use SecretStr to prevent accidental logging
    secret_key: SecretStr
    api_key: SecretStr = SecretStr("")

    # Redis
    redis_url: str = "redis://localhost:6379/0"
    cache_ttl: int = 300  # seconds

    # Email
    smtp_host: str = ""
    smtp_port: int = 587
    smtp_user: str = ""
    smtp_password: SecretStr = SecretStr("")

    @field_validator("log_level")
    @classmethod
    def validate_log_level(cls, v: str) -> str:
        """Normalize the level to upper case and reject unknown names."""
        valid = {"DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"}
        upper = v.upper()
        if upper not in valid:
            raise ValueError(f"Invalid log level: {v}")
        return upper

    @property
    def is_production(self) -> bool:
        """True when APP_ENV resolved to production."""
        return self.app_env == Environment.production

    @property
    def is_testing(self) -> bool:
        """True when APP_ENV resolved to testing."""
        return self.app_env == Environment.testing
@lru_cache
def get_settings() -> Settings:
    """Cached settings singleton.

    Parses the environment once, then enforces production-safety
    invariants with explicit raises. The original used ``assert``,
    which is silently stripped under ``python -O`` — these checks must
    never be optimized away.

    Raises:
        RuntimeError: if a production deployment is misconfigured.
    """
    settings = Settings()
    # Validate production requirements
    if settings.is_production:
        if settings.debug:
            raise RuntimeError("DEBUG must be False in production")
        if "sqlite" in settings.database_url:
            raise RuntimeError("Use PostgreSQL in production")
        if settings.secret_key.get_secret_value() == "dev-secret":
            raise RuntimeError("Change SECRET_KEY in production")
    return settings
🚀 Get 50+ production-ready Python scripts with proper configuration patterns built in.
Related Articles
- Build a REST API with FastAPI — uses Pydantic Settings for config
- Dockerize Python Apps — env vars in Docker and Compose
- Python Logging & Monitoring — log config safely
- Python Testing Guide — test configs with overrides
- Python Design Patterns — Singleton pattern for config
Need help setting up secure configuration for your Python project? I build APIs, automation tools, and infrastructure scripts. Reach out on Telegram →