# src/structum_lab.plugins.dynaconf/provider.py
# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: 2025 PythonWoods
"""
Provider Structum basato su Dynaconf.
Espone l'interfaccia ConfigInterface al core, utilizzando la libreria interna
per costruire modelli Pydantic validati e fornire accesso dot-notated.
"""
from __future__ import annotations
import json
import logging
from collections.abc import Generator
from contextlib import contextmanager
from typing import TYPE_CHECKING, Any
from structum_lab.config import ConfigInterface
from structum_lab.plugins.dynaconf.core.builders import AbstractConfigBuilder
from structum_lab.plugins.dynaconf.core.exceptions import ConfigurationError
from structum_lab.plugins.dynaconf.core.manager import ConfigManager
from structum_lab.plugins.dynaconf.features.health import (
ConfigFileIntegrityCheck,
HealthCheckRegistry,
HealthCheckResult,
PydanticValidationCheck,
)
from structum_lab.plugins.dynaconf.features.locks import ReadWriteLock
from structum_lab.plugins.dynaconf.features.transactions import ConfigTransaction
if TYPE_CHECKING:
from structum_lab.plugins.dynaconf.features.watcher import HotReloadManager
log = logging.getLogger(__name__)
[docs]
class DynaconfConfigProvider(ConfigInterface):
"""
Advanced configuration provider built on Dynaconf with Pydantic validation.
Acts as adapter between Structum's key-value :class:`ConfigInterface` and
strongly-typed Pydantic models, providing:
- **Auto-discovery**: Scans ``config/app/*.toml`` and matches with ``config/models/*.py``
- **Type safety**: Pydantic validation on all config updates
- **Hot reload**: Optional file watching with automatic reload
- **Transactions**: Atomic multi-key updates
- **Health checks**: Configuration integrity and validation monitoring
- **Smart caching**: Multi-level cache with invalidation
- **Metrics**: Prometheus-compatible operation metrics
Architecture:
The provider maintains two representations:
1. **Pydantic Models** (source of truth) - Strongly typed, validated
2. **Flat Dictionary** (performance cache) - Fast dot-notation access
All updates flow through Pydantic validation before being persisted.
Configuration Flow::
TOML Files → Dynaconf → Pydantic Models → Flat Cache → get()
↓
Validation Errors
↓
ConfigurationError
Example:
Basic usage with auto-discovery::
from structum_lab.plugins.dynaconf import DynaconfConfigProvider
# Initialize provider
config = DynaconfConfigProvider(
root_path=".",
env_prefix="MYAPP",
enable_hot_reload=True
)
# Auto-discover config files
config.auto_discover(
app_dir="config/app",
models_dir="config/models"
)
# Access configuration
db_host = config.get("database.host", "localhost")
db_port = config.get("database.port", 5432)
# Update with validation
config.set("database.port", 5433) # Validates against Pydantic model
config.save() # Persist to ~/.structum/
With manual builder registration::
from structum_lab.plugins.dynaconf.core.builders import GenericConfigBuilder
from myapp.config_models import DatabaseConfig
# Register typed configuration
builder = GenericConfigBuilder(
name="database",
files=["config/database.toml"],
model=DatabaseConfig # Pydantic model
)
config.register_builder("database", builder)
# Access is now type-safe
port = config.get("database.port") # Validated as int
Atomic transactions::
# Multiple updates atomically
with config.transaction() as tx:
tx.set("api.rate_limit", 1000)
tx.set("api.timeout_seconds", 30)
# All changes applied together on commit
Implementations:
This is the primary implementation of :class:`~structum_lab.config.ConfigInterface`
for Structum Lab applications.
Note:
- Thread-safe with read-write locks
- Supports environment variable overrides (``MYAPP_DATABASE_PORT``)
- Hot reload requires ``watchdog`` package
- Persistence files stored in ``~/.structum/``
Warning:
Do not modify ``_flat_config`` or ``_manager`` directly. Always use
public methods (``get``, ``set``, ``transaction``) to ensure validation
and cache coherence.
See Also:
:class:`~structum_lab.config.ConfigInterface`: Base protocol
:class:`~structum_lab.plugins.dynaconf.core.builders.AbstractConfigBuilder`: Custom builders
:class:`~structum_lab.plugins.dynaconf.features.transactions.ConfigTransaction`: Transaction API
"""
[docs]
def __init__(
    self,
    root_path: str = ".",
    env_prefix: str = "STRUCTUM",
    environments: bool = True,
    active_env: str = "development",
    enable_hot_reload: bool = False,
    enable_cache: bool = True,
) -> None:
    """
    Initialize the configuration provider.

    Sets up thread-safe locking, the configuration manager, health checks,
    optional smart caching, and operation metrics. No configuration is
    loaded here: loading begins when :meth:`register_builder` or
    :meth:`auto_discover` is called.

    Args:
        root_path (str): Root directory for resolving configuration file
            paths. Relative paths in builders are resolved from here.
        env_prefix (str): Prefix for environment-variable overrides, e.g.
            ``"MYAPP"`` enables ``MYAPP_DATABASE_PORT`` to override
            ``database.port``.
        environments (bool): Enable multi-environment TOML sections
            (``[production]``, ``[development]``) selected via ``active_env``.
        active_env (str): Active environment name when ``environments=True``.
        enable_hot_reload (bool): Watch configuration files and reload
            automatically. Requires the ``watchdog`` package; the watcher
            actually starts when the first builder is registered.
        enable_cache (bool): Enable the smart cache for ``get()`` lookups.

    Raises:
        ImportError: If ``enable_hot_reload=True`` but ``watchdog`` is not
            installed (raised when hot reload actually starts).

    Example:
        Development setup with hot reload::

            config = DynaconfConfigProvider(
                root_path="/app",
                env_prefix="MYAPP",
                active_env="development",
                enable_hot_reload=True,
            )

    Note:
        - The cache is populated lazily on first ``get()`` call.
        - Health checks are registered automatically.
    """
    self._root_path = root_path
    self._env_prefix = env_prefix
    self._environments = environments
    self._active_env = active_env
    self._rw_lock = ReadWriteLock()
    self._manager = ConfigManager()
    self._flat_config: dict[str, Any] = {}
    self._hot_reload: HotReloadManager | None = None
    # Setup Smart Cache (lazy import keeps the feature optional)
    if enable_cache:
        from structum_lab.plugins.dynaconf.features.cache import SmartCache
        self._cache: SmartCache | None = SmartCache()
    else:
        self._cache = None
    # Setup Metrics
    from structum_lab.monitoring import get_metrics
    self._metrics = get_metrics("structum.config")
    # Setup Health Checks
    self._health_registry = HealthCheckRegistry()
    self._setup_health_checks()
    # Setup Hot Reload (flag only, actual start in enable_hot_reload)
    self._enable_hot_reload_flag = enable_hot_reload
    # Note: no reload() here because no builders are registered yet.
def _setup_health_checks(self) -> None:
    """
    Install the default health checks on the internal registry.

    Registers the standard monitors: TOML file-integrity verification
    and Pydantic validation of the loaded models. Called once from
    ``__init__``; extra checks can be added later through
    ``_health_registry.register()``.

    See Also:
        :meth:`health_check`: Execute all registered health checks.
    """
    default_checks = (
        ConfigFileIntegrityCheck(self._manager),
        PydanticValidationCheck(self._manager),
    )
    for check in default_checks:
        self._health_registry.register(check)
[docs]
def health_check(self) -> dict[str, HealthCheckResult]:
    """
    Run every registered health check and return the results.

    Thread-safe, read-only operation suitable for readiness probes,
    monitoring dashboards and diagnostics; does not modify any
    configuration state.

    Returns:
        dict[str, HealthCheckResult]: Mapping of check name to result
        (status, message and metrics for each check).
    """
    with self._rw_lock.read_lock():
        results = self._health_registry.run_all()
    return results
[docs]
def register_builder(
    self,
    name: str,
    builder: type[AbstractConfigBuilder] | AbstractConfigBuilder,
) -> None:
    """
    Register a configuration builder under a namespace.

    The configuration is loaded and validated immediately so the flat
    dot-notation cache is populated; load failures (e.g. a missing file)
    are logged as warnings instead of blocking startup. When hot reload
    was requested at construction time, it is started together with the
    very first registered builder.

    Args:
        name (str): Configuration namespace (e.g. ``"database"``), used
            as prefix for all keys (``database.host``).
        builder: Builder class (instantiated internally) or a
            pre-configured builder instance.

    Raises:
        ConfigurationError: If builder registration itself fails.

    Note:
        Thread-safe write operation.
    """
    with self._rw_lock.write_lock():
        self._manager.register_builder(name, builder)
        try:
            # Loading triggers Pydantic validation; the resulting model
            # feeds the flat key/value map used by get().
            loaded_model = self._manager.get_config(name)
            self._flatten_model(name, loaded_model)
        except Exception as e:
            # A missing/invalid config file must not abort startup.
            log.warning(f"Warning loading configuration '{name}' during registration: {e}")
        # Start hot reload once, on the first builder.
        # NOTE(review): kept inside the write lock as in the flattened
        # original; confirm enable_hot_reload() does not re-acquire it.
        if self._enable_hot_reload_flag and len(self._manager._builders) == 1:
            try:
                self.enable_hot_reload()
            except Exception as e:
                log.warning(f"Failed to enable hot reload: {e}")
def _flatten_model(self, prefix: str, obj: Any) -> None:
"""
Recursively flatten Pydantic model to dot-notation dict[str, Any].
Converts nested Pydantic models into flat key-value pairs for fast lookup.
Example: ``backend.db.port`` = 5432.
Args:
prefix (str): Current key prefix (namespace).
obj (Any): Pydantic model or dict[str, Any] to flatten.
Note:
Populates ``self._flat_config`` dict[str, Any]. Supports Pydantic v1 and v2.
"""
# Supporto Pydantic V2 e V1
if hasattr(obj, "model_dump"):
obj_dict = obj.model_dump()
obj_dict = obj if isinstance(obj, dict) else {}
for key, value in obj_dict.items():
full_key = f"{prefix}.{key}" if prefix else key
# Se il valore è un modello annidato o un dict[str, Any], scendi ricorsivamente
if hasattr(value, "model_dump") or hasattr(value, "dict") or isinstance(value, dict):
self._flatten_model(full_key, value)
else:
self._flat_config[full_key] = value
def _rebuild_flat_config(self) -> None:
"""
Rebuild flat key-value cache from all Pydantic models.
Clears and repopulates ``self._flat_config`` by flattening all
registered configuration namespaces. Called after :meth:`reload`.
Note:
Acquires write lock. Errors are logged but don't block.
"""
with self._rw_lock.write_lock():
self._flat_config.clear()
# Itera su tutti i builder conosciuti dal manager
# Nota: Accediamo a _builders che è interno, ma siamo nello stesso package
for config_name in self._manager._builders.keys():
try:
model = self._manager.get_config(config_name)
self._flatten_model(config_name, model)
except Exception as e:
log.error(f"Critical error rebuilding config '{config_name}': {e}")
[docs]
@contextmanager
def transaction(self) -> Generator[ConfigTransaction, None, None]:
    """
    Atomic transaction context for multiple configuration updates.

    Groups several ``set`` operations into a single unit: all changes are
    committed together when the block exits normally, and rolled back if
    any exception escapes the block.

    Yields:
        ConfigTransaction: Transaction object exposing ``set()``.

    Raises:
        ConfigurationError: If any validation fails during commit.
        Exception: Any exception raised inside the block is re-raised
            after the automatic rollback.

    Example:
        Atomic multi-key update::

            with config.transaction() as tx:
                tx.set("api.rate_limit", 1000)
                tx.set("api.timeout_seconds", 30)
            # All changes committed atomically here

    Note:
        - A single write lock is held for the entire transaction.
        - The configuration is saved on successful commit.

    See Also:
        :meth:`set`: Single value updates.
        :class:`~structum_lab.plugins.dynaconf.features.transactions.ConfigTransaction`: Transaction implementation.
    """
    tx = ConfigTransaction(self)
    try:
        yield tx
        # Normal exit: apply all staged changes at once.
        tx.commit()
    except Exception:
        # Any failure inside the block undoes the staged changes.
        tx.rollback()
        raise
[docs]
def load(self, name: str, file_path: str) -> None:
    """
    Load a single TOML file into a namespace without a Pydantic model.

    Convenience wrapper that registers a model-less
    :class:`GenericConfigBuilder`, so values are taken from the file
    as-is with no validation. For strict typing, use
    :meth:`register_builder` with a Pydantic model instead.

    Args:
        name (str): Configuration namespace (e.g. ``"payment"``).
        file_path (str): Path to the TOML file, relative to ``root_path``.

    Warning:
        No Pydantic validation is performed on the loaded values.
    """
    from structum_lab.plugins.dynaconf.core.builders import GenericConfigBuilder

    self.register_builder(name, GenericConfigBuilder(name, [file_path]))
[docs]
def auto_discover(
    self, app_dir: str = "config/app", models_dir: str = "config/models"
) -> list[str]:
    """
    Automatically discover and register configuration files.

    Scans ``{root_path}/{app_dir}/*.toml`` and, for each TOML file,
    looks for a matching Pydantic model in
    ``{root_path}/{models_dir}/<name>.py``. If a model file exists, it
    is imported dynamically and the first user-defined ``BaseModel``
    subclass is used for strict validation; otherwise the namespace is
    loaded dynamically without validation.

    Args:
        app_dir (str): Relative directory containing TOML files.
            Defaults to ``"config/app"``.
        models_dir (str): Relative directory containing Pydantic models.
            Defaults to ``"config/models"``.

    Returns:
        list[str]: Discovered namespace names (e.g. ``["database", "api"]``).

    Warning:
        - Hidden files (starting with ``.``) are skipped.
        - Already-registered namespaces are skipped.
        - Model import errors fall back to dynamic mode (logged as errors).

    See Also:
        :meth:`register_builder`: Manual builder registration.
        :meth:`load`: Load a single file without discovery.
    """
    import importlib.util
    import inspect
    from pathlib import Path

    from pydantic import BaseModel  # type: ignore[import-not-found]

    from structum_lab.plugins.dynaconf.core.builders import GenericConfigBuilder

    log.info(f"Starting configuration auto-discovery in {self._root_path}...")
    discovered_namespaces = []
    # Resolve absolute paths up front.
    root = Path(self._root_path).resolve()
    config_app_path = root / app_dir
    models_path = root / models_dir
    if not config_app_path.exists():
        log.warning(f"Auto-discovery skipped: Directory '{config_app_path}' not found.")
        return []
    for file_path in config_app_path.glob("*.toml"):
        if file_path.name.startswith("."):
            continue
        config_name = file_path.stem
        if config_name in self._manager._builders:
            log.debug(f"Skipping auto-discovery for '{config_name}': already registered.")
            continue
        # Look for a matching Pydantic model file.
        model_class = None
        model_file = models_path / f"{config_name}.py"
        mode_label = "Dynamic"
        if model_file.exists():
            try:
                # Dynamic import of the model module via importlib.
                spec = importlib.util.spec_from_file_location(
                    f"config.models.{config_name}", model_file
                )
                if spec and spec.loader:
                    module = importlib.util.module_from_spec(spec)
                    spec.loader.exec_module(module)
                    # Find the first user-defined BaseModel subclass
                    # declared in this module (not re-exported ones).
                    for name, obj in inspect.getmembers(module):
                        if (
                            inspect.isclass(obj)
                            and issubclass(obj, BaseModel)
                            and obj is not BaseModel
                            and obj.__module__ == module.__name__
                        ):
                            model_class = obj
                            mode_label = f"Strict ({name})"
                            break
            except Exception as e:
                log.error(
                    f"Failed to load model for '{config_name}': {e}. Falling back to Dynamic."
                )
        # Register the builder.
        # Compute the path relative to root, because the builder expects
        # paths relative to root (or absolute).
        try:
            relative_path = file_path.relative_to(root)
        except ValueError:
            # Fallback to absolute if the file is outside root.
            relative_path = file_path
        builder = GenericConfigBuilder(
            name=config_name,
            files=[str(relative_path)],
            model=model_class,
            env_prefix=self._env_prefix,  # Pass the global env_prefix through
        )
        self.register_builder(config_name, builder)
        log.info(f"Auto-discovered: {config_name} [{mode_label}] -> {relative_path}")
        discovered_namespaces.append(config_name)
    log.info(f"Auto-discovery completed. Loaded {len(discovered_namespaces)} namespaces.")
    return discovered_namespaces
# --- ConfigInterface Implementation ---
[docs]
def get(self, key: str, default: Any = None) -> Any:
    """
    Retrieve a configuration value by dot-notation key.

    Cache-first lookup: a smart-cache hit returns without taking any
    lock; on a miss the flat config map is read under the read lock and
    the cache is populated. Operation counters and timings are emitted
    in every case (hit, miss, disabled, error).

    Args:
        key (str): Dot-notation key (e.g. ``"database.host"``).
        default (Any): Value returned when the key is absent.

    Returns:
        Any: The configured value, or ``default`` if the key is missing.

    Note:
        A cached ``None`` is indistinguishable from a cache miss in the
        ``cached is not None`` check, so keys whose value is ``None``
        are never served from the cache (they still resolve correctly
        from the flat map).

    See Also:
        :meth:`set`: Update configuration values.
        :meth:`has`: Check key existence.
    """
    import time

    start = time.perf_counter()
    try:
        # Cache lookup first (no lock needed).
        if self._cache:
            cached = self._cache.get(key)
            if cached is not None:
                duration = time.perf_counter() - start
                self._metrics.increment(
                    "operations.total",
                    tags={"operation": "get", "status": "success", "cache": "hit"},
                )
                self._metrics.timing("operation.duration", duration, tags={"operation": "get"})
                return cached
        # Cache miss - read from flat_config under the read lock.
        with self._rw_lock.read_lock():
            value = self._flat_config.get(key, default)
        # Populate cache (only when the key actually resolved).
        # NOTE(review): done after releasing the read lock; assumes
        # SmartCache is internally synchronized — confirm.
        if self._cache and value is not default:
            self._cache.set(key, value)
        duration = time.perf_counter() - start
        cache_status = "miss" if self._cache else "disabled"
        self._metrics.increment(
            "operations.total",
            tags={"operation": "get", "status": "success", "cache": cache_status},
        )
        self._metrics.timing("operation.duration", duration, tags={"operation": "get"})
        return value
    except Exception:
        duration = time.perf_counter() - start
        self._metrics.increment(
            "operations.total",
            tags={"operation": "get", "status": "error", "cache": "unknown"},
        )
        self._metrics.timing("operation.duration", duration, tags={"operation": "get"})
        raise
[docs]
def set(self, key: str, value: Any) -> None:
    """
    Update a configuration value with Pydantic validation.

    Applies the change to both the Pydantic model (triggering runtime
    validation) and the flat cache, then invalidates the smart cache for
    the key and every parent prefix (setting ``backend.db.port`` also
    invalidates ``backend.`` and ``backend.db.``). Changes are in-memory
    only: call :meth:`save` explicitly to persist, or use
    :meth:`transaction` for atomic multi-key updates.

    Args:
        key (str): Dot-notation key (e.g. ``"database.port"``).
        value (Any): New value; must pass Pydantic model validation.

    Raises:
        ConfigurationError: If the key is malformed, the namespace is not
            registered, or the value fails validation.

    Note:
        Fix: the metric tags previously used the corrupted literal
        ``"set[Any]"``; they now use ``"set"``, consistent with the
        ``"get"`` tags emitted by :meth:`get`.
    """
    import time

    start = time.perf_counter()
    try:
        with self._rw_lock.write_lock():
            self._set_internal(key, value)
            # Invalidate cache for this key and related prefixes.
            if self._cache:
                self._cache.invalidate(key)
                # If modifying "backend.db.host", also invalidate
                # "backend." and "backend.db." prefix entries.
                parts = key.split(".")
                for i in range(len(parts) - 1):
                    prefix = ".".join(parts[: i + 1]) + "."
                    self._cache.invalidate_prefix(prefix)
        # Persistence is explicit via save(); no auto-save here.
        duration = time.perf_counter() - start
        self._metrics.increment(
            "operations.total",
            tags={"operation": "set", "status": "success", "cache": "n/a"},
        )
        self._metrics.timing("operation.duration", duration, tags={"operation": "set"})
    except Exception:
        duration = time.perf_counter() - start
        self._metrics.increment(
            "operations.total",
            tags={"operation": "set", "status": "error", "cache": "n/a"},
        )
        self._metrics.timing("operation.duration", duration, tags={"operation": "set"})
        raise
[docs]
def set_atomic(self, key: str, value: Any) -> None:
    """
    Set a value without acquiring the write lock (internal use).

    Used by :class:`ConfigTransaction`, which already holds the write
    lock for the whole transaction. Applies the change and persists it
    immediately via :meth:`save`.

    Args:
        key (str): Configuration key.
        value (Any): New value.

    Warning:
        The caller MUST already hold the write lock. Not intended for
        direct use by application code.

    See Also:
        :meth:`set`: Public, lock-acquiring variant.
        :meth:`transaction`: Atomic multi-updates.
    """
    # We assume the caller holds the write lock.
    self._set_internal(key, value)
    # NOTE(review): saving here means one disk write per transactional
    # set; a single save on commit would be cheaper — confirm intent.
    self.save()
def _set_internal(self, key: str, value: Any) -> None:
"""
Internal set[Any] implementation without locking.
Updates both flat cache and Pydantic model with validation.
Caller must hold write lock.
Args:
key (str): Dot-notation key (must be ``scope.key`` format).
value (Any): New value (validated against Pydantic model).
Raises:
ConfigurationError: If key invalid or validation fails.
Note:
Called by :meth:`set[Any]` and :meth:`set_atomic`.
"""
parts = key.split(".")
if len(parts) < 2:
raise ConfigurationError(f"Key must be 'scope.key' (e.g. backend.port). Got: {key}")
# 1. Update fast cache
self._flat_config[key] = value
# 2. Update Pydantic model (with runtime validation!)
top_scope = parts[0] # e.g. "backend"
# Protected access to manager builders
if top_scope not in self._manager._builders:
raise ConfigurationError(f"Scope '{top_scope}' not registered.")
model = self._manager.get_config(top_scope)
try:
self._update_model(model, parts[1:], value)
except Exception as e:
log.error(f"Pydantic validation failed for {key}={value}: {e}")
raise ConfigurationError(f"Invalid value for {key}: {e}") from e
def _update_model(self, model: Any, path: list[str], value: Any) -> None:
"""
Recursively update nested Pydantic model attribute.
Traverses path components and sets final attribute, triggering
Pydantic validation via ``setattr()``.
Args:
model (Any): Pydantic model to update.
path (list[str]): Remaining path components.
value (Any): Value to set[Any].
Raises:
ConfigurationError: If path not found in model.
Note:
Pydantic validation occurs during ``setattr()`` if
``validate_assignment=True`` in model config.
"""
attr = path[0]
if len(path) == 1:
# This triggers Pydantic validation (if validate_assignment=True)
setattr(model, attr, value)
else:
sub_model = getattr(model, attr, None)
if sub_model is None:
raise ConfigurationError(f"Path not found in model: {attr}")
self._update_model(sub_model, path[1:], value)
[docs]
def has(self, key: str) -> bool:
    """
    Return True when the dot-notation key exists in the configuration.

    Thread-safe read that checks only the flattened key/value map: no
    Pydantic model lookup is performed, and a key whose value is
    ``None`` still counts as present.

    Args:
        key (str): Dot-notation configuration key.

    Returns:
        bool: True if the key exists, False otherwise.

    See Also:
        :meth:`get`: Retrieve values with a default.
    """
    with self._rw_lock.read_lock():
        found = key in self._flat_config
    return found
[docs]
def save(self) -> None:
    """
    Persist the current configuration state to disk.

    Writes every registered namespace to its builder-defined persistence
    file (e.g. ``~/.structum/database.json``) as JSON wrapped in a
    ``{"default": {...}}`` envelope for Dynaconf. Parent directories are
    created as needed and existing files are overwritten without backup.

    Note:
        - Per-namespace I/O or dump errors are caught and logged; they do
          not propagate and do not stop the remaining namespaces from
          being saved.
        - ``by_alias=True`` emits alias (e.g. UPPERCASE) keys when the
          model defines them.
        - Typically called while the caller holds the write lock.

    See Also:
        :meth:`reload`: Load configuration from disk.
        :meth:`set`: Update values (does not auto-save).
    """
    # Typically called within a write lock context.
    for config_name in self._manager._builders.keys():
        try:
            # 1. Get the up-to-date (possibly mutated) model.
            model = self._manager.get_config(config_name)
            # 2. Resolve the builder to learn WHERE to save; classes are
            #    instantiated, instances used as-is.
            builder_or_cls = self._manager._builders[config_name]
            if isinstance(builder_or_cls, type):
                builder = builder_or_cls()
            else:
                builder = builder_or_cls
            file_path = builder.get_persistence_file()
            # Ensure the target directory exists (e.g. ~/.structum).
            file_path.parent.mkdir(parents=True, exist_ok=True)
            # 3. Dump and save, wrapping in 'default' for Dynaconf.
            #    by_alias=True uses alias keys if defined in the model.
            data = model.model_dump(mode="json", by_alias=True)
            final_data = {"default": data}
            file_path.write_text(json.dumps(final_data, indent=2))
            log.info(f"Configuration '{config_name}' saved to {file_path}")
        except Exception as e:
            log.error(f"Failed to save '{config_name}': {e}")
[docs]
def reload(self) -> None:
    """
    Reload every configuration namespace from disk.

    Re-reads TOML sources, JSON overrides, and environment variables,
    rebuilds the Pydantic models, and regenerates the flat key cache.
    Useful for pulling in external configuration changes.

    Example:
        Reload on signal::

            import signal

            def handle_sighup(signum, frame):
                config.reload()

            signal.signal(signal.SIGHUP, handle_sighup)

    Warning:
        - All other config operations block while the write lock is held.
        - May raise validation errors if files on disk are corrupted.
        - The cache is completely flushed.

    Note:
        - Thread-safe write operation (exclusive lock).
        - Loads in priority order: TOML → JSON → ENV.
        - The hot-reload watcher calls this automatically on file changes.

    See Also:
        :meth:`save`: Persist configuration to disk.
        :meth:`enable_hot_reload`: Automatic reload on file changes.
    """
    write_guard = self._rw_lock.write_lock()
    with write_guard:
        log.info("Reloading configuration...")
        # load_all() rebuilds every model, which also flushes the cache.
        self._manager.load_all()
        self._rebuild_flat_config()
[docs]
def enable_hot_reload(
self,
watch_secrets: bool = True,
debounce_seconds: float = 2.0,
callback: Any | None = None, # Typing Any for brevity for now
) -> None:
"""
Enable automatic configuration reloading on file changes.
Watches configuration files (TOML, JSON) and automatically calls :meth:`reload`
when changes detected. Requires ``watchdog`` package.
Args:
watch_secrets (bool): Whether to watch secret files in addition to config files.
Defaults to True.
debounce_seconds (float): Debounce interval to avoid rapid reloads on
multiple file changes. Defaults to 2.0 seconds.
callback (Optional[Callable]): Optional callback function called after reload.
Signature: ``callback(changed_files: list[str]) -> None``.
Raises:
ImportError: If ``watchdog`` package not installed.
Example:
Development environment::
config = DynaconfConfigProvider(
root_path=".",
enable_hot_reload=False # Start disabled
)
config.auto_discover()
# Enable later with custom debounce
config.enable_hot_reload(debounce_seconds=5.0)
# Now changes to config files auto-reload
With callback::
def on_config_changed(files):
log.info(f"Config reloaded: {files}")
# Notify connected clients
websocket.broadcast({"type": "config_updated"})
config.enable_hot_reload(
debounce_seconds=3.0,
callback=on_config_changed
)
Production (disabled)::
# Don't use hot reload in production
config = DynaconfConfigProvider(
enable_hot_reload=False # Static config
)
Warning:
- Not recommended for production (use SIGHUP reload instead)
- Debounce prevents rapid reload storms but adds latency
- File watcher holds file system resources
Note:
- Watches both TOML source files and JSON persistence files
- Auto-started on first builder if ``enable_hot_reload=True`` in ``__init__``
- Calls are idempotent (ignores if already enabled)
- Cleanup automatic on provider destruction
See Also:
:meth:`reload`: Manual reload
:class:`~structum_lab.plugins.dynaconf.features.watcher.HotReloadManager`: Watcher implementation
"""
if self._hot_reload:
log.warning("Hot reload already enabled")
return
try:
from structum_lab.plugins.dynaconf.features.watcher import HotReloadManager
except ImportError as e:
log.error(f"Failed to enable hot reload: {e}")
raise ImportError(
"Hot reload requires 'watchdog' package. "
"Install with: pip install structum-dynaconf[dev]"
) from e
# Instantiate with provider and debounce settings
self._hot_reload = HotReloadManager(self, debounce_interval=debounce_seconds)
# Monitor all registered builders' files
for config_name in self._manager._builders.keys():
builder_or_cls = self._manager._builders[config_name]
if isinstance(builder_or_cls, type):
builder = builder_or_cls()
else:
builder = builder_or_cls
# Watch persistence file directory
persistence_file = builder.get_persistence_file()
if persistence_file:
self._hot_reload.watch(persistence_file)
# Watch TOML base files directory
try:
root = builder._resolve_config_root()
config_dir = root / "config"
if config_dir.exists():
self._hot_reload.watch(config_dir)
except Exception:
log.debug(f"No config root found for '{config_name}', skipping TOML watch")
self._hot_reload.start()
log.info("Hot reload enabled for all registered configurations")
[docs]
def __del__(self) -> None:
    """
    Best-effort cleanup of the hot-reload observer at garbage collection.

    Stops the file watcher (if one was started) to avoid leaking observer
    threads. Defensive on two fronts:

    - Uses ``getattr`` because ``__init__`` may have failed before
      ``_hot_reload`` was ever assigned (a bare ``self._hot_reload`` would
      raise ``AttributeError`` on such partially-initialized instances).
    - Swallows errors from ``stop()``: during interpreter shutdown modules
      may already be torn down, and ``__del__`` must never propagate
      exceptions.
    """
    watcher = getattr(self, "_hot_reload", None)
    if watcher:
        try:
            watcher.stop()
        except Exception:
            # Never raise from __del__; resources vanish at shutdown anyway.
            pass
[docs]
def get_cache_stats(self) -> dict[str, Any]:
    """
    Return cache performance statistics and emit them as metric gauges.

    Returns:
        dict[str, Any]: ``{"enabled": False}`` when caching is disabled;
        otherwise ``{"enabled": True}`` merged with the cache
        implementation's own statistics (expected to include ``size``
        and ``hit_rate``).

    Example:
        Monitoring dashboard::

            stats = config.get_cache_stats()
            print(f"Cache enabled: {stats['enabled']}")
            print(f"Cache size: {stats['size']} keys")
            print(f"Hit rate: {stats['hit_rate']:.2%}")

    Note:
        - ``cache.size`` and ``cache.hit_rate`` are pushed to the metrics
          interface as gauges on every call, so polling this method is
          enough to feed a Prometheus/statsd dashboard.
        - Thread-safe read operation.

    See Also:
        :meth:`__init__`: ``enable_cache`` parameter.
        :class:`~structum_lab.plugins.dynaconf.features.cache.SmartCache`: Cache implementation.
    """
    if not self._cache:
        return {"enabled": False}
    stats: dict[str, Any] = {"enabled": True}
    stats.update(self._cache.get_stats())
    # Mirror the headline numbers into the metrics backend for Prometheus.
    self._metrics.gauge("cache.size", stats["size"])
    self._metrics.gauge("cache.hit_rate", stats["hit_rate"])
    return stats