diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index bbee877..b910105 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -26,7 +26,7 @@ jobs:
         python-version: ["3.10", "3.11", "3.12", "3.13"]
 
     steps:
-    - uses: actions/checkout@v5
+    - uses: actions/checkout@v6
     
     - name: Set up Python ${{ matrix.python-version }}
       uses: actions/setup-python@v6
diff --git a/.gitignore b/.gitignore
index 08ed2d9..7181da4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -188,6 +188,9 @@ todo*
 # local filters dir
 /filters/
 
+# local blueprints dir
+/blueprints/
+
 # ruff
 .ruff*/
 
diff --git a/README.md b/README.md
index da1cfde..ac23f38 100644
--- a/README.md
+++ b/README.md
@@ -25,9 +25,15 @@ NornFlow bridges the gap between development and operations teams by providing:
 
 NornFlow promotes collaboration between developers and network engineers:
 
-⚡️ **Unlimited Power for Developer**: Write automation logic as pure Python functions with Nornir's task interface. No boilerplate, just clean Python code.
+⚡️ **Unlimited Power for Developers**: Write automation logic as pure Python functions with Nornir's task interface. No boilerplate, just clean Python code.
 
-🚀 **Productivity for Operators**: Define and run workflows with familiar YAML syntax similar to Ansible playbooks, enabling network engineers to be productive regardless of their Python programming background.
+🚀 **Productivity for Operators**: Define and run workflows with familiar YAML syntax, enabling network engineers to be productive regardless of their Python programming background.
+
+🔧 **Advanced Automation Capabilities**: Extends Nornir with powerful features beyond vanilla capabilities, including:
+- Multi-level variable system with precedence (environment, global, domain, workflow, CLI, runtime)
+- Rich Jinja2 filters for advanced templating and data manipulation
+- Hooks system for extending task behavior without modifying task code
+- Flexible failure strategies (skip-failed, fail-fast, run-all) for robust error handling
 
 🧩 **Project-friendly**: Brings predictable structure to Nornir projects with:
 - Standardized directory organization
@@ -42,4 +48,5 @@ NornFlow promotes collaboration between developers and network engineers:
 - [Variables Basics](https://github.com/theandrelima/nornflow/blob/main/docs/variables_basics.md) - Understand NornFlow's variable system
 - [NornFlow Settings](https://github.com/theandrelima/nornflow/blob/main/docs/nornflow_settings.md) - Configure your NornFlow environment
 - [Jinja2 Filters](https://github.com/theandrelima/nornflow/blob/main/docs/jinja2_filters.md) - Advanced template manipulation
+- [Hooks Guide](https://github.com/theandrelima/nornflow/blob/main/docs/hooks_guide.md) - Extend task behavior with custom hooks
 - [API Reference](https://github.com/theandrelima/nornflow/blob/main/docs/api_reference.md) - For developers extending NornFlow
diff --git a/docs/api_reference.md b/docs/api_reference.md
index a660767..62bb69f 100644
--- a/docs/api_reference.md
+++ b/docs/api_reference.md
@@ -29,7 +29,7 @@ The central orchestrator for the entire system. NornFlow manages all aspects of
 
 ### Responsibilities:
 - Loads and validates settings (via NornFlowSettings)
-- Discovers and catalogs tasks, workflows, and filters from directories
+- Discovers and catalogs tasks, workflows, filters, and blueprints from directories
 - Manages vars and filters with precedence rules
 - Directly orchestrates workflow execution
 - Handles inventory filtering and variable resolution
@@ -51,6 +51,7 @@ def __init__(
     vars: dict[str, Any] | None = None,
     filters: dict[str, Any] | None = None,
     failure_strategy: FailureStrategy | None = None,
+    dry_run: bool | None = None,
     **kwargs: Any,
 )
 ```
@@ -62,6 +63,7 @@ def __init__(
 - `vars`: Variables with highest precedence in the resolution chain
 - `filters`: Inventory filters with highest precedence that override workflow filters
 - `failure_strategy`: Failure handling strategy (skip-failed, fail-fast, or run-all)
+- `dry_run`: Dry run mode with highest precedence. Overrides workflow and settings values
 - `**kwargs`: Additional keyword arguments passed to NornFlowSettings
 
 ### Properties
@@ -71,6 +73,7 @@ def __init__(
 | `tasks_catalog` | `CallableCatalog` | Registry of available tasks |
 | `workflows_catalog` | `FileCatalog` | Registry of workflow files |
 | `filters_catalog` | `CallableCatalog` | Registry of inventory filters |
+| `blueprints_catalog` | `FileCatalog` | Registry of blueprint files |
 | `workflow` | `WorkflowModel \| None` | Current workflow model or None |
 | `workflow_path` | `Path \| None` | Path to workflow file if loaded from file |
 | `processors` | `list` | List of processor instances |
@@ -78,27 +81,27 @@ def __init__(
 | `vars` | `dict[str, Any]` | Variables with highest precedence |
 | `filters` | `dict[str, Any]` | Inventory filters with highest precedence |
 | `failure_strategy` | `FailureStrategy` | Current failure handling strategy |
+| `dry_run` | `bool` | Current dry run mode (resolved via precedence chain) |
 | `nornir_configs` | `dict[str, Any]` | Nornir configuration (read-only) |
 | `nornir_manager` | `NornirManager` | NornirManager instance (read-only) |
 
 ### Methods
 
-#### `run(dry_run: bool = False) -> int`
+#### `run() -> int`
 Execute the configured workflow.
 
 ```python
-exit_code = nornflow.run(dry_run=True)  # Run in dry-run mode
-exit_code = nornflow.run()              # Run normally
+# dry_run is now set via constructor, workflow, or settings
+nornflow = NornFlow(workflow=my_workflow, dry_run=True)
+exit_code = nornflow.run()  # Uses the dry_run set during initialization
 ```
 
-**Parameters:**
-- `dry_run`: Whether to run the workflow in dry-run mode
-
 **Returns:**
 - `int`: Exit code representing execution status
   - 0: Success (all tasks passed)
-  - 1-100: Failure percentage (% of failed task executions, rounded down)
+  - 1-100: Failure with percentage information (% of failed task executions)
   - 101: Failure without percentage information
+  - 102+: Reserved for exceptions/internal errors
 
 **Exceptions:**
 - `WorkflowError`: If no workflow is configured
@@ -115,8 +118,8 @@ from nornflow.builder import NornFlowBuilder
 builder = NornFlowBuilder()
 nornflow = (builder
     .with_settings_path("nornflow.yaml")
-    .with_workflow_name("backup")
-    .with_vars({"env": "prod"})
+    .with_workflow_path("backup.yaml")
+    .with_kwargs(dry_run=True)  # Pass dry_run via kwargs
     .build())
 ```
 
@@ -153,42 +156,57 @@ Set inventory filters with highest precedence.
 Set the failure handling strategy.
 
 #### `with_kwargs(**kwargs: Any) -> NornFlowBuilder`
-Set additional keyword arguments.
+Set additional keyword arguments (including `dry_run`).
 
 #### `build() -> NornFlow`
 Build and return the NornFlow instance.
 
 ## NornFlowSettings Class
 
-Configuration settings for NornFlow.
+Configuration settings for NornFlow using Pydantic for validation and type safety.
 
 ```python
 from nornflow.settings import NornFlowSettings
 
-settings = NornFlowSettings(settings_file="nornflow.yaml")
-```
-
-### Constructor
+# Load from YAML file
+settings = NornFlowSettings.load("nornflow.yaml")
 
-```python
-def __init__(
-    self,
-    settings_file: str = "nornflow.yaml",
-    **kwargs: Any
+# Create directly with values
+settings = NornFlowSettings(
+    nornir_config_file="nornir.yaml",
+    dry_run=True
 )
 ```
 
+### Class Methods
+
+#### `load(settings_file: str | None = None, base_dir: Path | None = None, **overrides: Any) -> NornFlowSettings`
+Load settings from a YAML file with automatic resolution and overrides. This call resolves relative paths by combining either the discovered settings directory or the explicit `base_dir` with the configured entries.
+
+**Parameters:**
+- `settings_file`: Path to settings YAML file. If None, checks NORNFLOW_SETTINGS env var, then defaults to "nornflow.yaml"
+- `base_dir`: Base directory for resolving relative paths. If None, uses the directory containing the settings file. Providing a value overrides the discovery location used by `resolve_relative_paths`.
+- `**overrides`: Additional settings to override YAML values
+
+**Returns:**
+- `NornFlowSettings` instance with path fields rewritten relative to the resolved base directory. Constructing `NornFlowSettings` directly (without `load`) skips this step and leaves the incoming values untouched.
+
 ### Key Properties
 
 | Property | Type | Description |
 |----------|------|-------------|
-| `nornir_config_file` | `str` | Path to Nornir configuration file |
-| `local_tasks_dirs` | `list[str]` | Directories containing custom tasks |
-| `local_workflows_dirs` | `list[str]` | Directories containing workflow files |
-| `local_filters_dirs` | `list[str]` | Directories containing custom filters |
-| `processors` | `list[dict[str, Any]] \| None` | Nornir processor configurations |
+| `nornir_config_file` | `str` | Path to Nornir configuration file (required) |
+| `local_tasks` | `list[str]` | Directories containing custom tasks |
+| `local_workflows` | `list[str]` | Directories containing workflow files |
+| `local_filters` | `list[str]` | Directories containing custom filters |
+| `local_hooks` | `list[str]` | Directories containing custom hooks |
+| `local_blueprints` | `list[str]` | Directories containing blueprint files |
+| `processors` | `list[dict[str, Any]]` | Nornir processor configurations |
 | `vars_dir` | `str` | Directory for variable files |
 | `failure_strategy` | `FailureStrategy` | Task failure handling strategy |
+| `dry_run` | `bool` | Default dry run mode |
+| `as_dict` | `dict[str, Any]` | Settings as a dictionary |
+| `base_dir` | `Path` | Base directory for resolving relative paths |
 
 ## NornirManager Class
 
@@ -278,511 +296,270 @@ Universal validators must return a tuple of `(bool, str)` where:
 
 When models are created, the validation process follows this sequence:
 1. Pydantic performs basic type validation
-2. `create()` method is called, which runs universal field validation via `run_universal_field_validation()`
-3. For specific models (like TaskModel), additional post-creation validation may run via `run_post_creation_task_validation()`
-
-This multi-layered approach ensures models are validated consistently while allowing flexibility where needed.
+2. Field validators run for specific fields
+3. Model validators run for cross-field validation
+4. Universal validators run for all non-excluded fields
+5. Final model instance is returned or validation error is raised
 
-### HookableModel
+### WorkflowModel
 
-Abstract base class for models that support hooks (e.g., TaskModel).
+Represents a complete workflow definition.
 
 ```python
-from nornflow.models import HookableModel
-```
-
-**Purpose:**
-HookableModel provides the infrastructure for models to support hook configurations. It manages hook discovery, caching, and validation, but delegates actual hook execution to the NornFlowHookProcessor during task runtime.
-
-**Key Features:**
-- Implements Flyweight pattern for hook instance management (one instance per unique hook configuration)
-- Automatically migrates hook fields from model dict to hooks field during creation
-- Caches hook instances and hook processor reference for performance
-- Provides hook validation interface through `run_hook_validations()`
-
-**Properties:**
-
-| Property | Type | Description |
-|----------|------|-------------|
-| `hooks` | `HashableDict[str, Any] \| None` | Hook configurations for this model |
-| `_hooks_cache` | `list[Hook] \| None` | Cached hook instances (private) |
-| `_hook_processor_cache` | `NornFlowHookProcessor \| None` | Cached hook processor reference (private) |
-
-**Methods:**
+from nornflow.models import WorkflowModel
 
-#### `get_hooks() -> list[Hook]`
-Get all hook instances for this model. Uses cached instances if available.
+workflow = WorkflowModel.create({
+    "workflow": {
+        "name": "My Workflow",
+        "tasks": [...],
+        "dry_run": True
+    }
+})
+```
 
-**Returns:**
-- List of Hook instances configured for this model
+**Key Fields:**
+- `name`: Workflow name (required)
+- `description`: Workflow description (optional)
+- `tasks`: List of TaskModel instances (required, non-empty)
+- `dry_run`: Override dry run mode (optional, can be `None`)
+- `failure_strategy`: Override failure strategy (optional, can be `None`)
+- `vars`: Workflow-level variables (optional)
+- `inventory_filters`: Inventory filtering configuration (optional)
+- `processors`: Processor configurations (optional)
 
-#### `run_hook_validations() -> None`
-Execute validation logic for all hooks configured on this model.
+**Create Method:**
 
-Should be called explicitly at the beginning of the `run()` method in subclasses.
+The `create` class method handles workflow creation with blueprint expansion:
 
-**Raises:**
-- `HookValidationError`: If any hook validation fails
+```python
+@classmethod
+def create(cls, dict_args: dict[str, Any], *args: Any, **kwargs: Any) -> "WorkflowModel"
+```
 
-#### `get_task_args() -> dict[str, Any]`
-Get clean task arguments without any NornFlow-specific context.
+**Args:**
+- `dict_args`: Dictionary containing the full workflow data, must include 'workflow' key
+- `*args`: Additional positional arguments passed to parent create method
+- `**kwargs`: Additional keyword arguments:
+  - `blueprints_catalog` (dict[str, Path] | None): Catalog mapping blueprint names to file paths
+  - `vars_dir` (str | None): Directory containing variable files
+  - `workflow_path` (Path | None): Path to the workflow file
+  - `workflow_roots` (list[str] | None): List of workflow root directories
+  - `cli_vars` (dict[str, Any] | None): CLI variables with highest precedence
 
 **Returns:**
-- Dictionary of task arguments for the task function
-
-#### `validate_hooks_and_set_task_context(nornir_manager, vars_manager, task_func) -> None`
-Validate hooks and set task-specific context in the hook processor.
-
-**Parameters:**
-- `nornir_manager`: The NornirManager instance
-- `vars_manager`: The variables manager instance
-- `task_func`: The task function that will be executed
+- `WorkflowModel`: The created WorkflowModel instance with expanded blueprints
 
 **Raises:**
-- `ProcessorError`: If hooks are configured but hook processor cannot be retrieved
-
-**Immutability Constraints:**
-
-> **CRITICAL**: HookableModel instances (and subclasses like TaskModel) are **hashable** by design as PydanticSerdes models. **NEVER modify model attributes** after initialization, especially within Hook classes! Modifying attributes breaks the hash contract and can corrupt internal caches.
+- `WorkflowError`: If 'workflow' key is not present in dict_args
+- `BlueprintError`: If blueprint expansion fails
+- `BlueprintCircularDependencyError`: If circular dependencies detected in blueprint references
 
 ### TaskModel
 
-Represents individual tasks within a workflow. Inherits from HookableModel to support hooks.
+Represents a single task in a workflow.
 
 ```python
 from nornflow.models import TaskModel
-```
 
-**Properties:**
-
-| Property | Type | Description |
-|----------|------|-------------|
-| `id` | `int \| None` | Auto-incrementing task ID |
-| `name` | `str` | Task name (must exist in tasks catalog) |
-| `args` | `HashableDict[str, Any] \| None` | Task arguments (supports Jinja2) |
-| hooks | `HashableDict[str, Any] \| None` | Hook configurations (inherited from HookableModel) |
-
-**Key Characteristics:**
-- Inherits from `HookableModel` (not RunnableModel)
-- Instances are hashable and immutable after creation
-- Hook validation delegated to parent via `run_hook_validations()`
-- Excludes `args` and hooks from universal field validation
-
-**Methods:**
-
-#### `run(nornir_manager: NornirManager, vars_manager: NornFlowVariablesManager, tasks_catalog: dict[str, Callable]) -> AggregatedResult`
-Execute the task using the provided NornirManager and tasks catalog.
-
-### WorkflowModel
-
-Represents a complete workflow definition. This is a data model that defines the structure of a workflow but does not contain execution logic.
-
-```python
-from nornflow.models import WorkflowModel
+task = TaskModel(
+    name="netmiko_send_command",
+    args={"command_string": "show version"},
+    set_to="version_output"
+)
 ```
 
-**Properties:**
-
-| Property | Type | Description |
-|----------|------|-------------|
-| `name` | `str` | Workflow name |
-| `description` | `str \| None` | Workflow description |
-| `inventory_filters` | `HashableDict[str, Any] \| None` | Workflow-level inventory filters |
-| `processors` | `tuple[HashableDict[str, Any]] \| None` | Workflow-level processors |
-| `tasks` | `OneToMany[TaskModel, ...]` | List of tasks in the workflow |
-| `dry_run` | `bool` | Whether to run in dry-run mode |
-| `vars` | `HashableDict[str, Any] \| None` | Workflow-level variables |
-| `failure_strategy` | `FailureStrategy` | Failure handling strategy |
-
-**Class Methods:**
-
-#### `create(dict_args: dict[str, Any]) -> WorkflowModel`
-Create a new WorkflowModel from a workflow dictionary.
+**Key Fields:**
+- `name`: Task name from catalog (required)
+- `args`: Task arguments (optional)
+- `set_to`: Variable storage configuration (optional hook)
+- `if`: Conditional execution hook (optional hook)
+- `shush`: Output suppression hook (optional hook)
+- Other hook configurations as needed
 
 ## Hook Classes
 
-Hooks extend task behavior without modifying task code. They are implemented as Nornir Processors that activate when configured on specific tasks.
+Hooks extend task behavior without modifying task code. They implement the Nornir Processor protocol and are automatically registered when imported.
 
-> **For detailed hook documentation, including:**
-> - Complete lifecycle methods reference
-> - Built-in hooks (if, set_to, shush)
-> - Creating custom hooks
-> - Hook validation patterns
-> - Exception handling strategies
->
-> **See:** [Hooks Guide](hooks_guide.md)
+### BaseHook
 
-### Hook Base Class
+Base class for all hooks, implementing the Nornir Processor protocol.
 
 ```python
-from nornflow.hooks import Hook
-```
-
-**Class Attributes:**
-
-| Attribute | Type | Required | Description |
-|-----------|------|----------|-------------|
-| `hook_name` | `str` | Yes | Unique identifier for this hook type |
-| `run_once_per_task` | `bool` | No | If True, runs once per task; if False, runs per host (default: False) |
-| `exception_handlers` | `dict[type[Exception], str]` | No | Maps exception types to handler method names |
-
-**Constructor:**
+from nornflow.hooks import BaseHook
+from nornir.core.task import Task
+from typing import Any
+
+class MyCustomHook(BaseHook):
+    """Custom hook implementation."""
+    
+    def __init__(self, value: Any):
+        self.value = value
+    
+    def task_started(self, task: Task) -> None:
+        """Called when task starts."""
+        pass
+    
+    def task_instance_started(self, task: Task, host: Host) -> None:
+        """Called when task starts on a specific host."""
+        pass
+    
+    def task_instance_completed(self, task: Task, host: Host, result: Result) -> None:
+        """Called when task completes on a specific host."""
+        pass
+```
+
+### Hook Registration
+
+Hooks are automatically registered when their class is defined:
 
 ```python
-def __init__(self, value: Any = None)
-```
-
-**Key Lifecycle Methods:**
-
-All lifecycle methods are optional - override only those needed:
-
-- `task_started(task: Task) -> None`
-- `task_completed(task: Task, result: AggregatedResult) -> None`
-- `task_instance_started(task: Task, host: Host) -> None`
-- `task_instance_completed(task: Task, host: Host, result: MultiResult) -> None`
-- `subtask_instance_started(task: Task, host: Host) -> None`
-- `subtask_instance_completed(task: Task, host: Host, result: MultiResult) -> None`
-
-**Control Methods:**
-
-#### `should_execute(task: Task) -> bool`
-Check if this hook should execute for given task.
-
-#### `execute_hook_validations(task_model: TaskModel) -> None`
-Execute validation logic specific to this hook.
-
-**Raises:**
-- `HookValidationError`: If validation fails
-
-
-### Built-in Hooks
+# hooks/my_hook.py
+from nornflow.hooks import BaseHook
 
-**IfHook** - Conditional task execution
-
-```yaml
-tasks:
-  - name: napalm_get
-    args:
-      getters: ["facts"]
-    if: "{{ host.platform == 'ios' }}"
-```
-
-**SetToHook** - Capture and store task results
-
-```yaml
-tasks:
-  # stores the entire Nornir Result object to a var
-  - name: napalm_get
-    args:
-      getters: ["facts"]
-    set_to: "device_facts"
-  
-  # or extract specific nested data from the Nornir Result object
-  - name: napalm_get
-    args:
-      getters: ["facts", "environment"]
-    set_to:
-      vendor: "vendor"
-      cpu_usage: "environment.cpu.0.%usage"
+class MyHook(BaseHook):
+    """Automatically registered when this file is imported."""
+    pass
 ```
 
-**ShushHook** - Suppress task output
-
-```yaml
-tasks:
-  - name: netmiko_send_command
-    args:
-      command_string: "show version"
-    shush: true
-
-  - name: netmiko_send_command
-    args:
-      command_string: "show interfaces"
-    shush: "{{ verbose_mode == false }}"
-```
+NornFlow discovers hooks by importing all Python files in configured hook directories.
 
 ## Variable System Classes
 
 ### NornFlowVariablesManager
 
-Manages the loading, accessing, and resolution of variables from multiple sources.
+Manages variable contexts and resolution for all devices during workflow execution.
 
 ```python
 from nornflow.vars.manager import NornFlowVariablesManager
 ```
 
-#### Constructor
-
-```python
-def __init__(
-    self,
-    vars_dir: str,
-    cli_vars: dict[str, Any] | None = None,
-    inline_workflow_vars: dict[str, Any] | None = None,
-    workflow_path: Path | None = None,
-    workflow_roots: list[str] | None = None,
-) -> None
-```
-
-#### Methods
-
-- `get_device_context(host_name: str) -> NornFlowDeviceContext`: Get or create device context
-- `set_runtime_variable(name: str, value: Any, host_name: str) -> None`: Set runtime variable
-- `get_nornflow_variable(var_name: str, host_name: str) -> Any`: Get variable following precedence
-- `resolve_string(template_str: str, host_name: str, additional_vars: dict[str, Any] | None = None) -> str`: Resolve Jinja2 templates
-- `resolve_data(data: Any, host_name: str, additional_vars: dict[str, Any] | None = None) -> Any`: Recursively resolve templates
-
-### NornFlowDeviceContext
-
-Maintains an isolated variable context for a specific device.
-
-```python
-from nornflow.vars.context import NornFlowDeviceContext
-```
-
-**Properties:**
-
-| Property | Type | Description |
-|----------|------|-------------|
-| `cli_vars` | `dict[str, Any]` | CLI variables |
-| `workflow_inline_vars` | `dict[str, Any]` | Inline workflow variables |
-| `domain_vars` | `dict[str, Any]` | Domain-specific variables |
-| `default_vars` | `dict[str, Any]` | Default variables |
-| `env_vars` | `dict[str, Any]` | Environment variables |
-| `runtime_vars` | `dict[str, Any]` | Runtime variables (device-specific) |
-
-**Methods:**
-
-- `get_flat_context() -> dict[str, Any]`: Get flattened variables respecting precedence
+**Key Methods:**
+- `get_vars(host: Host) -> dict`: Get variables for a specific host
+- `set_var(host: Host, key: str, value: Any) -> None`: Set a runtime variable
+- `render_template(template: str, host: Host) -> str`: Render Jinja2 template
 
 ### NornirHostProxy
 
-Read-only proxy for accessing Nornir inventory variables via the `host.` namespace.
+Provides read-only access to Nornir inventory data within Jinja2 templates.
 
 ```python
-from nornflow.vars.proxy import NornirHostProxy
+# Automatically available in templates as 'host'
+# Example: {{ host.name }}, {{ host.platform }}, {{ host.data.site }}
 ```
 
-**Purpose:**
-Acts as a proxy to provide direct access to host attributes and `host.data` dictionary values within Jinja2 templates. Managed by `NornFlowVariableProcessor` which sets the current host context before variable resolution.
-
-**Key Features:**
-- Read-only access to Nornir inventory
-- Provides `host.name`, `host.platform`, `host.data.*` access in templates
-- Automatically set by `NornFlowVariableProcessor` during task execution
-- Does not modify Nornir inventory
-
-**Properties:**
-
-| Property | Type | Description |
-|----------|------|-------------|
-| `current_host` | `Host \| None` | Current Nornir Host object being proxied |
-| `nornir` | `Nornir \| None` | Nornir instance for inventory access |
-| `current_host_name` | `str \| None` | Name of current host |
-
-**Setting `current_host_name`:**
-When set, looks up the host in Nornir inventory and sets `current_host`. If host not found or Nornir instance not set, clears current host context.
-
-**Magic Method:**
-
-#### `__getattr__(name: str) -> Any`
-Dynamically retrieve attributes or data keys from current Nornir host.
-
-Follows precedence:
-1. Direct Host object attributes (e.g., `name`, `platform`, `data`)
-2. Keys within `Host.data` dictionary (merged from host, groups, defaults)
-
-**Parameters:**
-- `name`: Attribute or data key name
-
-**Returns:**
-- Value from host's inventory
-
-**Raises:**
-- `VariableError`: If no Nornir instance or current host set, or if attribute/key not found
-
-### NornFlowVariableProcessor
+## Built-in Tasks
 
-Processor for variable substitution and management during task execution.
+NornFlow includes several built-in tasks for common operations:
 
-```python
-from nornflow.vars.processors import NornFlowVariableProcessor
-```
+### `echo`
 
-#### Constructor
+Print a message for debugging or logging.
 
-```python
-def __init__(self, vars_manager: NornFlowVariablesManager)
+```yaml
+tasks:
+  - name: echo
+    args:
+      msg: "Processing {{ host.name }}"
 ```
 
-#### Methods
+### `set`
 
-- `task_instance_started(task: Task, host: Host) -> None`: Set host context and process templates
-- `task_instance_completed(task: Task, host: Host, result: MultiResult) -> None`: Clear host context
+Set runtime variables dynamically.
 
-## Built-in Tasks
+```yaml
+tasks:
+  - name: set
+    args:
+      timestamp: "{{ now() }}"
+      counter: 0
+```
 
-### echo
+### `write_file`
 
-Print a message with variable interpolation.
+Write content to a file.
 
 ```yaml
-- name: echo
-  args:
-    msg: "Processing {{ host.name }}"
+tasks:
+  - name: write_file
+    args:
+      filename: "configs/{{ host.name }}.txt"
+      content: "{{ config_data }}"
 ```
 
-### set
+### `read_file`
 
-Set runtime variables for use in subsequent tasks.
+Read content from a file.
 
 ```yaml
-- name: set
-  args:
-    vlan_id: 100
-    backup_path: "/tmp/{{ host.name }}.cfg"
+tasks:
+  - name: read_file
+    args:
+      filename: "templates/config.j2"
+    set_to: "template_content"
 ```
 
-### write_file
+### `template_file`
 
-Write content to a file.
+Render a Jinja2 template file.
 
 ```yaml
-- name: write_file
-  args:
-    filename: "/tmp/config.txt"
-    content: "{{ config_data }}"
-    append: false
-    mkdir: true
+tasks:
+  - name: template_file
+    args:
+      template: "templates/config.j2"
+      dest: "configs/{{ host.name }}.conf"
 ```
 
 ## Built-in Filters
 
-### hosts
+NornFlow includes built-in Nornir inventory filters:
 
-Filter inventory by hostname list.
+### `filter_by_hosts`
+
+Filter inventory to specific hosts.
 
 ```yaml
 inventory_filters:
-  hosts: ["router1", "router2"]
+  filter_by_hosts: ["host1", "host2"]
 ```
 
-### groups
+### `filter_by_groups`
 
-Filter inventory by group membership.
+Filter inventory to hosts in specific groups.
 
 ```yaml
 inventory_filters:
-  groups: ["core", "distribution"]
+  filter_by_groups: ["routers", "switches"]
 ```
 
 ## Built-in Processors
 
 ### DefaultNornFlowProcessor
 
-The default processor that formats and displays task results.
+The default processor that provides standard output formatting.
 
 ```yaml
 processors:
   - class: "nornflow.builtins.DefaultNornFlowProcessor"
 ```
 
-**Features:**
-- Colored output for success/failure
-- Execution timing tracking
+Features:
+- Formatted task output
 - Progress indicators
-- Final workflow summary
+- Result summaries
+- Support for the `shush` hook
 
 ### NornFlowFailureStrategyProcessor
 
-Implements failure handling strategies during execution.
-
-```python
-from nornflow.builtins.processors import NornFlowFailureStrategyProcessor
-```
-
-**Strategies:**
-- `SKIP_FAILED`: Remove failed hosts from subsequent tasks (default)
-- `FAIL_FAST`: Stop all execution on first failure
-- `RUN_ALL`: Continue all tasks regardless of failures
+Internally used processor that implements failure handling strategies.
 
 ### NornFlowHookProcessor
 
-Orchestrator processor that delegates execution to registered hooks.
-
-```python
-from nornflow.builtins.processors import NornFlowHookProcessor
-```
-
-**Purpose:**
-Manages all hook executions by extracting hook information from task context and calling appropriate hook methods at each lifecycle point. Automatically added to processor chain when hooks are present in workflow.
-
-**Key Features:**
-- Delegates to registered hooks at appropriate lifecycle points
-- Manages two-tier context system (workflow + task-specific)
-- Injects complete context into hooks before execution
-- Handles hook exception delegation to custom handlers
-
-**Constructor:**
-
-```python
-def __init__(self, workflow_context: dict[str, Any] | None = None)
-```
-
-**Parameters:**
-- `workflow_context`: Optional workflow-level context set during initialization
-
-**Properties:**
-
-| Property | Type | Description |
-|----------|------|-------------|
-| `workflow_context` | `dict[str, Any]` | Workflow-level context (vars_manager, catalogs, etc.) |
-| `task_specific_context` | `dict[str, Any]` | Current task-specific context (task_model, hooks) |
-| `context` | `dict[str, Any]` | Combined workflow + task-specific context (read-only) |
-| `task_hooks` | `list[Hook]` | Active hooks for current task (read-only) |
-
-**Context Management:**
+Internally used processor that manages hook execution for tasks.
 
-The processor manages two types of context:
-
-1. **Workflow Context** (set once during initialization):
-   - `vars_manager`: Variable resolution system
-   - `nornir_manager`: Nornir operations manager  
-   - `tasks_catalog`: Available tasks
-   - `filters_catalog`: Available inventory filters
-   - `workflows_catalog`: Available workflows
-
-2. **Task-Specific Context** (set per task execution):
-   - `task_model`: Current TaskModel being executed
-   - hooks: List of Hook instances for this task
-
-The `context` property always returns merged dictionary of both contexts. Task-specific context is set at task start and cleared at task completion.
-
-**Lifecycle Methods:**
-
-All lifecycle methods use the `@hook_delegator` decorator which:
-- Extracts hooks from `task_specific_context`
-- Injects merged context into each hook's `_current_context`
-- Delegates to corresponding hook methods
-- Handles custom exception handlers defined by hooks
-
-#### `task_started(task: Task) -> None`
-Delegates to hooks' `task_started` methods.
-
-#### `task_completed(task: Task, result: AggregatedResult) -> None`
-Delegates to hooks' `task_completed` methods and clears task-specific context.
-
-#### `task_instance_started(task: Task, host: Host) -> None`
-Delegates to hooks' `task_instance_started` methods.
-
-#### `task_instance_completed(task: Task, host: Host, result: MultiResult) -> None`
-Delegates to hooks' `task_instance_completed` methods.
-
-#### `subtask_instance_started(task: Task, host: Host) -> None`
-Delegates to hooks' `subtask_instance_started` methods.
+### NornFlowVariableProcessor
 
-#### `subtask_instance_completed(task: Task, host: Host, result: MultiResult) -> None`
-Delegates to hooks' `subtask_instance_completed` methods.
+Internally used processor that handles variable resolution and template rendering.
 
 <div align="center">
   
diff --git a/docs/blueprints_guide.md b/docs/blueprints_guide.md
new file mode 100644
index 0000000..b1cd902
--- /dev/null
+++ b/docs/blueprints_guide.md
@@ -0,0 +1,597 @@
+# Blueprints Guide
+
+## Table of Contents
+- [Overview](#overview)
+- [What Are Blueprints?](#what-are-blueprints)
+- [When to Use Blueprints](#when-to-use-blueprints)
+- [Creating Blueprints](#creating-blueprints)
+  - [Blueprint Structure](#blueprint-structure)
+  - [Blueprint Discovery](#blueprint-discovery)
+  - [Blueprint Catalog](#blueprint-catalog)
+- [Using Blueprints in Workflows](#using-blueprints-in-workflows)
+  - [Basic Blueprint Reference](#basic-blueprint-reference)
+  - [Conditional Blueprint Inclusion](#conditional-blueprint-inclusion)
+  - [Nested Blueprints](#nested-blueprints)
+  - [Dynamic Blueprint Selection](#dynamic-blueprint-selection)
+- [Variable Resolution in Blueprints](#variable-resolution-in-blueprints)
+  - [Assembly-Time vs Runtime](#assembly-time-vs-runtime)
+  - [Variable Precedence for Blueprints](#variable-precedence-for-blueprints)
+  - [Using Variables in Blueprint References](#using-variables-in-blueprint-references)
+- [Blueprint Composition Strategies](#blueprint-composition-strategies)
+- [Blueprint Nesting: Circular vs Repeated Use](#blueprint-nesting-circular-vs-repeated-use)
+
+## Overview
+
+Blueprints solve a fundamental problem in workflow automation: **how do you reuse common task sequences without copy-pasting them everywhere?** 
+
+Instead of repeating the same 5-10 tasks across multiple workflows, you define them once as a blueprint and reference them by the blueprint name, just like you do with a workflow. You can think of blueprints as 'macros' or 'functions' that allow you to define automation once, and reuse it wherever it makes sense. 
+
+Blueprints are expanded during workflow loading (assembly-time), meaning they become part of the workflow structure before execution begins. 
+
+**Key characteristics:**
+- **Reusable**: Define once, use in multiple workflows
+- **Composable**: Blueprints can reference other blueprints
+- **Conditional**: Include blueprints based on conditions
+- **Parameterizable**: Use variables to customize behavior
+- **Assembly-time**: Expanded before workflow execution starts
+
+## What Are Blueprints?
+
+Blueprints are YAML files containing a `tasks` list that can be referenced by name or path within workflows. Unlike workflows, blueprints:
+
+- Contain **ONLY** a `tasks` root-level key (no workflow metadata like name, description, etc.)
+- Are **referenced** within workflows, **not executed directly**
+- Support **nested composition** (blueprints can reference other blueprints)
+- Have access to a **subset of NornFlow variables** during expansion (*more about this later*)
+- Are **expanded during workflow loading**, not during execution
+
+**Comparison:**
+
+| Aspect | Blueprint | Workflow | Task |
+|--------|-----------|----------|------|
+| **Purpose** | Reusable task collection | Complete automation definition | Single operation |
+| **Structure** | A YAML/dict with a single `tasks` key | Full YAML/dict workflow definition with metadata | Python function with signature |
+| **Usage** | Referenced in workflows | Executed directly | Referenced in workflows/blueprints |
+| **Nesting** | Can reference other blueprints | Cannot be nested | N/A |
+| **Variables** | Assembly-time subset | Full runtime access | Full runtime access |
+| **When processed** | Workflow loading | N/A | Task execution |
+
+## When to Use Blueprints
+
+Blueprints are ideal for:
+
+**Common task sequences:**
+```yaml
+# blueprints/pre_checks.yaml
+tasks:
+  - name: netmiko_send_command
+    args:
+      command_string: "show version"
+  - name: netmiko_send_command
+    args:
+      command_string: "show interfaces status"
+```
+
+**Environment-specific configurations:**
+```yaml
+# workflows/deploy.yaml
+workflow:
+  name: "Deploy Configuration"
+  tasks:
+    - blueprint: pre_checks.yaml
+    - name: apply_config
+```
+
+**Modular workflow construction:**
+```yaml
+# workflows/full_audit.yaml
+workflow:
+  name: "Complete Device Audit"
+  tasks:
+    - blueprint: hardware_checks.yaml
+    - blueprint: software_checks.yaml
+    - blueprint: security_checks.yaml
+    - blueprint: compliance_checks.yaml
+    - name: generate_report
+```
+
+## Creating Blueprints
+
+### Blueprint Structure
+
+A blueprint is a YAML file containing only a tasks list. It follows the same rules as the tasks list in a regular workflow YAML/dict. 
+
+This means you can use all available (catalogued) tasks, filters, hooks and jinja2 filters in a blueprint definition. 
+
+```yaml
+# blueprints/network_validation.yaml
+tasks:
+  - name: netmiko_send_command
+    args:
+      command_string: "show ip interface brief"
+    set_to: interfaces
+  
+  - name: netmiko_send_command
+    args:
+      command_string: "show ip route summary"
+    set_to: routes
+  
+  - name: echo
+    args:
+      msg: "Found {{ interfaces | length }} interfaces and {{ routes | length }} routes"
+    if: "{{ 'interfaces' | is_set }}"
+```
+
+**Important:** Blueprints contain ONLY the tasks key. No `workflow`, `name`, `description`, etc.
+
+### Blueprint Discovery
+
+NornFlow automatically discovers blueprints from directories specified in your nornflow.yaml:
+
+```yaml
+# nornflow.yaml
+local_blueprints:
+  - "blueprints"
+  - "shared/blueprints"
+  - "/opt/company/common_blueprints"
+```
+
+**Discovery rules:**
+- Search is **recursive** (includes subdirectories)
+- All `.yaml` and `.yml` files are considered blueprints
+- Both **relative** and **absolute** paths supported
+- Relative paths resolve against the settings file directory
+
+**Directory structure example:**
+```
+my_project/
+├── nornflow.yaml
+└── blueprints/
+    ├── validation.yaml
+    ├── backup/
+    │   ├── full_backup.yaml
+    │   └── config_only.yaml
+    └── security/
+        ├── compliance_checks.yaml
+        └── vulnerability_scan.yaml
+```
+
+### Blueprint Catalog
+
+All discovered blueprints are cataloged by filename (including the extension):
+
+```bash
+# View discovered blueprints
+nornflow show --blueprints
+```
+
+**Catalog naming:**
+- `blueprints/validation.yaml` → `validation.yaml`
+- `blueprints/backup/full_backup.yaml` → `full_backup.yaml`
+- `blueprints/security/compliance_checks.yaml` → `compliance_checks.yaml`
+
+**Name conflicts:** If multiple blueprints have the same filename, the last discovered one wins. Use unique names.
+
+> **NOTE:** *We understand this is somewhat restrictive, but a decision was made to keep things simple here, as it shouldn't be too hard to prevent clashes by using different file names. Future releases of NornFlow may revisit this decision and allow blueprints to be identified in the catalog with a fully qualified name.*
+
+## Using Blueprints in Workflows
+
+### Basic Blueprint Reference
+
+Reference blueprints by name from the catalog:
+
+```yaml
+workflow:
+  name: "Device Maintenance"
+  tasks:
+    - blueprint: validation.yaml
+    - blueprint: backup.yaml
+    - name: perform_maintenance
+    - blueprint: validation.yaml
+```
+Notice the file extension is required, as blueprints are catalogued with their filenames. This means `my_blueprint.yml` and `my_blueprint.yaml` are two different blueprints, since they both use valid but different extensions.
+
+**By path (relative or absolute):**
+
+You can also reference blueprints (that are NOT in the catalog) by using file paths:
+
+```yaml
+workflow:
+  name: "Big Workflow"
+  tasks:
+    # Relative path - resolved against current working directory
+    - blueprint: ./external_blueprints/common_checks.yaml
+    
+    # Absolute path - used as-is
+    - blueprint: /opt/shared/blueprints/corporate_standard.yaml
+    
+    - name: domain_specific_task
+```
+
+> ⚠️ **Important: Understanding Relative Path Resolution**
+> 
+> When using **relative paths** for blueprints (not catalog names), the path is resolved against the **current working directory** where the nornflow command is executed — NOT the workflow file location or the blueprint file location.
+> 
+> In practice, you **SHOULD** always run nornflow commands from your project root directory (where `nornflow.yaml` is located), so relative paths effectively resolve from there.
+> 
+> **BEST PRACTICE:** For blueprints outside your configured `local_blueprints` directories, prefer **absolute paths** to avoid confusion about path resolution.
+
+**Example with uncatalogued blueprints:**
+
+Consider this project structure:
+
+```
+my_project/
+├── nornflow.yaml              # local_blueprints: ["blueprints"]
+├── blueprints/                # Catalogued blueprints
+│   └── standard_checks.yaml
+├── external_blueprints/       # NOT in local_blueprints (not catalogued)
+│   ├── special_audit.yaml
+│   └── vendor_specific.yaml
+└── workflows/
+    └── my_workflow.yaml
+```
+
+In `my_workflow.yaml`:
+
+```yaml
+workflow:
+  name: "Mixed Blueprint Sources"
+  tasks:
+    # From catalog (discovered in blueprints/)
+    - blueprint: standard_checks.yaml
+    
+    # NOT catalogued - must use path
+    # This works IF you run 'nornflow run' from my_project/
+    - blueprint: ./external_blueprints/special_audit.yaml
+    
+    # Absolute path - always works regardless of where command is run
+    - blueprint: /home/user/my_project/external_blueprints/vendor_specific.yaml
+```
+
+**Within uncatalogued blueprints referencing other blueprints:**
+
+When a blueprint references another blueprint using a relative path, that path is ALSO resolved against the current working directory:
+
+```yaml
+# external_blueprints/special_audit.yaml
+tasks:
+  - name: some_task
+  
+  # Relative path resolves from where 'nornflow' was run, NOT from this file's location
+  - blueprint: external_blueprints/vendor_specific.yaml  # ✅ Works from project root
+  
+  # This would NOT work (resolves to ./vendor_specific.yaml from CWD)
+  - blueprint: ./vendor_specific.yaml  # ❌ Won't find the file
+  
+  # Absolute paths always work
+  - blueprint: /home/user/my_project/external_blueprints/vendor_specific.yaml  # ✅ Always works
+```
+
+### Conditional Blueprint Inclusion
+
+Use the `if` condition to include blueprints conditionally:
+
+```yaml
+workflow:
+  name: "Environment-Aware Deployment"
+  vars:
+    environment: "prod"
+    enable_monitoring: true
+  tasks:
+    - blueprint: pre_deployment_checks.yaml
+    
+    - blueprint: prod_validation.yaml
+      if: "{{ environment == 'prod' }}"
+    
+    - blueprint: dev_validation.yaml
+      if: "{{ environment == 'dev' }}"
+    
+    - name: deploy_configuration
+    
+    - blueprint: monitoring_setup.yaml
+      if: "{{ enable_monitoring }}"
+```
+
+**Important:** 
+- The `if` condition is evaluated during assembly-time (workflow loading), not runtime. Only variables available at assembly-time can be used.
+- The `if` field here is NOT an `if` hook, and is processed entirely differently. The key name is the same for consistency, but only direct boolean values or jinja templates are acceptable inputs (Nornir filters are not).
+
+### Nested Blueprints
+
+Blueprints can reference other blueprints:
+
+```yaml
+# blueprints/full_health_check.yaml
+tasks:
+  - blueprint: hardware_checks.yaml
+  - blueprint: software_checks.yaml
+  - blueprint: connectivity_checks.yaml
+```
+
+```yaml
+# blueprints/hardware_checks.yaml
+tasks:
+  - name: netmiko_send_command
+    args:
+      command_string: "show environment"
+  - name: netmiko_send_command
+    args:
+      command_string: "show inventory"
+```
+
+**Maximum nesting depth:** No enforced limit, but circular dependencies are detected and prevented.
+
+### Dynamic Blueprint Selection
+
+Use Jinja2 templates with **assembly-time variables** to dynamically select blueprints:
+
+```yaml
+workflow:
+  name: "Platform-Specific Workflow"
+  vars:
+    platform: "ios"
+    region: "us-east"
+  tasks:
+    - blueprint: "{{ platform }}_validation.yaml"
+    - name: generic_task
+    - blueprint: "{{ region }}_compliance.yaml"
+```
+
+The `platform` and `region` vars above could be passed through other forms too. More about it in the [Variable Resolution in Blueprints](#variable-resolution-in-blueprints) section.
+
+## Variable Resolution in Blueprints
+
+### Assembly-Time vs Runtime
+
+Understanding when variables are resolved is crucial for working with blueprints.
+
+**Assembly-Time (Workflow Loading):**
+- Happens when `WorkflowModel.create()` is called
+- Blueprint `if` conditions are evaluated
+- Dynamic blueprint names are resolved
+- Blueprint references are expanded into actual tasks
+
+**Runtime (Task Execution):**
+- Happens when `nornflow.run()` is called
+- Tasks execute on target devices
+- Task arguments are processed
+- Runtime variables are created/updated
+
+**Critical distinction:** Blueprint expansion happens BEFORE any actual workflow execution, so blueprints cannot access runtime variables.
+
+### Variable Precedence for Blueprints
+
+During assembly-time, blueprints have access to these variable sources (highest to lowest priority):
+
+1. **CLI Variables** (`--vars` option)
+2. **Workflow Variables** (vars in YAML/dict)
+3. **Domain Variables** (`vars/{domain}/defaults.yaml` - *by default*)
+4. **Global Variables** (defaults.yaml  - *by default*)
+5. **Environment Variables** (`NORNFLOW_VAR_*`)
+
+**NOT available at assembly-time:**
+- Runtime variables (set by `set` task or `set_to` hook)
+- Host inventory data (`host.*` namespace)
+
+**Example:**
+
+```yaml
+# vars/defaults.yaml
+backup_enabled: true
+validation_level: "basic"
+
+# workflows/maintenance/daily.yaml
+workflow:
+  name: "Daily Maintenance"
+  vars:
+    validation_level: "thorough"
+  tasks:
+    - blueprint: "validation_{{ validation_level }}.yaml"
+      if: "{{ backup_enabled }}"
+```
+
+With CLI override:
+```bash
+nornflow run daily.yaml --vars "validation_level=minimal,backup_enabled=false"
+```
+
+Result: The blueprint name is resolved to `validation_minimal.yaml` (*validation_level=minimal*), and it is NOT expanded/included in the final workflow (*backup_enabled=false*)
+
+### Using Variables in Blueprint References
+
+Variables can be used in three places when working with blueprints:
+
+**1. Blueprint name/path:**
+```yaml
+tasks:
+  - blueprint: "{{ platform }}_config.yaml"
+  - blueprint: "../{{ region }}/standard_checks.yaml"
+```
+
+**2. Conditional inclusion:**
+```yaml
+tasks:
+  - blueprint: security_scan.yaml
+    if: "{{ security_enabled and environment == 'prod' }}"
+```
+
+**3. Within the blueprint itself:**
+```yaml
+# blueprints/backup.yaml
+tasks:
+  - name: write_file
+    args:
+      filename: "{{ backup_path }}/{{ host.name }}.cfg"
+```
+
+**Variable resolution timing:**
+- Blueprint name/path: Resolved during assembly-time
+- `if` condition: Resolved during assembly-time (ultimately must evaluate to `True`/`False`)
+- Task arguments within blueprint: Resolved during runtime (just like all other directly included tasks in the workflow)
+
+## Blueprint Composition Strategies
+
+**Layered composition:**
+```yaml
+# blueprints/base_validation.yaml
+tasks:
+  - name: netmiko_send_command
+    args:
+      command_string: "show version"
+
+# blueprints/extended_validation.yaml
+tasks:
+  - blueprint: base_validation.yaml
+  - name: netmiko_send_command
+    args:
+      command_string: "show interfaces"
+  - name: netmiko_send_command
+    args:
+      command_string: "show ip route"
+```
+
+**Conditional composition:**
+```yaml
+# blueprints/smart_deployment.yaml
+tasks:
+  - blueprint: pre_checks.yaml
+  
+  - blueprint: maintenance_mode.yaml
+    if: "{{ requires_maintenance }}"
+  
+  - name: apply_configuration
+  
+  - blueprint: exit_maintenance_mode.yaml
+    if: "{{ requires_maintenance }}"
+  
+  - blueprint: post_checks.yaml
+```
+Employing the `smart_deployment.yaml` blueprint example above requires a `requires_maintenance` var to exist in workflow assembly-time (*check [Variable Resolution in Blueprints](#variable-resolution-in-blueprints) again*).
+
+**Platform-specific composition:**
+```yaml
+# workflows/universal_config.yaml
+workflow:
+  name: "Universal Configuration"
+  vars:
+    platform: "ios"
+  tasks:
+    - blueprint: "{{ platform }}_pre_config.yaml"
+    - name: apply_base_config
+    - blueprint: "{{ platform }}_post_config.yaml"
+```
+
+## Blueprint Nesting: Circular vs Repeated Use
+
+Blueprints support arbitrary nesting depth with automatic circular dependency detection. Understanding the distinction between ***circular dependencies*** and ***repeated use*** is critical.
+
+### Recursive Expansion
+
+When blueprints are nested, NornFlow expands them recursively during workflow loading:
+
+```
+Workflow
+  ├── Task A
+  ├── Blueprint X (conditionally included based on {{ env }})
+  │     ├── Task B
+  │     ├── Blueprint Y (conditionally included)
+  │     │     ├── Task C
+  │     │     └── Task D
+  │     └── Task E
+  └── Task F
+
+Expands to: Task A → Task B → Task C → Task D → Task E → Task F
+(assuming all conditions evaluate to true)
+```
+
+### Circular Dependency (INVALID)
+
+A **circular dependency** occurs when a blueprint appears within its own expansion chain, creating an infinite loop:
+
+```yaml
+# blueprints/a.yaml
+tasks:
+  - blueprint: b.yaml
+
+# blueprints/b.yaml
+tasks:
+  - blueprint: c.yaml
+
+# blueprints/c.yaml
+tasks:
+  - blueprint: a.yaml  # ERROR: Circular dependency!
+```
+
+**This will fail with:**
+```
+BlueprintCircularDependencyError: Circular dependency detected: a.yaml → b.yaml → c.yaml → a.yaml
+```
+
+NornFlow tracks the current expansion path and raises an error when it detects a blueprint that's already being expanded in the current chain.
+
+### Repeated Sequential Use (VALID)
+
+**Repeated use** means using the same blueprint multiple times at the same nesting level or in different branches. This is perfectly valid because each reference is expanded independently:
+
+```yaml
+# VALID: Same blueprint used multiple times sequentially
+workflow:
+  name: "Multi-Stage Validation"
+  tasks:
+    - blueprint: health_check.yaml    # Expands completely
+    - name: configure_device
+    - blueprint: health_check.yaml    # Valid: previous expansion finished
+    - name: save_config
+    - blueprint: health_check.yaml    # Valid: can repeat as needed
+```
+
+```yaml
+# VALID: Same blueprint in different branches
+workflow:
+  name: "Environment-Aware Deployment"
+  vars:
+    environment: "prod"
+  tasks:
+    - blueprint: validation.yaml      # Pre-deployment validation
+    - name: deploy_configuration
+    - blueprint: validation.yaml      # Post-deployment validation
+```
+
+```yaml
+# VALID: Same blueprint in nested structure (different paths)
+# blueprints/comprehensive_check.yaml
+tasks:
+  - blueprint: base_check.yaml        # First expansion
+  - name: intermediate_task
+  - blueprint: base_check.yaml        # Valid: not in expansion chain
+
+# blueprints/base_check.yaml
+tasks:
+  - name: netmiko_send_command
+    args:
+      command_string: "show version"
+```
+
+**Why this works:** NornFlow uses a stack-based approach. When a blueprint finishes expanding, it's removed from the expansion stack, allowing legitimate reuse. Only blueprints currently being expanded (on the stack) trigger circular dependency errors.
+
+---
+
+<div align="center">
+  
+## Navigation
+
+<table width="100%" border="0" style="border-collapse: collapse;">
+<tr>
+<td width="33%" align="left" style="border: none;">
+<a href="./core_concepts.md">← Previous: Core Concepts</a>
+</td>
+<td width="33%" align="center" style="border: none;">
+</td>
+<td width="33%" align="right" style="border: none;">
+<a href="./failure_strategies.md">Next: Failure Strategies →</a>
+</td>
+</tr>
+</table>
+
+</div>
diff --git a/docs/core_concepts.md b/docs/core_concepts.md
index 25d8df3..f1ad9c3 100644
--- a/docs/core_concepts.md
+++ b/docs/core_concepts.md
@@ -13,11 +13,13 @@
   - [Task Catalog](#task-catalog)
   - [Workflow Catalog](#workflow-catalog)
   - [Filter Catalog](#filter-catalog)
+  - [Blueprint Catalog](#blueprint-catalog)
   - [Catalog Discovery](#catalog-discovery)
 - [Domains](#domains)
   - [What is a Domain?](#what-is-a-domain)
   - [Domain Variables](#domain-variables)
   - [Multiple Workflow Roots](#multiple-workflow-roots)
+- [Blueprints](#blueprints)
 - [Writing Workflows](#writing-workflows)
   - [Workflow Structure](#workflow-structure)
   - [Task Definition](#task-definition)
@@ -121,7 +123,7 @@ Notice how `Nornir` is the fundamental block where all paths lead to in the abov
 - Serves as the main entry point and controller for the entire system
 - Creates and manages the primary components (WorkflowModel, NornirManager, NornFlowVarsManager)
 - Coordinates the execution flow between components
-- Provides discovery and cataloging of tasks, workflows, and filters
+- Provides discovery and cataloging of tasks, workflows, filters, and blueprints
 - Handles configuration management and variable resolution logic
 
 **WorkflowModel (Pure Data Structure)**
@@ -160,7 +162,7 @@ Notice how `Nornir` is the fundamental block where all paths lead to in the abov
 
 ### Execution Flow
 
-1. **Initialization**: NornFlow loads settings and builds catalogs of tasks, workflows, and filters
+1. **Initialization**: NornFlow loads settings and builds catalogs of tasks, workflows, filters, and blueprints
 2. **Workflow Loading**: A YAML workflow is parsed into a WorkflowModel with nested TaskModel instances
 3. **Component Creation**: NornFlow creates the NornirManager and NornFlowVarsManager
 4. **Inventory Filtering**: NornFlow applies filters through NornirManager to select target devices
@@ -186,6 +188,9 @@ my_project/
 ├── nornflow.yaml           # NornFlow configuration
 ├── nornir_config.yaml      # Nornir configuration
 ├── inventory.yaml          # Device inventory
+├── blueprints/             # Reusable task collections
+│   ├── backup_tasks.yaml
+│   └── validation_tasks.yaml
 ├── workflows/              # Workflow definitions
 │   ├── backup/             # Domain: "backup"
 │   │   └── daily_backup.yaml
@@ -252,32 +257,33 @@ nornflow run --settings nornflow-dev.yaml test_workflow.yaml
 # nornflow-dev.yaml
 nornir_config_file: "configs/nornir-dev.yaml"
 dry_run: true
-local_workflows_dirs: ["workflows", "dev_workflows"]
+local_workflows: ["workflows", "dev_workflows"]
 
 # nornflow-prod.yaml
 nornir_config_file: "configs/nornir-prod.yaml"
 dry_run: false
-local_workflows_dirs: ["workflows"]
-local_tasks_dirs: ["tasks"]
-local_filters_dirs: ["filters"]
-local_hooks_dirs: ["hooks"]
+local_workflows: ["workflows"]
+local_tasks: ["tasks"]
+local_filters: ["filters"]
+local_hooks: ["hooks"]
+local_blueprints: ["blueprints"]
 ```
 
 ## Catalogs
 
-NornFlow automatically discovers and builds catalogs of available tasks, workflows, and filters based on your configuration. These catalogs are central to NornFlow's operation, allowing you to reference tasks and filters by name in your workflows.
+NornFlow automatically discovers and builds catalogs of available tasks, workflows, filters, and blueprints based on your configuration. These catalogs are central to NornFlow's operation, allowing you to reference these NornFlow assets with ease throughout workflows.
 
 ### Task Catalog
 
 The task catalog contains all available Nornir tasks that can be used in workflows. Tasks are discovered from:
 
 1. **Built-in tasks** - Always available (e.g., `echo` & `set`)
-2. **Local directories** - Specified in `local_tasks_dirs` setting
+2. **Local directories** - Specified in `local_tasks` setting
 3. **Imported packages** - *(Planned feature, not yet implemented)*
 
 ```yaml
 # nornflow.yaml
-local_tasks_dirs:
+local_tasks:
   - "tasks"
   - "/shared/network_tasks"
 ```
@@ -296,11 +302,11 @@ def my_task(task: Task, **kwargs) -> Result:
 
 ### Workflow Catalog
 
-The workflow catalog contains all discovered workflow YAML files. Workflows are discovered from directories specified in `local_workflows_dirs`:
+The workflow catalog contains all discovered workflow YAML files. Workflows are discovered from directories specified in `local_workflows`:
 
 ```yaml
 # nornflow.yaml
-local_workflows_dirs:
+local_workflows:
   - "workflows"
   - "../shared_workflows"
 ```
@@ -312,11 +318,11 @@ All files with `.yaml` or `.yml` extensions in these directories (including subd
 The filter catalog contains inventory filter functions that can be used in workflow definitions. Filters are discovered from:
 
 1. **Built-in filters** - currently `hosts` and `groups` filters
-2. **Local directories** - Specified in `local_filters_dirs` setting
+2. **Local directories** - Specified in `local_filters` setting
 
 ```yaml
 # nornflow.yaml
-local_filters_dirs:
+local_filters:
   - "filters"
   - "../custom_filters"
 ```
@@ -331,13 +337,26 @@ def site_filter(host: Host, region: str) -> bool:
     return host.data.get("region") == region
 ```
 
+### Blueprint Catalog
+
+The blueprint catalog contains all discovered blueprint YAML files. Blueprints are discovered from directories specified in `local_blueprints`:
+
+```yaml
+# nornflow.yaml
+local_blueprints:
+  - "blueprints"
+  - "../shared_blueprints"
+```
+
+All files with `.yaml` or `.yml` extensions in these directories (including subdirectories) are considered blueprints.
+
 ### Catalog Discovery
 
 NornFlow performs recursive searches in all configured directories:
 
 - **Automatic discovery** happens during NornFlow initialization
 - **Name conflicts** - NornFlow prevents custom or imported tasks/filters to override built-in ones. However later custom or imported discoveries will override earlier ones. 
-- **View catalogs** - Use `nornflow show --catalogs` to see all discovered items, or specific `--tasks`, `--filters` and `--workflows` options.
+- **View catalogs** - Use `nornflow show --catalogs` to see all discovered items, or specific `--tasks`, `--filters`, `--workflows`, and `--blueprints` options.
 
 **Discovery order:**
 1. Built-in items are loaded first
@@ -373,7 +392,7 @@ When using multiple workflow directories:
 
 ```yaml
 # nornflow.yaml
-local_workflows_dirs:
+local_workflows:
   - "core_workflows"
   - "customer_workflows"
 ```
@@ -383,6 +402,38 @@ Domain resolution:
 - `customer_workflows/backup/custom.yaml` → Domain: "backup" (same domain!)
 - Both share variables from `vars/backup/defaults.yaml`
 
+## Blueprints
+
+Blueprints are reusable collections of tasks that can be referenced within workflows. They enable code reuse, modularity, and maintainability by defining common task sequences once and using them across multiple workflows.
+
+**Key characteristics:**
+- Contain **only** a tasks list (no workflow metadata)
+- Referenced by name or path in workflows
+- Support nesting (blueprints can reference other blueprints)
+- Expanded during workflow loading (assembly-time)
+
+**Basic example:**
+
+```yaml
+# blueprints/pre_checks.yaml
+tasks:
+  - name: netmiko_send_command
+    args:
+      command_string: "show version"
+  - name: netmiko_send_command
+    args:
+      command_string: "show interfaces status"
+
+# workflows/deploy.yaml
+workflow:
+  name: "Deploy Configuration"
+  tasks:
+    - blueprint: pre_checks.yaml
+    - name: apply_config
+```
+
+For comprehensive coverage including conditional inclusion, nested blueprints, dynamic selection, variable resolution, and composition strategies, see the [Blueprints Guide](./blueprints_guide.md).
+
 ## Writing Workflows
 
 ### Workflow Structure
@@ -463,7 +514,7 @@ NornFlow provides powerful and flexible inventory filtering capabilities that de
 - **groups** - List of group names to include (matches any in list)
 
 #### 2. Custom Filter Functions
-NornFlow can use custom filter functions defined by your `local_filters_dirs` setting (configured in nornflow.yaml). These functions provide advanced filtering logic beyond simple attribute matching.
+NornFlow can use custom filter functions defined by your `local_filters` setting (configured in nornflow.yaml). These functions provide advanced filtering logic beyond simple attribute matching.
 
 #### 3. Direct Attribute Filtering
 As it is the case with Nornir, any host attribute can be used as a filter key for simple equality matching:
@@ -514,7 +565,7 @@ inventory_filters:
 ### Creating Custom Filters
 
 Custom filters MUST:
-1. Be defined in python modules within directories specified by `local_filters_dirs`
+1. Be defined in python modules within directories specified by `local_filters`
 2. Contain a `host` keyword as the first parameter
 3. Return a boolean value
 4. Include proper type annotations (for automatic discovery)
@@ -779,7 +830,7 @@ See the full Failure Strategies guide for details.
 <td width="33%" align="center" style="border: none;">
 </td>
 <td width="33%" align="right" style="border: none;">
-<a href="./failure_strategies.md">Next: Failure Strategies →</a>
+<a href="./blueprints_guide.md">Next: Blueprints Guide →</a>
 </td>
 </tr>
 </table>
diff --git a/docs/failure_strategies.md b/docs/failure_strategies.md
index 67ff9b6..824b3e1 100644
--- a/docs/failure_strategies.md
+++ b/docs/failure_strategies.md
@@ -209,7 +209,7 @@ This threading model explains why, even with `fail-fast`, some tasks might compl
 <table width="100%" border="0" style="border-collapse: collapse;">
 <tr>
 <td width="33%" align="left" style="border: none;">
-<a href="./core_concepts.md">← Previous: Core Concepts</a>
+<a href="./blueprints_guide.md">← Previous: Blueprints Guide</a>
 </td>
 <td width="33%" align="center" style="border: none;">
 </td>
diff --git a/docs/hooks_guide.md b/docs/hooks_guide.md
index 61a1dce..28810c4 100644
--- a/docs/hooks_guide.md
+++ b/docs/hooks_guide.md
@@ -1,13 +1,13 @@
-# Hooks Guide
+# NornFlow Hooks Guide
 
 ## Table of Contents
-- [Introduction](#introduction)
-- [What Are Hooks?](#what-are-hooks)
-- [Hook Architecture](#hook-architecture)
-  - [Hooks as Nornir Processors](#hooks-as-nornir-processors)
-  - [Execution Lifecycle](#execution-lifecycle)
-  - [Performance Characteristics](#performance-characteristics)
-- [Built-in Hooks](#built-in-hooks)
+
+- [Overview](#overview)
+- [Hooks as Nornir Processors](#hooks-as-nornir-processors)
+- [Execution Lifecycle](#execution-lifecycle)
+- [Performance Characteristics](#performance-characteristics)
+- [Hook-Driven Template Resolution](#hook-driven-template-resolution)
+- [Built-in Hooks](#nornflows-built-in-hooks)
   - [The `if` Hook](#the-if-hook)
   - [The `set_to` Hook](#the-set_to-hook)
   - [The `shush` Hook](#the-shush-hook)
@@ -21,29 +21,24 @@
   - [Lifecycle Methods](#lifecycle-methods)
   - [Execution Scopes](#execution-scopes)
   - [Context Access](#context-access)
+  - [Jinja2 Template Support](#jinja2-template-support)
   - [Hook Validation](#hook-validation)
   - [Custom Exception Handling](#custom-exception-handling)
 - [Advanced Concepts](#advanced-concepts)
   - [Hook Processor Integration](#hook-processor-integration)
   - [Flyweight Pattern Implementation](#flyweight-pattern-implementation)
 
-## Introduction
-
-Hooks are NornFlow's primary extension mechanism, allowing you to inject custom behavior into task execution without modifying task code. They provide a clean, declarative way to add functionality at specific points in the task lifecycle.
-
-This guide covers everything you need to know about using and creating hooks in NornFlow.
+## Overview
 
-## What Are Hooks?
+Hooks are a powerful extension mechanism provided by NornFlow, allowing you to inject custom behavior into task execution without modifying task code. They provide a clean, declarative way to add functionality at specific points in the task lifecycle.
 
-Hooks are special components that can intercept and modify task execution behavior. They act as "mini processors", implementing Nornir's Processor protocol while providing a simpler, more focused interface for common automation patterns.
+### Key Concepts
 
-**Key characteristics:**
-- **Declarative**: Configure hooks in YAML alongside tasks
-- **Reusable**: Same hook can be used across multiple tasks
-- **Isolated**: Each task gets its own hook context
-- **Powerful**: Full access to task lifecycle events
-- **Selective**: Activate only when configured on specific tasks, and according to implemented logic.
-- **Automatic**: Registration happens automatically when you define a hook class
+- **Hooks are Nornir Processors**: Hooks are implemented as Nornir processors, giving them access to the full task execution lifecycle.
+- **Lifecycle Integration**: Hooks can execute code before, during, and after task execution.
+- **Configuration-Driven**: Hooks are configured in workflow YAML/dict and applied automatically.
+- **Validation**: Hook configurations are validated during workflow preparation.
+- **Context Awareness**: Hooks have access to variables, inventory data, and execution context.
 
 **Potential use cases:**
 - **Task-level orchestration**: Implement setup or teardown logic that runs once per task across all hosts
@@ -64,7 +59,7 @@ Under the hood, hooks are Nornir processors managed by the [`NornFlowHookProcess
 
 1. **Full lifecycle access**: Hooks can react to any point in task execution
 2. **Processor chain integration**: Hooks work alongside other processors
-3. **Performance optimization**: Hook instances are cached and reused (Flyweight pattern)
+3. **Performance optimization**: Hook instances are cached and reused
 
 ### Execution Lifecycle
 
@@ -112,12 +107,41 @@ Hooks can participate in these task lifecycle events:
 
 ### Performance Characteristics
 
-- **Hook instances**: Created ONCE per unique (hook_class, value) pair via Flyweight pattern
+- **Hook instances**: Created ONCE per unique (hook_class, value) pair
 - **Memory usage**: O(unique_hooks) - shared instances across tasks
 - **Thread safety**: Guaranteed via execution context isolation
 - **Registration**: Happens at import time via `__init_subclass__`
 - **Validation**: Happens once per task during workflow preparation
 
+### Hook-Driven Template Resolution
+
+Hook-Driven Template Resolution is a mechanism for optimizing variable template resolution when hooks need to evaluate conditions or perform logic before task args templates are processed. This is an **optional capability** that hooks can opt into via the `requires_deferred_templates` class attribute.
+
+#### How It Works
+
+The system operates in two phases when deferred processing is requested:
+
+**Phase 1 - Pre-Execution Logic:**
+1. The Hook class declares `requires_deferred_templates = True`
+2. `NornFlowVariableProcessor` detects this requirement during `task_instance_started()`
+3. **Task parameter templates** are stored without resolution (e.g., `args: {config: "{{ some_var }}"}`)
+4. **Hook configuration templates**, if any, are resolved using current variable context (if the hook supports jinja2 templates as input - as is the case with the `if` and `shush` hooks, for example)
+5. Hook performs its pre-execution logic using the resolved hook configurations
+
+**Phase 2 - Just-In-Time Resolution:**
+1. After hook logic completes, the hook triggers `resolve_deferred_params()`
+2. `NornFlowVariableProcessor` resolves stored task parameter templates using the current host context
+3. Task executes with fully resolved parameters
+
+#### Mandatory vs Optional
+
+**This feature is completely optional:**
+- Hooks that don't need deferred processing work normally (immediate resolution)
+- Only hooks that declare `requires_deferred_templates = True` trigger deferred mode
+- The processor automatically selects the appropriate strategy based on hook declarations
+
+> **NOTE FOR DEVELOPERS:** Developers writing their own custom Hooks are strongly encouraged to check the code (and included docstrings) in [nornflow/vars/processors.py](../nornflow/vars/processors.py) and [nornflow/builtins/hooks/if_hook.py](../nornflow/builtins/hooks/if_hook.py) for a deeper understanding and a working example of a Hook that fully takes advantage of this feature.
+
 ## NornFlow's Built-in Hooks
 
 NornFlow includes three built-in hooks that demonstrate the framework's capabilities and serve as practical examples for creating your own custom hooks.
@@ -135,16 +159,16 @@ You are encouraged to examine the source code for the `if` Hook [here](../nornfl
 
 #### Configuration Formats
 
-The `if` Hook accepts inputs either as Jinja2 expressions or as Nornir filters in the filters catalogue.
+The `if` Hook accepts inputs either as Jinja2 templates or as Nornir filters in the filters catalogue.
 
 ##### Jinja2 Expression Details
 
 Expressions have access to:
 - `host.*` namespace (Nornir inventory)
-- All NornFlow variables (runtime, CLI, inline, domain, default, env)
-- All Jinja2 filters
+- All NornFlow variables (runtime, CLI, domain, default, env)
+- All Jinja2 filters (both default and NornFlow-provided)
 
-Must evaluate to boolean:
+Users must ensure that the template input evaluates to boolean:
 ```yaml
 # ✅ Valid
 if: "{{ enabled }}"
@@ -155,18 +179,6 @@ if: "{{ count > 5 }}"
 if: "{{ host.name }}"  # Returns string
 if: "{{ vlans }}"      # Returns list
 ```
-**Using the `if` Hook with Jinja2 Expressions**
-```yaml
-tasks:
-  - name: ios_specific_config
-    if: "{{ host.platform == 'ios' }}"
-    
-  - name: backup_if_changed
-    if: "{{ config_changed | default(false) }}"
-    
-  - name: complex_condition
-    if: "{{ host.data.site == 'prod' and maintenance_mode == false }}"
-```
 
 ##### Filter Function Details
 
@@ -176,7 +188,7 @@ The Filter functions must:
 3. Return boolean value
 
 ```python
-# Custom filter example
+# Custom filter hypothetical example
 def platform_filter(host: Host, platform: str) -> bool:
     """Filter hosts by platform."""
     return host.platform == platform
@@ -185,19 +197,45 @@ def platform_filter(host: Host, platform: str) -> bool:
 **Using the `if` Hook with Nornir Filter Functions**
 ```yaml
 tasks:
-  - name: filter_by_platform
-    if:
-      platform_filter: {platform: "ios"}
-      
-  - name: filter_by_site
-    if:
-      site_filter: ["dc1", "dc2"]
-      
-  - name: simple_filter
+  - name: netmiko_send_command
     if:
-      is_production: true
+      platform_filter: "ios" # assuming a 'platform_filter' exists in the catalog
+    args: 
+      command: "show version"
+```
+
+#### How IfHook Uses Hook-Driven Template Resolution
+
+The IfHook leverages Hook-Driven Template Resolution to evaluate conditions before resolving task argument templates, preventing errors on hosts where variables might not exist.
+
+**Declaration:**
+```python
+class IfHook(Hook, Jinja2ResolvableMixin):
+    hook_name = "if"
+    run_once_per_task = False
+    requires_deferred_templates = True  # Enables two-phase processing
 ```
 
+**Usage Flow:**
+1. **Configuration**: User configures `if` condition in workflow YAML
+2. **Declaration Detection**: `NornFlowVariableProcessor` sees `requires_deferred_templates = True`
+3. **Template Storage**: Task parameters with `{{ variables }}` are stored without resolution
+4. **Condition Evaluation**: `IfHook` evaluates the condition using current variable context
+5. **Skip Decision**: Hosts failing the condition get `nornflow_skip_flag` set
+6. **Just-in-Time Resolution**: For passing hosts, `skip_if_condition_flagged` decorator resolves templates via `resolve_deferred_params()` method provided by the `NornFlowVariableProcessor`.
+7. **Task Execution**: Task runs with resolved parameters only on eligible hosts
+
+**Example:**
+```yaml
+tasks:
+  - name: configure_feature
+    if: "{{ host.data.has_feature }}"  # Condition uses host inventory data
+    args:
+      config: "{{ feature_template }}"  # Template uses variable that might not exist on all hosts
+```
+
+Without deferred processing, this would fail on hosts missing `feature_template`. With deferred processing, only hosts that pass the `if` condition have their templates resolved.
+
 ### The `set_to` Hook
 
 You are encouraged to refer to the source code for the `set_to` Hook [here](../nornflow/builtins/hooks/set_to.py), but here is a summary of how it works:
@@ -271,7 +309,7 @@ You are encouraged to refer to the source code for the `shush` Hook [here](../no
 
 1. **task_started**: Checks for compatible processors with `supports_shush_hook` attribute
 2. Evaluates the configured value (boolean or Jinja2 expression)
-3. If the value evaluates to `True`: Marks task in suppression set on Nornir instance
+3. If the value evaluates to `True`: Marks task in suppression set on Nornir instance. It uses a unique key (combining task name and ID) to correctly handle multiple tasks with the same name.
 4. Output processor checks suppression set and skips output display while preserving all data
 5. **task_completed**: Removes task from suppression set after completion
 
@@ -281,11 +319,9 @@ You are encouraged to refer to the source code for the `shush` Hook [here](../no
 ```yaml
 tasks:
   - name: netmiko_send_command
-    shush: true  # Always suppress
-    set_to: backup_result
-    
-  - name: verify_config
-    shush: false  # Never suppress
+    shush: true
+    args:
+      command_string: "show version"
 ```
 
 **Jinja2 Expression (dynamic suppression)**
@@ -336,18 +372,32 @@ processors:
     # ⚠️ shush won't work unless MyCustomProcessor supports it
 ```
 
-To support `shush` in a custom processor:
+To support `shush` in a custom processor, your processor's code would have to include something like this:
 
 ```python
 class MyCustomProcessor(Processor):
+    """Custom processor that supports the shush hook for output suppression."""
+    
     supports_shush_hook = True
     
-    def task_instance_completed(self, task: Task, host: Host, result: MultiResult):
-        if hasattr(task.nornir, '_nornflow_suppressed_tasks'):
-            if task.name in task.nornir._nornflow_suppressed_tasks:
-                return
+    def _is_output_suppressed(self, task: Task) -> bool:
+        if not hasattr(task.nornir, "_nornflow_suppressed_tasks"):
+            return False
+
+        for proc in task.nornir.processors:
+            if hasattr(proc, "task_specific_context"):
+                nornflow_task_model = proc.task_specific_context.get("task_model")
+                return nornflow_task_model.canonical_id in task.nornir._nornflow_suppressed_tasks
+    
+    def task_instance_completed(self, task: Task, host: Host, result: Result) -> None:
+        """Process task completion and handle output suppression."""
+        suppress_output = self._is_output_suppressed(task)
         
-        print(result)
+        if suppress_output:
+            # Skip printing or handle suppressed output (e.g., print a shushed message)
+            print(f"Task '{task.name}' on '{host.name}' output suppressed.")
+        else:
+            ...  # Normal output logic here
 ```
 
 ## Hook Configuration
@@ -436,7 +486,7 @@ class MyHook(Hook):
     ...
 ```
 
-After your hook module is imported (via `local_hooks_dirs`), it's immediately available in workflows:
+After your hook module is imported (via `local_hooks`), it's immediately available in workflows:
 
 ```yaml
 tasks:
@@ -491,7 +541,7 @@ Project Structure:
 ```
 
 **Registration timing:**
-- Hooks are registered **at import time** when the module is loaded
+- Hooks are registered **at import time** when the `Hook.__init_subclass__` mechanism is triggered
 - This happens **before** any workflow execution
 - The `Hook.__init_subclass__` mechanism adds the hook to the global registry immediately
 
@@ -585,8 +635,6 @@ class SimpleValidationHook(Hook):
 
 The base `Hook` class already provides default implementations (empty `pass` statements) for all lifecycle methods, so your custom hook only needs to inherit from `Hook` and set `hook_name`.
 
-Here's a clearer rewrite of the "Execution Scopes" section:
-
 ### Execution Scopes
 
 Control whether your hook executes once per task or independently for each host using the `run_once_per_task` class attribute:
@@ -632,6 +680,235 @@ class MyHook(Hook):
         my_var = vars_manager.get_nornflow_variable("my_var", host.name)
 ```
 
+### Jinja2 Template Support
+
+NornFlow provides an optional `Jinja2ResolvableMixin` that makes it easy to add Jinja2 template support to your custom hooks. This mixin handles all the complexity of detecting Jinja2 expressions, validating them during workflow preparation, resolving them through the variable system at runtime, and converting results to the appropriate type.  
+
+#### When to Use the Mixin
+
+The mixin is **entirely optional** and should only be used when:
+
+1. ✅ **Your hook accepts user-provided values** that could benefit from dynamic resolution
+2. ✅ **You want to support both static values AND Jinja2 expressions** seamlessly
+
+**Do NOT use the mixin when:**
+
+1. ❌ **Your hook should NEVER accept Jinja2 expressions**
+2. ❌ **Your hook has complex custom Jinja2 resolution and/or validation logic** that conflicts with standard Jinja2 resolution provided by the Mixin
+
+#### How the Mixin Works
+
+When you use the mixin, it provides a single method `get_resolved_value()` that:
+
+1. Checks if `self.value` contains Jinja2 markers (`{{`, `{%`, `{#`)
+2. If yes: Resolves the template using NornFlow's variable system
+3. If no: Returns the value as-is
+4. Optionally converts the result to boolean or applies a default
+
+**This means your hook automatically accepts BOTH:**
+- Static values: `my_hook: true`, `my_hook: "static_string"`
+- Jinja2 expressions: `my_hook: "{{ some_variable }}"`, `my_hook: "{{ host.platform == 'ios' }}"`
+
+#### Basic Usage
+
+```python
+from nornir.core.task import Task
+from nornir.core.inventory import Host
+from nornflow.hooks import Hook, Jinja2ResolvableMixin
+
+class MyConditionalHook(Hook, Jinja2ResolvableMixin):
+    """Hook that conditionally executes based on static or dynamic values."""
+    
+    hook_name = "my_hook"
+    run_once_per_task = False
+    
+    def execute_hook_validations(self, task_model: "TaskModel") -> None:
+        super().execute_hook_validations(task_model)
+        # your own custom validations here if any...
+        # otherwise, you don't even need to override this method at all
+    
+    def task_instance_started(self, task: Task, host: Host) -> None:
+        condition = self.get_resolved_value(task, host=host, as_bool=True, default=False)
+        
+        if condition:
+            print(f"Executing for {host.name}")
+```
+
+**YAML usage:**
+```yaml
+tasks:
+  # Static boolean value
+  - name: task1
+    my_hook: true
+  
+  # Jinja2 expression
+  - name: task2
+    my_hook: "{{ enabled and host.platform == 'ios' }}"
+  
+  # Static string (evaluated as boolean)
+  - name: task3
+    my_hook: "yes"  # Truthy string value
+```
+
+#### Automatic Validation
+
+When you use the mixin, **validation happens automatically** during workflow preparation for Jinja2 expressions. The mixin validates that strings containing Jinja2 markers (`{{`, `{%`, `{#`) are properly formatted templates.
+
+**What gets validated by the mixin:**
+- **Jinja2 expressions are validated**: `my_hook: "{{ variable }}"` - Template syntax checked
+- **Plain strings are NOT validated**: `my_hook: "plain text"` - Passed through as-is
+- **Empty strings are NOT validated**: `my_hook: ""` - Treated as falsy value
+- **Non-string values skip validation**: `my_hook: true`, `my_hook: {"key": "value"}` - No checks. Returns as-is for your Hook's own processing logic.
+
+**Individual hooks can add stricter validation** if needed:
+
+```python
+from nornflow.hooks import Hook, Jinja2ResolvableMixin
+from nornflow.hooks.exceptions import HookValidationError
+
+class StrictHook(Hook, Jinja2ResolvableMixin):
+    """Hook that rejects empty strings as meaningless configuration."""
+    
+    hook_name = "strict_hook"
+    
+    def execute_hook_validations(self, task_model: "TaskModel") -> None:
+        super().execute_hook_validations(task_model) # ATTENTION: If you don't call super's execute_hook_validations, you lose all the Mixin's validations.
+        
+        if isinstance(self.value, str) and not self.value.strip():
+            raise HookValidationError(
+                "StrictHook",
+                [("empty_string", f"Task '{task_model.name}': strict_hook value cannot be empty")]
+            )
+```
+
+**Example: The `if` hook adds empty string validation** because an empty condition is meaningless:
+
+```python
+class IfHook(Hook, Jinja2ResolvableMixin):
+    hook_name = "if"
+    
+    def execute_hook_validations(self, task_model: "TaskModel") -> None:
+        super().execute_hook_validations(task_model)
+        
+        if isinstance(self.value, str):
+            if not self.value.strip():
+                raise HookValidationError(
+                    "IfHook",
+                    [("empty_string", f"Task '{task_model.name}': if condition cannot be empty string")]
+                )
+```
+
+**Validation responsibility split:**
+- **Mixin validates**: Jinja2 expression syntax (only when markers present)
+- **Individual hooks validate**: Hook-specific constraints (empty strings, value types, etc.)
+
+The mixin uses **cooperative super() calls**, so it works correctly with multiple inheritance. You are **strongly** encouraged to always call `super().execute_hook_validations(task_model)` first in your validation method.
+
+#### The `get_resolved_value()` Method
+
+```python
+def get_resolved_value(
+    self,
+    task: Task,
+    host: Host | None = None,
+    as_bool: bool = False,
+    default: Any = None
+) -> Any:
+    """Get the final resolved value, handling Jinja2 automatically."""
+    ...
+```
+
+**Parameters:**
+- `task`: The current Nornir task (used to extract host for template resolution)
+- `host`: The specific host for per-host resolution (MUST provide in task_instance_started)
+- `as_bool`: Convert the final result to boolean (useful for conditional hooks)
+- `default`: Fallback value if `self.value` is None or empty
+
+**Return value:**
+- If `self.value` is falsy: Returns `default`
+- If `self.value` contains Jinja2: Resolves template and returns result
+- If `self.value` is static: Returns value as-is
+- If `as_bool=True`: Converts final result to boolean
+
+#### Boolean Conversion
+
+When using `as_bool=True`, the mixin converts values to boolean using NornFlow's standard truthy values:
+
+**Truthy strings** (case-insensitive):
+- `"true"`, `"yes"`, `"1"`, `"on"`, `"y"`, `"t"`, `"enabled"`
+
+**Falsy strings:**
+- Any other string value
+
+**Other types:**
+- Booleans: Returned as-is
+- Other values: Converted using Python's `bool()`
+
+```python
+# All these evaluate to True:
+get_resolved_value(task, as_bool=True)  # if self.value = "yes"
+get_resolved_value(task, as_bool=True)  # if self.value = "{{ 'enabled' }}"
+get_resolved_value(task, as_bool=True)  # if self.value = True
+
+# All these evaluate to False:
+get_resolved_value(task, as_bool=True)  # if self.value = "no"
+get_resolved_value(task, as_bool=True)  # if self.value = "{{ 'disabled' }}"
+get_resolved_value(task, as_bool=True)  # if self.value = False
+```
+
+#### Examples from Built-in Hooks
+
+**The `shush` hook** (see [source](../nornflow/builtins/hooks/shush.py)):
+```python
+class ShushHook(Hook, Jinja2ResolvableMixin):
+    hook_name = "shush"
+    run_once_per_task = True
+    
+    def task_started(self, task: Task) -> None:
+        # Single line to get resolved boolean value
+        should_suppress = self.get_resolved_value(task, as_bool=True, default=False)
+        
+        if should_suppress:
+            # Mark task for suppression
+            ...
+```
+
+**The `if` hook** (see [source](../nornflow/builtins/hooks/if_hook.py)) uses the mixin for Jinja2 expression support:
+```python
+class IfHook(Hook, Jinja2ResolvableMixin):
+    hook_name = "if"
+    run_once_per_task = False
+    
+    def task_instance_started(self, task: Task, host: Host) -> None:
+        if isinstance(self.value, str):
+            should_run = self.get_resolved_value(task, host=host, as_bool=True, default=True)
+        else:
+            should_run = self._evaluate_filter_condition(host)
+        
+        if not should_run:
+            host.data["nornflow_skip_flag"] = True
+```
+
+#### Advanced: Custom Type Conversion
+
+If you need custom type conversion beyond boolean, you can use the mixin's resolution and add your own logic:
+
+```python
+class NumericHook(Hook, Jinja2ResolvableMixin):
+    hook_name = "numeric"
+    
+    def task_instance_started(self, task: Task, host: Host) -> None:
+        raw_value = self.get_resolved_value(task, host=host)
+        
+        try:
+            numeric_value = int(raw_value)
+        except (ValueError, TypeError):
+            numeric_value = 0
+        
+        if numeric_value > 10:
+            ...
+```
+
 ### Hook Validation
 
 Validate configuration during task preparation:
@@ -641,7 +918,6 @@ from nornflow.hooks.exceptions import HookValidationError
 
 class MyHook(Hook):
     def execute_hook_validations(self, task_model: "TaskModel") -> None:
-        """Validate hook configuration."""
         
         if not isinstance(self.value, str):
             raise HookValidationError(
@@ -803,4 +1079,4 @@ This means:
 </tr>
 </table>
 
-</div>
\ No newline at end of file
+</div>
diff --git a/docs/jinja2_filters.md b/docs/jinja2_filters.md
index 2328511..8ad07ca 100644
--- a/docs/jinja2_filters.md
+++ b/docs/jinja2_filters.md
@@ -163,11 +163,41 @@ tasks:
 
 **`random_choice`**
 ```yaml
-# Select random element from list
-{{ ['server1', 'server2', 'server3'] | random_choice }}
-# Result: 'server2' (randomly selected)
+# Pick a random item from a list
+{{ [1, 2, 3, 4, 5] | random_choice }}
 ```
 
+**`is_set`**
+```yaml
+# Check if a variable exists and is not None
+# Useful for conditional logic, including usage in 'if' hooks
+
+# Check simple variable
+{{ 'my_var' | is_set }}
+
+# Check nested variable path
+{{ 'my_var.nested.key' | is_set }}
+
+# Check host attribute
+{{ 'host.platform' | is_set }}
+
+# Usage in 'if' hook
+tasks:
+  - name: backup_config_task
+    if: "{{ 'running_config' | is_set }}"
+```
+
+> **What counts as "set":** The `is_set` filter returns `True` if a variable exists and is not `None`. Empty values like `""`, `[]`, `{}`, and `0` are considered "set" because they are valid assigned values. Only `None` or undefined variables return `False`.
+
+> **Template validation with `if` hooks:** When using `is_set` with the `if` hook, task arguments (`args`) are validated before the `if` condition is evaluated. If your args reference variables that might not exist, the workflow will fail with a template error even if `if` would have been `False`. To handle potentially-missing variables in args, use the `default` filter:
+> ```yaml
+> tasks:
+>   - name: echo
+>     if: "{{ 'optional_var' | is_set }}"
+>     args:
+>       msg: "{{ optional_var | default('fallback value') }}"
+> ```
+
 ## NornFlow Python Wrapper Filters
 
 These provide Python-like functionality not available in standard Jinja2:
@@ -187,10 +217,11 @@ These provide Python-like functionality not available in standard Jinja2:
 | `reversed` | Return list in reverse order | `{{ [1, 2, 3] \| reversed }}` → `[3, 2, 1]` |
 | `strip` | Remove leading and trailing characters | `{{ " text " \| strip }}` → `"text"` |
 | `joinx` | Join iterable with separator | `{{ [1, 2, 3] \| joinx('-') }}` → `"1-2-3"` |
+| `startswith` | Check if string starts with prefix | `{{ "Router-NYC-001" \| startswith("Router") }}` → `true` |
 
 ## Filter Chaining
 
-Filters can be chained for complex transformations:
+Filters can be chained to perform complex transformations:
 
 ```yaml
 # Clean and format interface names
@@ -214,6 +245,9 @@ Filters can be chained for complex transformations:
 
 # Handle missing data gracefully
 {{ host.data.get('vlan_id', 1) | int }}
+
+# Check existence before use (using is_set)
+{{ 'variable_name' | is_set }}
 ```
 
 ### List Processing
diff --git a/docs/nornflow_settings.md b/docs/nornflow_settings.md
index 7c7dc04..121cb90 100644
--- a/docs/nornflow_settings.md
+++ b/docs/nornflow_settings.md
@@ -2,15 +2,19 @@
 
 ## Table of Contents
 - [Finding the Settings File](#finding-the-settings-file)
+- [Environment Variable Support](#environment-variable-support)
 - [Mandatory Settings](#mandatory-settings)
   - [`nornir_config_file`](#nornir_config_file)
 - [Optional Settings](#optional-settings)
-  - [`local_tasks_dirs`](#local_tasks_dirs)
-  - [`local_workflows_dirs`](#local_workflows_dirs)
-  - [`local_filters_dirs`](#local_filters_dirs)
-  - [`local_hooks_dirs`](#local_hooks_dirs)
+  - [`local_tasks`](#local_tasks)
+  - [`local_workflows`](#local_workflows)
+  - [`local_filters`](#local_filters)
+  - [`local_hooks`](#local_hooks)
+  - [`local_blueprints`](#local_blueprints)
   - [`vars_dir`](#vars_dir)
   - [`dry_run`](#dry_run)
+  - [`failure_strategy`](#failure_strategy)
+  - [`processors`](#processors)
   - [`imported_packages`](#imported_packages)
 - [NornFlow Settings vs Nornir Configs](#nornflow-settings-vs-nornir-configs)
 
@@ -24,100 +28,192 @@ NornFlow will try to find a settings YAML file in the following order:
 2. The path passed to the `NornFlowSettings` initializer (through the CLI, it can be done using `nornflow --settings <PATH> ...` option).
 3. The path `nornflow.yaml` in the root of the project.
 
+## Environment Variable Support
+
+All settings can be overridden using environment variables with the `NORNFLOW_SETTINGS_` prefix:
+
+```bash
+# Override nornir_config_file
+export NORNFLOW_SETTINGS_NORNIR_CONFIG_FILE="configs/nornir-prod.yaml"
+
+# Override failure strategy
+export NORNFLOW_SETTINGS_FAILURE_STRATEGY="fail-fast"
+
+# Override list values (JSON format)
+export NORNFLOW_SETTINGS_LOCAL_TASKS='["tasks", "custom_tasks"]'
+
+# Override dry run
+export NORNFLOW_SETTINGS_DRY_RUN=true
+```
+
+**Settings Loading Priority (highest to lowest):**
+1. Environment variables with `NORNFLOW_SETTINGS_` prefix
+2. Values from settings YAML file
+3. Default values defined in the NornFlowSettings class
+
+> **Design Rationale**: NornFlow follows the [12-factor app](https://12factor.net/config) methodology where environment variables take precedence over configuration files for application settings. This allows for deployment-time configuration changes without modifying files, which is especially useful in containerized environments, CI/CD pipelines, and cloud deployments.
+
+Additionally, for certain settings like `dry_run` and `failure_strategy`, there's a **runtime precedence** layer that sits above the settings loading priority:
+
+**Runtime Precedence (for dry_run, failure_strategy, processors):**
+1. CLI flags or NornFlow constructor parameters (highest - explicit runtime intent)
+2. Workflow-level definitions in YAML (workflow-specific configuration)
+3. Settings value (from the loading priority chain above)
+
+This means even if you set `NORNFLOW_SETTINGS_FAILURE_STRATEGY="fail-fast"`, passing `--failure-strategy skip-failed` via CLI will override it, as the CLI represents the most explicit user intent at runtime.
+
 ## Mandatory Settings
 
 ### `nornir_config_file`
 
-- **Description**: Path to Nornir's configuration file.
+- **Description**: Path to Nornir's configuration file. This setting is **required** and must be provided.
 - **Type**: `str`
+- **Required**: **Yes** (mandatory field)
+- **Path Resolution**: When loaded through `NornFlowSettings.load`, relative paths resolve against the settings file directory. Direct instantiation leaves the path untouched, so it resolves relative to the runtime working directory. Absolute paths are used as-is.
 - **Example**:
   ```yaml
   nornir_config_file: "nornir_configs/config.yaml"
   ```
+- **Note**: Can be set via environment variable `NORNFLOW_SETTINGS_NORNIR_CONFIG_FILE`.
 
 ## Optional Settings
 
-### `local_tasks_dirs`
+### `local_tasks`
 
-- **Description**: List of paths to directories containing the Nornir tasks to be included in NornFlow's task catalog. The search is recursive, meaning that all subdirectories will be searched as well. Be careful with this.
+- **Description**: List of paths to directories containing the Nornir tasks to be included in NornFlow's task catalog. The search is recursive, meaning that all subdirectories will be searched as well. Be careful with this. Both absolute and relative paths are supported.
 - **Type**: list[str]
 - **Default**: ["tasks"]
+- **Path Resolution**: 
+  - When loaded through `NornFlowSettings.load`, relative paths resolve against the settings file directory
+  - Direct instantiation leaves relative paths untouched, so they resolve against the runtime working directory
+  - Absolute paths are used as-is
 - **Example**:
   ```yaml
-  local_tasks_dirs:
-    - "tasks"
-    - "shared_tasks"
+  local_tasks:
+    - "tasks"                    # Relative to settings file
+    - "/abs/path/to/tasks"       # Absolute path
+    - "../shared_tasks"          # Relative to settings file
   ```
+- **Environment Variable**: `NORNFLOW_SETTINGS_LOCAL_TASKS`
 
-### `local_workflows_dirs`
+### `local_workflows`
 
-- **Description**: List of paths to directories containing the Nornir workflows to be included in NornFlow's workflow catalog. The search is recursive, meaning that all subdirectories will be searched as well. Be aware that all files with a .yaml or .yml extension will be considered workflows.
+- **Description**: List of paths to directories containing the Nornir workflows to be included in NornFlow's workflow catalog. The search is recursive, meaning that all subdirectories will be searched as well. Be aware that all files with a .yaml or .yml extension will be considered workflows. Both absolute and relative paths are supported.
 - **Type**: list[str]
 - **Default**: ["workflows"]
+- **Path Resolution**: 
+  - When loaded through `NornFlowSettings.load`, relative paths resolve against the settings file directory
+  - Direct instantiation leaves relative paths untouched, so they resolve against the runtime working directory
+  - Absolute paths are used as-is
 - **Example**:
   ```yaml
-  local_workflows_dirs:
+  local_workflows:
     - "workflows"
-    - "shared_workflows"
+    - "/shared/workflows"
   ```
+- **Environment Variable**: `NORNFLOW_SETTINGS_LOCAL_WORKFLOWS`
 
-### `local_filters_dirs`
+### `local_filters`
 
-- **Description**: List of paths to directories containing custom filter functions to be included in NornFlow's filter catalog. These filter functions can be referenced by name in workflow YAML files to perform advanced inventory filtering. The search is recursive, meaning that all subdirectories will be searched as well.
+- **Description**: List of paths to directories containing custom filter functions to be included in NornFlow's filter catalog. These filter functions can be referenced by name in workflow YAML files to perform advanced inventory filtering. The search is recursive, meaning that all subdirectories will be searched as well. Both absolute and relative paths are supported.
 - **Type**: list[str]
 - **Default**: ["filters"]
+- **Path Resolution**: 
+  - When loaded through `NornFlowSettings.load`, relative paths resolve against the settings file directory
+  - Direct instantiation leaves relative paths untouched, so they resolve against the runtime working directory
+  - Absolute paths are used as-is
 - **Example**:
   ```yaml
-  local_filters_dirs:
+  local_filters:
     - "filters"
-    - "custom_filters"
+    - "../custom_filters"
   ```
-- **Note**: For details on how these filters can be used in workflows, see the [Inventory Filtering](./how_to_write_workflows.md#inventory-filtering) section in the Workflows documentation.
+- **Environment Variable**: `NORNFLOW_SETTINGS_LOCAL_FILTERS`
+- **Note**: For details on how these filters can be used in workflows, see the Inventory Filtering section in the Workflows documentation.
 
-### `local_hooks_dirs`
+### `local_hooks`
 
-- **Description**: List of paths to directories containing custom hook implementations to be included in NornFlow's hook registry. Hooks extend task behavior without modifying task code. The search is recursive, meaning that all subdirectories will be searched as well.
+- **Description**: List of paths to directories containing custom hook implementations to be included in NornFlow's hook registry. Hooks extend task behavior without modifying task code. The search is recursive, meaning that all subdirectories will be searched as well. Both absolute and relative paths are supported.
 - **Type**: list[str]
-- **Default**: []
+- **Default**: ["hooks"]
+- **Path Resolution**: 
+  - When loaded through `NornFlowSettings.load`, relative paths resolve against the settings file directory
+  - Direct instantiation leaves relative paths untouched, so they resolve against the runtime working directory
+  - Absolute paths are used as-is
 - **Example**:
   ```yaml
-  local_hooks_dirs:
+  local_hooks:
     - "hooks"
-    - "custom_hooks"
+    - "/shared/custom_hooks"
   ```
-- **Note**: For details on creating custom hooks, see the [Hooks Guide](./hooks_guide.md) documentation.
+- **Environment Variable**: `NORNFLOW_SETTINGS_LOCAL_HOOKS`
+- **Note**: For details on creating custom hooks, see the Hooks Guide documentation.
+
+### `local_blueprints`
+
+- **Description**: List of paths to directories containing blueprint definitions. The search is recursive, meaning all subdirectories will be searched. All files with `.yaml` or `.yml` extensions are considered blueprints. Both absolute and relative paths are supported.
+- **Type**: list[str]
+- **Default**: ["blueprints"]
+- **Path Resolution**:
+  - When loaded through `NornFlowSettings.load`, relative paths resolve against the settings file directory
+  - Direct instantiation leaves relative paths untouched, so they resolve against the runtime working directory
+  - Absolute paths are used as-is
+- **Example**:
+  ```yaml
+  local_blueprints:
+    - "blueprints"
+    - "../shared_blueprints"
+    - "/opt/company/blueprints"
+  ```
+- **Environment Variable**: `NORNFLOW_SETTINGS_LOCAL_BLUEPRINTS`
+- **Note**: Blueprints are expanded during workflow loading (assembly-time) and have access to a subset of the variable system. See the Blueprints Guide for details.
 
 ### `vars_dir`
 
-- **Description**: Path to the directory containing variable files for NornFlow's variable system. This directory will store global variables (`defaults.yaml`) and domain-specific variables.
+- **Description**: Path to the directory containing variable files for NornFlow's variable system. This directory will store global variables (`defaults.yaml`) and domain-specific variables. Both absolute and relative paths are supported.
 - **Type**: `str`
 - **Default**: "vars"
+- **Path Resolution**:
+  - When loaded through `NornFlowSettings.load`, relative paths resolve against the settings file directory
+  - Direct instantiation leaves relative paths untouched, so they resolve against the runtime working directory
+  - Absolute paths are used as-is
 - **Example**:
   ```yaml
   vars_dir: "vars"
+  # Or with absolute path:
+  vars_dir: "/shared/variables"
   ```
-- **Note**: For details on how variables are loaded and their precedence, see the [Variables Basics](./variables_basics.md) documentation.
+- **Note**: For details on how variables are loaded and their precedence, see the Variables Basics documentation.
 
 ### `dry_run`
 
-- **Description**: If set to True, NornFlow will invoke Nornir in dry-run mode.
+- **Description**: If set to True, NornFlow will invoke Nornir in dry-run mode. This setting can be overridden at multiple levels during runtime.
 - **Type**: `bool`
 - **Default**: `False`
+- **Runtime Precedence** (highest to lowest):
+  1. CLI `--dry-run` flag or NornFlow constructor `dry_run` parameter
+  2. Workflow-level `dry_run` setting in workflow YAML
+  3. This settings value (which itself follows: env var > YAML file > default)
 - **Example**:
   ```yaml
   dry_run: True
-  ```  
+  ```
+- **Note**: The runtime precedence means that even if you set `NORNFLOW_SETTINGS_DRY_RUN=true`, passing `--dry-run false` via CLI will override it.
 
 ### `failure_strategy`
 
 - **Description**: Sets NornFlow's behavior when a task fails for a host during the execution of workflows. This setting controls whether NornFlow will skip failed hosts from subsequent tasks, stop execution as soon as possible, or continue running all tasks regardless of failures.
 - **Type**: `str` (one of: "skip-failed", "fail-fast", "run-all")
 - **Default**: "skip-failed"
+- **Runtime Precedence** (highest to lowest):
+  1. CLI `--failure-strategy` flag or NornFlow constructor `failure_strategy` parameter
+  2. Workflow-level `failure_strategy` setting in workflow YAML
+  3. This settings value (which itself follows: env var > YAML file > default)
 - **Example**:
   ```yaml
   failure_strategy: "fail-fast"
   ```
-- **Note**: For details on how failure strategies work, see the [Failure Strategies](./failure_strategies.md) documentation.
+- **Note**: For details on how failure strategies work, see the Failure Strategies documentation.
 
 ### `processors`
 - **Description**: List of Nornir processor configurations to be applied during task/workflow execution. If not provided, NornFlow will default to using only its default processor: `nornflow.builtins.DefaultNornFlowProcessor`.
@@ -135,7 +231,7 @@ NornFlow will try to find a settings YAML file in the following order:
   - `class`: Full Python import path to the processor class
   - `args` (optional): Dictionary of arguments to pass to the processor's `__init__` method
   
-  Processor precedence (highest to lowest):
+  **Runtime Precedence** (highest to lowest):
   1. Processors passed directly to NornFlow constructor
   2. Processors defined in workflow YAML
   3. Processors defined in this settings file
@@ -147,8 +243,8 @@ NornFlow will try to find a settings YAML file in the following order:
 
 - ***Description**: List of Python packages installed in your environment that contain Nornir tasks and filter functions to be included in NornFlow's catalogs.*
 - ***Type**: `list[str]`*
-- ***Default**: `[]`*
-- ***Example**:*
+- ***Default**: `[]`*
+- ***Example**:*
   ```yaml
   imported_packages:
     - "nornir_napalm"
diff --git a/docs/quick_start.md b/docs/quick_start.md
index bd8e702..47b5d75 100644
--- a/docs/quick_start.md
+++ b/docs/quick_start.md
@@ -7,12 +7,14 @@
 - [Running Workflows](#running-workflows)
 - [Working with Real-World Use Case](#working-with-real-world-use-case)
 - [Using Variables](#using-variables)
+- [Using Blueprints](#using-blueprints)
 - [Filtering Inventory](#filtering-inventory)
 - [Useful Commands](#useful-commands)
 
 > **Notes:** 
 > 1. This document is intentionally light on each subject. Much of what's mentioned here (and more) is expanded in the [Core Concepts](./core_concepts.md) documentation.
 > 2. Throughout the whole documentation, we won't go into the details about Nornir's configs and concepts (tasks, inventory, filters, etc). Those are pre-requisites to use NornFlow. You may want to check [Nornir's docs](https://github.com/nornir-automation/nornir).
+
 ## Installation
 
 ```bash
@@ -44,6 +46,7 @@ This creates:
 - 📁 workflows - Holds YAML workflow definitions
 - 📁 filters - Custom Nornir inventory filters
 - 📁 hooks - Custom hook implementations for extending task behavior
+- 📁 blueprints - Reusable task collections
 - 📁 vars - Will contain Global and Domain-specific default variables
 - 📁 nornir_configs - Nornir configuration
 - 📑 nornflow.yaml - NornFlow settings
@@ -54,10 +57,11 @@ This creates:
 nornflow show --catalogs
 ```
 
-You'll see three catalogs:
+You'll see four catalogs:
 - **Tasks**: Individual Nornir tasks, that represent a single automation action.
 - **Workflows**: Sequences of tasks defined in YAML files that describe operations to be executed together.
 - **Filters**: Nornir filters that allow you to select specific devices from the inventory.
+- **Blueprints**: Reusable task collections that can be referenced across workflows.
 
 ## Running Tasks
 
@@ -146,14 +150,16 @@ NornFlow's settings file is created with sensible defaults by running `nornflow
 
 ```yaml
 nornir_config_file: "nornir_configs/config.yaml"
-local_tasks_dirs:
+local_tasks:
   - "tasks"
-local_workflows_dirs:
+local_workflows:
   - "workflows"
-local_filters_dirs:
+local_filters:
   - "filters"
-local_hooks_dirs:
+local_hooks:
   - "hooks"
+local_blueprints:
+  - "blueprints"
 imported_packages: []
 dry_run: False
 failure_strategy: "skip-failed"
@@ -229,6 +235,44 @@ workflow:
 nornflow run vlan_config.yaml --vars "vlan_id=200,vlan_name='WORKSTATIONS'"
 ```
 
+## Using Blueprints
+
+Blueprints are reusable collections of tasks that you can reference across workflows.
+
+### Create a Blueprint
+
+Create `blueprints/network_checks.yaml`:
+
+```yaml
+tasks:
+  - name: netmiko_send_command
+    args:
+      command_string: "show version"
+    set_to: version_output
+  
+  - name: netmiko_send_command
+    args:
+      command_string: "show interfaces status"
+    set_to: interfaces_output
+```
+
+### Use in Workflow
+
+Reference the blueprint in your workflow:
+
+```yaml
+workflow:
+  name: "Device Health Check"
+  tasks:
+    - blueprint: network_checks.yaml
+    - name: write_file
+      args:
+        filename: "reports/{{ host.name }}_health.txt"
+        content: "{{ version_output }}\n{{ interfaces_output }}"
+```
+
+> **Note:** Blueprint references require the file extension (`.yaml` or `.yml`). See the [Blueprints Guide](blueprints_guide.md) for advanced features like conditional inclusion, nesting, and dynamic selection.
+
 ## Filtering Inventory
 
 ### Built-in Filters
@@ -281,13 +325,14 @@ nornflow run service_check --inventory-filters "filter_by_service={'service': 'b
 ## Useful Commands
 
 ```bash
-# Show available tasks, workflows, and filters (catalog)
+# Show available tasks, workflows, filters, and blueprints (catalog)
 nornflow show --catalogs
 
 # Show specific catalogs
 nornflow show --tasks
 nornflow show --filters
 nornflow show --workflows
+nornflow show --blueprints
 
 # Show current NornFlow settings
 nornflow show --settings
@@ -318,4 +363,4 @@ nornflow run my_workflow.yaml --dry-run
 </tr>
 </table>
 
-</div>
+</div>
\ No newline at end of file
diff --git a/docs/variables_basics.md b/docs/variables_basics.md
index 29160b0..27184da 100644
--- a/docs/variables_basics.md
+++ b/docs/variables_basics.md
@@ -2,7 +2,7 @@
 
 ## Table of Contents
 - [Quick Overview](#quick-overview)
-- [Variable Sources](#variable-sources-top-down-priority-order)
+- [Variable Sources (Top-Down Priority Order)](#variable-sources-top-down-priority-order)
 - [Basic Usage](#basic-usage)
   - [1. Environment Variables](#1-environment-variables)
   - [2. Global Variables](#2-global-variables)
@@ -15,6 +15,8 @@
   - [Accessing Host Data](#accessing-host-data)
   - [Important Notes](#important-notes)
 - [Variable Isolation](#variable-isolation)
+- [Assembly-Time vs Runtime](#assembly-time-vs-runtime)
+- [Advanced: Hook-Driven Template Resolution](#advanced-hook-driven-template-resolution)
 - [Best Practices](#best-practices)
 - [Quick Reference](#quick-reference)
 
@@ -27,6 +29,13 @@ NornFlow provides a powerful variable system with two namespaces:
 1. **Default namespace** - Your workflow variables (direct access: `{{ variable_name }}`)
 2. **Host namespace** - Nornir inventory data (prefixed access: `{{ host.variable_name }}`)
 
+Additionally, NornFlow resolves variables in two distinct phases:
+
+- **Assembly-Time** - During workflow loading (used by blueprints for expansion)
+- **Runtime** - During task execution (full variable access)
+
+> **Note:** More on this in [Assembly-Time vs Runtime](#assembly-time-vs-runtime).
+
 ## Variable Sources (Top-Down Priority Order)
 
 Variables come from multiple sources. When the same variable exists in multiple places, the highest priority wins:
@@ -64,12 +73,12 @@ Environment variables are the lowest priority and can be overridden by any other
 
 ### 2. Global Variables
 
-By default, NornFlow looks for global variables in the vars directory, specifically in defaults.yaml.  
+By default, NornFlow looks for global variables in the vars directory, specifically in `defaults.yaml`.  
 
-> **Note:** The vars directory location is set by the `vars_dir` setting in your nornflow.yaml file. NornFlow always looks for global variables in `<vars_dir>/defaults.yaml`. If this file is missing, global variable resolution is skipped.
+> **Note:** The vars directory location is set by the `vars_dir` setting in your `nornflow.yaml` file. NornFlow always looks for global variables in `<vars_dir>/defaults.yaml`. If this file is missing, global variable resolution is skipped.
 
 ```yaml
-# vars/defaults.yaml
+# defaults.yaml
 site_contact: "network-team@company.com"
 backup_server: "10.0.0.100"
 ```
@@ -377,6 +386,25 @@ tasks:
   ```yaml
   message: "VLAN: {{ host.data.management_vlan | default('1') }}"
   ```
+- **Checking variable existence**: Use the `is_set` filter to check if a variable exists before using it:
+  ```yaml
+  # Check if variable exists
+  - name: conditional_task
+    if: "{{ 'backup_path' | is_set }}"
+    args:
+      path: "{{ backup_path }}"
+  
+  # Check if host data exists
+  - name: site_specific_task
+    if: "{{ 'host.data.site_code' | is_set }}"
+    args:
+      site: "{{ host.data.site_code }}"
+  
+  # Combine with default for fallback values
+  - name: echo
+    args:
+      msg: "Site: {{ host.data.site_code if 'host.data.site_code' | is_set else 'UNKNOWN' }}"
+  ```
 
 ## Variable Isolation
 
@@ -388,23 +416,55 @@ Each device maintains its own variable context during workflow execution:
 
 This isolation is managed by NornFlow's `NornFlowDeviceContext` class, which creates separate variable contexts for each device. This ensures that tasks running in parallel don't interfere with each other's variables, even when they're modifying variables with the same names.
 
+## Assembly-Time vs Runtime
+
+NornFlow resolves variables in two distinct phases:
+
+### Assembly-Time (Blueprints)
+
+During workflow loading, blueprints are expanded using a **limited subset** of variables:
+
+**Available:**
+- Environment Variables
+- Global Variables
+- Domain Variables  
+- Workflow Variables
+- CLI Variables
+
+**NOT Available:**
+- Runtime Variables (don't exist yet)
+- Host inventory data (`host.*` namespace)
+
+This allows blueprints to use variables for conditional inclusion and dynamic selection; however, they cannot access runtime data that only exists during execution.
+
+### Runtime (Tasks)
+
+During task execution, **all variables** are available including runtime variables and full host inventory access via the `host.*` namespace.
+
+> **Note:** For comprehensive coverage of blueprint variable resolution including examples and best practices, see the [Blueprints Guide](./blueprints_guide.md).
+
+## Advanced: Hook-Driven Template Resolution
+
+For information on Hook-Driven Template Resolution, which allows deferring variable resolution in task parameters when hooks need to evaluate conditions first, see the [Hooks Guide](hooks_guide.md#hook-driven-template-resolution).
+
 ## Best Practices
 
 1. **Use descriptive names**: *`backup_retention_days`* is a lot better than just *`days`*
 2. **Set defaults**: Use `| default()` filter for optional variables
-3. **Group related variables**: Use domain variables for domain-specific settings
-4. **Document variables**: Add comments in your variable files (`<vars_dir>/defaults.yaml` and `<vars_dir>/<domain>/default.yaml`)
-5. **Avoid name conflicts**: Don't start variable names with *`host`* to avoid confusion with the `host.` namespace
-6. **Use `set_to` extraction for cleaner code**: Extract only the data you need upfront instead of storing complete results
-7. **Leverage Jinja2 filters**: Use filters to transform data, especially when working with complex structures
+3. **Check variable existence**: Use `| is_set` filter to safely check if variables exist before using them
+4. **Group related variables**: Use domain variables for domain-specific settings
+5. **Document variables**: Add comments in your variable files (`<vars_dir>/defaults.yaml` and `<vars_dir>/<domain>/default.yaml`)
+6. **Avoid name conflicts**: Don't start variable names with *`host`* to avoid confusion with the `host.` namespace
+7. **Use `set_to` extraction for cleaner code**: Extract only the data you need upfront instead of storing complete results
+8. **Leverage Jinja2 filters**: Use filters to transform data, especially when working with complex structures
 
 ## Quick Reference
 
 | Variable Type      | Location                        | Example                        | Usage                        |
 |--------------------|---------------------------------|--------------------------------|------------------------------|
 | Environment        | System env                      | `token=abc`                    | `{{ token }}`                |
-| Global             | defaults.yaml                   | `timeout: 30`                  | `{{ timeout }}`              |
-| Domain             | vars/{domain}/defaults.yaml     | `retries: 3`                   | `{{ retries }}`              |
+| Global             | `defaults.yaml`                 | `timeout: 30`                  | `{{ timeout }}`              |
+| Domain             | `vars/{domain}/defaults.yaml`   | `retries: 3`                   | `{{ retries }}`              |
 | Workflow           | In workflow YAML                | `vars: {vlan: 100}`            | `{{ vlan }}`                 |
 | CLI                | Command line                    | `--vars "x=1"`                 | `{{ x }}`                    |
 | Runtime            | Set with `set` task             | `status: "done"`               | `{{ status }}`               |
@@ -412,6 +472,21 @@ This isolation is managed by NornFlow's `NornFlowDeviceContext` class, which cre
 |                    | Set with `set_to` (extraction)  | `set_to: {vendor: "vendor"}`   | `{{ vendor }}`               |
 | Host data          | Nornir Inventory                | `data: {site_code: NYC01}`     | `{{ host.data.site_code }}`  |
 
+**Checking Variable Existence:**
+
+| Check Type         | Syntax                          | Returns                        |
+|--------------------|---------------------------------|--------------------------------|
+| Default namespace  | `{{ 'var_name' \| is_set }}`    | `true` if variable exists      |
+| Host namespace     | `{{ 'host.var_name' \| is_set }}` | `true` if host attribute exists |
+| Host data          | `{{ 'host.data.key' \| is_set }}` | `true` if host data key exists  |
+
+**Variable Context Availability:**
+
+| Context        | Available Variables |
+|----------------|---------------------|
+| Assembly-Time  | Environment, Global, Domain, Workflow, CLI |
+| Runtime        | All the above, plus runtime and `host.*` namespace |
+
 
 <div align="center">
   
diff --git a/nornflow/blueprints/__init__.py b/nornflow/blueprints/__init__.py
new file mode 100644
index 0000000..91daa2a
--- /dev/null
+++ b/nornflow/blueprints/__init__.py
@@ -0,0 +1,6 @@
+"""Blueprint expansion and resolution package for NornFlow."""
+
+from nornflow.blueprints.expander import BlueprintExpander
+from nornflow.blueprints.resolver import BlueprintResolver
+
+__all__ = ["BlueprintExpander", "BlueprintResolver"]
diff --git a/nornflow/blueprints/expander.py b/nornflow/blueprints/expander.py
new file mode 100644
index 0000000..cb3f518
--- /dev/null
+++ b/nornflow/blueprints/expander.py
@@ -0,0 +1,281 @@
+import logging
+from pathlib import Path
+from typing import Any
+
+from pydantic_serdes.utils import load_file_to_dict
+
+from nornflow.blueprints.resolver import BlueprintResolver
+from nornflow.exceptions import BlueprintCircularDependencyError, BlueprintError
+from nornflow.utils import get_file_content_hash
+
+logger = logging.getLogger(__name__)
+
+
+class BlueprintExpander:
+    """Handles recursive blueprint expansion with circular dependency detection.
+
+    This class orchestrates the expansion of blueprint references into actual
+    task definitions, including nested blueprint support and validation.
+    """
+
+    def __init__(self, resolver: BlueprintResolver):
+        """Initialize the expander with a resolver.
+
+        Args:
+            resolver: BlueprintResolver for template resolution and context building.
+        """
+        self.resolver = resolver
+
+    def expand_blueprints(
+        self,
+        tasks: list[dict[str, Any]],
+        blueprints_catalog: dict[str, Path] | None,
+        vars_dir: Path | None,
+        workflow_path: Path | None,
+        workflow_roots: list[str] | None,
+        inline_vars: dict[str, Any] | None,
+        cli_vars: dict[str, Any] | None = None,
+    ) -> list[dict[str, Any]]:
+        """Expand blueprint references in tasks list.
+
+        Args:
+            tasks: List of task dictionaries (may contain blueprint references).
+            blueprints_catalog: Catalog mapping blueprint names to file paths.
+            vars_dir: Directory containing variable files.
+            workflow_path: Path to the workflow file.
+            workflow_roots: List of workflow root directories.
+            inline_vars: Variables defined in the workflow YAML.
+            cli_vars: CLI variables with highest precedence.
+
+        Returns:
+            Expanded list of task dictionaries with blueprints resolved.
+
+        Raises:
+            BlueprintError: If blueprint expansion fails.
+        """
+        if not vars_dir or not workflow_roots:
+            return tasks
+
+        if not blueprints_catalog:
+            blueprints_catalog = {}
+
+        context = self.resolver.build_context(
+            vars_dir=vars_dir,
+            workflow_path=workflow_path,
+            workflow_roots=workflow_roots,
+            inline_workflow_vars=inline_vars,
+            cli_vars=cli_vars,
+        )
+
+        expansion_stack: list[str] = []
+        name_stack: list[str] = []
+        content_cache: dict[str, list[dict[str, Any]]] = {}
+
+        expanded = []
+        for task_dict in tasks:
+            processed_tasks = self._process_task_item(
+                task_dict, blueprints_catalog, context, expansion_stack, name_stack, content_cache
+            )
+            expanded.extend(processed_tasks)
+
+        return expanded
+
+    def _process_task_item(
+        self,
+        task_dict: dict[str, Any],
+        blueprints_catalog: dict[str, Path],
+        context: dict[str, Any],
+        expansion_stack: list[str],
+        name_stack: list[str],
+        content_cache: dict[str, list[dict[str, Any]]],
+    ) -> list[dict[str, Any]]:
+        """Process a single task item, expanding blueprints or returning regular tasks.
+
+        Args:
+            task_dict: Task or blueprint reference dictionary.
+            blueprints_catalog: Catalog mapping blueprint names to file paths.
+            context: Variable context for template resolution.
+            expansion_stack: Stack of content hashes for circular detection.
+            name_stack: Stack of blueprint names for error reporting.
+            content_cache: Cache mapping content hash to parsed tasks.
+
+        Returns:
+            List of task dictionaries.
+        """
+        if "blueprint" not in task_dict:
+            return [task_dict]
+
+        if not self._should_include_blueprint(task_dict, context):
+            return []
+
+        return self._expand_single_blueprint(
+            task_dict, blueprints_catalog, context, expansion_stack, name_stack, content_cache
+        )
+
+    def _should_include_blueprint(self, blueprint_ref: dict[str, Any], context: dict[str, Any]) -> bool:
+        """Check if blueprint should be included based on 'if' condition.
+
+        Args:
+            blueprint_ref: Blueprint reference dictionary.
+            context: Variable context for condition evaluation.
+
+        Returns:
+            True if blueprint should be included, False otherwise.
+        """
+        if "if" not in blueprint_ref:
+            return True
+
+        return self.resolver.evaluate_condition(blueprint_ref["if"], context)
+
+    def _expand_single_blueprint(
+        self,
+        blueprint_ref: dict[str, Any],
+        blueprints_catalog: dict[str, Path],
+        context: dict[str, Any],
+        expansion_stack: list[str],
+        name_stack: list[str],
+        content_cache: dict[str, list[dict[str, Any]]],
+    ) -> list[dict[str, Any]]:
+        """Expand a single blueprint reference.
+
+        Args:
+            blueprint_ref: Blueprint reference dictionary.
+            blueprints_catalog: Catalog mapping blueprint names to file paths.
+            context: Variable context for template resolution.
+            expansion_stack: Stack of content hashes for circular detection.
+            name_stack: Stack of blueprint names for error reporting.
+            content_cache: Cache mapping content hash to parsed tasks.
+
+        Returns:
+            List of expanded task dictionaries.
+
+        Raises:
+            BlueprintError: If blueprint expansion fails.
+            BlueprintCircularDependencyError: If circular dependency detected.
+        """
+        blueprint_name = blueprint_ref.get("blueprint")
+        if not blueprint_name:
+            raise BlueprintError("Blueprint reference missing 'blueprint' field")
+
+        resolved_name = self.resolver.resolve_template(blueprint_name, context)
+        blueprint_path = self._resolve_blueprint_to_path(resolved_name, blueprints_catalog)
+        content_hash = get_file_content_hash(blueprint_path)
+
+        if content_hash in expansion_stack:
+            raise BlueprintCircularDependencyError(blueprint_path.name, name_stack)
+
+        expansion_stack.append(content_hash)
+        name_stack.append(blueprint_path.name)
+        try:
+            if content_hash not in content_cache:
+                content_cache[content_hash] = self._load_blueprint_tasks(blueprint_path)
+
+            blueprint_tasks = content_cache[content_hash]
+
+            expanded = []
+            for task_dict in blueprint_tasks:
+                processed = self._process_task_item(
+                    task_dict, blueprints_catalog, context, expansion_stack, name_stack, content_cache
+                )
+                expanded.extend(processed)
+
+            return expanded
+        finally:
+            expansion_stack.pop()
+            name_stack.pop()
+
+    @staticmethod
+    def _resolve_blueprint_to_path(blueprint_ref: str, blueprints_catalog: dict[str, Path]) -> Path:
+        """Resolve blueprint reference to file path.
+
+        Resolution order:
+        1. Catalog lookup (by name)
+        2. Direct file path (relative or absolute, must include suffix)
+
+        Args:
+            blueprint_ref: Blueprint name or file path.
+            blueprints_catalog: Catalog mapping blueprint names to file paths.
+
+        Returns:
+            Resolved file path.
+
+        Raises:
+            BlueprintError: If blueprint cannot be found.
+        """
+        if blueprint_ref in blueprints_catalog:
+            return blueprints_catalog[blueprint_ref]
+
+        path = Path(blueprint_ref)
+
+        if path.is_absolute() and path.exists():
+            return path
+
+        # Relative to current working directory
+        resolved = Path.cwd() / path
+        if resolved.exists():
+            return resolved
+
+        raise BlueprintError(
+            (
+                "Blueprint not found in catalog or filesystem. "
+                f"Note: relative blueprint paths are resolved against the current "
+                f"working directory ({Path.cwd()})."
+            ),
+            blueprint_name=blueprint_ref,
+            details={
+                "searched_locations": [
+                    f"Catalog: {list(blueprints_catalog.keys())[:5]}...",
+                    str(Path.cwd() / path),
+                ],
+                "current_working_directory": str(Path.cwd()),
+                "note": (
+                    "The provided blueprint reference was interpreted as a path "
+                    "relative to the current working directory. Ensure you run "
+                    "nornflow from the expected directory or provide an absolute path."
+                ),
+            },
+        )
+
+    @staticmethod
+    def _load_blueprint_tasks(blueprint_path: Path) -> list[dict[str, Any]]:
+        """Load and validate blueprint structure from file.
+
+        Args:
+            blueprint_path: Path to the blueprint file.
+
+        Returns:
+            List of task dictionaries from the blueprint.
+
+        Raises:
+            BlueprintError: If blueprint structure is invalid.
+        """
+        try:
+            blueprint_data = load_file_to_dict(blueprint_path)
+        except Exception as e:
+            raise BlueprintError(
+                f"Failed to load blueprint file: {e}",
+                blueprint_name=str(blueprint_path.name),
+                details={"path": str(blueprint_path)},
+            ) from e
+
+        actual_keys = set(blueprint_data.keys())
+
+        if actual_keys != {"tasks"}:
+            raise BlueprintError(
+                f"Blueprint must contain ONLY 'tasks' key, found: {', '.join(sorted(actual_keys))}",
+                blueprint_name=str(blueprint_path.name),
+                details={
+                    "path": str(blueprint_path),
+                    "expected": ["tasks"],
+                    "found": sorted(actual_keys),
+                },
+            )
+
+        if not isinstance(blueprint_data["tasks"], list):
+            raise BlueprintError(
+                f"'tasks' must be a list, got {type(blueprint_data['tasks']).__name__}",
+                blueprint_name=str(blueprint_path.name),
+                details={"path": str(blueprint_path)},
+            )
+
+        return blueprint_data["tasks"]
diff --git a/nornflow/blueprints/resolver.py b/nornflow/blueprints/resolver.py
new file mode 100644
index 0000000..a67c51a
--- /dev/null
+++ b/nornflow/blueprints/resolver.py
@@ -0,0 +1,199 @@
+import logging
+import os
+from pathlib import Path
+from typing import Any
+
+from pydantic_serdes.utils import load_file_to_dict
+
+from nornflow.exceptions import BlueprintError
+from nornflow.vars.constants import DEFAULTS_FILENAME, JINJA2_MARKERS, TRUTHY_STRING_VALUES
+from nornflow.vars.jinja2_utils import Jinja2EnvironmentManager
+
+logger = logging.getLogger(__name__)
+
+
+class BlueprintResolver:
+    """Handles variable context building and template resolution for blueprints.
+
+    This class manages the assembly-time variable context (subset of variables
+    available during workflow loading) and provides Jinja2 template resolution
+    for blueprint references and conditions.
+    """
+
+    def __init__(self, jinja2_manager: Jinja2EnvironmentManager):
+        """Initialize the resolver with a Jinja2 manager.
+
+        Args:
+            jinja2_manager: Manager for Jinja2 template rendering.
+        """
+        self.jinja2_manager = jinja2_manager
+
+    def build_context(
+        self,
+        vars_dir: Path,
+        workflow_path: Path | None,
+        workflow_roots: list[str],
+        inline_workflow_vars: dict[str, Any] | None = None,
+        cli_vars: dict[str, Any] | None = None,
+    ) -> dict[str, Any]:
+        """Build variable context for blueprint resolution.
+
+        Assembly-time variable precedence (lowest to highest):
+        1. Environment Variables (NORNFLOW_VAR_*)
+        2. Default Variables (vars_dir/defaults.yaml)
+        3. Domain-specific Default Variables (vars_dir/{domain}/defaults.yaml)
+        4. Workflow Variables (workflow.vars section)
+        5. CLI Variables (--vars option)
+
+        Args:
+            vars_dir: Base directory containing variable files.
+            workflow_path: Path to the workflow file (None for in-memory).
+            workflow_roots: List of workflow root directories.
+            inline_workflow_vars: Variables from workflow.vars section.
+            cli_vars: CLI variables with highest precedence.
+
+        Returns:
+            Dictionary containing merged variables with proper precedence.
+        """
+        context = {}
+
+        context.update(self._load_env_vars())
+
+        vars_dir_path = Path(vars_dir)
+        defaults_path = vars_dir_path / DEFAULTS_FILENAME
+        if defaults_path.exists():
+            try:
+                context.update(load_file_to_dict(defaults_path))
+            except Exception as e:
+                logger.warning(f"Failed to load defaults file {defaults_path}: {e}")
+
+        if workflow_path:
+            domain_defaults = self._load_domain_defaults(vars_dir_path, workflow_path, workflow_roots)
+            context.update(domain_defaults)
+
+        if inline_workflow_vars:
+            context.update(inline_workflow_vars)
+
+        if cli_vars:
+            context.update(cli_vars)
+
+        return context
+
+    def resolve_template(self, template_str: str, context: dict[str, Any]) -> str:
+        """Resolve a Jinja2 template in blueprint reference.
+
+        Args:
+            template_str: Template string to resolve.
+            context: Variable context for rendering.
+
+        Returns:
+            Resolved template string.
+
+        Raises:
+            BlueprintError: If template has undefined variables or syntax errors.
+        """
+        try:
+            return self.jinja2_manager.render_template(template_str, context, "blueprint reference")
+        except Exception as e:
+            raise BlueprintError(
+                f"Failed to resolve blueprint template: {e}", details={"template": template_str}
+            ) from e
+
+    def evaluate_condition(self, condition: str | bool, context: dict[str, Any]) -> bool:
+        """Evaluate blueprint 'if' condition.
+
+        Handles YAML-parsed booleans, string literals, and Jinja2 expressions.
+
+        Args:
+            condition: Conditional expression to evaluate.
+            context: Variable context for evaluation.
+
+        Returns:
+            Boolean result of condition evaluation.
+
+        Raises:
+            BlueprintError: If condition has undefined variables or syntax errors.
+        """
+        try:
+            if isinstance(condition, bool):
+                return condition
+
+            condition_stripped = condition.strip()
+
+            if not any(marker in condition_stripped for marker in JINJA2_MARKERS):
+                return condition_stripped.lower() in TRUTHY_STRING_VALUES
+
+            template_str = condition_stripped
+
+            result = self.jinja2_manager.render_template(template_str, context, "blueprint condition")
+            return result.lower() in TRUTHY_STRING_VALUES
+        except Exception as e:
+            raise BlueprintError(
+                f"Failed to evaluate blueprint condition: {e}", details={"condition": condition}
+            ) from e
+
+    @staticmethod
+    def _load_env_vars() -> dict[str, Any]:
+        """Load environment variables prefixed with NORNFLOW_VAR_.
+
+        Returns:
+            Dictionary of environment variables with prefix stripped.
+        """
+        env_vars = {}
+        prefix = "NORNFLOW_VAR_"
+        for key, value in os.environ.items():
+            if key.startswith(prefix):
+                var_name = key[len(prefix) :]
+                env_vars[var_name] = value
+        return env_vars
+
+    @staticmethod
+    def _find_domain_for_workflow(workflow_path: Path, workflow_roots: list[str]) -> str | None:
+        """Find the domain directory containing the workflow.
+
+        Args:
+            workflow_path: Path to the workflow file.
+            workflow_roots: List of workflow root directories.
+
+        Returns:
+            Domain name if found, None otherwise.
+        """
+        for root in workflow_roots:
+            root_path = Path(root)
+            if not workflow_path.is_relative_to(root_path):
+                continue
+
+            relative_path = workflow_path.relative_to(root_path)
+            if len(relative_path.parts) > 1:
+                return relative_path.parts[0]
+            break
+
+        return None
+
+    @staticmethod
+    def _load_domain_defaults(
+        vars_dir: Path, workflow_path: Path, workflow_roots: list[str]
+    ) -> dict[str, Any]:
+        """Load domain-specific default variables.
+
+        Args:
+            vars_dir: Base directory containing variable files.
+            workflow_path: Path to the workflow file.
+            workflow_roots: List of workflow root directories.
+
+        Returns:
+            Dictionary of domain-specific default variables.
+        """
+        domain = BlueprintResolver._find_domain_for_workflow(workflow_path, workflow_roots)
+        if not domain:
+            return {}
+
+        domain_defaults_path = vars_dir / domain / DEFAULTS_FILENAME
+        if not domain_defaults_path.exists():
+            return {}
+
+        try:
+            return load_file_to_dict(domain_defaults_path)
+        except Exception as e:
+            logger.warning(f"Failed to load domain defaults from {domain_defaults_path}: {e}")
+            return {}
diff --git a/nornflow/builder.py b/nornflow/builder.py
index c5d4b92..b81954d 100644
--- a/nornflow/builder.py
+++ b/nornflow/builder.py
@@ -37,6 +37,13 @@ class NornFlowBuilder:
         builder = NornFlowBuilder()
         nornflow = builder.with_settings_path('settings.yaml')
                           .with_workflow_path('deploy.yaml')
+                          .build()
+
+        # Advanced configuration
+        builder = NornFlowBuilder()
+        nornflow = builder.with_settings_object(custom_settings)
+                          .with_workflow_name('backup')
+                          .with_processors([{'class': 'CustomProcessor'}])
                           .with_vars({'env': 'prod', 'debug': True})
                           .with_filters({'hosts': ['router1', 'router2']})
                           .build()
@@ -91,7 +98,7 @@ class NornFlowBuilder:
         """
         if not self._settings:
             try:
-                settings_object = NornFlowSettings(settings_file=settings_path)
+                settings_object = NornFlowSettings.load(settings_file=str(settings_path))
                 self._settings = settings_object
             except (SettingsError, ResourceError) as e:
                 raise InitializationError(f"Failed to load settings from '{settings_path}': {e}") from e
@@ -263,6 +270,9 @@ class NornFlowBuilder:
         Returns:
             The constructed NornFlow object with all configurations applied.
         """
+        if not self._settings:
+            self._settings = NornFlowSettings.load()
+
         return NornFlow(
             nornflow_settings=self._settings,
             workflow=self._workflow,
diff --git a/nornflow/builtins/hooks/if_hook.py b/nornflow/builtins/hooks/if_hook.py
index 1885e63..f8d43b3 100644
--- a/nornflow/builtins/hooks/if_hook.py
+++ b/nornflow/builtins/hooks/if_hook.py
@@ -1,3 +1,28 @@
+"""
+NornFlow Conditional Execution Hook
+
+This module implements the IfHook, which provides conditional task execution based on
+filter functions or Jinja2 expressions.
+
+Deferred Template Processing
+============================
+
+The IfHook uses NornFlow's hook-driven template resolution system by declaring:
+    requires_deferred_templates = True
+
+This enables two-phase processing:
+1. **Phase 1**: Evaluate conditions using variable context (templates not yet resolved)
+2. **Phase 2**: Resolve templates just-in-time only for hosts that pass conditions
+
+This prevents template resolution errors for hosts that would be skipped anyway,
+enabling robust conditional workflows with variables that may not exist on all hosts.
+
+Example:
+    Task with "{{ some_var }}" parameter and if: hosts=['host1']
+    - host1: has 'some_var', passes condition → template resolved → task executes
+    - host2: missing 'some_var', fails condition → skipped, no template resolution
+"""
+
 import logging
 from collections.abc import Callable
 from functools import wraps
@@ -6,10 +31,8 @@ from typing import Any, TYPE_CHECKING
 from nornir.core.inventory import Host
 from nornir.core.task import Result, Task
 
-from nornflow.hooks import Hook
+from nornflow.hooks import Hook, Jinja2ResolvableMixin
 from nornflow.hooks.exceptions import HookValidationError
-from nornflow.vars.constants import JINJA2_MARKERS
-from nornflow.vars.exceptions import TemplateError
 
 if TYPE_CHECKING:
     from nornflow.models import TaskModel
@@ -18,24 +41,13 @@ logger = logging.getLogger(__name__)
 
 
 def skip_if_condition_flagged(task_func: Callable) -> Callable:
-    """
-    Decorator that checks for the 'nornflow_skip_flag' in host.data before executing the task.
-
-    If the flag is set to True, returns a skipped Result without executing the original task.
-    Otherwise, executes the original task normally.
+    """Decorator that implements deferred template resolution for conditional execution.
 
-    This decorator is applied dynamically by IfHook when conditions are configured,
-    allowing conditional task execution per host without global overhead.
-
-    Args:
-        task_func: The original Nornir task function to wrap.
-
-    Returns:
-        Wrapped function that checks the skip flag before execution.
+    Checks for skip flag and resolves templates just-in-time for non-skipped hosts.
     """
 
     @wraps(task_func)
-    def wrapper(task: Task) -> Result:
+    def wrapper(task: Task, **kwargs: Any) -> Result:
         if task.host.data.get("nornflow_skip_flag", False):
             # Clean up the flag after use to avoid stale state
             task.host.data.pop("nornflow_skip_flag", None)
@@ -46,12 +58,23 @@ def skip_if_condition_flagged(task_func: Callable) -> Callable:
                 failed=False,
                 skipped=True,
             )
-        return task_func(task)
+
+        resolved_kwargs = kwargs
+        for processor in task.nornir.processors:
+            if hasattr(processor, "resolve_deferred_params"):
+                resolved = processor.resolve_deferred_params(task, task.host)
+                # Use the resolved params if deferred mode was active;
+                # otherwise, fall back to the original kwargs.
+                if resolved is not None:
+                    resolved_kwargs = resolved
+                break
+
+        return task_func(task, **resolved_kwargs)
 
     return wrapper
 
 
-class IfHook(Hook):
+class IfHook(Hook, Jinja2ResolvableMixin):
     """Conditionally execute tasks per host based on filter functions or Jinja2 expressions.
 
     This hook evaluates a condition for each host before task execution.
@@ -60,7 +83,14 @@ class IfHook(Hook):
     1. Filter Functions: Dict-based configuration using registered filter functions
     2. Jinja2 Expressions: String-based boolean expressions evaluated per host
 
-    Filter Function Format (existing):
+    Boolean Semantics (Python-style):
+        - True (or truthy): Task EXECUTES for the host
+        - False (or falsy): Task SKIPS for the host
+
+    This follows Python's standard truthiness rules where True means "proceed"
+    and False means "don't proceed".
+
+    Filter Function Format:
         if:
           filter_name: {param1: value1, param2: value2}
         # OR
@@ -70,71 +100,44 @@ class IfHook(Hook):
         if:
           filter_name: single_value  # single arg
 
-    Jinja2 Expression Format (new):
+    Jinja2 Expression Format:
         if: "{{ host.platform == 'ios' and host.data.site == 'dc1' }}"
         # OR
         if: "{{ some_variable | default(false) }}"
-
-    Filter Functions:
-        Filter functions must be registered in the filters catalog and should
-        accept a Host object as the first parameter, followed by any custom
-        parameters. They must return a boolean:
-        - True: Host passes the condition, task will execute
-        - False: Host fails the condition, task will be skipped
-
-    Jinja2 Expressions:
-        Expressions are resolved using NornFlow's variable system and must
-        evaluate to a boolean value. Have access to:
-        - host.* namespace (Nornir inventory data)
-        - All NornFlow variables (runtime, CLI, inline, domain, default, env)
-        - Jinja2 filters and functions
-
-    Filter Catalog Integration:
-        The hook retrieves filter functions from the filters_catalog in the
-        execution context. This catalog contains both built-in NornFlow filters
-        and user-defined custom filters discovered from local_filters_dirs.
-
-    Error Handling:
-        - HookValidationError: Raised during validation if condition config is invalid
-        - TemplateError: Raised if Jinja2 expression doesn't evaluate to boolean
+        # OR
+        if: false
 
     Attributes:
         hook_name: "if"
         run_once_per_task: False (evaluates independently for each host)
+        requires_deferred_templates: True (enables two-phase template processing)
     """
 
     hook_name = "if"
     run_once_per_task = False
+    requires_deferred_templates = True
 
     def execute_hook_validations(self, task_model: "TaskModel") -> None:
         """Validate condition configuration during task preparation."""
+        super().execute_hook_validations(task_model)
+
         if isinstance(self.value, dict):
-            # Filter function validation
             if len(self.value) != 1:
                 raise HookValidationError("IfHook", [("value_count", "if must specify exactly one filter")])
         elif isinstance(self.value, str):
-            # Jinja2 expression validation - basic check for non-empty string
             if not self.value.strip():
-                raise HookValidationError("IfHook", [("empty_expression", "if expression cannot be empty")])
-            # Ensure expression contains Jinja2 markers to prevent raw Python evaluation
-            if not any(marker in self.value for marker in JINJA2_MARKERS):
                 raise HookValidationError(
-                    "IfHook",
-                    [
-                        (
-                            "invalid_expression",
-                            "if expression must be a valid Jinja2 template (contain {{, {%, {# etc.)",
-                        )
-                    ],
+                    "IfHook", [("empty_string", f"Task '{task_model.name}': if value cannot be empty string")]
                 )
-        else:
+        elif self.value is not None:
             raise HookValidationError(
-                "IfHook", [("value_type", "if value must be a dict (filter) or string (expression)")]
+                "IfHook",
+                [("value_type", "if value must be a dict (Nornir filter) or string (Jinja2 expression)")],
             )
 
     def task_started(self, task: Task) -> None:
-        """Dynamically decorate the task function to enable per-host skipping."""
-        if not self.value:
+        """Apply skip decorator to enable per-host conditional execution."""
+        if self.value is None:
             return
 
         # Apply the skip decorator dynamically to the task function
@@ -145,7 +148,7 @@ class IfHook(Hook):
         logger.debug(f"Applied skip decorator to task '{task.name}' for condition evaluation")
 
     def task_instance_started(self, task: Task, host: Host) -> None:
-        """Evaluate the filter condition and set skip flag if needed."""
+        """Evaluate condition and set skip flag for hosts that fail."""
         if self.value is None:
             return
 
@@ -153,11 +156,10 @@ class IfHook(Hook):
             should_skip = False
 
             if isinstance(self.value, dict):
-                # Filter function evaluation
                 should_skip = not self._evaluate_filter_condition(host)
             else:
-                # Jinja2 expression evaluation
-                should_skip = not self._evaluate_jinja2_condition(host)
+                condition = self.get_resolved_value(task, host=host, as_bool=True, default=True)
+                should_skip = not condition
 
             if should_skip:
                 host.data["nornflow_skip_flag"] = True
@@ -191,27 +193,6 @@ class IfHook(Hook):
 
         return filter_func(host, **filter_kwargs)
 
-    def _evaluate_jinja2_condition(self, host: Host) -> bool:
-        """Evaluate Jinja2 expression condition for the host."""
-        vars_manager = self.context.get("vars_manager")
-        if not vars_manager:
-            raise HookValidationError(
-                "IfHook", [("no_vars_manager", "vars_manager not available for Jinja2 expression evaluation")]
-            )
-
-        # Resolve the Jinja2 expression
-        resolved_expression = vars_manager.resolve_string(self.value, host.name)
-
-        # Evaluate as boolean
-        try:
-            result = bool(eval(resolved_expression))  # noqa: S307 - controlled environment
-        except Exception as e:
-            raise TemplateError(
-                f"Jinja2 expression did not evaluate to a boolean: '{resolved_expression}'"
-            ) from e
-
-        return result
-
     def _build_filter_kwargs(self, param_names: list[str], filter_values: Any) -> dict[str, Any]:
         """Build keyword arguments for the filter function based on value format."""
         if isinstance(filter_values, dict):
@@ -219,7 +200,6 @@ class IfHook(Hook):
 
         if isinstance(filter_values, list):
             if len(param_names) == 1:
-                # Special case: single parameter that expects a list
                 return {param_names[0]: filter_values}
             if len(filter_values) != len(param_names):
                 raise HookValidationError(
diff --git a/nornflow/builtins/hooks/shush.py b/nornflow/builtins/hooks/shush.py
index b299017..ad5357a 100644
--- a/nornflow/builtins/hooks/shush.py
+++ b/nornflow/builtins/hooks/shush.py
@@ -1,11 +1,10 @@
 # ruff: noqa: SLF001, T201
-import re
+from nornir.core.task import AggregatedResult, Task
 
-from nornflow.hooks import Hook
-from nornflow.hooks.exceptions import HookValidationError
+from nornflow.hooks import Hook, Jinja2ResolvableMixin
 
 
-class ShushHook(Hook):
+class ShushHook(Hook, Jinja2ResolvableMixin):
     """Hook to suppress task output printing.
 
     The shush hook allows conditional suppression of task output based on
@@ -23,61 +22,9 @@ class ShushHook(Hook):
     hook_name = "shush"
     run_once_per_task = True
 
-    def __init__(self, value: bool | str | None = None):
-        """Initialize the shush hook.
-
-        Args:
-            value: Boolean, Jinja2 expression string, or None
-        """
-        super().__init__(value)
-        self.is_jinja2_expression = self._detect_jinja2_expression(value)
-
-    def _detect_jinja2_expression(self, value: bool | str | None) -> bool:
-        """Detect if value is a Jinja2 expression.
-
-        Args:
-            value: The value to check
-
-        Returns:
-            True if value contains Jinja2 markers
-        """
-        if not isinstance(value, str):
-            return False
-        jinja2_patterns = [r"\{\{.*?\}\}", r"\{%.*?%\}", r"\{#.*?#\}"]
-        return any(re.search(pattern, value) for pattern in jinja2_patterns)
-
-    def _evaluate_suppression(self, task: "Task") -> bool:
-        """Evaluate whether output should be suppressed.
-
-        Args:
-            task: The Nornir task
-
-        Returns:
-            True if output should be suppressed
-        """
-        if isinstance(self.value, bool):
-            return self.value
-
-        if self.value is None:
-            return False
-
-        if self.is_jinja2_expression:
-            vars_manager = self.context.get("vars_manager")
-            if vars_manager:
-                host = next(iter(task.nornir.inventory.hosts.values()))
-                resolved = vars_manager.resolve_string(self.value, host)
-                return resolved.lower() in ("true", "yes", "1")
-            return False
-
-        return bool(self.value)
-
-    def task_started(self, task: "Task") -> None:
-        """Mark task for output suppression if conditions are met.
-
-        Args:
-            task: The Nornir task
-        """
-        should_suppress = self._evaluate_suppression(task)
+    def task_started(self, task: Task) -> None:
+        """Mark task for output suppression if conditions are met."""
+        should_suppress = self.get_resolved_value(task, host=None, as_bool=True, default=False)
 
         if not should_suppress:
             return
@@ -96,36 +43,12 @@ class ShushHook(Hook):
 
         if not hasattr(task.nornir, "_nornflow_suppressed_tasks"):
             task.nornir._nornflow_suppressed_tasks = set()
-        task.nornir._nornflow_suppressed_tasks.add(task.name)
 
-    def task_completed(self, task: "Task", result: "AggregatedResult") -> None:
-        """Remove task from suppression set after completion.
+        task_model = self.context.get("task_model")
+        task.nornir._nornflow_suppressed_tasks.add(task_model.canonical_id)
 
-        Args:
-            task: The Nornir task
-            result: The aggregated result
-        """
+    def task_completed(self, task: Task, result: AggregatedResult) -> None:
+        """Remove task from suppression set after completion."""
         if hasattr(task.nornir, "_nornflow_suppressed_tasks"):
-            task.nornir._nornflow_suppressed_tasks.discard(task.name)
-
-    def execute_hook_validations(self, task_model: "TaskModel") -> None:
-        """Validate shush hook configuration.
-
-        Args:
-            task_model: The task model being validated
-
-        Raises:
-            HookValidationError: If string value lacks Jinja2 markers
-        """
-        if isinstance(self.value, str) and not self.is_jinja2_expression:
-            raise HookValidationError(
-                hook_class=self.hook_name,
-                errors=[
-                    (
-                        "value",
-                        f"Task '{task_model.name}': 'shush' hook received string value "
-                        f"'{self.value}' without Jinja2 markers. Use boolean values "
-                        f"(true/false) or Jinja2 expressions (e.g., '{{{{ condition }}}}')",
-                    )
-                ],
-            )
+            task_model = self.context.get("task_model")
+            task.nornir._nornflow_suppressed_tasks.discard(task_model.canonical_id)
diff --git a/nornflow/builtins/jinja2_filters/custom_filters.py b/nornflow/builtins/jinja2_filters/custom_filters.py
index d2ccf9b..0d7db2e 100644
--- a/nornflow/builtins/jinja2_filters/custom_filters.py
+++ b/nornflow/builtins/jinja2_filters/custom_filters.py
@@ -5,6 +5,8 @@ import re
 from typing import Any
 
 import jmespath
+from jinja2 import pass_context
+from jinja2.runtime import Context, Undefined
 
 
 def flatten_list(lst: list[Any]) -> list[Any]:
@@ -111,7 +113,141 @@ def random_choice(lst: list[Any]) -> Any:
     return random.choice(lst) if lst else None  # noqa: S311
 
 
-# Registry of custom filters
+def _resolve_from_context(context: Context, key: str) -> tuple[bool, Any]:
+    """Try to resolve a key from Jinja2 context.
+
+    Args:
+        context: The Jinja2 runtime context.
+        key: The key to look up.
+
+    Returns:
+        Tuple of (found, value). If not found, returns (False, None).
+    """
+    try:
+        value = context.resolve(key)
+
+        if isinstance(value, Undefined):
+            return (False, None)
+        return (True, value)
+    except Exception:
+        return (False, None)
+
+
+def _nested_exists(context: Context, path: str) -> bool:
+    """Check if a nested path exists in the Jinja2 context.
+
+    Supports dot-separated paths (e.g., 'data.key.subkey').
+    Uses Jinja2's context.resolve() for the first key, then traverses nested structures.
+    Returns False if any part of the path is missing or inaccessible.
+
+    Args:
+        context: The Jinja2 runtime context.
+        path: Dot-separated path.
+
+    Returns:
+        True if the path exists and the final value is not None, False otherwise.
+    """
+    if not path:
+        return False
+
+    parts = path.split(".")
+
+    found, current = _resolve_from_context(context, parts[0])
+    if not found:
+        return False
+
+    for part in parts[1:]:
+        if current is None:
+            return False
+        if isinstance(current, dict):
+            if part not in current:
+                return False
+            current = current[part]
+        else:
+            try:
+                current = getattr(current, part)
+            except AttributeError:
+                return False
+
+    return current is not None
+
+
+def _nested_exists_in_obj(obj: Any, path: str) -> bool:
+    """Check if a nested path exists in an object or dict.
+
+    Supports dot-separated paths (e.g., 'data.key.subkey').
+    For dicts, uses key access. For objects, uses attribute access.
+    Returns False if any part of the path is missing or inaccessible.
+
+    Args:
+        obj: The object or dict to check.
+        path: Dot-separated path.
+
+    Returns:
+        True if the path exists and the final value is not None, False otherwise.
+    """
+    if not path:
+        return obj is not None
+
+    parts = path.split(".")
+    current = obj
+
+    for part in parts:
+        if current is None:
+            return False
+        if isinstance(current, dict):
+            if part not in current:
+                return False
+            current = current[part]
+        else:
+            try:
+                current = getattr(current, part)
+            except AttributeError:
+                return False
+
+    return current is not None
+
+
+@pass_context
+def is_set(context: Context, value: str) -> bool:
+    """Check if a variable is set (not None/undefined) in the Jinja2 context.
+
+    Supports namespace-aware checking with nested paths using dot notation:
+    - 'x' or 'x.y.z': Checks in the NornFlow default namespace.
+    - 'host.x' or 'host.x.y.z': Checks in the Nornir host namespace.
+
+    Useful with the 'if' hook for conditional task execution based on variable existence.
+
+    Args:
+        context: The Jinja2 runtime context (passed automatically by @pass_context).
+        value: The variable path string (e.g., 'my_var', 'host.platform', 'my_var.nested.key').
+
+    Returns:
+        True if the variable is set and not None, False otherwise.
+
+    Examples:
+        {{ 'my_var' | is_set }}              # True if my_var exists and is not None
+        {{ 'my_var.nested.key' | is_set }}   # True if nested path exists
+        {{ 'host.name' | is_set }}           # True if host.name exists and is not None
+        {{ 'host.data.key' | is_set }}       # True if nested host data exists
+
+        # With 'if' hook:
+        tasks:
+          - name: deploy_config
+            if: "{{ 'backup_completed' | is_set }}"
+    """
+    if not isinstance(value, str):
+        return False
+
+    if value.startswith("host."):
+        path = value[5:]
+        found, host_obj = _resolve_from_context(context, "host")
+        if not found or not host_obj:
+            return False
+        return _nested_exists_in_obj(host_obj, path)
+    return _nested_exists(context, value)
+
+
 CUSTOM_FILTERS = {
     "flatten_list": flatten_list,
     "unique_list": unique_list,
@@ -122,4 +258,5 @@ CUSTOM_FILTERS = {
     "json_query": json_query,
     "deep_merge": deep_merge,
     "random_choice": random_choice,
+    "is_set": is_set,
 }
diff --git a/nornflow/builtins/jinja2_filters/py_wrapper_filters.py b/nornflow/builtins/jinja2_filters/py_wrapper_filters.py
index a6d4649..687c38d 100644
--- a/nornflow/builtins/jinja2_filters/py_wrapper_filters.py
+++ b/nornflow/builtins/jinja2_filters/py_wrapper_filters.py
@@ -69,6 +69,11 @@ def filter_join(sep: str, iterable: Iterable[Any]) -> str:
     return sep.join(str(item) for item in iterable)
 
 
+def filter_startswith(string: str, prefix: str, start: int = 0, end: int | None = None) -> bool:
+    """Check if string starts with prefix, optionally within start and end indices."""
+    return string.startswith(prefix, start, end)
+
+
 # Registry of builtin filters
 PY_WRAPPER_FILTERS = {
     "enumerate": filter_enumerate,
@@ -84,4 +89,5 @@ PY_WRAPPER_FILTERS = {
     "reversed": filter_reversed,
     "strip": filter_strip,
     "joinx": filter_join,
+    "startswith": filter_startswith,
 }
diff --git a/nornflow/builtins/processors/default_processor.py b/nornflow/builtins/processors/default_processor.py
index 0f60785..db16d05 100644
--- a/nornflow/builtins/processors/default_processor.py
+++ b/nornflow/builtins/processors/default_processor.py
@@ -68,10 +68,15 @@ class DefaultNornFlowProcessor(Processor):
         Returns:
             True if output should be suppressed, False otherwise
         """
-        return (
-            hasattr(task.nornir, "_nornflow_suppressed_tasks")
-            and task.name in task.nornir._nornflow_suppressed_tasks
-        )
+        if not hasattr(task.nornir, "_nornflow_suppressed_tasks"):
+            return False
+
+        for proc in task.nornir.processors:
+            if hasattr(proc, "task_specific_context"):
+                nornflow_task_model = proc.task_specific_context.get("task_model")
+                return nornflow_task_model.canonical_id in task.nornir._nornflow_suppressed_tasks
+
+        return False
 
     def _format_task_output(self, result: Result, suppress_output: bool) -> str:
         """Format the output section of a task result.
diff --git a/nornflow/cli/constants.py b/nornflow/cli/constants.py
index 37dac83..148e1af 100644
--- a/nornflow/cli/constants.py
+++ b/nornflow/cli/constants.py
@@ -6,12 +6,7 @@ from nornflow.vars.constants import DEFAULTS_FILENAME
 # Directory where the user is running the CLI from
 CWD = Path.cwd()
 
-# Runtime directories and files (relative to user's current working directory)
-NORNIR_DEFAULT_CONFIG_DIR = CWD / "nornir_configs"
-TASKS_DIR = CWD / "tasks"
-WORKFLOWS_DIR = CWD / "workflows"
-FILTERS_DIR = CWD / "filters"
-HOOKS_DIR = CWD / "hooks"
+# Default location for NornFlow settings file
 NORNFLOW_SETTINGS = CWD / "nornflow.yaml"
 
 # NornFlow's samples directory
@@ -29,7 +24,7 @@ SAMPLE_VARS_FILE = NORNFLOW_SAMPLES_DIR / DEFAULTS_FILENAME
 DESCRIPTION_FIRST_SENTENCE_LENGTH = 100
 
 # Banners
-INIT_BANNER = """ ██████   █████                               ███████████ ████                          
+INIT_BANNER = """\n ██████   █████                               ███████████ ████                          
 ░░██████ ░░███                               ░░███░░░░░░█░░███                          
  ░███░███ ░███   ██████  ████████  ████████   ░███   █ ░  ░███   ██████  █████ ███ █████
  ░███░░███░███  ███░░███░░███░░███░░███░░███  ░███████    ░███  ███░░███░░███ ░███░░███ 
diff --git a/nornflow/cli/entrypoint.py b/nornflow/cli/entrypoint.py
index 054423e..886310f 100644
--- a/nornflow/cli/entrypoint.py
+++ b/nornflow/cli/entrypoint.py
@@ -1,5 +1,3 @@
-import os
-
 import typer
 
 from nornflow.cli import init, run, show
@@ -12,41 +10,13 @@ app = typer.Typer(
 
 def settings_callback(ctx: typer.Context, settings: str | None = None) -> None:
     """
-    Callback function to handle the global --settings option.
-
-    This function sets the settings file path in the Typer context object. If the environment variable
-    'NORNFLOW_SETTINGS' is set, it takes precedence over the --settings option. The function also
-    provides feedback to the user about which settings file will be used.
-
-    Args:
-        ctx (typer.Context): The Typer context object.
-        settings (Optional[str]): The path to the custom settings file provided via the --settings option.
+    Priority order (highest to lowest):
+    1. --settings CLI argument (caller's explicit intent)
+    2. NORNFLOW_SETTINGS environment variable (handled by NornFlowSettings.load)
+    3. Default nornflow.yaml (handled by NornFlowSettings.load)
     """
-    ctx.obj = {"settings": ""}
-
-    # settings will only be set if 'NORNFLOW_SETTINGS' is not set
-    nornflow_settings_file = os.getenv("NORNFLOW_SETTINGS")
-
-    if nornflow_settings_file:
-        typer.secho(
-            "\nBecause env var 'NORNFLOW_SETTINGS' is set, NornFlow will try to use it as a path "
-            "for its settings file.",
-            fg=typer.colors.MAGENTA,
-            bold=True,
-        )
-        typer.secho(
-            "Unset it to make use of '--settings' option, or to fallback to a default 'nornflow.yaml' file.",
-            fg=typer.colors.MAGENTA,
-            bold=True,
-        )
-        typer.secho(
-            f"NORNFLOW_SETTINGS='{nornflow_settings_file}'\n", fg=typer.colors.BRIGHT_YELLOW, bold=True
-        )
-
-        return
-
-    if settings:
-        ctx.obj = {"settings": settings}
+    # Just pass through whatever CLI provided (or empty string)
+    ctx.obj = {"settings": settings if settings else ""}
 
 
 # Add the global option to the main Typer app
diff --git a/nornflow/cli/init.py b/nornflow/cli/init.py
index 1175449..6c7eddc 100644
--- a/nornflow/cli/init.py
+++ b/nornflow/cli/init.py
@@ -6,23 +6,19 @@ import typer
 
 from nornflow import NornFlow, NornFlowBuilder
 from nornflow.cli.constants import (
-    FILTERS_DIR,
     GREET_USER_TASK_FILE,
     HELLO_WORLD_TASK_FILE,
-    HOOKS_DIR,
     INIT_BANNER,
     NORNFLOW_SETTINGS,
-    NORNIR_DEFAULT_CONFIG_DIR,
     SAMPLE_NORNFLOW_FILE,
     SAMPLE_NORNIR_CONFIGS_DIR,
     SAMPLE_VARS_FILE,
     SAMPLE_WORKFLOW_FILE,
-    TASKS_DIR,
-    WORKFLOWS_DIR,
 )
 from nornflow.cli.exceptions import CLIInitError
 from nornflow.cli.show import show_catalog, show_nornflow_settings
 from nornflow.exceptions import NornFlowError
+from nornflow.settings import NornFlowSettings
 
 app = typer.Typer()
 
@@ -30,214 +26,235 @@ app = typer.Typer()
 @app.command()
 def init(ctx: typer.Context) -> None:
     """
-    Initialize NornFlow by setting up the necessary configuration files and directories.
+    Initialize a NornFlow project structure.
+
+    Creates necessary directories and sample files based on settings.
     """
     try:
-        # Setup builder based on context
-        builder = setup_builder(ctx)
-
-        # Display banner and get user confirmation
         if not get_user_confirmation():
             return
 
-        # Setup main directory structure and configuration files
-        setup_directory_structure()
-        setup_nornflow_settings_file(ctx.obj.get("settings"))
+        # Step 1: Copy sample nornflow.yaml settings file (must happen first)
+        settings_file = ctx.obj.get("settings", "")
+        setup_nornflow_settings_file(settings_file)
+
+        # Step 2: Load settings to know what directories to create
+        settings_path = Path(settings_file) if settings_file else NORNFLOW_SETTINGS
+        settings = NornFlowSettings.load(settings_path if settings_path.exists() else None)
+
+        # Step 3: Create nornir configs directory (derived from settings)
+        setup_nornir_configs(settings)
 
-        # Create required directories before building NornFlow to prevent initialization errors
-        create_required_directories()
+        # Step 4: Create all directories from settings BEFORE building NornFlow
+        create_directories_from_settings(settings)
 
-        # Build NornFlow to get settings with vars_dir
+        # Step 5: Build NornFlow from the settings file (now directories exist)
+        builder = setup_builder(ctx)
         nornflow = builder.build()
 
-        # Setup sample content including vars directory from settings
+        # Step 6: Copy sample content to directories
         setup_sample_content(nornflow)
 
-        # Show information about the initialized setup
+        # Step 7: Show info using the real NornFlow object
         show_info_post_init(nornflow)
 
-    except FileNotFoundError as e:
-        CLIInitError(
-            message=f"File not found: {e}",
-            hint="Check that all required template files exist in the installation directory.",
+    except NornFlowError as e:
+        raise CLIInitError(
+            f"Failed to initialize NornFlow project: {e!s}",
+            hint="Ensure you have write permissions and the directory is not already initialized",
             original_exception=e,
-        ).show()
-        raise typer.Exit(code=2)  # noqa: B904
-
-    except PermissionError as e:
-        CLIInitError(
-            message=f"Permission denied: {e}",
-            hint="Check that you have write permissions to the current directory.",
+        ) from e
+    except Exception as e:
+        raise CLIInitError(
+            "An unexpected error occurred during initialization",
+            hint=f"Error details: {e!s}",
             original_exception=e,
-        ).show()
-        raise typer.Exit(code=2)  # noqa: B904
+        ) from e
 
-    except shutil.Error as e:
-        CLIInitError(
-            message=f"Error copying file: {e}",
-            hint="There may be an issue with file permissions or the files already exist.",
-            original_exception=e,
-        ).show()
-        raise typer.Exit(code=2)  # noqa: B904
 
-    except NornFlowError as e:
-        CLIInitError(
-            message=f"NornFlow error: {e}",
-            hint="There was an issue with the NornFlow configuration.",
-            original_exception=e,
-        ).show()
-        raise typer.Exit(code=2)  # noqa: B904
+def get_user_confirmation() -> bool:
+    """Display banner and get user confirmation to proceed."""
+    display_banner()
+    if not typer.confirm("Do you want to continue?", default=True):
+        typer.secho("Initialization cancelled.", fg=typer.colors.YELLOW)
+        return False
+    return True
 
-    except Exception as e:
-        CLIInitError(
-            message=f"Unexpected error during initialization: {e}",
-            hint="This may be a bug. Please report it if the issue persists.",
-            original_exception=e,
-        ).show()
-        raise typer.Exit(code=2)  # noqa: B904
 
+def setup_nornflow_settings_file(settings: str) -> None:
+    """Set up the NornFlow settings file."""
+    if os.getenv("NORNFLOW_SETTINGS"):
+        return
 
-def create_required_directories() -> None:
-    """Create all required directories before NornFlow initialization."""
-    create_directory(TASKS_DIR)
-    create_directory(WORKFLOWS_DIR)
-    create_directory(FILTERS_DIR)
-    create_directory(HOOKS_DIR)
+    target_file = Path(settings) if settings else NORNFLOW_SETTINGS
+    if target_file.exists():
+        typer.secho(
+            f"NornFlow settings file already exists: {target_file}",
+            fg=typer.colors.YELLOW,
+        )
+        return
 
-    # If vars_dir is different from default, it will be created later
-    # based on the actual settings after NornFlow is built
+    shutil.copy(SAMPLE_NORNFLOW_FILE, target_file)
+    typer.secho(
+        f"Created NornFlow settings file: {target_file}",
+        fg=typer.colors.GREEN,
+    )
 
 
 def setup_builder(ctx: typer.Context) -> NornFlowBuilder:
     """Set up and configure the NornFlowBuilder."""
     builder = NornFlowBuilder()
     settings = ctx.obj.get("settings")
-    if settings:
+    if settings and Path(settings).exists():
         builder.with_settings_path(settings)
+    elif NORNFLOW_SETTINGS.exists():
+        builder.with_settings_path(NORNFLOW_SETTINGS)
     return builder
 
 
-def get_user_confirmation() -> bool:
-    """Display banner and get user confirmation to proceed."""
-    display_banner()
-    if not typer.confirm("Do you want to continue?", default=True):
-        typer.secho("Initialization aborted.", fg=typer.colors.RED)
-        return False
-    return True
+def setup_nornir_configs(settings: NornFlowSettings) -> None:
+    """Set up the Nornir configuration directory derived from settings.
 
+    Args:
+        settings: The loaded NornFlowSettings instance.
+    """
+    nornir_config_file = Path(settings.nornir_config_file)
+    nornir_config_dir = nornir_config_file.parent
 
-def setup_directory_structure() -> None:
-    """Set up the main directory structure."""
-    typer.secho(f"NornFlow will be initialized at {NORNIR_DEFAULT_CONFIG_DIR.parent}", fg=typer.colors.GREEN)
+    typer.secho(f"NornFlow will be initialized at {Path.cwd()}", fg=typer.colors.GREEN)
 
-    if create_directory(NORNIR_DEFAULT_CONFIG_DIR):
-        for item in SAMPLE_NORNIR_CONFIGS_DIR.iterdir():
-            if item.is_dir():
-                shutil.copytree(item, NORNIR_DEFAULT_CONFIG_DIR / item.name)
-            else:
-                shutil.copy(item, NORNIR_DEFAULT_CONFIG_DIR / item.name)
+    if nornir_config_dir.exists():
         typer.secho(
-            f"Created a sample 'nornir_configs' directory: {NORNIR_DEFAULT_CONFIG_DIR}",
-            fg=typer.colors.GREEN,
+            f"Nornir configuration directory already exists: {nornir_config_dir}",
+            fg=typer.colors.YELLOW,
         )
+        return
 
+    shutil.copytree(SAMPLE_NORNIR_CONFIGS_DIR, nornir_config_dir)
+    typer.secho(
+        f"Created Nornir configuration directory: {nornir_config_dir}",
+        fg=typer.colors.GREEN,
+    )
 
-def setup_nornflow_settings_file(settings: str) -> None:
-    """Set up the NornFlow settings file."""
-    if not os.getenv("NORNFLOW_SETTINGS"):
-        if not settings and not NORNFLOW_SETTINGS.exists():
-            shutil.copy(SAMPLE_NORNFLOW_FILE, NORNFLOW_SETTINGS)
-            typer.secho(f"Created a sample 'nornflow.yaml': {NORNFLOW_SETTINGS}", fg=typer.colors.GREEN)
-        elif settings:
-            typer.secho(f"Trying to use informed settings file: {settings}", fg=typer.colors.YELLOW)
-        else:
-            typer.secho(f"Settings file already exists: {NORNFLOW_SETTINGS}", fg=typer.colors.YELLOW)
+
+def create_directories_from_settings(settings: NornFlowSettings) -> None:
+    """Create all directories specified in settings.
+
+    This is the single source of truth for directory creation during init.
+    Directories are created based on what's configured in settings, which
+    may be default values or custom paths from the user's nornflow.yaml.
+
+    Args:
+        settings: The loaded NornFlowSettings instance with resolved paths.
+    """
+    for tasks_dir in settings.local_tasks:
+        create_directory(Path(tasks_dir))
+
+    for workflows_dir in settings.local_workflows:
+        create_directory(Path(workflows_dir))
+
+    for filters_dir in settings.local_filters:
+        create_directory(Path(filters_dir))
+
+    for hooks_dir in settings.local_hooks:
+        create_directory(Path(hooks_dir))
+
+    for blueprints_dir in settings.local_blueprints:
+        create_directory(Path(blueprints_dir))
+
+    create_directory(Path(settings.vars_dir))
 
 
 def setup_sample_content(nornflow: NornFlow) -> None:
-    """Set up sample tasks, workflows, filters, and vars directories."""
-    # Copy sample files to the already created directories
-    copy_sample_files_to_dir(
-        TASKS_DIR, [HELLO_WORLD_TASK_FILE, GREET_USER_TASK_FILE], "Created sample tasks in directory: {}"
-    )
+    """Set up sample tasks, workflows, and vars files."""
+    if nornflow.settings.local_tasks:
+        tasks_dir = Path(nornflow.settings.local_tasks[0])
+        copy_sample_files_to_dir(
+            tasks_dir,
+            [HELLO_WORLD_TASK_FILE, GREET_USER_TASK_FILE],
+            created_msg="Created sample tasks in directory: {}",
+            skipped_msg="Sample tasks already exist in directory: {}",
+        )
 
-    copy_sample_files_to_dir(
-        WORKFLOWS_DIR, [SAMPLE_WORKFLOW_FILE], "Created a sample 'hello_world' workflow in directory: {}"
-    )
+    if nornflow.settings.local_workflows:
+        workflows_dir = Path(nornflow.settings.local_workflows[0])
+        copy_sample_files_to_dir(
+            workflows_dir,
+            [SAMPLE_WORKFLOW_FILE],
+            created_msg="Created sample 'hello_world' workflow in directory: {}",
+            skipped_msg="Sample workflow already exists in directory: {}",
+        )
 
-    # Use vars_dir from NornFlow settings instead of hardcoded constant
     vars_dir = Path(nornflow.settings.vars_dir)
-    create_directory_and_copy_sample_files(
-        vars_dir, [SAMPLE_VARS_FILE], "Created a sample 'defaults.yaml' in vars directory: {}"
+    copy_sample_files_to_dir(
+        vars_dir,
+        [SAMPLE_VARS_FILE],
+        created_msg="Created sample 'defaults.yaml' in vars directory: {}",
+        skipped_msg="Sample 'defaults.yaml' already exists in vars directory: {}",
     )
 
 
-def copy_sample_files_to_dir(dir_path: Path, sample_files: list[Path], sample_message: str) -> None:
-    """Copy sample files to an existing directory."""
-    for sample_file in sample_files:
-        shutil.copy(sample_file, dir_path / sample_file.name)
-    typer.secho(sample_message.format(dir_path), fg=typer.colors.GREEN)
-
-
-def create_directory_and_copy_sample_files(
-    dir_path: Path, sample_files: list[Path], sample_message: str
+def copy_sample_files_to_dir(
+    dir_path: Path,
+    sample_files: list[Path],
+    created_msg: str,
+    skipped_msg: str,
 ) -> None:
-    """
-    Create a directory if it doesn't exist and copy sample files into it.
+    """Copy sample files to an existing directory if they don't exist.
 
     Args:
-        dir_path (Path): The path of the directory to create.
-        sample_files (list[Path]): The list of sample files to copy into the directory.
-        sample_message (str): The message to display after copying the sample files.
+        dir_path: Target directory for the sample files.
+        sample_files: List of sample file paths to copy.
+        created_msg: Message to display when files are created (use {} for dir_path).
+        skipped_msg: Message to display when files already exist (use {} for dir_path).
     """
-    if create_directory(dir_path):
-        for sample_file in sample_files:
-            shutil.copy(sample_file, dir_path / sample_file.name)
-        typer.secho(sample_message.format(dir_path), fg=typer.colors.GREEN)
+    files_created = False
+    for sample_file in sample_files:
+        target_file = dir_path / sample_file.name
+        if not target_file.exists():
+            shutil.copy(sample_file, target_file)
+            files_created = True
+
+    if files_created:
+        typer.secho(created_msg.format(dir_path), fg=typer.colors.GREEN)
+    else:
+        typer.secho(skipped_msg.format(dir_path), fg=typer.colors.YELLOW)
 
 
 def create_directory(dir_path: Path) -> bool:
     """Create a directory if it doesn't exist.
 
     Args:
-        dir_path (Path): The path of the directory to create.
+        dir_path: Path to the directory to create.
 
     Returns:
-        bool: True if the directory was created, False if it already existed.
+        True if directory was created, False if it already existed.
     """
-    if not dir_path.exists():
-        dir_path.mkdir(parents=True, exist_ok=True)
-        typer.secho(f"Created directory: {dir_path}", fg=typer.colors.GREEN)
-        return True
-    typer.secho(f"Directory already exists: {dir_path}", fg=typer.colors.YELLOW)
-    return False
+    if dir_path.exists():
+        typer.secho(f"Directory already exists: {dir_path}", fg=typer.colors.YELLOW)
+        return False
+    dir_path.mkdir(parents=True, exist_ok=True)
+    typer.secho(f"Created directory: {dir_path}", fg=typer.colors.GREEN)
+    return True
 
 
 def display_banner() -> None:
-    """
-    Display a banner message with borders.
-    """
-    # Print the ASCII banner first in purple
-    typer.secho(INIT_BANNER, fg=typer.colors.MAGENTA)
-
-    banner_message = (
-        "The 'init' command creates directories, and samples for configs, tasks and\n"
-        "workflows files, all with default values that you can modify as desired.\n"
-        "No customization of 'init' parameters available yet.\n\nDo you want to continue?"
+    """Display the NornFlow initialization banner."""
+    typer.secho(INIT_BANNER, fg=typer.colors.MAGENTA, bold=True)
+    typer.secho(
+        "\n🚀 Welcome to NornFlow initialization! This will set up your project structure.\n",
+        fg=typer.colors.GREEN,
+        bold=True,
     )
-    lines = banner_message.split("\n")
-    max_length = max(len(line) for line in lines)
-    border = "+" + "-" * (max_length + 2) + "+"
-    typer.secho(border, fg=typer.colors.CYAN, bold=True)
-    for line in lines:
-        padded_line = line + " " * (max_length - len(line))
-        typer.secho(f"| {padded_line} |", fg=typer.colors.CYAN, bold=True)
-    typer.secho(border, fg=typer.colors.CYAN, bold=True)
 
 
 def show_info_post_init(nornflow: NornFlow) -> None:
-    """
-    Display all information about NornFlow, equivalent to running 'nornflow show --all'.
-    """
+    """Show information after successful initialization."""
+    typer.secho("\n✨ NornFlow project initialized successfully!\n", fg=typer.colors.GREEN, bold=True)
     show_nornflow_settings(nornflow)
     show_catalog(nornflow)
+    typer.secho("\n📚 Next steps:", fg=typer.colors.CYAN, bold=True)
+    typer.secho("  1. Edit your Nornir configuration files to set up your inventory", fg=typer.colors.WHITE)
+    typer.secho("  2. Create tasks in the configured tasks directories", fg=typer.colors.WHITE)
+    typer.secho("  3. Create workflows in the configured workflows directories", fg=typer.colors.WHITE)
+    typer.secho("  4. Run 'nornflow run <task_or_workflow>' to execute", fg=typer.colors.WHITE)
diff --git a/nornflow/cli/run.py b/nornflow/cli/run.py
index c340098..7586cd3 100644
--- a/nornflow/cli/run.py
+++ b/nornflow/cli/run.py
@@ -296,6 +296,7 @@ def get_nornflow_builder(
     processors: list[dict[str, Any]] | None = None,
     vars: dict[str, Any] | None = None,
     failure_strategy: FailureStrategy | None = None,
+    dry_run: bool = False,
 ) -> NornFlowBuilder:
     """
     Build the workflow using the provided target, arguments, inventory filters, and dry-run option.
@@ -308,6 +309,7 @@ def get_nornflow_builder(
         processors (list): The processor configurations.
         vars (dict): Vars with highest precedence.
         failure_strategy (FailureStrategy): Failure strategy with highest precedence.
+        dry_run (bool): Whether to perform a dry run.
 
     Returns:
         NornFlowBuilder: The builder instance with the configured workflow.
@@ -335,6 +337,10 @@ def get_nornflow_builder(
     if failure_strategy:
         builder.with_failure_strategy(failure_strategy)
 
+    # Add dry_run if specified
+    if dry_run:
+        builder.with_kwargs(dry_run=dry_run)
+
     if any(target.endswith(ext) for ext in NORNFLOW_SUPPORTED_YAML_EXTENSIONS):
         target_path = Path(target)
         if target_path.exists():
@@ -497,12 +503,13 @@ def run(
             parsed_processors,
             parsed_vars,
             parsed_failure_strategy,
+            dry_run,
         )
 
         nornflow = builder.build()
 
         # Capture the exit code from nornflow.run()
-        exit_code = nornflow.run(dry_run=dry_run)
+        exit_code = nornflow.run()
 
         # Exit with the workflow's exit code if non-zero, otherwise return normally
         if exit_code != 0:
diff --git a/nornflow/cli/samples/nornflow.yaml b/nornflow/cli/samples/nornflow.yaml
index d732119..f45aaa1 100644
--- a/nornflow/cli/samples/nornflow.yaml
+++ b/nornflow/cli/samples/nornflow.yaml
@@ -4,104 +4,43 @@
 # 1. The path specified in the environment variable 'NORNFLOW_SETTINGS'
 # 2. The path passed to NornFlowSettings initializer (meaning using the 'nornflow --settings <path> ...')
 # 3. The path 'nornflow.yaml' in the root of the project
+#
+# For detailed documentation on all available settings, see:
+# https://github.com/nornir-automation/nornflow/blob/main/docs/nornflow_settings.md
 
 ######################
 # MANDATORY SETTINGS #
 ######################
 
-# Path for Nornir's config file
 nornir_config_file: "nornir_configs/config.yaml"
 
 ######################
 # OPTIONAL SETTINGS  #
 ######################
 
-# List of paths to directories containing the Nornir tasks to be included in NornFlow's task_catalog. It
-# defaults to a 'tasks' directory in the root of the project. The search is recursive, meaning that all 
-# subdirectories will be searched as well.
-local_tasks_dirs:
+local_tasks:
   - "tasks"
 
-# List of paths to directories containing the Nornir workflows to be included in NornFlow's workflow_catalog. It
-# defaults to a 'workflows' directory in the root of the project. The search is recursive, meaning that 
-# all subdirectories will be searched as well. Be aware that all files with a .yaml or .yml extension will be
-# considered as workflows.
-local_workflows_dirs:
+local_workflows:
   - "workflows"
 
-# List of paths to directories containing the Nornir filters to be included in NornFlow's filter_catalog. It
-# defaults to a 'filters' directory in the root of the project. The search is recursive, meaning that all
-# subdirectories will be searched as well.
-local_filters_dirs:
+local_filters:
   - "filters"
 
-# List of paths to directories containing custom NornFlow Hook classes. It defaults to a 'hooks' directory in
-# the root of the project. NonrFlow will just make sure to import every .py file recursevely found in each
-# directory listed here. This is to trigger the Hook classes registration.
-local_hooks_dirs:
+local_hooks:
   - "hooks"
 
-# List of python packages installed to your environment that contain Nornir tasks and Filter functions to be included in NornFlow's catalogs.
-imported_packages: []
-
-# If set to True, NornFlow will invoke Nornir in dry-run mode
-dry_run: False
+local_blueprints:
+  - "blueprints"
 
-# Sets NornFlow's behavior when a task fails for a host during the execution of workflows.
-# This setting can be overridden by CLI arguments (--failure-strategy) or workflow-specific definitions.
-#
-# FAILURE STRATEGY PRECEDENCE (highest to lowest priority):
-# 1. CLI arguments (via --failure-strategy option)
-# 2. Workflow-specific failure strategy defined in the workflow YAML file
-# 3. Global failure strategy defined in this nornflow.yaml settings file
-# 4. Default failure strategy ("skip-failed")
-#
-# Options:
-# - skip-failed (default): When a task fails for a host, skip that host in all subsequent tasks within the workflow.
-# - fail-fast: Halt workflow execution immediately upon detecting a task failure. Due to Nornir's multithreading nature,
-#   ongoing threads may complete before NornFlow fully stops execution.
-# - run-all: Execute all tasks for all hosts, including those that previously failed, until the entire workflow completes.
-failure_strategy: "skip-failed"
+imported_packages: []
 
-# List of Nornir processors to be applied during task/workflow execution.
-# If not provided, NornFlow will default to using only the DefaultNornFlowProcessor
-# which takes no arguments.
-#
-# Structure:
-#   - class: Full python dotted path to the processor class
-#     args: An optional dictionary of keyword arguments passed to 
-#           the processor constructor, if it accepts any.
-#
-# IMPORTANT: If you specify custom processors, the DefaultNornFlowProcessor will NOT
-# be automatically included - you must explicitly add it if you still want its functionality.
-#
-# PROCESSOR CONFIGURATION PRECEDENCE (highest to lowest priority):
-# 1. CLI arguments (via --processors or -p option)
-# 2. Workflow-specific processors defined in the workflow YAML file
-# 3. Global processors defined in this nornflow.yaml settings file
-# 4. DefaultNornFlowProcessor (if no other processors are specified)
-#
-# CLI USAGE:
-# Processors can be specified via command line using the --processors or -p option:
-#   - Single processor: --processors "class='package.ProcessorClass',args={'key':'value'}"
-#   - Multiple processors: --processors "class='package.Processor1',args={};class='package.Processor2',args={'key':'value'}"
-#
 processors:
   - class: "nornflow.builtins.DefaultNornFlowProcessor"
     args: {}
 
-# Path to the directory containing variable files for NornFlow's templating system.
-# This directory is used to store variables at different scopes:
-#   - Global variables in "{vars_dir}/globals.yaml"
-#   - Domain-specific variables in "{vars_dir}/{domain}/globals.yaml"
-#
-# Variable resolution follows this precedence (highest to lowest):
-# 1. CLI variables (passed with --vars, there is no '-v' shortcut for this option)
-# 2. Workflow-specific variables (from workflow YAML or paired _vars.yaml file)
-# 3. Domain-specific global variables (from vars_dir/<domain>/globals.yaml)
-# 4. Global variables (from vars_dir/globals.yaml)
-# 5. Nornir inventory variables
-# 6. Environment variables (with NORNFLOW_VAR_ prefix)
-#
-# If not specified, defaults to "vars" in the project root directory.
-vars_dir: "vars"
\ No newline at end of file
+vars_dir: "vars"
+
+failure_strategy: "skip-failed"
+
+dry_run: false
diff --git a/nornflow/cli/show.py b/nornflow/cli/show.py
index d9327be..6aaab59 100644
--- a/nornflow/cli/show.py
+++ b/nornflow/cli/show.py
@@ -29,11 +29,12 @@ def show(  # noqa: PLR0912
         hidden=True,
     ),
     catalogs: bool = typer.Option(
-        False, "--catalogs", help="Display all catalogs: tasks, filters, and workflows"
+        False, "--catalogs", help="Display all catalogs: tasks, filters, workflows, and blueprints"
     ),
     tasks: bool = typer.Option(False, "--tasks", "-t", help="Display the task catalog"),
     filters: bool = typer.Option(False, "--filters", "-f", help="Display the filter catalog"),
     workflows: bool = typer.Option(False, "--workflows", "-w", help="Display the workflow catalog"),
+    blueprints: bool = typer.Option(False, "--blueprints", "-b", help="Display the blueprint catalog"),
     settings: bool = typer.Option(False, "--settings", "-s", help="Display current NornFlow Settings"),
     nornir_configs: bool = typer.Option(
         False, "--nornir-configs", "-n", help="Display current Nornir Configs"
@@ -45,10 +46,10 @@ def show(  # noqa: PLR0912
     """
     show_all_catalogs = catalog or catalogs
 
-    if not any([show_all_catalogs, tasks, filters, workflows, settings, nornir_configs, all]):
+    if not any([show_all_catalogs, tasks, filters, workflows, blueprints, settings, nornir_configs, all]):
         raise typer.BadParameter(
             "You must provide at least one option: --catalogs, --tasks, --filters, --workflows, "
-            "--settings, --nornir-configs, or --all."
+            "--blueprints, --settings, --nornir-configs, or --all."
         )
 
     try:
@@ -64,6 +65,7 @@ def show(  # noqa: PLR0912
             show_tasks_catalog(nornflow)
             show_filters_catalog(nornflow)
             show_workflows_catalog(nornflow)
+            show_blueprints_catalog(nornflow)
             show_nornflow_settings(nornflow)
             show_nornir_configs(nornflow)
         else:
@@ -71,6 +73,7 @@ def show(  # noqa: PLR0912
                 show_tasks_catalog(nornflow)
                 show_filters_catalog(nornflow)
                 show_workflows_catalog(nornflow)
+                show_blueprints_catalog(nornflow)
             else:
                 if tasks:
                     show_tasks_catalog(nornflow)
@@ -78,6 +81,8 @@ def show(  # noqa: PLR0912
                     show_filters_catalog(nornflow)
                 if workflows:
                     show_workflows_catalog(nornflow)
+                if blueprints:
+                    show_blueprints_catalog(nornflow)
 
             if settings:
                 show_nornflow_settings(nornflow)
@@ -126,10 +131,11 @@ def show(  # noqa: PLR0912
 
 
 def show_catalog(nornflow: "NornFlow") -> None:
-    """Display all catalogs: tasks, filters, and workflows."""
+    """Display all catalogs: tasks, filters, workflows, and blueprints."""
     show_tasks_catalog(nornflow)
     show_filters_catalog(nornflow)
     show_workflows_catalog(nornflow)
+    show_blueprints_catalog(nornflow)
 
 
 def show_tasks_catalog(nornflow: "NornFlow") -> None:
@@ -162,6 +168,16 @@ def show_workflows_catalog(nornflow: "NornFlow") -> None:
     )
 
 
+def show_blueprints_catalog(nornflow: "NornFlow") -> None:
+    """Display the blueprints catalog."""
+    show_formatted_table(
+        "BLUEPRINTS CATALOG",
+        render_blueprints_catalog_table_data,
+        ["Blueprint Name", "Description", "Source (file path)"],
+        nornflow,
+    )
+
+
 def show_nornflow_settings(nornflow: "NornFlow") -> None:
     """Display the NornFlow settings."""
     show_formatted_table("NORNFLOW SETTINGS", render_settings_table_data, ["Setting", "Value"], nornflow)
@@ -256,18 +272,9 @@ def render_task_catalog_table_data(nornflow: "NornFlow") -> list[list[str]]:
     for task_name in task_names:
         task_func = tasks_catalog[task_name]
         docstring = task_func.__doc__ or "No description available"
-
-        first_sentence = docstring.split(".")[0].strip()
-        if len(first_sentence) > DESCRIPTION_FIRST_SENTENCE_LENGTH:
-            first_sentence = first_sentence[:97] + "..."
-        wrapped_text = textwrap.fill(first_sentence, width=60)
-
+        description = process_task_description(docstring)
         source_path = get_source_from_catalog(tasks_catalog, task_name)
-
-        colored_task_name = colored(task_name, "cyan", attrs=["bold"])
-        colored_docstring = colored(wrapped_text, "yellow")
-        colored_source = colored(source_path, "light_green")
-        table_data.append([colored_task_name, colored_docstring, colored_source])
+        table_data.append(get_colored_row(task_name, description, source_path))
     return table_data
 
 
@@ -280,26 +287,23 @@ def render_workflows_catalog_table_data(nornflow: "NornFlow") -> list[list[str]]
     Returns:
         The table data.
     """
-    workflows_catalog = nornflow.workflows_catalog
-    table_data = []
+    return render_file_based_catalog_table_data(
+        nornflow.workflows_catalog, get_workflow_description, nornflow
+    )
 
-    for workflow_name, workflow_path in sorted(workflows_catalog.items()):
-        try:
-            with workflow_path.open() as f:
-                workflow_dict = yaml.safe_load(f)
-                description = workflow_dict["workflow"].get("description", "No description available")
-        except Exception:
-            description = "Could not load description from file"
 
-        description = textwrap.fill(description, width=60)
+def render_blueprints_catalog_table_data(nornflow: "NornFlow") -> list[list[str]]:
+    """Render the blueprints catalog as a list of lists.
 
-        source_path = get_source_from_catalog(workflows_catalog, workflow_name)
+    Args:
+        nornflow: The NornFlow object.
 
-        colored_workflow_name = colored(workflow_name, "cyan", attrs=["bold"])
-        colored_description = colored(description, "yellow")
-        colored_source = colored(source_path, "light_green")
-        table_data.append([colored_workflow_name, colored_description, colored_source])
-    return table_data
+    Returns:
+        The table data.
+    """
+    return render_file_based_catalog_table_data(
+        nornflow.blueprints_catalog, get_blueprint_description, nornflow
+    )
 
 
 def render_filters_catalog_table_data(nornflow: "NornFlow") -> list[list[str]]:
@@ -320,24 +324,9 @@ def render_filters_catalog_table_data(nornflow: "NornFlow") -> list[list[str]]:
     for filter_name in filter_names:
         filter_func, param_names = filters_catalog[filter_name]
         docstring = filter_func.__doc__ or "No description available"
-
-        first_sentence = docstring.split(".")[0].strip()
-        if len(first_sentence) > DESCRIPTION_FIRST_SENTENCE_LENGTH:
-            first_sentence = first_sentence[:97] + "..."
-
-        if not param_names:
-            param_info = "Parameters: None (host only)"
-        else:
-            param_info = f"Parameters: {', '.join(param_names)}"
-
-        description = f"{first_sentence}\n{param_info}"
-
+        description = process_filter_description(docstring, param_names)
         source_path = get_source_from_catalog(filters_catalog, filter_name)
-
-        colored_filter_name = colored(filter_name, "cyan", attrs=["bold"])
-        colored_docstring = colored(description, "yellow")
-        colored_source = colored(source_path, "light_green")
-        table_data.append([colored_filter_name, colored_docstring, colored_source])
+        table_data.append(get_colored_row(filter_name, description, source_path))
     return table_data
 
 
@@ -432,3 +421,128 @@ def display_banner(banner_text: str, table: str) -> None:
     centered_banner = banner.center(table_width + 5)
 
     typer.echo("\n\n" + centered_banner)
+
+
+def get_workflow_description(workflow_path: Path) -> str:
+    """Get description from workflow YAML file.
+
+    Args:
+        workflow_path: Path to the workflow file.
+
+    Returns:
+        The workflow description.
+    """
+    try:
+        with workflow_path.open() as f:
+            workflow_dict = yaml.safe_load(f)
+            return workflow_dict["workflow"].get("description", "No description available")
+    except Exception:
+        return "Could not load description from file"
+
+
+def get_blueprint_description(blueprint_path: Path) -> str:
+    """Get description from blueprint YAML file.
+
+    Args:
+        blueprint_path: Path to the blueprint file.
+
+    Returns:
+        The blueprint description.
+    """
+    try:
+        with blueprint_path.open() as f:
+            blueprint_dict = yaml.safe_load(f)
+            # Blueprints may have a description at the top level or under a 'blueprint' key
+            return blueprint_dict.get(
+                "description",
+                blueprint_dict.get("blueprint", {}).get("description", "No description available"),
+            )
+    except Exception:
+        return "Could not load description from file"
+
+
+def render_file_based_catalog_table_data(
+    catalog, description_getter, nornflow: "NornFlow"
+) -> list[list[str]]:
+    """Render a file-based catalog (workflows or blueprints) as a list of lists.
+
+    Args:
+        catalog: The catalog to render.
+        description_getter: Function to get description from file path.
+        nornflow: The NornFlow object.
+
+    Returns:
+        The table data.
+    """
+    table_data = []
+    for item_name, item_path in sorted(catalog.items()):
+        description = description_getter(item_path)
+        description = textwrap.fill(description, width=60)
+        source_path = get_source_from_catalog(catalog, item_name)
+        table_data.append(get_colored_row(item_name, description, source_path))
+    return table_data
+
+
+def get_colored_row(name: str, desc: str, source: str) -> list[str]:
+    """Create a colored table row for catalog items.
+
+    Args:
+        name: The item name.
+        desc: The item description.
+        source: The item source path.
+
+    Returns:
+        The colored row list.
+    """
+    return [
+        colored(name, "cyan", attrs=["bold"]),
+        colored(desc, "yellow"),
+        colored(source, "light_green"),
+    ]
+
+
+def extract_first_sentence(docstring: str) -> str:
+    """Extract and truncate the first sentence from a docstring.
+
+    Args:
+        docstring: The raw docstring.
+
+    Returns:
+        The extracted and possibly truncated first sentence.
+    """
+    first_sentence = docstring.split(".")[0].strip()
+    if len(first_sentence) > DESCRIPTION_FIRST_SENTENCE_LENGTH:
+        first_sentence = first_sentence[:97] + "..."
+    return first_sentence
+
+
+def process_task_description(docstring: str) -> str:
+    """Process task description from docstring.
+
+    Args:
+        docstring: The raw docstring.
+
+    Returns:
+        The processed description.
+    """
+    first_sentence = extract_first_sentence(docstring)
+    return textwrap.fill(first_sentence, width=60)
+
+
+def process_filter_description(docstring: str, param_names: list[str]) -> str:
+    """Process filter description from docstring and parameters.
+
+    Args:
+        docstring: The raw docstring.
+        param_names: List of parameter names.
+
+    Returns:
+        The processed description.
+    """
+    first_sentence = extract_first_sentence(docstring)
+    if not param_names:
+        param_info = "Parameters: None (host only)"
+    else:
+        param_info = f"Parameters: {', '.join(param_names)}"
+    description = f"{first_sentence}\n{param_info}"
+    return textwrap.fill(description, width=60)
diff --git a/nornflow/constants.py b/nornflow/constants.py
index c9bda38..0a0841e 100644
--- a/nornflow/constants.py
+++ b/nornflow/constants.py
@@ -58,26 +58,33 @@ NORNFLOW_SPECIAL_FILTER_KEYS = ["hosts", "groups"]
 # used to track the mandatory kwargs for a NornFlowSettings object
 NORNFLOW_SETTINGS_MANDATORY = ("nornir_config_file",)
 
-# used to track the optional kwargs for a NornFlowSettings object
+NORNFLOW_DEFAULT_TASKS_DIR = "tasks"
+NORNFLOW_DEFAULT_WORKFLOWS_DIR = "workflows"
+NORNFLOW_DEFAULT_FILTERS_DIR = "filters"
+NORNFLOW_DEFAULT_HOOKS_DIR = "hooks"
+NORNFLOW_DEFAULT_BLUEPRINTS_DIR = "blueprints"
+NORNFLOW_DEFAULT_VARS_DIR = "vars"
+
 NORNFLOW_SETTINGS_OPTIONAL = {
-    "local_tasks_dirs": [],
-    "local_workflows_dirs": [],
-    "local_filters_dirs": [],
-    "local_hooks_dirs": [],
+    "local_tasks": [NORNFLOW_DEFAULT_TASKS_DIR],
+    "local_workflows": [NORNFLOW_DEFAULT_WORKFLOWS_DIR],
+    "local_filters": [NORNFLOW_DEFAULT_FILTERS_DIR],
+    "local_hooks": [NORNFLOW_DEFAULT_HOOKS_DIR],
     "imported_packages": [],
     "processors": [],
-    "vars_dir": "vars",
+    "vars_dir": NORNFLOW_DEFAULT_VARS_DIR,
     "failure_strategy": FailureStrategy.SKIP_FAILED,
+    "dry_run": False,
 }
 
 # Kwargs that cannot be passed to NornFlow.__init__; they must be set via the settings YAML file.
 # These are optional settings (see NORNFLOW_SETTINGS_OPTIONAL), but if customized, use YAML.
 NORNFLOW_INVALID_INIT_KWARGS = (
     "nornir_config_file",
-    "local_tasks_dirs",
-    "local_workflows_dirs",
-    "local_filters_dirs",
-    "local_hooks_dirs",
+    "local_tasks",
+    "local_workflows",
+    "local_filters",
+    "local_hooks",
     "imported_packages",
 )
 
diff --git a/nornflow/exceptions.py b/nornflow/exceptions.py
index ff4d3bf..470c36a 100644
--- a/nornflow/exceptions.py
+++ b/nornflow/exceptions.py
@@ -8,6 +8,7 @@ organized hierarchically with clear inheritance paths.
 ###############################################################################
 # ROOT EXCEPTION
 ###############################################################################
+from typing import Any
 
 
 class NornFlowError(Exception):
@@ -98,6 +99,29 @@ class FilterError(WorkflowError):
         super().__init__(message, filter_name=filter_name, **kwargs)
 
 
+class BlueprintError(WorkflowError):
+    """Base exception for all blueprint-related errors."""
+
+    def __init__(self, message: str = "", blueprint_name: str = "", details: dict[str, Any] | None = None):
+        self.blueprint_name = blueprint_name
+        self.details = details or {}
+        prefix = f"Blueprint '{blueprint_name}': " if blueprint_name else "Blueprint: "
+        super().__init__(f"{prefix}{message}")
+
+
+class BlueprintCircularDependencyError(BlueprintError):
+    """Raised when circular dependencies are detected in blueprint expansion."""
+
+    def __init__(self, blueprint_name: str, dependency_chain: list[str]):
+        self.dependency_chain = dependency_chain
+        chain_str = " → ".join(dependency_chain)
+        super().__init__(
+            message=f"Circular dependency detected: {chain_str} → {blueprint_name}",
+            blueprint_name=blueprint_name,
+            details={"dependency_chain": dependency_chain},
+        )
+
+
 ###############################################################################
 # SETTINGS EXCEPTIONS
 ###############################################################################
diff --git a/nornflow/hooks/__init__.py b/nornflow/hooks/__init__.py
index 7a86419..65b55a0 100644
--- a/nornflow/hooks/__init__.py
+++ b/nornflow/hooks/__init__.py
@@ -6,9 +6,11 @@ Simply inherit from Hook and define a hook_name - registration is automatic!
 
 from nornflow.hooks.base import Hook, HOOK_REGISTRY
 from nornflow.hooks.loader import load_hooks
+from nornflow.hooks.mixins import Jinja2ResolvableMixin
 
 __all__ = [
     "HOOK_REGISTRY",
     "Hook",
+    "Jinja2ResolvableMixin",
     "load_hooks",
 ]
diff --git a/nornflow/hooks/base.py b/nornflow/hooks/base.py
index 7c7f596..7d3cf5b 100644
--- a/nornflow/hooks/base.py
+++ b/nornflow/hooks/base.py
@@ -8,18 +8,20 @@ from nornflow.hooks.exceptions import HookRegistrationError
 if TYPE_CHECKING:
     from nornflow.models import TaskModel
 
-# Global registry populated automatically when Hook subclasses are defined
 HOOK_REGISTRY: dict[str, type["Hook"]] = {}
 
 
 class Hook:
-    """Base hook class with automatic registration.
+    """Base hook class with automatic registration and cooperative validation.
 
     Any class that inherits from Hook and defines a hook_name will be
     automatically registered when the class is defined (at import time).
 
+    The execute_hook_validations method uses cooperative super() calls to ensure
+    proper validation in multiple inheritance scenarios (e.g., with mixins).
+
     Example:
-        class MyHook(Hook):  # Automatically registered!
+        class MyHook(Hook):
             hook_name = "my_hook"
 
             def task_started(self, task: Task) -> None:
@@ -61,7 +63,6 @@ class Hook:
                     f"Cannot register {cls.__module__}.{cls.__name__}"
                 )
 
-        # Register the hook class
         HOOK_REGISTRY[cls.hook_name] = cls
 
     def __init__(self, value: Any = None):
@@ -95,11 +96,15 @@ class Hook:
         if not self.run_once_per_task:
             return True
 
-        task_id = id(task)
-        if task_id in self._execution_count:
+        task_model = self.context.get("task_model")
+        if not task_model:
+            return True
+
+        task_model_id = id(task_model)
+        if task_model_id in self._execution_count:
             return False
 
-        self._execution_count[task_id] = 1
+        self._execution_count[task_model_id] = 1
         return True
 
     def task_started(self, task: Task) -> None:
@@ -160,10 +165,14 @@ class Hook:
     def execute_hook_validations(self, task_model: "TaskModel") -> None:
         """Validate hook configuration for the task.
 
+        Uses cooperative super() to ensure validation methods in mixins and
+        parent classes are called properly in multiple inheritance scenarios.
+
         Args:
             task_model: The task model to validate against.
 
         Raises:
             HookValidationError: If validation fails.
         """
-        pass
+        if hasattr(super(), "execute_hook_validations"):
+            super().execute_hook_validations(task_model)
diff --git a/nornflow/hooks/mixins.py b/nornflow/hooks/mixins.py
new file mode 100644
index 0000000..f16524f
--- /dev/null
+++ b/nornflow/hooks/mixins.py
@@ -0,0 +1,206 @@
+from typing import Any, TYPE_CHECKING
+
+from jinja2 import TemplateSyntaxError
+from nornir.core.inventory import Host
+from nornir.core.task import Task
+
+from nornflow.hooks.exceptions import HookError, HookValidationError
+from nornflow.vars.constants import JINJA2_MARKERS, TRUTHY_STRING_VALUES
+from nornflow.vars.jinja2_utils import Jinja2EnvironmentManager
+
+if TYPE_CHECKING:
+    from nornflow.models import TaskModel
+
+
+class Jinja2ResolvableMixin:
+    """Mixin providing automatic Jinja2 validation and resolution to hooks.
+
+    This mixin automatically validates string values as Jinja2 expressions during
+    workflow preparation and provides resolution methods for execution. Developers
+    using this mixin don't need Jinja2 awareness - just include it in the inheritance
+    chain and call get_resolved_value() in lifecycle methods.
+
+    The mixin expects the hook to have:
+        - self.value: The hook's configuration value
+        - self.context: Property returning hook execution context
+        - self.hook_name: The hook's name for error messages
+
+    The Hook base class provides all of these.
+
+    Automatic Validation:
+        The mixin overrides execute_hook_validations() to automatically validate
+        string values that contain Jinja2 markers. Plain strings (like "yes", "true")
+        are allowed and later converted by _to_bool(). Works with any inheritance order
+        thanks to cooperative super() calls in the Hook base class.
+
+        NOTE: Empty string validation is NOT performed by this mixin. Individual hook
+        implementations should validate empty strings if their specific use case
+        requires it, as some hooks may legitimately accept empty strings.
+
+    Important:
+        Only call get_resolved_value() inside lifecycle methods where the execution
+        context has been populated by the framework. When calling from task_instance_started(),
+        you MUST pass the host parameter explicitly to ensure per-host resolution.
+
+    Example:
+        class MyHook(Hook, Jinja2ResolvableMixin):
+            hook_name = "my_hook"
+
+            def task_instance_started(self, task: Task, host: Host):
+                should_run = self.get_resolved_value(task, host=host, as_bool=True)
+    """
+
+    def execute_hook_validations(self, task_model: "TaskModel") -> None:
+        """Validate hook configuration, including automatic Jinja2 validation.
+
+        If self.value is a string containing Jinja2 markers, validates it as a
+        Jinja2 expression. Plain strings without markers are allowed.
+        Subclasses can override to add additional validation, but must
+        call super().execute_hook_validations(task_model) first.
+
+        Args:
+            task_model: The task model to validate against
+
+        Raises:
+            HookValidationError: If validation fails
+        """
+        if isinstance(self.value, str) and self._is_jinja2_expression(self.value):
+            self._validate_jinja2_string(task_model)
+
+        if hasattr(super(), "execute_hook_validations"):
+            super().execute_hook_validations(task_model)
+
+    def _validate_jinja2_string(self, task_model: "TaskModel") -> None:
+        """Validate that string value is a proper Jinja2 expression.
+
+        Args:
+            task_model: The task model being validated
+
+        Raises:
+            HookValidationError: If string is empty or has syntax errors
+        """
+        if not self.value.strip():
+            raise HookValidationError(
+                self.hook_name,
+                [("empty_expression", f"Task '{task_model.name}': Jinja2 expression cannot be empty")],
+            )
+
+        try:
+            manager = Jinja2EnvironmentManager()
+            manager.env.from_string(self.value)
+        except TemplateSyntaxError as e:
+            raise HookValidationError(
+                self.hook_name,
+                [("jinja2_syntax", f"Task '{task_model.name}': Jinja2 syntax error: {e}")],
+            ) from e
+        except Exception as e:
+            raise HookValidationError(
+                self.hook_name,
+                [("jinja2_validation", f"Task '{task_model.name}': Jinja2 validation failed: {e}")],
+            ) from e
+
+    def get_resolved_value(
+        self, task: Task, host: Host | None = None, as_bool: bool = False, default: Any = None
+    ) -> Any:
+        """Get the final resolved value, handling Jinja2 automatically.
+
+        This method checks if self.value is a Jinja2 expression, resolves it
+        if needed, converts to the requested type, and returns the final value.
+
+        Args:
+            task: The task being executed
+            host: The specific host to resolve for. If None, extracts the first host.
+            as_bool: If True, convert result to boolean
+            default: Default value if self.value is falsy
+
+        Returns:
+            The resolved value, optionally converted to boolean
+
+        Raises:
+            HookError: If vars_manager not available or task has no hosts
+        """
+        if not self.value:
+            return default
+
+        if self._is_jinja2_expression(self.value):
+            if not host:
+                host = self._extract_host_from_task(task)
+            resolved = self._resolve_jinja2(self.value, host)
+        else:
+            resolved = self.value
+
+        if as_bool:
+            return self._to_bool(resolved)
+
+        return resolved
+
+    def _is_jinja2_expression(self, value: Any) -> bool:
+        """Check if a value contains Jinja2 template markers.
+
+        Args:
+            value: The value to check.
+
+        Returns:
+            True if value is a string with Jinja2 markers, False otherwise.
+        """
+        if not isinstance(value, str):
+            return False
+
+        return any(marker in value for marker in JINJA2_MARKERS)
+
+    def _extract_host_from_task(self, task: Task) -> Host:
+        """Extract a host from task inventory.
+
+        Warning: This returns the FIRST host from inventory, which is only safe
+        when called from task_started (runs once per task). For task_instance_started,
+        you MUST pass the host parameter explicitly.
+
+        Args:
+            task: The task to extract host from.
+
+        Returns:
+            First host from task's inventory.
+
+        Raises:
+            HookError: If task has no hosts.
+        """
+        if not task.nornir.inventory.hosts:
+            raise HookError("Cannot extract host from task with empty inventory")
+
+        return next(iter(task.nornir.inventory.hosts.values()))
+
+    def _resolve_jinja2(self, value: str, host: Host) -> Any:
+        """Resolve a Jinja2 template string.
+
+        Args:
+            value: The template string to resolve.
+            host: The host to resolve for.
+
+        Returns:
+            The resolved value from the template.
+
+        Raises:
+            HookError: If vars_manager not available in context.
+        """
+        vars_manager = self.context.get("vars_manager")
+        if not vars_manager:
+            raise HookError(f"{self.hook_name or 'Hook'}: Variables manager not available in context.")
+
+        return vars_manager.resolve_string(value, host.name)
+
+    def _to_bool(self, value: Any) -> bool:
+        """Convert a value to boolean.
+
+        Args:
+            value: The value to convert.
+
+        Returns:
+            Boolean representation of the value.
+        """
+        if isinstance(value, bool):
+            return value
+
+        if isinstance(value, str):
+            return value.lower() in TRUTHY_STRING_VALUES
+
+        return bool(value)
diff --git a/nornflow/models/task.py b/nornflow/models/task.py
index 7a474f2..9d7e80d 100644
--- a/nornflow/models/task.py
+++ b/nornflow/models/task.py
@@ -47,6 +47,20 @@ class TaskModel(HookableModel):
     name: str
     args: HashableDict[str, Any | None] | None = None
 
+    @property
+    def canonical_id(self) -> str:
+        """
+        Combines the task name with the model ID to create a guaranteed always-unique
+        identifier that distinguishes between different instances of the same task
+        function in a workflow execution.
+
+        Returns:
+            A unique string identifier for this task instance.
+        """
+        if self.id:
+            return f"{self.name}_{self.id}"
+        return self.name
+
     @field_validator("args", mode="before")
     @classmethod
     def validate_args(cls, v: HashableDict[str, Any] | None) -> HashableDict[str, Any] | None:
diff --git a/nornflow/models/workflow.py b/nornflow/models/workflow.py
index 7f1d755..a3214fa 100644
--- a/nornflow/models/workflow.py
+++ b/nornflow/models/workflow.py
@@ -6,10 +6,12 @@ from pydantic import field_validator
 from pydantic_serdes.custom_collections import HashableDict, OneToMany
 from pydantic_serdes.utils import convert_to_hashable
 
+from nornflow.blueprints import BlueprintExpander, BlueprintResolver
 from nornflow.constants import FailureStrategy
 from nornflow.exceptions import WorkflowError
 from nornflow.models import NornFlowBaseModel, TaskModel
 from nornflow.utils import normalize_failure_strategy
+from nornflow.vars.jinja2_utils import Jinja2EnvironmentManager
 
 
 class WorkflowModel(NornFlowBaseModel):
@@ -21,49 +23,74 @@ class WorkflowModel(NornFlowBaseModel):
     inventory_filters: HashableDict[str, Any] | None = None
     processors: tuple[HashableDict[str, Any]] | None = None
     tasks: OneToMany[TaskModel, ...]
-    dry_run: bool = False
+    dry_run: bool | None = None
     vars: HashableDict[str, Any] | None = None
-    failure_strategy: FailureStrategy = FailureStrategy.SKIP_FAILED
+    failure_strategy: FailureStrategy | None = None
 
     @classmethod
     def create(cls, dict_args: dict[str, Any], *args: Any, **kwargs: Any) -> "WorkflowModel":
         """
         Create a new WorkflowModel from a workflow dictionary.
 
-        Extracts the 'workflow' key from the input dict and processes tasks into TaskModel instances.
+        Extracts the 'workflow' key from the input dict, expands any blueprint references
+        in the tasks list, and processes tasks into TaskModel instances.
 
         Args:
             dict_args: Dictionary containing the full workflow data, must include 'workflow' key.
             *args: Additional positional arguments passed to parent create method.
             **kwargs: Additional keyword arguments passed to parent create method.
+                blueprints_catalog: Optional catalog mapping blueprint names to file paths.
+                vars_dir: Optional directory containing variable files.
+                workflow_path: Optional path to the workflow file.
+                workflow_roots: Optional list of workflow root directories.
+                cli_vars: Optional CLI variables with highest precedence.
 
         Returns:
             The created WorkflowModel instance.
 
         Raises:
             WorkflowError: If 'workflow' key is not present in dict_args.
+            BlueprintError: If blueprint expansion fails.
         """
-        try:
-            dict_args = dict_args.pop("workflow")
-        except KeyError as e:
-            raise WorkflowError("Workflow file must have 'workflow' as a root-level key.") from e
+        if "workflow" not in dict_args:
+            raise WorkflowError("Workflow file must have 'workflow' as a root-level key.")
+
+        workflow_dict = dict_args["workflow"]
+
+        if "tasks" not in workflow_dict:
+            workflow_dict["tasks"] = []
+
+        jinja2_manager = Jinja2EnvironmentManager()
+        resolver = BlueprintResolver(jinja2_manager)
+        expander = BlueprintExpander(resolver)
+
+        # Pop blueprint-specific kwargs to consume them and remove them from the dict.
+        blueprints_catalog = kwargs.pop("blueprints_catalog", None)
+        vars_dir = kwargs.pop("vars_dir", None)
+        workflow_path = kwargs.pop("workflow_path", None)
+        workflow_roots = kwargs.pop("workflow_roots", None)
+        cli_vars = kwargs.pop("cli_vars", None)
+
+        expanded_tasks = expander.expand_blueprints(
+            tasks=workflow_dict["tasks"],
+            blueprints_catalog=blueprints_catalog,
+            vars_dir=vars_dir,
+            workflow_path=workflow_path,
+            workflow_roots=workflow_roots,
+            inline_vars=workflow_dict.get("vars"),
+            cli_vars=cli_vars,
+        )
 
-        # Tasks should already be in dict_args from the workflow definition
-        if "tasks" not in dict_args:
-            dict_args["tasks"] = []  # Default to empty list if no tasks defined
-
-        # Create TaskModels from the tasks in dict_args
         tasks = []
-        for task_dict in dict_args["tasks"]:
+        for task_dict in expanded_tasks:
             task = TaskModel.create(task_dict)
             tasks.append(task)
 
-        # Update tasks in dict_args with the created TaskModels
-        # in the end, it's ok for tasks to be a python list, because PydanticSerdesBaseModel
-        # automatically takes care of converting it to its own OneToMany type
-        dict_args["tasks"] = tasks
+        workflow_dict["tasks"] = tasks
 
-        return super().create(dict_args, *args, **kwargs)
+        # kwargs contains arguments meant for the model itself.
+        result = super().create(workflow_dict, *args, **kwargs)
+        return result
 
     @field_validator("failure_strategy", mode="before")
     @classmethod
diff --git a/nornflow/nornflow.py b/nornflow/nornflow.py
index 662d5c0..8df83d3 100644
--- a/nornflow/nornflow.py
+++ b/nornflow/nornflow.py
@@ -25,7 +25,7 @@ from nornflow.utils import (
     import_modules_recursively,
     is_nornir_filter,
     is_nornir_task,
-    is_workflow_file,
+    is_yaml_file,
     load_processor,
     print_workflow_overview,
     process_filter,
@@ -85,6 +85,7 @@ class NornFlow:
         vars: dict[str, Any] | None = None,
         filters: dict[str, Any] | None = None,
         failure_strategy: FailureStrategy | None = None,
+        dry_run: bool | None = None,
         **kwargs: Any,
     ):
         """
@@ -105,6 +106,8 @@ class NornFlow:
                 any inventory filters defined in the workflow YAML.
             failure_strategy: Failure strategy with highest precedence. This overrides any failure
                 strategy defined in the workflow YAML.
+            dry_run: Dry run mode with highest precedence. This overrides any dry_run
+                setting defined in the workflow YAML or settings.
             **kwargs: Additional keyword arguments passed to NornFlowSettings
 
         Raises:
@@ -113,14 +116,11 @@ class NornFlow:
         try:
             self._validate_init_kwargs(kwargs)
             self._initialize_settings(nornflow_settings, kwargs)
-            self._initialize_nornir()
-            self._initialize_instance_vars(vars, filters, failure_strategy, processors)
+            self._initialize_instance_vars(vars, filters, failure_strategy, dry_run, processors)
             self._initialize_hooks()
             self._initialize_catalogs()
             self._initialize_processors()
-            # Workflow is optional; not always do we want a NornFlow instance with
-            # an executable workflow (e.g., for informational commands like 'show')
-            if workflow is not None:
+            if workflow:
                 self.workflow = workflow
         except CoreError:
             raise
@@ -144,15 +144,19 @@ class NornFlow:
         vars: dict[str, Any] | None,
         filters: dict[str, Any] | None,
         failure_strategy: FailureStrategy | None,
+        dry_run: bool | None,
         processors: list[dict[str, Any]] | None,
     ) -> None:
         """Initialize core instance variables."""
         self._vars = vars or {}
         self._filters = filters or {}
         self._failure_strategy = failure_strategy
+        self._dry_run = dry_run
         self._processors = processors
         self._workflow = None
         self._workflow_path = None
+        self._nornir_configs = None
+        self._nornir_manager = None
         # System processors are initialized lazily in their property getters
         # when needed during workflow execution, not during __init__
         self._var_processor = None
@@ -164,19 +168,24 @@ class NornFlow:
         self._tasks_catalog = CallableCatalog("tasks")
         self._filters_catalog = CallableCatalog("filters")
         self._workflows_catalog = FileCatalog("workflows")
+        self._blueprints_catalog = FileCatalog("blueprints")
         self._load_tasks_catalog()
         self._load_filters_catalog()
         self._load_workflows_catalog()
+        self._load_blueprints_catalog()
 
     def _initialize_hooks(self) -> None:
         """Initialize hooks by importing modules from configured directories."""
-        for dir_path in self.settings.local_hooks_dirs:
+        for dir_path in self.settings.local_hooks:
             dir_path_obj = Path(dir_path)
             if dir_path_obj.exists():
                 import_modules_recursively(dir_path_obj)
 
     def _initialize_nornir(self) -> None:
         """Initialize Nornir configurations and manager."""
+        if self._nornir_manager:
+            return
+
         try:
             self._nornir_configs = load_file_to_dict(self.settings.nornir_config_file)
         except Exception as e:
@@ -205,7 +214,7 @@ class NornFlow:
         2. They have fixed positions in the processor chain (first, second, last)
         3. They are always present and cannot be overridden by users
 
-        System processors are added later in _with_processors() when a workflow
+        System processors are added later in _apply_processors() when a workflow
         is being executed and all necessary context is available.
 
         Precedence for user-configurable processors:
@@ -234,7 +243,9 @@ class NornFlow:
         Returns:
             dict[str, Any]: Dictionary containing the Nornir configurations.
         """
-        return self.nornir_manager.nornir.config.dict()
+        if not self._nornir_manager:
+            self._initialize_nornir()
+        return self._nornir_configs
 
     @nornir_configs.setter
     def nornir_configs(self, value: Any) -> None:
@@ -250,6 +261,8 @@ class NornFlow:
         Returns:
             NornirManager: The Nornir manager instance.
         """
+        if not self._nornir_manager:
+            self._initialize_nornir()
         return self._nornir_manager
 
     @nornir_manager.setter
@@ -352,17 +365,15 @@ class NornFlow:
         1. Failure strategy passed to the NornFlow constructor
         2. Workflow failure strategy
         3. Settings failure strategy
-        4. Default (SKIP_FAILED)
 
         Returns:
             FailureStrategy: The effective failure strategy.
         """
-        return (
-            self._failure_strategy
-            or (self.workflow.failure_strategy if self.workflow else None)
-            or self.settings.failure_strategy
-            or FailureStrategy.SKIP_FAILED
-        )
+        if self._failure_strategy:
+            return self._failure_strategy
+        if self.workflow and self.workflow.failure_strategy:
+            return self.workflow.failure_strategy
+        return self.settings.failure_strategy
 
     @failure_strategy.setter
     def failure_strategy(self, value: FailureStrategy) -> None:
@@ -383,6 +394,25 @@ class NornFlow:
         self._failure_strategy = value
         self._failure_strategy_processor = None
 
+    @property
+    def dry_run(self) -> bool:
+        """
+        Get the effective dry_run value based on precedence chain.
+
+        Precedence (highest to lowest):
+        1. dry_run passed to NornFlow constructor
+        2. Workflow dry_run setting
+        3. Settings dry_run
+
+        Returns:
+            bool: The effective dry_run value.
+        """
+        if self._dry_run is not None:
+            return self._dry_run
+        if self.workflow and self.workflow.dry_run is not None:
+            return self.workflow.dry_run
+        return self.settings.dry_run
+
     @property
     def var_processor(self) -> NornFlowVariableProcessor | None:
         """
@@ -407,7 +437,7 @@ class NornFlow:
         Returns:
             NornFlowFailureStrategyProcessor: The failure strategy processor instance.
         """
-        if self._failure_strategy_processor is None:
+        if not self._failure_strategy_processor:
             self._failure_strategy_processor = NornFlowFailureStrategyProcessor(self.failure_strategy)
         return self._failure_strategy_processor
 
@@ -490,6 +520,26 @@ class NornFlow:
         """
         raise ImmutableAttributeError("Cannot set filters catalog directly.")
 
+    @property
+    def blueprints_catalog(self) -> FileCatalog:
+        """
+        Get the blueprints catalog.
+
+        Returns:
+            FileCatalog: Catalog mapping blueprint names to their corresponding file Paths.
+        """
+        return self._blueprints_catalog
+
+    @blueprints_catalog.setter
+    def blueprints_catalog(self, _: Any) -> None:
+        """
+        Prevent setting the blueprints catalog directly.
+
+        Raises:
+            ImmutableAttributeError: Always raised to prevent direct setting of the blueprints catalog.
+        """
+        raise ImmutableAttributeError("Cannot set blueprints catalog directly.")
+
     @property
     def workflow(self) -> WorkflowModel | None:
         """
@@ -511,7 +561,7 @@ class NornFlow:
         Raises:
             WorkflowError: If value is invalid or workflow cannot be loaded.
         """
-        if value is None:
+        if not value:
             self._workflow = None
             self._workflow_path = None
         elif isinstance(value, WorkflowModel):
@@ -573,6 +623,11 @@ class NornFlow:
             "Processors cannot be set directly, but must be loaded from nornflow settings file."
         )
 
+    @property
+    def nornir_config_file(self) -> str:
+        """Get the Nornir config file path from settings."""
+        return self.settings.nornir_config_file
+
     def _load_catalog(
         self,
         catalog_type: type,
@@ -650,14 +705,14 @@ class NornFlow:
 
         Tasks are loaded in two phases:
         1. Built-in tasks from nornflow.builtins.tasks module
-        2. User-defined tasks from local_tasks_dirs
+        2. User-defined tasks from local_tasks
         """
         self._tasks_catalog = self._load_catalog(
             CallableCatalog,
             "tasks",
             builtin_module=builtin_tasks,
             predicate=is_nornir_task,
-            directories=self.settings.local_tasks_dirs,
+            directories=self.settings.local_tasks,
             check_empty=True,
         )
 
@@ -667,7 +722,7 @@ class NornFlow:
 
         Filters are loaded in two phases:
         1. Built-in filters from nornflow.builtins.filters module
-        2. User-defined filters from configured local_filters_dirs
+        2. User-defined filters from configured local_filters
         """
         self._filters_catalog = self._load_catalog(
             CallableCatalog,
@@ -675,7 +730,7 @@ class NornFlow:
             builtin_module=builtin_filters,
             predicate=is_nornir_filter,
             transform_item=process_filter,
-            directories=self.settings.local_filters_dirs,
+            directories=self.settings.local_filters,
         )
 
     def _load_workflows_catalog(self) -> None:
@@ -688,8 +743,22 @@ class NornFlow:
         self._workflows_catalog = self._load_catalog(
             FileCatalog,
             "workflows",
-            predicate=is_workflow_file,
-            directories=self.settings.local_workflows_dirs,
+            predicate=is_yaml_file,
+            directories=self.settings.local_workflows,
+            recursive=True,
+        )
+
+    def _load_blueprints_catalog(self) -> None:
+        """
+        Discover and load blueprint files from directories specified in settings.
+
+        This catalogs the available blueprint files for later use.
+        """
+        self._blueprints_catalog = self._load_catalog(
+            FileCatalog,
+            "blueprints",
+            predicate=is_yaml_file,
+            directories=self.settings.local_blueprints,
             recursive=True,
         )
 
@@ -728,17 +797,12 @@ class NornFlow:
                 f"Available tasks: {available_tasks}"
             )
 
-    def _apply_filters(self, nornir_manager: NornirManager) -> None:
-        """
-        Apply inventory filters to the Nornir manager.
-
-        Args:
-            nornir_manager: The Nornir manager to apply filters to.
-        """
+    def _apply_filters(self) -> None:
+        """Apply inventory filters to the Nornir manager."""
         filter_kwargs_list = self._get_filtering_kwargs()
 
         for filter_kwargs in filter_kwargs_list:
-            nornir_manager.apply_filters(**filter_kwargs)
+            self.nornir_manager.apply_filters(**filter_kwargs)
 
     def _get_filtering_kwargs(self) -> list[dict[str, Any]]:
         """
@@ -837,7 +901,14 @@ class NornFlow:
         workflow_path = self.workflows_catalog[name]
         try:
             workflow_dict = load_file_to_dict(workflow_path)
-            workflow = WorkflowModel.create(workflow_dict)
+            workflow = WorkflowModel.create(
+                workflow_dict,
+                blueprints_catalog=dict(self.blueprints_catalog),
+                vars_dir=self.settings.vars_dir,
+                workflow_path=workflow_path,
+                workflow_roots=self.settings.local_workflows,
+                cli_vars=self._vars,
+            )
             return workflow, workflow_path
         except Exception as e:
             raise WorkflowError(
@@ -857,14 +928,10 @@ class NornFlow:
             cli_vars=self.vars,
             inline_workflow_vars=dict(self.workflow.vars) if self.workflow.vars else {},
             workflow_path=self.workflow_path,
-            workflow_roots=self.settings.local_workflows_dirs,
+            workflow_roots=self.settings.local_workflows,
         )
 
-    def _with_processors(
-        self,
-        nornir_manager: NornirManager,
-        processors: list | None = None,
-    ) -> None:
+    def _apply_processors(self) -> None:
         """
         Apply processors to the Nornir instance based on configuration.
 
@@ -877,7 +944,6 @@ class NornFlow:
 
         2. USER-CONFIGURABLE PROCESSORS (optional, middle position):
            - From workflow definition (self._workflow.processors)
-           - From passed parameter (processors)
            - From NornFlow settings (self._processors initialized in _initialize_processors)
 
         System processors are initialized lazily via their properties when first accessed.
@@ -888,10 +954,6 @@ class NornFlow:
         2. NornFlowHookProcessor (system - hook execution)
         3. User-configurable processors (custom business logic)
         4. NornFlowFailureStrategyProcessor (system - error handling)
-
-        Args:
-            nornir_manager: The NornirManager instance to apply processors to
-            processors: List of processors to apply if no workflow-specific processors defined
         """
         # Build processor chain with system processors at fixed positions
         # The var_processor property will handle lazy initialization if needed
@@ -912,29 +974,18 @@ class NornFlow:
                     all_processors.extend(workflow_processors)
             except ProcessorError as e:
                 raise WorkflowError(f"Failed to initialize workflow processors: {e}") from e
-        elif processors:
-            all_processors.extend(processors)
         elif self.processors:
             all_processors.extend(self.processors)
 
-        # Add failure strategy processor last
         all_processors.append(self.failure_strategy_processor)
 
-        nornir_manager.apply_processors(all_processors)
-
-    def _orchestrate_execution(self, effective_dry_run: bool) -> None:
-        """
-        Orchestrate the execution of workflow tasks in sequence.
-
-        This method handles the core workflow execution logic, including setting
-        the dry-run mode and running each task with the necessary context.
+        self.nornir_manager.apply_processors(all_processors)
 
-        Args:
-            effective_dry_run: Whether to execute in dry-run mode.
-        """
+    def _orchestrate_execution(self) -> None:
+        """Orchestrate the execution of workflow tasks in sequence."""
         with self.nornir_manager:
             for task in self.workflow.tasks:
-                self.nornir_manager.set_dry_run(effective_dry_run)
+                self.nornir_manager.set_dry_run(self.dry_run)
 
                 task.run(
                     nornir_manager=self.nornir_manager,
@@ -942,18 +993,11 @@ class NornFlow:
                     tasks_catalog=dict(self.tasks_catalog),
                 )
 
-    def _print_workflow_overview(self, effective_dry_run: bool) -> None:
-        """
-        Print the workflow overview before execution.
-        This just wraps around print_workflow_overview for improved
-        readability in self.run()
-
-        Args:
-            effective_dry_run: Whether to execute in dry-run mode.
-        """
+    def _print_workflow_overview(self) -> None:
+        """Print the workflow overview before execution."""
         print_workflow_overview(
             workflow_model=self.workflow,
-            effective_dry_run=effective_dry_run,
+            effective_dry_run=self.dry_run,
             hosts_count=len(self.nornir_manager.nornir.inventory.hosts),
             inventory_filters=self.filters or self.workflow.inventory_filters or {},
             workflow_vars=dict(self.workflow.vars) if self.workflow.vars else {},
@@ -1002,7 +1046,7 @@ class NornFlow:
 
         return 0
 
-    def run(self, dry_run: bool = False) -> int:
+    def run(self) -> int:
         """
         Execute the configured workflow with the current NornFlow settings.
 
@@ -1027,21 +1071,19 @@ class NornFlow:
         back to the caller of NornFlow.run(). This is to allow the caller the flexibility
         to process and handle it as fits.
 
-        Args:
-            dry_run: Whether to execute in dry-run mode.
-
         Returns:
             int: Exit code representing execution status.
         """
-        effective_dry_run = dry_run or self.workflow.dry_run
         if not self.workflow:
             raise WorkflowError(
                 "No workflow configured. Set a workflow before calling run().", component="NornFlow"
             )
+
         self._check_tasks()
-        self._apply_filters(self.nornir_manager)
-        self._with_processors(self.nornir_manager)
-        self._print_workflow_overview(effective_dry_run)
-        self._orchestrate_execution(effective_dry_run)
+        self._initialize_nornir()
+        self._apply_filters()
+        self._apply_processors()
+        self._print_workflow_overview()
+        self._orchestrate_execution()
         self._print_workflow_summary()
         return self._get_return_code()
diff --git a/nornflow/settings.py b/nornflow/settings.py
index d531bff..9e4b7ef 100644
--- a/nornflow/settings.py
+++ b/nornflow/settings.py
@@ -1,131 +1,286 @@
 import os
-from collections import defaultdict
+from pathlib import Path
 from typing import Any
 
 import yaml
-from pydantic_serdes.utils import load_file_to_dict
+from pydantic import Field, field_validator, PrivateAttr
+from pydantic_settings import BaseSettings, SettingsConfigDict
 
-from nornflow.constants import NORNFLOW_SETTINGS_MANDATORY, NORNFLOW_SETTINGS_OPTIONAL
-from nornflow.exceptions import NornFlowError, ResourceError, SettingsError
+from nornflow.constants import (
+    FailureStrategy,
+    NORNFLOW_DEFAULT_BLUEPRINTS_DIR,
+    NORNFLOW_DEFAULT_FILTERS_DIR,
+    NORNFLOW_DEFAULT_HOOKS_DIR,
+    NORNFLOW_DEFAULT_TASKS_DIR,
+    NORNFLOW_DEFAULT_VARS_DIR,
+    NORNFLOW_DEFAULT_WORKFLOWS_DIR,
+)
+from nornflow.exceptions import SettingsError
 
 
-class NornFlowSettings:
+class NornFlowSettings(BaseSettings):
     """
-    This class is used to store NornFlow settings for access during runtime.
+    NornFlow settings management using Pydantic.
 
-    For initialization, it requires the location of a YAML file that holds the settings.
-    This will be determined with the following order of preference:
-        - through an environment variable named `NORNFLOW_SETTINGS`.
-        - through the `settings_file` argument.
-        - a default file named 'nornflow.yaml' is assumed to exist.
+    Settings are loaded with the following priority (highest to lowest):
+    1. Environment variables (prefixed with NORNFLOW_SETTINGS_)
+    2. Values from settings YAML file
+    3. Default values defined in the model
 
-    To allow for extensibility and customizations, NornFlow was designed with the following
-    principles in mind:
-        1 - NornFlow settings and Nornir configs are kept separate, hence the need for a
-           `nornir_config_file` setting in the NornFlow settings YAML file.
+    Note the careful terminology:
+    - "Settings" refers to NornFlow's own configuration
+    - "Configuration/Config" is reserved for Nornir's configuration
 
-        2 - a minimal set of REQUIRED settings.
+    Environment variable examples:
+    - NORNFLOW_SETTINGS_VARS_DIR=/custom/vars
+    - NORNFLOW_SETTINGS_LOCAL_TASKS=["tasks", "custom_tasks"]
+    - NORNFLOW_SETTINGS_FAILURE_STRATEGY=fail-fast
+    """
 
-        3 - a minimal set of OPTIONAL settings that can also be passed explicitly to the Class
-            initializer as keyword arguments.
+    model_config = SettingsConfigDict(
+        env_prefix="NORNFLOW_SETTINGS_",
+        env_nested_delimiter="__",
+        case_sensitive=False,
+        extra="allow",
+    )
 
-        4 - there's no fixed set of "acceptable settings". Users can add more settings if
-            they want to extend NornFlow to support custom use-cases, but currently this is
-            only supported through the YAML file, not through keyword arguments.
+    nornir_config_file: str = Field(description="Path to Nornir configuration file (required)")
 
-        5 - settings can be accessed as attributes of a NornFlowSettings object:
-            NornFlowSettings().local_tasks_dirs # returns the 'local_tasks_dirs' setting
+    local_tasks: list[str] = Field(
+        default=[NORNFLOW_DEFAULT_TASKS_DIR], description="List of directories containing Nornir tasks"
+    )
+    local_workflows: list[str] = Field(
+        default=[NORNFLOW_DEFAULT_WORKFLOWS_DIR],
+        description="List of directories containing workflow definitions",
+    )
+    local_filters: list[str] = Field(
+        default=[NORNFLOW_DEFAULT_FILTERS_DIR],
+        description="List of directories containing custom filter functions",
+    )
+    local_hooks: list[str] = Field(
+        default=[NORNFLOW_DEFAULT_HOOKS_DIR], description="List of directories containing custom hook classes"
+    )
+    local_blueprints: list[str] = Field(
+        default=[NORNFLOW_DEFAULT_BLUEPRINTS_DIR],
+        description="List of directories containing blueprint definitions",
+    )
+    imported_packages: list[str] = Field(
+        default_factory=list, description="List of Python packages to import for additional resources"
+    )
+    processors: list[dict[str, Any]] = Field(
+        default_factory=list, description="List of processor configurations with class and args"
+    )
+    vars_dir: str = Field(
+        default=NORNFLOW_DEFAULT_VARS_DIR, description="Directory containing variable files"
+    )
+    failure_strategy: FailureStrategy = Field(
+        default=FailureStrategy.SKIP_FAILED, description="Strategy for handling task failures"
+    )
+    dry_run: bool = Field(default=False, description="Whether to run in dry-run mode")
 
-        6 - trying to access non-supported settings also not informed in the YAML file
-            will simply return None:
-            NornFlowSettings().non_existing_setting_not_informed_in_yaml_either  # returns None
-    """
+    _base_dir: Path | None = PrivateAttr(default=None)
+    _settings_file: str | None = PrivateAttr(default=None)
 
-    def __init__(self, settings_file: str = "nornflow.yaml", **kwargs: Any):
-        # Use environment variable to override settings file path if set
-        self.settings_file = os.getenv("NORNFLOW_SETTINGS", settings_file)
-        self._load_settings()
-        self._check_mandatory_settings()
-        self._set_optional_settings(**kwargs)
+    @field_validator("processors", mode="before")
+    @classmethod
+    def validate_processors(cls, v: Any) -> list[dict[str, Any]]:
+        """Validate and normalize processor configurations."""
+        if not v:
+            return []
 
-    @property
-    def as_dict(self) -> dict[str, Any]:
-        return dict(self.loaded_settings)
+        if not isinstance(v, list):
+            raise TypeError("processors must be a list")
 
-    def _load_settings(self) -> None:
-        """
-        This method reads the settings file specified by `self.settings_file`, parses its
-        contents, and stores them in `self.loaded_settings`. If any errors occur during this
-        process, appropriate custom exceptions are raised.
+        validated = []
+        for item in v:
+            if isinstance(item, str):
+                validated.append({"class": item, "args": {}})
+            elif isinstance(item, dict):
+                if "class" not in item:
+                    raise ValueError("Each processor dict must have a 'class' key")
+                validated.append({"class": item["class"], "args": item.get("args", {})})
+            else:
+                raise TypeError(f"Invalid processor type: {type(item).__name__}")
 
-        Raises:
-            ResourceError: If the settings file is not found or cannot be accessed
-            SettingsError: If there are issues with parsing the settings file or data type
-            NornFlowError: For any other unexpected errors
-        """
-        try:
-            settings_data = load_file_to_dict(file_path=self.settings_file)
-
-            if not isinstance(settings_data, dict):
-                raise SettingsError(f"Settings data must be a dictionary, got {type(settings_data).__name__}")
-
-            self.loaded_settings = defaultdict(lambda: None, settings_data)
-        except FileNotFoundError as e:
-            raise ResourceError(
-                f"Settings file not found: {self.settings_file}",
-                resource_type="File",
-                resource_name=self.settings_file,
-            ) from e
-        except PermissionError as e:
-            raise ResourceError(
-                f"Permission denied accessing settings file: {self.settings_file}",
-                resource_type="File",
-                resource_name=self.settings_file,
-            ) from e
-        except yaml.YAMLError as e:
-            raise SettingsError(f"Failed to parse YAML settings file '{self.settings_file}': {e!s}") from e
-        except TypeError as e:
-            raise SettingsError(f"Invalid data type in settings file: {e!s}") from e
-        except Exception as e:
-            raise NornFlowError(f"An unexpected error occurred while loading settings: {e}") from e
+        return validated
 
-    def _check_mandatory_settings(self) -> None:
-        """
-        Check if all mandatory settings are present and not empty in the configuration.
+    @field_validator("failure_strategy", mode="before")
+    @classmethod
+    def validate_failure_strategy(cls, v: Any) -> FailureStrategy:
+        """Convert string to FailureStrategy enum."""
+        if isinstance(v, str):
+            try:
+                return FailureStrategy(v)
+            except ValueError:
+                normalized = v.lower().replace("_", "-")
+                try:
+                    return FailureStrategy(normalized)
+                except ValueError as e:
+                    raise ValueError(
+                        f"Invalid failure strategy: {v}. "
+                        f"Must be one of: {', '.join(s.value for s in FailureStrategy)}"
+                    ) from e
+        return v
 
-        Raises:
-            SettingsError: If a mandatory setting is missing or empty.
+    def resolve_relative_paths(self) -> "NornFlowSettings":
+        """Resolve relative paths to absolute paths based on base directory."""
+        base_dir = self.base_dir
+        if not base_dir:
+            return self
+
+        self._resolve_local_directories(base_dir)
+
+        vars_path = Path(self.vars_dir)
+        if not vars_path.is_absolute():
+            self.vars_dir = str(base_dir / vars_path)
+
+        if self.nornir_config_file:
+            config_path = Path(self.nornir_config_file)
+            if not config_path.is_absolute():
+                self.nornir_config_file = str(base_dir / config_path)
+
+        return self
+
+    def _resolve_local_directories(self, base_dir: Path) -> None:
+        """Normalize configured local directories relative to the provided base path.
+
+        Args:
+            base_dir: Absolute directory to resolve relative paths against.
         """
-        for setting in NORNFLOW_SETTINGS_MANDATORY:
-            if setting not in self.loaded_settings:
-                raise SettingsError("Mandatory setting is missing from configuration", setting=setting)
-            if not self.loaded_settings[setting]:
-                raise SettingsError("Mandatory setting is empty in configuration", setting=setting)
+        for field_name in [
+            "local_tasks",
+            "local_workflows",
+            "local_filters",
+            "local_hooks",
+            "local_blueprints",
+        ]:
+            dirs = getattr(self, field_name)
+            if not dirs:
+                continue
+            resolved: list[str] = []
+            for dir_path in dirs:
+                path = Path(dir_path)
+                if not path.is_absolute():
+                    resolved.append(str(base_dir / path))
+                else:
+                    resolved.append(str(path))
+            setattr(self, field_name, resolved)
 
-    def _set_optional_settings(self, **kwargs: Any) -> None:
+    @classmethod
+    def load(
+        cls, settings_file: str | None = None, base_dir: Path | None = None, **overrides: Any
+    ) -> "NornFlowSettings":
         """
-        Set optional settings from kwargs or default to existing attributes.
-        This enforces preference for optional settings passed as keyword arguments.
+        Load settings from a YAML file with automatic resolution and overrides.
+
+        This is the recommended way to create NornFlowSettings instances. It handles:
+        - Settings file discovery (explicit path, env var, or default)
+        - YAML loading and validation
+        - Path resolution relative to settings file location
+        - Programmatic value overrides
 
-        The preference algorithm is as follows:
-        1. Use the value passed explicitly in kwargs.
-        2. If not in kwargs, use the value read from the YAML file.
-        3. If not in the YAML file, use the default value from NORNFLOW_OPTIONAL_SETTINGS.
+        Settings file resolution priority (highest to lowest):
+        1. Explicit settings_file parameter (caller's direct intent)
+        2. NORNFLOW_SETTINGS environment variable (session default)
+        3. Default "nornflow.yaml" in current directory
 
         Args:
-            **kwargs (Any): Keyword arguments containing optional settings.
+            settings_file: Path to settings YAML file. If None, checks NORNFLOW_SETTINGS
+                          env var, then defaults to "nornflow.yaml" in current directory.
+            base_dir: Base directory for resolving relative paths. If None, uses the
+                     directory containing the resolved settings file.
+            **overrides: Additional settings to override YAML values. Useful for
+                        programmatic configuration. Example: dry_run=True
+
+        Returns:
+            NornFlowSettings instance with all paths resolved.
+
+        Raises:
+            SettingsError: If settings file not found or contains invalid data.
+
+        Examples:
+            # Use default resolution (checks env var, then nornflow.yaml)
+            settings = NornFlowSettings.load()
+
+            # Explicit file path (highest priority)
+            settings = NornFlowSettings.load("configs/prod-settings.yaml")
+
+            # Override specific values programmatically
+            settings = NornFlowSettings.load(dry_run=True, failure_strategy="fail-fast")
+
+            # Combine file + overrides
+            settings = NornFlowSettings.load(
+                "configs/base.yaml",
+                processors=[{"class": "custom.Processor"}]
+            )
         """
-        for setting, default_value in NORNFLOW_SETTINGS_OPTIONAL.items():
-            self.loaded_settings[setting] = kwargs.get(
-                setting, self.loaded_settings.get(setting, default_value)
+        resolved_file = settings_file or os.getenv("NORNFLOW_SETTINGS") or "nornflow.yaml"
+
+        settings_path = Path(resolved_file).resolve()
+
+        if not settings_path.exists():
+            raise SettingsError(
+                f"Settings file not found: {resolved_file}\n"
+                f"Resolved to absolute path: {settings_path}\n"
+                f"Current working directory: {Path.cwd()}"
             )
 
-    def __getattr__(self, name: str) -> Any:
-        return self.loaded_settings[name]
+        if not base_dir:
+            base_dir = settings_path.parent
 
-    def __str__(self) -> str:
+        try:
+            with settings_path.open() as f:
+                yaml_data = yaml.safe_load(f) or {}
+        except Exception as e:
+            raise SettingsError(f"Failed to load settings from {resolved_file}: {e}") from e
+
+        if not isinstance(yaml_data, dict):
+            raise SettingsError(
+                f"Settings file must contain a YAML dictionary, got {type(yaml_data).__name__}"
+            )
+
+        settings_data = {**yaml_data, **overrides}
+
+        instance = cls(**settings_data)
+        instance._base_dir = base_dir
+        instance._settings_file = str(settings_path)
+
+        return instance.resolve_relative_paths()
+
+    @property
+    def as_dict(self) -> dict[str, Any]:
+        """Get settings as a dictionary."""
+        return self.model_dump(exclude={"_base_dir", "_settings_file"})
+
+    @property
+    def base_dir(self) -> Path | None:
+        """Get the base directory for resolving relative paths if available."""
+        if self._base_dir:
+            return self._base_dir
+        if self._settings_file:
+            return Path(self._settings_file).parent
+        return None
+
+    @property
+    def loaded_settings(self) -> dict[str, Any]:
+        """Backward compatibility property for accessing settings as dict."""
+        return self.as_dict
+
+    def __getattr__(self, name: str) -> Any:
         """
-        Return a string representation of the NornFlowSettings instance,
-        excluding the 'loaded_settings' attribute.
+        Provide backward compatibility for accessing undefined settings.
+        Returns None for non-existent attributes instead of raising AttributeError.
         """
-        return str(dict(self.loaded_settings))
+        private_attrs = getattr(self, "__pydantic_private__", None)
+        if private_attrs and name in private_attrs:
+            return private_attrs[name]
+        if name.startswith("_"):
+            raise SettingsError(f"Unknown private attribute requested: {name}")
+        extra_attrs = getattr(self, "__pydantic_extra__", None)
+        if extra_attrs:
+            return extra_attrs.get(name, None)
+        return None
+
+    def __str__(self) -> str:
+        """Return a string representation of the NornFlowSettings instance."""
+        return str(self.as_dict)
diff --git a/nornflow/utils.py b/nornflow/utils.py
index 6167ab6..84a416e 100644
--- a/nornflow/utils.py
+++ b/nornflow/utils.py
@@ -1,3 +1,4 @@
+import hashlib
 import importlib
 import inspect
 import logging
@@ -6,6 +7,7 @@ from pathlib import Path
 from types import ModuleType
 from typing import Any, Literal
 
+import yaml
 from nornir.core.inventory import Host
 from nornir.core.processor import Processor
 from nornir.core.task import AggregatedResult, MultiResult, Result, Task
@@ -27,11 +29,22 @@ from nornflow.constants import (
 from nornflow.exceptions import (
     CoreError,
     ProcessorError,
+    ResourceError,
     WorkflowError,
 )
 
 logger = logging.getLogger(__name__)
 
+TYPE_DISPLAY_MAPPING: dict[str, str] = {
+    "HashableDict": "map",
+    "dict": "map",
+    "list": "seq",
+    "tuple": "seq",
+    "NoneType": "none",
+}
+
+NORNIR_RESULT_TYPES: set[type] = {Result, MultiResult, AggregatedResult}
+
 
 def normalize_failure_strategy(
     value: str | FailureStrategy, exception_class: type[Exception]
@@ -55,7 +68,6 @@ def normalize_failure_strategy(
     if isinstance(value, FailureStrategy):
         return value
     if isinstance(value, str):
-        # Try direct enum lookup (handles _missing_ method)
         try:
             return FailureStrategy(value)
         except ValueError as e:
@@ -68,31 +80,52 @@ def normalize_failure_strategy(
     )
 
 
-def import_module_from_path(module_name: str, module_path: str) -> ModuleType:
+def import_module_from_path(module_name: str, module_path: str | Path) -> ModuleType:
     """
     Import a module from a given file path.
 
     Args:
-        module_name (str): Name of the module.
-        module_path (str): Path to the module file.
+        module_name: Name to assign to the module.
+        module_path: Path to the module file.
 
     Returns:
-        ModuleType: Imported module.
+        Imported module.
 
     Raises:
         CoreError: If there is an error importing the module.
     """
     try:
-        spec = importlib.util.spec_from_file_location(module_name, module_path)
+        spec = importlib.util.spec_from_file_location(module_name, str(module_path))
         module = importlib.util.module_from_spec(spec)
         spec.loader.exec_module(module)
+        return module
     except Exception as e:
         raise CoreError(
             f"Failed to import module '{module_name}' from '{module_path}': {e!s}",
             component="ModuleLoader",
         ) from e
 
-    return module
+
+def _generate_module_name(py_file: Path, cwd: Path) -> str:
+    """
+    Generate a module name from a Python file path.
+
+    Tries to create a relative dotted path from CWD first. Falls back to a
+    hash-based unique name if the file is outside CWD.
+
+    Args:
+        py_file: Resolved absolute path to the Python file.
+        cwd: Resolved current working directory.
+
+    Returns:
+        A valid Python module name.
+    """
+    try:
+        relative_path = py_file.relative_to(cwd)
+        parts = relative_path.with_suffix("").parts
+        return ".".join(p for p in parts if p)
+    except ValueError:
+        return f"hook_{py_file.stem}_{abs(hash(str(py_file))) % 100000}"
 
 
 def import_modules_recursively(dir_path: Path) -> list[str]:
@@ -110,8 +143,6 @@ def import_modules_recursively(dir_path: Path) -> list[str]:
         List of successfully imported module names.
     """
     imported_modules = []
-
-    # Ensure we're working with resolved absolute paths to avoid path issues
     dir_path = dir_path.resolve()
     cwd = Path.cwd().resolve()
 
@@ -120,98 +151,79 @@ def import_modules_recursively(dir_path: Path) -> list[str]:
             continue
 
         py_file = py_file.resolve()
+        module_name = _generate_module_name(py_file, cwd)
 
         try:
-            # Try to calculate relative path from CWD first
-            try:
-                relative_path = py_file.relative_to(cwd)
-                module_name = path_to_module_name(relative_path)
-            except ValueError:
-                # If file is outside CWD, create a unique module name
-                module_name = f"hook_{py_file.stem}_{abs(hash(str(py_file))) % 100000}"
-
-            # Try direct import first (if module is in sys.path)
             try:
                 importlib.import_module(module_name)
-                imported_modules.append(module_name)
-                logger.debug(f"Imported module: {module_name}")
             except ImportError:
-                # If direct import fails, try importing from file path
-                import_module_from_path(module_name, str(py_file))
-                imported_modules.append(module_name)
-                logger.debug(f"Imported module from path: {module_name}")
-
+                import_module_from_path(module_name, py_file)
+            imported_modules.append(module_name)
+            logger.debug(f"Imported module: {module_name}")
         except Exception as e:
             logger.error(f"Failed to import module {py_file}: {e}")
 
     return imported_modules
 
 
-def path_to_module_name(py_file: Path) -> str:
+def is_nornir_task(attr: Callable) -> bool:
     """
-    Convert a Python file path to a module name.
+    Check if a function is a Nornir task based on its type annotations.
 
-    Assumes the file is importable from the project root.
+    Strict criteria (all must be met):
+    - Must be callable with type annotations
+    - At least one parameter must be annotated as Task
+    - Return type must be Result, MultiResult, or AggregatedResult
 
     Args:
-        py_file: The Python file path.
+        attr: Attribute to check.
 
     Returns:
-        The module name as a dotted string.
+        True if the attribute is a properly annotated Nornir task.
     """
-    # Remove .py extension and convert path parts to module name
-    parts = py_file.with_suffix("").parts
-    # Filter out any empty parts
-    parts = [p for p in parts if p]
-    return ".".join(parts)
+    if not callable(attr) or not hasattr(attr, "__annotations__"):
+        return False
 
+    annotations = attr.__annotations__
+    has_task_param = any(param == Task for param in annotations.values())
+    returns_result = annotations.get("return") in NORNIR_RESULT_TYPES
+    return has_task_param and returns_result
 
-def is_nornir_task(attr: Callable) -> bool:
-    """
-    Check if a function is a Nornir task based on its type annotations.
 
-    Strict criteria (all must be met):
-    - Must be callable
-    - Must have type annotations
-    - At least one parameter must be annotated as Task from nornir.core.task
-    - Return type must be explicitly annotated as one of:
-      - Result
-      - MultiResult
-      - AggregatedResult
+def _is_boolean_return_type(annotation: Any) -> bool:
+    """
+    Check if a return type annotation represents a boolean.
 
     Args:
-        attr (Callable): Attribute to check.
+        annotation: The return type annotation to check.
 
     Returns:
-        bool: True if the attribute is a properly annotated Nornir task, False otherwise.
+        True if the annotation is bool or Literal[True/False].
     """
-    if callable(attr) and hasattr(attr, "__annotations__"):
-        annotations = attr.__annotations__
-        has_task_param = any(param == Task for param in annotations.values())
-        returns_result = annotations.get("return") in {Result, MultiResult, AggregatedResult}
-        return has_task_param and returns_result
+    if annotation is bool:
+        return True
+
+    if hasattr(annotation, "__origin__") and annotation.__origin__ is Literal:
+        args = getattr(annotation, "__args__", ())
+        return set(args) <= {True, False}
+
     return False
 
 
-def is_nornir_filter(attr: Callable) -> bool:  # noqa: PLR0911
+def is_nornir_filter(attr: Callable) -> bool:
     """
-    Check if an function is a Nornir inventory filter function.
+    Check if a function is a Nornir inventory filter function.
 
     Strict criteria (all must be met):
     - Must be callable
-    - First parameter MUST be explicitly annotated as Host from nornir.core.inventory
-    - Return type MUST be explicitly annotated as either:
-      - The built-in bool type
-      - A typing.Literal containing only boolean values (True/False)
-
-    This function enforces explicit type annotations to ensure filter functions
-    follow a consistent pattern.
+    - First parameter MUST be annotated as Host
+    - Return type MUST be bool or Literal[True, False]
 
     Args:
-        attr (Callable): Attribute to check.
+        attr: Attribute to check.
 
     Returns:
-        bool: True if the attribute is a properly annotated Nornir filter, False otherwise.
+        True if the attribute is a properly annotated Nornir filter.
     """
     if not callable(attr):
         return False
@@ -220,61 +232,38 @@ def is_nornir_filter(attr: Callable) -> bool:  # noqa: PLR0911
         sig = inspect.signature(attr)
         params = list(sig.parameters.values())
 
-        # Must have at least one parameter (host)
-        if not params:
-            return False
-
-        # First parameter annotation must be Host
-        if params[0].annotation != Host:
+        if not params or params[0].annotation != Host:
             return False
 
-        # Check for various boolean-like return types
-        return_type_annotation = sig.return_annotation
-
-        # Check for built-in bool
-        if return_type_annotation is bool:
-            return True
-
-        # Checking kind of an edge case here: typing.Literal with boolean values
-        if hasattr(return_type_annotation, "__origin__") and return_type_annotation.__origin__ is Literal:
-            args = getattr(return_type_annotation, "__args__", ())
-            # If all args are True or False, it's a boolean Literal
-            if set(args) <= {True, False}:
-                return True
-
-        return False
-
+        return _is_boolean_return_type(sig.return_annotation)
     except (ValueError, TypeError):
         return False
 
 
 def process_filter(attr: Callable) -> tuple[Callable, list[str]]:
     """
-    Process a filter function to extract its parameters and return both the function and param info.
-
-    This allows filter registration to capture parameter names for use in workflow definitions.
+    Process a filter function to extract its parameters.
 
     Args:
-        attr: The filter function to process
+        attr: The filter function to process.
 
     Returns:
-        Tuple containing (filter_function, parameter_names)
+        Tuple of (filter_function, parameter_names excluding 'host').
     """
     sig = inspect.signature(attr)
-    # Skip the first parameter (host) and get remaining parameter names
     param_names = list(sig.parameters.keys())[1:]
     return (attr, param_names)
 
 
-def is_workflow_file(file_path: str | Path) -> bool:
+def is_yaml_file(file_path: str | Path) -> bool:
     """
     Check if a file is a valid NornFlow workflow file.
 
     Args:
-        file_path: Path to the file to check
+        file_path: Path to the file to check.
 
     Returns:
-        True if the file is a workflow file, False otherwise
+        True if the path is an existing file with a supported YAML extension.
     """
     path = Path(file_path)
     return path.is_file() and path.suffix in NORNFLOW_SUPPORTED_YAML_EXTENSIONS
@@ -285,51 +274,40 @@ def load_processor(processor_config: dict) -> Processor:
     Dynamically load and instantiate a processor from config.
 
     Args:
-        processor_config: Dict with class and args keys
+        processor_config: Dict with 'class' (dotted path) and optional 'args'.
 
     Returns:
-        Instantiated processor
+        Instantiated processor.
 
     Raises:
-        ProcessorError: If processor cannot be loaded or instantiated
+        ProcessorError: If processor cannot be loaded or instantiated.
     """
-    try:
-        dotted_path = processor_config.get("class")
-        if not dotted_path:
-            raise ProcessorError("Missing class in processor configuration")
+    dotted_path = processor_config.get("class")
+    if not dotted_path:
+        raise ProcessorError("Missing 'class' in processor configuration")
 
-        args = processor_config.get("args", {})
+    args = processor_config.get("args", {})
 
-        # Split the dotted path into module and class
+    try:
         module_path, class_name = dotted_path.rsplit(".", 1)
-
-        # Import the module
         module = importlib.import_module(module_path)
-
-        # Get the class
         processor_class = getattr(module, class_name)
-
-        # Instantiate the processor
         return processor_class(**args)
     except (ImportError, AttributeError) as e:
-        raise ProcessorError(f"Failed to load processor {dotted_path}: {e!s}") from e
+        raise ProcessorError(f"Failed to load processor '{dotted_path}': {e!s}") from e
     except Exception as e:
-        raise ProcessorError(f"Error instantiating processor {dotted_path}: {e!s}") from e
+        raise ProcessorError(f"Error instantiating processor '{dotted_path}': {e!s}") from e
 
 
 def convert_lists_to_tuples(dictionary: HashableDict[str, Any] | None) -> HashableDict[str, Any] | None:
     """
     Convert any lists in dictionary values to tuples for serialization.
 
-    This is a common operation needed for HashableDict fields in models
-    to ensure they can be properly serialized.
-
     Args:
-        dictionary (HashableDict[str, Any] | None): The dictionary to process.
+        dictionary: The dictionary to process.
 
     Returns:
-        HashableDict[str, Any] | None: A new HashableDict with lists converted to tuples,
-                                      or None if input was None.
+        A new HashableDict with lists converted to tuples, or None if input was None.
     """
     if dictionary is None:
         return None
@@ -344,11 +322,11 @@ def check_for_jinja2_recursive(obj: Any, path: str) -> None:
     Recursively check for Jinja2 code in nested structures.
 
     Args:
-        obj: Object to check (can be dict, list, string, etc.)
-        path: Current path in the object structure (for error messages)
+        obj: Object to check (dict, list, string, etc.)
+        path: Current path in the object structure for error messages.
 
     Raises:
-        WorkflowError: If Jinja2 code is found
+        WorkflowError: If Jinja2 code is found in a disallowed location.
     """
     if isinstance(obj, str):
         if JINJA_PATTERN.search(obj):
@@ -366,7 +344,7 @@ def check_for_jinja2_recursive(obj: Any, path: str) -> None:
 
 def format_variable_value(key: str, value: Any) -> str:
     """
-    Format a variable value for display, masking protected keywords and adjusting tuple brackets.
+    Format a variable value for display, masking protected keywords.
 
     Args:
         key: The variable name.
@@ -383,6 +361,71 @@ def format_variable_value(key: str, value: Any) -> str:
     return display_value
 
 
+def _get_type_display(value: Any) -> str:
+    """Get display name for a value's type."""
+    type_name = type(value).__name__
+    return TYPE_DISPLAY_MAPPING.get(type_name, type_name)
+
+
+def _add_vars_to_table(
+    table: Table,
+    vars_dict: dict[str, Any],
+    source_label: str,
+) -> None:
+    """
+    Add variables to a Rich table with consistent formatting.
+
+    Args:
+        table: The Rich Table to add rows to.
+        vars_dict: Dictionary of variable name -> value.
+        source_label: Label for the source column (e.g., 'w', 'c*').
+    """
+    for key, value in sorted(vars_dict.items(), key=lambda item: item[0]):
+        table.add_row(
+            source_label,
+            key,
+            format_variable_value(key, value),
+            _get_type_display(value),
+        )
+
+
+def _build_vars_section(workflow_vars: dict[str, Any], cli_vars: dict[str, Any]) -> list[Any]:
+    """
+    Build the variables section for the workflow overview panel.
+
+    Args:
+        workflow_vars: Variables defined in the workflow.
+        cli_vars: Variables from CLI/programmatic override.
+
+    Returns:
+        List of Rich renderables for the vars section, or empty list if no vars.
+    """
+    if not workflow_vars and not cli_vars:
+        return []
+
+    vars_table = Table(show_header=True, box=None)
+    vars_table.add_column("Source", style="bold magenta", no_wrap=True)
+    vars_table.add_column("Name", style="cyan")
+    vars_table.add_column("Value", style="yellow")
+    vars_table.add_column("Type", style="blue", no_wrap=True)
+
+    if workflow_vars:
+        _add_vars_to_table(vars_table, workflow_vars, "w")
+    if cli_vars:
+        _add_vars_to_table(vars_table, cli_vars, "c*")
+
+    legend_text = Text()
+    legend_text.append("Sources", style="bold dim")
+    legend_text.append("\nw: defined in workflow", style="dim")
+    legend_text.append("\nc*: CLI/programmatic override", style="dim")
+
+    return [
+        Text("\n"),
+        Padding.indent(Text("Variables", style="bold cyan"), 1),
+        Padding.indent(Columns([vars_table, Align.right(legend_text)], expand=True), 2),
+    ]
+
+
 def print_workflow_overview(
     workflow_model: Any,
     effective_dry_run: bool,
@@ -393,7 +436,7 @@ def print_workflow_overview(
     failure_strategy: FailureStrategy | None,
 ) -> None:
     """
-    Print a comprehensive workflow overview before execution using Rich for enhanced formatting.
+    Print a comprehensive workflow overview before execution using Rich.
 
     Args:
         workflow_model: The workflow model containing name and description.
@@ -401,19 +444,15 @@ def print_workflow_overview(
         hosts_count: Number of hosts in the filtered inventory.
         inventory_filters: Dictionary of applied inventory filters.
         workflow_vars: Workflow-defined variables.
-        vars: Vars with highest precedence.
+        vars: Vars with highest precedence (CLI/programmatic).
         failure_strategy: The active failure handling strategy.
     """
-    type_mapping = {"HashableDict": "map", "dict": "map", "list": "seq", "tuple": "seq", "NoneType": "none"}
-
     console = Console()
 
-    # Create a table for workflow details
     table = Table(show_header=False, box=None)
     table.add_column("Property", style="bold cyan", no_wrap=True)
     table.add_column("Value", style="yellow")
 
-    # Add rows in the specified order, conditionally
     if workflow_model.name:
         table.add_row("Workflow Name", workflow_model.name)
     if workflow_model.description:
@@ -421,46 +460,17 @@ def print_workflow_overview(
     if inventory_filters:
         filters_str = ", ".join(f"{k}={v}" for k, v in inventory_filters.items())
         table.add_row("Inventory Filters", filters_str)
+
     table.add_row("Dry Run", "Yes" if effective_dry_run else "No")
     table.add_row("Hosts Count", str(hosts_count))
     table.add_row(
-        "Failure Strategy", failure_strategy.value.replace("_", "-") if failure_strategy else "None"
+        "Failure Strategy",
+        failure_strategy.value.replace("_", "-") if failure_strategy else "None",
     )
 
-    # Prepare vars table if any vars exist
-    elements = [table]
-    if vars or workflow_vars:
-        elements.append(Text("\n"))  # Blank space before variables
-        elements.append(Padding.indent(Text("Variables", style="bold cyan"), 1))
-        vars_table = Table(show_header=True, box=None)
-        vars_table.add_column("Source", style="bold magenta", no_wrap=True)
-        vars_table.add_column("Name", style="cyan")
-        vars_table.add_column("Value", style="yellow")
-        vars_table.add_column("Type", style="blue", no_wrap=True)
-
-        if workflow_vars:
-            # Sort workflow variables by name lexicographically
-            for k, v in sorted(workflow_vars.items(), key=lambda item: item[0]):
-                vars_table.add_row(
-                    "w", k, format_variable_value(k, v), type_mapping.get(type(v).__name__, type(v).__name__)
-                )
-        if vars:
-            # Sort CLI/programmatic variables by name lexicographically
-            for k, v in sorted(vars.items(), key=lambda item: item[0]):
-                vars_table.add_row(
-                    "c*",
-                    k,
-                    format_variable_value(k, v),
-                    type_mapping.get(type(v).__name__, type(v).__name__),
-                )
-
-        legend_text = Text()
-        legend_text.append("Sources", style="bold dim")
-        legend_text.append("\nw: defined in workflow", style="dim")
-        legend_text.append("\nc*: CLI/programmatic override", style="dim")
-        elements.append(Padding.indent(Columns([vars_table, Align.right(legend_text)], expand=True), 2))
-
-    # Create a panel with the grouped elements
+    elements: list[Any] = [table]
+    elements.extend(_build_vars_section(workflow_vars, vars))
+
     panel = Panel(
         Group(*elements),
         title=Text("Workflow Execution Overview", style="bold"),
@@ -469,3 +479,30 @@ def print_workflow_overview(
     )
 
     console.print(panel)
+
+
+def get_file_content_hash(file_path: Path) -> str:
+    """
+    Generate a stable hash from file content for identity comparison.
+
+    Normalizes YAML content before hashing to ensure equivalent content
+    produces the same hash regardless of formatting differences.
+
+    Args:
+        file_path: Path to the file to hash.
+
+    Returns:
+        A 16-character hex string representing the content hash.
+
+    Raises:
+        ResourceError: If file cannot be read or parsed.
+    """
+    try:
+        content = file_path.read_text(encoding="utf-8")
+        data = yaml.safe_load(content)
+        normalized = yaml.dump(data, sort_keys=True, default_flow_style=False)
+        return hashlib.sha256(normalized.encode()).hexdigest()[:16]
+    except Exception as e:
+        raise ResourceError(
+            f"Failed to hash file content: {e}", resource_type="file", resource_name=str(file_path)
+        ) from e
diff --git a/nornflow/vars/__init__.py b/nornflow/vars/__init__.py
index 440a231..8275fb3 100644
--- a/nornflow/vars/__init__.py
+++ b/nornflow/vars/__init__.py
@@ -11,12 +11,14 @@ This package provides the variable management functionality for NornFlow, includ
 from nornflow.vars.constants import JINJA2_MARKERS
 from nornflow.vars.context import NornFlowDeviceContext
 from nornflow.vars.exceptions import TemplateError, VariableError
+from nornflow.vars.jinja2_utils import Jinja2EnvironmentManager
 from nornflow.vars.manager import NornFlowVariablesManager
 from nornflow.vars.processors import NornFlowVariableProcessor
 from nornflow.vars.proxy import NornirHostProxy
 
 __all__ = [
     "JINJA2_MARKERS",
+    "Jinja2EnvironmentManager",
     "NornFlowDeviceContext",
     "NornFlowVariableProcessor",
     "NornFlowVariablesManager",
diff --git a/nornflow/vars/constants.py b/nornflow/vars/constants.py
index e334393..d4edcd2 100644
--- a/nornflow/vars/constants.py
+++ b/nornflow/vars/constants.py
@@ -13,3 +13,8 @@ JINJA2_MARKERS = [
     "{#",  # Comment
     "{#-",  # Comment with left whitespace control
 ]
+
+# Lower case string values that evaluate to True when converting to boolean.
+# This provides a centralized reference point to avoid ambiguity across the codebase.
+# Note: This has limited use - primarily for hook configurations and CLI flags.
+TRUTHY_STRING_VALUES = ("true", "yes", "1", "on", "y", "t", "enabled")
diff --git a/nornflow/vars/jinja2_utils.py b/nornflow/vars/jinja2_utils.py
new file mode 100644
index 0000000..dcae046
--- /dev/null
+++ b/nornflow/vars/jinja2_utils.py
@@ -0,0 +1,115 @@
+from typing import Any
+
+from jinja2 import Environment, StrictUndefined, TemplateSyntaxError, UndefinedError
+
+from nornflow.builtins.jinja2_filters import ALL_FILTERS
+from nornflow.vars.exceptions import TemplateError, VariableError
+
+
+class Jinja2EnvironmentManager:
+    """Centralized Jinja2 environment management for NornFlow.
+
+    Provides a single source of truth for Jinja2 environment configuration
+    and template rendering with consistent error handling. All NornFlow custom
+    filters are automatically registered during initialization.
+    """
+
+    def __init__(self):
+        """Initialize the Jinja2 environment with NornFlow configuration.
+
+        Creates a Jinja2 environment with:
+        - StrictUndefined for catching missing variables
+        - Loop controls extension
+        - All NornFlow custom filters pre-registered
+        """
+        self.env = Environment(
+            undefined=StrictUndefined,
+            extensions=["jinja2.ext.loopcontrols"],
+            autoescape=False,  # noqa: S701
+        )
+
+        for filter_name, filter_func in ALL_FILTERS.items():
+            self.env.filters[filter_name] = filter_func
+
+    def render_template(self, template_str: str, context: dict[str, Any], error_context: str = "") -> str:
+        """Render a Jinja2 template string with the provided context.
+
+        Args:
+            template_str: The Jinja2 template string to render.
+            context: Dictionary of variables to use in template rendering.
+            error_context: Description of where this template is being used.
+
+        Returns:
+            The rendered template string.
+
+        Raises:
+            VariableError: If template contains undefined variables.
+            TemplateError: If template has syntax errors or other rendering issues.
+        """
+        try:
+            template = self.env.from_string(template_str)
+            return template.render(context)
+        except UndefinedError as e:
+            context_info = f" ({error_context})" if error_context else ""
+            raise VariableError(f"Undefined variable in template{context_info}: {e}") from e
+        except TemplateSyntaxError as e:
+            context_info = f" ({error_context})" if error_context else ""
+            raise TemplateError(f"Template syntax error{context_info}: {e}") from e
+        except Exception as e:
+            context_info = f" ({error_context})" if error_context else ""
+            raise TemplateError(f"Template rendering error{context_info}: {e}") from e
+
+
+def render_string(template_str: str, context: dict[str, Any], error_context: str = "") -> str:
+    """Convenience function for simple string rendering.
+
+    Args:
+        template_str: The template string to render.
+        context: Dictionary of variables for rendering.
+        error_context: Description for error messages.
+
+    Returns:
+        The rendered string.
+    """
+    manager = Jinja2EnvironmentManager()
+    return manager.render_template(template_str, context, error_context)
+
+
+def render_data_recursive(data: Any, context: dict[str, Any], error_context: str = "") -> Any:
+    """Recursively render Jinja2 templates in data structures.
+
+    Args:
+        data: The data structure to process (dict, list, string, etc.).
+        context: Dictionary of variables for rendering.
+        error_context: Description for error messages.
+
+    Returns:
+        The data structure with all templates rendered.
+    """
+    manager = Jinja2EnvironmentManager()
+    return _render_data_recursive_impl(data, context, manager, error_context)
+
+
+def _render_data_recursive_impl(
+    data: Any, context: dict[str, Any], manager: Jinja2EnvironmentManager, error_context: str
+) -> Any:
+    """Implementation of recursive data rendering.
+
+    Args:
+        data: The data to process.
+        context: Variables for rendering.
+        manager: The Jinja2 manager instance.
+        error_context: Description for error messages.
+
+    Returns:
+        The processed data.
+    """
+    if isinstance(data, str):
+        if any(marker in data for marker in ["{{", "{%", "{#"]):
+            return manager.render_template(data, context, error_context)
+        return data
+    if isinstance(data, dict):
+        return {k: _render_data_recursive_impl(v, context, manager, error_context) for k, v in data.items()}
+    if isinstance(data, list):
+        return [_render_data_recursive_impl(item, context, manager, error_context) for item in data]
+    return data
diff --git a/nornflow/vars/manager.py b/nornflow/vars/manager.py
index e3efa0f..b7d42d7 100644
--- a/nornflow/vars/manager.py
+++ b/nornflow/vars/manager.py
@@ -3,10 +3,9 @@ import os
 from pathlib import Path
 from typing import Any
 
-import jinja2
+import jinja2.exceptions
 import yaml
 
-from nornflow.builtins.jinja2_filters import ALL_FILTERS
 from nornflow.vars.constants import (
     DEFAULTS_FILENAME,
     ENV_VAR_PREFIX,
@@ -14,6 +13,7 @@ from nornflow.vars.constants import (
 )
 from nornflow.vars.context import NornFlowDeviceContext
 from nornflow.vars.exceptions import TemplateError, VariableError
+from nornflow.vars.jinja2_utils import Jinja2EnvironmentManager
 from nornflow.vars.proxy import NornirHostProxy
 
 logger = logging.getLogger(__name__)
@@ -182,16 +182,7 @@ class NornFlowVariablesManager:
             env_vars=self._env_vars,
         )
 
-        self.jinja_env = jinja2.Environment(
-            undefined=jinja2.StrictUndefined,
-            extensions=["jinja2.ext.loopcontrols"],
-            autoescape=False,  # noqa: S701 - Network automation tool, not generating HTML
-        )
-
-        # Register builtin j2 filters
-        for filter_name, filter_func in ALL_FILTERS.items():
-            self.jinja_env.filters[filter_name] = filter_func
-
+        self._jinja2_manager = Jinja2EnvironmentManager()
         self._device_contexts: dict[str, NornFlowDeviceContext] = {}
 
     def _load_environment_variables(self) -> dict[str, Any]:
@@ -426,9 +417,7 @@ class NornFlowVariablesManager:
             raise TemplateError(f"Host name not provided for template resolution: {template_str}")
 
         try:
-            template = self.jinja_env.from_string(template_str)
             device_ctx = self.get_device_context(host_name)
-
             nornflow_default_vars = device_ctx.get_flat_context()
 
             resolution_context_dict = nornflow_default_vars.copy()
@@ -436,6 +425,8 @@ class NornFlowVariablesManager:
                 resolution_context_dict.update(additional_vars)
 
             context_for_jinja = VariableLookupContext(self, host_name, resolution_context_dict)
+
+            template = self._jinja2_manager.env.from_string(template_str)
             return template.render(context_for_jinja)
 
         except jinja2.exceptions.UndefinedError as e:
diff --git a/nornflow/vars/processors.py b/nornflow/vars/processors.py
index 44ba88b..f50b672 100644
--- a/nornflow/vars/processors.py
+++ b/nornflow/vars/processors.py
@@ -3,11 +3,36 @@ NornFlow Variable Processing Module
 
 This module provides the NornFlowVariableProcessor, which integrates NornFlow's
 variable system with Nornir's task execution. It handles Jinja2 template
-resolution in task arguments and manages the 'set' task for runtime variable
-manipulation.
+resolution in task arguments and provides variable context for hooks and tasks.
+
+Hook-Driven Template Resolution
+===============================
+
+The processor uses a capability-based architecture where hooks can declare
+processing requirements via class attributes:
+
+    class MyHook(Hook):
+        requires_deferred_templates = True  # Enable two-phase processing
+
+Processing Modes:
+- **Immediate** (default): Templates resolved in task_instance_started()
+- **Deferred**: Templates stored and resolved just-in-time via resolve_deferred_params()
+
+The processor automatically selects the appropriate mode based on hook declarations.
+
+Example deferred flow:
+1. Hook declares requires_deferred_templates = True
+2. Processor stores templates without resolving them
+3. Hook evaluates inputs (potentially using variable context)
+4. Hook decorator calls resolve_deferred_params() for non-skipped hosts
+5. Task executes with resolved parameters
+
+This is particularly useful for hooks that require evaluating Jinja2 template inputs
+BEFORE anything else is evaluated by the Jinja2 Environment in the same task execution.
 """
 
 import logging
+from typing import Any
 
 from nornir.core.inventory import Host
 from nornir.core.processor import Processor
@@ -20,74 +45,113 @@ logger = logging.getLogger(__name__)
 
 class NornFlowVariableProcessor(Processor):
     """
-    Processor responsible for substituting variables in task arguments and managing
-    NornFlow's variable context during task execution.
+    Processor responsible for managing NornFlow's variable context and template resolution.
+
+    This processor uses a hook-driven strategy for template resolution:
+    - If any hook requests deferred processing, templates are stored for just-in-time resolution
+    - If no hook requests deferred processing, templates are resolved immediately
+    - Hooks declare their needs via the requires_deferred_templates class attribute
 
     This processor integrates with the NornFlowVariablesManager to:
-    1. Resolve Jinja2 templates in task arguments using variables from the NornFlow
-       Default Namespace and the Nornir Host (`host.`) Namespace.
-    2. Manage the current host context for the NornirHostProxy to enable `host.`
-       variable access.
-    3. Handle the NornFlow built-in 'set' task, which creates or updates
-       Runtime Variables in the NornFlow Default Namespace for the current device.
-    4. Ensure that variable resolution and setting are performed within the correct,
-       isolated per-device context.
+    1. Set up variable context that hooks can use for condition evaluation
+    2. Conditionally defer template resolution based on hook requirements
+    3. Provide just-in-time template resolution for hooks that need it
+    4. Provide variable manager access for tasks (like the 'set' builtin task)
+    5. Ensure variable operations are performed within correct per-device context
     """
 
     def __init__(self, vars_manager: NornFlowVariablesManager):
-        """
-        Initializes the NornFlowVariableProcessor.
+        """Initialize the processor with a variable manager.
 
         Args:
-            vars_manager: An instance of NornFlowVariablesManager to handle
-                          variable resolution and management.
+            vars_manager: Variable manager for template resolution and context management.
         """
         self.vars_manager = vars_manager
+        self._deferred_params: dict[tuple[str, str], dict[str, Any]] = {}
 
     def task_started(self, task: Task) -> None:
-        """
-        Called when a Nornir task is about to start globally (before any host).
-        Sets the Nornir object on the host proxy if available.
-        """
-        # Provide the Nornir object to the proxy if it's available on the task.
-        # This allows the proxy to access the full Nornir inventory if needed.
+        """Called when a task starts globally. Sets up Nornir object reference."""
         if hasattr(task, "nornir") and task.nornir:
             self.vars_manager.nornir_host_proxy.nornir = task.nornir
             logger.debug(f"Nornir object set on NornirHostProxy via task '{task.name}'.")
 
-    def task_instance_started(self, task: Task, host: Host) -> None:
+    def _requires_deferred_templates(self, task: Task) -> bool:
+        """Check if any hook for this task requires deferred template processing.
+
+        Uses capability discovery to detect hooks that declare requires_deferred_templates = True.
+
+        Returns:
+            True if any hook requires deferred templates, False otherwise.
         """
-        This method sets the current host context for variable resolution
-        and processes Jinja2 templates in task parameters.
+        for processor in task.nornir.processors:
+            if hasattr(processor, "task_hooks"):
+                for hook in processor.task_hooks:
+                    if getattr(hook, "requires_deferred_templates", False):
+                        return True
+        return False
+
+    def task_instance_started(self, task: Task, host: Host) -> None:
+        """Set up host context and conditionally defer template resolution.
 
-        Raises:
-            Exception: Propagates exceptions from variable processing or resolution.
+        Processing strategy selection:
+        - Deferred mode: Store templates if any hook requires it
+        - Immediate mode: Process templates now (backward compatibility)
         """
         try:
-            # Set the current host name in the proxy to enable {{ host. }} variable access
             self.vars_manager.nornir_host_proxy.current_host_name = host.name
             logger.debug(f"Set current_host_name to '{host.name}' for task '{task.name}'.")
 
-            # Process task parameters for all tasks uniformly
-            if task.params:  # Ensure params exist
-                processed_params = self.vars_manager.resolve_data(task.params, host.name)
-                task.params = processed_params
-                logger.debug(f"Processed task.params for task '{task.name}' on host '{host.name}'")
+            if task.params:
+                if self._requires_deferred_templates(task):
+                    key = (task.name, host.name)
+                    self._deferred_params[key] = task.params.copy()
+                    task.params = {}
+                    logger.debug(f"Deferred template processing for '{host.name}' in task '{task.name}'")
+                else:
+                    processed_params = self.vars_manager.resolve_data(task.params, host.name)
+                    task.params = processed_params
+                    logger.debug(f"Processed task.params for task '{task.name}' on host '{host.name}'")
 
         except Exception:
             logger.exception(f"Error processing variables for task '{task.name}' on host '{host.name}'")
             raise
 
-    def task_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None:
-        """
-        Called after a task instance completes for a specific host.
-        Clears the current host context from the NornirHostProxy.
+    def resolve_deferred_params(self, task: Task, host: Host) -> dict[str, Any] | None:
+        """Resolve stored templates just-in-time for task execution.
+
+        Called by hook decorators to convert deferred templates into actual parameter values.
+
+        Returns:
+            Resolved parameters dict if deferred params exist, None otherwise.
         """
-        # Clear the host reference to prevent stale context between task executions on different hosts
-        # or subsequent tasks for the same host if the proxy were reused without proper reset.
+        key = (task.name, host.name)
+
+        if key not in self._deferred_params:
+            return None
+
+        try:
+            if self.vars_manager.nornir_host_proxy.current_host_name != host.name:
+                self.vars_manager.nornir_host_proxy.current_host_name = host.name
+
+            original_params = self._deferred_params.pop(key)
+            resolved_params = self.vars_manager.resolve_data(original_params, host.name)
+            logger.debug(f"Resolved templates for '{host.name}' in task '{task.name}'")
+            return resolved_params
+
+        except Exception:
+            logger.exception(f"Error resolving templates for task '{task.name}' on host '{host.name}'")
+            self._deferred_params.pop(key, None)
+            raise
+
+    def task_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None:
+        """Clean up host context and any unresolved deferred parameters."""
         self.vars_manager.nornir_host_proxy.current_host_name = None
         logger.debug(f"Cleared current_host_name after task '{task.name}' on host '{host.name}'.")
 
+        key = (task.name, host.name)
+        if key in self._deferred_params:
+            self._deferred_params.pop(key)
+
     def task_completed(self, task: Task, result: MultiResult) -> None:
         pass
 
diff --git a/pyproject.toml b/pyproject.toml
index 1fddf23..17e501b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "nornflow"
-version = "0.5.1"
+version = "0.6.0"
 description = "A workflow orchestration tool for network automation built around Nornir."
 readme = "README.md"
 requires-python = ">=3.10"
@@ -13,6 +13,7 @@ dependencies = [
     "nornir>=3.5.0",
     "nornir-utils>=0.2.0",
     "pydantic-serdes>=1.0.3",
+    "pydantic-settings>=2.12.0",
     "pytest>=8.3.4",
     "pyyaml>=6.0.2",
     "tabulate>=0.9.0",
@@ -45,12 +46,13 @@ manual-tests = [
     "nornir-netbox>=0.3.0",
     "nornir-napalm>=0.5.0",
     "nornir-netmiko>=1.0.1",
+    "nornir-nautobot>=4.0.0",
 ]
 
 [tool.ruff]
 line-length = 110
 target-version = "py310"
-exclude = ["tests/*", "nornflow/cli/tests/*", "nornflow/vars/tests/*"]
+exclude = ["tests/*", "nornflow/cli/tests/*", "nornflow/vars/tests/*", "debug_*.py"]
 
 [tool.ruff.lint]
 select = ["ALL"]
@@ -90,6 +92,7 @@ ignore = [
     "PLR0913",
     "PLR0915",
     "PLE0604",
+    "RET503",
     "RUF003",
     "RUF013"
 ]
@@ -107,7 +110,7 @@ known-first-party = ["nornflow"]
 
 [tool.black]
 line-length = 110
-extend-exclude = '(^|/)tests/'
+extend-exclude = '(^|/)tests/|(^|/)debug_.*\.py$'
 
 [project.scripts]
 nornflow = "nornflow.cli.entrypoint:app"
diff --git a/tests/unit/blueprints/conftest.py b/tests/unit/blueprints/conftest.py
new file mode 100644
index 0000000..da53a2d
--- /dev/null
+++ b/tests/unit/blueprints/conftest.py
@@ -0,0 +1,63 @@
+import pytest
+from nornflow.blueprints.resolver import BlueprintResolver
+from nornflow.vars.jinja2_utils import Jinja2EnvironmentManager
+
+
+@pytest.fixture
+def jinja2_manager():
+    """Provides a Jinja2EnvironmentManager instance for blueprint tests."""
+    return Jinja2EnvironmentManager()
+
+
+@pytest.fixture
+def blueprint_resolver(jinja2_manager):
+    """Provides a BlueprintResolver instance for blueprint tests."""
+    return BlueprintResolver(jinja2_manager)
+
+
+@pytest.fixture
+def mock_blueprints_catalog(tmp_path):
+    """Provides a mock blueprint catalog with sample files."""
+    catalog = {}
+    # Create a sample blueprint file
+    blueprint_dir = tmp_path / "blueprints"
+    blueprint_dir.mkdir()
+    sample_blueprint = blueprint_dir / "sample.yaml"
+    sample_blueprint.write_text("""
+tasks:
+  - name: sample_task
+    task: netmiko_send_command
+    command_string: "show version"
+""")
+    catalog["sample"] = sample_blueprint
+    return catalog
+
+
+@pytest.fixture
+def mock_vars_dir(tmp_path):
+    """Provides a mock vars directory with defaults."""
+    vars_dir = tmp_path / "vars"
+    vars_dir.mkdir()
+    defaults_file = vars_dir / "defaults.yaml"
+    defaults_file.write_text("default_var: value")
+    return vars_dir
+
+
+@pytest.fixture
+def mock_workflow_path(tmp_path):
+    """Provides a mock workflow path."""
+    workflow_file = tmp_path / "workflow.yaml"
+    workflow_file.write_text("workflow: {}")
+    return workflow_file
+
+
+@pytest.fixture
+def mock_workflow_roots(tmp_path):
+    """Provides mock workflow roots."""
+    return [str(tmp_path / "workflows")]
+
+
+@pytest.fixture
+def mock_cli_vars():
+    """Provides mock CLI variables."""
+    return {"cli_var": "cli_value"}
\ No newline at end of file
diff --git a/tests/unit/blueprints/test_blueprint_expander.py b/tests/unit/blueprints/test_blueprint_expander.py
new file mode 100644
index 0000000..929cf70
--- /dev/null
+++ b/tests/unit/blueprints/test_blueprint_expander.py
@@ -0,0 +1,199 @@
+import pytest
+from nornflow.blueprints.expander import BlueprintExpander
+from nornflow.exceptions import BlueprintCircularDependencyError, BlueprintError, ResourceError
+
+
+class TestBlueprintExpander:
+    """Tests for BlueprintExpander class."""
+
+    def test_expand_blueprints_no_catalog(self, blueprint_resolver):
+        """Test expansion with no blueprints catalog."""
+        expander = BlueprintExpander(blueprint_resolver)
+        tasks = [{"name": "task1", "task": "echo"}]
+        result = expander.expand_blueprints(
+            tasks=tasks,
+            blueprints_catalog=None,
+            vars_dir=None,
+            workflow_path=None,
+            workflow_roots=None,
+            inline_vars=None,
+        )
+        assert result == tasks
+
+    def test_expand_blueprints_simple_blueprint(self, blueprint_resolver, mock_blueprints_catalog, mock_vars_dir, mock_workflow_path, mock_workflow_roots, mock_cli_vars):
+        """Test expanding a simple blueprint reference."""
+        expander = BlueprintExpander(blueprint_resolver)
+        tasks = [{"blueprint": "sample"}]
+        result = expander.expand_blueprints(
+            tasks=tasks,
+            blueprints_catalog=mock_blueprints_catalog,
+            vars_dir=mock_vars_dir,
+            workflow_path=mock_workflow_path,
+            workflow_roots=mock_workflow_roots,
+            inline_vars=None,
+            cli_vars=mock_cli_vars,
+        )
+        assert len(result) == 1
+        assert result[0]["name"] == "sample_task"
+
+    def test_expand_blueprints_with_condition_true(self, blueprint_resolver, mock_blueprints_catalog, mock_vars_dir, mock_workflow_path, mock_workflow_roots):
+        """Test blueprint expansion with a true condition."""
+        expander = BlueprintExpander(blueprint_resolver)
+        tasks = [{"blueprint": "sample", "if": "true"}]
+        result = expander.expand_blueprints(
+            tasks=tasks,
+            blueprints_catalog=mock_blueprints_catalog,
+            vars_dir=mock_vars_dir,
+            workflow_path=mock_workflow_path,
+            workflow_roots=mock_workflow_roots,
+            inline_vars=None,
+        )
+        assert len(result) == 1
+
+    def test_expand_blueprints_with_condition_false(self, blueprint_resolver, mock_blueprints_catalog, mock_vars_dir, mock_workflow_path, mock_workflow_roots):
+        """Test blueprint expansion with a false condition."""
+        expander = BlueprintExpander(blueprint_resolver)
+        tasks = [{"blueprint": "sample", "if": "false"}]
+        result = expander.expand_blueprints(
+            tasks=tasks,
+            blueprints_catalog=mock_blueprints_catalog,
+            vars_dir=mock_vars_dir,
+            workflow_path=mock_workflow_path,
+            workflow_roots=mock_workflow_roots,
+            inline_vars=None,
+        )
+        assert result == []
+
+    def test_expand_blueprints_nested_blueprint(self, blueprint_resolver, tmp_path, mock_vars_dir, mock_workflow_path, mock_workflow_roots):
+        """Test expanding nested blueprints."""
+        blueprint_dir = tmp_path / "blueprints"
+        blueprint_dir.mkdir()
+        parent_blueprint = blueprint_dir / "parent.yaml"
+        parent_blueprint.write_text("""
+tasks:
+  - blueprint: child
+""")
+        child_blueprint = blueprint_dir / "child.yaml"
+        child_blueprint.write_text("""
+tasks:
+  - name: child_task
+    task: echo
+""")
+        catalog = {"parent": parent_blueprint, "child": child_blueprint}
+
+        expander = BlueprintExpander(blueprint_resolver)
+        tasks = [{"blueprint": "parent"}]
+        result = expander.expand_blueprints(
+            tasks=tasks,
+            blueprints_catalog=catalog,
+            vars_dir=mock_vars_dir,
+            workflow_path=mock_workflow_path,
+            workflow_roots=mock_workflow_roots,
+            inline_vars=None,
+        )
+        assert len(result) == 1
+        assert result[0]["name"] == "child_task"
+
+    def test_expand_blueprints_circular_dependency(self, blueprint_resolver, tmp_path, mock_vars_dir, mock_workflow_path, mock_workflow_roots):
+        """Test detection of circular blueprint dependencies."""
+        blueprint_dir = tmp_path / "blueprints"
+        blueprint_dir.mkdir()
+        blueprint_a = blueprint_dir / "a.yaml"
+        blueprint_a.write_text("""
+tasks:
+  - blueprint: b
+""")
+        blueprint_b = blueprint_dir / "b.yaml"
+        blueprint_b.write_text("""
+tasks:
+  - blueprint: a
+""")
+        catalog = {"a": blueprint_a, "b": blueprint_b}
+
+        expander = BlueprintExpander(blueprint_resolver)
+        tasks = [{"blueprint": "a"}]
+        with pytest.raises(BlueprintCircularDependencyError):
+            expander.expand_blueprints(
+                tasks=tasks,
+                blueprints_catalog=catalog,
+                vars_dir=mock_vars_dir,
+                workflow_path=mock_workflow_path,
+                workflow_roots=mock_workflow_roots,
+                inline_vars=None,
+            )
+
+    def test_expand_blueprints_missing_blueprint(self, blueprint_resolver, mock_vars_dir, mock_workflow_path, mock_workflow_roots, tmp_path, monkeypatch):
+        """Test error when blueprint is missing."""
+        monkeypatch.chdir(tmp_path)
+        
+        expander = BlueprintExpander(blueprint_resolver)
+        tasks = [{"blueprint": "this_blueprint_absolutely_does_not_exist_anywhere_xyz123"}]
+        with pytest.raises(BlueprintError, match="Blueprint not found in catalog or filesystem"):
+            expander.expand_blueprints(
+                tasks=tasks,
+                blueprints_catalog={},
+                vars_dir=mock_vars_dir,
+                workflow_path=mock_workflow_path,
+                workflow_roots=mock_workflow_roots,
+                inline_vars=None,
+            )
+
+    def test_expand_blueprints_invalid_yaml(self, blueprint_resolver, tmp_path, mock_vars_dir, mock_workflow_path, mock_workflow_roots):
+        """Test error with invalid YAML in blueprint."""
+        blueprint_dir = tmp_path / "blueprints"
+        blueprint_dir.mkdir()
+        invalid_blueprint = blueprint_dir / "invalid.yaml"
+        invalid_blueprint.write_text("invalid: yaml: content: [")
+        catalog = {"invalid": invalid_blueprint}
+
+        expander = BlueprintExpander(blueprint_resolver)
+        tasks = [{"blueprint": "invalid"}]
+        with pytest.raises(ResourceError, match="Failed to hash file content"):
+            expander.expand_blueprints(
+                tasks=tasks,
+                blueprints_catalog=catalog,
+                vars_dir=mock_vars_dir,
+                workflow_path=mock_workflow_path,
+                workflow_roots=mock_workflow_roots,
+                inline_vars=None,
+            )
+
+    def test_expand_blueprints_invalid_structure(self, blueprint_resolver, tmp_path, mock_vars_dir, mock_workflow_path, mock_workflow_roots):
+        """Test error with invalid blueprint structure."""
+        blueprint_dir = tmp_path / "blueprints"
+        blueprint_dir.mkdir()
+        invalid_blueprint = blueprint_dir / "invalid.yaml"
+        invalid_blueprint.write_text("not_tasks: []")
+        catalog = {"invalid": invalid_blueprint}
+
+        expander = BlueprintExpander(blueprint_resolver)
+        tasks = [{"blueprint": "invalid"}]
+        with pytest.raises(BlueprintError, match="Blueprint must contain ONLY 'tasks' key"):
+            expander.expand_blueprints(
+                tasks=tasks,
+                blueprints_catalog=catalog,
+                vars_dir=mock_vars_dir,
+                workflow_path=mock_workflow_path,
+                workflow_roots=mock_workflow_roots,
+                inline_vars=None,
+            )
+
+    def test_expand_blueprints_tasks_not_list(self, blueprint_resolver, tmp_path, mock_vars_dir, mock_workflow_path, mock_workflow_roots):
+        """Test error when tasks is not a list."""
+        blueprint_dir = tmp_path / "blueprints"
+        blueprint_dir.mkdir()
+        invalid_blueprint = blueprint_dir / "invalid.yaml"
+        invalid_blueprint.write_text("tasks: not_a_list")
+        catalog = {"invalid": invalid_blueprint}
+
+        expander = BlueprintExpander(blueprint_resolver)
+        tasks = [{"blueprint": "invalid"}]
+        with pytest.raises(BlueprintError, match="'tasks' must be a list"):
+            expander.expand_blueprints(
+                tasks=tasks,
+                blueprints_catalog=catalog,
+                vars_dir=mock_vars_dir,
+                workflow_path=mock_workflow_path,
+                workflow_roots=mock_workflow_roots,
+                inline_vars=None,
+            )
\ No newline at end of file
diff --git a/tests/unit/blueprints/test_blueprint_resolver.py b/tests/unit/blueprints/test_blueprint_resolver.py
new file mode 100644
index 0000000..58ae70c
--- /dev/null
+++ b/tests/unit/blueprints/test_blueprint_resolver.py
@@ -0,0 +1,209 @@
+import os
+from pathlib import Path
+from unittest.mock import patch
+
+import pytest
+
+from nornflow.exceptions import BlueprintError
+
+
+class TestBlueprintResolver:
+    """Tests for BlueprintResolver class."""
+
+    def test_build_context_with_all_sources(self, blueprint_resolver, mock_vars_dir, mock_workflow_path, mock_workflow_roots, mock_cli_vars):
+        """Test building context with all variable sources."""
+        inline_vars = {"inline_var": "inline_value", "timeout": 15}
+        
+        with patch.dict(os.environ, {"NORNFLOW_VAR_env_var": "env_value"}):
+            context = blueprint_resolver.build_context(
+                vars_dir=mock_vars_dir,
+                workflow_path=mock_workflow_path,
+                workflow_roots=mock_workflow_roots,
+                inline_workflow_vars=inline_vars,
+                cli_vars=mock_cli_vars,
+            )
+        
+        assert "cli_var" in context
+        assert "inline_var" in context
+        assert "env_var" in context
+
+    def test_build_context_precedence_order(self, blueprint_resolver, mock_vars_dir, mock_workflow_path, mock_workflow_roots):
+        """Test that CLI vars have highest precedence."""
+        inline_vars = {"timeout": 15}
+        cli_vars = {"timeout": 5}
+        
+        context = blueprint_resolver.build_context(
+            vars_dir=mock_vars_dir,
+            workflow_path=mock_workflow_path,
+            workflow_roots=mock_workflow_roots,
+            inline_workflow_vars=inline_vars,
+            cli_vars=cli_vars,
+        )
+        
+        assert context["timeout"] == 5
+
+    def test_build_context_without_workflow_path(self, blueprint_resolver, mock_vars_dir, mock_workflow_roots):
+        """Test building context without a workflow path."""
+        cli_vars = {"cli_var": "cli_value"}
+        
+        context = blueprint_resolver.build_context(
+            vars_dir=mock_vars_dir,
+            workflow_path=None,
+            workflow_roots=mock_workflow_roots,
+            inline_workflow_vars=None,
+            cli_vars=cli_vars,
+        )
+        
+        assert context["cli_var"] == "cli_value"
+
+    def test_build_context_with_environment_variables(self, blueprint_resolver, mock_vars_dir, mock_workflow_roots):
+        """Test loading environment variables with NORNFLOW_VAR_ prefix."""
+        with patch.dict(os.environ, {
+            "NORNFLOW_VAR_test": "test_value",
+            "NORNFLOW_VAR_another": "another_value",
+            "OTHER_VAR": "ignored"
+        }):
+            context = blueprint_resolver.build_context(
+                vars_dir=mock_vars_dir,
+                workflow_path=None,
+                workflow_roots=mock_workflow_roots,
+                inline_workflow_vars=None,
+                cli_vars=None,
+            )
+        
+        assert context["test"] == "test_value"
+        assert context["another"] == "another_value"
+        assert "OTHER_VAR" not in context
+
+    def test_build_context_domain_extraction(self, blueprint_resolver, mock_vars_dir, mock_workflow_path, mock_workflow_roots):
+        """Test domain extraction from workflow path."""
+        context = blueprint_resolver.build_context(
+            vars_dir=mock_vars_dir,
+            workflow_path=mock_workflow_path,
+            workflow_roots=mock_workflow_roots,
+            inline_workflow_vars=None,
+            cli_vars=None,
+        )
+        
+        assert isinstance(context, dict)
+
+    def test_resolve_template_simple(self, blueprint_resolver):
+        """Test resolving a simple Jinja2 template."""
+        context = {"env": "prod", "region": "us-east"}
+        result = blueprint_resolver.resolve_template("deployment_{{ env }}_{{ region }}", context)
+        assert result == "deployment_prod_us-east"
+
+    def test_resolve_template_with_filters(self, blueprint_resolver):
+        """Test resolving template with Jinja2 filters."""
+        context = {"name": "test_device"}
+        result = blueprint_resolver.resolve_template("{{ name | upper }}", context)
+        assert result == "TEST_DEVICE"
+
+    def test_resolve_template_undefined_variable(self, blueprint_resolver):
+        """Test that resolving template with undefined variable raises error."""
+        context = {"env": "prod"}
+        with pytest.raises(BlueprintError, match="Undefined variable"):
+            blueprint_resolver.resolve_template("{{ undefined_var }}", context)
+
+    def test_resolve_template_syntax_error(self, blueprint_resolver):
+        """Test that template with syntax error raises error."""
+        context = {}
+        with pytest.raises(BlueprintError, match="Template syntax error"):
+            blueprint_resolver.resolve_template("{{ unclosed", context)
+
+    def test_evaluate_condition_true(self, blueprint_resolver):
+        """Test evaluating condition that returns true."""
+        context = {"env": "prod"}
+        result = blueprint_resolver.evaluate_condition("{{ env == 'prod' }}", context)
+        assert result is True
+
+    def test_evaluate_condition_false(self, blueprint_resolver):
+        """Test evaluating condition that returns false."""
+        context = {"env": "dev"}
+        result = blueprint_resolver.evaluate_condition("{{ env == 'prod' }}", context)
+        assert result is False
+
+    def test_evaluate_condition_string_true(self, blueprint_resolver):
+        """Test evaluating condition with string 'true'."""
+        context = {}
+        result = blueprint_resolver.evaluate_condition("true", context)
+        assert result is True
+
+    def test_evaluate_condition_string_yes(self, blueprint_resolver):
+        """Test evaluating condition with string 'yes'."""
+        context = {}
+        result = blueprint_resolver.evaluate_condition("yes", context)
+        assert result is True
+
+    def test_evaluate_condition_string_false(self, blueprint_resolver):
+        """Test evaluating condition with string 'false'."""
+        context = {}
+        result = blueprint_resolver.evaluate_condition("false", context)
+        assert result is False
+
+    def test_evaluate_condition_complex_expression(self, blueprint_resolver):
+        """Test evaluating complex boolean expression."""
+        context = {"env": "prod", "region": "us-east", "count": 5}
+        result = blueprint_resolver.evaluate_condition(
+            "{{ env == 'prod' and region == 'us-east' and count > 3 }}", 
+            context
+        )
+        assert result is True
+
+    def test_evaluate_condition_with_default_filter(self, blueprint_resolver):
+        """Test evaluating condition with default filter for missing variable."""
+        context = {}
+        result = blueprint_resolver.evaluate_condition("{{ missing_var | default(false) }}", context)
+        assert result is False
+
+    def test_evaluate_condition_undefined_variable(self, blueprint_resolver):
+        """Test that condition with undefined variable raises error."""
+        context = {}
+        with pytest.raises(BlueprintError, match="Undefined variable"):
+            blueprint_resolver.evaluate_condition("{{ undefined_var }}", context)
+
+    def test_build_context_with_missing_defaults_file(self, blueprint_resolver, tmp_path, mock_workflow_roots):
+        """Test building context when defaults.yaml doesn't exist."""
+        empty_vars_dir = tmp_path / "empty_vars"
+        empty_vars_dir.mkdir()
+        
+        context = blueprint_resolver.build_context(
+            vars_dir=empty_vars_dir,
+            workflow_path=None,
+            workflow_roots=mock_workflow_roots,
+            inline_workflow_vars=None,
+            cli_vars={"test": "value"},
+        )
+        
+        assert context["test"] == "value"
+
+    def test_build_context_with_missing_domain_defaults(self, blueprint_resolver, mock_vars_dir, mock_workflow_roots, tmp_path):
+        """Test building context when domain defaults don't exist."""
+        workflow_path = Path(mock_workflow_roots[0]) / "missing_domain" / "deploy.yaml"
+        workflow_path.parent.mkdir(parents=True, exist_ok=True)
+        workflow_path.touch()
+        
+        context = blueprint_resolver.build_context(
+            vars_dir=mock_vars_dir,
+            workflow_path=workflow_path,
+            workflow_roots=mock_workflow_roots,
+            inline_workflow_vars=None,
+            cli_vars=None,
+        )
+        
+        assert isinstance(context, dict)
+
+    def test_resolve_template_no_template_markers(self, blueprint_resolver):
+        """Test resolving plain string without template markers."""
+        context = {"env": "prod"}
+        result = blueprint_resolver.resolve_template("plain_string", context)
+        assert result == "plain_string"
+
+    def test_evaluate_condition_numeric_comparison(self, blueprint_resolver):
+        """Test evaluating condition with numeric comparison."""
+        context = {"count": 10}
+        result = blueprint_resolver.evaluate_condition("{{ count > 5 }}", context)
+        assert result is True
+        
+        result = blueprint_resolver.evaluate_condition("{{ count < 5 }}", context)
+        assert result is False
\ No newline at end of file
diff --git a/tests/unit/builtins/conftest.py b/tests/unit/builtins/conftest.py
new file mode 100644
index 0000000..d1da26e
--- /dev/null
+++ b/tests/unit/builtins/conftest.py
@@ -0,0 +1,97 @@
+import pytest
+from unittest.mock import MagicMock
+from nornir.core.inventory import Host, Inventory
+
+
+class MockNornir:
+    """Simple mock object for Nornir that behaves like a real object for attribute checks."""
+    
+    def __init__(self):
+        self.processors = []
+        self.inventory = MagicMock()
+
+
+@pytest.fixture
+def mock_host():
+    """Fixture providing a mock Host object."""
+    host = MagicMock(spec=Host)
+    host.name = "test_host"
+    host.data = {}
+    return host
+
+
+@pytest.fixture
+def mock_inventory(mock_host):
+    """Fixture providing a mock Inventory with a host."""
+    inventory = MagicMock(spec=Inventory)
+    inventory.hosts = {mock_host.name: mock_host}
+    return inventory
+
+
+@pytest.fixture
+def mock_nornir(mock_inventory):
+    """Fixture providing a mock Nornir instance with inventory."""
+    nornir = MockNornir()
+    nornir.inventory = mock_inventory
+    return nornir
+
+
+@pytest.fixture
+def mock_task(mock_nornir):
+    """Fixture providing a mock task with nornir instance."""
+    task = MagicMock()
+    task.name = "test_task"
+    task.nornir = mock_nornir
+    task.host = mock_nornir.inventory.hosts["test_host"]
+    return task
+
+
+@pytest.fixture
+def mock_device_context():
+    """Fixture providing a mock device context with runtime vars."""
+    context = MagicMock()
+    context.runtime_vars = {}
+    context.resolve_value = MagicMock(return_value="resolved")
+    return context
+
+
+@pytest.fixture
+def mock_vars_manager(mock_device_context):
+    """Fixture providing a mock VarsManager."""
+    manager = MagicMock()
+    manager.get_device_context = MagicMock(return_value=mock_device_context)
+    manager.resolve_string = MagicMock(return_value="resolved")
+    manager.resolve_data = MagicMock(return_value="resolved")
+    manager.set_runtime_variable = MagicMock()
+    return manager
+
+
+@pytest.fixture
+def mock_processor_with_vars_manager(mock_vars_manager):
+    """Fixture providing a processor with vars_manager attribute."""
+    processor = MagicMock()
+    processor.vars_manager = mock_vars_manager
+    return processor
+
+
+@pytest.fixture
+def mock_processor_compatible():
+    """Fixture providing a processor that supports shush hook."""
+    processor = MagicMock()
+    processor.supports_shush_hook = True
+    return processor
+
+
+@pytest.fixture
+def mock_processor_incompatible():
+    """Fixture providing a processor that doesn't support shush hook."""
+    processor = MagicMock()
+    processor.supports_shush_hook = False
+    return processor
+
+
+@pytest.fixture
+def mock_filters_catalog():
+    """Fixture providing a mock filters catalog."""
+    catalog = {}
+    return catalog
\ No newline at end of file
diff --git a/tests/unit/builtins/test_if_hook.py b/tests/unit/builtins/test_if_hook.py
index 7fd19f3..d75576d 100644
--- a/tests/unit/builtins/test_if_hook.py
+++ b/tests/unit/builtins/test_if_hook.py
@@ -19,6 +19,10 @@ class TestIfHook:
         """Test that hook evaluates per host, not once per task."""
         assert IfHook.run_once_per_task is False
 
+    def test_requires_deferred_templates_flag(self):
+        """Test that hook declares requirement for deferred template processing."""
+        assert IfHook.requires_deferred_templates is True
+
     def test_init_with_filter_value(self):
         """Test hook initialization with filter configuration."""
         hook = IfHook({"platform": "ios"})
@@ -39,7 +43,6 @@ class TestIfHook:
         hook = IfHook({"platform": "ios"})
         mock_task_model = MagicMock()
 
-        # Should not raise
         hook.execute_hook_validations(mock_task_model)
 
     def test_execute_hook_validations_valid_jinja_string(self):
@@ -47,7 +50,6 @@ class TestIfHook:
         hook = IfHook("{{ host.platform == 'ios' }}")
         mock_task_model = MagicMock()
 
-        # Should not raise
         hook.execute_hook_validations(mock_task_model)
 
     def test_execute_hook_validations_invalid_multiple_filters(self):
@@ -58,20 +60,13 @@ class TestIfHook:
         with pytest.raises(HookValidationError, match="if must specify exactly one filter"):
             hook.execute_hook_validations(mock_task_model)
 
-    def test_execute_hook_validations_invalid_empty_jinja(self):
-        """Test validation fails for empty Jinja2 expression."""
+    def test_execute_hook_validations_invalid_empty_string(self):
+        """Test validation fails for empty string."""
         hook = IfHook("")
         mock_task_model = MagicMock()
+        mock_task_model.name = "test_task"
 
-        with pytest.raises(HookValidationError, match="if expression cannot be empty"):
-            hook.execute_hook_validations(mock_task_model)
-
-    def test_execute_hook_validations_invalid_no_jinja_markers(self):
-        """Test validation fails for string without Jinja2 markers."""
-        hook = IfHook("host.platform == 'ios'")
-        mock_task_model = MagicMock()
-
-        with pytest.raises(HookValidationError, match="if expression must be a valid Jinja2 template"):
+        with pytest.raises(HookValidationError, match="Task 'test_task': if value cannot be empty string"):
             hook.execute_hook_validations(mock_task_model)
 
     def test_execute_hook_validations_invalid_type(self):
@@ -79,7 +74,7 @@ class TestIfHook:
         hook = IfHook(123)
         mock_task_model = MagicMock()
 
-        with pytest.raises(HookValidationError, match="if value must be a dict \\(filter\\) or string \\(expression\\)"):
+        with pytest.raises(HookValidationError, match="if value must be a dict \\(Nornir filter\\) or string \\(Jinja2 expression\\)"):
             hook.execute_hook_validations(mock_task_model)
 
     def test_task_started_applies_decorator(self):
@@ -92,9 +87,7 @@ class TestIfHook:
 
         hook.task_started(mock_task)
 
-        # The task function should be wrapped
         assert mock_task.task != original_func
-        # The wrapper should be the skip_if_condition_flagged decorated version
         assert hasattr(mock_task.task, '__wrapped__')
 
     def test_task_started_no_value_does_nothing(self):
@@ -107,24 +100,17 @@ class TestIfHook:
 
         hook.task_started(mock_task)
 
-        # Task function should remain unchanged
         assert mock_task.task == original_func
 
     @patch('nornflow.builtins.hooks.if_hook.logger')
-    def test_task_instance_started_filter_condition_skip(self, mock_logger):
+    def test_task_instance_started_filter_condition_skip(self, mock_logger, mock_task, mock_host, mock_filters_catalog):
         """Test task_instance_started sets skip flag when filter condition fails."""
         hook = IfHook({"platform": "ios"})
         
-        mock_task = MagicMock(spec=Task)
-        mock_host = MagicMock(spec=Host)
-        mock_host.name = "router1"
-        mock_host.data = {}
-        
-        # Mock filter that returns False (should skip)
         mock_filter_func = MagicMock(return_value=False)
-        filters_catalog = {"platform": (mock_filter_func, ["value"])}
+        mock_filters_catalog["platform"] = (mock_filter_func, ["value"])
         
-        hook._current_context = {"filters_catalog": filters_catalog}
+        hook._current_context = {"filters_catalog": mock_filters_catalog}
 
         hook.task_instance_started(mock_task, mock_host)
 
@@ -132,131 +118,89 @@ class TestIfHook:
         mock_filter_func.assert_called_once_with(mock_host, value="ios")
 
     @patch('nornflow.builtins.hooks.if_hook.logger')
-    def test_task_instance_started_filter_condition_continue(self, mock_logger):
+    def test_task_instance_started_filter_condition_continue(self, mock_logger, mock_task, mock_host, mock_filters_catalog):
         """Test task_instance_started doesn't set skip flag when filter condition passes."""
         hook = IfHook({"platform": "ios"})
         
-        mock_task = MagicMock(spec=Task)
-        mock_host = MagicMock(spec=Host)
-        mock_host.name = "router1"
-        mock_host.data = {}
-        
-        # Mock filter that returns True (should continue)
         mock_filter_func = MagicMock(return_value=True)
-        filters_catalog = {"platform": (mock_filter_func, ["value"])}
+        mock_filters_catalog["platform"] = (mock_filter_func, ["value"])
         
-        hook._current_context = {"filters_catalog": filters_catalog}
+        hook._current_context = {"filters_catalog": mock_filters_catalog}
 
         hook.task_instance_started(mock_task, mock_host)
 
         assert 'nornflow_skip_flag' not in mock_host.data
         mock_filter_func.assert_called_once_with(mock_host, value="ios")
 
-    def test_task_instance_started_filter_missing_catalog_raises_error(self):
+    def test_task_instance_started_filter_missing_catalog_raises_error(self, mock_task, mock_host):
         """Test task_instance_started raises error when filters_catalog is missing."""
         hook = IfHook({"platform": "ios"})
         
-        mock_task = MagicMock(spec=Task)
-        mock_host = MagicMock(spec=Host)
-        mock_host.name = "router1"
-        
-        hook._current_context = {}  # No filters_catalog
+        hook._current_context = {}
 
         with pytest.raises(HookValidationError, match="Failed to evaluate condition"):
             hook.task_instance_started(mock_task, mock_host)
 
-    def test_task_instance_started_filter_unknown_filter_raises_error(self):
+    def test_task_instance_started_filter_unknown_filter_raises_error(self, mock_task, mock_host, mock_filters_catalog):
         """Test task_instance_started raises error when filter is not in catalog."""
         hook = IfHook({"unknown_filter": "value"})
         
-        mock_task = MagicMock(spec=Task)
-        mock_host = MagicMock(spec=Host)
-        mock_host.name = "router1"
-        
-        filters_catalog = {"platform": (MagicMock(), ["value"])}
-        hook._current_context = {"filters_catalog": filters_catalog}
+        mock_filters_catalog["platform"] = (MagicMock(), ["value"])
+        hook._current_context = {"filters_catalog": mock_filters_catalog}
 
         with pytest.raises(HookValidationError, match="Filter 'unknown_filter' not found"):
             hook.task_instance_started(mock_task, mock_host)
 
     @patch('nornflow.builtins.hooks.if_hook.logger')
-    def test_task_instance_started_jinja_condition_skip(self, mock_logger):
+    def test_task_instance_started_jinja_condition_skip(self, mock_logger, mock_task, mock_host, mock_vars_manager, mock_device_context):
         """Test task_instance_started sets skip flag when Jinja2 condition evaluates to False."""
         hook = IfHook("{{ host.platform == 'ios' }}")
         
-        mock_task = MagicMock(spec=Task)
-        mock_host = MagicMock(spec=Host)
-        mock_host.name = "router1"
-        mock_host.data = {}
-        
-        # Mock vars_manager that resolves to "False"
-        mock_vars_manager = MagicMock()
-        mock_vars_manager.resolve_string.return_value = "False"
-        
-        hook._current_context = {"vars_manager": mock_vars_manager}
-
-        hook.task_instance_started(mock_task, mock_host)
-
-        assert mock_host.data['nornflow_skip_flag'] is True
-        mock_vars_manager.resolve_string.assert_called_once_with("{{ host.platform == 'ios' }}", "router1")
+        with patch.object(hook, 'get_resolved_value', return_value=False):
+            hook.task_instance_started(mock_task, mock_host)
+            
+            assert mock_host.data['nornflow_skip_flag'] is True
 
     @patch('nornflow.builtins.hooks.if_hook.logger')
-    def test_task_instance_started_jinja_condition_continue(self, mock_logger):
+    def test_task_instance_started_jinja_condition_continue(self, mock_logger, mock_task, mock_host, mock_vars_manager, mock_device_context):
         """Test task_instance_started doesn't set skip flag when Jinja2 condition evaluates to True."""
         hook = IfHook("{{ host.platform == 'ios' }}")
         
-        mock_task = MagicMock(spec=Task)
-        mock_host = MagicMock(spec=Host)
-        mock_host.name = "router1"
-        mock_host.data = {}
-        
-        # Mock vars_manager that resolves to "True"
-        mock_vars_manager = MagicMock()
-        mock_vars_manager.resolve_string.return_value = "True"
-        
-        hook._current_context = {"vars_manager": mock_vars_manager}
-
-        hook.task_instance_started(mock_task, mock_host)
-
-        assert 'nornflow_skip_flag' not in mock_host.data
-        mock_vars_manager.resolve_string.assert_called_once_with("{{ host.platform == 'ios' }}", "router1")
+        with patch.object(hook, 'get_resolved_value', return_value=True):
+            hook.task_instance_started(mock_task, mock_host)
+            
+            assert 'nornflow_skip_flag' not in mock_host.data
 
-    def test_task_instance_started_jinja_missing_vars_manager_raises_error(self):
+    def test_task_instance_started_jinja_missing_vars_manager_raises_error(self, mock_task, mock_host):
         """Test task_instance_started raises error when vars_manager is missing."""
         hook = IfHook("{{ true }}")
         
-        mock_task = MagicMock(spec=Task)
-        mock_host = MagicMock(spec=Host)
-        mock_host.name = "router1"
-        
-        hook._current_context = {}  # No vars_manager
+        hook._current_context = {}
 
-        with pytest.raises(HookValidationError, match="vars_manager not available"):
-            hook.task_instance_started(mock_task, mock_host)
+        with patch.object(hook, 'get_resolved_value', side_effect=Exception("vars_manager not available")):
+            with pytest.raises(HookValidationError, match="Failed to evaluate condition.*vars_manager not available"):
+                hook.task_instance_started(mock_task, mock_host)
 
-    def test_task_instance_started_jinja_invalid_expression_raises_error(self):
-        """Test task_instance_started raises error when Jinja2 expression doesn't evaluate to boolean."""
+    def test_task_instance_started_jinja_invalid_expression_raises_error(self, mock_task, mock_host, mock_vars_manager, mock_device_context):
+        """Test task_instance_started handles non-boolean Jinja2 expression results."""
         hook = IfHook("{{ not_boolean }}")
         
-        mock_task = MagicMock(spec=Task)
-        mock_host = MagicMock(spec=Host)
-        mock_host.name = "router1"
-        
-        mock_vars_manager = MagicMock()
-        mock_vars_manager.resolve_string.return_value = "not_boolean"
-        
-        hook._current_context = {"vars_manager": mock_vars_manager}
-
-        with pytest.raises(HookValidationError, match="Failed to evaluate condition"):
+        with patch.object(hook, 'get_resolved_value', return_value=True):
             hook.task_instance_started(mock_task, mock_host)
+            
+            assert 'nornflow_skip_flag' not in mock_host.data
 
     def test_skip_if_condition_flagged_decorator_skip(self):
         """Test skip_if_condition_flagged decorator returns skipped result when flag is set."""
         from nornflow.builtins.hooks.if_hook import skip_if_condition_flagged
         
-        mock_task = MagicMock(spec=Task)
-        mock_host = MagicMock(spec=Host)
+        mock_task = MagicMock()
+        mock_host = MagicMock()
+        mock_nornir = MagicMock()
+        mock_nornir.processors = []
+        
         mock_task.host = mock_host
+        mock_task.nornir = mock_nornir
         mock_host.data = {'nornflow_skip_flag': True}
         
         @skip_if_condition_flagged
@@ -269,17 +213,20 @@ class TestIfHook:
         assert result.result is None
         assert result.changed is False
         assert result.failed is False
-        # Flag should be cleaned up
         assert 'nornflow_skip_flag' not in mock_host.data
 
     def test_skip_if_condition_flagged_decorator_continue(self):
         """Test skip_if_condition_flagged decorator executes task when flag is not set."""
         from nornflow.builtins.hooks.if_hook import skip_if_condition_flagged
         
-        mock_task = MagicMock(spec=Task)
-        mock_host = MagicMock(spec=Host)
+        mock_task = MagicMock()
+        mock_host = MagicMock()
+        mock_nornir = MagicMock()
+        mock_nornir.processors = []
+        
         mock_task.host = mock_host
-        mock_host.data = {}  # No skip flag
+        mock_task.nornir = mock_nornir
+        mock_host.data = {}
         
         @skip_if_condition_flagged
         def dummy_task(task):
@@ -289,12 +236,86 @@ class TestIfHook:
         
         assert result.result == "executed"
 
+    def test_skip_if_condition_flagged_decorator_with_deferred_params(self):
+        """Test skip_if_condition_flagged decorator resolves deferred params when processor available."""
+        from nornflow.builtins.hooks.if_hook import skip_if_condition_flagged
+        
+        mock_task = MagicMock()
+        mock_host = MagicMock()
+        mock_processor = MagicMock()
+        mock_nornir = MagicMock()
+        
+        mock_processor.resolve_deferred_params.return_value = {"resolved": "param"}
+        mock_nornir.processors = [mock_processor]
+        
+        mock_task.host = mock_host
+        mock_task.nornir = mock_nornir
+        mock_host.data = {}
+        
+        @skip_if_condition_flagged
+        def dummy_task(task, **kwargs):
+            return Result(host=task.host, result=kwargs)
+        
+        result = dummy_task(mock_task)
+        
+        assert result.result == {"resolved": "param"}
+        mock_processor.resolve_deferred_params.assert_called_once_with(mock_task, mock_host)
+
+    def test_skip_if_condition_flagged_decorator_immediate_mode(self):
+        """Test skip_if_condition_flagged decorator uses kwargs when no deferred params."""
+        from nornflow.builtins.hooks.if_hook import skip_if_condition_flagged
+        
+        mock_task = MagicMock()
+        mock_host = MagicMock()
+        mock_processor = MagicMock()
+        mock_nornir = MagicMock()
+        
+        mock_processor.resolve_deferred_params.return_value = None  # No deferred params
+        mock_nornir.processors = [mock_processor]
+        
+        mock_task.host = mock_host
+        mock_task.nornir = mock_nornir
+        mock_host.data = {}
+        
+        @skip_if_condition_flagged
+        def dummy_task(task, **kwargs):
+            return Result(host=task.host, result=kwargs)
+        
+        result = dummy_task(mock_task, original="param")
+        
+        assert result.result == {"original": "param"}
+        mock_processor.resolve_deferred_params.assert_called_once_with(mock_task, mock_host)
+
+    def test_skip_if_condition_flagged_decorator_with_empty_deferred_params(self):
+        """Test skip_if_condition_flagged decorator uses empty dict when deferred params resolve to empty."""
+        from nornflow.builtins.hooks.if_hook import skip_if_condition_flagged
+        
+        mock_task = MagicMock()
+        mock_host = MagicMock()
+        mock_processor = MagicMock()
+        mock_nornir = MagicMock()
+        
+        mock_processor.resolve_deferred_params.return_value = {}  # Empty deferred params
+        mock_nornir.processors = [mock_processor]
+        
+        mock_task.host = mock_host
+        mock_task.nornir = mock_nornir
+        mock_host.data = {}
+        
+        @skip_if_condition_flagged
+        def dummy_task(task, **kwargs):
+            return Result(host=task.host, result=kwargs)
+        
+        result = dummy_task(mock_task, original="param")
+        
+        assert result.result == {}  # Should use empty dict, not kwargs
+        mock_processor.resolve_deferred_params.assert_called_once_with(mock_task, mock_host)
+
     def test_should_execute_always_returns_true(self):
         """Test that hook executes for every host (run_once_per_task=False)."""
         hook = IfHook("{{ true }}")
         mock_task = MagicMock()
 
-        # Should always return True since run_once_per_task is False
         assert hook.should_execute(mock_task) is True
         assert hook.should_execute(mock_task) is True
 
@@ -312,7 +333,6 @@ class TestIfHook:
         mock_host = MagicMock()
         mock_result = MagicMock()
 
-        # These should not raise any exceptions
         hook.task_completed(mock_task, mock_result)
         hook.task_instance_completed(mock_task, mock_host, mock_result)
         hook.subtask_instance_started(mock_task, mock_host)
diff --git a/tests/unit/builtins/test_shush_hook.py b/tests/unit/builtins/test_shush_hook.py
index a08084f..addd380 100644
--- a/tests/unit/builtins/test_shush_hook.py
+++ b/tests/unit/builtins/test_shush_hook.py
@@ -1,15 +1,6 @@
+# ruff: noqa: SLF001, T201
 from unittest.mock import MagicMock
-
 from nornflow.builtins.hooks import ShushHook
-from nornflow.hooks.exceptions import HookValidationError
-
-
-class MockNornir:
-    """Simple mock object for Nornir that behaves like a real object for attribute checks."""
-    
-    def __init__(self):
-        self.processors = []
-        self.inventory = MagicMock()
 
 
 class TestShushHook:
@@ -27,62 +18,52 @@ class TestShushHook:
         """Test hook initialization with True value."""
         hook = ShushHook(True)
         assert hook.value is True
-        assert hook.is_jinja2_expression is False
 
     def test_init_with_false_value(self):
         """Test hook initialization with False value."""
         hook = ShushHook(False)
         assert hook.value is False
-        assert hook.is_jinja2_expression is False
 
     def test_init_without_value(self):
         """Test hook initialization without a value defaults to None."""
         hook = ShushHook()
         assert hook.value is None
-        assert hook.is_jinja2_expression is False
 
     def test_init_with_none_value_explicit(self):
         """Test hook initialization with explicit None value."""
         hook = ShushHook(None)
         assert hook.value is None
-        assert hook.is_jinja2_expression is False
 
     def test_init_with_jinja2_expression(self):
         """Test hook initialization with Jinja2 expression."""
         hook = ShushHook("{{ debug_mode }}")
         assert hook.value == "{{ debug_mode }}"
-        assert hook.is_jinja2_expression is True
 
     def test_init_with_jinja2_expression_using_percent(self):
         """Test hook initialization with Jinja2 expression using {% syntax."""
         hook = ShushHook("{% if condition %}true{% endif %}")
-        assert hook.is_jinja2_expression is True
+        assert hook.value == "{% if condition %}true{% endif %}"
 
-    def test_task_started_does_nothing_when_false(self):
+    def test_task_started_does_nothing_when_false(self, mock_task):
         """Test task_started does nothing when value is False."""
         hook = ShushHook(False)
-        mock_task = MagicMock()
-        mock_task.nornir = MockNornir()
 
         hook.task_started(mock_task)
 
         assert not hasattr(mock_task.nornir, '_nornflow_suppressed_tasks')
 
-    def test_task_started_does_nothing_when_none(self):
+    def test_task_started_does_nothing_when_none(self, mock_task):
         """Test task_started does nothing when value is None."""
         hook = ShushHook(None)
-        mock_task = MagicMock()
-        mock_task.nornir = MockNornir()
 
         hook.task_started(mock_task)
 
         assert not hasattr(mock_task.nornir, '_nornflow_suppressed_tasks')
 
-    def test_task_started_warns_when_no_compatible_processor(self, capsys):
+    def test_task_started_warns_when_no_compatible_processor(self, capsys, mock_task):
         """Test task_started warns when no processor supports shush hook."""
         hook = ShushHook(True)
-        mock_task = MagicMock()
-        mock_task.nornir = MockNornir()
+        
         mock_task.nornir.processors = []
 
         hook.task_started(mock_task)
@@ -92,15 +73,11 @@ class TestShushHook:
         assert "no compatible processor found" in captured.out
         assert not hasattr(mock_task.nornir, '_nornflow_suppressed_tasks')
 
-    def test_task_started_warns_when_processor_lacks_support(self, capsys):
+    def test_task_started_warns_when_processor_lacks_support(self, capsys, mock_task, mock_processor_incompatible):
         """Test task_started warns when processor doesn't have supports_shush_hook attribute."""
         hook = ShushHook(True)
-        mock_task = MagicMock()
-        mock_task.nornir = MockNornir()
         
-        mock_processor = MagicMock()
-        mock_processor.supports_shush_hook = False
-        mock_task.nornir.processors = [mock_processor]
+        mock_task.nornir.processors = [mock_processor_incompatible]
 
         hook.task_started(mock_task)
 
@@ -109,16 +86,16 @@ class TestShushHook:
         assert "no compatible processor found in chain. Outputs are not going to be suppressed." in captured.out
         assert not hasattr(mock_task.nornir, '_nornflow_suppressed_tasks')
 
-    def test_task_started_sets_suppression_marker_with_compatible_processor(self):
+    def test_task_started_sets_suppression_marker_with_compatible_processor(self, mock_task, mock_processor_compatible):
         """Test task_started sets suppression marker when compatible processor exists."""
         hook = ShushHook(True)
-        mock_task = MagicMock()
-        mock_task.name = "test_task"
-        mock_task.nornir = MockNornir()
         
-        mock_processor = MagicMock()
-        mock_processor.supports_shush_hook = True
-        mock_task.nornir.processors = [mock_processor]
+        mock_task.nornir.processors = [mock_processor_compatible]
+        
+        # Mock the hook's context to include a task_model with canonical_id
+        mock_task_model = MagicMock()
+        mock_task_model.canonical_id = "test_task"
+        hook._current_context = {"task_model": mock_task_model}
 
         hook.task_started(mock_task)
 
@@ -126,106 +103,101 @@ class TestShushHook:
         assert isinstance(mock_task.nornir._nornflow_suppressed_tasks, set)
         assert "test_task" in mock_task.nornir._nornflow_suppressed_tasks
 
-    def test_task_started_creates_set_if_not_exists(self):
+    def test_task_started_creates_set_if_not_exists(self, mock_task, mock_processor_compatible):
         """Test task_started creates the suppressed_tasks set if it doesn't exist."""
         hook = ShushHook(True)
-        mock_task = MagicMock()
+        
+        mock_task.nornir.processors = [mock_processor_compatible]
         mock_task.name = "task1"
-        mock_task.nornir = MockNornir()
         
-        mock_processor = MagicMock()
-        mock_processor.supports_shush_hook = True
-        mock_task.nornir.processors = [mock_processor]
+        # Mock the hook's context to include a task_model with canonical_id
+        mock_task_model = MagicMock()
+        mock_task_model.canonical_id = "task1"
+        hook._current_context = {"task_model": mock_task_model}
 
         hook.task_started(mock_task)
 
         assert hasattr(mock_task.nornir, '_nornflow_suppressed_tasks')
         assert "task1" in mock_task.nornir._nornflow_suppressed_tasks
 
-    def test_task_started_adds_to_existing_set(self):
+    def test_task_started_adds_to_existing_set(self, mock_task, mock_processor_compatible):
         """Test task_started adds task name to existing suppressed_tasks set."""
         hook = ShushHook(True)
-        mock_task = MagicMock()
-        mock_task.name = "task2"
-        mock_task.nornir = MockNornir()
+        
         mock_task.nornir._nornflow_suppressed_tasks = {"task1"}
+        mock_task.nornir.processors = [mock_processor_compatible]
+        mock_task.name = "task2"
         
-        mock_processor = MagicMock()
-        mock_processor.supports_shush_hook = True
-        mock_task.nornir.processors = [mock_processor]
+        # Mock the hook's context to include a task_model with canonical_id
+        mock_task_model = MagicMock()
+        mock_task_model.canonical_id = "task2"
+        hook._current_context = {"task_model": mock_task_model}
 
         hook.task_started(mock_task)
 
         assert "task1" in mock_task.nornir._nornflow_suppressed_tasks
         assert "task2" in mock_task.nornir._nornflow_suppressed_tasks
 
-    def test_task_started_with_multiple_processors_one_compatible(self):
+    def test_task_started_with_multiple_processors_one_compatible(self, mock_task, mock_processor_compatible, mock_processor_incompatible):
         """Test task_started works when one of multiple processors is compatible."""
         hook = ShushHook(True)
-        mock_task = MagicMock()
-        mock_task.name = "test_task"
-        mock_task.nornir = MockNornir()
         
-        mock_processor1 = MagicMock()
-        mock_processor1.supports_shush_hook = False
+        mock_task.nornir.processors = [mock_processor_incompatible, mock_processor_compatible]
         
-        mock_processor2 = MagicMock()
-        mock_processor2.supports_shush_hook = True
-        
-        mock_task.nornir.processors = [mock_processor1, mock_processor2]
+        # Mock the hook's context to include a task_model with canonical_id
+        mock_task_model = MagicMock()
+        mock_task_model.canonical_id = "test_task"
+        hook._current_context = {"task_model": mock_task_model}
 
         hook.task_started(mock_task)
 
         assert hasattr(mock_task.nornir, '_nornflow_suppressed_tasks')
         assert "test_task" in mock_task.nornir._nornflow_suppressed_tasks
 
-    def test_task_completed_removes_suppression_marker(self):
+    def test_task_completed_removes_suppression_marker(self, mock_nornir):
         """Test task_completed removes the task from suppressed_tasks set."""
         hook = ShushHook(True)
         mock_task = MagicMock()
         mock_task.name = "test_task"
-        mock_task.nornir = MockNornir()
+        mock_task.nornir = mock_nornir
         mock_task.nornir._nornflow_suppressed_tasks = {"test_task", "other_task"}
+        
+        # Mock the hook's context to include a task_model with canonical_id
+        mock_task_model = MagicMock()
+        mock_task_model.canonical_id = "test_task"
+        hook._current_context = {"task_model": mock_task_model}
 
         hook.task_completed(mock_task, MagicMock())
 
         assert "test_task" not in mock_task.nornir._nornflow_suppressed_tasks
         assert "other_task" in mock_task.nornir._nornflow_suppressed_tasks
 
-    def test_task_completed_handles_missing_attribute(self):
+    def test_task_completed_handles_missing_attribute(self, mock_nornir):
         """Test task_completed handles missing _nornflow_suppressed_tasks gracefully."""
         hook = ShushHook(True)
         mock_task = MagicMock()
         mock_task.name = "test_task"
-        mock_task.nornir = MockNornir()
+        mock_task.nornir = mock_nornir
 
         hook.task_completed(mock_task, MagicMock())
 
-    def test_task_completed_handles_task_not_in_set(self):
+    def test_task_completed_handles_task_not_in_set(self, mock_nornir):
         """Test task_completed handles task not being in the set gracefully."""
         hook = ShushHook(True)
         mock_task = MagicMock()
         mock_task.name = "test_task"
-        mock_task.nornir = MockNornir()
+        mock_task.nornir = mock_nornir
         mock_task.nornir._nornflow_suppressed_tasks = {"other_task"}
+        
+        # Mock the hook's context to include a task_model with canonical_id
+        mock_task_model = MagicMock()
+        mock_task_model.canonical_id = "test_task"
+        hook._current_context = {"task_model": mock_task_model}
 
         hook.task_completed(mock_task, MagicMock())
 
         assert "other_task" in mock_task.nornir._nornflow_suppressed_tasks
 
-    def test_validate_string_without_jinja2_markers(self):
-        """Test validation fails for string values without Jinja2 markers."""
-        from nornflow.models import TaskModel
-        
-        hook = ShushHook("invalid_string")
-        task_model = TaskModel.create({"name": "test_task", "args": {}})
-        
-        try:
-            hook.execute_hook_validations(task_model)
-            assert False, "Should have raised HookValidationError"
-        except HookValidationError as e:
-            assert "without Jinja2 markers" in str(e)
-
     def test_validate_string_with_jinja2_markers(self):
         """Test validation passes for string values with Jinja2 markers."""
         from nornflow.models import TaskModel
@@ -246,87 +218,92 @@ class TestShushHook:
         hook_true.execute_hook_validations(task_model)
         hook_false.execute_hook_validations(task_model)
 
-    def test_evaluate_suppression_with_boolean_true(self):
-        """Test _evaluate_suppression returns True for boolean True value."""
+    def test_get_resolved_value_with_boolean_true(self, mock_task):
+        """Test get_resolved_value returns True for boolean True value."""
         hook = ShushHook(True)
-        mock_task = MagicMock()
         
-        result = hook._evaluate_suppression(mock_task)
+        result = hook.get_resolved_value(mock_task, as_bool=True, default=False)
         
         assert result is True
 
-    def test_evaluate_suppression_with_boolean_false(self):
-        """Test _evaluate_suppression returns False for boolean False value."""
+    def test_get_resolved_value_with_boolean_false(self, mock_task):
+        """Test get_resolved_value returns False for boolean False value."""
         hook = ShushHook(False)
-        mock_task = MagicMock()
         
-        result = hook._evaluate_suppression(mock_task)
+        result = hook.get_resolved_value(mock_task, as_bool=True, default=False)
         
         assert result is False
 
-    def test_evaluate_suppression_with_none(self):
-        """Test _evaluate_suppression returns False for None value."""
+    def test_get_resolved_value_with_none(self, mock_task):
+        """Test get_resolved_value returns default for None value."""
         hook = ShushHook(None)
-        mock_task = MagicMock()
         
-        result = hook._evaluate_suppression(mock_task)
+        result = hook.get_resolved_value(mock_task, as_bool=True, default=False)
         
         assert result is False
 
-    def test_evaluate_suppression_with_jinja2_expression_true(self):
-        """Test _evaluate_suppression with Jinja2 expression that evaluates to True."""
+    def test_get_resolved_value_with_jinja2_expression_true(self, mock_task, mock_vars_manager):
+        """Test get_resolved_value with Jinja2 expression that evaluates to True."""
         hook = ShushHook("{{ true }}")
-        mock_task = MagicMock()
-        mock_task.nornir.inventory.hosts = {"host1": MagicMock()}
         
-        mock_vars_manager = MagicMock()
-        mock_vars_manager.resolve_string.return_value = "True"
+        # Configure vars_manager to return "true" string when resolving
+        mock_vars_manager.resolve_string.return_value = "true"
+
+        # Configure hook context
         hook._current_context = {"vars_manager": mock_vars_manager}
         
-        result = hook._evaluate_suppression(mock_task)
+        # Configure mock task inventory for host extraction
+        mock_host = MagicMock()
+        mock_host.name = "test_host"
+        mock_task.nornir.inventory.hosts = {"test_host": mock_host}
+
+        result = hook.get_resolved_value(mock_task, as_bool=True, default=False)
         
         assert result is True
+        mock_vars_manager.resolve_string.assert_called_with("{{ true }}", "test_host")
 
-    def test_evaluate_suppression_with_jinja2_expression_false(self):
-        """Test _evaluate_suppression with Jinja2 expression that evaluates to False."""
+    def test_get_resolved_value_with_jinja2_expression_false(self, mock_task, mock_vars_manager):
+        """Test get_resolved_value with Jinja2 expression that evaluates to False."""
         hook = ShushHook("{{ false }}")
-        mock_task = MagicMock()
-        mock_task.nornir.inventory.hosts = {"host1": MagicMock()}
         
-        mock_vars_manager = MagicMock()
-        mock_vars_manager.resolve_string.return_value = "False"
+        # Configure vars_manager to return "false" string when resolving
+        mock_vars_manager.resolve_string.return_value = "false"
+
+        # Configure hook context
         hook._current_context = {"vars_manager": mock_vars_manager}
         
-        result = hook._evaluate_suppression(mock_task)
+        # Configure mock task inventory for host extraction
+        mock_host = MagicMock()
+        mock_host.name = "test_host"
+        mock_task.nornir.inventory.hosts = {"test_host": mock_host}
+
+        result = hook.get_resolved_value(mock_task, as_bool=True, default=False)
         
         assert result is False
+        mock_vars_manager.resolve_string.assert_called_with("{{ false }}", "test_host")
 
     def test_hook_with_truthy_non_boolean_values(self):
-        """Test hook handles truthy non-boolean values correctly (as Jinja2 expressions)."""
+        """Test hook handles truthy non-boolean values correctly."""
         hook = ShushHook("yes")
         assert hook.value == "yes"
-        assert hook.is_jinja2_expression is False
 
     def test_hook_with_falsy_non_boolean_values(self):
         """Test hook handles falsy non-boolean values correctly."""
         hook = ShushHook("")
         assert hook.value == ""
-        assert hook.is_jinja2_expression is False
 
-    def test_hook_with_integer_zero(self):
+    def test_hook_with_integer_zero(self, mock_task):
         """Test hook with integer 0 (falsy non-boolean)."""
         hook = ShushHook(0)
-        mock_task = MagicMock()
         
-        result = hook._evaluate_suppression(mock_task)
+        result = hook.get_resolved_value(mock_task, as_bool=True, default=False)
         
         assert result is False
 
-    def test_hook_with_integer_one(self):
+    def test_hook_with_integer_one(self, mock_task):
         """Test hook with integer 1 (truthy non-boolean)."""
         hook = ShushHook(1)
-        mock_task = MagicMock()
         
-        result = hook._evaluate_suppression(mock_task)
+        result = hook.get_resolved_value(mock_task, as_bool=True, default=False)
         
         assert result is True
\ No newline at end of file
diff --git a/tests/unit/builtins/test_utils.py b/tests/unit/builtins/test_utils.py
index ab81ec2..340de6c 100644
--- a/tests/unit/builtins/test_utils.py
+++ b/tests/unit/builtins/test_utils.py
@@ -1,6 +1,3 @@
-# filepath: test_utils.py
-"""Tests for nornflow.builtins.utils module."""
-
 import json
 from unittest.mock import MagicMock
 
@@ -18,32 +15,25 @@ from nornflow.exceptions import ProcessorError
 class TestGetTaskVarsManager:
     """Test get_task_vars_manager function."""
 
-    def test_finds_vars_manager_in_first_processor(self):
+    def test_finds_vars_manager_in_first_processor(self, mock_processor_with_vars_manager):
         """Test finding vars_manager in the first processor."""
-        mock_vars_manager = MagicMock()
-        mock_processor = MagicMock()
-        mock_processor.vars_manager = mock_vars_manager
-
         mock_task = MagicMock()
-        mock_task.nornir.processors = [mock_processor]
+        mock_task.nornir.processors = [mock_processor_with_vars_manager]
 
         result = get_task_vars_manager(mock_task)
 
-        assert result is mock_vars_manager
+        assert result is mock_processor_with_vars_manager.vars_manager
 
-    def test_finds_vars_manager_in_second_processor(self):
+    def test_finds_vars_manager_in_second_processor(self, mock_processor_with_vars_manager):
         """Test finding vars_manager in a later processor."""
-        mock_vars_manager = MagicMock()
         mock_processor1 = MagicMock(spec=[])
-        mock_processor2 = MagicMock()
-        mock_processor2.vars_manager = mock_vars_manager
-
+
         mock_task = MagicMock()
-        mock_task.nornir.processors = [mock_processor1, mock_processor2]
+        mock_task.nornir.processors = [mock_processor1, mock_processor_with_vars_manager]
 
         result = get_task_vars_manager(mock_task)
 
-        assert result is mock_vars_manager
+        assert result is mock_processor_with_vars_manager.vars_manager
 
     def test_raises_exception_when_no_vars_manager_found(self):
         """Test raises ProcessorError when no processor has vars_manager."""
@@ -180,44 +170,26 @@ class TestFormatValueForDisplay:
 class TestGetResolvedRuntimeValues:
     """Test get_resolved_runtime_values function."""
 
-    def test_get_values_with_vars_manager(self):
+    def test_get_values_with_vars_manager(self, mock_task, mock_processor_with_vars_manager, mock_device_context):
         """Test retrieving resolved runtime values when vars_manager exists."""
-        mock_device_context = MagicMock()
         mock_device_context.runtime_vars = {
             "var1": "value1",
             "var2": 123,
             "var3": {"nested": "dict"}
         }
 
-        mock_vars_manager = MagicMock()
-        mock_vars_manager.get_device_context.return_value = mock_device_context
-
-        mock_processor = MagicMock()
-        mock_processor.vars_manager = mock_vars_manager
-
-        mock_task = MagicMock()
-        mock_task.host.name = "test_host"
-        mock_task.nornir.processors = [mock_processor]
+        mock_task.nornir.processors = [mock_processor_with_vars_manager]
 
         result = get_resolved_runtime_values(mock_task, ["var1", "var2"])
 
         assert result == {"var1": "value1", "var2": 123}
-        mock_vars_manager.get_device_context.assert_called_once_with("test_host")
+        mock_processor_with_vars_manager.vars_manager.get_device_context.assert_called_once_with("test_host")
 
-    def test_get_values_when_var_not_found(self):
+    def test_get_values_when_var_not_found(self, mock_task, mock_processor_with_vars_manager, mock_device_context):
         """Test behavior when a requested variable is not in runtime_vars."""
-        mock_device_context = MagicMock()
         mock_device_context.runtime_vars = {"var1": "value1"}
 
-        mock_vars_manager = MagicMock()
-        mock_vars_manager.get_device_context.return_value = mock_device_context
-
-        mock_processor = MagicMock()
-        mock_processor.vars_manager = mock_vars_manager
-
-        mock_task = MagicMock()
-        mock_task.host.name = "test_host"
-        mock_task.nornir.processors = [mock_processor]
+        mock_task.nornir.processors = [mock_processor_with_vars_manager]
 
         result = get_resolved_runtime_values(mock_task, ["var1", "missing_var"])
 
@@ -237,42 +209,21 @@ class TestGetResolvedRuntimeValues:
         with pytest.raises(ProcessorError, match="Could not find NornFlowVariableProcessor"):
             get_resolved_runtime_values(mock_task, ["var1", "var2"])
 
-    def test_get_values_with_empty_var_names_list(self):
+    def test_get_values_with_empty_var_names_list(self, mock_task, mock_processor_with_vars_manager, mock_device_context):
         """Test with empty list of variable names."""
-        mock_device_context = MagicMock()
         mock_device_context.runtime_vars = {"var1": "value1"}
 
-        mock_vars_manager = MagicMock()
-        mock_vars_manager.get_device_context.return_value = mock_device_context
-
-        mock_processor = MagicMock()
-        mock_processor.vars_manager = mock_vars_manager
-
-        mock_task = MagicMock()
-        mock_task.host.name = "test_host"
-        mock_task.nornir.processors = [mock_processor]
+        mock_task.nornir.processors = [mock_processor_with_vars_manager]
 
         result = get_resolved_runtime_values(mock_task, [])
 
         assert result == {}
 
-    def test_get_values_all_missing(self):
+    def test_get_values_all_missing(self, mock_task, mock_processor_with_vars_manager, mock_device_context):
         """Test when all requested variables are missing from runtime_vars."""
-        mock_device_context = MagicMock()
         mock_device_context.runtime_vars = {}
 
-        mock_vars_manager = MagicMock()
-        mock_vars_manager.get_device_context.return_value = mock_device_context
-
-        mock_vars_manager = MagicMock()
-        mock_vars_manager.get_device_context.return_value = mock_device_context
-
-        mock_processor = MagicMock()
-        mock_processor.vars_manager = mock_vars_manager
-
-        mock_task = MagicMock()
-        mock_task.host.name = "test_host"
-        mock_task.nornir.processors = [mock_processor]
+        mock_task.nornir.processors = [mock_processor_with_vars_manager]
 
         result = get_resolved_runtime_values(mock_task, ["var1", "var2"])
 
@@ -285,23 +236,14 @@ class TestGetResolvedRuntimeValues:
 class TestBuildSetTaskReport:
     """Test build_set_task_report function."""
 
-    def test_build_report_with_simple_values(self):
+    def test_build_report_with_simple_values(self, mock_task, mock_processor_with_vars_manager, mock_device_context):
         """Test building report with simple variable values."""
-        mock_device_context = MagicMock()
         mock_device_context.runtime_vars = {
             "var1": "value1",
             "var2": 123
         }
 
-        mock_vars_manager = MagicMock()
-        mock_vars_manager.get_device_context.return_value = mock_device_context
-
-        mock_processor = MagicMock()
-        mock_processor.vars_manager = mock_vars_manager
-
-        mock_task = MagicMock()
-        mock_task.host.name = "test_host"
-        mock_task.nornir.processors = [mock_processor]
+        mock_task.nornir.processors = [mock_processor_with_vars_manager]
 
         kwargs = {"var1": "{{ some_template }}", "var2": "{{ other_template }}"}
 
@@ -311,22 +253,14 @@ class TestBuildSetTaskReport:
         assert '• var1 = "value1"' in result
         assert "• var2 = 123" in result
 
-    def test_build_report_with_dict_value(self):
+    def test_build_report_with_dict_value(self, mock_task, mock_processor_with_vars_manager, mock_device_context):
         """Test building report with dictionary values."""
-        mock_device_context = MagicMock()
         mock_device_context.runtime_vars = {
             "config": {"interface": "eth0", "ip": "192.168.1.1"}
         }
 
-        mock_vars_manager = MagicMock()
-        mock_vars_manager.get_device_context.return_value = mock_device_context
-
-        mock_processor = MagicMock()
-        mock_processor.vars_manager = mock_vars_manager
-
-        mock_task = MagicMock()
         mock_task.host.name = "router1"
-        mock_task.nornir.processors = [mock_processor]
+        mock_task.nornir.processors = [mock_processor_with_vars_manager]
 
         kwargs = {"config": "{{ device_config }}"}
 
@@ -337,22 +271,14 @@ class TestBuildSetTaskReport:
         assert '"interface": "eth0"' in result
         assert '"ip": "192.168.1.1"' in result
 
-    def test_build_report_with_list_value(self):
+    def test_build_report_with_list_value(self, mock_task, mock_processor_with_vars_manager, mock_device_context):
         """Test building report with list values."""
-        mock_device_context = MagicMock()
         mock_device_context.runtime_vars = {
             "interfaces": ["eth0", "eth1", "lo"]
         }
 
-        mock_vars_manager = MagicMock()
-        mock_vars_manager.get_device_context.return_value = mock_device_context
-
-        mock_processor = MagicMock()
-        mock_processor.vars_manager = mock_vars_manager
-
-        mock_task = MagicMock()
         mock_task.host.name = "switch1"
-        mock_task.nornir.processors = [mock_processor]
+        mock_task.nornir.processors = [mock_processor_with_vars_manager]
 
         kwargs = {"interfaces": "{{ device_interfaces }}"}
 
@@ -397,20 +323,11 @@ class TestBuildSetTaskReport:
 
         assert result == "No variables were set (no arguments provided to 'set' task)"
 
-    def test_build_report_with_missing_variable(self):
+    def test_build_report_with_missing_variable(self, mock_task, mock_processor_with_vars_manager, mock_device_context):
         """Test building report when variable is not found in runtime_vars."""
-        mock_device_context = MagicMock()
         mock_device_context.runtime_vars = {"var1": "value1"}
 
-        mock_vars_manager = MagicMock()
-        mock_vars_manager.get_device_context.return_value = mock_device_context
-
-        mock_processor = MagicMock()
-        mock_processor.vars_manager = mock_vars_manager
-
-        mock_task = MagicMock()
-        mock_task.host.name = "test_host"
-        mock_task.nornir.processors = [mock_processor]
+        mock_task.nornir.processors = [mock_processor_with_vars_manager]
 
         kwargs = {"var1": "template1", "missing_var": "template2"}
 
@@ -420,23 +337,14 @@ class TestBuildSetTaskReport:
         assert '• var1 = "value1"' in result
         assert '• missing_var = "<value not found in runtime vars>"' in result
 
-    def test_build_report_with_boolean_values(self):
+    def test_build_report_with_boolean_values(self, mock_task, mock_processor_with_vars_manager, mock_device_context):
         """Test building report with boolean values."""
-        mock_device_context = MagicMock()
         mock_device_context.runtime_vars = {
             "enabled": True,
             "disabled": False
         }
 
-        mock_vars_manager = MagicMock()
-        mock_vars_manager.get_device_context.return_value = mock_device_context
-
-        mock_processor = MagicMock()
-        mock_processor.vars_manager = mock_vars_manager
-
-        mock_task = MagicMock()
-        mock_task.host.name = "test_host"
-        mock_task.nornir.processors = [mock_processor]
+        mock_task.nornir.processors = [mock_processor_with_vars_manager]
 
         kwargs = {"enabled": "{{ is_enabled }}", "disabled": "{{ is_disabled }}"}
 
@@ -445,20 +353,11 @@ class TestBuildSetTaskReport:
         assert "• enabled = True" in result
         assert "• disabled = False" in result
 
-    def test_build_report_with_none_value(self):
+    def test_build_report_with_none_value(self, mock_task, mock_processor_with_vars_manager, mock_device_context):
         """Test building report with None value."""
-        mock_device_context = MagicMock()
         mock_device_context.runtime_vars = {"null_var": None}
 
-        mock_vars_manager = MagicMock()
-        mock_vars_manager.get_device_context.return_value = mock_device_context
-
-        mock_processor = MagicMock()
-        mock_processor.vars_manager = mock_vars_manager
-
-        mock_task = MagicMock()
-        mock_task.host.name = "test_host"
-        mock_task.nornir.processors = [mock_processor]
+        mock_task.nornir.processors = [mock_processor_with_vars_manager]
 
         kwargs = {"null_var": "{{ template }}"}
 
@@ -466,24 +365,15 @@ class TestBuildSetTaskReport:
 
         assert "• null_var = None" in result
 
-    def test_build_report_preserves_variable_order(self):
+    def test_build_report_preserves_variable_order(self, mock_task, mock_processor_with_vars_manager, mock_device_context):
         """Test that report maintains the order of variables from kwargs."""
-        mock_device_context = MagicMock()
         mock_device_context.runtime_vars = {
             "var_c": "value_c",
             "var_a": "value_a",
             "var_b": "value_b"
         }
 
-        mock_vars_manager = MagicMock()
-        mock_vars_manager.get_device_context.return_value = mock_device_context
-
-        mock_processor = MagicMock()
-        mock_processor.vars_manager = mock_vars_manager
-
-        mock_task = MagicMock()
-        mock_task.host.name = "test_host"
-        mock_task.nornir.processors = [mock_processor]
+        mock_task.nornir.processors = [mock_processor_with_vars_manager]
 
         kwargs = {"var_c": "t1", "var_a": "t2", "var_b": "t3"}
 
diff --git a/tests/unit/cli/test_init.py b/tests/unit/cli/test_init.py
index 3e57bae..9d8b97c 100644
--- a/tests/unit/cli/test_init.py
+++ b/tests/unit/cli/test_init.py
@@ -2,11 +2,13 @@ from unittest.mock import MagicMock, patch
 
 import pytest
 
+from nornflow.cli.exceptions import CLIInitError
 from nornflow.cli.init import (
+    create_directories_from_settings,
     get_user_confirmation,
     init,
     setup_builder,
-    setup_directory_structure,
+    setup_nornir_configs,
     setup_nornflow_settings_file,
     setup_sample_content,
     show_info_post_init,
@@ -24,201 +26,235 @@ class MockExit(Exception):
 class TestInitCommand:
     """Tests for the 'init' CLI command."""
 
+    @patch("nornflow.cli.init.NornFlowSettings")
     @patch("nornflow.cli.init.setup_builder")
     @patch("nornflow.cli.init.get_user_confirmation")
-    @patch("nornflow.cli.init.setup_directory_structure")
+    @patch("nornflow.cli.init.setup_nornir_configs")
     @patch("nornflow.cli.init.setup_nornflow_settings_file")
+    @patch("nornflow.cli.init.create_directories_from_settings")
     @patch("nornflow.cli.init.setup_sample_content")
     @patch("nornflow.cli.init.show_info_post_init")
     def test_init_successful(
         self,
         mock_show_info,
         mock_setup_sample_content,
-        mock_setup_config,
-        mock_setup_dirs,
+        mock_create_dirs,
+        mock_setup_settings,
+        mock_setup_nornir,
         mock_confirmation,
         mock_setup_builder,
+        mock_settings_class,
     ):
         """Test successful initialization."""
-        # Setup mocks
         mock_builder = MagicMock()
         mock_nornflow = MagicMock()
+        mock_settings = MagicMock()
         mock_builder.build.return_value = mock_nornflow
         mock_setup_builder.return_value = mock_builder
+        mock_settings_class.load.return_value = mock_settings
         mock_confirmation.return_value = True
         mock_ctx = MagicMock()
         mock_ctx.obj = {"settings": "test_settings.yaml"}
 
-        # Call the init function
         init(mock_ctx)
 
-        # Verify all expected functions were called
-        mock_setup_builder.assert_called_once_with(mock_ctx)
         mock_confirmation.assert_called_once()
-        mock_setup_dirs.assert_called_once()
-        mock_setup_config.assert_called_once_with("test_settings.yaml")
+        mock_setup_settings.assert_called_once_with("test_settings.yaml")
+        mock_settings_class.load.assert_called_once()
+        mock_setup_nornir.assert_called_once_with(mock_settings)
+        mock_create_dirs.assert_called_once_with(mock_settings)
+        mock_setup_builder.assert_called_once_with(mock_ctx)
+        mock_builder.build.assert_called_once()
         mock_setup_sample_content.assert_called_once_with(mock_nornflow)
         mock_show_info.assert_called_once_with(mock_nornflow)
 
     @patch("nornflow.cli.init.setup_builder")
     @patch("nornflow.cli.init.get_user_confirmation")
-    @patch("nornflow.cli.init.setup_directory_structure")
-    def test_init_user_declines(self, mock_setup_dirs, mock_confirmation, mock_setup_builder):
-        """Test initialization when user declines confirmation."""
-        # Setup mocks
+    @patch("nornflow.cli.init.setup_nornir_configs")
+    def test_init_user_declines(self, mock_setup_nornir, mock_confirmation, mock_setup_builder):
+        """Test initialization when user declines."""
         mock_confirmation.return_value = False
         mock_ctx = MagicMock()
+        mock_ctx.obj = {"settings": ""}
 
-        # Call the init function
         init(mock_ctx)
 
-        # Verify setup_builder was called but not setup_directory_structure
-        mock_setup_builder.assert_called_once()
         mock_confirmation.assert_called_once()
-        mock_setup_dirs.assert_not_called()
+        mock_setup_nornir.assert_not_called()
 
     @patch("nornflow.cli.init.setup_builder")
     @patch("nornflow.cli.init.get_user_confirmation")
-    @patch("nornflow.cli.init.setup_directory_structure")
-    @patch("nornflow.cli.init.CLIInitError")
-    def test_init_file_not_found_error(
-        self, mock_error, mock_setup_dirs, mock_confirmation, mock_setup_builder
-    ):
-        """Test initialization when FileNotFoundError occurs."""
-        # Setup mocks
+    @patch("nornflow.cli.init.setup_nornflow_settings_file")
+    def test_init_file_not_found_error(self, mock_setup_settings, mock_confirmation, mock_setup_builder):
+        """Test initialization with file not found error."""
         mock_confirmation.return_value = True
-        mock_setup_dirs.side_effect = FileNotFoundError("test file not found")
-        mock_error_instance = MagicMock()
-        mock_error.return_value = mock_error_instance
+        mock_setup_settings.side_effect = FileNotFoundError("File not found")
         mock_ctx = MagicMock()
+        mock_ctx.obj = {"settings": ""}
 
-        # Don't expect Exit to be raised in test since we're mocking the implementation
-        with patch("nornflow.cli.init.typer") as mock_typer:
-            mock_typer.Exit = MockExit
-            with pytest.raises(MockExit) as exc_info:
-                init(mock_ctx)
-
-            assert exc_info.value.code == 2
-            mock_error.assert_called_once()
-            mock_error_instance.show.assert_called_once()
+        with pytest.raises(CLIInitError):
+            init(mock_ctx)
 
     @patch("nornflow.cli.init.setup_builder")
     @patch("nornflow.cli.init.get_user_confirmation")
-    @patch("nornflow.cli.init.setup_directory_structure")
-    @patch("nornflow.cli.init.CLIInitError")
-    def test_init_permission_error(self, mock_error, mock_setup_dirs, mock_confirmation, mock_setup_builder):
-        """Test initialization when PermissionError occurs."""
-        # Setup mocks
+    @patch("nornflow.cli.init.setup_nornflow_settings_file")
+    def test_init_permission_error(self, mock_setup_settings, mock_confirmation, mock_setup_builder):
+        """Test initialization with permission error."""
         mock_confirmation.return_value = True
-        mock_setup_dirs.side_effect = PermissionError("permission denied")
-        mock_error_instance = MagicMock()
-        mock_error.return_value = mock_error_instance
+        mock_setup_settings.side_effect = PermissionError("Permission denied")
         mock_ctx = MagicMock()
+        mock_ctx.obj = {"settings": ""}
 
-        # Don't expect Exit to be raised in test since we're mocking the implementation
-        with patch("nornflow.cli.init.typer") as mock_typer:
-            mock_typer.Exit = MockExit
-            with pytest.raises(MockExit) as exc_info:
-                init(mock_ctx)
-
-            assert exc_info.value.code == 2
-            mock_error.assert_called_once()
-            mock_error_instance.show.assert_called_once()
+        with pytest.raises(CLIInitError):
+            init(mock_ctx)
 
 
 class TestSetupFunctions:
     """Tests for the setup helper functions used by the init command."""
 
     @patch("nornflow.cli.init.NornFlowBuilder")
-    def test_setup_builder(self, mock_nornflow_builder):
-        """Test setup_builder function."""
-        mock_builder = MagicMock()
-        mock_nornflow_builder.return_value = mock_builder
+    @patch("nornflow.cli.init.NORNFLOW_SETTINGS")
+    @patch("nornflow.cli.init.Path")
+    def test_setup_builder_with_custom_settings(self, mock_path_class, mock_default_settings, mock_nornflow_builder):
+        """Test setup_builder function with custom settings."""
         mock_ctx = MagicMock()
-        mock_ctx.obj = {"settings": "custom_settings.yaml"}
+        mock_ctx.obj = {"settings": "test_settings.yaml"}
+        mock_builder_instance = MagicMock()
+        mock_nornflow_builder.return_value = mock_builder_instance
+        mock_path = MagicMock()
+        mock_path.exists.return_value = True
+        mock_path_class.return_value = mock_path
 
         result = setup_builder(mock_ctx)
 
-        assert result == mock_builder
-        mock_builder.with_settings_path.assert_called_once_with("custom_settings.yaml")
+        mock_nornflow_builder.assert_called_once()
+        mock_builder_instance.with_settings_path.assert_called_once_with("test_settings.yaml")
+        assert result == mock_builder_instance
+
+    @patch("nornflow.cli.init.NornFlowBuilder")
+    @patch("nornflow.cli.init.NORNFLOW_SETTINGS")
+    def test_setup_builder_with_default_settings(self, mock_default_settings, mock_nornflow_builder):
+        """Test setup_builder function with default settings."""
+        mock_ctx = MagicMock()
+        mock_ctx.obj = {"settings": None}
+        mock_builder_instance = MagicMock()
+        mock_nornflow_builder.return_value = mock_builder_instance
+        mock_default_settings.exists.return_value = True
+
+        result = setup_builder(mock_ctx)
+
+        mock_nornflow_builder.assert_called_once()
+        mock_builder_instance.with_settings_path.assert_called_once_with(mock_default_settings)
+        assert result == mock_builder_instance
 
     @patch("nornflow.cli.init.typer.confirm")
     def test_get_user_confirmation_yes(self, mock_confirm):
         """Test get_user_confirmation when user confirms."""
         mock_confirm.return_value = True
+
         result = get_user_confirmation()
+
         assert result is True
-        mock_confirm.assert_called_once()
 
     @patch("nornflow.cli.init.typer.confirm")
     def test_get_user_confirmation_no(self, mock_confirm):
         """Test get_user_confirmation when user declines."""
         mock_confirm.return_value = False
+
         result = get_user_confirmation()
+
         assert result is False
-        mock_confirm.assert_called_once()
 
-    @patch("nornflow.cli.init.create_directory")
     @patch("nornflow.cli.init.shutil.copytree")
-    @patch("nornflow.cli.init.shutil.copy")
-    def test_setup_directory_structure(self, mock_copy, mock_copytree, mock_create_dir):
-        """Test setup_directory_structure creates directories."""
-        # Setup mock to return True to simulate directory creation
-        mock_create_dir.return_value = True
+    @patch("nornflow.cli.init.typer.secho")
+    @patch("nornflow.cli.init.Path")
+    def test_setup_nornir_configs_new_directory(self, mock_path_class, mock_secho, mock_copytree):
+        """Test setup_nornir_configs when directory doesn't exist."""
+        mock_settings = MagicMock()
+        mock_settings.nornir_config_file = "/path/to/nornir_configs/config.yaml"
+
+        mock_config_path = MagicMock()
+        mock_config_dir = MagicMock()
+        mock_config_dir.exists.return_value = False
+        mock_config_path.parent = mock_config_dir
+        mock_path_class.return_value = mock_config_path
 
-        setup_directory_structure()
+        setup_nornir_configs(mock_settings)
 
-        # Verify create_directory was called at least once
-        assert mock_create_dir.call_count >= 1  # Should be called for nornir_configs at minimum
+        mock_copytree.assert_called_once()
+        assert mock_secho.call_count >= 1
+
+    @patch("nornflow.cli.init.shutil.copytree")
+    @patch("nornflow.cli.init.typer.secho")
+    @patch("nornflow.cli.init.Path")
+    def test_setup_nornir_configs_existing_directory(self, mock_path_class, mock_secho, mock_copytree):
+        """Test setup_nornir_configs when directory already exists."""
+        mock_settings = MagicMock()
+        mock_settings.nornir_config_file = "/path/to/nornir_configs/config.yaml"
+
+        mock_config_path = MagicMock()
+        mock_config_dir = MagicMock()
+        mock_config_dir.exists.return_value = True
+        mock_config_path.parent = mock_config_dir
+        mock_path_class.return_value = mock_config_path
 
-        # Verify shutil.copy or copytree were called to populate directories
-        assert mock_copy.call_count + mock_copytree.call_count > 0
+        setup_nornir_configs(mock_settings)
+
+        mock_copytree.assert_not_called()
 
     @patch("nornflow.cli.init.NORNFLOW_SETTINGS")
     @patch("nornflow.cli.init.SAMPLE_NORNFLOW_FILE")
     @patch("nornflow.cli.init.shutil.copy")
     @patch("nornflow.cli.init.typer.secho")
     @patch("nornflow.cli.init.os.getenv", return_value=None)
+    @patch("nornflow.cli.init.Path")
     def test_setup_nornflow_config_no_settings(
-        self, mock_getenv, mock_secho, mock_copy, mock_sample, mock_config
+        self, mock_path_class, mock_getenv, mock_secho, mock_copy, mock_sample, mock_default_settings
     ):
-        """Test setup_nornflow_settings_file with no settings."""
-        # Make Path.exists return False to ensure file is copied
-        mock_config.exists.return_value = False
+        """Test setup_nornflow_settings_file with no existing settings."""
+        mock_path = MagicMock()
+        mock_path.exists.return_value = False
+        mock_path_class.return_value = mock_path
+        mock_default_settings.exists.return_value = False
 
         setup_nornflow_settings_file("")
 
-        # Verify config file was copied
-        mock_copy.assert_called_once_with(mock_sample, mock_config)
-        mock_secho.assert_called_once()
+        mock_copy.assert_called_once()
+
+    @patch("nornflow.cli.init.create_directory")
+    def test_create_directories_from_settings(self, mock_create_dir):
+        """Test create_directories_from_settings function."""
+        mock_settings = MagicMock()
+        mock_settings.local_tasks = ["tasks"]
+        mock_settings.local_workflows = ["workflows"]
+        mock_settings.local_filters = ["filters"]
+        mock_settings.local_hooks = ["hooks"]
+        mock_settings.vars_dir = "vars"
+
+        create_directories_from_settings(mock_settings)
+
+        assert mock_create_dir.call_count == 5
 
     @patch("nornflow.cli.init.copy_sample_files_to_dir")
-    @patch("nornflow.cli.init.create_directory_and_copy_sample_files")
-    @patch("nornflow.cli.init.typer.secho")
     @patch("nornflow.cli.init.Path")
-    def test_setup_sample_content(self, mock_path, mock_secho, mock_create_and_copy, mock_copy_files):
-        """Test setup_sample_content copies sample files."""
-        # Configure the mocks
-        mock_create_and_copy.return_value = None
-        mock_copy_files.return_value = None
+    def test_setup_sample_content(self, mock_path_class, mock_copy_files):
+        """Test setup_sample_content function."""
         mock_nornflow = MagicMock()
-    
+        mock_nornflow.settings.local_tasks = ["tasks"]
+        mock_nornflow.settings.local_workflows = ["workflows"]
+        mock_nornflow.settings.vars_dir = "vars"
+
         setup_sample_content(mock_nornflow)
-    
-        # Verify copy_sample_files_to_dir is called twice
-        assert mock_copy_files.call_count == 2
-        
-        # Verify create_directory_and_copy_sample_files is called once
-        assert mock_create_and_copy.call_count == 1
+
+        assert mock_copy_files.call_count == 3
 
     @patch("nornflow.cli.init.show_nornflow_settings")
     @patch("nornflow.cli.init.show_catalog")
     def test_show_info_post_init(self, mock_show_catalog, mock_show_settings):
-        """Test show_info_post_init displays information."""
+        """Test show_info_post_init function."""
         mock_nornflow = MagicMock()
 
         show_info_post_init(mock_nornflow)
 
-        # Should call show functions
         mock_show_settings.assert_called_once_with(mock_nornflow)
-        mock_show_catalog.assert_called_once_with(mock_nornflow)
+        mock_show_catalog.assert_called_once_with(mock_nornflow)
\ No newline at end of file
diff --git a/tests/unit/cli/test_run.py b/tests/unit/cli/test_run.py
index 0c1c998..0885215 100644
--- a/tests/unit/cli/test_run.py
+++ b/tests/unit/cli/test_run.py
@@ -1,4 +1,3 @@
-from pathlib import Path
 from unittest.mock import MagicMock, patch
 
 import pytest
@@ -710,7 +709,7 @@ class TestMainCLIFunctionality:
         # Verify NornFlow was built and run
         mock_get_builder.assert_called_once()
         mock_builder.build.assert_called_once()
-        mock_nornflow.run.assert_called_once_with(dry_run=False)
+        mock_nornflow.run.assert_called_once()
         # Verify sys.exit was not called for success
         mock_exit.assert_not_called()
 
@@ -745,7 +744,7 @@ class TestMainCLIFunctionality:
 
         # Verify NornFlow was built and run
         mock_builder.build.assert_called_once()
-        mock_nornflow.run.assert_called_once_with(dry_run=False)
+        mock_nornflow.run.assert_called_once()
         # Verify the exit code was propagated
         mock_exit.assert_called_once_with(1)
 
@@ -765,7 +764,7 @@ class TestMainCLIFunctionality:
         run(mock_ctx, target="test_task", args=None, inventory_filters=None, hosts=None, groups=None, vars=None, dry_run=True, processors=None, failure_strategy=None)
 
         mock_builder.build.assert_called_once()
-        mock_nornflow.run.assert_called_once_with(dry_run=True)
+        mock_nornflow.run.assert_called_once()
         mock_exit.assert_not_called()
 
     @patch("typer.secho")
diff --git a/tests/unit/cli/test_show.py b/tests/unit/cli/test_show.py
index 9e9d3e7..9d7c13e 100644
--- a/tests/unit/cli/test_show.py
+++ b/tests/unit/cli/test_show.py
@@ -175,7 +175,7 @@ class TestShowCommand:
 
         with pytest.raises(typer.BadParameter):
             show(mock_ctx, catalog=False, catalogs=False, tasks=False, filters=False,
-                 workflows=False, settings=False, nornir_configs=False, all=False)
+                 workflows=False, blueprints=False, settings=False, nornir_configs=False, all=False)
 
     @patch("nornflow.cli.show.NornFlowBuilder")
     @patch("nornflow.cli.show.CLIShowError")
@@ -256,7 +256,7 @@ class TestShowHelpers:
 
         show_catalog(mock_nornflow)
 
-        assert mock_show_table.call_count == 3
+        assert mock_show_table.call_count == 4
         calls = [
             call(
                 "TASKS CATALOG",
diff --git a/tests/unit/core/conftest.py b/tests/unit/core/conftest.py
index 9ee2b71..bbe3906 100644
--- a/tests/unit/core/conftest.py
+++ b/tests/unit/core/conftest.py
@@ -76,7 +76,6 @@ workflow:
 @pytest.fixture
 def valid_workflow(valid_workflow_dict):
     """Create a valid workflow object."""
-    # Use WorkflowModel.create instead of Workflow constructor
     return WorkflowModel.create(valid_workflow_dict)
 
 
@@ -98,13 +97,19 @@ def basic_settings(tmp_path, task_content):
     tasks_dir = tmp_path / "tasks"
     tasks_dir.mkdir()
     (tasks_dir / "task1.py").write_text(task_content)
-    return NornFlowSettings(local_tasks_dirs=[str(tasks_dir)])
+    return NornFlowSettings(
+        nornir_config_file="dummy_config.yaml",
+        local_tasks=[str(tasks_dir)]
+    )
 
 
 @pytest.fixture
 def basic_nornflow(basic_settings):
     """Create a basic NornFlow instance."""
-    return NornFlow(nornflow_settings=basic_settings)
+    with patch("nornflow.nornflow.NornFlow._initialize_nornir"):
+        nf = NornFlow(nornflow_settings=basic_settings)
+        nf._nornir_manager = Mock()
+        return nf
 
 
 @pytest.fixture
@@ -119,8 +124,8 @@ def mock_nornir():
     mock = Mock(spec=Nornir)
     mock.data = Mock()
     mock.data.failed_hosts = set()
-    mock.filter.return_value = mock  # Allow chaining
-    mock.with_processors.return_value = mock  # Allow chaining
+    mock.filter.return_value = mock
+    mock.with_processors.return_value = mock
     return mock
 
 
diff --git a/tests/unit/core/test_failure_strategies.py b/tests/unit/core/test_failure_strategies.py
index ba410ce..67b1e0f 100644
--- a/tests/unit/core/test_failure_strategies.py
+++ b/tests/unit/core/test_failure_strategies.py
@@ -1,6 +1,6 @@
 """Tests for failure handling strategies (skip-failed, fail-fast, run-all)."""
 
-from unittest.mock import MagicMock, patch, call
+from unittest.mock import MagicMock, patch
 
 import pytest
 
@@ -11,6 +11,7 @@ from nornflow.builtins.processors import (
 )
 from nornflow.constants import FailureStrategy
 from nornflow.models import WorkflowModel
+from nornflow.settings import NornFlowSettings
 
 
 class TestFailureStrategyConstants:
@@ -69,9 +70,6 @@ class TestDefaultNornFlowProcessor:
         mock_host.name = "test_host"
         
         processor.task_instance_started(mock_task, mock_host)
-        
-        # Note: Counter increments have been removed from the implementation
-        # This test now verifies the method can be called without error
 
     def test_task_instance_completed_success(self):
         """Test task_instance_completed with successful result."""
@@ -90,9 +88,6 @@ class TestDefaultNornFlowProcessor:
         
         with patch("builtins.print"):
             processor.task_instance_completed(mock_task, mock_host, mock_result)
-        
-        # Note: Counter increments have been removed from the implementation
-        # This test now verifies the method can be called without error
 
     def test_task_instance_completed_failure(self):
         """Test task_instance_completed with failed result."""
@@ -111,9 +106,6 @@ class TestDefaultNornFlowProcessor:
         
         with patch("builtins.print"):
             processor.task_instance_completed(mock_task, mock_host, mock_result)
-        
-        # Note: Counter increments have been removed from the implementation
-        # This test now verifies the method can be called without error
 
     def test_task_completed_increments_count(self):
         """Test that task_completed increments tasks_completed."""
@@ -421,56 +413,77 @@ class TestCLIFailureStrategyIntegration:
 class TestNornFlowFailureStrategy:
     """Test NornFlow's failure strategy handling."""
     
-    def test_nornflow_with_explicit_failure_strategy(self):
+    @patch('nornflow.nornflow.load_file_to_dict')
+    @patch('nornflow.nornir_manager.InitNornir')
+    def test_nornflow_with_explicit_failure_strategy(self, mock_init_nornir, mock_load_file):
         """Test NornFlow initialized with explicit failure strategy."""
-        # Create workflow model with one strategy
+        mock_nornir_instance = MagicMock()
+        mock_nornir_instance.inventory.hosts = {}
+        mock_init_nornir.return_value = mock_nornir_instance
+        
+        mock_load_file.return_value = {
+            'core': {'num_workers': 1},
+            'inventory': {'plugin': 'SimpleInventory', 'options': {}}
+        }
+        
         workflow_model = MagicMock(spec=WorkflowModel)
         workflow_model.failure_strategy = FailureStrategy.SKIP_FAILED
         
-        # Create NornFlow with different strategy
+        settings = NornFlowSettings(nornir_config_file="mock_config.yaml")
+        
         nornflow = NornFlow(
+            nornflow_settings=settings,
             workflow=workflow_model,
             failure_strategy=FailureStrategy.FAIL_FAST
         )
         
-        # Explicit failure strategy should override workflow's strategy
         assert nornflow.failure_strategy == FailureStrategy.FAIL_FAST
         
-    def test_nornflow_defaults_to_workflow_strategy(self):
+    @patch('nornflow.nornflow.load_file_to_dict')
+    @patch('nornflow.nornir_manager.InitNornir')
+    def test_nornflow_defaults_to_workflow_strategy(self, mock_init_nornir, mock_load_file):
         """Test NornFlow uses workflow's strategy when not explicitly set."""
-        # Create workflow model with strategy
+        mock_nornir_instance = MagicMock()
+        mock_nornir_instance.inventory.hosts = {}
+        mock_init_nornir.return_value = mock_nornir_instance
+        
+        mock_load_file.return_value = {
+            'core': {'num_workers': 1},
+            'inventory': {'plugin': 'SimpleInventory', 'options': {}}
+        }
+        
         workflow_model = MagicMock(spec=WorkflowModel)
         workflow_model.failure_strategy = FailureStrategy.RUN_ALL
         
-        # Create NornFlow without explicit strategy
-        nornflow = NornFlow(workflow=workflow_model)
+        settings = NornFlowSettings(nornir_config_file="mock_config.yaml")
+        
+        nornflow = NornFlow(
+            nornflow_settings=settings,
+            workflow=workflow_model
+        )
         
-        # Should use workflow's strategy
         assert nornflow.failure_strategy == FailureStrategy.RUN_ALL
         
     @patch('nornflow.nornir_manager.InitNornir')
     @patch('nornflow.nornflow.load_file_to_dict')
     def test_nornflow_defaults_to_settings_strategy(self, mock_load_file, mock_init_nornir):
         """Test NornFlow uses settings' strategy when workflow doesn't specify one."""
-        # Setup mock Nornir instance
         mock_nornir_instance = MagicMock()
         mock_nornir_instance.inventory.hosts = {}
         mock_init_nornir.return_value = mock_nornir_instance
         
-        # Mock load_file_to_dict to return a valid config dict
         mock_load_file.return_value = {
             'core': {'num_workers': 1},
             'inventory': {'plugin': 'SimpleInventory', 'options': {}}
         }
         
-        # Create NornFlow with settings that specify a strategy
-        settings = MagicMock()
-        settings.failure_strategy = FailureStrategy.FAIL_FAST
-        settings.nornir_config_file = 'mock_config.yaml'
+        settings = NornFlowSettings(
+            nornir_config_file='mock_config.yaml',
+            failure_strategy=FailureStrategy.FAIL_FAST
+        )
 
         nornflow = NornFlow(nornflow_settings=settings)
         
-        # Should use settings' strategy
         assert nornflow.failure_strategy == FailureStrategy.FAIL_FAST
 
 
@@ -484,20 +497,15 @@ class TestNornFlowBuilderWithFailureStrategy:
         
         assert builder._failure_strategy == FailureStrategy.FAIL_FAST
         
-        # Create mock workflow
         mock_workflow = MagicMock(spec=WorkflowModel)
         mock_workflow.failure_strategy = FailureStrategy.SKIP_FAILED
         
-        # With mocked dependencies
         with patch.object(builder, '_settings', new=MagicMock()):
-            # Add mock workflow to builder
             builder._workflow = mock_workflow
             
-            # Build with patched NornFlow constructor
             with patch('nornflow.builder.NornFlow') as mock_nornflow:
                 builder.build()
                 
-                # Verify failure strategy was passed to NornFlow
                 call_kwargs = mock_nornflow.call_args.kwargs
                 assert call_kwargs['failure_strategy'] == FailureStrategy.FAIL_FAST
 
diff --git a/tests/unit/core/test_nornflow.py b/tests/unit/core/test_nornflow.py
index cc22613..62ae2e0 100644
--- a/tests/unit/core/test_nornflow.py
+++ b/tests/unit/core/test_nornflow.py
@@ -3,13 +3,12 @@ from unittest.mock import MagicMock, patch
 import pytest
 
 from nornflow import NornFlow, NornFlowBuilder
+from nornflow.constants import FailureStrategy
 from nornflow.exceptions import (
-    CatalogError,
     InitializationError,
-    ResourceError,
     WorkflowError,
 )
-from nornflow.models import TaskModel, WorkflowModel
+from nornflow.models import WorkflowModel
 from nornflow.settings import NornFlowSettings
 
 
@@ -27,18 +26,22 @@ class TestNornFlowBasicCreation:
         (tasks_dir / "task1.py").write_text(task_content)
 
         with patch("nornflow.nornflow.NornFlow._initialize_nornir"):
-            settings = NornFlowSettings(local_tasks_dirs=[str(tasks_dir)])
+            settings = NornFlowSettings(
+                nornir_config_file="dummy_config.yaml",
+                local_tasks=[str(tasks_dir)]
+            )
             nornflow = NornFlow(nornflow_settings=settings)
 
             assert isinstance(nornflow, NornFlow)
             assert "hello_world" in nornflow.tasks_catalog
             assert "set" in nornflow.tasks_catalog
 
-    def test_create_with_invalid_kwargs(self):
-        """Test creating NornFlow with invalid kwargs."""
+    def test_create_with_explicit_settings(self):
+        """Test that NornFlow stores an explicitly provided settings object."""
         with patch("nornflow.nornflow.NornFlow._initialize_nornir"):
-            nornflow = NornFlow(invalid_kwarg="value")
-            assert isinstance(nornflow, NornFlow)
+            settings = NornFlowSettings(nornir_config_file="test.yaml")
+            nornflow = NornFlow(nornflow_settings=settings)
+            assert nornflow.settings == settings
 
 
 class TestNornFlowValidation:
@@ -49,35 +52,155 @@ class TestNornFlowValidation:
         self.test_name = request.function.__name__
 
     def test_empty_tasks_catalog(self, tmp_path):
-        """Test error when no tasks are found raises CatalogError."""
+        """Test that NornFlow can be created even with empty tasks catalog (builtins still present)."""
         tasks_dir = tmp_path / "empty_tasks"
         tasks_dir.mkdir()
 
-        settings = NornFlowSettings(local_tasks_dirs=[str(tasks_dir)])
+        settings = NornFlowSettings(
+            nornir_config_file="dummy_config.yaml",
+            local_tasks=[str(tasks_dir)]
+        )
 
-        with patch("nornflow.nornflow.builtin_tasks", {}), patch(
-            "nornflow.nornflow.NornFlow._initialize_nornir"
-        ):
-            with pytest.raises(CatalogError):
-                NornFlow(nornflow_settings=settings)
+        with patch("nornflow.nornflow.NornFlow._initialize_nornir"):
+            nornflow = NornFlow(nornflow_settings=settings)
+            assert "set" in nornflow.tasks_catalog
+            assert "echo" in nornflow.tasks_catalog
 
-    def test_invalid_tasks_directory(self):
-        """Test error when tasks directory doesn't exist raises ResourceError."""
-        with patch("nornflow.nornflow.NornFlow._initialize_nornir"), patch(
-            "nornflow.settings.load_file_to_dict", return_value={"nornir_config_file": "dummy.yaml"}
-        ):
-            settings = NornFlowSettings(local_tasks_dirs=["/nonexistent/dir"])
+    def test_invalid_tasks_directory_raises_error(self):
+        """Test that NornFlow initialization raises error for non-existent directories."""
+        with patch("nornflow.nornflow.NornFlow._initialize_nornir"):
+            settings = NornFlowSettings(
+                nornir_config_file="dummy_config.yaml",
+                local_tasks=["/nonexistent/dir"]
+            )
 
-            with pytest.raises(InitializationError) as exc_info:
+            with pytest.raises(InitializationError, match="Tasks directory does not exist"):
                 NornFlow(nornflow_settings=settings)
 
-            assert isinstance(exc_info.value.__cause__, ResourceError)
-
     def test_property_modifications(self, basic_nornflow):
         """Ensure nornir_manager property is readable."""
         assert basic_nornflow.nornir_manager is not None
 
 
+class TestNornFlowPrecedence:
+    """Test precedence logic for failure_strategy and dry_run."""
+
+    def test_failure_strategy_constructor_overrides_workflow(self):
+        """Constructor failure_strategy should override workflow failure_strategy."""
+        wf = MagicMock(spec=WorkflowModel)
+        wf.failure_strategy = FailureStrategy.FAIL_FAST
+        wf.processors = []
+        wf.inventory_filters = {}
+        wf.vars = {}
+        wf.description = None
+        wf.dry_run = None
+        wf.name = "WF"
+
+        settings = NornFlowSettings(
+            nornir_config_file="dummy.yaml",
+            failure_strategy=FailureStrategy.SKIP_FAILED
+        )
+
+        with patch("nornflow.nornflow.NornFlow._initialize_nornir"):
+            nf = NornFlow(
+                nornflow_settings=settings,
+                workflow=wf,
+                failure_strategy=FailureStrategy.FAIL_FAST
+            )
+            assert nf.failure_strategy == FailureStrategy.FAIL_FAST
+
+    def test_failure_strategy_workflow_overrides_settings(self):
+        """Workflow failure_strategy should override settings when constructor doesn't override."""
+        wf = MagicMock(spec=WorkflowModel)
+        wf.failure_strategy = FailureStrategy.FAIL_FAST
+        wf.processors = []
+        wf.inventory_filters = {}
+        wf.vars = {}
+        wf.description = None
+        wf.dry_run = None
+        wf.name = "WF"
+
+        settings = NornFlowSettings(
+            nornir_config_file="dummy.yaml",
+            failure_strategy=FailureStrategy.SKIP_FAILED
+        )
+
+        with patch("nornflow.nornflow.NornFlow._initialize_nornir"):
+            nf = NornFlow(nornflow_settings=settings, workflow=wf)
+            assert nf.failure_strategy == FailureStrategy.FAIL_FAST
+
+    def test_failure_strategy_settings_fallback(self):
+        """Settings failure_strategy should be used when workflow has none."""
+        wf = MagicMock(spec=WorkflowModel)
+        wf.failure_strategy = None
+        wf.processors = []
+        wf.inventory_filters = {}
+        wf.vars = {}
+        wf.description = None
+        wf.dry_run = None
+        wf.name = "WF"
+
+        settings = NornFlowSettings(
+            nornir_config_file="dummy.yaml",
+            failure_strategy=FailureStrategy.SKIP_FAILED
+        )
+
+        with patch("nornflow.nornflow.NornFlow._initialize_nornir"):
+            nf = NornFlow(nornflow_settings=settings, workflow=wf)
+            assert nf.failure_strategy == FailureStrategy.SKIP_FAILED
+
+    def test_dry_run_constructor_overrides_workflow(self):
+        """Constructor dry_run should override workflow dry_run."""
+        wf = MagicMock(spec=WorkflowModel)
+        wf.dry_run = True
+        wf.processors = []
+        wf.inventory_filters = {}
+        wf.vars = {}
+        wf.description = None
+        wf.failure_strategy = None
+        wf.name = "WF"
+
+        settings = NornFlowSettings(nornir_config_file="dummy.yaml", dry_run=False)
+
+        with patch("nornflow.nornflow.NornFlow._initialize_nornir"):
+            nf = NornFlow(nornflow_settings=settings, workflow=wf, dry_run=False)
+            assert nf.dry_run is False
+
+    def test_dry_run_workflow_overrides_settings(self):
+        """Workflow dry_run should override settings when constructor doesn't override."""
+        wf = MagicMock(spec=WorkflowModel)
+        wf.dry_run = True
+        wf.processors = []
+        wf.inventory_filters = {}
+        wf.vars = {}
+        wf.description = None
+        wf.failure_strategy = None
+        wf.name = "WF"
+
+        settings = NornFlowSettings(nornir_config_file="dummy.yaml", dry_run=False)
+
+        with patch("nornflow.nornflow.NornFlow._initialize_nornir"):
+            nf = NornFlow(nornflow_settings=settings, workflow=wf)
+            assert nf.dry_run is True
+
+    def test_dry_run_settings_fallback(self):
+        """Settings dry_run should be used when workflow has none."""
+        wf = MagicMock(spec=WorkflowModel)
+        wf.dry_run = None
+        wf.processors = []
+        wf.inventory_filters = {}
+        wf.vars = {}
+        wf.description = None
+        wf.failure_strategy = None
+        wf.name = "WF"
+
+        settings = NornFlowSettings(nornir_config_file="dummy.yaml", dry_run=True)
+
+        with patch("nornflow.nornflow.NornFlow._initialize_nornir"):
+            nf = NornFlow(nornflow_settings=settings, workflow=wf)
+            assert nf.dry_run is True
+
+
 class TestWorkflowModelCreation:
     """Test workflow model creation."""
 
@@ -130,7 +253,11 @@ class TestNornFlowBuilder:
             assert nf.workflow == valid_workflow
 
     def test_builder_with_workflow_path(self, basic_settings, valid_workflow_file):
-        with patch("nornflow.nornflow.NornFlow._initialize_nornir"):
+        with patch("nornflow.nornflow.NornFlow._initialize_nornir"), \
+             patch("nornflow.nornflow.NornFlow._load_workflow_from_name") as mock_load:
+            mock_workflow = MagicMock(spec=WorkflowModel)
+            mock_load.return_value = (mock_workflow, valid_workflow_file)
+
             nf = (
                 NornFlowBuilder()
                 .with_settings_object(basic_settings)
@@ -194,7 +321,12 @@ class TestNornFlowExecution:
     def test_run_uses_context_manager(self, mock_mgr_cls):
         """Ensure context manager is used during run()."""
         mock_mgr = MagicMock()
+        mock_mgr.nornir = MagicMock()
+        mock_mgr.nornir.inventory.hosts = {}
+        mock_mgr.nornir.data.failed_hosts = {}
+        mock_mgr.nornir.processors = []
         mock_mgr.__enter__.return_value = mock_mgr
+        mock_mgr.__exit__.return_value = None
         mock_mgr_cls.return_value = mock_mgr
 
         wf = MagicMock(spec=WorkflowModel)
@@ -207,26 +339,15 @@ class TestNornFlowExecution:
         wf.failure_strategy = None
         wf.name = "Test WF"
 
-        settings = MagicMock()
-        settings.nornir_config_file = None
-        settings.local_workflows_dirs = []
-        settings.vars_dir = "/tmp"
-        settings.vars = {}
+        settings = NornFlowSettings(
+            nornir_config_file="dummy.yaml",
+            local_workflows=[]
+        )
 
-        with patch("nornflow.nornflow.NornFlow._initialize_nornir"), patch.object(
-            NornFlow, "_with_processors", MagicMock()
-        ):
-            def dummy_exec(self, _dry):
-                with self.nornir_manager:
-                    pass
-
-            with patch.object(NornFlow, "_orchestrate_execution", dummy_exec):
-                nf = NornFlow(workflow=wf, nornflow_settings=settings)
-                nf._nornir_manager = mock_mgr
-                nf._print_workflow_overview = MagicMock()
-                nf._print_workflow_summary = MagicMock()
-
-                nf.run()
+        with patch("nornflow.nornflow.load_file_to_dict", return_value={}):
+            nf = NornFlow(nornflow_settings=settings, workflow=wf)
+            nf._nornir_manager = mock_mgr
+            nf.run()
 
         mock_mgr.__enter__.assert_called_once()
         mock_mgr.__exit__.assert_called_once()
@@ -235,45 +356,46 @@ class TestNornFlowExecution:
     def test_run_handles_exceptions(self, mock_mgr_cls):
         """Connections are closed even when an error occurs."""
         mock_mgr = MagicMock()
+        mock_mgr.nornir = MagicMock()
+        mock_mgr.nornir.inventory.hosts = {}
+        mock_mgr.nornir.data.failed_hosts = {}
+        mock_mgr.nornir.processors = []
         mock_mgr.__enter__.return_value = mock_mgr
+        mock_mgr.__exit__.return_value = None
         mock_mgr_cls.return_value = mock_mgr
-
+
+        task_mock = MagicMock()
+        task_mock.name = "echo"
+        task_mock.run.side_effect = RuntimeError("test error")
+
         wf = MagicMock(spec=WorkflowModel)
         wf.dry_run = False
-        wf.tasks = []
         wf.inventory_filters = {}
         wf.processors = []
         wf.vars = {}
         wf.description = None
         wf.failure_strategy = None
         wf.name = "Test WF"
-
-        settings = MagicMock()
-        settings.nornir_config_file = None
-        settings.local_workflows_dirs = []
-        settings.vars_dir = "/tmp"
-        settings.vars = {}
-
-        with patch("nornflow.nornflow.NornFlow._initialize_nornir"), patch.object(
-            NornFlow, "_with_processors", MagicMock()
-        ):
-            def dummy_exec(self, _dry):
-                with self.nornir_manager:
-                    raise Exception("Test error")
-
-            with patch.object(NornFlow, "_orchestrate_execution", dummy_exec):
-                nf = NornFlow(workflow=wf, nornflow_settings=settings)
-                nf._nornir_manager = mock_mgr
-                nf._print_workflow_overview = MagicMock()
-
-                with pytest.raises(Exception, match="Test error"):
-                    nf.run()
-
+        wf.tasks = [task_mock]
+
+        settings = NornFlowSettings(
+            nornir_config_file="dummy.yaml",
+            local_workflows=[]
+        )
+
+        with patch("nornflow.nornflow.load_file_to_dict", return_value={}):
+            nf = NornFlow(nornflow_settings=settings, workflow=wf)
+            nf._nornir_manager = mock_mgr
+
+            with pytest.raises(RuntimeError, match="test error"):
+                nf.run()
+
+        mock_mgr.__enter__.assert_called_once()
         mock_mgr.__exit__.assert_called_once()
 
     def test_workflow_execution_orchestration(self):
         """Tasks run in order."""
-        t1, t2 = MagicMock(spec=TaskModel), MagicMock(spec=TaskModel)
+        t1, t2 = MagicMock(), MagicMock()
         t1.name, t2.name = "task1", "task2"
 
         wf = MagicMock(spec=WorkflowModel)
@@ -286,19 +408,188 @@ class TestNornFlowExecution:
         wf.name = "WF"
         wf.description = None
 
+        settings = NornFlowSettings(nornir_config_file="dummy_config.yaml")
+
         with patch("nornflow.nornflow.NornFlow._initialize_nornir"), patch.object(
             NornFlow, "_create_variable_manager", return_value=MagicMock()
         ):
-            nf = NornFlow(workflow=wf)
-            mgr = MagicMock()
-            mgr.__enter__.return_value = mgr
-            nf._nornir_manager = mgr
-            nf._print_workflow_overview = MagicMock()
-            nf._print_workflow_summary = MagicMock()
-            nf._vars_manager = MagicMock()
-            nf._tasks_catalog = {"task1": MagicMock(), "task2": MagicMock()}
+            nf = NornFlow(nornflow_settings=settings, workflow=wf)
+            assert nf.workflow.tasks == [t1, t2]
 
-            nf.run()
 
-            t1.run.assert_called_once()
-            t2.run.assert_called_once()
\ No newline at end of file
+class TestNornFlowProcessors:
+    """Test processor initialization and application."""
+
+    def test_var_processor_lazy_initialization_without_workflow(self):
+        """var_processor should be None when no workflow is set."""
+        settings = NornFlowSettings(nornir_config_file="dummy.yaml")
+
+        with patch("nornflow.nornflow.NornFlow._initialize_nornir"):
+            nf = NornFlow(nornflow_settings=settings)
+            assert nf.var_processor is None
+
+    def test_var_processor_created_with_workflow(self):
+        """var_processor should be created when workflow is set."""
+        wf = MagicMock(spec=WorkflowModel)
+        wf.processors = []
+        wf.inventory_filters = {}
+        wf.vars = {}
+        wf.description = None
+        wf.failure_strategy = None
+        wf.dry_run = None
+        wf.name = "WF"
+
+        settings = NornFlowSettings(nornir_config_file="dummy.yaml")
+
+        with patch("nornflow.nornflow.NornFlow._initialize_nornir"), \
+             patch("nornflow.nornflow.NornFlow._create_variable_manager") as mock_vm:
+            mock_vm.return_value = MagicMock()
+            nf = NornFlow(nornflow_settings=settings, workflow=wf)
+            assert nf.var_processor is not None
+
+    @patch("nornflow.nornflow.NornirManager")
+    def test_processor_chain_order(self, mock_mgr_cls):
+        """Processors should be applied in correct order: var, hook, user, failure_strategy."""
+        mock_mgr = MagicMock()
+        mock_mgr.nornir = MagicMock()
+        mock_mgr.nornir.processors = []
+
+        def mock_apply_processors(processors):
+            mock_mgr.nornir.processors.extend(processors)
+            return mock_mgr.nornir
+
+        mock_mgr.apply_processors = mock_apply_processors
+        mock_mgr_cls.return_value = mock_mgr
+
+        wf = MagicMock(spec=WorkflowModel)
+        wf.processors = [{"class": "some.Processor", "args": {}}]
+        wf.inventory_filters = {}
+        wf.vars = {}
+        wf.description = None
+        wf.failure_strategy = None
+        wf.dry_run = None
+        wf.name = "WF"
+
+        settings = NornFlowSettings(nornir_config_file="dummy.yaml")
+
+        with patch("nornflow.nornflow.NornFlow._create_variable_manager") as mock_vm, \
+             patch("nornflow.nornflow.load_processor") as mock_load, \
+             patch("nornflow.nornflow.load_file_to_dict", return_value={}):
+            mock_vm.return_value = MagicMock()
+            mock_user_proc = MagicMock()
+            mock_load.return_value = mock_user_proc
+
+            nf = NornFlow(nornflow_settings=settings, workflow=wf)
+            nf._nornir_manager = mock_mgr
+
+            nf._apply_processors()
+
+            applied_processors = nf.nornir_manager.nornir.processors
+            assert applied_processors[0] == nf.var_processor
+            assert applied_processors[1] == nf.hook_processor
+            assert applied_processors[2] == mock_user_proc
+            assert applied_processors[-1] == nf.failure_strategy_processor
+
+
+class TestNornFlowReturnCodes:
+    """Test return code calculation."""
+
+    @patch("nornflow.nornflow.NornirManager")
+    def test_return_code_success(self, mock_mgr_cls):
+        """Return code should be 0 on success."""
+        mock_mgr = MagicMock()
+        mock_mgr.nornir = MagicMock()
+        mock_mgr.nornir.data.failed_hosts = {}
+        mock_mgr.nornir.processors = []
+        mock_mgr_cls.return_value = mock_mgr
+
+        wf = MagicMock(spec=WorkflowModel)
+        wf.tasks = []
+        wf.processors = []
+        wf.inventory_filters = {}
+        wf.vars = {}
+        wf.description = None
+        wf.failure_strategy = None
+        wf.dry_run = None
+        wf.name = "WF"
+
+        settings = NornFlowSettings(nornir_config_file="dummy.yaml")
+
+        with patch("nornflow.nornflow.NornFlow._create_variable_manager"), \
+             patch("nornflow.nornflow.load_file_to_dict", return_value={}):
+            nf = NornFlow(nornflow_settings=settings, workflow=wf)
+            nf._nornir_manager = mock_mgr
+
+            assert nf._get_return_code() == 0
+
+    @patch("nornflow.nornflow.NornirManager")
+    def test_return_code_with_failed_hosts_no_stats(self, mock_mgr_cls):
+        """Return code should be 101 when hosts failed but no stats available."""
+        mock_mgr = MagicMock()
+        mock_mgr.nornir = MagicMock()
+        mock_mgr.nornir.data.failed_hosts = {"host1": None}
+        mock_mgr.nornir.processors = []
+        mock_mgr_cls.return_value = mock_mgr
+
+        wf = MagicMock(spec=WorkflowModel)
+        wf.tasks = []
+        wf.processors = []
+        wf.inventory_filters = {}
+        wf.vars = {}
+        wf.description = None
+        wf.failure_strategy = None
+        wf.dry_run = None
+        wf.name = "WF"
+
+        settings = NornFlowSettings(nornir_config_file="dummy.yaml")
+
+        with patch("nornflow.nornflow.NornFlow._create_variable_manager"), \
+             patch("nornflow.nornflow.load_file_to_dict", return_value={}):
+            nf = NornFlow(nornflow_settings=settings, workflow=wf)
+            nf._nornir_manager = mock_mgr
+
+            assert nf._get_return_code() == 101
+
+
+class TestNornFlowImmutability:
+    """Test that certain properties are immutable."""
+
+    def test_settings_immutable(self):
+        """settings property should not be settable."""
+        settings = NornFlowSettings(nornir_config_file="dummy.yaml")
+
+        with patch("nornflow.nornflow.NornFlow._initialize_nornir"):
+            nf = NornFlow(nornflow_settings=settings)
+
+            with pytest.raises(Exception):
+                nf.settings = NornFlowSettings(nornir_config_file="other.yaml")
+
+    def test_nornir_manager_immutable(self):
+        """nornir_manager property should not be settable."""
+        settings = NornFlowSettings(nornir_config_file="dummy.yaml")
+
+        with patch("nornflow.nornflow.NornFlow._initialize_nornir"):
+            nf = NornFlow(nornflow_settings=settings)
+
+            with pytest.raises(Exception):
+                nf.nornir_manager = MagicMock()
+
+    def test_tasks_catalog_immutable(self):
+        """tasks_catalog property should not be settable."""
+        settings = NornFlowSettings(nornir_config_file="dummy.yaml")
+
+        with patch("nornflow.nornflow.NornFlow._initialize_nornir"):
+            nf = NornFlow(nornflow_settings=settings)
+
+            with pytest.raises(Exception):
+                nf.tasks_catalog = {}
+
+    def test_blueprints_catalog_immutable(self):
+        """blueprints_catalog property should not be settable."""
+        settings = NornFlowSettings(nornir_config_file="dummy.yaml")
+
+        with patch("nornflow.nornflow.NornFlow._initialize_nornir"):
+            nf = NornFlow(nornflow_settings=settings)
+
+            with pytest.raises(Exception):
+                nf.blueprints_catalog = {}
\ No newline at end of file
diff --git a/tests/unit/core/test_nornir_manager.py b/tests/unit/core/test_nornir_manager.py
index 308b87e..7e2e45f 100644
--- a/tests/unit/core/test_nornir_manager.py
+++ b/tests/unit/core/test_nornir_manager.py
@@ -1,5 +1,5 @@
 import pytest
-from unittest.mock import MagicMock, patch
+from unittest.mock import MagicMock
 
 from nornflow.constants import NORNFLOW_SETTINGS_OPTIONAL
 from nornflow.exceptions import ProcessorError
@@ -36,8 +36,8 @@ class TestNornirManager:
         # Create a dict with both Nornir and NornFlow params
         params = {
             "runner": "threaded",  # Valid Nornir param
-            "local_workflows_dirs": ["/tmp/workflows"],  # NornFlow-specific param that should be filtered
-            "local_tasks_dirs": ["/tmp/tasks"],  # Another NornFlow-specific param
+            "local_workflows": ["/tmp/workflows"],  # NornFlow-specific param that should be filtered
+            "local_tasks": ["/tmp/tasks"],  # Another NornFlow-specific param
         }
 
         # Add all optional NornFlow settings to test they're filtered
@@ -64,17 +64,23 @@ class TestNornirManager:
         # Create test kwargs with some NornFlow settings
         kwargs = {
             "runner": "threaded",
-            "local_workflows_dirs": ["/tmp/workflows"],
             "another_setting": "value",
         }
+        
+        # Add all optional NornFlow settings to test they're removed
+        for param in NORNFLOW_SETTINGS_OPTIONAL:
+            kwargs[param] = f"value_{param}"
 
         # Call the method
         manager._remove_optional_nornflow_settings_from_kwargs(kwargs)
 
         # Verify NornFlow settings were removed
         assert "runner" in kwargs
-        assert "local_workflows_dirs" not in kwargs
         assert "another_setting" in kwargs
+        
+        # Verify all NornFlow optional settings were removed
+        for param in NORNFLOW_SETTINGS_OPTIONAL:
+            assert param not in kwargs
 
     def test_apply_filters_with_valid_filters(self, mock_nornir, mock_init_nornir):
         """Test applying valid filters."""
diff --git a/tests/unit/core/test_utils.py b/tests/unit/core/test_utils.py
new file mode 100644
index 0000000..3fa258d
--- /dev/null
+++ b/tests/unit/core/test_utils.py
@@ -0,0 +1,899 @@
+from types import ModuleType
+from typing import Literal
+from unittest.mock import Mock, patch
+
+import pytest
+from nornir.core.inventory import Host
+from nornir.core.processor import Processor
+from nornir.core.task import AggregatedResult, MultiResult, Result, Task
+from pydantic_serdes.custom_collections import HashableDict
+
+from nornflow.constants import FailureStrategy
+from nornflow.exceptions import CoreError, ProcessorError, ResourceError, WorkflowError
+from nornflow.utils import (
+    check_for_jinja2_recursive,
+    convert_lists_to_tuples,
+    format_variable_value,
+    get_file_content_hash,
+    import_module_from_path,
+    import_modules_recursively,
+    is_nornir_filter,
+    is_nornir_task,
+    is_yaml_file,
+    load_processor,
+    normalize_failure_strategy,
+    print_workflow_overview,
+    process_filter,
+)
+
+
+class TestGetFileContentHash:
+    """Tests for get_file_content_hash function."""
+
+    def test_hash_basic_yaml_file(self, tmp_path):
+        """Test hashing basic YAML file."""
+        test_file = tmp_path / "test.yaml"
+        test_file.write_text("key: value\nlist:\n  - item1\n  - item2")
+
+        file_hash = get_file_content_hash(test_file)
+
+        assert isinstance(file_hash, str)
+        assert len(file_hash) == 16
+
+    def test_hash_consistency(self, tmp_path):
+        """Test same content produces same hash."""
+        test_file = tmp_path / "test.yaml"
+        test_file.write_text("key: value")
+
+        hash1 = get_file_content_hash(test_file)
+        hash2 = get_file_content_hash(test_file)
+
+        assert hash1 == hash2
+
+    def test_hash_different_content(self, tmp_path):
+        """Test different content produces different hashes."""
+        file1 = tmp_path / "file1.yaml"
+        file2 = tmp_path / "file2.yaml"
+
+        file1.write_text("key: value1")
+        file2.write_text("key: value2")
+
+        hash1 = get_file_content_hash(file1)
+        hash2 = get_file_content_hash(file2)
+
+        assert hash1 != hash2
+
+    def test_hash_nonexistent_file(self, tmp_path):
+        """Test error for nonexistent file."""
+        nonexistent = tmp_path / "nonexistent.yaml"
+
+        with pytest.raises(ResourceError, match="Failed to hash file content"):
+            get_file_content_hash(nonexistent)
+
+    def test_hash_yaml_normalization(self, tmp_path):
+        """Test YAML formatting differences produce same hash."""
+        file1 = tmp_path / "file1.yaml"
+        file2 = tmp_path / "file2.yaml"
+
+        file1.write_text("key: value\nlist: [1, 2, 3]")
+        file2.write_text("key: value\nlist:\n  - 1\n  - 2\n  - 3")
+
+        hash1 = get_file_content_hash(file1)
+        hash2 = get_file_content_hash(file2)
+
+        assert hash1 == hash2
+
+    def test_hash_invalid_yaml(self, tmp_path):
+        """Test error for invalid YAML content."""
+        invalid_file = tmp_path / "invalid.yaml"
+        invalid_file.write_text("key: value\n  invalid: indentation")
+
+        with pytest.raises(ResourceError, match="Failed to hash file content"):
+            get_file_content_hash(invalid_file)
+
+    def test_hash_complex_yaml(self, tmp_path):
+        """Test hashing complex YAML structures."""
+        test_file = tmp_path / "complex.yaml"
+        test_file.write_text("""
+nested:
+  deep:
+    level: 3
+    items:
+      - name: item1
+        value: 100
+      - name: item2
+        value: 200
+mapping:
+  a: 1
+  b: 2
+""")
+
+        file_hash = get_file_content_hash(test_file)
+
+        assert isinstance(file_hash, str)
+        assert len(file_hash) == 16
+
+    def test_hash_empty_file(self, tmp_path):
+        """Test hashing empty YAML file."""
+        empty_file = tmp_path / "empty.yaml"
+        empty_file.write_text("")
+
+        file_hash = get_file_content_hash(empty_file)
+
+        assert isinstance(file_hash, str)
+        assert len(file_hash) == 16
+
+
+class TestNormalizeFailureStrategy:
+    """Tests for normalize_failure_strategy function."""
+
+    def test_normalize_from_string(self):
+        """Test normalizing from string value."""
+        result = normalize_failure_strategy("skip-failed", WorkflowError)
+        assert result == FailureStrategy.SKIP_FAILED
+
+    def test_normalize_from_enum(self):
+        """Test normalizing from enum value."""
+        result = normalize_failure_strategy(FailureStrategy.FAIL_FAST, WorkflowError)
+        assert result == FailureStrategy.FAIL_FAST
+
+    def test_normalize_invalid_string(self):
+        """Test error for invalid string value."""
+        with pytest.raises(WorkflowError, match="Invalid failure strategy"):
+            normalize_failure_strategy("invalid-strategy", WorkflowError)
+
+    def test_normalize_invalid_type(self):
+        """Test error for invalid type."""
+        with pytest.raises(WorkflowError, match="Invalid failure strategy type"):
+            normalize_failure_strategy(123, WorkflowError)
+
+    def test_normalize_with_different_exception(self):
+        """Test using different exception class."""
+        with pytest.raises(ProcessorError, match="Invalid failure strategy"):
+            normalize_failure_strategy("invalid", ProcessorError)
+
+    def test_normalize_all_valid_strategies(self):
+        """Test normalizing all valid strategy strings."""
+        for strategy in FailureStrategy:
+            result = normalize_failure_strategy(strategy.value, WorkflowError)
+            assert result == strategy
+
+
+class TestImportModuleFromPath:
+    """Tests for import_module_from_path function."""
+
+    def test_import_valid_module(self, tmp_path):
+        """Test importing valid Python module."""
+        module_file = tmp_path / "test_module.py"
+        module_file.write_text("def test_func():\n    return 'success'")
+
+        module = import_module_from_path("test_module", module_file)
+
+        assert isinstance(module, ModuleType)
+        assert hasattr(module, "test_func")
+        assert module.test_func() == "success"
+
+    def test_import_nonexistent_file(self, tmp_path):
+        """Test error for nonexistent file."""
+        nonexistent = tmp_path / "nonexistent.py"
+
+        with pytest.raises(CoreError, match="Failed to import module"):
+            import_module_from_path("test", nonexistent)
+
+    def test_import_invalid_syntax(self, tmp_path):
+        """Test error for invalid Python syntax."""
+        invalid_file = tmp_path / "invalid.py"
+        invalid_file.write_text("def invalid syntax")
+
+        with pytest.raises(CoreError, match="Failed to import module"):
+            import_module_from_path("invalid", invalid_file)
+
+    def test_import_module_with_class(self, tmp_path):
+        """Test importing module containing a class."""
+        module_file = tmp_path / "with_class.py"
+        module_file.write_text("class TestClass:\n    value = 42")
+
+        module = import_module_from_path("with_class", module_file)
+
+        assert hasattr(module, "TestClass")
+        assert module.TestClass.value == 42
+
+    def test_import_module_with_dependencies(self, tmp_path):
+        """Test importing module with standard library dependencies."""
+        module_file = tmp_path / "with_deps.py"
+        module_file.write_text("import json\ndef parse(s):\n    return json.loads(s)")
+
+        module = import_module_from_path("with_deps", module_file)
+
+        assert hasattr(module, "parse")
+        assert module.parse('{"key": "value"}') == {"key": "value"}
+
+
+class TestImportModulesRecursively:
+    """Tests for import_modules_recursively function."""
+
+    def test_import_single_module(self, tmp_path):
+        """Test importing single module from directory."""
+        module_file = tmp_path / "module1.py"
+        module_file.write_text("VALUE = 1")
+
+        with patch("nornflow.utils.Path.cwd", return_value=tmp_path):
+            imported = import_modules_recursively(tmp_path)
+
+        assert len(imported) == 1
+        assert "module1" in imported[0]
+
+    def test_import_multiple_modules(self, tmp_path):
+        """Test importing multiple modules."""
+        (tmp_path / "mod1.py").write_text("VALUE = 1")
+        (tmp_path / "mod2.py").write_text("VALUE = 2")
+
+        with patch("nornflow.utils.Path.cwd", return_value=tmp_path):
+            imported = import_modules_recursively(tmp_path)
+
+        assert len(imported) == 2
+
+    def test_import_nested_modules(self, tmp_path):
+        """Test importing modules from nested directories."""
+        subdir = tmp_path / "subdir"
+        subdir.mkdir()
+        (tmp_path / "root.py").write_text("VALUE = 1")
+        (subdir / "nested.py").write_text("VALUE = 2")
+
+        with patch("nornflow.utils.Path.cwd", return_value=tmp_path):
+            imported = import_modules_recursively(tmp_path)
+
+        assert len(imported) == 2
+
+    def test_skip_init_files(self, tmp_path):
+        """Test that __init__.py files are skipped."""
+        (tmp_path / "__init__.py").write_text("# init")
+        (tmp_path / "module.py").write_text("VALUE = 1")
+
+        with patch("nornflow.utils.Path.cwd", return_value=tmp_path):
+            imported = import_modules_recursively(tmp_path)
+
+        assert len(imported) == 1
+
+    def test_continue_on_import_error(self, tmp_path):
+        """Test that import continues on error."""
+        (tmp_path / "valid.py").write_text("VALUE = 1")
+        (tmp_path / "invalid.py").write_text("def invalid syntax")
+
+        with patch("nornflow.utils.Path.cwd", return_value=tmp_path):
+            imported = import_modules_recursively(tmp_path)
+
+        assert len(imported) == 1
+
+    def test_import_deeply_nested(self, tmp_path):
+        """Test importing from deeply nested structure."""
+        level1 = tmp_path / "level1"
+        level2 = level1 / "level2"
+        level3 = level2 / "level3"
+        level3.mkdir(parents=True)
+
+        (tmp_path / "root.py").write_text("VALUE = 0")
+        (level1 / "l1.py").write_text("VALUE = 1")
+        (level2 / "l2.py").write_text("VALUE = 2")
+        (level3 / "l3.py").write_text("VALUE = 3")
+
+        with patch("nornflow.utils.Path.cwd", return_value=tmp_path):
+            imported = import_modules_recursively(tmp_path)
+
+        assert len(imported) == 4
+
+    def test_import_empty_directory(self, tmp_path):
+        """Test importing from empty directory."""
+        with patch("nornflow.utils.Path.cwd", return_value=tmp_path):
+            imported = import_modules_recursively(tmp_path)
+
+        assert len(imported) == 0
+
+
+class TestIsNornirTask:
+    """Tests for is_nornir_task function."""
+
+    def test_valid_nornir_task(self):
+        """Test valid Nornir task is recognized."""
+        def valid_task(task: Task) -> Result:
+            return Result(host=task.host)
+
+        assert is_nornir_task(valid_task)
+
+    def test_task_with_multiresult(self):
+        """Test task returning MultiResult."""
+        def multi_task(task: Task) -> MultiResult:
+            return MultiResult("test")
+
+        assert is_nornir_task(multi_task)
+
+    def test_task_with_aggregated_result(self):
+        """Test task returning AggregatedResult."""
+        def agg_task(task: Task) -> AggregatedResult:
+            return AggregatedResult("test")
+
+        assert is_nornir_task(agg_task)
+
+    def test_missing_task_param(self):
+        """Test function without Task parameter."""
+        def not_task(host: Host) -> Result:
+            return Result(host=host)
+
+        assert not is_nornir_task(not_task)
+
+    def test_wrong_return_type(self):
+        """Test function with wrong return type."""
+        def wrong_return(task: Task) -> str:
+            return "not a result"
+
+        assert not is_nornir_task(wrong_return)
+
+    def test_no_annotations(self):
+        """Test function without annotations."""
+        def no_annotations(task):
+            return None
+
+        assert not is_nornir_task(no_annotations)
+
+    def test_not_callable(self):
+        """Test non-callable object."""
+        assert not is_nornir_task("not a function")
+
+    def test_task_with_additional_params(self):
+        """Test task with additional parameters."""
+        def task_with_params(task: Task, param1: str, param2: int) -> Result:
+            return Result(host=task.host)
+
+        assert is_nornir_task(task_with_params)
+
+    def test_partial_annotations(self):
+        """Test function with partial annotations."""
+        def partial(task, other: str) -> Result:
+            return Result(host=None)
+
+        assert not is_nornir_task(partial)
+
+
+class TestIsNornirFilter:
+    """Tests for is_nornir_filter function."""
+
+    def test_valid_filter_bool_return(self):
+        """Test valid filter with bool return."""
+        def valid_filter(host: Host, value: str) -> bool:
+            return True
+
+        assert is_nornir_filter(valid_filter)
+
+    def test_valid_filter_literal_return(self):
+        """Test valid filter with Literal return."""
+        def literal_filter(host: Host) -> Literal[True, False]:
+            return True
+
+        assert is_nornir_filter(literal_filter)
+
+    def test_missing_host_param(self):
+        """Test function without Host parameter."""
+        def no_host(value: str) -> bool:
+            return True
+
+        assert not is_nornir_filter(no_host)
+
+    def test_wrong_first_param_type(self):
+        """Test function with wrong first parameter type."""
+        def wrong_param(task: Task, value: str) -> bool:
+            return True
+
+        assert not is_nornir_filter(wrong_param)
+
+    def test_wrong_return_type(self):
+        """Test function with wrong return type."""
+        def wrong_return(host: Host) -> str:
+            return "not bool"
+
+        assert not is_nornir_filter(wrong_return)
+
+    def test_not_callable(self):
+        """Test non-callable object."""
+        assert not is_nornir_filter("not a function")
+
+    def test_filter_with_multiple_params(self):
+        """Test filter with multiple parameters."""
+        def multi_param_filter(host: Host, platform: str, site: str) -> bool:
+            return True
+
+        assert is_nornir_filter(multi_param_filter)
+
+    def test_filter_host_only(self):
+        """Test filter with only host parameter."""
+        def host_only(host: Host) -> bool:
+            return True
+
+        assert is_nornir_filter(host_only)
+
+
+class TestProcessFilter:
+    """Tests for process_filter function."""
+
+    def test_process_simple_filter(self):
+        """Test processing filter with additional params."""
+        def test_filter(host: Host, platform: str, groups: list) -> bool:
+            return True
+
+        func, params = process_filter(test_filter)
+
+        assert func is test_filter
+        assert params == ["platform", "groups"]
+
+    def test_process_filter_host_only(self):
+        """Test processing filter with only host param."""
+        def simple_filter(host: Host) -> bool:
+            return True
+
+        func, params = process_filter(simple_filter)
+
+        assert func is simple_filter
+        assert params == []
+
+    def test_process_filter_many_params(self):
+        """Test processing filter with many parameters."""
+        def complex_filter(host: Host, a: str, b: int, c: bool, d: list) -> bool:
+            return True
+
+        func, params = process_filter(complex_filter)
+
+        assert func is complex_filter
+        assert params == ["a", "b", "c", "d"]
+
+
+class TestIsYamlFile:
+    """Tests for is_yaml_file function."""
+
+    def test_yaml_extension(self, tmp_path):
+        """Test .yaml extension is recognized."""
+        yaml_file = tmp_path / "test.yaml"
+        yaml_file.write_text("key: value")
+
+        assert is_yaml_file(yaml_file)
+
+    def test_yml_extension(self, tmp_path):
+        """Test .yml extension is recognized."""
+        yml_file = tmp_path / "test.yml"
+        yml_file.write_text("key: value")
+
+        assert is_yaml_file(yml_file)
+
+    def test_non_yaml_extension(self, tmp_path):
+        """Test non-YAML file is rejected."""
+        txt_file = tmp_path / "test.txt"
+        txt_file.write_text("content")
+
+        assert not is_yaml_file(txt_file)
+
+    def test_nonexistent_file(self, tmp_path):
+        """Test nonexistent file is rejected."""
+        nonexistent = tmp_path / "nonexistent.yaml"
+
+        assert not is_yaml_file(nonexistent)
+
+    def test_directory(self, tmp_path):
+        """Test directory is rejected."""
+        directory = tmp_path / "test.yaml"
+        directory.mkdir()
+
+        assert not is_yaml_file(directory)
+
+    def test_yaml_uppercase(self, tmp_path):
+        """Test uppercase YAML extension."""
+        yaml_file = tmp_path / "test.YAML"
+        yaml_file.write_text("key: value")
+
+        assert not is_yaml_file(yaml_file)
+
+    def test_python_file(self, tmp_path):
+        """Test Python file is rejected."""
+        py_file = tmp_path / "test.py"
+        py_file.write_text("# python")
+
+        assert not is_yaml_file(py_file)
+
+
+class TestLoadProcessor:
+    """Tests for load_processor function."""
+
+    def test_load_valid_processor(self):
+        """Test loading valid processor."""
+        config = {
+            "class": "nornflow.builtins.processors.DefaultNornFlowProcessor",
+            "args": {}
+        }
+
+        processor = load_processor(config)
+
+        assert hasattr(processor, "task_started")
+        assert hasattr(processor, "task_completed")
+        assert callable(processor.task_started)
+
+    def test_load_processor_with_args(self):
+        """Test loading processor with arguments."""
+        mock_processor_class = Mock(return_value=Mock(spec=Processor))
+
+        with patch("nornflow.utils.importlib.import_module") as mock_import:
+            mock_module = Mock()
+            mock_module.TestProcessor = mock_processor_class
+            mock_import.return_value = mock_module
+
+            config = {
+                "class": "test.module.TestProcessor",
+                "args": {"arg1": "value1"}
+            }
+
+            processor = load_processor(config)
+
+            mock_processor_class.assert_called_once_with(arg1="value1")
+
+    def test_load_processor_missing_class(self):
+        """Test error when class is missing."""
+        config = {"args": {}}
+
+        with pytest.raises(ProcessorError, match="Missing 'class'"):
+            load_processor(config)
+
+    def test_load_processor_invalid_module(self):
+        """Test error for invalid module path."""
+        config = {"class": "nonexistent.module.Processor"}
+
+        with pytest.raises(ProcessorError, match="Failed to load processor"):
+            load_processor(config)
+
+    def test_load_processor_invalid_class(self):
+        """Test error for invalid class name."""
+        config = {"class": "nornflow.builtins.processors.NonexistentProcessor"}
+
+        with pytest.raises(ProcessorError, match="Failed to load processor"):
+            load_processor(config)
+
+    def test_load_processor_no_args(self):
+        """Test loading processor without args key."""
+        config = {
+            "class": "nornflow.builtins.processors.DefaultNornFlowProcessor"
+        }
+
+        processor = load_processor(config)
+
+        assert hasattr(processor, "task_started")
+
+    def test_load_processor_instantiation_error(self):
+        """Test error during processor instantiation."""
+        with patch("nornflow.utils.importlib.import_module") as mock_import:
+            mock_module = Mock()
+            mock_module.BadProcessor = Mock(side_effect=ValueError("Bad init"))
+            mock_import.return_value = mock_module
+
+            config = {"class": "test.BadProcessor", "args": {}}
+
+            with pytest.raises(ProcessorError, match="Error instantiating processor"):
+                load_processor(config)
+
+
+class TestConvertListsToTuples:
+    """Tests for convert_lists_to_tuples function."""
+
+    def test_convert_simple_list(self):
+        """Test converting simple list to tuple."""
+        input_dict = HashableDict({"myvar": [1, 2, 3]})
+
+        result = convert_lists_to_tuples(input_dict)
+
+        assert result["myvar"] == (1, 2, 3)
+
+    def test_convert_multiple_lists(self):
+        """Test converting multiple lists."""
+        input_dict = HashableDict({
+            "list1": [1, 2],
+            "list2": ["a", "b"],
+            "not_list": "value"
+        })
+
+        result = convert_lists_to_tuples(input_dict)
+
+        assert result["list1"] == (1, 2)
+        assert result["list2"] == ("a", "b")
+        assert result["not_list"] == "value"
+
+    def test_convert_none_input(self):
+        """Test handling None input."""
+        result = convert_lists_to_tuples(None)
+
+        assert result is None
+
+    def test_convert_empty_dict(self):
+        """Test converting empty dictionary."""
+        input_dict = HashableDict({})
+
+        result = convert_lists_to_tuples(input_dict)
+
+        assert result == HashableDict({})
+
+    def test_convert_nested_lists(self):
+        """Test that nested lists are not converted."""
+        input_dict = HashableDict({"nested": [[1, 2], [3, 4]]})
+
+        result = convert_lists_to_tuples(input_dict)
+
+        assert result["nested"] == ([1, 2], [3, 4])
+
+    def test_convert_mixed_types(self):
+        """Test converting dictionary with mixed value types."""
+        input_dict = HashableDict({
+            "list": [1, 2, 3],
+            "string": "value",
+            "number": 42,
+            "bool": True,
+            "none": None
+        })
+
+        result = convert_lists_to_tuples(input_dict)
+
+        assert result["list"] == (1, 2, 3)
+        assert result["string"] == "value"
+        assert result["number"] == 42
+        assert result["bool"] is True
+        assert result["none"] is None
+
+
+class TestCheckForJinja2Recursive:
+    """Tests for check_for_jinja2_recursive function."""
+
+    def test_valid_string_without_jinja(self):
+        """Test string without Jinja2 passes."""
+        check_for_jinja2_recursive("plain string", "test")
+
+    def test_detect_jinja_in_string(self):
+        """Test Jinja2 in string is detected."""
+        with pytest.raises(WorkflowError, match="Jinja2 code found"):
+            check_for_jinja2_recursive("{{ variable }}", "test")
+
+    def test_detect_jinja_in_nested_dict(self):
+        """Test Jinja2 in nested dict is detected."""
+        obj = {"level1": {"level2": "{{ jinja }}"}}
+
+        with pytest.raises(WorkflowError, match="Jinja2 code found"):
+            check_for_jinja2_recursive(obj, "test")
+
+    def test_detect_jinja_in_list(self):
+        """Test Jinja2 in list is detected."""
+        obj = ["plain", "{{ jinja }}", "plain"]
+
+        with pytest.raises(WorkflowError, match="Jinja2 code found"):
+            check_for_jinja2_recursive(obj, "test")
+
+    def test_valid_nested_structure(self):
+        """Test valid nested structure passes."""
+        obj = {
+            "key1": "value1",
+            "key2": ["item1", "item2"],
+            "key3": {"nested": "value"}
+        }
+
+        check_for_jinja2_recursive(obj, "test")
+
+    def test_detect_jinja_with_filters(self):
+        """Test Jinja2 with filters is detected."""
+        with pytest.raises(WorkflowError, match="Jinja2 code found"):
+            check_for_jinja2_recursive("{{ var | filter }}", "test")
+
+    def test_valid_curly_braces(self):
+        """Test normal curly braces without Jinja2 pass."""
+        check_for_jinja2_recursive('{"json": "object"}', "test")
+
+    def test_detect_jinja_statement(self):
+        """Test Jinja2 statement is detected."""
+        with pytest.raises(WorkflowError, match="Jinja2 code found"):
+            check_for_jinja2_recursive("{% for item in items %}{{ item }}{% endfor %}", "test")
+
+    def test_detect_jinja_in_tuple(self):
+        """Test Jinja2 in tuple is detected."""
+        obj = ("plain", "{{ jinja }}")
+
+        with pytest.raises(WorkflowError, match="Jinja2 code found"):
+            check_for_jinja2_recursive(obj, "test")
+
+    def test_valid_with_integers(self):
+        """Test integers pass validation."""
+        obj = {"numbers": [1, 2, 3, 4, 5]}
+
+        check_for_jinja2_recursive(obj, "test")
+
+    def test_detect_deeply_nested_jinja(self):
+        """Test deeply nested Jinja2 is detected."""
+        obj = {
+            "l1": {
+                "l2": {
+                    "l3": {
+                        "l4": ["value1", "{{ jinja }}"]
+                    }
+                }
+            }
+        }
+
+        with pytest.raises(WorkflowError, match="Jinja2 code found"):
+            check_for_jinja2_recursive(obj, "test")
+
+
+class TestFormatVariableValue:
+    """Tests for format_variable_value function."""
+
+    def test_format_normal_value(self):
+        """Test formatting normal value."""
+        result = format_variable_value("myvar", "value")
+
+        assert result == "value"
+
+    def test_format_protected_password(self):
+        """Test password is masked."""
+        result = format_variable_value("password", "secret123")
+
+        assert result == "********"
+
+    def test_format_protected_token(self):
+        """Test token is masked."""
+        result = format_variable_value("api_token", "abc123")
+
+        assert result == "********"
+
+    def test_format_protected_secret(self):
+        """Test secret is masked."""
+        result = format_variable_value("secret_key", "sensitive")
+
+        assert result == "********"
+
+    def test_format_tuple_value(self):
+        """Test tuple is formatted as list."""
+        result = format_variable_value("myvar", (1, 2, 3))
+
+        assert result == "[1, 2, 3]"
+
+    def test_format_case_insensitive_protection(self):
+        """Test protection is case-insensitive."""
+        result = format_variable_value("PASSWORD", "secret")
+
+        assert result == "********"
+
+    def test_format_integer(self):
+        """Test formatting integer value."""
+        result = format_variable_value("count", 42)
+
+        assert result == "42"
+
+    def test_format_boolean(self):
+        """Test formatting boolean value."""
+        result = format_variable_value("enabled", True)
+
+        assert result == "True"
+
+    def test_format_list_value(self):
+        """Test formatting list value."""
+        result = format_variable_value("items", ["a", "b", "c"])
+
+        assert result == "['a', 'b', 'c']"
+
+    def test_format_empty_tuple(self):
+        """Test formatting empty tuple."""
+        result = format_variable_value("empty", ())
+
+        assert result == "[]"
+
+    def test_format_partial_keyword_match(self):
+        """Test partial keyword match masks value."""
+        result = format_variable_value("my_password_var", "value")
+
+        assert result == "********"
+
+
+class TestPrintWorkflowOverview:
+    """Tests for print_workflow_overview function."""
+
+    @patch("nornflow.utils.Console")
+    def test_print_basic_overview(self, mock_console):
+        """Test printing basic workflow overview."""
+        workflow_model = Mock()
+        workflow_model.name = "Test Workflow"
+        workflow_model.description = "Test Description"
+
+        print_workflow_overview(
+            workflow_model=workflow_model,
+            effective_dry_run=False,
+            hosts_count=5,
+            inventory_filters={},
+            workflow_vars={},
+            vars={},
+            failure_strategy=FailureStrategy.FAIL_FAST
+        )
+
+        mock_console.return_value.print.assert_called_once()
+
+    @patch("nornflow.utils.Console")
+    def test_print_with_filters(self, mock_console):
+        """Test printing overview with inventory filters."""
+        workflow_model = Mock()
+        workflow_model.name = "Test"
+        workflow_model.description = None
+
+        print_workflow_overview(
+            workflow_model=workflow_model,
+            effective_dry_run=True,
+            hosts_count=3,
+            inventory_filters={"platform": "ios", "groups": ["core"]},
+            workflow_vars={"var1": "value1"},
+            vars={"var2": "value2"},
+            failure_strategy=None
+        )
+
+        mock_console.return_value.print.assert_called_once()
+
+    @patch("nornflow.utils.Console")
+    def test_print_with_variables(self, mock_console):
+        """Test printing overview with variables."""
+        workflow_model = Mock()
+        workflow_model.name = "Test"
+        workflow_model.description = None
+
+        print_workflow_overview(
+            workflow_model=workflow_model,
+            effective_dry_run=False,
+            hosts_count=1,
+            inventory_filters={},
+            workflow_vars={"workflow_var": "wf_value"},
+            vars={"cli_var": "cli_value"},
+            failure_strategy=FailureStrategy.SKIP_FAILED
+        )
+
+        mock_console.return_value.print.assert_called_once()
+
+    @patch("nornflow.utils.Console")
+    def test_print_no_description(self, mock_console):
+        """Test printing overview without description."""
+        workflow_model = Mock()
+        workflow_model.name = "Simple"
+        workflow_model.description = None
+
+        print_workflow_overview(
+            workflow_model=workflow_model,
+            effective_dry_run=False,
+            hosts_count=10,
+            inventory_filters={},
+            workflow_vars={},
+            vars={},
+            failure_strategy=FailureStrategy.FAIL_FAST
+        )
+
+        mock_console.return_value.print.assert_called_once()
+
+    @patch("nornflow.utils.Console")
+    def test_print_with_all_options(self, mock_console):
+        """Test printing overview with all options populated."""
+        workflow_model = Mock()
+        workflow_model.name = "Complete Workflow"
+        workflow_model.description = "A comprehensive test"
+
+        print_workflow_overview(
+            workflow_model=workflow_model,
+            effective_dry_run=True,
+            hosts_count=100,
+            inventory_filters={
+                "platform": "ios",
+                "site": "DC1",
+                "groups": ["core", "edge"]
+            },
+            workflow_vars={
+                "timeout": 30,
+                "retries": 3
+            },
+            vars={
+                "user": "admin",
+                "debug": True
+            },
+            failure_strategy=FailureStrategy.SKIP_FAILED
+        )
+
+        mock_console.return_value.print.assert_called_once()
\ No newline at end of file
diff --git a/tests/unit/hooks/test_base.py b/tests/unit/hooks/test_base.py
index dba167a..abfe074 100644
--- a/tests/unit/hooks/test_base.py
+++ b/tests/unit/hooks/test_base.py
@@ -1,3 +1,4 @@
+# ruff: noqa: SLF001, T201
 from unittest.mock import MagicMock
 
 import pytest
@@ -44,12 +45,19 @@ class TestHook:
         hook.run_once_per_task = True
         mock_task = MagicMock()
         
+        # Mock the hook's context to include a task_model
+        mock_task_model = MagicMock()
+        hook._current_context = {"task_model": mock_task_model}
+
         assert hook.should_execute(mock_task) is True
         
         assert hook.should_execute(mock_task) is False
         assert hook.should_execute(mock_task) is False
         
         mock_task2 = MagicMock()
+        # Simulate a different task_model for the new task (as the processor would do)
+        mock_task_model2 = MagicMock()
+        hook._current_context = {"task_model": mock_task_model2}
         assert hook.should_execute(mock_task2) is True
 
     def test_get_context_empty(self):
@@ -141,4 +149,4 @@ class TestHook:
         assert HOOK_REGISTRY["set_to"] == SetToHook
         
         assert "shush" in HOOK_REGISTRY
-        assert HOOK_REGISTRY["shush"] == ShushHook
\ No newline at end of file
+        assert HOOK_REGISTRY["shush"] == ShushHook
diff --git a/tests/unit/hooks/test_jinja2_mixin.py b/tests/unit/hooks/test_jinja2_mixin.py
new file mode 100644
index 0000000..dc57074
--- /dev/null
+++ b/tests/unit/hooks/test_jinja2_mixin.py
@@ -0,0 +1,198 @@
+from unittest.mock import MagicMock
+
+import pytest
+from nornir.core.inventory import Host
+
+from nornflow.hooks import Hook, Jinja2ResolvableMixin
+from nornflow.hooks.exceptions import HookError
+
+
+class Jinja2MixinTestHook(Hook, Jinja2ResolvableMixin):
+    """Helper hook class for testing Jinja2ResolvableMixin."""
+    hook_name = "jinja2_mixin_test_hook"
+
+
+class TestJinja2ResolvableMixin:
+    """Test suite for Jinja2ResolvableMixin."""
+
+    def test_is_jinja2_expression_true(self):
+        """Test _is_jinja2_expression returns True for strings with markers."""
+        hook = Jinja2MixinTestHook("{{ var }}")
+        assert hook._is_jinja2_expression("{{ var }}") is True
+        assert hook._is_jinja2_expression("{% if x %}y{% endif %}") is True
+
+    def test_is_jinja2_expression_false(self):
+        """Test _is_jinja2_expression returns False for plain strings."""
+        hook = Jinja2MixinTestHook("plain string")
+        assert hook._is_jinja2_expression("plain string") is False
+        assert hook._is_jinja2_expression(123) is False
+        assert hook._is_jinja2_expression(None) is False
+
+    def test_to_bool_conversion(self):
+        """Test _to_bool conversion logic."""
+        hook = Jinja2MixinTestHook()
+        
+        # Boolean inputs
+        assert hook._to_bool(True) is True
+        assert hook._to_bool(False) is False
+        
+        # String inputs (truthy)
+        assert hook._to_bool("yes") is True
+        assert hook._to_bool("true") is True
+        assert hook._to_bool("on") is True
+        assert hook._to_bool("1") is True
+        
+        # String inputs (falsy)
+        assert hook._to_bool("no") is False
+        assert hook._to_bool("false") is False
+        assert hook._to_bool("off") is False
+        assert hook._to_bool("0") is False
+        assert hook._to_bool("random") is False
+        
+        # Other inputs
+        assert hook._to_bool(1) is True
+        assert hook._to_bool(0) is False
+
+    def test_validate_jinja2_string_valid(self):
+        """Test validation passes for valid Jinja2 string."""
+        hook = Jinja2MixinTestHook("{{ var }}")
+        mock_task_model = MagicMock()
+        
+        # Should not raise
+        hook.execute_hook_validations(mock_task_model)
+
+    def test_extract_host_from_task_success(self):
+        """Test extracting host from task inventory."""
+        hook = Jinja2MixinTestHook()
+        mock_task = MagicMock()
+        mock_host = MagicMock(spec=Host)
+        mock_task.nornir.inventory.hosts = {"host1": mock_host}
+        
+        result = hook._extract_host_from_task(mock_task)
+        assert result == mock_host
+
+    def test_extract_host_from_task_empty_inventory(self):
+        """Test extracting host raises error when inventory is empty."""
+        hook = Jinja2MixinTestHook()
+        mock_task = MagicMock()
+        mock_task.nornir.inventory.hosts = {}
+        
+        with pytest.raises(HookError, match="Cannot extract host from task with empty inventory"):
+            hook._extract_host_from_task(mock_task)
+
+    def test_resolve_jinja2_with_vars_manager(self):
+        """Test Jinja2 resolution when vars_manager is available."""
+        hook = Jinja2MixinTestHook("{{ variable }}")
+        
+        mock_host = MagicMock(spec=Host)
+        mock_host.name = "router1"
+        
+        mock_vars_manager = MagicMock()
+        # FIX: Mock resolve_string, not device_context.resolve_value
+        mock_vars_manager.resolve_string.return_value = "resolved_value"
+        
+        hook._current_context = {"vars_manager": mock_vars_manager}
+        
+        result = hook._resolve_jinja2("{{ variable }}", mock_host)
+        
+        assert result == "resolved_value"
+        mock_vars_manager.resolve_string.assert_called_with("{{ variable }}", "router1")
+
+    def test_resolve_jinja2_without_vars_manager_raises_error(self):
+        """Test Jinja2 resolution raises HookError when vars_manager is missing."""
+        hook = Jinja2MixinTestHook("{{ variable }}")
+        mock_host = MagicMock(spec=Host)
+        hook._current_context = {}
+        
+        # FIX: Update regex to match actual error message
+        with pytest.raises(HookError, match="Variables manager not available in context"):
+            hook._resolve_jinja2("{{ variable }}", mock_host)
+
+    def test_resolve_jinja2_with_none_context_raises_error(self):
+        """Test Jinja2 resolution raises HookError when context is None."""
+        hook = Jinja2MixinTestHook("{{ variable }}")
+        mock_host = MagicMock(spec=Host)
+        hook._current_context = None
+        
+        # FIX: Update regex to match actual error message
+        with pytest.raises(HookError, match="Variables manager not available in context"):
+            hook._resolve_jinja2("{{ variable }}", mock_host)
+
+    def test_get_resolved_value_resolves_jinja2_with_provided_host(self):
+        """Test resolves Jinja2 when host is provided."""
+        hook = Jinja2MixinTestHook("{{ variable }}")
+        
+        mock_task = MagicMock()
+        mock_host = MagicMock(spec=Host)
+        mock_host.name = "router1"
+        
+        mock_vars_manager = MagicMock()
+        # FIX: Mock resolve_string
+        mock_vars_manager.resolve_string.return_value = "resolved"
+        
+        hook._current_context = {"vars_manager": mock_vars_manager}
+        
+        result = hook.get_resolved_value(mock_task, host=mock_host)
+        
+        assert result == "resolved"
+        mock_vars_manager.resolve_string.assert_called_with("{{ variable }}", "router1")
+
+    def test_get_resolved_value_extracts_host_when_not_provided(self):
+        """Test extracts host from task when host not provided."""
+        hook = Jinja2MixinTestHook("{{ variable }}")
+        
+        mock_host = MagicMock(spec=Host)
+        mock_host.name = "router1"
+        
+        mock_task = MagicMock()
+        mock_task.nornir.inventory.hosts = {"router1": mock_host}
+        
+        mock_vars_manager = MagicMock()
+        # FIX: Mock resolve_string
+        mock_vars_manager.resolve_string.return_value = "resolved"
+        
+        hook._current_context = {"vars_manager": mock_vars_manager}
+        
+        result = hook.get_resolved_value(mock_task)
+        
+        assert result == "resolved"
+        mock_vars_manager.resolve_string.assert_called_with("{{ variable }}", "router1")
+
+    def test_get_resolved_value_returns_default_when_empty(self):
+        """Test returns default value when hook value is empty."""
+        hook = Jinja2MixinTestHook(None)
+        mock_task = MagicMock()
+        
+        result = hook.get_resolved_value(mock_task, default="default")
+        assert result == "default"
+
+    def test_get_resolved_value_returns_raw_value_when_not_jinja(self):
+        """Test returns raw value when not a Jinja2 expression."""
+        hook = Jinja2MixinTestHook("raw_value")
+        mock_task = MagicMock()
+        
+        result = hook.get_resolved_value(mock_task)
+        assert result == "raw_value"
+
+    def test_get_resolved_value_as_bool(self):
+        """Test returns boolean converted value."""
+        hook = Jinja2MixinTestHook("yes")
+        mock_task = MagicMock()
+        
+        result = hook.get_resolved_value(mock_task, as_bool=True)
+        assert result is True
+
+    def test_get_resolved_value_jinja_as_bool(self):
+        """Test returns boolean converted resolved Jinja2 value."""
+        hook = Jinja2MixinTestHook("{{ var }}")
+        mock_task = MagicMock()
+        mock_host = MagicMock(spec=Host)
+        mock_host.name = "router1"
+        
+        mock_vars_manager = MagicMock()
+        mock_vars_manager.resolve_string.return_value = "false"
+        
+        hook._current_context = {"vars_manager": mock_vars_manager}
+        
+        result = hook.get_resolved_value(mock_task, host=mock_host, as_bool=True)
+        assert result is False
diff --git a/tests/unit/models/test_hookable_model.py b/tests/unit/models/test_hookable_model.py
index 86435ac..fa89750 100644
--- a/tests/unit/models/test_hookable_model.py
+++ b/tests/unit/models/test_hookable_model.py
@@ -1,12 +1,10 @@
 from typing import ClassVar
-from unittest.mock import MagicMock, patch
+from unittest.mock import MagicMock
 
 import pytest
 from nornir.core.task import AggregatedResult
 
 from nornflow.models import HookableModel
-from nornflow.nornir_manager import NornirManager
-from nornflow.vars.manager import NornFlowVariablesManager
 
 
 class TestHookableModel:
diff --git a/tests/unit/models/test_task_model.py b/tests/unit/models/test_task_model.py
index 3eab280..23005d4 100644
--- a/tests/unit/models/test_task_model.py
+++ b/tests/unit/models/test_task_model.py
@@ -8,8 +8,6 @@ from pydantic_serdes.custom_collections import HashableDict
 
 from nornflow.exceptions import TaskError
 from nornflow.models import TaskModel
-from nornflow.nornir_manager import NornirManager
-from nornflow.vars.manager import NornFlowVariablesManager
 
 
 class TestTaskModel:
diff --git a/tests/unit/models/test_workflow_model.py b/tests/unit/models/test_workflow_model.py
index 5eb7f40..7aba53f 100644
--- a/tests/unit/models/test_workflow_model.py
+++ b/tests/unit/models/test_workflow_model.py
@@ -3,7 +3,7 @@
 import pytest
 
 from nornflow.constants import FailureStrategy
-from nornflow.exceptions import WorkflowError
+from nornflow.exceptions import BlueprintCircularDependencyError, BlueprintError, WorkflowError
 from nornflow.models import WorkflowModel
 
 
@@ -38,11 +38,10 @@ class TestWorkflowModel:
         workflow = WorkflowModel.create({
             "workflow": {
                 "name": "test",
-                "tasks": [{"name": "dummy_task"}],  # Add required tasks
+                "tasks": [{"name": "dummy_task"}],
                 "inventory_filters": {"key": ["list", "of", "items"]}
             }
         })
-        # Lists are converted to tuples for hashability
         assert workflow.inventory_filters["key"] == ("list", "of", "items")
 
     def test_validate_processors(self):
@@ -50,8 +49,8 @@ class TestWorkflowModel:
         workflow = WorkflowModel.create({
             "workflow": {
                 "name": "test",
-                "tasks": [{"name": "dummy_task"}],  # Add required tasks
-                "processors": [{"class": "MyProcessor", "args": {}}]  # Only 1 processor allowed
+                "tasks": [{"name": "dummy_task"}],
+                "processors": [{"class": "MyProcessor", "args": {}}]
             }
         })
         assert len(workflow.processors) == 1
@@ -62,11 +61,10 @@ class TestWorkflowModel:
         workflow = WorkflowModel.create({
             "workflow": {
                 "name": "test",
-                "tasks": [{"name": "dummy_task"}],  # Add required tasks
+                "tasks": [{"name": "dummy_task"}],
                 "vars": {"key": ["list", "values"]}
             }
         })
-        # Lists are converted to tuples for hashability
         assert workflow.vars["key"] == ("list", "values")
 
     def test_empty_optional_fields(self):
@@ -74,15 +72,15 @@ class TestWorkflowModel:
         workflow_dict = {
             "workflow": {
                 "name": "minimal",
-                "tasks": [{"name": "dummy_task"}]  # Add required tasks
+                "tasks": [{"name": "dummy_task"}]
             }
         }
         workflow = WorkflowModel.create(workflow_dict)
         assert workflow.name == "minimal"
         assert len(workflow.tasks) == 1
-        assert workflow.inventory_filters is None
-        assert workflow.processors is None
-        assert workflow.vars is None
+        assert not workflow.inventory_filters
+        assert not workflow.processors
+        assert not workflow.vars
 
     def test_with_all_fields(self):
         """Test workflow with all fields specified."""
@@ -99,7 +97,7 @@ class TestWorkflowModel:
                     "groups": ["core", "edge"]
                 },
                 "processors": [
-                    {"class": "Processor1"}  # Only 1 processor allowed
+                    {"class": "Processor1"}
                 ],
                 "vars": {
                     "var1": "value1",
@@ -115,7 +113,645 @@ class TestWorkflowModel:
         assert len(workflow.tasks) == 2
         assert workflow.inventory_filters["platform"] == "ios"
         assert workflow.inventory_filters["groups"] == ("core", "edge")
-        assert len(workflow.processors) == 1  # Only 1 processor allowed
+        assert len(workflow.processors) == 1
         assert workflow.vars["var1"] == "value1"
         assert workflow.vars["var2"] == ("a", "b", "c")
-        assert workflow.failure_strategy == FailureStrategy.FAIL_FAST
\ No newline at end of file
+        assert workflow.failure_strategy == FailureStrategy.FAIL_FAST
+
+
+class TestWorkflowModelBlueprintExpansion:
+    """Tests for blueprint expansion in WorkflowModel.create()."""
+
+    def test_create_without_blueprints(self):
+        """Test workflow creation without any blueprint references."""
+        workflow_dict = {
+            "workflow": {
+                "name": "no_blueprints",
+                "tasks": [{"name": "task1"}]
+            }
+        }
+        workflow = WorkflowModel.create(workflow_dict)
+        assert len(workflow.tasks) == 1
+        assert workflow.tasks[0].name == "task1"
+
+    def test_create_with_simple_blueprint(self, tmp_path):
+        """Test workflow with a simple blueprint reference."""
+        blueprint_dir = tmp_path / "blueprints"
+        blueprint_dir.mkdir()
+        blueprint_file = blueprint_dir / "simple.yaml"
+        blueprint_file.write_text("""tasks:
+  - name: blueprint_task
+    args:
+      message: "from blueprint"
+""")
+        
+        blueprints_catalog = {"simple": blueprint_file}
+        vars_dir = tmp_path / "vars"
+        vars_dir.mkdir()
+        
+        workflow_dict = {
+            "workflow": {
+                "name": "with_blueprint",
+                "tasks": [{"blueprint": "simple"}]
+            }
+        }
+        
+        workflow = WorkflowModel.create(
+            workflow_dict,
+            blueprints_catalog=blueprints_catalog,
+            vars_dir=vars_dir,
+            workflow_path=None,
+            workflow_roots=[str(tmp_path)]
+        )
+        
+        assert len(workflow.tasks) == 1
+        assert workflow.tasks[0].name == "blueprint_task"
+
+    def test_create_with_multiple_blueprints(self, tmp_path):
+        """Test workflow with multiple blueprint references."""
+        blueprint_dir = tmp_path / "blueprints"
+        blueprint_dir.mkdir()
+        
+        bp1 = blueprint_dir / "bp1.yaml"
+        bp1.write_text("""tasks:
+  - name: task1
+    args:
+      key: value1
+""")
+        
+        bp2 = blueprint_dir / "bp2.yaml"
+        bp2.write_text("""tasks:
+  - name: task2
+    args:
+      key: value2
+""")
+        
+        blueprints_catalog = {"bp1": bp1, "bp2": bp2}
+        vars_dir = tmp_path / "vars"
+        vars_dir.mkdir()
+        
+        workflow_dict = {
+            "workflow": {
+                "name": "multi_blueprint",
+                "tasks": [
+                    {"blueprint": "bp1"},
+                    {"blueprint": "bp2"}
+                ]
+            }
+        }
+        
+        workflow = WorkflowModel.create(
+            workflow_dict,
+            blueprints_catalog=blueprints_catalog,
+            vars_dir=vars_dir,
+            workflow_path=None,
+            workflow_roots=[str(tmp_path)]
+        )
+        
+        assert len(workflow.tasks) == 2
+        assert workflow.tasks[0].name == "task1"
+        assert workflow.tasks[1].name == "task2"
+
+    def test_create_mixed_blueprints_and_tasks(self, tmp_path):
+        """Test workflow with both blueprint references and direct tasks."""
+        blueprint_dir = tmp_path / "blueprints"
+        blueprint_dir.mkdir()
+        
+        blueprint_file = blueprint_dir / "mixed.yaml"
+        blueprint_file.write_text("""tasks:
+  - name: blueprint_task
+    args:
+      from: blueprint
+""")
+        
+        blueprints_catalog = {"mixed": blueprint_file}
+        vars_dir = tmp_path / "vars"
+        vars_dir.mkdir()
+        
+        workflow_dict = {
+            "workflow": {
+                "name": "mixed_workflow",
+                "tasks": [
+                    {"name": "direct_task1", "args": {"from": "direct"}},
+                    {"blueprint": "mixed"},
+                    {"name": "direct_task2", "args": {"from": "direct"}}
+                ]
+            }
+        }
+        
+        workflow = WorkflowModel.create(
+            workflow_dict,
+            blueprints_catalog=blueprints_catalog,
+            vars_dir=vars_dir,
+            workflow_path=None,
+            workflow_roots=[str(tmp_path)]
+        )
+        
+        assert len(workflow.tasks) == 3
+        assert workflow.tasks[0].name == "direct_task1"
+        assert workflow.tasks[1].name == "blueprint_task"
+        assert workflow.tasks[2].name == "direct_task2"
+
+    def test_create_with_nested_blueprints(self, tmp_path):
+        """Test workflow with nested blueprint references."""
+        blueprint_dir = tmp_path / "blueprints"
+        blueprint_dir.mkdir()
+        
+        parent = blueprint_dir / "parent.yaml"
+        parent.write_text("""tasks:
+  - name: parent_task
+    args:
+      level: parent
+  - blueprint: child
+""")
+        
+        child = blueprint_dir / "child.yaml"
+        child.write_text("""tasks:
+  - name: child_task
+    args:
+      level: child
+""")
+        
+        blueprints_catalog = {"parent": parent, "child": child}
+        vars_dir = tmp_path / "vars"
+        vars_dir.mkdir()
+        
+        workflow_dict = {
+            "workflow": {
+                "name": "nested",
+                "tasks": [{"blueprint": "parent"}]
+            }
+        }
+        
+        workflow = WorkflowModel.create(
+            workflow_dict,
+            blueprints_catalog=blueprints_catalog,
+            vars_dir=vars_dir,
+            workflow_path=None,
+            workflow_roots=[str(tmp_path)]
+        )
+        
+        assert len(workflow.tasks) == 2
+        assert workflow.tasks[0].name == "parent_task"
+        assert workflow.tasks[1].name == "child_task"
+
+    def test_create_blueprint_with_true_condition(self, tmp_path):
+        """Test blueprint with 'if' condition that evaluates to true."""
+        blueprint_dir = tmp_path / "blueprints"
+        blueprint_dir.mkdir()
+        
+        blueprint_file = blueprint_dir / "conditional.yaml"
+        blueprint_file.write_text("""tasks:
+  - name: conditional_task
+    args:
+      condition: included
+""")
+        
+        blueprints_catalog = {"conditional": blueprint_file}
+        vars_dir = tmp_path / "vars"
+        vars_dir.mkdir()
+        
+        workflow_dict = {
+            "workflow": {
+                "name": "conditional_workflow",
+                "tasks": [{"blueprint": "conditional", "if": "true"}]
+            }
+        }
+        
+        workflow = WorkflowModel.create(
+            workflow_dict,
+            blueprints_catalog=blueprints_catalog,
+            vars_dir=vars_dir,
+            workflow_path=None,
+            workflow_roots=[str(tmp_path)]
+        )
+        
+        assert len(workflow.tasks) == 1
+        assert workflow.tasks[0].name == "conditional_task"
+
+    def test_create_blueprint_with_false_condition(self, tmp_path):
+        """Test blueprint with 'if' condition that evaluates to false."""
+        blueprint_dir = tmp_path / "blueprints"
+        blueprint_dir.mkdir()
+        
+        blueprint_file = blueprint_dir / "conditional.yaml"
+        blueprint_file.write_text("""tasks:
+  - name: conditional_task
+    args:
+      condition: excluded
+""")
+        
+        blueprints_catalog = {"conditional": blueprint_file}
+        vars_dir = tmp_path / "vars"
+        vars_dir.mkdir()
+        
+        workflow_dict = {
+            "workflow": {
+                "name": "conditional_workflow",
+                "tasks": [{"blueprint": "conditional", "if": "false"}]
+            }
+        }
+        
+        workflow = WorkflowModel.create(
+            workflow_dict,
+            blueprints_catalog=blueprints_catalog,
+            vars_dir=vars_dir,
+            workflow_path=None,
+            workflow_roots=[str(tmp_path)]
+        )
+        
+        assert len(workflow.tasks) == 0
+
+    def test_create_blueprint_with_jinja_condition(self, tmp_path):
+        """Test blueprint with Jinja2 expression condition."""
+        blueprint_dir = tmp_path / "blueprints"
+        blueprint_dir.mkdir()
+        
+        blueprint_file = blueprint_dir / "jinja_cond.yaml"
+        blueprint_file.write_text("""tasks:
+  - name: jinja_task
+    args:
+      type: conditional
+""")
+        
+        blueprints_catalog = {"jinja_cond": blueprint_file}
+        vars_dir = tmp_path / "vars"
+        vars_dir.mkdir()
+        
+        workflow_dict = {
+            "workflow": {
+                "name": "jinja_condition",
+                "vars": {"include_task": True},
+                "tasks": [{"blueprint": "jinja_cond", "if": "{{ include_task }}"}]
+            }
+        }
+        
+        workflow = WorkflowModel.create(
+            workflow_dict,
+            blueprints_catalog=blueprints_catalog,
+            vars_dir=vars_dir,
+            workflow_path=None,
+            workflow_roots=[str(tmp_path)]
+        )
+        
+        assert len(workflow.tasks) == 1
+
+    def test_create_blueprint_with_cli_vars(self, tmp_path):
+        """Test blueprint expansion with CLI variables."""
+        blueprint_dir = tmp_path / "blueprints"
+        blueprint_dir.mkdir()
+        
+        blueprint_file = blueprint_dir / "cli_aware.yaml"
+        blueprint_file.write_text("""tasks:
+  - name: cli_task
+    args:
+      source: cli_vars
+""")
+        
+        blueprints_catalog = {"cli_aware": blueprint_file}
+        vars_dir = tmp_path / "vars"
+        vars_dir.mkdir()
+        
+        workflow_dict = {
+            "workflow": {
+                "name": "cli_vars_test",
+                "tasks": [{"blueprint": "{{ bp_from_cli }}", "if": "{{ enable_blueprint }}"}]
+            }
+        }
+        
+        workflow = WorkflowModel.create(
+            workflow_dict,
+            blueprints_catalog=blueprints_catalog,
+            vars_dir=vars_dir,
+            workflow_path=None,
+            workflow_roots=[str(tmp_path)],
+            cli_vars={"bp_from_cli": "cli_aware", "enable_blueprint": True}
+        )
+        
+        assert len(workflow.tasks) == 1
+        assert workflow.tasks[0].name == "cli_task"
+
+    def test_create_circular_blueprint_dependency(self, tmp_path):
+        """Test error when blueprints have circular dependency."""
+        blueprint_dir = tmp_path / "blueprints"
+        blueprint_dir.mkdir()
+        
+        bp_a = blueprint_dir / "a.yaml"
+        bp_a.write_text("""tasks:
+  - blueprint: b
+""")
+        
+        bp_b = blueprint_dir / "b.yaml"
+        bp_b.write_text("""tasks:
+  - blueprint: a
+""")
+        
+        blueprints_catalog = {"a": bp_a, "b": bp_b}
+        vars_dir = tmp_path / "vars"
+        vars_dir.mkdir()
+        
+        workflow_dict = {
+            "workflow": {
+                "name": "circular",
+                "tasks": [{"blueprint": "a"}]
+            }
+        }
+        
+        with pytest.raises(BlueprintCircularDependencyError):
+            WorkflowModel.create(
+                workflow_dict,
+                blueprints_catalog=blueprints_catalog,
+                vars_dir=vars_dir,
+                workflow_path=None,
+                workflow_roots=[str(tmp_path)]
+            )
+
+    def test_create_missing_blueprint(self, tmp_path):
+        """Test error when blueprint reference cannot be found."""
+        vars_dir = tmp_path / "vars"
+        vars_dir.mkdir()
+        
+        workflow_dict = {
+            "workflow": {
+                "name": "missing_bp",
+                "tasks": [{"blueprint": "nonexistent"}]
+            }
+        }
+        
+        with pytest.raises(BlueprintError, match="Blueprint not found in catalog or filesystem"):
+            WorkflowModel.create(
+                workflow_dict,
+                blueprints_catalog={},
+                vars_dir=vars_dir,
+                workflow_path=None,
+                workflow_roots=[str(tmp_path)]
+            )
+
+    def test_create_invalid_blueprint_structure(self, tmp_path):
+        """Test error when blueprint has invalid structure."""
+        blueprint_dir = tmp_path / "blueprints"
+        blueprint_dir.mkdir()
+        
+        invalid_bp = blueprint_dir / "invalid.yaml"
+        invalid_bp.write_text("""workflow:
+  name: "not a blueprint"
+""")
+        
+        blueprints_catalog = {"invalid": invalid_bp}
+        vars_dir = tmp_path / "vars"
+        vars_dir.mkdir()
+        
+        workflow_dict = {
+            "workflow": {
+                "name": "invalid_structure",
+                "tasks": [{"blueprint": "invalid"}]
+            }
+        }
+        
+        with pytest.raises(BlueprintError, match="Blueprint must contain ONLY 'tasks' key"):
+            WorkflowModel.create(
+                workflow_dict,
+                blueprints_catalog=blueprints_catalog,
+                vars_dir=vars_dir,
+                workflow_path=None,
+                workflow_roots=[str(tmp_path)]
+            )
+
+    def test_create_blueprint_tasks_not_list(self, tmp_path):
+        """Test error when blueprint tasks is not a list."""
+        blueprint_dir = tmp_path / "blueprints"
+        blueprint_dir.mkdir()
+        
+        invalid_bp = blueprint_dir / "not_list.yaml"
+        invalid_bp.write_text("""tasks: "should be a list"
+""")
+        
+        blueprints_catalog = {"not_list": invalid_bp}
+        vars_dir = tmp_path / "vars"
+        vars_dir.mkdir()
+        
+        workflow_dict = {
+            "workflow": {
+                "name": "tasks_not_list",
+                "tasks": [{"blueprint": "not_list"}]
+            }
+        }
+        
+        with pytest.raises(BlueprintError, match="'tasks' must be a list"):
+            WorkflowModel.create(
+                workflow_dict,
+                blueprints_catalog=blueprints_catalog,
+                vars_dir=vars_dir,
+                workflow_path=None,
+                workflow_roots=[str(tmp_path)]
+            )
+
+    def test_create_blueprint_with_variable_resolution(self, tmp_path):
+        """Test blueprint name using variable resolution."""
+        blueprint_dir = tmp_path / "blueprints"
+        blueprint_dir.mkdir()
+        
+        blueprint_file = blueprint_dir / "dynamic.yaml"
+        blueprint_file.write_text("""tasks:
+  - name: dynamic_task
+    args:
+      resolved: "{{ my_var }}"
+""")
+        
+        blueprints_catalog = {"dynamic": blueprint_file}
+        vars_dir = tmp_path / "vars"
+        vars_dir.mkdir()
+        
+        workflow_dict = {
+            "workflow": {
+                "name": "variable_resolution",
+                "vars": {"bp_name": "dynamic", "my_var": "test_value"},
+                "tasks": [{"blueprint": "{{ bp_name }}"}]
+            }
+        }
+        
+        workflow = WorkflowModel.create(
+            workflow_dict,
+            blueprints_catalog=blueprints_catalog,
+            vars_dir=vars_dir,
+            workflow_path=None,
+            workflow_roots=[str(tmp_path)]
+        )
+        
+        assert len(workflow.tasks) == 1
+        assert workflow.tasks[0].name == "dynamic_task"
+
+    def test_create_blueprint_with_env_vars(self, tmp_path, monkeypatch):
+        """Test blueprint expansion with environment variables."""
+        monkeypatch.setenv("NORNFLOW_VAR_ENABLE_FEATURE", "true")
+        
+        blueprint_dir = tmp_path / "blueprints"
+        blueprint_dir.mkdir()
+        
+        blueprint_file = blueprint_dir / "env_aware.yaml"
+        blueprint_file.write_text("""tasks:
+  - name: env_task
+    args:
+      env_based: true
+""")
+        
+        blueprints_catalog = {"env_aware": blueprint_file}
+        vars_dir = tmp_path / "vars"
+        vars_dir.mkdir()
+        
+        workflow_dict = {
+            "workflow": {
+                "name": "env_vars_test",
+                "tasks": [{"blueprint": "env_aware", "if": "{{ ENABLE_FEATURE }}"}]
+            }
+        }
+        
+        workflow = WorkflowModel.create(
+            workflow_dict,
+            blueprints_catalog=blueprints_catalog,
+            vars_dir=vars_dir,
+            workflow_path=None,
+            workflow_roots=[str(tmp_path)]
+        )
+        
+        assert len(workflow.tasks) == 1
+
+    def test_create_blueprint_with_domain_defaults(self, tmp_path):
+        """Test blueprint expansion with domain-specific defaults."""
+        blueprint_dir = tmp_path / "blueprints"
+        blueprint_dir.mkdir()
+        
+        blueprint_file = blueprint_dir / "domain_aware.yaml"
+        blueprint_file.write_text("""tasks:
+  - name: domain_task
+    args:
+      domain_based: true
+""")
+        
+        vars_dir = tmp_path / "vars"
+        vars_dir.mkdir()
+        domain_dir = vars_dir / "networking"
+        domain_dir.mkdir()
+        
+        defaults_file = domain_dir / "defaults.yaml"
+        defaults_file.write_text("include_domain_task: true\n")
+        
+        workflow_dir = tmp_path / "workflows" / "networking"
+        workflow_dir.mkdir(parents=True)
+        workflow_path = workflow_dir / "test.yaml"
+        
+        blueprints_catalog = {"domain_aware": blueprint_file}
+        
+        workflow_dict = {
+            "workflow": {
+                "name": "domain_defaults_test",
+                "tasks": [{"blueprint": "domain_aware", "if": "{{ include_domain_task }}"}]
+            }
+        }
+        
+        workflow = WorkflowModel.create(
+            workflow_dict,
+            blueprints_catalog=blueprints_catalog,
+            vars_dir=vars_dir,
+            workflow_path=workflow_path,
+            workflow_roots=[str(tmp_path / "workflows")]
+        )
+        
+        assert len(workflow.tasks) == 1
+
+    def test_create_deeply_nested_blueprints(self, tmp_path):
+        """Test workflow with deeply nested blueprint hierarchy."""
+        blueprint_dir = tmp_path / "blueprints"
+        blueprint_dir.mkdir()
+        
+        level1 = blueprint_dir / "level1.yaml"
+        level1.write_text("""tasks:
+  - name: task_level1
+    args:
+      level: 1
+  - blueprint: level2
+""")
+        
+        level2 = blueprint_dir / "level2.yaml"
+        level2.write_text("""tasks:
+  - name: task_level2
+    args:
+      level: 2
+  - blueprint: level3
+""")
+        
+        level3 = blueprint_dir / "level3.yaml"
+        level3.write_text("""tasks:
+  - name: task_level3
+    args:
+      level: 3
+""")
+        
+        blueprints_catalog = {"level1": level1, "level2": level2, "level3": level3}
+        vars_dir = tmp_path / "vars"
+        vars_dir.mkdir()
+        
+        workflow_dict = {
+            "workflow": {
+                "name": "deeply_nested",
+                "tasks": [{"blueprint": "level1"}]
+            }
+        }
+        
+        workflow = WorkflowModel.create(
+            workflow_dict,
+            blueprints_catalog=blueprints_catalog,
+            vars_dir=vars_dir,
+            workflow_path=None,
+            workflow_roots=[str(tmp_path)]
+        )
+        
+        assert len(workflow.tasks) == 3
+        assert workflow.tasks[0].name == "task_level1"
+        assert workflow.tasks[1].name == "task_level2"
+        assert workflow.tasks[2].name == "task_level3"
+
+    def test_create_multiple_conditional_blueprints(self, tmp_path):
+        """Test workflow with multiple conditional blueprint references."""
+        blueprint_dir = tmp_path / "blueprints"
+        blueprint_dir.mkdir()
+        
+        bp1 = blueprint_dir / "conditional1.yaml"
+        bp1.write_text("""tasks:
+  - name: conditional_task1
+    args:
+      enabled: true
+""")
+        
+        bp2 = blueprint_dir / "conditional2.yaml"
+        bp2.write_text("""tasks:
+  - name: conditional_task2
+    args:
+      enabled: false
+""")
+        
+        blueprints_catalog = {"conditional1": bp1, "conditional2": bp2}
+        vars_dir = tmp_path / "vars"
+        vars_dir.mkdir()
+        
+        workflow_dict = {
+            "workflow": {
+                "name": "multiple_conditionals",
+                "vars": {"feature1": True, "feature2": False},
+                "tasks": [
+                    {"blueprint": "conditional1", "if": "{{ feature1 }}"},
+                    {"blueprint": "conditional2", "if": "{{ feature2 }}"}
+                ]
+            }
+        }
+        
+        workflow = WorkflowModel.create(
+            workflow_dict,
+            blueprints_catalog=blueprints_catalog,
+            vars_dir=vars_dir,
+            workflow_path=None,
+            workflow_roots=[str(tmp_path)]
+        )
+        
+        assert len(workflow.tasks) == 1
+        assert workflow.tasks[0].name == "conditional_task1"
\ No newline at end of file
diff --git a/tests/unit/settings/test_settings.py b/tests/unit/settings/test_settings.py
index 28668ea..764f031 100644
--- a/tests/unit/settings/test_settings.py
+++ b/tests/unit/settings/test_settings.py
@@ -1,10 +1,9 @@
-from unittest.mock import patch
-
-import yaml
 import pytest
+import yaml
 
-from nornflow.settings import NornFlowSettings, NORNFLOW_SETTINGS_MANDATORY, NORNFLOW_SETTINGS_OPTIONAL
-from nornflow.exceptions import ResourceError, SettingsError, NornFlowError
+from nornflow.constants import NORNFLOW_SETTINGS_MANDATORY, NORNFLOW_SETTINGS_OPTIONAL
+from nornflow.exceptions import SettingsError
+from nornflow.settings import NornFlowSettings
 
 
 def make_valid_settings_dict() -> dict[str, object]:
@@ -13,78 +12,264 @@ def make_valid_settings_dict() -> dict[str, object]:
     for idx, key in enumerate(NORNFLOW_SETTINGS_MANDATORY):
         data[key] = f"value_{idx}"
     for opt_key, opt_default in NORNFLOW_SETTINGS_OPTIONAL.items():
-        data[opt_key] = opt_default
+        if opt_key == "failure_strategy":
+            data[opt_key] = "skip-failed"
+        else:
+            data[opt_key] = opt_default
     return data
 
 
-def test_successful_load_and_optional_override():
+def test_settings_load_successful_load(tmp_path):
+    """Load from YAML and ensure core values are populated."""
+    settings_file = tmp_path / "test_settings.yaml"
+    settings_data = {
+        "nornir_config_file": "config.yaml",
+        "local_tasks": ["tasks"],
+        "vars_dir": "vars",
+    }
+    settings_file.write_text(yaml.dump(settings_data))
+
+    settings = NornFlowSettings.load(str(settings_file))
+
+    assert settings.nornir_config_file == str(tmp_path / "config.yaml")
+    assert settings.local_tasks == [str(tmp_path / "tasks")]
+    assert settings.vars_dir == str(tmp_path / "vars")
+
+
+def test_settings_load_file_not_found():
+    """Test error when settings file doesn't exist."""
+    with pytest.raises(SettingsError, match="Settings file not found"):
+        NornFlowSettings.load("nonexistent.yaml")
+
+
+def test_settings_load_invalid_yaml(tmp_path):
+    """Test error when YAML file is invalid."""
+    settings_file = tmp_path / "bad_settings.yaml"
+    settings_file.write_text("invalid: yaml: content:")
+
+    with pytest.raises(SettingsError, match="Failed to load settings"):
+        NornFlowSettings.load(str(settings_file))
+
+
+def test_settings_load_missing_required_field(tmp_path):
+    """Test error when required field is missing."""
+    settings_file = tmp_path / "incomplete_settings.yaml"
+    settings_data = {
+        "local_tasks": ["tasks"],
+    }
+    settings_file.write_text(yaml.dump(settings_data))
+
+    with pytest.raises(Exception):
+        NornFlowSettings.load(str(settings_file))
+
+
+def test_settings_load_with_overrides(tmp_path):
+    """Ensure overrides passed to load() are respected."""
+    settings_file = tmp_path / "test_settings.yaml"
+    settings_data = {
+        "nornir_config_file": "config.yaml",
+        "vars_dir": "vars",
+    }
+    settings_file.write_text(yaml.dump(settings_data))
+
+    settings = NornFlowSettings.load(str(settings_file), vars_dir="custom_vars")
+
+    assert settings.vars_dir == str(tmp_path / "custom_vars")
+
+
+def test_validate_processors_list():
+    """Test processor validation with list input."""
     settings_dict = make_valid_settings_dict()
-    opt_keys = list(NORNFLOW_SETTINGS_OPTIONAL.keys())
-    override_key = opt_keys[0] if opt_keys else None
+    settings_dict["processors"] = [
+        {"class": "MyProcessor", "args": {}},
+        {"class": "AnotherProcessor", "args": {"key": "value"}},
+    ]
 
-    with patch("nornflow.settings.load_file_to_dict", return_value=settings_dict):
-        if override_key:
-            s = NornFlowSettings(settings_file="ignored", **{override_key: "overridden"})
-            assert s.as_dict[override_key] == "overridden"
-        else:
-            s = NornFlowSettings(settings_file="ignored")
-        for k in NORNFLOW_SETTINGS_MANDATORY:
-            assert k in s.as_dict
-            assert s.as_dict[k] is not None
-        sample_key = NORNFLOW_SETTINGS_MANDATORY[0] if NORNFLOW_SETTINGS_MANDATORY else next(iter(s.as_dict))
-        assert getattr(s, sample_key) == s.as_dict[sample_key]
-        assert isinstance(str(s), str)
-        assert sample_key in str(s)
-
-
-def test_file_not_found_raises_resource_error():
-    with patch("nornflow.settings.load_file_to_dict", side_effect=FileNotFoundError()):
-        with pytest.raises(ResourceError):
-            NornFlowSettings(settings_file="missing.yaml")
-
-
-def test_permission_error_raises_resource_error():
-    with patch("nornflow.settings.load_file_to_dict", side_effect=PermissionError()):
-        with pytest.raises(ResourceError):
-            NornFlowSettings(settings_file="unreadable.yaml")
-
-
-def test_yaml_parse_error_raises_settings_error():
-    with patch("nornflow.settings.load_file_to_dict", side_effect=yaml.YAMLError("bad yaml")):
-        with pytest.raises(SettingsError):
-            NornFlowSettings(settings_file="bad.yaml")
-
-
-def test_load_returns_non_dict_raises_nornflow_error():
-    # Current implementation wraps non-dict load result into a NornFlowError
-    with patch("nornflow.settings.load_file_to_dict", return_value=["not", "a", "dict"]):
-        with pytest.raises(NornFlowError):
-            NornFlowSettings(settings_file="weird.yaml")
-
-
-def test_load_raises_type_error_is_wrapped_as_settings_error():
-    with patch("nornflow.settings.load_file_to_dict", side_effect=TypeError("type problem")):
-        with pytest.raises(SettingsError):
-            NornFlowSettings(settings_file="type.yaml")
-
-
-def test_missing_mandatory_setting_raises_settings_error():
-    if len(NORNFLOW_SETTINGS_MANDATORY) < 1:
-        pytest.skip("No mandatory settings defined")
-    partial = make_valid_settings_dict()
-    missing_key = NORNFLOW_SETTINGS_MANDATORY[0]
-    partial.pop(missing_key, None)
-    with patch("nornflow.settings.load_file_to_dict", return_value=partial):
-        with pytest.raises(SettingsError):
-            NornFlowSettings(settings_file="missing_mandatory.yaml")
-
-
-def test_empty_mandatory_setting_raises_settings_error():
-    if len(NORNFLOW_SETTINGS_MANDATORY) < 1:
-        pytest.skip("No mandatory settings defined")
-    data = make_valid_settings_dict()
-    empty_key = NORNFLOW_SETTINGS_MANDATORY[0]
-    data[empty_key] = ""
-    with patch("nornflow.settings.load_file_to_dict", return_value=data):
-        with pytest.raises(SettingsError):
-            NornFlowSettings(settings_file="empty_mandatory.yaml")
+    settings = NornFlowSettings(**settings_dict)
+
+    assert len(settings.processors) == 2
+    assert settings.processors[0]["class"] == "MyProcessor"
+
+
+def test_validate_processors_empty():
+    """Test processor validation with empty list."""
+    settings_dict = make_valid_settings_dict()
+    settings_dict["processors"] = []
+
+    settings = NornFlowSettings(**settings_dict)
+
+    assert settings.processors == []
+
+
+def test_validate_failure_strategy_string():
+    """Test failure strategy validation with string."""
+    settings_dict = make_valid_settings_dict()
+    settings_dict["failure_strategy"] = "fail-fast"
+
+    settings = NornFlowSettings(**settings_dict)
+
+    from nornflow.constants import FailureStrategy
+    assert settings.failure_strategy == FailureStrategy.FAIL_FAST
+
+
+def test_validate_failure_strategy_underscore():
+    """Test failure strategy validation with underscore format."""
+    settings_dict = make_valid_settings_dict()
+    settings_dict["failure_strategy"] = "skip_failed"
+
+    settings = NornFlowSettings(**settings_dict)
+
+    from nornflow.constants import FailureStrategy
+    assert settings.failure_strategy == FailureStrategy.SKIP_FAILED
+
+
+def test_validate_failure_strategy_invalid():
+    """Test failure strategy validation with invalid value."""
+    settings_dict = make_valid_settings_dict()
+    settings_dict["failure_strategy"] = "invalid-strategy"
+
+    with pytest.raises(Exception):
+        NornFlowSettings(**settings_dict)
+
+
+def test_relative_paths_resolved_via_load(tmp_path):
+    """Resolve all relative directories against the settings file location."""
+    settings_file = tmp_path / "test_settings.yaml"
+    settings_data = {
+        "nornir_config_file": "config.yaml",
+        "local_tasks": ["tasks", "nested/tasks"],
+        "local_workflows": ["workflows"],
+        "local_filters": ["filters"],
+        "local_hooks": ["hooks"],
+        "vars_dir": "vars",
+    }
+    settings_file.write_text(yaml.dump(settings_data))
+
+    settings = NornFlowSettings.load(str(settings_file))
+
+    assert settings.nornir_config_file == str(tmp_path / "config.yaml")
+    assert settings.local_tasks == [
+        str(tmp_path / "tasks"),
+        str(tmp_path / "nested/tasks"),
+    ]
+    assert settings.local_workflows == [str(tmp_path / "workflows")]
+    assert settings.local_filters == [str(tmp_path / "filters")]
+    assert settings.local_hooks == [str(tmp_path / "hooks")]
+    assert settings.vars_dir == str(tmp_path / "vars")
+
+
+def test_paths_remain_unresolved_with_direct_instantiation():
+    """Direct instantiation leaves incoming paths untouched."""
+    settings_dict = make_valid_settings_dict()
+    settings_dict["nornir_config_file"] = "config.yaml"
+    settings_dict["local_tasks"] = ["tasks"]
+    settings_dict["vars_dir"] = "vars"
+
+    settings = NornFlowSettings(**settings_dict)
+
+    assert settings.nornir_config_file == "config.yaml"
+    assert settings.local_tasks == ["tasks"]
+    assert settings.vars_dir == "vars"
+
+
+def test_absolute_paths_remain_unchanged(tmp_path):
+    """Absolute input paths should not be modified."""
+    settings_file = tmp_path / "test_settings.yaml"
+    abs_tasks_dir = "/absolute/path/to/tasks"
+    settings_data = {
+        "nornir_config_file": "/absolute/config.yaml",
+        "local_tasks": [abs_tasks_dir],
+        "vars_dir": "/absolute/vars",
+    }
+    settings_file.write_text(yaml.dump(settings_data))
+
+    settings = NornFlowSettings.load(str(settings_file))
+
+    assert settings.nornir_config_file == "/absolute/config.yaml"
+    assert settings.local_tasks == [abs_tasks_dir]
+    assert settings.vars_dir == "/absolute/vars"
+
+
+def test_as_dict_property():
+    """Test as_dict property."""
+    settings_dict = make_valid_settings_dict()
+    settings = NornFlowSettings(**settings_dict)
+
+    result = settings.as_dict
+
+    assert isinstance(result, dict)
+    assert "nornir_config_file" in result
+    assert "_base_dir" not in result
+    assert "_settings_file" not in result
+
+
+def test_base_dir_property_set_when_loaded(tmp_path):
+    """Loading from disk sets base_dir to the settings file directory."""
+    settings_file = tmp_path / "test_settings.yaml"
+    settings_data = {
+        "nornir_config_file": "config.yaml",
+        "local_tasks": ["tasks"],
+        "vars_dir": "vars",
+    }
+    settings_file.write_text(yaml.dump(settings_data))
+
+    settings = NornFlowSettings.load(str(settings_file))
+
+    assert settings.base_dir == tmp_path
+
+
+def test_base_dir_property_none_with_direct_instantiation():
+    """Direct instantiation leaves base_dir unset."""
+    settings_dict = make_valid_settings_dict()
+    settings = NornFlowSettings(**settings_dict)
+
+    assert settings.base_dir is None
+
+
+def test_loaded_settings_property():
+    """Test loaded_settings backward compatibility property."""
+    settings_dict = make_valid_settings_dict()
+    settings = NornFlowSettings(**settings_dict)
+
+    result = settings.loaded_settings
+
+    assert isinstance(result, dict)
+    assert result == settings.as_dict
+
+
+def test_local_blueprints_default_value():
+    """Test local_blueprints default value."""
+    settings_dict = make_valid_settings_dict()
+    settings = NornFlowSettings(**settings_dict)
+
+    assert settings.local_blueprints == ["blueprints"]
+
+
+def test_local_blueprints_single_directory(tmp_path):
+    """Test local_blueprints with single custom directory."""
+    settings_file = tmp_path / "test_settings.yaml"
+    settings_data = {
+        "nornir_config_file": "config.yaml",
+        "local_blueprints": ["custom_blueprints"],
+    }
+    settings_file.write_text(yaml.dump(settings_data))
+
+    settings = NornFlowSettings.load(str(settings_file))
+
+    assert settings.local_blueprints == [str(tmp_path / "custom_blueprints")]
+
+
+def test_local_blueprints_multiple_directories(tmp_path):
+    """Test local_blueprints with multiple directories."""
+    settings_file = tmp_path / "test_settings.yaml"
+    settings_data = {
+        "nornir_config_file": "config.yaml",
+        "local_blueprints": ["blueprints1", "blueprints2"],
+    }
+    settings_file.write_text(yaml.dump(settings_data))
+
+    settings = NornFlowSettings.load(str(settings_file))
+
+    assert len(settings.local_blueprints) == 2
+    assert str(tmp_path / "blueprints1") in settings.local_blueprints
+    assert str(tmp_path / "blueprints2") in settings.local_blueprints
diff --git a/tests/unit/vars/conftest.py b/tests/unit/vars/conftest.py
index b217102..cb66fa6 100644
--- a/tests/unit/vars/conftest.py
+++ b/tests/unit/vars/conftest.py
@@ -50,7 +50,6 @@ class _SimpleHostProxy:
             return MagicMock(name=item)
 
 
-# --------------------------------------------------------------------------- fixtures
 @pytest.fixture()
 def mock_host() -> MockHost:
     return MockHost()
@@ -118,7 +117,7 @@ def basic_manager(tmp_path) -> NornFlowVariablesManager:
     temp_vars_dir.mkdir()
     manager = NornFlowVariablesManager(vars_dir=str(temp_vars_dir))
     for name, func in ALL_FILTERS.items():
-        manager.jinja_env.filters[name] = func
+        manager._jinja2_manager.env.filters[name] = func
     return manager
 
 
@@ -137,7 +136,7 @@ def setup_manager(
         inline_workflow_vars={
             "backup_type": "full",
             "override_var": "workflow_value",
-            "workflow_var": "workflow_value",        # <- added for precedence test
+            "workflow_var": "workflow_value",
         },
         workflow_path=workflows_dir / "networking" / "config.yaml",
         workflow_roots=[str(workflows_dir)],
@@ -151,7 +150,7 @@ def setup_manager(
     manager.set_runtime_variable("complex_var", {"key": "value", "list": [1, 2, 3]}, "test_device")
 
     for name, func in ALL_FILTERS.items():
-        manager.jinja_env.filters[name] = func
+        manager._jinja2_manager.env.filters[name] = func
 
     with patch.dict(
         "os.environ",
@@ -183,4 +182,4 @@ def mock_result() -> MagicMock:
     result = MagicMock()
     result.result = "Router configuration backup"
     result.failed = False
-    return result
+    return result
diff --git a/tests/unit/vars/test_jinja_filters.py b/tests/unit/vars/test_jinja_filters.py
index 9648014..11c210a 100644
--- a/tests/unit/vars/test_jinja_filters.py
+++ b/tests/unit/vars/test_jinja_filters.py
@@ -1,3 +1,6 @@
+import random
+import pytest
+
 class TestJinjaFilters:
     def test_standard_jinja2_filters(self, setup_manager):
         """Test standard Jinja2 filters."""
@@ -83,7 +86,7 @@ class TestJinjaFilters:
     def test_utility_filters(self, setup_manager):
         """Test utility filters."""
         # Test random_choice with a seed for predictability
-        import random
+
 
         random.seed(42)  # Set seed for predictable results
 
@@ -119,3 +122,705 @@ class TestJinjaFilters:
         # Test splitx
         result = setup_manager.resolve_string("{{ 'a,b,c,d' | splitx(',', 2) }}", "test_device")
         assert result == "['a', 'b', 'c,d']"
+
+
+class TestIsSetFilter:
+    """Test suite for the is_set Jinja2 filter.
+
+    This filter checks if a variable exists and is not None in the Jinja2 context.
+    It supports nested paths using dot notation (e.g., 'my_var.nested.key') and
+    host namespace access (e.g., 'host.data.key').
+    """
+
+    def test_basic_variable_exists(self, setup_manager):
+        """Test that is_set returns True for existing non-None variables."""
+        setup_manager.set_runtime_variable("my_var", "value", "test_device")
+        result = setup_manager.resolve_string("{{ 'my_var' | is_set }}", "test_device")
+        assert result == "True"
+
+    def test_basic_variable_does_not_exist(self, setup_manager):
+        """Test that is_set returns False for undefined variables."""
+        result = setup_manager.resolve_string("{{ 'nonexistent' | is_set }}", "test_device")
+        assert result == "False"
+
+    def test_variable_is_none(self, setup_manager):
+        """Test that is_set returns False for variables set to None."""
+        setup_manager.set_runtime_variable("my_var", None, "test_device")
+        result = setup_manager.resolve_string("{{ 'my_var' | is_set }}", "test_device")
+        assert result == "False"
+
+    def test_nested_path_exists(self, setup_manager):
+        """Test nested path resolution for existing paths."""
+        setup_manager.set_runtime_variable("my_var", {"nested": {"key": "value"}}, "test_device")
+        result = setup_manager.resolve_string("{{ 'my_var.nested.key' | is_set }}", "test_device")
+        assert result == "True"
+
+    def test_nested_path_partial_exists(self, setup_manager):
+        """Test that is_set returns False if any part of the nested path is missing."""
+        setup_manager.set_runtime_variable("my_var", {"nested": {}}, "test_device")
+        result = setup_manager.resolve_string("{{ 'my_var.nested.key' | is_set }}", "test_device")
+        assert result == "False"
+
+    def test_nested_path_root_missing(self, setup_manager):
+        """Test that is_set returns False if the root of the nested path is missing."""
+        result = setup_manager.resolve_string("{{ 'my_var.nested.key' | is_set }}", "test_device")
+        assert result == "False"
+
+    def test_nested_path_with_none_intermediate(self, setup_manager):
+        """Test that is_set returns False if an intermediate value in the path is None."""
+        setup_manager.set_runtime_variable("my_var", {"nested": None}, "test_device")
+        result = setup_manager.resolve_string("{{ 'my_var.nested.key' | is_set }}", "test_device")
+        assert result == "False"
+
+    def test_host_namespace_runtime_variable_collision(self, setup_manager):
+        """Test that a runtime variable named 'host' does not satisfy host namespace lookups."""
+        # Simulate a runtime variable that collides with the host namespace name
+        setup_manager.set_runtime_variable("host", {"data": {"key": "value"}}, "test_device")
+        result = setup_manager.resolve_string("{{ 'host.data.key' | is_set }}", "test_device")
+        assert result == "False"  # Filter resolves "host" as the runtime variable, so host namespace traversal fails
+
+    def test_host_namespace_missing_host(self, setup_manager):
+        """Test that is_set returns False if host is not in context."""
+        result = setup_manager.resolve_string("{{ 'host.data.key' | is_set }}", "test_device")
+        assert result == "False"
+
+    def test_host_namespace_none_host(self, setup_manager):
+        """Test that is_set returns False if host is None."""
+        setup_manager.set_runtime_variable("host", None, "test_device")
+        result = setup_manager.resolve_string("{{ 'host.data.key' | is_set }}", "test_device")
+        assert result == "False"
+
+    def test_host_namespace_missing_data(self, setup_manager):
+        """Test that is_set returns False if host.data is missing."""
+        setup_manager.set_runtime_variable("host", {}, "test_device")
+        result = setup_manager.resolve_string("{{ 'host.data.key' | is_set }}", "test_device")
+        assert result == "False"
+
+    def test_host_namespace_nested_missing(self, setup_manager):
+        """Test nested host data access when key is missing."""
+        setup_manager.set_runtime_variable("host", {"data": {"key": {}}}, "test_device")
+        result = setup_manager.resolve_string("{{ 'host.data.key.subkey' | is_set }}", "test_device")
+        assert result == "False"
+
+    def test_empty_path(self, setup_manager):
+        """Test that is_set returns False for empty path strings."""
+        result = setup_manager.resolve_string("{{ '' | is_set }}", "test_device")
+        assert result == "False"
+
+    def test_none_path(self, setup_manager):
+        """Test that a variable named 'path' explicitly set to None is treated as unset."""
+        setup_manager.set_runtime_variable("path", None, "test_device")
+        result = setup_manager.resolve_string("{{ 'path' | is_set }}", "test_device")
+        assert result == "False"
+
+    def test_variable_set_to_non_string(self, setup_manager):
+        """Test that is_set returns True for variables set to non-string values."""
+        setup_manager.set_runtime_variable("path", 123, "test_device")
+        result = setup_manager.resolve_string("{{ 'path' | is_set }}", "test_device")
+        assert result == "True"
+
+    def test_jinja2_undefined_variable(self, setup_manager):
+        """Test that is_set returns False for Jinja2 Undefined objects."""
+        # Undefined variables are not set, so test missing var
+        result = setup_manager.resolve_string("{{ 'undefined_var' | is_set }}", "test_device")
+        assert result == "False"
+
+    def test_context_resolve_exception(self, setup_manager):
+        """Test that is_set returns False if context.resolve raises an exception."""
+        # Hard to simulate, but test with invalid path
+        result = setup_manager.resolve_string("{{ 'invalid.path' | is_set }}", "test_device")
+        assert result == "False"
+
+    def test_workflow_variable_exists(self, setup_manager):
+        """Test is_set with workflow-level variables."""
+        # Assuming workflow vars are set in manager
+        # For test, set as runtime
+        setup_manager.set_runtime_variable("workflow_var", "set", "test_device")
+        result = setup_manager.resolve_string("{{ 'workflow_var' | is_set }}", "test_device")
+        assert result == "True"
+
+    def test_runtime_variable_override(self, setup_manager):
+        """Test is_set with runtime variables overriding others."""
+        setup_manager.set_runtime_variable("override_var", "runtime", "test_device")
+        result = setup_manager.resolve_string("{{ 'override_var' | is_set }}", "test_device")
+        assert result == "True"
+
+    def test_nested_runtime_variable(self, setup_manager):
+        """Test nested path in runtime variables."""
+        setup_manager.set_runtime_variable("nested_var", {"level1": {"level2": "value"}}, "test_device")
+        result = setup_manager.resolve_string("{{ 'nested_var.level1.level2' | is_set }}", "test_device")
+        assert result == "True"
+
+    def test_zero_and_false_values(self, setup_manager):
+        """Test that zero, false, and empty strings are considered set."""
+        setup_manager.set_runtime_variable("zero", 0, "test_device")
+        setup_manager.set_runtime_variable("false", False, "test_device")
+        setup_manager.set_runtime_variable("empty", "", "test_device")
+        result_zero = setup_manager.resolve_string("{{ 'zero' | is_set }}", "test_device")
+        result_false = setup_manager.resolve_string("{{ 'false' | is_set }}", "test_device")
+        result_empty = setup_manager.resolve_string("{{ 'empty' | is_set }}", "test_device")
+        assert result_zero == "True"
+        assert result_false == "True"
+        assert result_empty == "True"
+
+    def test_is_set_with_list_index(self, setup_manager):
+        """Test is_set with list indexing."""
+        setup_manager.set_runtime_variable("my_list", [1, 2, 3], "test_device")
+        result = setup_manager.resolve_string("{{ 'my_list[0]' | is_set }}", "test_device")
+        assert result == "False"  # is_set doesn't support indexing, treats as string
+
+    def test_is_set_with_special_characters(self, setup_manager):
+        """Test is_set with variable names containing special characters."""
+        setup_manager.set_runtime_variable("var-with-dash", "value", "test_device")
+        result = setup_manager.resolve_string("{{ 'var-with-dash' | is_set }}", "test_device")
+        assert result == "True"
+
+    def test_is_set_case_sensitivity(self, setup_manager):
+        """Test is_set is case sensitive for variable names."""
+        setup_manager.set_runtime_variable("MyVar", "value", "test_device")
+        result = setup_manager.resolve_string("{{ 'myvar' | is_set }}", "test_device")
+        assert result == "False"
+
+    def test_host_namespace_exists(self, setup_manager):
+        """Test host namespace access when host data exists."""
+        result = setup_manager.resolve_string("{{ 'host.name' | is_set }}", "test_device")
+        assert result == "True"
+
+    def test_host_namespace_data_exists(self, setup_manager):
+        """Test host namespace for existing host data."""
+        result = setup_manager.resolve_string("{{ 'host.data.location.building' | is_set }}", "test_device")
+        assert result == "True"
+
+    def test_host_namespace_data_missing(self, setup_manager):
+        """Test host namespace for missing host data key."""
+        result = setup_manager.resolve_string("{{ 'host.data.missing' | is_set }}", "test_device")
+        assert result == "False"
+
+    def test_is_set_with_numeric_path(self, setup_manager):
+        """Test is_set with path containing numbers."""
+        setup_manager.set_runtime_variable("var1", {"2key": "value"}, "test_device")
+        result = setup_manager.resolve_string("{{ 'var1.2key' | is_set }}", "test_device")
+        assert result == "True"
+
+    def test_is_set_with_underscore_path(self, setup_manager):
+        """Test is_set with path containing underscores."""
+        setup_manager.set_runtime_variable("my_var", {"sub_key": "value"}, "test_device")
+        result = setup_manager.resolve_string("{{ 'my_var.sub_key' | is_set }}", "test_device")
+        assert result == "True"
+
+
+class TestCustomFilters:
+    """Test suite for additional custom Jinja2 filters not covered elsewhere."""
+
+    def test_regex_replace_basic(self, setup_manager):
+        """Test basic regex replacement."""
+        result = setup_manager.resolve_string("{{ 'abc123def' | regex_replace('123', 'XYZ') }}", "test_device")
+        assert result == "abcXYZdef"
+
+    def test_regex_replace_no_match(self, setup_manager):
+        """Test regex replacement when pattern doesn't match."""
+        result = setup_manager.resolve_string("{{ 'abcdef' | regex_replace('123', 'XYZ') }}", "test_device")
+        assert result == "abcdef"
+
+    def test_to_snake_case_basic(self, setup_manager):
+        """Test basic snake_case conversion."""
+        result = setup_manager.resolve_string("{{ 'MyVariableName' | to_snake_case }}", "test_device")
+        assert result == "my_variable_name"
+
+    def test_to_snake_case_already_snake(self, setup_manager):
+        """Test snake_case conversion on already snake_case string."""
+        result = setup_manager.resolve_string("{{ 'already_snake' | to_snake_case }}", "test_device")
+        assert result == "already_snake"
+
+    def test_to_kebab_case_basic(self, setup_manager):
+        """Test basic kebab-case conversion."""
+        result = setup_manager.resolve_string("{{ 'MyVariableName' | to_kebab_case }}", "test_device")
+        assert result == "my-variable-name"
+
+    def test_to_kebab_case_already_kebab(self, setup_manager):
+        """Test kebab-case conversion on already kebab-case string."""
+        result = setup_manager.resolve_string("{{ 'already-kebab' | to_kebab_case }}", "test_device")
+        assert result == "already-kebab"
+
+    def test_json_query_basic(self, setup_manager):
+        """Test basic JMESPath query."""
+        data = {"users": [{"name": "Alice", "age": 30}, {"name": "Bob", "age": 25}]}
+        setup_manager.set_runtime_variable("data", data, "test_device")
+        result = setup_manager.resolve_string("{{ data | json_query('users[*].name') }}", "test_device")
+        assert result == "['Alice', 'Bob']"
+
+    def test_json_query_invalid_query(self, setup_manager):
+        """Test JMESPath query with invalid syntax - raises TemplateError."""
+        data = {"key": "value"}
+        setup_manager.set_runtime_variable("data", data, "test_device")
+        with pytest.raises(Exception):  # Expect TemplateError due to uncaught exception
+            setup_manager.resolve_string("{{ data | json_query('invalid[') }}", "test_device")
+
+    def test_deep_merge_basic(self, setup_manager):
+        """Test basic deep merge of dictionaries."""
+        dict1 = {"a": 1, "b": {"c": 2}}
+        dict2 = {"b": {"d": 3}, "e": 4}
+        setup_manager.set_runtime_variable("dict1", dict1, "test_device")
+        setup_manager.set_runtime_variable("dict2", dict2, "test_device")
+        result = setup_manager.resolve_string("{{ dict1 | deep_merge(dict2) }}", "test_device")
+        expected = "{'a': 1, 'b': {'c': 2, 'd': 3}, 'e': 4}"
+        assert result == expected
+
+    def test_deep_merge_empty_dicts(self, setup_manager):
+        """Test deep merge with empty dictionaries."""
+        dict1 = {}
+        dict2 = {"a": 1}
+        setup_manager.set_runtime_variable("dict1", dict1, "test_device")
+        setup_manager.set_runtime_variable("dict2", dict2, "test_device")
+        result = setup_manager.resolve_string("{{ dict1 | deep_merge(dict2) }}", "test_device")
+        assert result == "{'a': 1}"
+
+    def test_random_choice_basic(self, setup_manager):
+        """Test random choice from list."""
+        random.seed(42)  # For predictable results
+        choices = ["a", "b", "c"]
+        setup_manager.set_runtime_variable("choices", choices, "test_device")
+        result = setup_manager.resolve_string("{{ choices | random_choice }}", "test_device")
+        assert result in choices
+
+    def test_random_choice_empty_list(self, setup_manager):
+        """Test random choice from empty list."""
+        setup_manager.set_runtime_variable("empty_list", [], "test_device")
+        result = setup_manager.resolve_string("{{ empty_list | random_choice }}", "test_device")
+        assert result == "None"
+
+    def test_flatten_list_empty(self, setup_manager):
+        """Test flatten_list with empty list."""
+        setup_manager.set_runtime_variable("empty", [], "test_device")
+        result = setup_manager.resolve_string("{{ empty | flatten_list }}", "test_device")
+        assert result == "[]"
+
+    def test_unique_list_empty(self, setup_manager):
+        """Test unique_list with empty list."""
+        setup_manager.set_runtime_variable("empty", [], "test_device")
+        result = setup_manager.resolve_string("{{ empty | unique_list }}", "test_device")
+        assert result == "[]"
+
+    def test_chunk_list_single_element(self, setup_manager):
+        """Test chunk_list with single element."""
+        setup_manager.set_runtime_variable("single", [1], "test_device")
+        result = setup_manager.resolve_string("{{ single | chunk_list(2) }}", "test_device")
+        assert result == "[[1]]"
+
+    def test_to_snake_case_numbers(self, setup_manager):
+        """Test to_snake_case with numbers."""
+        result = setup_manager.resolve_string("{{ 'Var123Name' | to_snake_case }}", "test_device")
+        assert result == "var123_name"
+
+    def test_json_query_empty_data(self, setup_manager):
+        """Test json_query with empty data."""
+        setup_manager.set_runtime_variable("empty_data", {}, "test_device")
+        result = setup_manager.resolve_string("{{ empty_data | json_query('key') }}", "test_device")
+        assert result == "None"
+
+    def test_deep_merge_overwrite(self, setup_manager):
+        """Test deep merge overwriting values."""
+        dict1 = {"a": 1}
+        dict2 = {"a": 2}
+        setup_manager.set_runtime_variable("dict1", dict1, "test_device")
+        setup_manager.set_runtime_variable("dict2", dict2, "test_device")
+        result = setup_manager.resolve_string("{{ dict1 | deep_merge(dict2) }}", "test_device")
+        assert result == "{'a': 2}"
+
+    def test_random_choice_single_item(self, setup_manager):
+        """Test random_choice with single item."""
+        setup_manager.set_runtime_variable("single", ["only"], "test_device")
+        result = setup_manager.resolve_string("{{ single | random_choice }}", "test_device")
+        assert result == "only"
+
+    def test_enumerate_empty(self, setup_manager):
+        """Test enumerate with empty list."""
+        setup_manager.set_runtime_variable("empty", [], "test_device")
+        result = setup_manager.resolve_string("{{ empty | enumerate }}", "test_device")
+        assert result == "[]"
+
+    def test_zip_different_lengths(self, setup_manager):
+        """Test zip with lists of different lengths."""
+        setup_manager.set_runtime_variable("short", ["a"], "test_device")
+        setup_manager.set_runtime_variable("long", [1, 2], "test_device")
+        result = setup_manager.resolve_string("{{ short | zip(long) }}", "test_device")
+        assert result == "[('a', 1)]"
+
+    def test_range_zero(self, setup_manager):
+        """Test range with zero."""
+        result = setup_manager.resolve_string("{{ 0 | range }}", "test_device")
+        assert result == "[]"
+
+    def test_divmod_by_zero(self, setup_manager):
+        """Test divmod by zero - raises ZeroDivisionError."""
+        with pytest.raises(Exception):  # Expect ZeroDivisionError or TemplateError
+            setup_manager.resolve_string("{{ 10 | divmod(0) }}", "test_device")
+
+    def test_splitx_maxsplit_zero(self, setup_manager):
+        """Test splitx with maxsplit 0."""
+        result = setup_manager.resolve_string("{{ 'a,b,c' | splitx(',', 0) }}", "test_device")
+        assert result == "['a,b,c']"
+
+    def test_regex_replace_with_groups(self, setup_manager):
+        """Test regex_replace with capture groups."""
+        result = setup_manager.resolve_string("{{ 'abc123def' | regex_replace('(\\d+)', '[\\1]') }}", "test_device")
+        assert result == "abc[\x01]def"
+
+    def test_to_snake_case_mixed_case(self, setup_manager):
+        """Test to_snake_case with mixed case and numbers."""
+        result = setup_manager.resolve_string("{{ 'XMLHttpRequest2' | to_snake_case }}", "test_device")
+        assert result == "xml_http_request2"
+
+    def test_to_kebab_case_mixed_case(self, setup_manager):
+        """Test to_kebab_case with mixed case."""
+        result = setup_manager.resolve_string("{{ 'XMLHttpRequest' | to_kebab_case }}", "test_device")
+        assert result == "xml-http-request"
+
+    def test_json_query_nested(self, setup_manager):
+        """Test json_query with nested queries."""
+        data = {"network": {"devices": [{"ip": "192.168.1.1"}, {"ip": "192.168.1.2"}]}}
+        setup_manager.set_runtime_variable("data", data, "test_device")
+        result = setup_manager.resolve_string("{{ data | json_query('network.devices[*].ip') }}", "test_device")
+        assert result == "['192.168.1.1', '192.168.1.2']"
+
+    def test_deep_merge_nested_overwrite(self, setup_manager):
+        """Test deep merge with nested overwrite."""
+        dict1 = {"a": {"b": 1, "c": 2}}
+        dict2 = {"a": {"b": 3}}
+        setup_manager.set_runtime_variable("dict1", dict1, "test_device")
+        setup_manager.set_runtime_variable("dict2", dict2, "test_device")
+        result = setup_manager.resolve_string("{{ dict1 | deep_merge(dict2) }}", "test_device")
+        expected = "{'a': {'b': 3, 'c': 2}}"
+        assert result == expected
+
+    def test_random_choice_with_seed(self, setup_manager):
+        """Test random_choice returns a member of the list (seeded only for reproducibility)."""
+
+        random.seed(123)
+        choices = ["x", "y", "z"]
+        setup_manager.set_runtime_variable("choices", choices, "test_device")
+        result = setup_manager.resolve_string("{{ choices | random_choice }}", "test_device")
+        assert result in choices
+
+    def test_flatten_list_nested(self, setup_manager):
+        """Test flatten_list with deeply nested lists."""
+        setup_manager.set_runtime_variable("nested", [[[1, 2]], [3, [4, 5]]], "test_device")
+        result = setup_manager.resolve_string("{{ nested | flatten_list }}", "test_device")
+        assert result == "[1, 2, 3, 4, 5]"
+
+    def test_unique_list_with_dicts(self, setup_manager):
+        """Test unique_list with unhashable types like dicts."""
+        setup_manager.set_runtime_variable("list_with_dicts", [{"a": 1}, {"a": 1}, {"b": 2}], "test_device")
+        with pytest.raises(Exception):
+            setup_manager.resolve_string("{{ list_with_dicts | unique_list }}", "test_device")
+
+    def test_chunk_list_chunk_size_one(self, setup_manager):
+        """Test chunk_list with chunk size 1."""
+        setup_manager.set_runtime_variable("list", [1, 2, 3], "test_device")
+        result = setup_manager.resolve_string("{{ list | chunk_list(1) }}", "test_device")
+        assert result == "[[1], [2], [3]]"
+
+    def test_enumerate_with_negative_start(self, setup_manager):
+        """Test enumerate with negative start."""
+        setup_manager.set_runtime_variable("items", ["a", "b"], "test_device")
+        result = setup_manager.resolve_string("{{ items | enumerate(-1) }}", "test_device")
+        assert result == "[(-1, 'a'), (0, 'b')]"
+
+    def test_zip_empty_lists(self, setup_manager):
+        """Test zip with empty lists."""
+        setup_manager.set_runtime_variable("empty1", [], "test_device")
+        setup_manager.set_runtime_variable("empty2", [], "test_device")
+        result = setup_manager.resolve_string("{{ empty1 | zip(empty2) }}", "test_device")
+        assert result == "[]"
+
+    def test_range_negative(self, setup_manager):
+        """Test range with negative number."""
+        result = setup_manager.resolve_string("{{ -3 | range }}", "test_device")
+        assert result == "[]"
+
+    def test_divmod_negative_divisor(self, setup_manager):
+        """Test divmod with negative divisor."""
+        result = setup_manager.resolve_string("{{ 10 | divmod(-3) }}", "test_device")
+        assert result == "(-4, -2)"
+
+    def test_splitx_negative_maxsplit(self, setup_manager):
+        """Test splitx with negative maxsplit."""
+        result = setup_manager.resolve_string("{{ 'a,b,c' | splitx(',', -1) }}", "test_device")
+        assert result == "['a', 'b', 'c']"
+
+    def test_json_query_single_value(self, setup_manager):
+        """Test json_query returning a single value."""
+        data = {"config": {"timeout": 30}}
+        setup_manager.set_runtime_variable("data", data, "test_device")
+        result = setup_manager.resolve_string("{{ data | json_query('config.timeout') }}", "test_device")
+        assert result == "30"
+
+    def test_deep_merge_with_lists(self, setup_manager):
+        """Test deep_merge with lists (should not merge lists)."""
+        dict1 = {"a": [1, 2]}
+        dict2 = {"a": [3, 4]}
+        setup_manager.set_runtime_variable("dict1", dict1, "test_device")
+        setup_manager.set_runtime_variable("dict2", dict2, "test_device")
+        result = setup_manager.resolve_string("{{ dict1 | deep_merge(dict2) }}", "test_device")
+        expected = "{'a': [3, 4]}"  # Overwrites list
+        assert result == expected
+
+    def test_random_choice_type_error(self, setup_manager):
+        """Test random_choice with a string input (a valid sequence): yields one character."""
+        setup_manager.set_runtime_variable("not_list", "string", "test_device")
+        result = setup_manager.resolve_string("{{ not_list | random_choice }}", "test_device")
+        assert result in "string"
+
+    def test_flatten_list_mixed_types(self, setup_manager):
+        """Test flatten_list with mixed types."""
+        setup_manager.set_runtime_variable("mixed", [[1, 2], "string", [3]], "test_device")
+        result = setup_manager.resolve_string("{{ mixed | flatten_list }}", "test_device")
+        assert result == "[1, 2, 'string', 3]"
+
+    def test_unique_list_mixed_types(self, setup_manager):
+        """Test unique_list with mixed types."""
+        setup_manager.set_runtime_variable("mixed", [1, "1", 1, 2], "test_device")
+        result = setup_manager.resolve_string("{{ mixed | unique_list }}", "test_device")
+        assert result == "[1, '1', 2]"
+
+    def test_chunk_list_large_chunk(self, setup_manager):
+        """Test chunk_list with chunk size larger than list."""
+        setup_manager.set_runtime_variable("small", [1, 2], "test_device")
+        result = setup_manager.resolve_string("{{ small | chunk_list(5) }}", "test_device")
+        assert result == "[[1, 2]]"
+
+    def test_enumerate_with_large_start(self, setup_manager):
+        """Test enumerate with large start value."""
+        setup_manager.set_runtime_variable("items", ["a"], "test_device")
+        result = setup_manager.resolve_string("{{ items | enumerate(100) }}", "test_device")
+        assert result == "[(100, 'a')]"
+
+    def test_zip_three_lists(self, setup_manager):
+        """Test zip with three lists."""
+        setup_manager.set_runtime_variable("a", [1, 2], "test_device")
+        setup_manager.set_runtime_variable("b", ["x", "y"], "test_device")
+        setup_manager.set_runtime_variable("c", [True, False], "test_device")
+        result = setup_manager.resolve_string("{{ a | zip(b, c) }}", "test_device")
+        assert result == "[(1, 'x', True), (2, 'y', False)]"
+
+    def test_range_large_number(self, setup_manager):
+        """Test range with a larger number (kept small to keep the test fast)."""
+        result = setup_manager.resolve_string("{{ 10 | range }}", "test_device")
+        assert result == "[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]"
+
+    def test_divmod_large_numbers(self, setup_manager):
+        """Test divmod with large numbers."""
+        result = setup_manager.resolve_string("{{ 100 | divmod(7) }}", "test_device")
+        assert result == "(14, 2)"
+
+    def test_splitx_empty_string(self, setup_manager):
+        """Test splitx with empty string."""
+        result = setup_manager.resolve_string("{{ '' | splitx(',', 2) }}", "test_device")
+        assert result == "['']"
+
+    def test_regex_replace_empty_pattern(self, setup_manager):
+        """Test regex_replace with empty pattern."""
+        result = setup_manager.resolve_string("{{ 'abc' | regex_replace('', 'X') }}", "test_device")
+        assert result == "XaXbXcX"
+
+    def test_to_snake_case_empty_string(self, setup_manager):
+        """Test to_snake_case with empty string."""
+        result = setup_manager.resolve_string("{{ '' | to_snake_case }}", "test_device")
+        assert result == ""
+
+    def test_to_kebab_case_empty_string(self, setup_manager):
+        """Test to_kebab_case with empty string."""
+        result = setup_manager.resolve_string("{{ '' | to_kebab_case }}", "test_device")
+        assert result == ""
+
+    def test_json_query_no_match(self, setup_manager):
+        """Test json_query with query that matches nothing."""
+        data = {"users": []}
+        setup_manager.set_runtime_variable("data", data, "test_device")
+        result = setup_manager.resolve_string("{{ data | json_query('users[*].name') }}", "test_device")
+        assert result == "[]"
+
+    def test_deep_merge_self_merge(self, setup_manager):
+        """Test deep_merge with same dict."""
+        dict1 = {"a": 1}
+        setup_manager.set_runtime_variable("dict1", dict1, "test_device")
+        result = setup_manager.resolve_string("{{ dict1 | deep_merge(dict1) }}", "test_device")
+        expected = "{'a': 1}"
+        assert result == expected
+
+    def test_is_set_with_none_value(self, setup_manager):
+        """Test is_set with None passed directly (not as string)."""
+        result = setup_manager.resolve_string("{{ None | is_set }}", "test_device")
+        assert result == "False"
+
+    def test_is_set_with_int_value(self, setup_manager):
+        """Test is_set with int passed directly."""
+        result = setup_manager.resolve_string("{{ 123 | is_set }}", "test_device")
+        assert result == "False"
+
+    def test_deep_merge_multiple_levels(self, setup_manager):
+        """Test deep merge with multiple nesting levels."""
+        dict1 = {"a": {"b": {"c": 1}}}
+        dict2 = {"a": {"b": {"d": 2}, "e": 3}}
+        setup_manager.set_runtime_variable("dict1", dict1, "test_device")
+        setup_manager.set_runtime_variable("dict2", dict2, "test_device")
+        result = setup_manager.resolve_string("{{ dict1 | deep_merge(dict2) }}", "test_device")
+        expected = "{'a': {'b': {'c': 1, 'd': 2}, 'e': 3}}"
+        assert result == expected
+
+    def test_json_query_with_null(self, setup_manager):
+        """Test json_query with null values."""
+        data = {"key": None}
+        setup_manager.set_runtime_variable("data", data, "test_device")
+        result = setup_manager.resolve_string("{{ data | json_query('key') }}", "test_device")
+        assert result == "None"
+
+    def test_regex_replace_case_sensitive(self, setup_manager):
+        """Test regex_replace is case sensitive by default."""
+        result = setup_manager.resolve_string("{{ 'ABC' | regex_replace('abc', 'XYZ') }}", "test_device")
+        assert result == "ABC"
+
+    def test_to_snake_case_consecutive_caps(self, setup_manager):
+        """Test to_snake_case with consecutive capital letters."""
+        result = setup_manager.resolve_string("{{ 'XMLParser' | to_snake_case }}", "test_device")
+        assert result == "xml_parser"
+
+    def test_to_kebab_case_consecutive_caps(self, setup_manager):
+        """Test to_kebab_case with consecutive capital letters."""
+        result = setup_manager.resolve_string("{{ 'XMLParser' | to_kebab_case }}", "test_device")
+        assert result == "xml-parser"
+
+    def test_chunk_list_zero_chunk_size(self, setup_manager):
+        """Test chunk_list with chunk size 0."""
+        setup_manager.set_runtime_variable("list", [1, 2, 3], "test_device")
+        with pytest.raises(Exception):  # Expect ZeroDivisionError or similar
+            setup_manager.resolve_string("{{ list | chunk_list(0) }}", "test_device")
+
+    def test_enumerate_with_zero_start(self, setup_manager):
+        """Test enumerate with zero start."""
+        setup_manager.set_runtime_variable("items", ["a", "b"], "test_device")
+        result = setup_manager.resolve_string("{{ items | enumerate(0) }}", "test_device")
+        assert result == "[(0, 'a'), (1, 'b')]"
+
+    def test_zip_single_list(self, setup_manager):
+        """Test zip with single list."""
+        setup_manager.set_runtime_variable("single", ["a", "b"], "test_device")
+        result = setup_manager.resolve_string("{{ single | zip }}", "test_device")
+        assert result == "[('a',), ('b',)]"
+
+    def test_range_one(self, setup_manager):
+        """Test range with 1."""
+        result = setup_manager.resolve_string("{{ 1 | range }}", "test_device")
+        assert result == "[0]"
+
+    def test_divmod_exact(self, setup_manager):
+        """Test divmod with exact division."""
+        result = setup_manager.resolve_string("{{ 10 | divmod(5) }}", "test_device")
+        assert result == "(2, 0)"
+
+    def test_splitx_no_separator(self, setup_manager):
+        """Test splitx with separator not in string."""
+        result = setup_manager.resolve_string("{{ 'abc' | splitx(',', 2) }}", "test_device")
+        assert result == "['abc']"
+
+    def test_deep_merge_no_overlap(self, setup_manager):
+        """Test deep_merge with no overlapping keys."""
+        dict1 = {"a": 1}
+        dict2 = {"b": 2}
+        setup_manager.set_runtime_variable("dict1", dict1, "test_device")
+        setup_manager.set_runtime_variable("dict2", dict2, "test_device")
+        result = setup_manager.resolve_string("{{ dict1 | deep_merge(dict2) }}", "test_device")
+        expected = "{'a': 1, 'b': 2}"
+        assert result == expected
+
+    def test_is_set_with_deeply_nested_none(self, setup_manager):
+        """Test is_set with deeply nested None values."""
+        setup_manager.set_runtime_variable("deep", {"a": {"b": None}}, "test_device")
+        result = setup_manager.resolve_string("{{ 'deep.a.b.c' | is_set }}", "test_device")
+        assert result == "False"
+
+    def test_regex_replace_multiple_groups(self, setup_manager):
+        """Test regex_replace with multiple capture groups."""
+        result = setup_manager.resolve_string("{{ 'a1b2c' | regex_replace('(\\w)(\\d)', '\\2\\1') }}", "test_device")
+        assert result == '\x02\x01\x02\x01c'
+
+    def test_flatten_list_with_tuples(self, setup_manager):
+        """Test flatten_list with tuples."""
+        setup_manager.set_runtime_variable("tuples", [(1, 2), [3, 4]], "test_device")
+        result = setup_manager.resolve_string("{{ tuples | flatten_list }}", "test_device")
+        assert result == "[(1, 2), 3, 4]"
+
+    def test_unique_list_with_tuples(self, setup_manager):
+        """Test unique_list with tuples."""
+        setup_manager.set_runtime_variable("tuples", [(1, 2), (1, 2), (3, 4)], "test_device")
+        result = setup_manager.resolve_string("{{ tuples | unique_list }}", "test_device")
+        assert result == "[(1, 2), (3, 4)]"
+
+    def test_chunk_list_empty_list(self, setup_manager):
+        """Test chunk_list with empty list."""
+        setup_manager.set_runtime_variable("empty", [], "test_device")
+        result = setup_manager.resolve_string("{{ empty | chunk_list(2) }}", "test_device")
+        assert result == "[]"
+
+    def test_json_query_with_array_index(self, setup_manager):
+        """Test json_query with array indexing."""
+        data = {"items": ["a", "b", "c"]}
+        setup_manager.set_runtime_variable("data", data, "test_device")
+        result = setup_manager.resolve_string("{{ data | json_query('items[1]') }}", "test_device")
+        assert result == "b"
+
+    def test_deep_merge_with_none_values(self, setup_manager):
+        """Test deep_merge with None values."""
+        dict1 = {"a": None}
+        dict2 = {"a": 1}
+        setup_manager.set_runtime_variable("dict1", dict1, "test_device")
+        setup_manager.set_runtime_variable("dict2", dict2, "test_device")
+        result = setup_manager.resolve_string("{{ dict1 | deep_merge(dict2) }}", "test_device")
+        expected = "{'a': 1}"
+        assert result == expected
+
+    def test_sorted_basic(self, setup_manager):
+        """Test sorted filter."""
+        setup_manager.set_runtime_variable("unsorted", [3, 1, 4, 1, 5], "test_device")
+        result = setup_manager.resolve_string("{{ unsorted | sorted }}", "test_device")
+        assert result == "[1, 1, 3, 4, 5]"
+
+    def test_reversed_basic(self, setup_manager):
+        """Test reversed filter."""
+        setup_manager.set_runtime_variable("list", [1, 2, 3], "test_device")
+        result = setup_manager.resolve_string("{{ list | reversed }}", "test_device")
+        assert result == "[3, 2, 1]"
+
+    def test_strip_basic(self, setup_manager):
+        """Test strip filter."""
+        result = setup_manager.resolve_string("{{ '  hello  ' | strip }}", "test_device")
+        assert result == "hello"
+
+    def test_joinx_basic(self, setup_manager):
+        """Test joinx filter."""
+        setup_manager.set_runtime_variable("list", ["a", "b", "c"], "test_device")
+        result = setup_manager.resolve_string("{{ '-' | joinx(list) }}", "test_device")
+        assert result == "a-b-c"
+
+    def test_type_basic(self, setup_manager):
+        """Test type filter."""
+        result = setup_manager.resolve_string("{{ 'string' | type }}", "test_device")
+        assert result == "str"
+
+    def test_any_basic(self, setup_manager):
+        """Test any filter."""
+        setup_manager.set_runtime_variable("list", [False, True, False], "test_device")
+        result = setup_manager.resolve_string("{{ list | any }}", "test_device")
+        assert result == "True"
+
+    def test_all_basic(self, setup_manager):
+        """Test all filter."""
+        setup_manager.set_runtime_variable("list", [True, True, True], "test_device")
+        result = setup_manager.resolve_string("{{ list | all }}", "test_device")
+        assert result == "True"
+
+    def test_len_basic(self, setup_manager):
+        """Test len filter."""
+        setup_manager.set_runtime_variable("list", [1, 2, 3], "test_device")
+        result = setup_manager.resolve_string("{{ list | len }}", "test_device")
+        assert result == "3"
\ No newline at end of file
diff --git a/tests/unit/vars/test_manager.py b/tests/unit/vars/test_manager.py
index 85f2299..c4a34fa 100644
--- a/tests/unit/vars/test_manager.py
+++ b/tests/unit/vars/test_manager.py
@@ -62,3 +62,20 @@ class TestVariableManager:
         assert setup_manager.get_nornflow_variable("workflow_var", "test_device") == "workflow_value"
         assert setup_manager.get_nornflow_variable("domain_var", "test_device") == "domain_value"
         assert setup_manager.get_nornflow_variable("global_var", "test_device") == "global_value"
+
+    def test_jinja2_manager_initialized(self, basic_manager):
+        """Test that Jinja2EnvironmentManager is properly initialized."""
+        assert basic_manager._jinja2_manager is not None
+        assert basic_manager._jinja2_manager.env is not None
+
+    def test_jinja2_environment_accessible(self, basic_manager):
+        """Test that Jinja2 environment is accessible through the manager."""
+        env = basic_manager._jinja2_manager.env
+        assert env is not None
+        assert hasattr(env, 'filters')
+
+    def test_custom_filters_registered(self, basic_manager):
+        """Test that NornFlow custom filters are registered in Jinja2 environment."""
+        env = basic_manager._jinja2_manager.env
+        assert "is_set" in env.filters
+        assert "flatten_list" in env.filters
\ No newline at end of file
diff --git a/tests/unit/vars/test_processors.py b/tests/unit/vars/test_processors.py
index 51f90f7..512004e 100644
--- a/tests/unit/vars/test_processors.py
+++ b/tests/unit/vars/test_processors.py
@@ -1,5 +1,4 @@
-# filepath: test_processors.py
-from unittest.mock import MagicMock
+from unittest.mock import MagicMock, patch
 
 
 class TestVariableProcessor:
@@ -75,3 +74,98 @@ class TestVariableProcessor:
         # No variable should be set, so just verify it doesn't raise an exception
         # and that the current_host_name is cleared
         assert processor.vars_manager.nornir_host_proxy.current_host_name is None
+
+    def test_requires_deferred_templates_no_hooks(self, setup_processor):
+        """Test _requires_deferred_templates returns False when no hooks are present."""
+        processor = setup_processor
+        task = MagicMock()
+        task.nornir.processors = []  # No processors with hooks
+
+        assert processor._requires_deferred_templates(task) is False
+
+    def test_requires_deferred_templates_hooks_without_flag(self, setup_processor):
+        """Test _requires_deferred_templates returns False when hooks don't declare the flag."""
+        processor = setup_processor
+        task = MagicMock()
+        mock_hook = MagicMock()
+        mock_hook.requires_deferred_templates = False
+        mock_processor = MagicMock()
+        mock_processor.task_hooks = [mock_hook]
+        task.nornir.processors = [mock_processor]
+
+        assert processor._requires_deferred_templates(task) is False
+
+    def test_requires_deferred_templates_hooks_with_flag(self, setup_processor):
+        """Test _requires_deferred_templates returns True when any hook declares the flag."""
+        processor = setup_processor
+        task = MagicMock()
+        mock_hook1 = MagicMock()
+        mock_hook1.requires_deferred_templates = False
+        mock_hook2 = MagicMock()
+        mock_hook2.requires_deferred_templates = True
+        mock_processor = MagicMock()
+        mock_processor.task_hooks = [mock_hook1, mock_hook2]
+        task.nornir.processors = [mock_processor]
+
+        assert processor._requires_deferred_templates(task) is True
+
+    def test_task_instance_started_deferred_mode(self, setup_processor, mock_host):
+        """Test task_instance_started stores params when deferred mode is required."""
+        processor = setup_processor
+        task = MagicMock()
+        task.name = "deferred_task"
+        task.params = {"command": "{{ host.name }}", "timeout": 30}
+        host = mock_host
+
+        # Mock _requires_deferred_templates to return True
+        with patch.object(processor, "_requires_deferred_templates", return_value=True):
+            processor.task_instance_started(task, host)
+
+        # Verify params were stored and cleared
+        key = (task.name, host.name)
+        assert key in processor._deferred_params
+        assert processor._deferred_params[key] == {"command": "{{ host.name }}", "timeout": 30}
+        assert task.params == {}  # Cleared for deferred processing
+
+    def test_resolve_deferred_params_with_stored_key(self, setup_processor, mock_host):
+        """Test resolve_deferred_params resolves and returns stored params."""
+        processor = setup_processor
+        task = MagicMock()
+        task.name = "test_task"
+        host = mock_host
+        key = (task.name, host.name)
+        original_params = {"command": "{{ host.name }}", "timeout": 30}
+        processor._deferred_params[key] = original_params
+
+        resolved = processor.resolve_deferred_params(task, host)
+
+        # Verify resolution occurred and key was removed
+        assert resolved["command"] == "test_device"  # Resolved via mock
+        assert resolved["timeout"] == 30
+        assert key not in processor._deferred_params
+
+    def test_resolve_deferred_params_missing_key(self, setup_processor, mock_host):
+        """Test resolve_deferred_params returns None when no deferred params exist."""
+        processor = setup_processor
+        task = MagicMock()
+        task.name = "missing_task"
+        host = mock_host
+
+        resolved = processor.resolve_deferred_params(task, host)
+
+        assert resolved is None
+
+    def test_task_instance_completed_cleans_unresolved_params(self, setup_processor, mock_host, mock_result):
+        """Test task_instance_completed cleans up unresolved deferred params."""
+        processor = setup_processor
+        task = MagicMock()
+        task.name = "cleanup_task"
+        host = mock_host
+        key = (task.name, host.name)
+        processor._deferred_params[key] = {"unresolved": "param"}
+
+        processor.task_instance_completed(task, host, mock_result)
+
+        # Verify params were cleaned up
+        assert key not in processor._deferred_params
+        assert processor.vars_manager.nornir_host_proxy.current_host_name is None
\ No newline at end of file
diff --git a/uv.lock b/uv.lock
index 736ff82..dab9fa2 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1,11 +1,6 @@
 version = 1
 revision = 2
 requires-python = ">=3.10"
-resolution-markers = [
-    "python_full_version >= '3.14' and platform_python_implementation != 'PyPy'",
-    "python_full_version < '3.14' and platform_python_implementation != 'PyPy'",
-    "platform_python_implementation == 'PyPy'",
-]
 
 [[package]]
 name = "annotated-types"
@@ -16,6 +11,20 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
 ]
 
+[[package]]
+name = "anyio"
+version = "4.12.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "exceptiongroup", marker = "python_full_version < '3.11'" },
+    { name = "idna" },
+    { name = "typing-extensions", marker = "python_full_version < '3.13'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/16/ce/8a777047513153587e5434fd752e89334ac33e379aa3497db860eeb60377/anyio-4.12.0.tar.gz", hash = "sha256:73c693b567b0c55130c104d0b43a9baf3aa6a31fc6110116509f27bf75e21ec0", size = 228266, upload-time = "2025-11-28T23:37:38.911Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/7f/9c/36c5c37947ebfb8c7f22e0eb6e4d188ee2d53aa3880f3f2744fb894f0cb1/anyio-4.12.0-py3-none-any.whl", hash = "sha256:dad2376a628f98eeca4881fc56cd06affd18f659b17a747d3ff0307ced94b1bb", size = 113362, upload-time = "2025-11-28T23:36:57.897Z" },
+]
+
 [[package]]
 name = "bcrypt"
 version = "5.0.0"
@@ -88,7 +97,7 @@ wheels = [
 
 [[package]]
 name = "black"
-version = "25.9.0"
+version = "25.12.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "click" },
@@ -100,34 +109,43 @@ dependencies = [
     { name = "tomli", marker = "python_full_version < '3.11'" },
     { name = "typing-extensions", marker = "python_full_version < '3.11'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/4b/43/20b5c90612d7bdb2bdbcceeb53d588acca3bb8f0e4c5d5c751a2c8fdd55a/black-25.9.0.tar.gz", hash = "sha256:0474bca9a0dd1b51791fcc507a4e02078a1c63f6d4e4ae5544b9848c7adfb619", size = 648393, upload-time = "2025-09-19T00:27:37.758Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/25/40/dbe31fc56b218a858c8fc6f5d8d3ba61c1fa7e989d43d4a4574b8b992840/black-25.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ce41ed2614b706fd55fd0b4a6909d06b5bab344ffbfadc6ef34ae50adba3d4f7", size = 1715605, upload-time = "2025-09-19T00:36:13.483Z" },
-    { url = "https://files.pythonhosted.org/packages/92/b2/f46800621200eab6479b1f4c0e3ede5b4c06b768e79ee228bc80270bcc74/black-25.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ab0ce111ef026790e9b13bd216fa7bc48edd934ffc4cbf78808b235793cbc92", size = 1571829, upload-time = "2025-09-19T00:32:42.13Z" },
-    { url = "https://files.pythonhosted.org/packages/4e/64/5c7f66bd65af5c19b4ea86062bb585adc28d51d37babf70969e804dbd5c2/black-25.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f96b6726d690c96c60ba682955199f8c39abc1ae0c3a494a9c62c0184049a713", size = 1631888, upload-time = "2025-09-19T00:30:54.212Z" },
-    { url = "https://files.pythonhosted.org/packages/3b/64/0b9e5bfcf67db25a6eef6d9be6726499a8a72ebab3888c2de135190853d3/black-25.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:d119957b37cc641596063cd7db2656c5be3752ac17877017b2ffcdb9dfc4d2b1", size = 1327056, upload-time = "2025-09-19T00:31:08.877Z" },
-    { url = "https://files.pythonhosted.org/packages/b7/f4/7531d4a336d2d4ac6cc101662184c8e7d068b548d35d874415ed9f4116ef/black-25.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:456386fe87bad41b806d53c062e2974615825c7a52159cde7ccaeb0695fa28fa", size = 1698727, upload-time = "2025-09-19T00:31:14.264Z" },
-    { url = "https://files.pythonhosted.org/packages/28/f9/66f26bfbbf84b949cc77a41a43e138d83b109502cd9c52dfc94070ca51f2/black-25.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a16b14a44c1af60a210d8da28e108e13e75a284bf21a9afa6b4571f96ab8bb9d", size = 1555679, upload-time = "2025-09-19T00:31:29.265Z" },
-    { url = "https://files.pythonhosted.org/packages/bf/59/61475115906052f415f518a648a9ac679d7afbc8da1c16f8fdf68a8cebed/black-25.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aaf319612536d502fdd0e88ce52d8f1352b2c0a955cc2798f79eeca9d3af0608", size = 1617453, upload-time = "2025-09-19T00:30:42.24Z" },
-    { url = "https://files.pythonhosted.org/packages/7f/5b/20fd5c884d14550c911e4fb1b0dae00d4abb60a4f3876b449c4d3a9141d5/black-25.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:c0372a93e16b3954208417bfe448e09b0de5cc721d521866cd9e0acac3c04a1f", size = 1333655, upload-time = "2025-09-19T00:30:56.715Z" },
-    { url = "https://files.pythonhosted.org/packages/fb/8e/319cfe6c82f7e2d5bfb4d3353c6cc85b523d677ff59edc61fdb9ee275234/black-25.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1b9dc70c21ef8b43248f1d86aedd2aaf75ae110b958a7909ad8463c4aa0880b0", size = 1742012, upload-time = "2025-09-19T00:33:08.678Z" },
-    { url = "https://files.pythonhosted.org/packages/94/cc/f562fe5d0a40cd2a4e6ae3f685e4c36e365b1f7e494af99c26ff7f28117f/black-25.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8e46eecf65a095fa62e53245ae2795c90bdecabd53b50c448d0a8bcd0d2e74c4", size = 1581421, upload-time = "2025-09-19T00:35:25.937Z" },
-    { url = "https://files.pythonhosted.org/packages/84/67/6db6dff1ebc8965fd7661498aea0da5d7301074b85bba8606a28f47ede4d/black-25.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9101ee58ddc2442199a25cb648d46ba22cd580b00ca4b44234a324e3ec7a0f7e", size = 1655619, upload-time = "2025-09-19T00:30:49.241Z" },
-    { url = "https://files.pythonhosted.org/packages/10/10/3faef9aa2a730306cf469d76f7f155a8cc1f66e74781298df0ba31f8b4c8/black-25.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:77e7060a00c5ec4b3367c55f39cf9b06e68965a4f2e61cecacd6d0d9b7ec945a", size = 1342481, upload-time = "2025-09-19T00:31:29.625Z" },
-    { url = "https://files.pythonhosted.org/packages/48/99/3acfea65f5e79f45472c45f87ec13037b506522719cd9d4ac86484ff51ac/black-25.9.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0172a012f725b792c358d57fe7b6b6e8e67375dd157f64fa7a3097b3ed3e2175", size = 1742165, upload-time = "2025-09-19T00:34:10.402Z" },
-    { url = "https://files.pythonhosted.org/packages/3a/18/799285282c8236a79f25d590f0222dbd6850e14b060dfaa3e720241fd772/black-25.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3bec74ee60f8dfef564b573a96b8930f7b6a538e846123d5ad77ba14a8d7a64f", size = 1581259, upload-time = "2025-09-19T00:32:49.685Z" },
-    { url = "https://files.pythonhosted.org/packages/f1/ce/883ec4b6303acdeca93ee06b7622f1fa383c6b3765294824165d49b1a86b/black-25.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b756fc75871cb1bcac5499552d771822fd9db5a2bb8db2a7247936ca48f39831", size = 1655583, upload-time = "2025-09-19T00:30:44.505Z" },
-    { url = "https://files.pythonhosted.org/packages/21/17/5c253aa80a0639ccc427a5c7144534b661505ae2b5a10b77ebe13fa25334/black-25.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:846d58e3ce7879ec1ffe816bb9df6d006cd9590515ed5d17db14e17666b2b357", size = 1343428, upload-time = "2025-09-19T00:32:13.839Z" },
-    { url = "https://files.pythonhosted.org/packages/1b/46/863c90dcd3f9d41b109b7f19032ae0db021f0b2a81482ba0a1e28c84de86/black-25.9.0-py3-none-any.whl", hash = "sha256:474b34c1342cdc157d307b56c4c65bce916480c4a8f6551fdc6bf9b486a7c4ae", size = 203363, upload-time = "2025-09-19T00:27:35.724Z" },
+sdist = { url = "https://files.pythonhosted.org/packages/c4/d9/07b458a3f1c525ac392b5edc6b191ff140b596f9d77092429417a54e249d/black-25.12.0.tar.gz", hash = "sha256:8d3dd9cea14bff7ddc0eb243c811cdb1a011ebb4800a5f0335a01a68654796a7", size = 659264, upload-time = "2025-12-08T01:40:52.501Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/37/d5/8d3145999d380e5d09bb00b0f7024bf0a8ccb5c07b5648e9295f02ec1d98/black-25.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f85ba1ad15d446756b4ab5f3044731bf68b777f8f9ac9cdabd2425b97cd9c4e8", size = 1895720, upload-time = "2025-12-08T01:46:58.197Z" },
+    { url = "https://files.pythonhosted.org/packages/06/97/7acc85c4add41098f4f076b21e3e4e383ad6ed0a3da26b2c89627241fc11/black-25.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:546eecfe9a3a6b46f9d69d8a642585a6eaf348bcbbc4d87a19635570e02d9f4a", size = 1727193, upload-time = "2025-12-08T01:52:26.674Z" },
+    { url = "https://files.pythonhosted.org/packages/24/f0/fdf0eb8ba907ddeb62255227d29d349e8256ef03558fbcadfbc26ecfe3b2/black-25.12.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:17dcc893da8d73d8f74a596f64b7c98ef5239c2cd2b053c0f25912c4494bf9ea", size = 1774506, upload-time = "2025-12-08T01:46:25.721Z" },
+    { url = "https://files.pythonhosted.org/packages/e4/f5/9203a78efe00d13336786b133c6180a9303d46908a9aa72d1104ca214222/black-25.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:09524b0e6af8ba7a3ffabdfc7a9922fb9adef60fed008c7cd2fc01f3048e6e6f", size = 1416085, upload-time = "2025-12-08T01:46:06.073Z" },
+    { url = "https://files.pythonhosted.org/packages/ba/cc/7a6090e6b081c3316282c05c546e76affdce7bf7a3b7d2c3a2a69438bd01/black-25.12.0-cp310-cp310-win_arm64.whl", hash = "sha256:b162653ed89eb942758efeb29d5e333ca5bb90e5130216f8369857db5955a7da", size = 1226038, upload-time = "2025-12-08T01:45:29.388Z" },
+    { url = "https://files.pythonhosted.org/packages/60/ad/7ac0d0e1e0612788dbc48e62aef8a8e8feffac7eb3d787db4e43b8462fa8/black-25.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d0cfa263e85caea2cff57d8f917f9f51adae8e20b610e2b23de35b5b11ce691a", size = 1877003, upload-time = "2025-12-08T01:43:29.967Z" },
+    { url = "https://files.pythonhosted.org/packages/e8/dd/a237e9f565f3617a88b49284b59cbca2a4f56ebe68676c1aad0ce36a54a7/black-25.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1a2f578ae20c19c50a382286ba78bfbeafdf788579b053d8e4980afb079ab9be", size = 1712639, upload-time = "2025-12-08T01:52:46.756Z" },
+    { url = "https://files.pythonhosted.org/packages/12/80/e187079df1ea4c12a0c63282ddd8b81d5107db6d642f7d7b75a6bcd6fc21/black-25.12.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d3e1b65634b0e471d07ff86ec338819e2ef860689859ef4501ab7ac290431f9b", size = 1758143, upload-time = "2025-12-08T01:45:29.137Z" },
+    { url = "https://files.pythonhosted.org/packages/93/b5/3096ccee4f29dc2c3aac57274326c4d2d929a77e629f695f544e159bfae4/black-25.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:a3fa71e3b8dd9f7c6ac4d818345237dfb4175ed3bf37cd5a581dbc4c034f1ec5", size = 1420698, upload-time = "2025-12-08T01:45:53.379Z" },
+    { url = "https://files.pythonhosted.org/packages/7e/39/f81c0ffbc25ffbe61c7d0385bf277e62ffc3e52f5ee668d7369d9854fadf/black-25.12.0-cp311-cp311-win_arm64.whl", hash = "sha256:51e267458f7e650afed8445dc7edb3187143003d52a1b710c7321aef22aa9655", size = 1229317, upload-time = "2025-12-08T01:46:35.606Z" },
+    { url = "https://files.pythonhosted.org/packages/d1/bd/26083f805115db17fda9877b3c7321d08c647df39d0df4c4ca8f8450593e/black-25.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:31f96b7c98c1ddaeb07dc0f56c652e25bdedaac76d5b68a059d998b57c55594a", size = 1924178, upload-time = "2025-12-08T01:49:51.048Z" },
+    { url = "https://files.pythonhosted.org/packages/89/6b/ea00d6651561e2bdd9231c4177f4f2ae19cc13a0b0574f47602a7519b6ca/black-25.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:05dd459a19e218078a1f98178c13f861fe6a9a5f88fc969ca4d9b49eb1809783", size = 1742643, upload-time = "2025-12-08T01:49:59.09Z" },
+    { url = "https://files.pythonhosted.org/packages/6d/f3/360fa4182e36e9875fabcf3a9717db9d27a8d11870f21cff97725c54f35b/black-25.12.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1f68c5eff61f226934be6b5b80296cf6939e5d2f0c2f7d543ea08b204bfaf59", size = 1800158, upload-time = "2025-12-08T01:44:27.301Z" },
+    { url = "https://files.pythonhosted.org/packages/f8/08/2c64830cb6616278067e040acca21d4f79727b23077633953081c9445d61/black-25.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:274f940c147ddab4442d316b27f9e332ca586d39c85ecf59ebdea82cc9ee8892", size = 1426197, upload-time = "2025-12-08T01:45:51.198Z" },
+    { url = "https://files.pythonhosted.org/packages/d4/60/a93f55fd9b9816b7432cf6842f0e3000fdd5b7869492a04b9011a133ee37/black-25.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:169506ba91ef21e2e0591563deda7f00030cb466e747c4b09cb0a9dae5db2f43", size = 1237266, upload-time = "2025-12-08T01:45:10.556Z" },
+    { url = "https://files.pythonhosted.org/packages/c8/52/c551e36bc95495d2aa1a37d50566267aa47608c81a53f91daa809e03293f/black-25.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a05ddeb656534c3e27a05a29196c962877c83fa5503db89e68857d1161ad08a5", size = 1923809, upload-time = "2025-12-08T01:46:55.126Z" },
+    { url = "https://files.pythonhosted.org/packages/a0/f7/aac9b014140ee56d247e707af8db0aae2e9efc28d4a8aba92d0abd7ae9d1/black-25.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9ec77439ef3e34896995503865a85732c94396edcc739f302c5673a2315e1e7f", size = 1742384, upload-time = "2025-12-08T01:49:37.022Z" },
+    { url = "https://files.pythonhosted.org/packages/74/98/38aaa018b2ab06a863974c12b14a6266badc192b20603a81b738c47e902e/black-25.12.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e509c858adf63aa61d908061b52e580c40eae0dfa72415fa47ac01b12e29baf", size = 1798761, upload-time = "2025-12-08T01:46:05.386Z" },
+    { url = "https://files.pythonhosted.org/packages/16/3a/a8ac542125f61574a3f015b521ca83b47321ed19bb63fe6d7560f348bfe1/black-25.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:252678f07f5bac4ff0d0e9b261fbb029fa530cfa206d0a636a34ab445ef8ca9d", size = 1429180, upload-time = "2025-12-08T01:45:34.903Z" },
+    { url = "https://files.pythonhosted.org/packages/e6/2d/bdc466a3db9145e946762d52cd55b1385509d9f9004fec1c97bdc8debbfb/black-25.12.0-cp313-cp313-win_arm64.whl", hash = "sha256:bc5b1c09fe3c931ddd20ee548511c64ebf964ada7e6f0763d443947fd1c603ce", size = 1239350, upload-time = "2025-12-08T01:46:09.458Z" },
+    { url = "https://files.pythonhosted.org/packages/35/46/1d8f2542210c502e2ae1060b2e09e47af6a5e5963cb78e22ec1a11170b28/black-25.12.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:0a0953b134f9335c2434864a643c842c44fba562155c738a2a37a4d61f00cad5", size = 1917015, upload-time = "2025-12-08T01:53:27.987Z" },
+    { url = "https://files.pythonhosted.org/packages/41/37/68accadf977672beb8e2c64e080f568c74159c1aaa6414b4cd2aef2d7906/black-25.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2355bbb6c3b76062870942d8cc450d4f8ac71f9c93c40122762c8784df49543f", size = 1741830, upload-time = "2025-12-08T01:54:36.861Z" },
+    { url = "https://files.pythonhosted.org/packages/ac/76/03608a9d8f0faad47a3af3a3c8c53af3367f6c0dd2d23a84710456c7ac56/black-25.12.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9678bd991cc793e81d19aeeae57966ee02909877cb65838ccffef24c3ebac08f", size = 1791450, upload-time = "2025-12-08T01:44:52.581Z" },
+    { url = "https://files.pythonhosted.org/packages/06/99/b2a4bd7dfaea7964974f947e1c76d6886d65fe5d24f687df2d85406b2609/black-25.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:97596189949a8aad13ad12fcbb4ae89330039b96ad6742e6f6b45e75ad5cfd83", size = 1452042, upload-time = "2025-12-08T01:46:13.188Z" },
+    { url = "https://files.pythonhosted.org/packages/b2/7c/d9825de75ae5dd7795d007681b752275ea85a1c5d83269b4b9c754c2aaab/black-25.12.0-cp314-cp314-win_arm64.whl", hash = "sha256:778285d9ea197f34704e3791ea9404cd6d07595745907dd2ce3da7a13627b29b", size = 1267446, upload-time = "2025-12-08T01:46:14.497Z" },
+    { url = "https://files.pythonhosted.org/packages/68/11/21331aed19145a952ad28fca2756a1433ee9308079bd03bd898e903a2e53/black-25.12.0-py3-none-any.whl", hash = "sha256:48ceb36c16dbc84062740049eef990bb2ce07598272e673c17d1a7720c71c828", size = 206191, upload-time = "2025-12-08T01:40:50.963Z" },
 ]
 
 [[package]]
 name = "certifi"
-version = "2025.10.5"
+version = "2025.11.12"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/4c/5b/b6ce21586237c77ce67d01dc5507039d444b630dd76611bbca2d8e5dcd91/certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43", size = 164519, upload-time = "2025-10-05T04:12:15.808Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de", size = 163286, upload-time = "2025-10-05T04:12:14.03Z" },
+    { url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" },
 ]
 
 [[package]]
@@ -303,14 +321,14 @@ wheels = [
 
 [[package]]
 name = "click"
-version = "8.3.0"
+version = "8.3.1"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "colorama", marker = "sys_platform == 'win32'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" },
+    { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" },
 ]
 
 [[package]]
@@ -324,101 +342,101 @@ wheels = [
 
 [[package]]
 name = "coverage"
-version = "7.11.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/1c/38/ee22495420457259d2f3390309505ea98f98a5eed40901cf62196abad006/coverage-7.11.0.tar.gz", hash = "sha256:167bd504ac1ca2af7ff3b81d245dfea0292c5032ebef9d66cc08a7d28c1b8050", size = 811905, upload-time = "2025-10-15T15:15:08.542Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/12/95/c49df0aceb5507a80b9fe5172d3d39bf23f05be40c23c8d77d556df96cec/coverage-7.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eb53f1e8adeeb2e78962bade0c08bfdc461853c7969706ed901821e009b35e31", size = 215800, upload-time = "2025-10-15T15:12:19.824Z" },
-    { url = "https://files.pythonhosted.org/packages/dc/c6/7bb46ce01ed634fff1d7bb53a54049f539971862cc388b304ff3c51b4f66/coverage-7.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d9a03ec6cb9f40a5c360f138b88266fd8f58408d71e89f536b4f91d85721d075", size = 216198, upload-time = "2025-10-15T15:12:22.549Z" },
-    { url = "https://files.pythonhosted.org/packages/94/b2/75d9d8fbf2900268aca5de29cd0a0fe671b0f69ef88be16767cc3c828b85/coverage-7.11.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0d7f0616c557cbc3d1c2090334eddcbb70e1ae3a40b07222d62b3aa47f608fab", size = 242953, upload-time = "2025-10-15T15:12:24.139Z" },
-    { url = "https://files.pythonhosted.org/packages/65/ac/acaa984c18f440170525a8743eb4b6c960ace2dbad80dc22056a437fc3c6/coverage-7.11.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e44a86a47bbdf83b0a3ea4d7df5410d6b1a0de984fbd805fa5101f3624b9abe0", size = 244766, upload-time = "2025-10-15T15:12:25.974Z" },
-    { url = "https://files.pythonhosted.org/packages/d8/0d/938d0bff76dfa4a6b228c3fc4b3e1c0e2ad4aa6200c141fcda2bd1170227/coverage-7.11.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:596763d2f9a0ee7eec6e643e29660def2eef297e1de0d334c78c08706f1cb785", size = 246625, upload-time = "2025-10-15T15:12:27.387Z" },
-    { url = "https://files.pythonhosted.org/packages/38/54/8f5f5e84bfa268df98f46b2cb396b1009734cfb1e5d6adb663d284893b32/coverage-7.11.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ef55537ff511b5e0a43edb4c50a7bf7ba1c3eea20b4f49b1490f1e8e0e42c591", size = 243568, upload-time = "2025-10-15T15:12:28.799Z" },
-    { url = "https://files.pythonhosted.org/packages/68/30/8ba337c2877fe3f2e1af0ed7ff4be0c0c4aca44d6f4007040f3ca2255e99/coverage-7.11.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9cbabd8f4d0d3dc571d77ae5bdbfa6afe5061e679a9d74b6797c48d143307088", size = 244665, upload-time = "2025-10-15T15:12:30.297Z" },
-    { url = "https://files.pythonhosted.org/packages/cc/fb/c6f1d6d9a665536b7dde2333346f0cc41dc6a60bd1ffc10cd5c33e7eb000/coverage-7.11.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e24045453384e0ae2a587d562df2a04d852672eb63051d16096d3f08aa4c7c2f", size = 242681, upload-time = "2025-10-15T15:12:32.326Z" },
-    { url = "https://files.pythonhosted.org/packages/be/38/1b532319af5f991fa153c20373291dc65c2bf532af7dbcffdeef745c8f79/coverage-7.11.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:7161edd3426c8d19bdccde7d49e6f27f748f3c31cc350c5de7c633fea445d866", size = 242912, upload-time = "2025-10-15T15:12:34.079Z" },
-    { url = "https://files.pythonhosted.org/packages/67/3d/f39331c60ef6050d2a861dc1b514fa78f85f792820b68e8c04196ad733d6/coverage-7.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d4ed4de17e692ba6415b0587bc7f12bc80915031fc9db46a23ce70fc88c9841", size = 243559, upload-time = "2025-10-15T15:12:35.809Z" },
-    { url = "https://files.pythonhosted.org/packages/4b/55/cb7c9df9d0495036ce582a8a2958d50c23cd73f84a23284bc23bd4711a6f/coverage-7.11.0-cp310-cp310-win32.whl", hash = "sha256:765c0bc8fe46f48e341ef737c91c715bd2a53a12792592296a095f0c237e09cf", size = 218266, upload-time = "2025-10-15T15:12:37.429Z" },
-    { url = "https://files.pythonhosted.org/packages/68/a8/b79cb275fa7bd0208767f89d57a1b5f6ba830813875738599741b97c2e04/coverage-7.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:24d6f3128f1b2d20d84b24f4074475457faedc3d4613a7e66b5e769939c7d969", size = 219169, upload-time = "2025-10-15T15:12:39.25Z" },
-    { url = "https://files.pythonhosted.org/packages/49/3a/ee1074c15c408ddddddb1db7dd904f6b81bc524e01f5a1c5920e13dbde23/coverage-7.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d58ecaa865c5b9fa56e35efc51d1014d4c0d22838815b9fce57a27dd9576847", size = 215912, upload-time = "2025-10-15T15:12:40.665Z" },
-    { url = "https://files.pythonhosted.org/packages/70/c4/9f44bebe5cb15f31608597b037d78799cc5f450044465bcd1ae8cb222fe1/coverage-7.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b679e171f1c104a5668550ada700e3c4937110dbdd153b7ef9055c4f1a1ee3cc", size = 216310, upload-time = "2025-10-15T15:12:42.461Z" },
-    { url = "https://files.pythonhosted.org/packages/42/01/5e06077cfef92d8af926bdd86b84fb28bf9bc6ad27343d68be9b501d89f2/coverage-7.11.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ca61691ba8c5b6797deb221a0d09d7470364733ea9c69425a640f1f01b7c5bf0", size = 246706, upload-time = "2025-10-15T15:12:44.001Z" },
-    { url = "https://files.pythonhosted.org/packages/40/b8/7a3f1f33b35cc4a6c37e759137533119560d06c0cc14753d1a803be0cd4a/coverage-7.11.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:aef1747ede4bd8ca9cfc04cc3011516500c6891f1b33a94add3253f6f876b7b7", size = 248634, upload-time = "2025-10-15T15:12:45.768Z" },
-    { url = "https://files.pythonhosted.org/packages/7a/41/7f987eb33de386bc4c665ab0bf98d15fcf203369d6aacae74f5dd8ec489a/coverage-7.11.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a1839d08406e4cba2953dcc0ffb312252f14d7c4c96919f70167611f4dee2623", size = 250741, upload-time = "2025-10-15T15:12:47.222Z" },
-    { url = "https://files.pythonhosted.org/packages/23/c1/a4e0ca6a4e83069fb8216b49b30a7352061ca0cb38654bd2dc96b7b3b7da/coverage-7.11.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e0eb0a2dcc62478eb5b4cbb80b97bdee852d7e280b90e81f11b407d0b81c4287", size = 246837, upload-time = "2025-10-15T15:12:48.904Z" },
-    { url = "https://files.pythonhosted.org/packages/5d/03/ced062a17f7c38b4728ff76c3acb40d8465634b20b4833cdb3cc3a74e115/coverage-7.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bc1fbea96343b53f65d5351d8fd3b34fd415a2670d7c300b06d3e14a5af4f552", size = 248429, upload-time = "2025-10-15T15:12:50.73Z" },
-    { url = "https://files.pythonhosted.org/packages/97/af/a7c6f194bb8c5a2705ae019036b8fe7f49ea818d638eedb15fdb7bed227c/coverage-7.11.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:214b622259dd0cf435f10241f1333d32caa64dbc27f8790ab693428a141723de", size = 246490, upload-time = "2025-10-15T15:12:52.646Z" },
-    { url = "https://files.pythonhosted.org/packages/ab/c3/aab4df02b04a8fde79068c3c41ad7a622b0ef2b12e1ed154da986a727c3f/coverage-7.11.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:258d9967520cca899695d4eb7ea38be03f06951d6ca2f21fb48b1235f791e601", size = 246208, upload-time = "2025-10-15T15:12:54.586Z" },
-    { url = "https://files.pythonhosted.org/packages/30/d8/e282ec19cd658238d60ed404f99ef2e45eed52e81b866ab1518c0d4163cf/coverage-7.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cf9e6ff4ca908ca15c157c409d608da77a56a09877b97c889b98fb2c32b6465e", size = 247126, upload-time = "2025-10-15T15:12:56.485Z" },
-    { url = "https://files.pythonhosted.org/packages/d1/17/a635fa07fac23adb1a5451ec756216768c2767efaed2e4331710342a3399/coverage-7.11.0-cp311-cp311-win32.whl", hash = "sha256:fcc15fc462707b0680cff6242c48625da7f9a16a28a41bb8fd7a4280920e676c", size = 218314, upload-time = "2025-10-15T15:12:58.365Z" },
-    { url = "https://files.pythonhosted.org/packages/2a/29/2ac1dfcdd4ab9a70026edc8d715ece9b4be9a1653075c658ee6f271f394d/coverage-7.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:865965bf955d92790f1facd64fe7ff73551bd2c1e7e6b26443934e9701ba30b9", size = 219203, upload-time = "2025-10-15T15:12:59.902Z" },
-    { url = "https://files.pythonhosted.org/packages/03/21/5ce8b3a0133179115af4c041abf2ee652395837cb896614beb8ce8ddcfd9/coverage-7.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:5693e57a065760dcbeb292d60cc4d0231a6d4b6b6f6a3191561e1d5e8820b745", size = 217879, upload-time = "2025-10-15T15:13:01.35Z" },
-    { url = "https://files.pythonhosted.org/packages/c4/db/86f6906a7c7edc1a52b2c6682d6dd9be775d73c0dfe2b84f8923dfea5784/coverage-7.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9c49e77811cf9d024b95faf86c3f059b11c0c9be0b0d61bc598f453703bd6fd1", size = 216098, upload-time = "2025-10-15T15:13:02.916Z" },
-    { url = "https://files.pythonhosted.org/packages/21/54/e7b26157048c7ba555596aad8569ff903d6cd67867d41b75287323678ede/coverage-7.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a61e37a403a778e2cda2a6a39abcc895f1d984071942a41074b5c7ee31642007", size = 216331, upload-time = "2025-10-15T15:13:04.403Z" },
-    { url = "https://files.pythonhosted.org/packages/b9/19/1ce6bf444f858b83a733171306134a0544eaddf1ca8851ede6540a55b2ad/coverage-7.11.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c79cae102bb3b1801e2ef1511fb50e91ec83a1ce466b2c7c25010d884336de46", size = 247825, upload-time = "2025-10-15T15:13:05.92Z" },
-    { url = "https://files.pythonhosted.org/packages/71/0b/d3bcbbc259fcced5fb67c5d78f6e7ee965f49760c14afd931e9e663a83b2/coverage-7.11.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:16ce17ceb5d211f320b62df002fa7016b7442ea0fd260c11cec8ce7730954893", size = 250573, upload-time = "2025-10-15T15:13:07.471Z" },
-    { url = "https://files.pythonhosted.org/packages/58/8d/b0ff3641a320abb047258d36ed1c21d16be33beed4152628331a1baf3365/coverage-7.11.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:80027673e9d0bd6aef86134b0771845e2da85755cf686e7c7c59566cf5a89115", size = 251706, upload-time = "2025-10-15T15:13:09.4Z" },
-    { url = "https://files.pythonhosted.org/packages/59/c8/5a586fe8c7b0458053d9c687f5cff515a74b66c85931f7fe17a1c958b4ac/coverage-7.11.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4d3ffa07a08657306cd2215b0da53761c4d73cb54d9143b9303a6481ec0cd415", size = 248221, upload-time = "2025-10-15T15:13:10.964Z" },
-    { url = "https://files.pythonhosted.org/packages/d0/ff/3a25e3132804ba44cfa9a778cdf2b73dbbe63ef4b0945e39602fc896ba52/coverage-7.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a3b6a5f8b2524fd6c1066bc85bfd97e78709bb5e37b5b94911a6506b65f47186", size = 249624, upload-time = "2025-10-15T15:13:12.5Z" },
-    { url = "https://files.pythonhosted.org/packages/c5/12/ff10c8ce3895e1b17a73485ea79ebc1896a9e466a9d0f4aef63e0d17b718/coverage-7.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fcc0a4aa589de34bc56e1a80a740ee0f8c47611bdfb28cd1849de60660f3799d", size = 247744, upload-time = "2025-10-15T15:13:14.554Z" },
-    { url = "https://files.pythonhosted.org/packages/16/02/d500b91f5471b2975947e0629b8980e5e90786fe316b6d7299852c1d793d/coverage-7.11.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:dba82204769d78c3fd31b35c3d5f46e06511936c5019c39f98320e05b08f794d", size = 247325, upload-time = "2025-10-15T15:13:16.438Z" },
-    { url = "https://files.pythonhosted.org/packages/77/11/dee0284fbbd9cd64cfce806b827452c6df3f100d9e66188e82dfe771d4af/coverage-7.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:81b335f03ba67309a95210caf3eb43bd6fe75a4e22ba653ef97b4696c56c7ec2", size = 249180, upload-time = "2025-10-15T15:13:17.959Z" },
-    { url = "https://files.pythonhosted.org/packages/59/1b/cdf1def928f0a150a057cab03286774e73e29c2395f0d30ce3d9e9f8e697/coverage-7.11.0-cp312-cp312-win32.whl", hash = "sha256:037b2d064c2f8cc8716fe4d39cb705779af3fbf1ba318dc96a1af858888c7bb5", size = 218479, upload-time = "2025-10-15T15:13:19.608Z" },
-    { url = "https://files.pythonhosted.org/packages/ff/55/e5884d55e031da9c15b94b90a23beccc9d6beee65e9835cd6da0a79e4f3a/coverage-7.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:d66c0104aec3b75e5fd897e7940188ea1892ca1d0235316bf89286d6a22568c0", size = 219290, upload-time = "2025-10-15T15:13:21.593Z" },
-    { url = "https://files.pythonhosted.org/packages/23/a8/faa930cfc71c1d16bc78f9a19bb73700464f9c331d9e547bfbc1dbd3a108/coverage-7.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:d91ebeac603812a09cf6a886ba6e464f3bbb367411904ae3790dfe28311b15ad", size = 217924, upload-time = "2025-10-15T15:13:23.39Z" },
-    { url = "https://files.pythonhosted.org/packages/60/7f/85e4dfe65e400645464b25c036a26ac226cf3a69d4a50c3934c532491cdd/coverage-7.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cc3f49e65ea6e0d5d9bd60368684fe52a704d46f9e7fc413918f18d046ec40e1", size = 216129, upload-time = "2025-10-15T15:13:25.371Z" },
-    { url = "https://files.pythonhosted.org/packages/96/5d/dc5fa98fea3c175caf9d360649cb1aa3715e391ab00dc78c4c66fabd7356/coverage-7.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f39ae2f63f37472c17b4990f794035c9890418b1b8cca75c01193f3c8d3e01be", size = 216380, upload-time = "2025-10-15T15:13:26.976Z" },
-    { url = "https://files.pythonhosted.org/packages/b2/f5/3da9cc9596708273385189289c0e4d8197d37a386bdf17619013554b3447/coverage-7.11.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7db53b5cdd2917b6eaadd0b1251cf4e7d96f4a8d24e174bdbdf2f65b5ea7994d", size = 247375, upload-time = "2025-10-15T15:13:28.923Z" },
-    { url = "https://files.pythonhosted.org/packages/65/6c/f7f59c342359a235559d2bc76b0c73cfc4bac7d61bb0df210965cb1ecffd/coverage-7.11.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10ad04ac3a122048688387828b4537bc9cf60c0bf4869c1e9989c46e45690b82", size = 249978, upload-time = "2025-10-15T15:13:30.525Z" },
-    { url = "https://files.pythonhosted.org/packages/e7/8c/042dede2e23525e863bf1ccd2b92689692a148d8b5fd37c37899ba882645/coverage-7.11.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4036cc9c7983a2b1f2556d574d2eb2154ac6ed55114761685657e38782b23f52", size = 251253, upload-time = "2025-10-15T15:13:32.174Z" },
-    { url = "https://files.pythonhosted.org/packages/7b/a9/3c58df67bfa809a7bddd786356d9c5283e45d693edb5f3f55d0986dd905a/coverage-7.11.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7ab934dd13b1c5e94b692b1e01bd87e4488cb746e3a50f798cb9464fd128374b", size = 247591, upload-time = "2025-10-15T15:13:34.147Z" },
-    { url = "https://files.pythonhosted.org/packages/26/5b/c7f32efd862ee0477a18c41e4761305de6ddd2d49cdeda0c1116227570fd/coverage-7.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59a6e5a265f7cfc05f76e3bb53eca2e0dfe90f05e07e849930fecd6abb8f40b4", size = 249411, upload-time = "2025-10-15T15:13:38.425Z" },
-    { url = "https://files.pythonhosted.org/packages/76/b5/78cb4f1e86c1611431c990423ec0768122905b03837e1b4c6a6f388a858b/coverage-7.11.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:df01d6c4c81e15a7c88337b795bb7595a8596e92310266b5072c7e301168efbd", size = 247303, upload-time = "2025-10-15T15:13:40.464Z" },
-    { url = "https://files.pythonhosted.org/packages/87/c9/23c753a8641a330f45f221286e707c427e46d0ffd1719b080cedc984ec40/coverage-7.11.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:8c934bd088eed6174210942761e38ee81d28c46de0132ebb1801dbe36a390dcc", size = 247157, upload-time = "2025-10-15T15:13:42.087Z" },
-    { url = "https://files.pythonhosted.org/packages/c5/42/6e0cc71dc8a464486e944a4fa0d85bdec031cc2969e98ed41532a98336b9/coverage-7.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a03eaf7ec24078ad64a07f02e30060aaf22b91dedf31a6b24d0d98d2bba7f48", size = 248921, upload-time = "2025-10-15T15:13:43.715Z" },
-    { url = "https://files.pythonhosted.org/packages/e8/1c/743c2ef665e6858cccb0f84377dfe3a4c25add51e8c7ef19249be92465b6/coverage-7.11.0-cp313-cp313-win32.whl", hash = "sha256:695340f698a5f56f795b2836abe6fb576e7c53d48cd155ad2f80fd24bc63a040", size = 218526, upload-time = "2025-10-15T15:13:45.336Z" },
-    { url = "https://files.pythonhosted.org/packages/ff/d5/226daadfd1bf8ddbccefbd3aa3547d7b960fb48e1bdac124e2dd13a2b71a/coverage-7.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:2727d47fce3ee2bac648528e41455d1b0c46395a087a229deac75e9f88ba5a05", size = 219317, upload-time = "2025-10-15T15:13:47.401Z" },
-    { url = "https://files.pythonhosted.org/packages/97/54/47db81dcbe571a48a298f206183ba8a7ba79200a37cd0d9f4788fcd2af4a/coverage-7.11.0-cp313-cp313-win_arm64.whl", hash = "sha256:0efa742f431529699712b92ecdf22de8ff198df41e43aeaaadf69973eb93f17a", size = 217948, upload-time = "2025-10-15T15:13:49.096Z" },
-    { url = "https://files.pythonhosted.org/packages/e5/8b/cb68425420154e7e2a82fd779a8cc01549b6fa83c2ad3679cd6c088ebd07/coverage-7.11.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:587c38849b853b157706407e9ebdca8fd12f45869edb56defbef2daa5fb0812b", size = 216837, upload-time = "2025-10-15T15:13:51.09Z" },
-    { url = "https://files.pythonhosted.org/packages/33/55/9d61b5765a025685e14659c8d07037247de6383c0385757544ffe4606475/coverage-7.11.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b971bdefdd75096163dd4261c74be813c4508477e39ff7b92191dea19f24cd37", size = 217061, upload-time = "2025-10-15T15:13:52.747Z" },
-    { url = "https://files.pythonhosted.org/packages/52/85/292459c9186d70dcec6538f06ea251bc968046922497377bf4a1dc9a71de/coverage-7.11.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:269bfe913b7d5be12ab13a95f3a76da23cf147be7fa043933320ba5625f0a8de", size = 258398, upload-time = "2025-10-15T15:13:54.45Z" },
-    { url = "https://files.pythonhosted.org/packages/1f/e2/46edd73fb8bf51446c41148d81944c54ed224854812b6ca549be25113ee0/coverage-7.11.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:dadbcce51a10c07b7c72b0ce4a25e4b6dcb0c0372846afb8e5b6307a121eb99f", size = 260574, upload-time = "2025-10-15T15:13:56.145Z" },
-    { url = "https://files.pythonhosted.org/packages/07/5e/1df469a19007ff82e2ca8fe509822820a31e251f80ee7344c34f6cd2ec43/coverage-7.11.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9ed43fa22c6436f7957df036331f8fe4efa7af132054e1844918866cd228af6c", size = 262797, upload-time = "2025-10-15T15:13:58.635Z" },
-    { url = "https://files.pythonhosted.org/packages/f9/50/de216b31a1434b94d9b34a964c09943c6be45069ec704bfc379d8d89a649/coverage-7.11.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9516add7256b6713ec08359b7b05aeff8850c98d357784c7205b2e60aa2513fa", size = 257361, upload-time = "2025-10-15T15:14:00.409Z" },
-    { url = "https://files.pythonhosted.org/packages/82/1e/3f9f8344a48111e152e0fd495b6fff13cc743e771a6050abf1627a7ba918/coverage-7.11.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb92e47c92fcbcdc692f428da67db33337fa213756f7adb6a011f7b5a7a20740", size = 260349, upload-time = "2025-10-15T15:14:02.188Z" },
-    { url = "https://files.pythonhosted.org/packages/65/9b/3f52741f9e7d82124272f3070bbe316006a7de1bad1093f88d59bfc6c548/coverage-7.11.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d06f4fc7acf3cabd6d74941d53329e06bab00a8fe10e4df2714f0b134bfc64ef", size = 258114, upload-time = "2025-10-15T15:14:03.907Z" },
-    { url = "https://files.pythonhosted.org/packages/0b/8b/918f0e15f0365d50d3986bbd3338ca01178717ac5678301f3f547b6619e6/coverage-7.11.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:6fbcee1a8f056af07ecd344482f711f563a9eb1c2cad192e87df00338ec3cdb0", size = 256723, upload-time = "2025-10-15T15:14:06.324Z" },
-    { url = "https://files.pythonhosted.org/packages/44/9e/7776829f82d3cf630878a7965a7d70cc6ca94f22c7d20ec4944f7148cb46/coverage-7.11.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dbbf012be5f32533a490709ad597ad8a8ff80c582a95adc8d62af664e532f9ca", size = 259238, upload-time = "2025-10-15T15:14:08.002Z" },
-    { url = "https://files.pythonhosted.org/packages/9a/b8/49cf253e1e7a3bedb85199b201862dd7ca4859f75b6cf25ffa7298aa0760/coverage-7.11.0-cp313-cp313t-win32.whl", hash = "sha256:cee6291bb4fed184f1c2b663606a115c743df98a537c969c3c64b49989da96c2", size = 219180, upload-time = "2025-10-15T15:14:09.786Z" },
-    { url = "https://files.pythonhosted.org/packages/ac/e1/1a541703826be7ae2125a0fb7f821af5729d56bb71e946e7b933cc7a89a4/coverage-7.11.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a386c1061bf98e7ea4758e4313c0ab5ecf57af341ef0f43a0bf26c2477b5c268", size = 220241, upload-time = "2025-10-15T15:14:11.471Z" },
-    { url = "https://files.pythonhosted.org/packages/d5/d1/5ee0e0a08621140fd418ec4020f595b4d52d7eb429ae6a0c6542b4ba6f14/coverage-7.11.0-cp313-cp313t-win_arm64.whl", hash = "sha256:f9ea02ef40bb83823b2b04964459d281688fe173e20643870bb5d2edf68bc836", size = 218510, upload-time = "2025-10-15T15:14:13.46Z" },
-    { url = "https://files.pythonhosted.org/packages/f4/06/e923830c1985ce808e40a3fa3eb46c13350b3224b7da59757d37b6ce12b8/coverage-7.11.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c770885b28fb399aaf2a65bbd1c12bf6f307ffd112d6a76c5231a94276f0c497", size = 216110, upload-time = "2025-10-15T15:14:15.157Z" },
-    { url = "https://files.pythonhosted.org/packages/42/82/cdeed03bfead45203fb651ed756dfb5266028f5f939e7f06efac4041dad5/coverage-7.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a3d0e2087dba64c86a6b254f43e12d264b636a39e88c5cc0a01a7c71bcfdab7e", size = 216395, upload-time = "2025-10-15T15:14:16.863Z" },
-    { url = "https://files.pythonhosted.org/packages/fc/ba/e1c80caffc3199aa699813f73ff097bc2df7b31642bdbc7493600a8f1de5/coverage-7.11.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:73feb83bb41c32811973b8565f3705caf01d928d972b72042b44e97c71fd70d1", size = 247433, upload-time = "2025-10-15T15:14:18.589Z" },
-    { url = "https://files.pythonhosted.org/packages/80/c0/5b259b029694ce0a5bbc1548834c7ba3db41d3efd3474489d7efce4ceb18/coverage-7.11.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c6f31f281012235ad08f9a560976cc2fc9c95c17604ff3ab20120fe480169bca", size = 249970, upload-time = "2025-10-15T15:14:20.307Z" },
-    { url = "https://files.pythonhosted.org/packages/8c/86/171b2b5e1aac7e2fd9b43f7158b987dbeb95f06d1fbecad54ad8163ae3e8/coverage-7.11.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9570ad567f880ef675673992222746a124b9595506826b210fbe0ce3f0499cd", size = 251324, upload-time = "2025-10-15T15:14:22.419Z" },
-    { url = "https://files.pythonhosted.org/packages/1a/7e/7e10414d343385b92024af3932a27a1caf75c6e27ee88ba211221ff1a145/coverage-7.11.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8badf70446042553a773547a61fecaa734b55dc738cacf20c56ab04b77425e43", size = 247445, upload-time = "2025-10-15T15:14:24.205Z" },
-    { url = "https://files.pythonhosted.org/packages/c4/3b/e4f966b21f5be8c4bf86ad75ae94efa0de4c99c7bbb8114476323102e345/coverage-7.11.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a09c1211959903a479e389685b7feb8a17f59ec5a4ef9afde7650bd5eabc2777", size = 249324, upload-time = "2025-10-15T15:14:26.234Z" },
-    { url = "https://files.pythonhosted.org/packages/00/a2/8479325576dfcd909244d0df215f077f47437ab852ab778cfa2f8bf4d954/coverage-7.11.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:5ef83b107f50db3f9ae40f69e34b3bd9337456c5a7fe3461c7abf8b75dd666a2", size = 247261, upload-time = "2025-10-15T15:14:28.42Z" },
-    { url = "https://files.pythonhosted.org/packages/7b/d8/3a9e2db19d94d65771d0f2e21a9ea587d11b831332a73622f901157cc24b/coverage-7.11.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:f91f927a3215b8907e214af77200250bb6aae36eca3f760f89780d13e495388d", size = 247092, upload-time = "2025-10-15T15:14:30.784Z" },
-    { url = "https://files.pythonhosted.org/packages/b3/b1/bbca3c472544f9e2ad2d5116b2379732957048be4b93a9c543fcd0207e5f/coverage-7.11.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:cdbcd376716d6b7fbfeedd687a6c4be019c5a5671b35f804ba76a4c0a778cba4", size = 248755, upload-time = "2025-10-15T15:14:32.585Z" },
-    { url = "https://files.pythonhosted.org/packages/89/49/638d5a45a6a0f00af53d6b637c87007eb2297042186334e9923a61aa8854/coverage-7.11.0-cp314-cp314-win32.whl", hash = "sha256:bab7ec4bb501743edc63609320aaec8cd9188b396354f482f4de4d40a9d10721", size = 218793, upload-time = "2025-10-15T15:14:34.972Z" },
-    { url = "https://files.pythonhosted.org/packages/30/cc/b675a51f2d068adb3cdf3799212c662239b0ca27f4691d1fff81b92ea850/coverage-7.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:3d4ba9a449e9364a936a27322b20d32d8b166553bfe63059bd21527e681e2fad", size = 219587, upload-time = "2025-10-15T15:14:37.047Z" },
-    { url = "https://files.pythonhosted.org/packages/93/98/5ac886876026de04f00820e5094fe22166b98dcb8b426bf6827aaf67048c/coverage-7.11.0-cp314-cp314-win_arm64.whl", hash = "sha256:ce37f215223af94ef0f75ac68ea096f9f8e8c8ec7d6e8c346ee45c0d363f0479", size = 218168, upload-time = "2025-10-15T15:14:38.861Z" },
-    { url = "https://files.pythonhosted.org/packages/14/d1/b4145d35b3e3ecf4d917e97fc8895bcf027d854879ba401d9ff0f533f997/coverage-7.11.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:f413ce6e07e0d0dc9c433228727b619871532674b45165abafe201f200cc215f", size = 216850, upload-time = "2025-10-15T15:14:40.651Z" },
-    { url = "https://files.pythonhosted.org/packages/ca/d1/7f645fc2eccd318369a8a9948acc447bb7c1ade2911e31d3c5620544c22b/coverage-7.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:05791e528a18f7072bf5998ba772fe29db4da1234c45c2087866b5ba4dea710e", size = 217071, upload-time = "2025-10-15T15:14:42.755Z" },
-    { url = "https://files.pythonhosted.org/packages/54/7d/64d124649db2737ceced1dfcbdcb79898d5868d311730f622f8ecae84250/coverage-7.11.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cacb29f420cfeb9283b803263c3b9a068924474ff19ca126ba9103e1278dfa44", size = 258570, upload-time = "2025-10-15T15:14:44.542Z" },
-    { url = "https://files.pythonhosted.org/packages/6c/3f/6f5922f80dc6f2d8b2c6f974835c43f53eb4257a7797727e6ca5b7b2ec1f/coverage-7.11.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314c24e700d7027ae3ab0d95fbf8d53544fca1f20345fd30cd219b737c6e58d3", size = 260738, upload-time = "2025-10-15T15:14:46.436Z" },
-    { url = "https://files.pythonhosted.org/packages/0e/5f/9e883523c4647c860b3812b417a2017e361eca5b635ee658387dc11b13c1/coverage-7.11.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:630d0bd7a293ad2fc8b4b94e5758c8b2536fdf36c05f1681270203e463cbfa9b", size = 262994, upload-time = "2025-10-15T15:14:48.3Z" },
-    { url = "https://files.pythonhosted.org/packages/07/bb/43b5a8e94c09c8bf51743ffc65c4c841a4ca5d3ed191d0a6919c379a1b83/coverage-7.11.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e89641f5175d65e2dbb44db15fe4ea48fade5d5bbb9868fdc2b4fce22f4a469d", size = 257282, upload-time = "2025-10-15T15:14:50.236Z" },
-    { url = "https://files.pythonhosted.org/packages/aa/e5/0ead8af411411330b928733e1d201384b39251a5f043c1612970310e8283/coverage-7.11.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c9f08ea03114a637dab06cedb2e914da9dc67fa52c6015c018ff43fdde25b9c2", size = 260430, upload-time = "2025-10-15T15:14:52.413Z" },
-    { url = "https://files.pythonhosted.org/packages/ae/66/03dd8bb0ba5b971620dcaac145461950f6d8204953e535d2b20c6b65d729/coverage-7.11.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ce9f3bde4e9b031eaf1eb61df95c1401427029ea1bfddb8621c1161dcb0fa02e", size = 258190, upload-time = "2025-10-15T15:14:54.268Z" },
-    { url = "https://files.pythonhosted.org/packages/45/ae/28a9cce40bf3174426cb2f7e71ee172d98e7f6446dff936a7ccecee34b14/coverage-7.11.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:e4dc07e95495923d6fd4d6c27bf70769425b71c89053083843fd78f378558996", size = 256658, upload-time = "2025-10-15T15:14:56.436Z" },
-    { url = "https://files.pythonhosted.org/packages/5c/7c/3a44234a8599513684bfc8684878fd7b126c2760f79712bb78c56f19efc4/coverage-7.11.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:424538266794db2861db4922b05d729ade0940ee69dcf0591ce8f69784db0e11", size = 259342, upload-time = "2025-10-15T15:14:58.538Z" },
-    { url = "https://files.pythonhosted.org/packages/e1/e6/0108519cba871af0351725ebdb8660fd7a0fe2ba3850d56d32490c7d9b4b/coverage-7.11.0-cp314-cp314t-win32.whl", hash = "sha256:4c1eeb3fb8eb9e0190bebafd0462936f75717687117339f708f395fe455acc73", size = 219568, upload-time = "2025-10-15T15:15:00.382Z" },
-    { url = "https://files.pythonhosted.org/packages/c9/76/44ba876e0942b4e62fdde23ccb029ddb16d19ba1bef081edd00857ba0b16/coverage-7.11.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b56efee146c98dbf2cf5cffc61b9829d1e94442df4d7398b26892a53992d3547", size = 220687, upload-time = "2025-10-15T15:15:02.322Z" },
-    { url = "https://files.pythonhosted.org/packages/b9/0c/0df55ecb20d0d0ed5c322e10a441775e1a3a5d78c60f0c4e1abfe6fcf949/coverage-7.11.0-cp314-cp314t-win_arm64.whl", hash = "sha256:b5c2705afa83f49bd91962a4094b6b082f94aef7626365ab3f8f4bd159c5acf3", size = 218711, upload-time = "2025-10-15T15:15:04.575Z" },
-    { url = "https://files.pythonhosted.org/packages/5f/04/642c1d8a448ae5ea1369eac8495740a79eb4e581a9fb0cbdce56bbf56da1/coverage-7.11.0-py3-none-any.whl", hash = "sha256:4b7589765348d78fb4e5fb6ea35d07564e387da2fc5efff62e0222971f155f68", size = 207761, upload-time = "2025-10-15T15:15:06.439Z" },
+version = "7.13.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/23/f9/e92df5e07f3fc8d4c7f9a0f146ef75446bf870351cd37b788cf5897f8079/coverage-7.13.1.tar.gz", hash = "sha256:b7593fe7eb5feaa3fbb461ac79aac9f9fc0387a5ca8080b0c6fe2ca27b091afd", size = 825862, upload-time = "2025-12-28T15:42:56.969Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/2d/9a/3742e58fd04b233df95c012ee9f3dfe04708a5e1d32613bd2d47d4e1be0d/coverage-7.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e1fa280b3ad78eea5be86f94f461c04943d942697e0dac889fa18fff8f5f9147", size = 218633, upload-time = "2025-12-28T15:40:10.165Z" },
+    { url = "https://files.pythonhosted.org/packages/7e/45/7e6bdc94d89cd7c8017ce735cf50478ddfe765d4fbf0c24d71d30ea33d7a/coverage-7.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c3d8c679607220979434f494b139dfb00131ebf70bb406553d69c1ff01a5c33d", size = 219147, upload-time = "2025-12-28T15:40:12.069Z" },
+    { url = "https://files.pythonhosted.org/packages/f7/38/0d6a258625fd7f10773fe94097dc16937a5f0e3e0cdf3adef67d3ac6baef/coverage-7.13.1-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:339dc63b3eba969067b00f41f15ad161bf2946613156fb131266d8debc8e44d0", size = 245894, upload-time = "2025-12-28T15:40:13.556Z" },
+    { url = "https://files.pythonhosted.org/packages/27/58/409d15ea487986994cbd4d06376e9860e9b157cfbfd402b1236770ab8dd2/coverage-7.13.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:db622b999ffe49cb891f2fff3b340cdc2f9797d01a0a202a0973ba2562501d90", size = 247721, upload-time = "2025-12-28T15:40:15.37Z" },
+    { url = "https://files.pythonhosted.org/packages/da/bf/6e8056a83fd7a96c93341f1ffe10df636dd89f26d5e7b9ca511ce3bcf0df/coverage-7.13.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1443ba9acbb593fa7c1c29e011d7c9761545fe35e7652e85ce7f51a16f7e08d", size = 249585, upload-time = "2025-12-28T15:40:17.226Z" },
+    { url = "https://files.pythonhosted.org/packages/f4/15/e1daff723f9f5959acb63cbe35b11203a9df77ee4b95b45fffd38b318390/coverage-7.13.1-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c832ec92c4499ac463186af72f9ed4d8daec15499b16f0a879b0d1c8e5cf4a3b", size = 246597, upload-time = "2025-12-28T15:40:19.028Z" },
+    { url = "https://files.pythonhosted.org/packages/74/a6/1efd31c5433743a6ddbc9d37ac30c196bb07c7eab3d74fbb99b924c93174/coverage-7.13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:562ec27dfa3f311e0db1ba243ec6e5f6ab96b1edfcfc6cf86f28038bc4961ce6", size = 247626, upload-time = "2025-12-28T15:40:20.846Z" },
+    { url = "https://files.pythonhosted.org/packages/6d/9f/1609267dd3e749f57fdd66ca6752567d1c13b58a20a809dc409b263d0b5f/coverage-7.13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4de84e71173d4dada2897e5a0e1b7877e5eefbfe0d6a44edee6ce31d9b8ec09e", size = 245629, upload-time = "2025-12-28T15:40:22.397Z" },
+    { url = "https://files.pythonhosted.org/packages/e2/f6/6815a220d5ec2466383d7cc36131b9fa6ecbe95c50ec52a631ba733f306a/coverage-7.13.1-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:a5a68357f686f8c4d527a2dc04f52e669c2fc1cbde38f6f7eb6a0e58cbd17cae", size = 245901, upload-time = "2025-12-28T15:40:23.836Z" },
+    { url = "https://files.pythonhosted.org/packages/ac/58/40576554cd12e0872faf6d2c0eb3bc85f71d78427946ddd19ad65201e2c0/coverage-7.13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:77cc258aeb29a3417062758975521eae60af6f79e930d6993555eeac6a8eac29", size = 246505, upload-time = "2025-12-28T15:40:25.421Z" },
+    { url = "https://files.pythonhosted.org/packages/3b/77/9233a90253fba576b0eee81707b5781d0e21d97478e5377b226c5b096c0f/coverage-7.13.1-cp310-cp310-win32.whl", hash = "sha256:bb4f8c3c9a9f34423dba193f241f617b08ffc63e27f67159f60ae6baf2dcfe0f", size = 221257, upload-time = "2025-12-28T15:40:27.217Z" },
+    { url = "https://files.pythonhosted.org/packages/e0/43/e842ff30c1a0a623ec80db89befb84a3a7aad7bfe44a6ea77d5a3e61fedd/coverage-7.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:c8e2706ceb622bc63bac98ebb10ef5da80ed70fbd8a7999a5076de3afaef0fb1", size = 222191, upload-time = "2025-12-28T15:40:28.916Z" },
+    { url = "https://files.pythonhosted.org/packages/b4/9b/77baf488516e9ced25fc215a6f75d803493fc3f6a1a1227ac35697910c2a/coverage-7.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a55d509a1dc5a5b708b5dad3b5334e07a16ad4c2185e27b40e4dba796ab7f88", size = 218755, upload-time = "2025-12-28T15:40:30.812Z" },
+    { url = "https://files.pythonhosted.org/packages/d7/cd/7ab01154e6eb79ee2fab76bf4d89e94c6648116557307ee4ebbb85e5c1bf/coverage-7.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4d010d080c4888371033baab27e47c9df7d6fb28d0b7b7adf85a4a49be9298b3", size = 219257, upload-time = "2025-12-28T15:40:32.333Z" },
+    { url = "https://files.pythonhosted.org/packages/01/d5/b11ef7863ffbbdb509da0023fad1e9eda1c0eaea61a6d2ea5b17d4ac706e/coverage-7.13.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d938b4a840fb1523b9dfbbb454f652967f18e197569c32266d4d13f37244c3d9", size = 249657, upload-time = "2025-12-28T15:40:34.1Z" },
+    { url = "https://files.pythonhosted.org/packages/f7/7c/347280982982383621d29b8c544cf497ae07ac41e44b1ca4903024131f55/coverage-7.13.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bf100a3288f9bb7f919b87eb84f87101e197535b9bd0e2c2b5b3179633324fee", size = 251581, upload-time = "2025-12-28T15:40:36.131Z" },
+    { url = "https://files.pythonhosted.org/packages/82/f6/ebcfed11036ade4c0d75fa4453a6282bdd225bc073862766eec184a4c643/coverage-7.13.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef6688db9bf91ba111ae734ba6ef1a063304a881749726e0d3575f5c10a9facf", size = 253691, upload-time = "2025-12-28T15:40:37.626Z" },
+    { url = "https://files.pythonhosted.org/packages/02/92/af8f5582787f5d1a8b130b2dcba785fa5e9a7a8e121a0bb2220a6fdbdb8a/coverage-7.13.1-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0b609fc9cdbd1f02e51f67f51e5aee60a841ef58a68d00d5ee2c0faf357481a3", size = 249799, upload-time = "2025-12-28T15:40:39.47Z" },
+    { url = "https://files.pythonhosted.org/packages/24/aa/0e39a2a3b16eebf7f193863323edbff38b6daba711abaaf807d4290cf61a/coverage-7.13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c43257717611ff5e9a1d79dce8e47566235ebda63328718d9b65dd640bc832ef", size = 251389, upload-time = "2025-12-28T15:40:40.954Z" },
+    { url = "https://files.pythonhosted.org/packages/73/46/7f0c13111154dc5b978900c0ccee2e2ca239b910890e674a77f1363d483e/coverage-7.13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e09fbecc007f7b6afdfb3b07ce5bd9f8494b6856dd4f577d26c66c391b829851", size = 249450, upload-time = "2025-12-28T15:40:42.489Z" },
+    { url = "https://files.pythonhosted.org/packages/ac/ca/e80da6769e8b669ec3695598c58eef7ad98b0e26e66333996aee6316db23/coverage-7.13.1-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:a03a4f3a19a189919c7055098790285cc5c5b0b3976f8d227aea39dbf9f8bfdb", size = 249170, upload-time = "2025-12-28T15:40:44.279Z" },
+    { url = "https://files.pythonhosted.org/packages/af/18/9e29baabdec1a8644157f572541079b4658199cfd372a578f84228e860de/coverage-7.13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3820778ea1387c2b6a818caec01c63adc5b3750211af6447e8dcfb9b6f08dbba", size = 250081, upload-time = "2025-12-28T15:40:45.748Z" },
+    { url = "https://files.pythonhosted.org/packages/00/f8/c3021625a71c3b2f516464d322e41636aea381018319050a8114105872ee/coverage-7.13.1-cp311-cp311-win32.whl", hash = "sha256:ff10896fa55167371960c5908150b434b71c876dfab97b69478f22c8b445ea19", size = 221281, upload-time = "2025-12-28T15:40:47.232Z" },
+    { url = "https://files.pythonhosted.org/packages/27/56/c216625f453df6e0559ed666d246fcbaaa93f3aa99eaa5080cea1229aa3d/coverage-7.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:a998cc0aeeea4c6d5622a3754da5a493055d2d95186bad877b0a34ea6e6dbe0a", size = 222215, upload-time = "2025-12-28T15:40:49.19Z" },
+    { url = "https://files.pythonhosted.org/packages/5c/9a/be342e76f6e531cae6406dc46af0d350586f24d9b67fdfa6daee02df71af/coverage-7.13.1-cp311-cp311-win_arm64.whl", hash = "sha256:fea07c1a39a22614acb762e3fbbb4011f65eedafcb2948feeef641ac78b4ee5c", size = 220886, upload-time = "2025-12-28T15:40:51.067Z" },
+    { url = "https://files.pythonhosted.org/packages/ce/8a/87af46cccdfa78f53db747b09f5f9a21d5fc38d796834adac09b30a8ce74/coverage-7.13.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6f34591000f06e62085b1865c9bc5f7858df748834662a51edadfd2c3bfe0dd3", size = 218927, upload-time = "2025-12-28T15:40:52.814Z" },
+    { url = "https://files.pythonhosted.org/packages/82/a8/6e22fdc67242a4a5a153f9438d05944553121c8f4ba70cb072af4c41362e/coverage-7.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b67e47c5595b9224599016e333f5ec25392597a89d5744658f837d204e16c63e", size = 219288, upload-time = "2025-12-28T15:40:54.262Z" },
+    { url = "https://files.pythonhosted.org/packages/d0/0a/853a76e03b0f7c4375e2ca025df45c918beb367f3e20a0a8e91967f6e96c/coverage-7.13.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3e7b8bd70c48ffb28461ebe092c2345536fb18bbbf19d287c8913699735f505c", size = 250786, upload-time = "2025-12-28T15:40:56.059Z" },
+    { url = "https://files.pythonhosted.org/packages/ea/b4/694159c15c52b9f7ec7adf49d50e5f8ee71d3e9ef38adb4445d13dd56c20/coverage-7.13.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c223d078112e90dc0e5c4e35b98b9584164bea9fbbd221c0b21c5241f6d51b62", size = 253543, upload-time = "2025-12-28T15:40:57.585Z" },
+    { url = "https://files.pythonhosted.org/packages/96/b2/7f1f0437a5c855f87e17cf5d0dc35920b6440ff2b58b1ba9788c059c26c8/coverage-7.13.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:794f7c05af0763b1bbd1b9e6eff0e52ad068be3b12cd96c87de037b01390c968", size = 254635, upload-time = "2025-12-28T15:40:59.443Z" },
+    { url = "https://files.pythonhosted.org/packages/e9/d1/73c3fdb8d7d3bddd9473c9c6a2e0682f09fc3dfbcb9c3f36412a7368bcab/coverage-7.13.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0642eae483cc8c2902e4af7298bf886d605e80f26382124cddc3967c2a3df09e", size = 251202, upload-time = "2025-12-28T15:41:01.328Z" },
+    { url = "https://files.pythonhosted.org/packages/66/3c/f0edf75dcc152f145d5598329e864bbbe04ab78660fe3e8e395f9fff010f/coverage-7.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f5e772ed5fef25b3de9f2008fe67b92d46831bd2bc5bdc5dd6bfd06b83b316f", size = 252566, upload-time = "2025-12-28T15:41:03.319Z" },
+    { url = "https://files.pythonhosted.org/packages/17/b3/e64206d3c5f7dcbceafd14941345a754d3dbc78a823a6ed526e23b9cdaab/coverage-7.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:45980ea19277dc0a579e432aef6a504fe098ef3a9032ead15e446eb0f1191aee", size = 250711, upload-time = "2025-12-28T15:41:06.411Z" },
+    { url = "https://files.pythonhosted.org/packages/dc/ad/28a3eb970a8ef5b479ee7f0c484a19c34e277479a5b70269dc652b730733/coverage-7.13.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:e4f18eca6028ffa62adbd185a8f1e1dd242f2e68164dba5c2b74a5204850b4cf", size = 250278, upload-time = "2025-12-28T15:41:08.285Z" },
+    { url = "https://files.pythonhosted.org/packages/54/e3/c8f0f1a93133e3e1291ca76cbb63565bd4b5c5df63b141f539d747fff348/coverage-7.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f8dca5590fec7a89ed6826fce625595279e586ead52e9e958d3237821fbc750c", size = 252154, upload-time = "2025-12-28T15:41:09.969Z" },
+    { url = "https://files.pythonhosted.org/packages/d0/bf/9939c5d6859c380e405b19e736321f1c7d402728792f4c752ad1adcce005/coverage-7.13.1-cp312-cp312-win32.whl", hash = "sha256:ff86d4e85188bba72cfb876df3e11fa243439882c55957184af44a35bd5880b7", size = 221487, upload-time = "2025-12-28T15:41:11.468Z" },
+    { url = "https://files.pythonhosted.org/packages/fa/dc/7282856a407c621c2aad74021680a01b23010bb8ebf427cf5eacda2e876f/coverage-7.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:16cc1da46c04fb0fb128b4dc430b78fa2aba8a6c0c9f8eb391fd5103409a6ac6", size = 222299, upload-time = "2025-12-28T15:41:13.386Z" },
+    { url = "https://files.pythonhosted.org/packages/10/79/176a11203412c350b3e9578620013af35bcdb79b651eb976f4a4b32044fa/coverage-7.13.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d9bc218650022a768f3775dd7fdac1886437325d8d295d923ebcfef4892ad5c", size = 220941, upload-time = "2025-12-28T15:41:14.975Z" },
+    { url = "https://files.pythonhosted.org/packages/a3/a4/e98e689347a1ff1a7f67932ab535cef82eb5e78f32a9e4132e114bbb3a0a/coverage-7.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cb237bfd0ef4d5eb6a19e29f9e528ac67ac3be932ea6b44fb6cc09b9f3ecff78", size = 218951, upload-time = "2025-12-28T15:41:16.653Z" },
+    { url = "https://files.pythonhosted.org/packages/32/33/7cbfe2bdc6e2f03d6b240d23dc45fdaf3fd270aaf2d640be77b7f16989ab/coverage-7.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1dcb645d7e34dcbcc96cd7c132b1fc55c39263ca62eb961c064eb3928997363b", size = 219325, upload-time = "2025-12-28T15:41:18.609Z" },
+    { url = "https://files.pythonhosted.org/packages/59/f6/efdabdb4929487baeb7cb2a9f7dac457d9356f6ad1b255be283d58b16316/coverage-7.13.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3d42df8201e00384736f0df9be2ced39324c3907607d17d50d50116c989d84cd", size = 250309, upload-time = "2025-12-28T15:41:20.629Z" },
+    { url = "https://files.pythonhosted.org/packages/12/da/91a52516e9d5aea87d32d1523f9cdcf7a35a3b298e6be05d6509ba3cfab2/coverage-7.13.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fa3edde1aa8807de1d05934982416cb3ec46d1d4d91e280bcce7cca01c507992", size = 252907, upload-time = "2025-12-28T15:41:22.257Z" },
+    { url = "https://files.pythonhosted.org/packages/75/38/f1ea837e3dc1231e086db1638947e00d264e7e8c41aa8ecacf6e1e0c05f4/coverage-7.13.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9edd0e01a343766add6817bc448408858ba6b489039eaaa2018474e4001651a4", size = 254148, upload-time = "2025-12-28T15:41:23.87Z" },
+    { url = "https://files.pythonhosted.org/packages/7f/43/f4f16b881aaa34954ba446318dea6b9ed5405dd725dd8daac2358eda869a/coverage-7.13.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:985b7836931d033570b94c94713c6dba5f9d3ff26045f72c3e5dbc5fe3361e5a", size = 250515, upload-time = "2025-12-28T15:41:25.437Z" },
+    { url = "https://files.pythonhosted.org/packages/84/34/8cba7f00078bd468ea914134e0144263194ce849ec3baad187ffb6203d1c/coverage-7.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ffed1e4980889765c84a5d1a566159e363b71d6b6fbaf0bebc9d3c30bc016766", size = 252292, upload-time = "2025-12-28T15:41:28.459Z" },
+    { url = "https://files.pythonhosted.org/packages/8c/a4/cffac66c7652d84ee4ac52d3ccb94c015687d3b513f9db04bfcac2ac800d/coverage-7.13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8842af7f175078456b8b17f1b73a0d16a65dcbdc653ecefeb00a56b3c8c298c4", size = 250242, upload-time = "2025-12-28T15:41:30.02Z" },
+    { url = "https://files.pythonhosted.org/packages/f4/78/9a64d462263dde416f3c0067efade7b52b52796f489b1037a95b0dc389c9/coverage-7.13.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:ccd7a6fca48ca9c131d9b0a2972a581e28b13416fc313fb98b6d24a03ce9a398", size = 250068, upload-time = "2025-12-28T15:41:32.007Z" },
+    { url = "https://files.pythonhosted.org/packages/69/c8/a8994f5fece06db7c4a97c8fc1973684e178599b42e66280dded0524ef00/coverage-7.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0403f647055de2609be776965108447deb8e384fe4a553c119e3ff6bfbab4784", size = 251846, upload-time = "2025-12-28T15:41:33.946Z" },
+    { url = "https://files.pythonhosted.org/packages/cc/f7/91fa73c4b80305c86598a2d4e54ba22df6bf7d0d97500944af7ef155d9f7/coverage-7.13.1-cp313-cp313-win32.whl", hash = "sha256:549d195116a1ba1e1ae2f5ca143f9777800f6636eab917d4f02b5310d6d73461", size = 221512, upload-time = "2025-12-28T15:41:35.519Z" },
+    { url = "https://files.pythonhosted.org/packages/45/0b/0768b4231d5a044da8f75e097a8714ae1041246bb765d6b5563bab456735/coverage-7.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:5899d28b5276f536fcf840b18b61a9fce23cc3aec1d114c44c07fe94ebeaa500", size = 222321, upload-time = "2025-12-28T15:41:37.371Z" },
+    { url = "https://files.pythonhosted.org/packages/9b/b8/bdcb7253b7e85157282450262008f1366aa04663f3e3e4c30436f596c3e2/coverage-7.13.1-cp313-cp313-win_arm64.whl", hash = "sha256:868a2fae76dfb06e87291bcbd4dcbcc778a8500510b618d50496e520bd94d9b9", size = 220949, upload-time = "2025-12-28T15:41:39.553Z" },
+    { url = "https://files.pythonhosted.org/packages/70/52/f2be52cc445ff75ea8397948c96c1b4ee14f7f9086ea62fc929c5ae7b717/coverage-7.13.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:67170979de0dacac3f3097d02b0ad188d8edcea44ccc44aaa0550af49150c7dc", size = 219643, upload-time = "2025-12-28T15:41:41.567Z" },
+    { url = "https://files.pythonhosted.org/packages/47/79/c85e378eaa239e2edec0c5523f71542c7793fe3340954eafb0bc3904d32d/coverage-7.13.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f80e2bb21bfab56ed7405c2d79d34b5dc0bc96c2c1d2a067b643a09fb756c43a", size = 219997, upload-time = "2025-12-28T15:41:43.418Z" },
+    { url = "https://files.pythonhosted.org/packages/fe/9b/b1ade8bfb653c0bbce2d6d6e90cc6c254cbb99b7248531cc76253cb4da6d/coverage-7.13.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f83351e0f7dcdb14d7326c3d8d8c4e915fa685cbfdc6281f9470d97a04e9dfe4", size = 261296, upload-time = "2025-12-28T15:41:45.207Z" },
+    { url = "https://files.pythonhosted.org/packages/1f/af/ebf91e3e1a2473d523e87e87fd8581e0aa08741b96265730e2d79ce78d8d/coverage-7.13.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb3f6562e89bad0110afbe64e485aac2462efdce6232cdec7862a095dc3412f6", size = 263363, upload-time = "2025-12-28T15:41:47.163Z" },
+    { url = "https://files.pythonhosted.org/packages/c4/8b/fb2423526d446596624ac7fde12ea4262e66f86f5120114c3cfd0bb2befa/coverage-7.13.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77545b5dcda13b70f872c3b5974ac64c21d05e65b1590b441c8560115dc3a0d1", size = 265783, upload-time = "2025-12-28T15:41:49.03Z" },
+    { url = "https://files.pythonhosted.org/packages/9b/26/ef2adb1e22674913b89f0fe7490ecadcef4a71fa96f5ced90c60ec358789/coverage-7.13.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a4d240d260a1aed814790bbe1f10a5ff31ce6c21bc78f0da4a1e8268d6c80dbd", size = 260508, upload-time = "2025-12-28T15:41:51.035Z" },
+    { url = "https://files.pythonhosted.org/packages/ce/7d/f0f59b3404caf662e7b5346247883887687c074ce67ba453ea08c612b1d5/coverage-7.13.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d2287ac9360dec3837bfdad969963a5d073a09a85d898bd86bea82aa8876ef3c", size = 263357, upload-time = "2025-12-28T15:41:52.631Z" },
+    { url = "https://files.pythonhosted.org/packages/1a/b1/29896492b0b1a047604d35d6fa804f12818fa30cdad660763a5f3159e158/coverage-7.13.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d2c11f3ea4db66b5cbded23b20185c35066892c67d80ec4be4bab257b9ad1e0", size = 260978, upload-time = "2025-12-28T15:41:54.589Z" },
+    { url = "https://files.pythonhosted.org/packages/48/f2/971de1238a62e6f0a4128d37adadc8bb882ee96afbe03ff1570291754629/coverage-7.13.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:3fc6a169517ca0d7ca6846c3c5392ef2b9e38896f61d615cb75b9e7134d4ee1e", size = 259877, upload-time = "2025-12-28T15:41:56.263Z" },
+    { url = "https://files.pythonhosted.org/packages/6a/fc/0474efcbb590ff8628830e9aaec5f1831594874360e3251f1fdec31d07a3/coverage-7.13.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d10a2ed46386e850bb3de503a54f9fe8192e5917fcbb143bfef653a9355e9a53", size = 262069, upload-time = "2025-12-28T15:41:58.093Z" },
+    { url = "https://files.pythonhosted.org/packages/88/4f/3c159b7953db37a7b44c0eab8a95c37d1aa4257c47b4602c04022d5cb975/coverage-7.13.1-cp313-cp313t-win32.whl", hash = "sha256:75a6f4aa904301dab8022397a22c0039edc1f51e90b83dbd4464b8a38dc87842", size = 222184, upload-time = "2025-12-28T15:41:59.763Z" },
+    { url = "https://files.pythonhosted.org/packages/58/a5/6b57d28f81417f9335774f20679d9d13b9a8fb90cd6160957aa3b54a2379/coverage-7.13.1-cp313-cp313t-win_amd64.whl", hash = "sha256:309ef5706e95e62578cda256b97f5e097916a2c26247c287bbe74794e7150df2", size = 223250, upload-time = "2025-12-28T15:42:01.52Z" },
+    { url = "https://files.pythonhosted.org/packages/81/7c/160796f3b035acfbb58be80e02e484548595aa67e16a6345e7910ace0a38/coverage-7.13.1-cp313-cp313t-win_arm64.whl", hash = "sha256:92f980729e79b5d16d221038dbf2e8f9a9136afa072f9d5d6ed4cb984b126a09", size = 221521, upload-time = "2025-12-28T15:42:03.275Z" },
+    { url = "https://files.pythonhosted.org/packages/aa/8e/ba0e597560c6563fc0adb902fda6526df5d4aa73bb10adf0574d03bd2206/coverage-7.13.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:97ab3647280d458a1f9adb85244e81587505a43c0c7cff851f5116cd2814b894", size = 218996, upload-time = "2025-12-28T15:42:04.978Z" },
+    { url = "https://files.pythonhosted.org/packages/6b/8e/764c6e116f4221dc7aa26c4061181ff92edb9c799adae6433d18eeba7a14/coverage-7.13.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8f572d989142e0908e6acf57ad1b9b86989ff057c006d13b76c146ec6a20216a", size = 219326, upload-time = "2025-12-28T15:42:06.691Z" },
+    { url = "https://files.pythonhosted.org/packages/4f/a6/6130dc6d8da28cdcbb0f2bf8865aeca9b157622f7c0031e48c6cf9a0e591/coverage-7.13.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d72140ccf8a147e94274024ff6fd8fb7811354cf7ef88b1f0a988ebaa5bc774f", size = 250374, upload-time = "2025-12-28T15:42:08.786Z" },
+    { url = "https://files.pythonhosted.org/packages/82/2b/783ded568f7cd6b677762f780ad338bf4b4750205860c17c25f7c708995e/coverage-7.13.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d3c9f051b028810f5a87c88e5d6e9af3c0ff32ef62763bf15d29f740453ca909", size = 252882, upload-time = "2025-12-28T15:42:10.515Z" },
+    { url = "https://files.pythonhosted.org/packages/cd/b2/9808766d082e6a4d59eb0cc881a57fc1600eb2c5882813eefff8254f71b5/coverage-7.13.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f398ba4df52d30b1763f62eed9de5620dcde96e6f491f4c62686736b155aa6e4", size = 254218, upload-time = "2025-12-28T15:42:12.208Z" },
+    { url = "https://files.pythonhosted.org/packages/44/ea/52a985bb447c871cb4d2e376e401116520991b597c85afdde1ea9ef54f2c/coverage-7.13.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:132718176cc723026d201e347f800cd1a9e4b62ccd3f82476950834dad501c75", size = 250391, upload-time = "2025-12-28T15:42:14.21Z" },
+    { url = "https://files.pythonhosted.org/packages/7f/1d/125b36cc12310718873cfc8209ecfbc1008f14f4f5fa0662aa608e579353/coverage-7.13.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9e549d642426e3579b3f4b92d0431543b012dcb6e825c91619d4e93b7363c3f9", size = 252239, upload-time = "2025-12-28T15:42:16.292Z" },
+    { url = "https://files.pythonhosted.org/packages/6a/16/10c1c164950cade470107f9f14bbac8485f8fb8515f515fca53d337e4a7f/coverage-7.13.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:90480b2134999301eea795b3a9dbf606c6fbab1b489150c501da84a959442465", size = 250196, upload-time = "2025-12-28T15:42:18.54Z" },
+    { url = "https://files.pythonhosted.org/packages/2a/c6/cd860fac08780c6fd659732f6ced1b40b79c35977c1356344e44d72ba6c4/coverage-7.13.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e825dbb7f84dfa24663dd75835e7257f8882629fc11f03ecf77d84a75134b864", size = 250008, upload-time = "2025-12-28T15:42:20.365Z" },
+    { url = "https://files.pythonhosted.org/packages/f0/3a/a8c58d3d38f82a5711e1e0a67268362af48e1a03df27c03072ac30feefcf/coverage-7.13.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:623dcc6d7a7ba450bbdbeedbaa0c42b329bdae16491af2282f12a7e809be7eb9", size = 251671, upload-time = "2025-12-28T15:42:22.114Z" },
+    { url = "https://files.pythonhosted.org/packages/f0/bc/fd4c1da651d037a1e3d53e8cb3f8182f4b53271ffa9a95a2e211bacc0349/coverage-7.13.1-cp314-cp314-win32.whl", hash = "sha256:6e73ebb44dca5f708dc871fe0b90cf4cff1a13f9956f747cc87b535a840386f5", size = 221777, upload-time = "2025-12-28T15:42:23.919Z" },
+    { url = "https://files.pythonhosted.org/packages/4b/50/71acabdc8948464c17e90b5ffd92358579bd0910732c2a1c9537d7536aa6/coverage-7.13.1-cp314-cp314-win_amd64.whl", hash = "sha256:be753b225d159feb397bd0bf91ae86f689bad0da09d3b301478cd39b878ab31a", size = 222592, upload-time = "2025-12-28T15:42:25.619Z" },
+    { url = "https://files.pythonhosted.org/packages/f7/c8/a6fb943081bb0cc926499c7907731a6dc9efc2cbdc76d738c0ab752f1a32/coverage-7.13.1-cp314-cp314-win_arm64.whl", hash = "sha256:228b90f613b25ba0019361e4ab81520b343b622fc657daf7e501c4ed6a2366c0", size = 221169, upload-time = "2025-12-28T15:42:27.629Z" },
+    { url = "https://files.pythonhosted.org/packages/16/61/d5b7a0a0e0e40d62e59bc8c7aa1afbd86280d82728ba97f0673b746b78e2/coverage-7.13.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:60cfb538fe9ef86e5b2ab0ca8fc8d62524777f6c611dcaf76dc16fbe9b8e698a", size = 219730, upload-time = "2025-12-28T15:42:29.306Z" },
+    { url = "https://files.pythonhosted.org/packages/a3/2c/8881326445fd071bb49514d1ce97d18a46a980712b51fee84f9ab42845b4/coverage-7.13.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:57dfc8048c72ba48a8c45e188d811e5efd7e49b387effc8fb17e97936dde5bf6", size = 220001, upload-time = "2025-12-28T15:42:31.319Z" },
+    { url = "https://files.pythonhosted.org/packages/b5/d7/50de63af51dfa3a7f91cc37ad8fcc1e244b734232fbc8b9ab0f3c834a5cd/coverage-7.13.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3f2f725aa3e909b3c5fdb8192490bdd8e1495e85906af74fe6e34a2a77ba0673", size = 261370, upload-time = "2025-12-28T15:42:32.992Z" },
+    { url = "https://files.pythonhosted.org/packages/e1/2c/d31722f0ec918fd7453b2758312729f645978d212b410cd0f7c2aed88a94/coverage-7.13.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ee68b21909686eeb21dfcba2c3b81fee70dcf38b140dcd5aa70680995fa3aa5", size = 263485, upload-time = "2025-12-28T15:42:34.759Z" },
+    { url = "https://files.pythonhosted.org/packages/fa/7a/2c114fa5c5fc08ba0777e4aec4c97e0b4a1afcb69c75f1f54cff78b073ab/coverage-7.13.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:724b1b270cb13ea2e6503476e34541a0b1f62280bc997eab443f87790202033d", size = 265890, upload-time = "2025-12-28T15:42:36.517Z" },
+    { url = "https://files.pythonhosted.org/packages/65/d9/f0794aa1c74ceabc780fe17f6c338456bbc4e96bd950f2e969f48ac6fb20/coverage-7.13.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:916abf1ac5cf7eb16bc540a5bf75c71c43a676f5c52fcb9fe75a2bd75fb944e8", size = 260445, upload-time = "2025-12-28T15:42:38.646Z" },
+    { url = "https://files.pythonhosted.org/packages/49/23/184b22a00d9bb97488863ced9454068c79e413cb23f472da6cbddc6cfc52/coverage-7.13.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:776483fd35b58d8afe3acbd9988d5de592ab6da2d2a865edfdbc9fdb43e7c486", size = 263357, upload-time = "2025-12-28T15:42:40.788Z" },
+    { url = "https://files.pythonhosted.org/packages/7d/bd/58af54c0c9199ea4190284f389005779d7daf7bf3ce40dcd2d2b2f96da69/coverage-7.13.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b6f3b96617e9852703f5b633ea01315ca45c77e879584f283c44127f0f1ec564", size = 260959, upload-time = "2025-12-28T15:42:42.808Z" },
+    { url = "https://files.pythonhosted.org/packages/4b/2a/6839294e8f78a4891bf1df79d69c536880ba2f970d0ff09e7513d6e352e9/coverage-7.13.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:bd63e7b74661fed317212fab774e2a648bc4bb09b35f25474f8e3325d2945cd7", size = 259792, upload-time = "2025-12-28T15:42:44.818Z" },
+    { url = "https://files.pythonhosted.org/packages/ba/c3/528674d4623283310ad676c5af7414b9850ab6d55c2300e8aa4b945ec554/coverage-7.13.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:933082f161bbb3e9f90d00990dc956120f608cdbcaeea15c4d897f56ef4fe416", size = 262123, upload-time = "2025-12-28T15:42:47.108Z" },
+    { url = "https://files.pythonhosted.org/packages/06/c5/8c0515692fb4c73ac379d8dc09b18eaf0214ecb76ea6e62467ba7a1556ff/coverage-7.13.1-cp314-cp314t-win32.whl", hash = "sha256:18be793c4c87de2965e1c0f060f03d9e5aff66cfeae8e1dbe6e5b88056ec153f", size = 222562, upload-time = "2025-12-28T15:42:49.144Z" },
+    { url = "https://files.pythonhosted.org/packages/05/0e/c0a0c4678cb30dac735811db529b321d7e1c9120b79bd728d4f4d6b010e9/coverage-7.13.1-cp314-cp314t-win_amd64.whl", hash = "sha256:0e42e0ec0cd3e0d851cb3c91f770c9301f48647cb2877cb78f74bdaa07639a79", size = 223670, upload-time = "2025-12-28T15:42:51.218Z" },
+    { url = "https://files.pythonhosted.org/packages/f5/5f/b177aa0011f354abf03a8f30a85032686d290fdeed4222b27d36b4372a50/coverage-7.13.1-cp314-cp314t-win_arm64.whl", hash = "sha256:eaecf47ef10c72ece9a2a92118257da87e460e113b83cc0d2905cbbe931792b4", size = 221707, upload-time = "2025-12-28T15:42:53.034Z" },
+    { url = "https://files.pythonhosted.org/packages/cc/48/d9f421cb8da5afaa1a64570d9989e00fb7955e6acddc5a12979f7666ef60/coverage-7.13.1-py3-none-any.whl", hash = "sha256:2016745cb3ba554469d02819d78958b571792bb68e31302610e898f80dd3a573", size = 210722, upload-time = "2025-12-28T15:42:54.901Z" },
 ]
 
 [package.optional-dependencies]
@@ -428,67 +446,67 @@ toml = [
 
 [[package]]
 name = "cryptography"
-version = "46.0.2"
+version = "46.0.3"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
     { name = "typing-extensions", marker = "python_full_version < '3.11'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/4a/9b/e301418629f7bfdf72db9e80ad6ed9d1b83c487c471803eaa6464c511a01/cryptography-46.0.2.tar.gz", hash = "sha256:21b6fc8c71a3f9a604f028a329e5560009cc4a3a828bfea5fcba8eb7647d88fe", size = 749293, upload-time = "2025-10-01T00:29:11.856Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/e0/98/7a8df8c19a335c8028414738490fc3955c0cecbfdd37fcc1b9c3d04bd561/cryptography-46.0.2-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:f3e32ab7dd1b1ef67b9232c4cf5e2ee4cd517d4316ea910acaaa9c5712a1c663", size = 7261255, upload-time = "2025-10-01T00:27:22.947Z" },
-    { url = "https://files.pythonhosted.org/packages/c6/38/b2adb2aa1baa6706adc3eb746691edd6f90a656a9a65c3509e274d15a2b8/cryptography-46.0.2-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1fd1a69086926b623ef8126b4c33d5399ce9e2f3fac07c9c734c2a4ec38b6d02", size = 4297596, upload-time = "2025-10-01T00:27:25.258Z" },
-    { url = "https://files.pythonhosted.org/packages/e4/27/0f190ada240003119488ae66c897b5e97149292988f556aef4a6a2a57595/cryptography-46.0.2-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bb7fb9cd44c2582aa5990cf61a4183e6f54eea3172e54963787ba47287edd135", size = 4450899, upload-time = "2025-10-01T00:27:27.458Z" },
-    { url = "https://files.pythonhosted.org/packages/85/d5/e4744105ab02fdf6bb58ba9a816e23b7a633255987310b4187d6745533db/cryptography-46.0.2-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:9066cfd7f146f291869a9898b01df1c9b0e314bfa182cef432043f13fc462c92", size = 4300382, upload-time = "2025-10-01T00:27:29.091Z" },
-    { url = "https://files.pythonhosted.org/packages/33/fb/bf9571065c18c04818cb07de90c43fc042c7977c68e5de6876049559c72f/cryptography-46.0.2-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:97e83bf4f2f2c084d8dd792d13841d0a9b241643151686010866bbd076b19659", size = 4017347, upload-time = "2025-10-01T00:27:30.767Z" },
-    { url = "https://files.pythonhosted.org/packages/35/72/fc51856b9b16155ca071080e1a3ad0c3a8e86616daf7eb018d9565b99baa/cryptography-46.0.2-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:4a766d2a5d8127364fd936572c6e6757682fc5dfcbdba1632d4554943199f2fa", size = 4983500, upload-time = "2025-10-01T00:27:32.741Z" },
-    { url = "https://files.pythonhosted.org/packages/c1/53/0f51e926799025e31746d454ab2e36f8c3f0d41592bc65cb9840368d3275/cryptography-46.0.2-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:fab8f805e9675e61ed8538f192aad70500fa6afb33a8803932999b1049363a08", size = 4482591, upload-time = "2025-10-01T00:27:34.869Z" },
-    { url = "https://files.pythonhosted.org/packages/86/96/4302af40b23ab8aa360862251fb8fc450b2a06ff24bc5e261c2007f27014/cryptography-46.0.2-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:1e3b6428a3d56043bff0bb85b41c535734204e599c1c0977e1d0f261b02f3ad5", size = 4300019, upload-time = "2025-10-01T00:27:37.029Z" },
-    { url = "https://files.pythonhosted.org/packages/9b/59/0be12c7fcc4c5e34fe2b665a75bc20958473047a30d095a7657c218fa9e8/cryptography-46.0.2-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:1a88634851d9b8de8bb53726f4300ab191d3b2f42595e2581a54b26aba71b7cc", size = 4950006, upload-time = "2025-10-01T00:27:40.272Z" },
-    { url = "https://files.pythonhosted.org/packages/55/1d/42fda47b0111834b49e31590ae14fd020594d5e4dadd639bce89ad790fba/cryptography-46.0.2-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:be939b99d4e091eec9a2bcf41aaf8f351f312cd19ff74b5c83480f08a8a43e0b", size = 4482088, upload-time = "2025-10-01T00:27:42.668Z" },
-    { url = "https://files.pythonhosted.org/packages/17/50/60f583f69aa1602c2bdc7022dae86a0d2b837276182f8c1ec825feb9b874/cryptography-46.0.2-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f13b040649bc18e7eb37936009b24fd31ca095a5c647be8bb6aaf1761142bd1", size = 4425599, upload-time = "2025-10-01T00:27:44.616Z" },
-    { url = "https://files.pythonhosted.org/packages/d1/57/d8d4134cd27e6e94cf44adb3f3489f935bde85f3a5508e1b5b43095b917d/cryptography-46.0.2-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:9bdc25e4e01b261a8fda4e98618f1c9515febcecebc9566ddf4a70c63967043b", size = 4697458, upload-time = "2025-10-01T00:27:46.209Z" },
-    { url = "https://files.pythonhosted.org/packages/d1/2b/531e37408573e1da33adfb4c58875013ee8ac7d548d1548967d94a0ae5c4/cryptography-46.0.2-cp311-abi3-win32.whl", hash = "sha256:8b9bf67b11ef9e28f4d78ff88b04ed0929fcd0e4f70bb0f704cfc32a5c6311ee", size = 3056077, upload-time = "2025-10-01T00:27:48.424Z" },
-    { url = "https://files.pythonhosted.org/packages/a8/cd/2f83cafd47ed2dc5a3a9c783ff5d764e9e70d3a160e0df9a9dcd639414ce/cryptography-46.0.2-cp311-abi3-win_amd64.whl", hash = "sha256:758cfc7f4c38c5c5274b55a57ef1910107436f4ae842478c4989abbd24bd5acb", size = 3512585, upload-time = "2025-10-01T00:27:50.521Z" },
-    { url = "https://files.pythonhosted.org/packages/00/36/676f94e10bfaa5c5b86c469ff46d3e0663c5dc89542f7afbadac241a3ee4/cryptography-46.0.2-cp311-abi3-win_arm64.whl", hash = "sha256:218abd64a2e72f8472c2102febb596793347a3e65fafbb4ad50519969da44470", size = 2927474, upload-time = "2025-10-01T00:27:52.91Z" },
-    { url = "https://files.pythonhosted.org/packages/6f/cc/47fc6223a341f26d103cb6da2216805e08a37d3b52bee7f3b2aee8066f95/cryptography-46.0.2-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:bda55e8dbe8533937956c996beaa20266a8eca3570402e52ae52ed60de1faca8", size = 7198626, upload-time = "2025-10-01T00:27:54.8Z" },
-    { url = "https://files.pythonhosted.org/packages/93/22/d66a8591207c28bbe4ac7afa25c4656dc19dc0db29a219f9809205639ede/cryptography-46.0.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e7155c0b004e936d381b15425273aee1cebc94f879c0ce82b0d7fecbf755d53a", size = 4287584, upload-time = "2025-10-01T00:27:57.018Z" },
-    { url = "https://files.pythonhosted.org/packages/8c/3e/fac3ab6302b928e0398c269eddab5978e6c1c50b2b77bb5365ffa8633b37/cryptography-46.0.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a61c154cc5488272a6c4b86e8d5beff4639cdb173d75325ce464d723cda0052b", size = 4433796, upload-time = "2025-10-01T00:27:58.631Z" },
-    { url = "https://files.pythonhosted.org/packages/7d/d8/24392e5d3c58e2d83f98fe5a2322ae343360ec5b5b93fe18bc52e47298f5/cryptography-46.0.2-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:9ec3f2e2173f36a9679d3b06d3d01121ab9b57c979de1e6a244b98d51fea1b20", size = 4292126, upload-time = "2025-10-01T00:28:00.643Z" },
-    { url = "https://files.pythonhosted.org/packages/ed/38/3d9f9359b84c16c49a5a336ee8be8d322072a09fac17e737f3bb11f1ce64/cryptography-46.0.2-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2fafb6aa24e702bbf74de4cb23bfa2c3beb7ab7683a299062b69724c92e0fa73", size = 3993056, upload-time = "2025-10-01T00:28:02.8Z" },
-    { url = "https://files.pythonhosted.org/packages/d6/a3/4c44fce0d49a4703cc94bfbe705adebf7ab36efe978053742957bc7ec324/cryptography-46.0.2-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:0c7ffe8c9b1fcbb07a26d7c9fa5e857c2fe80d72d7b9e0353dcf1d2180ae60ee", size = 4967604, upload-time = "2025-10-01T00:28:04.783Z" },
-    { url = "https://files.pythonhosted.org/packages/eb/c2/49d73218747c8cac16bb8318a5513fde3129e06a018af3bc4dc722aa4a98/cryptography-46.0.2-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:5840f05518caa86b09d23f8b9405a7b6d5400085aa14a72a98fdf5cf1568c0d2", size = 4465367, upload-time = "2025-10-01T00:28:06.864Z" },
-    { url = "https://files.pythonhosted.org/packages/1b/64/9afa7d2ee742f55ca6285a54386ed2778556a4ed8871571cb1c1bfd8db9e/cryptography-46.0.2-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:27c53b4f6a682a1b645fbf1cd5058c72cf2f5aeba7d74314c36838c7cbc06e0f", size = 4291678, upload-time = "2025-10-01T00:28:08.982Z" },
-    { url = "https://files.pythonhosted.org/packages/50/48/1696d5ea9623a7b72ace87608f6899ca3c331709ac7ebf80740abb8ac673/cryptography-46.0.2-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:512c0250065e0a6b286b2db4bbcc2e67d810acd53eb81733e71314340366279e", size = 4931366, upload-time = "2025-10-01T00:28:10.74Z" },
-    { url = "https://files.pythonhosted.org/packages/eb/3c/9dfc778401a334db3b24435ee0733dd005aefb74afe036e2d154547cb917/cryptography-46.0.2-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:07c0eb6657c0e9cca5891f4e35081dbf985c8131825e21d99b4f440a8f496f36", size = 4464738, upload-time = "2025-10-01T00:28:12.491Z" },
-    { url = "https://files.pythonhosted.org/packages/dc/b1/abcde62072b8f3fd414e191a6238ce55a0050e9738090dc6cded24c12036/cryptography-46.0.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:48b983089378f50cba258f7f7aa28198c3f6e13e607eaf10472c26320332ca9a", size = 4419305, upload-time = "2025-10-01T00:28:14.145Z" },
-    { url = "https://files.pythonhosted.org/packages/c7/1f/3d2228492f9391395ca34c677e8f2571fb5370fe13dc48c1014f8c509864/cryptography-46.0.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e6f6775eaaa08c0eec73e301f7592f4367ccde5e4e4df8e58320f2ebf161ea2c", size = 4681201, upload-time = "2025-10-01T00:28:15.951Z" },
-    { url = "https://files.pythonhosted.org/packages/de/77/b687745804a93a55054f391528fcfc76c3d6bfd082ce9fb62c12f0d29fc1/cryptography-46.0.2-cp314-cp314t-win32.whl", hash = "sha256:e8633996579961f9b5a3008683344c2558d38420029d3c0bc7ff77c17949a4e1", size = 3022492, upload-time = "2025-10-01T00:28:17.643Z" },
-    { url = "https://files.pythonhosted.org/packages/60/a5/8d498ef2996e583de0bef1dcc5e70186376f00883ae27bf2133f490adf21/cryptography-46.0.2-cp314-cp314t-win_amd64.whl", hash = "sha256:48c01988ecbb32979bb98731f5c2b2f79042a6c58cc9a319c8c2f9987c7f68f9", size = 3496215, upload-time = "2025-10-01T00:28:19.272Z" },
-    { url = "https://files.pythonhosted.org/packages/56/db/ee67aaef459a2706bc302b15889a1a8126ebe66877bab1487ae6ad00f33d/cryptography-46.0.2-cp314-cp314t-win_arm64.whl", hash = "sha256:8e2ad4d1a5899b7caa3a450e33ee2734be7cc0689010964703a7c4bcc8dd4fd0", size = 2919255, upload-time = "2025-10-01T00:28:21.115Z" },
-    { url = "https://files.pythonhosted.org/packages/d5/bb/fa95abcf147a1b0bb94d95f53fbb09da77b24c776c5d87d36f3d94521d2c/cryptography-46.0.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a08e7401a94c002e79dc3bc5231b6558cd4b2280ee525c4673f650a37e2c7685", size = 7248090, upload-time = "2025-10-01T00:28:22.846Z" },
-    { url = "https://files.pythonhosted.org/packages/b7/66/f42071ce0e3ffbfa80a88feadb209c779fda92a23fbc1e14f74ebf72ef6b/cryptography-46.0.2-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d30bc11d35743bf4ddf76674a0a369ec8a21f87aaa09b0661b04c5f6c46e8d7b", size = 4293123, upload-time = "2025-10-01T00:28:25.072Z" },
-    { url = "https://files.pythonhosted.org/packages/a8/5d/1fdbd2e5c1ba822828d250e5a966622ef00185e476d1cd2726b6dd135e53/cryptography-46.0.2-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bca3f0ce67e5a2a2cf524e86f44697c4323a86e0fd7ba857de1c30d52c11ede1", size = 4439524, upload-time = "2025-10-01T00:28:26.808Z" },
-    { url = "https://files.pythonhosted.org/packages/c8/c1/5e4989a7d102d4306053770d60f978c7b6b1ea2ff8c06e0265e305b23516/cryptography-46.0.2-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ff798ad7a957a5021dcbab78dfff681f0cf15744d0e6af62bd6746984d9c9e9c", size = 4297264, upload-time = "2025-10-01T00:28:29.327Z" },
-    { url = "https://files.pythonhosted.org/packages/28/78/b56f847d220cb1d6d6aef5a390e116ad603ce13a0945a3386a33abc80385/cryptography-46.0.2-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:cb5e8daac840e8879407acbe689a174f5ebaf344a062f8918e526824eb5d97af", size = 4011872, upload-time = "2025-10-01T00:28:31.479Z" },
-    { url = "https://files.pythonhosted.org/packages/e1/80/2971f214b066b888944f7b57761bf709ee3f2cf805619a18b18cab9b263c/cryptography-46.0.2-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:3f37aa12b2d91e157827d90ce78f6180f0c02319468a0aea86ab5a9566da644b", size = 4978458, upload-time = "2025-10-01T00:28:33.267Z" },
-    { url = "https://files.pythonhosted.org/packages/a5/84/0cb0a2beaa4f1cbe63ebec4e97cd7e0e9f835d0ba5ee143ed2523a1e0016/cryptography-46.0.2-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5e38f203160a48b93010b07493c15f2babb4e0f2319bbd001885adb3f3696d21", size = 4472195, upload-time = "2025-10-01T00:28:36.039Z" },
-    { url = "https://files.pythonhosted.org/packages/30/8b/2b542ddbf78835c7cd67b6fa79e95560023481213a060b92352a61a10efe/cryptography-46.0.2-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d19f5f48883752b5ab34cff9e2f7e4a7f216296f33714e77d1beb03d108632b6", size = 4296791, upload-time = "2025-10-01T00:28:37.732Z" },
-    { url = "https://files.pythonhosted.org/packages/78/12/9065b40201b4f4876e93b9b94d91feb18de9150d60bd842a16a21565007f/cryptography-46.0.2-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:04911b149eae142ccd8c9a68892a70c21613864afb47aba92d8c7ed9cc001023", size = 4939629, upload-time = "2025-10-01T00:28:39.654Z" },
-    { url = "https://files.pythonhosted.org/packages/f6/9e/6507dc048c1b1530d372c483dfd34e7709fc542765015425f0442b08547f/cryptography-46.0.2-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:8b16c1ede6a937c291d41176934268e4ccac2c6521c69d3f5961c5a1e11e039e", size = 4471988, upload-time = "2025-10-01T00:28:41.822Z" },
-    { url = "https://files.pythonhosted.org/packages/b1/86/d025584a5f7d5c5ec8d3633dbcdce83a0cd579f1141ceada7817a4c26934/cryptography-46.0.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:747b6f4a4a23d5a215aadd1d0b12233b4119c4313df83ab4137631d43672cc90", size = 4422989, upload-time = "2025-10-01T00:28:43.608Z" },
-    { url = "https://files.pythonhosted.org/packages/4b/39/536370418b38a15a61bbe413006b79dfc3d2b4b0eafceb5581983f973c15/cryptography-46.0.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6b275e398ab3a7905e168c036aad54b5969d63d3d9099a0a66cc147a3cc983be", size = 4685578, upload-time = "2025-10-01T00:28:45.361Z" },
-    { url = "https://files.pythonhosted.org/packages/15/52/ea7e2b1910f547baed566c866fbb86de2402e501a89ecb4871ea7f169a81/cryptography-46.0.2-cp38-abi3-win32.whl", hash = "sha256:0b507c8e033307e37af61cb9f7159b416173bdf5b41d11c4df2e499a1d8e007c", size = 3036711, upload-time = "2025-10-01T00:28:47.096Z" },
-    { url = "https://files.pythonhosted.org/packages/71/9e/171f40f9c70a873e73c2efcdbe91e1d4b1777a03398fa1c4af3c56a2477a/cryptography-46.0.2-cp38-abi3-win_amd64.whl", hash = "sha256:f9b2dc7668418fb6f221e4bf701f716e05e8eadb4f1988a2487b11aedf8abe62", size = 3500007, upload-time = "2025-10-01T00:28:48.967Z" },
-    { url = "https://files.pythonhosted.org/packages/3e/7c/15ad426257615f9be8caf7f97990cf3dcbb5b8dd7ed7e0db581a1c4759dd/cryptography-46.0.2-cp38-abi3-win_arm64.whl", hash = "sha256:91447f2b17e83c9e0c89f133119d83f94ce6e0fb55dd47da0a959316e6e9cfa1", size = 2918153, upload-time = "2025-10-01T00:28:51.003Z" },
-    { url = "https://files.pythonhosted.org/packages/25/b2/067a7db693488f19777ecf73f925bcb6a3efa2eae42355bafaafa37a6588/cryptography-46.0.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f25a41f5b34b371a06dad3f01799706631331adc7d6c05253f5bca22068c7a34", size = 3701860, upload-time = "2025-10-01T00:28:53.003Z" },
-    { url = "https://files.pythonhosted.org/packages/87/12/47c2aab2c285f97c71a791169529dbb89f48fc12e5f62bb6525c3927a1a2/cryptography-46.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e12b61e0b86611e3f4c1756686d9086c1d36e6fd15326f5658112ad1f1cc8807", size = 3429917, upload-time = "2025-10-01T00:28:55.03Z" },
-    { url = "https://files.pythonhosted.org/packages/b7/8c/1aabe338149a7d0f52c3e30f2880b20027ca2a485316756ed6f000462db3/cryptography-46.0.2-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1d3b3edd145953832e09607986f2bd86f85d1dc9c48ced41808b18009d9f30e5", size = 3714495, upload-time = "2025-10-01T00:28:57.222Z" },
-    { url = "https://files.pythonhosted.org/packages/e3/0a/0d10eb970fe3e57da9e9ddcfd9464c76f42baf7b3d0db4a782d6746f788f/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:fe245cf4a73c20592f0f48da39748b3513db114465be78f0a36da847221bd1b4", size = 4243379, upload-time = "2025-10-01T00:28:58.989Z" },
-    { url = "https://files.pythonhosted.org/packages/7d/60/e274b4d41a9eb82538b39950a74ef06e9e4d723cb998044635d9deb1b435/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2b9cad9cf71d0c45566624ff76654e9bae5f8a25970c250a26ccfc73f8553e2d", size = 4409533, upload-time = "2025-10-01T00:29:00.785Z" },
-    { url = "https://files.pythonhosted.org/packages/19/9a/fb8548f762b4749aebd13b57b8f865de80258083fe814957f9b0619cfc56/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9bd26f2f75a925fdf5e0a446c0de2714f17819bf560b44b7480e4dd632ad6c46", size = 4243120, upload-time = "2025-10-01T00:29:02.515Z" },
-    { url = "https://files.pythonhosted.org/packages/71/60/883f24147fd4a0c5cab74ac7e36a1ff3094a54ba5c3a6253d2ff4b19255b/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:7282d8f092b5be7172d6472f29b0631f39f18512a3642aefe52c3c0e0ccfad5a", size = 4408940, upload-time = "2025-10-01T00:29:04.42Z" },
-    { url = "https://files.pythonhosted.org/packages/d9/b5/c5e179772ec38adb1c072b3aa13937d2860509ba32b2462bf1dda153833b/cryptography-46.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c4b93af7920cdf80f71650769464ccf1fb49a4b56ae0024173c24c48eb6b1612", size = 3438518, upload-time = "2025-10-01T00:29:06.139Z" },
+sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" },
+    { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" },
+    { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" },
+    { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" },
+    { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" },
+    { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" },
+    { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" },
+    { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" },
+    { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" },
+    { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" },
+    { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" },
+    { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" },
+    { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" },
+    { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" },
+    { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" },
+    { url = "https://files.pythonhosted.org/packages/f5/e2/a510aa736755bffa9d2f75029c229111a1d02f8ecd5de03078f4c18d91a3/cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217", size = 7158012, upload-time = "2025-10-15T23:17:19.982Z" },
+    { url = "https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728, upload-time = "2025-10-15T23:17:21.527Z" },
+    { url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078, upload-time = "2025-10-15T23:17:23.042Z" },
+    { url = "https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460, upload-time = "2025-10-15T23:17:24.885Z" },
+    { url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237, upload-time = "2025-10-15T23:17:26.449Z" },
+    { url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344, upload-time = "2025-10-15T23:17:28.06Z" },
+    { url = "https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564, upload-time = "2025-10-15T23:17:29.665Z" },
+    { url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415, upload-time = "2025-10-15T23:17:31.686Z" },
+    { url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457, upload-time = "2025-10-15T23:17:33.478Z" },
+    { url = "https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074, upload-time = "2025-10-15T23:17:35.158Z" },
+    { url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569, upload-time = "2025-10-15T23:17:37.188Z" },
+    { url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941, upload-time = "2025-10-15T23:17:39.236Z" },
+    { url = "https://files.pythonhosted.org/packages/fd/30/27654c1dbaf7e4a3531fa1fc77986d04aefa4d6d78259a62c9dc13d7ad36/cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914", size = 3022339, upload-time = "2025-10-15T23:17:40.888Z" },
+    { url = "https://files.pythonhosted.org/packages/f6/30/640f34ccd4d2a1bc88367b54b926b781b5a018d65f404d409aba76a84b1c/cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db", size = 3494315, upload-time = "2025-10-15T23:17:42.769Z" },
+    { url = "https://files.pythonhosted.org/packages/ba/8b/88cc7e3bd0a8e7b861f26981f7b820e1f46aa9d26cc482d0feba0ecb4919/cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21", size = 2919331, upload-time = "2025-10-15T23:17:44.468Z" },
+    { url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" },
+    { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" },
+    { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" },
+    { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" },
+    { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" },
+    { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" },
+    { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" },
+    { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" },
+    { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" },
+    { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" },
+    { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" },
+    { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" },
+    { url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" },
+    { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" },
+    { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" },
+    { url = "https://files.pythonhosted.org/packages/d9/cd/1a8633802d766a0fa46f382a77e096d7e209e0817892929655fe0586ae32/cryptography-46.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a23582810fedb8c0bc47524558fb6c56aac3fc252cb306072fd2815da2a47c32", size = 3689163, upload-time = "2025-10-15T23:18:13.821Z" },
+    { url = "https://files.pythonhosted.org/packages/4c/59/6b26512964ace6480c3e54681a9859c974172fb141c38df11eadd8416947/cryptography-46.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e7aec276d68421f9574040c26e2a7c3771060bc0cff408bae1dcb19d3ab1e63c", size = 3429474, upload-time = "2025-10-15T23:18:15.477Z" },
+    { url = "https://files.pythonhosted.org/packages/06/8a/e60e46adab4362a682cf142c7dcb5bf79b782ab2199b0dcb81f55970807f/cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea", size = 3698132, upload-time = "2025-10-15T23:18:17.056Z" },
+    { url = "https://files.pythonhosted.org/packages/da/38/f59940ec4ee91e93d3311f7532671a5cef5570eb04a144bf203b58552d11/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b", size = 4243992, upload-time = "2025-10-15T23:18:18.695Z" },
+    { url = "https://files.pythonhosted.org/packages/b0/0c/35b3d92ddebfdfda76bb485738306545817253d0a3ded0bfe80ef8e67aa5/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb", size = 4409944, upload-time = "2025-10-15T23:18:20.597Z" },
+    { url = "https://files.pythonhosted.org/packages/99/55/181022996c4063fc0e7666a47049a1ca705abb9c8a13830f074edb347495/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717", size = 4242957, upload-time = "2025-10-15T23:18:22.18Z" },
+    { url = "https://files.pythonhosted.org/packages/ba/af/72cd6ef29f9c5f731251acadaeb821559fe25f10852f44a63374c9ca08c1/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9", size = 4409447, upload-time = "2025-10-15T23:18:24.209Z" },
+    { url = "https://files.pythonhosted.org/packages/0d/c3/e90f4a4feae6410f914f8ebac129b9ae7a8c92eb60a638012dde42030a9d/cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c", size = 3438528, upload-time = "2025-10-15T23:18:26.227Z" },
 ]
 
 [[package]]
@@ -515,14 +533,52 @@ wheels = [
 
 [[package]]
 name = "exceptiongroup"
-version = "1.3.0"
+version = "1.3.1"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "typing-extensions", marker = "python_full_version < '3.13'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" },
+]
+
+[[package]]
+name = "h11"
+version = "0.16.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" },
+]
+
+[[package]]
+name = "httpcore"
+version = "1.0.9"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "certifi" },
+    { name = "h11" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" },
+]
+
+[[package]]
+name = "httpx"
+version = "0.27.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "anyio" },
+    { name = "certifi" },
+    { name = "httpcore" },
+    { name = "idna" },
+    { name = "sniffio" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/5c/2d/3da5bdf4408b8b2800061c339f240c1802f2e82d55e50bd39c5a881f47f0/httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5", size = 126413, upload-time = "2024-02-21T13:07:52.434Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" },
+    { url = "https://files.pythonhosted.org/packages/41/7b/ddacf6dcebb42466abd03f368782142baa82e08fc0c1f8eaa05b4bae87d5/httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5", size = 75590, upload-time = "2024-02-21T13:07:50.455Z" },
 ]
 
 [[package]]
@@ -536,11 +592,11 @@ wheels = [
 
 [[package]]
 name = "iniconfig"
-version = "2.1.0"
+version = "2.3.0"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" },
+    { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" },
 ]
 
 [[package]]
@@ -595,6 +651,79 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/46/3c/36a0a265c920e478cec88faab97169cbc39d5d67822fcbd63eed052112cf/junos_eznc-2.7.5-py2.py3-none-any.whl", hash = "sha256:db94074c50227c217a7669b9efad372d11bb354de4985cffcc7295aa2af80919", size = 206884, upload-time = "2025-09-23T15:03:23.637Z" },
 ]
 
+[[package]]
+name = "librt"
+version = "0.7.5"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b5/8a/071f6628363d83e803d4783e0cd24fb9c5b798164300fcfaaa47c30659c0/librt-0.7.5.tar.gz", hash = "sha256:de4221a1181fa9c8c4b5f35506ed6f298948f44003d84d2a8b9885d7e01e6cfa", size = 145868, upload-time = "2025-12-25T03:53:16.039Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/88/f2/3248d8419db99ab80bb36266735d1241f766ad5fd993071211f789b618a5/librt-0.7.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:81056e01bba1394f1d92904ec61a4078f66df785316275edbaf51d90da8c6e26", size = 54703, upload-time = "2025-12-25T03:51:48.394Z" },
+    { url = "https://files.pythonhosted.org/packages/7b/30/7e179543dbcb1311f84b7e797658ad85cf2d4474c468f5dbafa13f2a98a5/librt-0.7.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d7c72c8756eeb3aefb1b9e3dac7c37a4a25db63640cac0ab6fc18e91a0edf05a", size = 56660, upload-time = "2025-12-25T03:51:49.791Z" },
+    { url = "https://files.pythonhosted.org/packages/15/91/3ba03ac1ac1abd66757a134b3bd56d9674928b163d0e686ea065a2bbb92d/librt-0.7.5-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ddc4a16207f88f9597b397fc1f60781266d13b13de922ff61c206547a29e4bbd", size = 161026, upload-time = "2025-12-25T03:51:51.021Z" },
+    { url = "https://files.pythonhosted.org/packages/0d/6e/b8365f547817d37b44c4be2ffa02630be995ef18be52d72698cecc3640c5/librt-0.7.5-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:63055d3dda433ebb314c9f1819942f16a19203c454508fdb2d167613f7017169", size = 169530, upload-time = "2025-12-25T03:51:52.417Z" },
+    { url = "https://files.pythonhosted.org/packages/63/6a/8442eb0b6933c651a06e1888f863971f3391cc11338fdaa6ab969f7d1eac/librt-0.7.5-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9f85f9b5db87b0f52e53c68ad2a0c5a53e00afa439bd54a1723742a2b1021276", size = 183272, upload-time = "2025-12-25T03:51:53.713Z" },
+    { url = "https://files.pythonhosted.org/packages/90/c4/b1166df6ef8e1f68d309f50bf69e8e750a5ea12fe7e2cf202c771ff359fc/librt-0.7.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c566a4672564c5d54d8ab65cdaae5a87ee14c1564c1a2ddc7a9f5811c750f023", size = 179040, upload-time = "2025-12-25T03:51:55.048Z" },
+    { url = "https://files.pythonhosted.org/packages/fc/30/8f3fd9fd975b16c37832d6c248b976d2a0e33f155063781e064f249b37f1/librt-0.7.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fee15c2a190ef389f14928135c6fb2d25cd3fdb7887bfd9a7b444bbdc8c06b96", size = 173506, upload-time = "2025-12-25T03:51:56.407Z" },
+    { url = "https://files.pythonhosted.org/packages/75/71/c3d4d5658f9849bf8e07ffba99f892d49a0c9a4001323ed610db72aedc82/librt-0.7.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:584cb3e605ec45ba350962cec853e17be0a25a772f21f09f1e422f7044ae2a7d", size = 193573, upload-time = "2025-12-25T03:51:57.949Z" },
+    { url = "https://files.pythonhosted.org/packages/86/7c/c1c8a0116a2eed3d58c8946c589a8f9e1354b9b825cc92eba58bb15f6fb1/librt-0.7.5-cp310-cp310-win32.whl", hash = "sha256:9c08527055fbb03c641c15bbc5b79dd2942fb6a3bd8dabf141dd7e97eeea4904", size = 42603, upload-time = "2025-12-25T03:51:59.215Z" },
+    { url = "https://files.pythonhosted.org/packages/1d/00/b52c77ca294247420020b829b70465c6e6f2b9d59ab21d8051aac20432da/librt-0.7.5-cp310-cp310-win_amd64.whl", hash = "sha256:dd810f2d39c526c42ea205e0addad5dc08ef853c625387806a29d07f9d150d9b", size = 48977, upload-time = "2025-12-25T03:52:00.519Z" },
+    { url = "https://files.pythonhosted.org/packages/11/89/42b3ccb702a7e5f7a4cf2afc8a0a8f8c5e7d4b4d3a7c3de6357673dddddb/librt-0.7.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f952e1a78c480edee8fb43aa2bf2e84dcd46c917d44f8065b883079d3893e8fc", size = 54705, upload-time = "2025-12-25T03:52:01.433Z" },
+    { url = "https://files.pythonhosted.org/packages/bb/90/c16970b509c3c448c365041d326eeef5aeb2abaed81eb3187b26a3cd13f8/librt-0.7.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75965c1f4efb7234ff52a58b729d245a21e87e4b6a26a0ec08052f02b16274e4", size = 56667, upload-time = "2025-12-25T03:52:02.391Z" },
+    { url = "https://files.pythonhosted.org/packages/ac/2f/da4bdf6c190503f4663fbb781dfae5564a2b1c3f39a2da8e1ac7536ac7bd/librt-0.7.5-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:732e0aa0385b59a1b2545159e781c792cc58ce9c134249233a7c7250a44684c4", size = 161705, upload-time = "2025-12-25T03:52:03.395Z" },
+    { url = "https://files.pythonhosted.org/packages/fb/88/c5da8e1f5f22b23d56e1fbd87266799dcf32828d47bf69fabc6f9673c6eb/librt-0.7.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cdde31759bd8888f3ef0eebda80394a48961328a17c264dce8cc35f4b9cde35d", size = 171029, upload-time = "2025-12-25T03:52:04.798Z" },
+    { url = "https://files.pythonhosted.org/packages/38/8a/8dfc00a6f1febc094ed9a55a448fc0b3a591b5dfd83be6cfd76d0910b1f0/librt-0.7.5-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:df3146d52465b3b6397d25d513f428cb421c18df65b7378667bb5f1e3cc45805", size = 184704, upload-time = "2025-12-25T03:52:05.887Z" },
+    { url = "https://files.pythonhosted.org/packages/ad/57/65dec835ff235f431801064a3b41268f2f5ee0d224dc3bbf46d911af5c1a/librt-0.7.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:29c8d2fae11d4379ea207ba7fc69d43237e42cf8a9f90ec6e05993687e6d648b", size = 180720, upload-time = "2025-12-25T03:52:06.925Z" },
+    { url = "https://files.pythonhosted.org/packages/1e/27/92033d169bbcaa0d9a2dd476c179e5171ec22ed574b1b135a3c6104fb7d4/librt-0.7.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bb41f04046b4f22b1e7ba5ef513402cd2e3477ec610e5f92d38fe2bba383d419", size = 174538, upload-time = "2025-12-25T03:52:08.075Z" },
+    { url = "https://files.pythonhosted.org/packages/44/5c/0127098743575d5340624d8d4ec508d4d5ff0877dcee6f55f54bf03e5ed0/librt-0.7.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8bb7883c1e94ceb87c2bf81385266f032da09cd040e804cc002f2c9d6b842e2f", size = 195240, upload-time = "2025-12-25T03:52:09.427Z" },
+    { url = "https://files.pythonhosted.org/packages/47/0f/be028c3e906a8ee6d29a42fd362e6d57d4143057f2bc0c454d489a0f898b/librt-0.7.5-cp311-cp311-win32.whl", hash = "sha256:84d4a6b9efd6124f728558a18e79e7cc5c5d4efc09b2b846c910de7e564f5bad", size = 42941, upload-time = "2025-12-25T03:52:10.527Z" },
+    { url = "https://files.pythonhosted.org/packages/ac/3a/2f0ed57f4c3ae3c841780a95dfbea4cd811c6842d9ee66171ce1af606d25/librt-0.7.5-cp311-cp311-win_amd64.whl", hash = "sha256:ab4b0d3bee6f6ff7017e18e576ac7e41a06697d8dea4b8f3ab9e0c8e1300c409", size = 49244, upload-time = "2025-12-25T03:52:11.832Z" },
+    { url = "https://files.pythonhosted.org/packages/ee/7c/d7932aedfa5a87771f9e2799e7185ec3a322f4a1f4aa87c234159b75c8c8/librt-0.7.5-cp311-cp311-win_arm64.whl", hash = "sha256:730be847daad773a3c898943cf67fb9845a3961d06fb79672ceb0a8cd8624cfa", size = 42614, upload-time = "2025-12-25T03:52:12.745Z" },
+    { url = "https://files.pythonhosted.org/packages/33/9d/cb0a296cee177c0fee7999ada1c1af7eee0e2191372058814a4ca6d2baf0/librt-0.7.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ba1077c562a046208a2dc6366227b3eeae8f2c2ab4b41eaf4fd2fa28cece4203", size = 55689, upload-time = "2025-12-25T03:52:14.041Z" },
+    { url = "https://files.pythonhosted.org/packages/79/5c/d7de4d4228b74c5b81a3fbada157754bb29f0e1f8c38229c669a7f90422a/librt-0.7.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:654fdc971c76348a73af5240d8e2529265b9a7ba6321e38dd5bae7b0d4ab3abe", size = 57142, upload-time = "2025-12-25T03:52:15.336Z" },
+    { url = "https://files.pythonhosted.org/packages/e5/b2/5da779184aae369b69f4ae84225f63741662a0fe422e91616c533895d7a4/librt-0.7.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6b7b58913d475911f6f33e8082f19dd9b120c4f4a5c911d07e395d67b81c6982", size = 165323, upload-time = "2025-12-25T03:52:16.384Z" },
+    { url = "https://files.pythonhosted.org/packages/5a/40/6d5abc15ab6cc70e04c4d201bb28baffff4cfb46ab950b8e90935b162d58/librt-0.7.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8e0fd344bad57026a8f4ccfaf406486c2fc991838050c2fef156170edc3b775", size = 174218, upload-time = "2025-12-25T03:52:17.518Z" },
+    { url = "https://files.pythonhosted.org/packages/0d/d0/5239a8507e6117a3cb59ce0095bdd258bd2a93d8d4b819a506da06d8d645/librt-0.7.5-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:46aa91813c267c3f60db75d56419b42c0c0b9748ec2c568a0e3588e543fb4233", size = 189007, upload-time = "2025-12-25T03:52:18.585Z" },
+    { url = "https://files.pythonhosted.org/packages/1f/a4/8eed1166ffddbb01c25363e4c4e655f4bac298debe9e5a2dcfaf942438a1/librt-0.7.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ddc0ab9dbc5f9ceaf2bf7a367bf01f2697660e908f6534800e88f43590b271db", size = 183962, upload-time = "2025-12-25T03:52:19.723Z" },
+    { url = "https://files.pythonhosted.org/packages/a1/83/260e60aab2f5ccba04579c5c46eb3b855e51196fde6e2bcf6742d89140a8/librt-0.7.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7a488908a470451338607650f1c064175094aedebf4a4fa37890682e30ce0b57", size = 177611, upload-time = "2025-12-25T03:52:21.18Z" },
+    { url = "https://files.pythonhosted.org/packages/c4/36/6dcfed0df41e9695665462bab59af15b7ed2b9c668d85c7ebadd022cbb76/librt-0.7.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e47fc52602ffc374e69bf1b76536dc99f7f6dd876bd786c8213eaa3598be030a", size = 199273, upload-time = "2025-12-25T03:52:22.25Z" },
+    { url = "https://files.pythonhosted.org/packages/a6/b7/157149c8cffae6bc4293a52e0267860cee2398cb270798d94f1c8a69b9ae/librt-0.7.5-cp312-cp312-win32.whl", hash = "sha256:cda8b025875946ffff5a9a7590bf9acde3eb02cb6200f06a2d3e691ef3d9955b", size = 43191, upload-time = "2025-12-25T03:52:23.643Z" },
+    { url = "https://files.pythonhosted.org/packages/f8/91/197dfeb8d3bdeb0a5344d0d8b3077f183ba5e76c03f158126f6072730998/librt-0.7.5-cp312-cp312-win_amd64.whl", hash = "sha256:b591c094afd0ffda820e931148c9e48dc31a556dc5b2b9b3cc552fa710d858e4", size = 49462, upload-time = "2025-12-25T03:52:24.637Z" },
+    { url = "https://files.pythonhosted.org/packages/03/ea/052a79454cc52081dfaa9a1c4c10a529f7a6a6805b2fac5805fea5b25975/librt-0.7.5-cp312-cp312-win_arm64.whl", hash = "sha256:532ddc6a8a6ca341b1cd7f4d999043e4c71a212b26fe9fd2e7f1e8bb4e873544", size = 42830, upload-time = "2025-12-25T03:52:25.944Z" },
+    { url = "https://files.pythonhosted.org/packages/9f/9a/8f61e16de0ff76590af893cfb5b1aa5fa8b13e5e54433d0809c7033f59ed/librt-0.7.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b1795c4b2789b458fa290059062c2f5a297ddb28c31e704d27e161386469691a", size = 55750, upload-time = "2025-12-25T03:52:26.975Z" },
+    { url = "https://files.pythonhosted.org/packages/05/7c/a8a883804851a066f301e0bad22b462260b965d5c9e7fe3c5de04e6f91f8/librt-0.7.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2fcbf2e135c11f721193aa5f42ba112bb1046afafbffd407cbc81d8d735c74d0", size = 57170, upload-time = "2025-12-25T03:52:27.948Z" },
+    { url = "https://files.pythonhosted.org/packages/d6/5d/b3b47facf5945be294cf8a835b03589f70ee0e791522f99ec6782ed738b3/librt-0.7.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c039bbf79a9a2498404d1ae7e29a6c175e63678d7a54013a97397c40aee026c5", size = 165834, upload-time = "2025-12-25T03:52:29.09Z" },
+    { url = "https://files.pythonhosted.org/packages/b4/b6/b26910cd0a4e43e5d02aacaaea0db0d2a52e87660dca08293067ee05601a/librt-0.7.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3919c9407faeeee35430ae135e3a78acd4ecaaaa73767529e2c15ca1d73ba325", size = 174820, upload-time = "2025-12-25T03:52:30.463Z" },
+    { url = "https://files.pythonhosted.org/packages/a5/a3/81feddd345d4c869b7a693135a462ae275f964fcbbe793d01ea56a84c2ee/librt-0.7.5-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:26b46620e1e0e45af510d9848ea0915e7040605dd2ae94ebefb6c962cbb6f7ec", size = 189609, upload-time = "2025-12-25T03:52:31.492Z" },
+    { url = "https://files.pythonhosted.org/packages/ce/a9/31310796ef4157d1d37648bf4a3b84555319f14cee3e9bad7bdd7bfd9a35/librt-0.7.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9bbb8facc5375476d392990dd6a71f97e4cb42e2ac66f32e860f6e47299d5e89", size = 184589, upload-time = "2025-12-25T03:52:32.59Z" },
+    { url = "https://files.pythonhosted.org/packages/32/22/da3900544cb0ac6ab7a2857850158a0a093b86f92b264aa6c4a4f2355ff3/librt-0.7.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e9e9c988b5ffde7be02180f864cbd17c0b0c1231c235748912ab2afa05789c25", size = 178251, upload-time = "2025-12-25T03:52:33.745Z" },
+    { url = "https://files.pythonhosted.org/packages/db/77/78e02609846e78b9b8c8e361753b3dbac9a07e6d5b567fe518de9e074ab0/librt-0.7.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:edf6b465306215b19dbe6c3fb63cf374a8f3e1ad77f3b4c16544b83033bbb67b", size = 199852, upload-time = "2025-12-25T03:52:34.826Z" },
+    { url = "https://files.pythonhosted.org/packages/2a/25/05706f6b346429c951582f1b3561f4d5e1418d0d7ba1a0c181237cd77b3b/librt-0.7.5-cp313-cp313-win32.whl", hash = "sha256:060bde69c3604f694bd8ae21a780fe8be46bb3dbb863642e8dfc75c931ca8eee", size = 43250, upload-time = "2025-12-25T03:52:35.905Z" },
+    { url = "https://files.pythonhosted.org/packages/d9/59/c38677278ac0b9ae1afc611382ef6c9ea87f52ad257bd3d8d65f0eacdc6a/librt-0.7.5-cp313-cp313-win_amd64.whl", hash = "sha256:a82d5a0ee43aeae2116d7292c77cc8038f4841830ade8aa922e098933b468b9e", size = 49421, upload-time = "2025-12-25T03:52:36.895Z" },
+    { url = "https://files.pythonhosted.org/packages/c0/47/1d71113df4a81de5fdfbd3d7244e05d3d67e89f25455c3380ca50b92741e/librt-0.7.5-cp313-cp313-win_arm64.whl", hash = "sha256:3c98a8d0ac9e2a7cb8ff8c53e5d6e8d82bfb2839abf144fdeaaa832f2a12aa45", size = 42827, upload-time = "2025-12-25T03:52:37.856Z" },
+    { url = "https://files.pythonhosted.org/packages/97/ae/8635b4efdc784220f1378be640d8b1a794332f7f6ea81bb4859bf9d18aa7/librt-0.7.5-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:9937574e6d842f359b8585903d04f5b4ab62277a091a93e02058158074dc52f2", size = 55191, upload-time = "2025-12-25T03:52:38.839Z" },
+    { url = "https://files.pythonhosted.org/packages/52/11/ed7ef6955dc2032af37db9b0b31cd5486a138aa792e1bb9e64f0f4950e27/librt-0.7.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:5cd3afd71e9bc146203b6c8141921e738364158d4aa7cdb9a874e2505163770f", size = 56894, upload-time = "2025-12-25T03:52:39.805Z" },
+    { url = "https://files.pythonhosted.org/packages/24/f1/02921d4a66a1b5dcd0493b89ce76e2762b98c459fe2ad04b67b2ea6fdd39/librt-0.7.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9cffa3ef0af29687455161cb446eff059bf27607f95163d6a37e27bcb37180f6", size = 163726, upload-time = "2025-12-25T03:52:40.79Z" },
+    { url = "https://files.pythonhosted.org/packages/65/87/27df46d2756fcb7a82fa7f6ca038a0c6064c3e93ba65b0b86fbf6a4f76a2/librt-0.7.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:82f3f088482e2229387eadf8215c03f7726d56f69cce8c0c40f0795aebc9b361", size = 172470, upload-time = "2025-12-25T03:52:42.226Z" },
+    { url = "https://files.pythonhosted.org/packages/9f/a9/e65a35e5d423639f4f3d8e17301ff13cc41c2ff97677fe9c361c26dbfbb7/librt-0.7.5-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7aa33153a5bb0bac783d2c57885889b1162823384e8313d47800a0e10d0070e", size = 186807, upload-time = "2025-12-25T03:52:43.688Z" },
+    { url = "https://files.pythonhosted.org/packages/d7/b0/ac68aa582a996b1241773bd419823290c42a13dc9f494704a12a17ddd7b6/librt-0.7.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:265729b551a2dd329cc47b323a182fb7961af42abf21e913c9dd7d3331b2f3c2", size = 181810, upload-time = "2025-12-25T03:52:45.095Z" },
+    { url = "https://files.pythonhosted.org/packages/e1/c1/03f6717677f20acd2d690813ec2bbe12a2de305f32c61479c53f7b9413bc/librt-0.7.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:168e04663e126416ba712114050f413ac306759a1791d87b7c11d4428ba75760", size = 175599, upload-time = "2025-12-25T03:52:46.177Z" },
+    { url = "https://files.pythonhosted.org/packages/01/d7/f976ff4c07c59b69bb5eec7e5886d43243075bbef834428124b073471c86/librt-0.7.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:553dc58987d1d853adda8aeadf4db8e29749f0b11877afcc429a9ad892818ae2", size = 196506, upload-time = "2025-12-25T03:52:47.327Z" },
+    { url = "https://files.pythonhosted.org/packages/b7/74/004f068b8888e61b454568b5479f88018fceb14e511ac0609cccee7dd227/librt-0.7.5-cp314-cp314-win32.whl", hash = "sha256:263f4fae9eba277513357c871275b18d14de93fd49bf5e43dc60a97b81ad5eb8", size = 39747, upload-time = "2025-12-25T03:52:48.437Z" },
+    { url = "https://files.pythonhosted.org/packages/37/b1/ea3ec8fcf5f0a00df21f08972af77ad799604a306db58587308067d27af8/librt-0.7.5-cp314-cp314-win_amd64.whl", hash = "sha256:85f485b7471571e99fab4f44eeb327dc0e1f814ada575f3fa85e698417d8a54e", size = 45970, upload-time = "2025-12-25T03:52:49.389Z" },
+    { url = "https://files.pythonhosted.org/packages/5d/30/5e3fb7ac4614a50fc67e6954926137d50ebc27f36419c9963a94f931f649/librt-0.7.5-cp314-cp314-win_arm64.whl", hash = "sha256:49c596cd18e90e58b7caa4d7ca7606049c1802125fcff96b8af73fa5c3870e4d", size = 39075, upload-time = "2025-12-25T03:52:50.395Z" },
+    { url = "https://files.pythonhosted.org/packages/a4/7f/0af0a9306a06c2aabee3a790f5aa560c50ec0a486ab818a572dd3db6c851/librt-0.7.5-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:54d2aef0b0f5056f130981ad45081b278602ff3657fe16c88529f5058038e802", size = 57375, upload-time = "2025-12-25T03:52:51.439Z" },
+    { url = "https://files.pythonhosted.org/packages/57/1f/c85e510baf6572a3d6ef40c742eacedc02973ed2acdb5dba2658751d9af8/librt-0.7.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0b4791202296ad51ac09a3ff58eb49d9da8e3a4009167a6d76ac418a974e5fd4", size = 59234, upload-time = "2025-12-25T03:52:52.687Z" },
+    { url = "https://files.pythonhosted.org/packages/49/b1/bb6535e4250cd18b88d6b18257575a0239fa1609ebba925f55f51ae08e8e/librt-0.7.5-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6e860909fea75baef941ee6436e0453612505883b9d0d87924d4fda27865b9a2", size = 183873, upload-time = "2025-12-25T03:52:53.705Z" },
+    { url = "https://files.pythonhosted.org/packages/8e/49/ad4a138cca46cdaa7f0e15fa912ce3ccb4cc0d4090bfeb8ccc35766fa6d5/librt-0.7.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f02c4337bf271c4f06637f5ff254fad2238c0b8e32a3a480ebb2fc5e26f754a5", size = 194609, upload-time = "2025-12-25T03:52:54.884Z" },
+    { url = "https://files.pythonhosted.org/packages/9c/2d/3b3cb933092d94bb2c1d3c9b503d8775f08d806588c19a91ee4d1495c2a8/librt-0.7.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f7f51ffe59f4556243d3cc82d827bde74765f594fa3ceb80ec4de0c13ccd3416", size = 206777, upload-time = "2025-12-25T03:52:55.969Z" },
+    { url = "https://files.pythonhosted.org/packages/3a/52/6e7611d3d1347812233dabc44abca4c8065ee97b83c9790d7ecc3f782bc8/librt-0.7.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0b7f080ba30601dfa3e3deed3160352273e1b9bc92e652f51103c3e9298f7899", size = 203208, upload-time = "2025-12-25T03:52:57.036Z" },
+    { url = "https://files.pythonhosted.org/packages/27/aa/466ae4654bd2d45903fbf180815d41e3ae8903e5a1861f319f73c960a843/librt-0.7.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:fb565b4219abc8ea2402e61c7ba648a62903831059ed3564fa1245cc245d58d7", size = 196698, upload-time = "2025-12-25T03:52:58.481Z" },
+    { url = "https://files.pythonhosted.org/packages/97/8f/424f7e4525bb26fe0d3e984d1c0810ced95e53be4fd867ad5916776e18a3/librt-0.7.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a3cfb15961e7333ea6ef033dc574af75153b5c230d5ad25fbcd55198f21e0cf", size = 217194, upload-time = "2025-12-25T03:52:59.575Z" },
+    { url = "https://files.pythonhosted.org/packages/9e/33/13a4cb798a171b173f3c94db23adaf13a417130e1493933dc0df0d7fb439/librt-0.7.5-cp314-cp314t-win32.whl", hash = "sha256:118716de5ad6726332db1801bc90fa6d94194cd2e07c1a7822cebf12c496714d", size = 40282, upload-time = "2025-12-25T03:53:01.091Z" },
+    { url = "https://files.pythonhosted.org/packages/5f/f1/62b136301796399d65dad73b580f4509bcbd347dff885a450bff08e80cb6/librt-0.7.5-cp314-cp314t-win_amd64.whl", hash = "sha256:3dd58f7ce20360c6ce0c04f7bd9081c7f9c19fc6129a3c705d0c5a35439f201d", size = 46764, upload-time = "2025-12-25T03:53:02.381Z" },
+    { url = "https://files.pythonhosted.org/packages/49/cb/940431d9410fda74f941f5cd7f0e5a22c63be7b0c10fa98b2b7022b48cb1/librt-0.7.5-cp314-cp314t-win_arm64.whl", hash = "sha256:08153ea537609d11f774d2bfe84af39d50d5c9ca3a4d061d946e0c9d8bce04a1", size = 39728, upload-time = "2025-12-25T03:53:03.306Z" },
+]
+
 [[package]]
 name = "lxml"
 version = "6.0.2"
@@ -827,47 +956,48 @@ wheels = [
 
 [[package]]
 name = "mypy"
-version = "1.18.2"
+version = "1.19.1"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
+    { name = "librt", marker = "platform_python_implementation != 'PyPy'" },
     { name = "mypy-extensions" },
     { name = "pathspec" },
     { name = "tomli", marker = "python_full_version < '3.11'" },
     { name = "typing-extensions" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/c0/77/8f0d0001ffad290cef2f7f216f96c814866248a0b92a722365ed54648e7e/mypy-1.18.2.tar.gz", hash = "sha256:06a398102a5f203d7477b2923dda3634c36727fa5c237d8f859ef90c42a9924b", size = 3448846, upload-time = "2025-09-19T00:11:10.519Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/03/6f/657961a0743cff32e6c0611b63ff1c1970a0b482ace35b069203bf705187/mypy-1.18.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c1eab0cf6294dafe397c261a75f96dc2c31bffe3b944faa24db5def4e2b0f77c", size = 12807973, upload-time = "2025-09-19T00:10:35.282Z" },
-    { url = "https://files.pythonhosted.org/packages/10/e9/420822d4f661f13ca8900f5fa239b40ee3be8b62b32f3357df9a3045a08b/mypy-1.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a780ca61fc239e4865968ebc5240bb3bf610ef59ac398de9a7421b54e4a207e", size = 11896527, upload-time = "2025-09-19T00:10:55.791Z" },
-    { url = "https://files.pythonhosted.org/packages/aa/73/a05b2bbaa7005f4642fcfe40fb73f2b4fb6bb44229bd585b5878e9a87ef8/mypy-1.18.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:448acd386266989ef11662ce3c8011fd2a7b632e0ec7d61a98edd8e27472225b", size = 12507004, upload-time = "2025-09-19T00:11:05.411Z" },
-    { url = "https://files.pythonhosted.org/packages/4f/01/f6e4b9f0d031c11ccbd6f17da26564f3a0f3c4155af344006434b0a05a9d/mypy-1.18.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f9e171c465ad3901dc652643ee4bffa8e9fef4d7d0eece23b428908c77a76a66", size = 13245947, upload-time = "2025-09-19T00:10:46.923Z" },
-    { url = "https://files.pythonhosted.org/packages/d7/97/19727e7499bfa1ae0773d06afd30ac66a58ed7437d940c70548634b24185/mypy-1.18.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:592ec214750bc00741af1f80cbf96b5013d81486b7bb24cb052382c19e40b428", size = 13499217, upload-time = "2025-09-19T00:09:39.472Z" },
-    { url = "https://files.pythonhosted.org/packages/9f/4f/90dc8c15c1441bf31cf0f9918bb077e452618708199e530f4cbd5cede6ff/mypy-1.18.2-cp310-cp310-win_amd64.whl", hash = "sha256:7fb95f97199ea11769ebe3638c29b550b5221e997c63b14ef93d2e971606ebed", size = 9766753, upload-time = "2025-09-19T00:10:49.161Z" },
-    { url = "https://files.pythonhosted.org/packages/88/87/cafd3ae563f88f94eec33f35ff722d043e09832ea8530ef149ec1efbaf08/mypy-1.18.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:807d9315ab9d464125aa9fcf6d84fde6e1dc67da0b6f80e7405506b8ac72bc7f", size = 12731198, upload-time = "2025-09-19T00:09:44.857Z" },
-    { url = "https://files.pythonhosted.org/packages/0f/e0/1e96c3d4266a06d4b0197ace5356d67d937d8358e2ee3ffac71faa843724/mypy-1.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:776bb00de1778caf4db739c6e83919c1d85a448f71979b6a0edd774ea8399341", size = 11817879, upload-time = "2025-09-19T00:09:47.131Z" },
-    { url = "https://files.pythonhosted.org/packages/72/ef/0c9ba89eb03453e76bdac5a78b08260a848c7bfc5d6603634774d9cd9525/mypy-1.18.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1379451880512ffce14505493bd9fe469e0697543717298242574882cf8cdb8d", size = 12427292, upload-time = "2025-09-19T00:10:22.472Z" },
-    { url = "https://files.pythonhosted.org/packages/1a/52/ec4a061dd599eb8179d5411d99775bec2a20542505988f40fc2fee781068/mypy-1.18.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1331eb7fd110d60c24999893320967594ff84c38ac6d19e0a76c5fd809a84c86", size = 13163750, upload-time = "2025-09-19T00:09:51.472Z" },
-    { url = "https://files.pythonhosted.org/packages/c4/5f/2cf2ceb3b36372d51568f2208c021870fe7834cf3186b653ac6446511839/mypy-1.18.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ca30b50a51e7ba93b00422e486cbb124f1c56a535e20eff7b2d6ab72b3b2e37", size = 13351827, upload-time = "2025-09-19T00:09:58.311Z" },
-    { url = "https://files.pythonhosted.org/packages/c8/7d/2697b930179e7277529eaaec1513f8de622818696857f689e4a5432e5e27/mypy-1.18.2-cp311-cp311-win_amd64.whl", hash = "sha256:664dc726e67fa54e14536f6e1224bcfce1d9e5ac02426d2326e2bb4e081d1ce8", size = 9757983, upload-time = "2025-09-19T00:10:09.071Z" },
-    { url = "https://files.pythonhosted.org/packages/07/06/dfdd2bc60c66611dd8335f463818514733bc763e4760dee289dcc33df709/mypy-1.18.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:33eca32dd124b29400c31d7cf784e795b050ace0e1f91b8dc035672725617e34", size = 12908273, upload-time = "2025-09-19T00:10:58.321Z" },
-    { url = "https://files.pythonhosted.org/packages/81/14/6a9de6d13a122d5608e1a04130724caf9170333ac5a924e10f670687d3eb/mypy-1.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3c47adf30d65e89b2dcd2fa32f3aeb5e94ca970d2c15fcb25e297871c8e4764", size = 11920910, upload-time = "2025-09-19T00:10:20.043Z" },
-    { url = "https://files.pythonhosted.org/packages/5f/a9/b29de53e42f18e8cc547e38daa9dfa132ffdc64f7250e353f5c8cdd44bee/mypy-1.18.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d6c838e831a062f5f29d11c9057c6009f60cb294fea33a98422688181fe2893", size = 12465585, upload-time = "2025-09-19T00:10:33.005Z" },
-    { url = "https://files.pythonhosted.org/packages/77/ae/6c3d2c7c61ff21f2bee938c917616c92ebf852f015fb55917fd6e2811db2/mypy-1.18.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01199871b6110a2ce984bde85acd481232d17413868c9807e95c1b0739a58914", size = 13348562, upload-time = "2025-09-19T00:10:11.51Z" },
-    { url = "https://files.pythonhosted.org/packages/4d/31/aec68ab3b4aebdf8f36d191b0685d99faa899ab990753ca0fee60fb99511/mypy-1.18.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a2afc0fa0b0e91b4599ddfe0f91e2c26c2b5a5ab263737e998d6817874c5f7c8", size = 13533296, upload-time = "2025-09-19T00:10:06.568Z" },
-    { url = "https://files.pythonhosted.org/packages/9f/83/abcb3ad9478fca3ebeb6a5358bb0b22c95ea42b43b7789c7fb1297ca44f4/mypy-1.18.2-cp312-cp312-win_amd64.whl", hash = "sha256:d8068d0afe682c7c4897c0f7ce84ea77f6de953262b12d07038f4d296d547074", size = 9828828, upload-time = "2025-09-19T00:10:28.203Z" },
-    { url = "https://files.pythonhosted.org/packages/5f/04/7f462e6fbba87a72bc8097b93f6842499c428a6ff0c81dd46948d175afe8/mypy-1.18.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:07b8b0f580ca6d289e69209ec9d3911b4a26e5abfde32228a288eb79df129fcc", size = 12898728, upload-time = "2025-09-19T00:10:01.33Z" },
-    { url = "https://files.pythonhosted.org/packages/99/5b/61ed4efb64f1871b41fd0b82d29a64640f3516078f6c7905b68ab1ad8b13/mypy-1.18.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ed4482847168439651d3feee5833ccedbf6657e964572706a2adb1f7fa4dfe2e", size = 11910758, upload-time = "2025-09-19T00:10:42.607Z" },
-    { url = "https://files.pythonhosted.org/packages/3c/46/d297d4b683cc89a6e4108c4250a6a6b717f5fa96e1a30a7944a6da44da35/mypy-1.18.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3ad2afadd1e9fea5cf99a45a822346971ede8685cc581ed9cd4d42eaf940986", size = 12475342, upload-time = "2025-09-19T00:11:00.371Z" },
-    { url = "https://files.pythonhosted.org/packages/83/45/4798f4d00df13eae3bfdf726c9244bcb495ab5bd588c0eed93a2f2dd67f3/mypy-1.18.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a431a6f1ef14cf8c144c6b14793a23ec4eae3db28277c358136e79d7d062f62d", size = 13338709, upload-time = "2025-09-19T00:11:03.358Z" },
-    { url = "https://files.pythonhosted.org/packages/d7/09/479f7358d9625172521a87a9271ddd2441e1dab16a09708f056e97007207/mypy-1.18.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7ab28cc197f1dd77a67e1c6f35cd1f8e8b73ed2217e4fc005f9e6a504e46e7ba", size = 13529806, upload-time = "2025-09-19T00:10:26.073Z" },
-    { url = "https://files.pythonhosted.org/packages/71/cf/ac0f2c7e9d0ea3c75cd99dff7aec1c9df4a1376537cb90e4c882267ee7e9/mypy-1.18.2-cp313-cp313-win_amd64.whl", hash = "sha256:0e2785a84b34a72ba55fb5daf079a1003a34c05b22238da94fcae2bbe46f3544", size = 9833262, upload-time = "2025-09-19T00:10:40.035Z" },
-    { url = "https://files.pythonhosted.org/packages/5a/0c/7d5300883da16f0063ae53996358758b2a2df2a09c72a5061fa79a1f5006/mypy-1.18.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:62f0e1e988ad41c2a110edde6c398383a889d95b36b3e60bcf155f5164c4fdce", size = 12893775, upload-time = "2025-09-19T00:10:03.814Z" },
-    { url = "https://files.pythonhosted.org/packages/50/df/2cffbf25737bdb236f60c973edf62e3e7b4ee1c25b6878629e88e2cde967/mypy-1.18.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8795a039bab805ff0c1dfdb8cd3344642c2b99b8e439d057aba30850b8d3423d", size = 11936852, upload-time = "2025-09-19T00:10:51.631Z" },
-    { url = "https://files.pythonhosted.org/packages/be/50/34059de13dd269227fb4a03be1faee6e2a4b04a2051c82ac0a0b5a773c9a/mypy-1.18.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6ca1e64b24a700ab5ce10133f7ccd956a04715463d30498e64ea8715236f9c9c", size = 12480242, upload-time = "2025-09-19T00:11:07.955Z" },
-    { url = "https://files.pythonhosted.org/packages/5b/11/040983fad5132d85914c874a2836252bbc57832065548885b5bb5b0d4359/mypy-1.18.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d924eef3795cc89fecf6bedc6ed32b33ac13e8321344f6ddbf8ee89f706c05cb", size = 13326683, upload-time = "2025-09-19T00:09:55.572Z" },
-    { url = "https://files.pythonhosted.org/packages/e9/ba/89b2901dd77414dd7a8c8729985832a5735053be15b744c18e4586e506ef/mypy-1.18.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20c02215a080e3a2be3aa50506c67242df1c151eaba0dcbc1e4e557922a26075", size = 13514749, upload-time = "2025-09-19T00:10:44.827Z" },
-    { url = "https://files.pythonhosted.org/packages/25/bc/cc98767cffd6b2928ba680f3e5bc969c4152bf7c2d83f92f5a504b92b0eb/mypy-1.18.2-cp314-cp314-win_amd64.whl", hash = "sha256:749b5f83198f1ca64345603118a6f01a4e99ad4bf9d103ddc5a3200cc4614adf", size = 9982959, upload-time = "2025-09-19T00:10:37.344Z" },
-    { url = "https://files.pythonhosted.org/packages/87/e3/be76d87158ebafa0309946c4a73831974d4d6ab4f4ef40c3b53a385a66fd/mypy-1.18.2-py3-none-any.whl", hash = "sha256:22a1748707dd62b58d2ae53562ffc4d7f8bcc727e8ac7cbc69c053ddc874d47e", size = 2352367, upload-time = "2025-09-19T00:10:15.489Z" },
+sdist = { url = "https://files.pythonhosted.org/packages/f5/db/4efed9504bc01309ab9c2da7e352cc223569f05478012b5d9ece38fd44d2/mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba", size = 3582404, upload-time = "2025-12-15T05:03:48.42Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/2f/63/e499890d8e39b1ff2df4c0c6ce5d371b6844ee22b8250687a99fd2f657a8/mypy-1.19.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f05aa3d375b385734388e844bc01733bd33c644ab48e9684faa54e5389775ec", size = 13101333, upload-time = "2025-12-15T05:03:03.28Z" },
+    { url = "https://files.pythonhosted.org/packages/72/4b/095626fc136fba96effc4fd4a82b41d688ab92124f8c4f7564bffe5cf1b0/mypy-1.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:022ea7279374af1a5d78dfcab853fe6a536eebfda4b59deab53cd21f6cd9f00b", size = 12164102, upload-time = "2025-12-15T05:02:33.611Z" },
+    { url = "https://files.pythonhosted.org/packages/0c/5b/952928dd081bf88a83a5ccd49aaecfcd18fd0d2710c7ff07b8fb6f7032b9/mypy-1.19.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee4c11e460685c3e0c64a4c5de82ae143622410950d6be863303a1c4ba0e36d6", size = 12765799, upload-time = "2025-12-15T05:03:28.44Z" },
+    { url = "https://files.pythonhosted.org/packages/2a/0d/93c2e4a287f74ef11a66fb6d49c7a9f05e47b0a4399040e6719b57f500d2/mypy-1.19.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de759aafbae8763283b2ee5869c7255391fbc4de3ff171f8f030b5ec48381b74", size = 13522149, upload-time = "2025-12-15T05:02:36.011Z" },
+    { url = "https://files.pythonhosted.org/packages/7b/0e/33a294b56aaad2b338d203e3a1d8b453637ac36cb278b45005e0901cf148/mypy-1.19.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ab43590f9cd5108f41aacf9fca31841142c786827a74ab7cc8a2eacb634e09a1", size = 13810105, upload-time = "2025-12-15T05:02:40.327Z" },
+    { url = "https://files.pythonhosted.org/packages/0e/fd/3e82603a0cb66b67c5e7abababce6bf1a929ddf67bf445e652684af5c5a0/mypy-1.19.1-cp310-cp310-win_amd64.whl", hash = "sha256:2899753e2f61e571b3971747e302d5f420c3fd09650e1951e99f823bc3089dac", size = 10057200, upload-time = "2025-12-15T05:02:51.012Z" },
+    { url = "https://files.pythonhosted.org/packages/ef/47/6b3ebabd5474d9cdc170d1342fbf9dddc1b0ec13ec90bf9004ee6f391c31/mypy-1.19.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d8dfc6ab58ca7dda47d9237349157500468e404b17213d44fc1cb77bce532288", size = 13028539, upload-time = "2025-12-15T05:03:44.129Z" },
+    { url = "https://files.pythonhosted.org/packages/5c/a6/ac7c7a88a3c9c54334f53a941b765e6ec6c4ebd65d3fe8cdcfbe0d0fd7db/mypy-1.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e3f276d8493c3c97930e354b2595a44a21348b320d859fb4a2b9f66da9ed27ab", size = 12083163, upload-time = "2025-12-15T05:03:37.679Z" },
+    { url = "https://files.pythonhosted.org/packages/67/af/3afa9cf880aa4a2c803798ac24f1d11ef72a0c8079689fac5cfd815e2830/mypy-1.19.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2abb24cf3f17864770d18d673c85235ba52456b36a06b6afc1e07c1fdcd3d0e6", size = 12687629, upload-time = "2025-12-15T05:02:31.526Z" },
+    { url = "https://files.pythonhosted.org/packages/2d/46/20f8a7114a56484ab268b0ab372461cb3a8f7deed31ea96b83a4e4cfcfca/mypy-1.19.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a009ffa5a621762d0c926a078c2d639104becab69e79538a494bcccb62cc0331", size = 13436933, upload-time = "2025-12-15T05:03:15.606Z" },
+    { url = "https://files.pythonhosted.org/packages/5b/f8/33b291ea85050a21f15da910002460f1f445f8007adb29230f0adea279cb/mypy-1.19.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f7cee03c9a2e2ee26ec07479f38ea9c884e301d42c6d43a19d20fb014e3ba925", size = 13661754, upload-time = "2025-12-15T05:02:26.731Z" },
+    { url = "https://files.pythonhosted.org/packages/fd/a3/47cbd4e85bec4335a9cd80cf67dbc02be21b5d4c9c23ad6b95d6c5196bac/mypy-1.19.1-cp311-cp311-win_amd64.whl", hash = "sha256:4b84a7a18f41e167f7995200a1d07a4a6810e89d29859df936f1c3923d263042", size = 10055772, upload-time = "2025-12-15T05:03:26.179Z" },
+    { url = "https://files.pythonhosted.org/packages/06/8a/19bfae96f6615aa8a0604915512e0289b1fad33d5909bf7244f02935d33a/mypy-1.19.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8174a03289288c1f6c46d55cef02379b478bfbc8e358e02047487cad44c6ca1", size = 13206053, upload-time = "2025-12-15T05:03:46.622Z" },
+    { url = "https://files.pythonhosted.org/packages/a5/34/3e63879ab041602154ba2a9f99817bb0c85c4df19a23a1443c8986e4d565/mypy-1.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ffcebe56eb09ff0c0885e750036a095e23793ba6c2e894e7e63f6d89ad51f22e", size = 12219134, upload-time = "2025-12-15T05:03:24.367Z" },
+    { url = "https://files.pythonhosted.org/packages/89/cc/2db6f0e95366b630364e09845672dbee0cbf0bbe753a204b29a944967cd9/mypy-1.19.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b64d987153888790bcdb03a6473d321820597ab8dd9243b27a92153c4fa50fd2", size = 12731616, upload-time = "2025-12-15T05:02:44.725Z" },
+    { url = "https://files.pythonhosted.org/packages/00/be/dd56c1fd4807bc1eba1cf18b2a850d0de7bacb55e158755eb79f77c41f8e/mypy-1.19.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c35d298c2c4bba75feb2195655dfea8124d855dfd7343bf8b8c055421eaf0cf8", size = 13620847, upload-time = "2025-12-15T05:03:39.633Z" },
+    { url = "https://files.pythonhosted.org/packages/6d/42/332951aae42b79329f743bf1da088cd75d8d4d9acc18fbcbd84f26c1af4e/mypy-1.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:34c81968774648ab5ac09c29a375fdede03ba253f8f8287847bd480782f73a6a", size = 13834976, upload-time = "2025-12-15T05:03:08.786Z" },
+    { url = "https://files.pythonhosted.org/packages/6f/63/e7493e5f90e1e085c562bb06e2eb32cae27c5057b9653348d38b47daaecc/mypy-1.19.1-cp312-cp312-win_amd64.whl", hash = "sha256:b10e7c2cd7870ba4ad9b2d8a6102eb5ffc1f16ca35e3de6bfa390c1113029d13", size = 10118104, upload-time = "2025-12-15T05:03:10.834Z" },
+    { url = "https://files.pythonhosted.org/packages/de/9f/a6abae693f7a0c697dbb435aac52e958dc8da44e92e08ba88d2e42326176/mypy-1.19.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e3157c7594ff2ef1634ee058aafc56a82db665c9438fd41b390f3bde1ab12250", size = 13201927, upload-time = "2025-12-15T05:02:29.138Z" },
+    { url = "https://files.pythonhosted.org/packages/9a/a4/45c35ccf6e1c65afc23a069f50e2c66f46bd3798cbe0d680c12d12935caa/mypy-1.19.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdb12f69bcc02700c2b47e070238f42cb87f18c0bc1fc4cdb4fb2bc5fd7a3b8b", size = 12206730, upload-time = "2025-12-15T05:03:01.325Z" },
+    { url = "https://files.pythonhosted.org/packages/05/bb/cdcf89678e26b187650512620eec8368fded4cfd99cfcb431e4cdfd19dec/mypy-1.19.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f859fb09d9583a985be9a493d5cfc5515b56b08f7447759a0c5deaf68d80506e", size = 12724581, upload-time = "2025-12-15T05:03:20.087Z" },
+    { url = "https://files.pythonhosted.org/packages/d1/32/dd260d52babf67bad8e6770f8e1102021877ce0edea106e72df5626bb0ec/mypy-1.19.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9a6538e0415310aad77cb94004ca6482330fece18036b5f360b62c45814c4ef", size = 13616252, upload-time = "2025-12-15T05:02:49.036Z" },
+    { url = "https://files.pythonhosted.org/packages/71/d0/5e60a9d2e3bd48432ae2b454b7ef2b62a960ab51292b1eda2a95edd78198/mypy-1.19.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:da4869fc5e7f62a88f3fe0b5c919d1d9f7ea3cef92d3689de2823fd27e40aa75", size = 13840848, upload-time = "2025-12-15T05:02:55.95Z" },
+    { url = "https://files.pythonhosted.org/packages/98/76/d32051fa65ecf6cc8c6610956473abdc9b4c43301107476ac03559507843/mypy-1.19.1-cp313-cp313-win_amd64.whl", hash = "sha256:016f2246209095e8eda7538944daa1d60e1e8134d98983b9fc1e92c1fc0cb8dd", size = 10135510, upload-time = "2025-12-15T05:02:58.438Z" },
+    { url = "https://files.pythonhosted.org/packages/de/eb/b83e75f4c820c4247a58580ef86fcd35165028f191e7e1ba57128c52782d/mypy-1.19.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1", size = 13199744, upload-time = "2025-12-15T05:03:30.823Z" },
+    { url = "https://files.pythonhosted.org/packages/94/28/52785ab7bfa165f87fcbb61547a93f98bb20e7f82f90f165a1f69bce7b3d/mypy-1.19.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718", size = 12215815, upload-time = "2025-12-15T05:02:42.323Z" },
+    { url = "https://files.pythonhosted.org/packages/0a/c6/bdd60774a0dbfb05122e3e925f2e9e846c009e479dcec4821dad881f5b52/mypy-1.19.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21761006a7f497cb0d4de3d8ef4ca70532256688b0523eee02baf9eec895e27b", size = 12740047, upload-time = "2025-12-15T05:03:33.168Z" },
+    { url = "https://files.pythonhosted.org/packages/32/2a/66ba933fe6c76bd40d1fe916a83f04fed253152f451a877520b3c4a5e41e/mypy-1.19.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045", size = 13601998, upload-time = "2025-12-15T05:03:13.056Z" },
+    { url = "https://files.pythonhosted.org/packages/e3/da/5055c63e377c5c2418760411fd6a63ee2b96cf95397259038756c042574f/mypy-1.19.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:481daf36a4c443332e2ae9c137dfee878fcea781a2e3f895d54bd3002a900957", size = 13807476, upload-time = "2025-12-15T05:03:17.977Z" },
+    { url = "https://files.pythonhosted.org/packages/cd/09/4ebd873390a063176f06b0dbf1f7783dd87bd120eae7727fa4ae4179b685/mypy-1.19.1-cp314-cp314-win_amd64.whl", hash = "sha256:8bb5c6f6d043655e055be9b542aa5f3bdd30e4f3589163e85f93f3640060509f", size = 10281872, upload-time = "2025-12-15T05:03:05.549Z" },
+    { url = "https://files.pythonhosted.org/packages/8d/f4/4ce9a05ce5ded1de3ec1c1d96cf9f9504a04e54ce0ed55cfa38619a32b8d/mypy-1.19.1-py3-none-any.whl", hash = "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247", size = 2471239, upload-time = "2025-12-15T05:03:07.248Z" },
 ]
 
 [[package]]
@@ -947,22 +1077,23 @@ wheels = [
 
 [[package]]
 name = "netutils"
-version = "1.15.0"
+version = "1.15.2"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/4c/f6/bf786ac602db7911edd521f435ac7539e50deaaf997690189b09b579d49a/netutils-1.15.0.tar.gz", hash = "sha256:01d6cc5e527090089f924e4df0f7375213dbe476e0689e636da3b42db99be71f", size = 517296, upload-time = "2025-09-05T17:11:54.391Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/ff/cb/a7fe8abe49d874cc50909d3bb451037dd8e2489e78d2e9da53baca5ab91b/netutils-1.15.2.tar.gz", hash = "sha256:9877d997155ca0c570eb70ec3b382e458f3b8ea209869093c6dca31554fa029e", size = 519059, upload-time = "2025-12-31T19:21:21.096Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/6d/3f/9a66e4e19d564af1fb9397cfc6479f0f65ae90e09b6073db4e83a764247a/netutils-1.15.0-py3-none-any.whl", hash = "sha256:4f45ea9b5ece955d2a35e4ae1530cbaa1806b09b4dedd7ac0cc893d9b9ba1caa", size = 532068, upload-time = "2025-09-05T17:11:53.175Z" },
+    { url = "https://files.pythonhosted.org/packages/ad/78/e2999ac6a64c98a52abb1a471515bbd1696ad2d9149c19a68e03fea2f567/netutils-1.15.2-py3-none-any.whl", hash = "sha256:a79c84ff248c9cd8f872aa195155e08dd2d1395ff695dbac51b7ff32923f0cdf", size = 533673, upload-time = "2025-12-31T19:21:19.386Z" },
 ]
 
 [[package]]
 name = "nornflow"
-version = "0.5.1"
+version = "0.6.0"
 source = { editable = "." }
 dependencies = [
     { name = "jmespath" },
     { name = "nornir" },
     { name = "nornir-utils" },
     { name = "pydantic-serdes" },
+    { name = "pydantic-settings" },
     { name = "pytest" },
     { name = "pyyaml" },
     { name = "tabulate" },
@@ -979,6 +1110,7 @@ dev = [
 ]
 manual-tests = [
     { name = "nornir-napalm" },
+    { name = "nornir-nautobot" },
     { name = "nornir-netbox" },
     { name = "nornir-netmiko" },
 ]
@@ -990,10 +1122,12 @@ requires-dist = [
     { name = "mypy", marker = "extra == 'dev'", specifier = ">=1.15.0" },
     { name = "nornir", specifier = ">=3.5.0" },
     { name = "nornir-napalm", marker = "extra == 'manual-tests'", specifier = ">=0.5.0" },
+    { name = "nornir-nautobot", marker = "extra == 'manual-tests'", specifier = ">=4.0.0" },
     { name = "nornir-netbox", marker = "extra == 'manual-tests'", specifier = ">=0.3.0" },
     { name = "nornir-netmiko", marker = "extra == 'manual-tests'", specifier = ">=1.0.1" },
     { name = "nornir-utils", specifier = ">=0.2.0" },
     { name = "pydantic-serdes", specifier = ">=1.0.3" },
+    { name = "pydantic-settings", specifier = ">=2.12.0" },
     { name = "pytest", specifier = ">=8.3.4" },
     { name = "pytest-cov", marker = "extra == 'dev'", specifier = ">=6.0.0" },
     { name = "pyyaml", specifier = ">=6.0.2" },
@@ -1030,6 +1164,29 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/bf/16/fb7f72cb75adf051ff0049b613c1a2db557772323a70d594b6195549d572/nornir_napalm-0.5.0-py3-none-any.whl", hash = "sha256:1a418bf0f5e38ac65894d474f81b50787dafe0aa1965c4fbd1b86d34d4374418", size = 11971, upload-time = "2024-04-13T04:16:38.621Z" },
 ]
 
+[[package]]
+name = "nornir-nautobot"
+version = "4.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "httpx" },
+    { name = "napalm" },
+    { name = "netmiko" },
+    { name = "netutils" },
+    { name = "nornir" },
+    { name = "nornir-napalm" },
+    { name = "nornir-netmiko" },
+    { name = "nornir-scrapli" },
+    { name = "nornir-utils" },
+    { name = "pynautobot" },
+    { name = "requests" },
+    { name = "scrapli" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b8/7f/e414a4197557395d92069cd59decb64cc194432b7aa759c66c249dd959a3/nornir_nautobot-4.0.0.tar.gz", hash = "sha256:c90a5999b2278da2a550bc5c4ad436eb2beaf7a36bad36f90539a0e4b0d39031", size = 26133, upload-time = "2025-11-14T18:43:09.007Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/61/d8/d827a4b120f196331ff4f9fc946e1faff31a07d839aa1d91d55fcf2e2e8f/nornir_nautobot-4.0.0-py3-none-any.whl", hash = "sha256:cc542b75a6afde232b2af7eed987504c6abf6cbdcc6e74acee9d8b2a60b40e87", size = 34487, upload-time = "2025-11-14T18:43:07.626Z" },
+]
+
 [[package]]
 name = "nornir-netbox"
 version = "0.3.0"
@@ -1055,6 +1212,25 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/13/16/63764c64087f14b2cd66f205da8570dfc13887ec43aa668de0c55a3ff92b/nornir_netmiko-1.0.1-py3-none-any.whl", hash = "sha256:eaee2944ad386b40c0719e8ac393ac63d531f44fb9a07d660bae7de430f12834", size = 11518, upload-time = "2023-12-07T18:18:33.369Z" },
 ]
 
+[[package]]
+name = "nornir-scrapli"
+version = "2025.1.30"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "nornir" },
+    { name = "nornir-utils" },
+    { name = "ntc-templates" },
+    { name = "scrapli" },
+    { name = "scrapli-cfg" },
+    { name = "scrapli-community" },
+    { name = "scrapli-netconf" },
+    { name = "textfsm" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a9/90/39fb7d17ff695c8fd4023f06e004ec76c344ebdc6c4be362423f33c06c7c/nornir_scrapli-2025.1.30.tar.gz", hash = "sha256:b36bbfcf3678e695d54c0df673f9cf662dbd737d9e13170893cd4ed833c7b410", size = 19184, upload-time = "2025-01-31T01:34:56.65Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/00/11/fb2e8f73b205212c6b4abfbdb66d90d500971582eef2e5afaf295007c387/nornir_scrapli-2025.1.30-py3-none-any.whl", hash = "sha256:be178b13257ece024a01c97c798973622138bc780fa607fe8bed70d0dbf2914a", size = 33234, upload-time = "2025-01-31T01:34:55.451Z" },
+]
+
 [[package]]
 name = "nornir-utils"
 version = "0.2.0"
@@ -1115,11 +1291,11 @@ wheels = [
 
 [[package]]
 name = "platformdirs"
-version = "4.5.0"
+version = "4.5.1"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/61/33/9611380c2bdb1225fdef633e2a9610622310fed35ab11dac9620972ee088/platformdirs-4.5.0.tar.gz", hash = "sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312", size = 21632, upload-time = "2025-10-08T17:44:48.791Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/cf/86/0248f086a84f01b37aaec0fa567b397df1a119f73c16f6c7a9aac73ea309/platformdirs-4.5.1.tar.gz", hash = "sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda", size = 21715, upload-time = "2025-12-05T13:52:58.638Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3", size = 18651, upload-time = "2025-10-08T17:44:47.223Z" },
+    { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = "2025-12-05T13:52:56.823Z" },
 ]
 
 [[package]]
@@ -1142,7 +1318,7 @@ wheels = [
 
 [[package]]
 name = "pydantic"
-version = "2.12.2"
+version = "2.12.5"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "annotated-types" },
@@ -1150,123 +1326,127 @@ dependencies = [
     { name = "typing-extensions" },
     { name = "typing-inspection" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/8d/35/d319ed522433215526689bad428a94058b6dd12190ce7ddd78618ac14b28/pydantic-2.12.2.tar.gz", hash = "sha256:7b8fa15b831a4bbde9d5b84028641ac3080a4ca2cbd4a621a661687e741624fd", size = 816358, upload-time = "2025-10-14T15:02:21.842Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/6c/98/468cb649f208a6f1279448e6e5247b37ae79cf5e4041186f1e2ef3d16345/pydantic-2.12.2-py3-none-any.whl", hash = "sha256:25ff718ee909acd82f1ff9b1a4acfd781bb23ab3739adaa7144f19a6a4e231ae", size = 460628, upload-time = "2025-10-14T15:02:19.623Z" },
+    { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" },
 ]
 
 [[package]]
 name = "pydantic-core"
-version = "2.41.4"
+version = "2.41.5"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "typing-extensions" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/df/18/d0944e8eaaa3efd0a91b0f1fc537d3be55ad35091b6a87638211ba691964/pydantic_core-2.41.4.tar.gz", hash = "sha256:70e47929a9d4a1905a67e4b687d5946026390568a8e952b92824118063cee4d5", size = 457557, upload-time = "2025-10-14T10:23:47.909Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/a7/3d/9b8ca77b0f76fcdbf8bc6b72474e264283f461284ca84ac3fde570c6c49a/pydantic_core-2.41.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2442d9a4d38f3411f22eb9dd0912b7cbf4b7d5b6c92c4173b75d3e1ccd84e36e", size = 2111197, upload-time = "2025-10-14T10:19:43.303Z" },
-    { url = "https://files.pythonhosted.org/packages/59/92/b7b0fe6ed4781642232755cb7e56a86e2041e1292f16d9ae410a0ccee5ac/pydantic_core-2.41.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:30a9876226dda131a741afeab2702e2d127209bde3c65a2b8133f428bc5d006b", size = 1917909, upload-time = "2025-10-14T10:19:45.194Z" },
-    { url = "https://files.pythonhosted.org/packages/52/8c/3eb872009274ffa4fb6a9585114e161aa1a0915af2896e2d441642929fe4/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d55bbac04711e2980645af68b97d445cdbcce70e5216de444a6c4b6943ebcccd", size = 1969905, upload-time = "2025-10-14T10:19:46.567Z" },
-    { url = "https://files.pythonhosted.org/packages/f4/21/35adf4a753bcfaea22d925214a0c5b880792e3244731b3f3e6fec0d124f7/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e1d778fb7849a42d0ee5927ab0f7453bf9f85eef8887a546ec87db5ddb178945", size = 2051938, upload-time = "2025-10-14T10:19:48.237Z" },
-    { url = "https://files.pythonhosted.org/packages/7d/d0/cdf7d126825e36d6e3f1eccf257da8954452934ede275a8f390eac775e89/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b65077a4693a98b90ec5ad8f203ad65802a1b9b6d4a7e48066925a7e1606706", size = 2250710, upload-time = "2025-10-14T10:19:49.619Z" },
-    { url = "https://files.pythonhosted.org/packages/2e/1c/af1e6fd5ea596327308f9c8d1654e1285cc3d8de0d584a3c9d7705bf8a7c/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62637c769dee16eddb7686bf421be48dfc2fae93832c25e25bc7242e698361ba", size = 2367445, upload-time = "2025-10-14T10:19:51.269Z" },
-    { url = "https://files.pythonhosted.org/packages/d3/81/8cece29a6ef1b3a92f956ea6da6250d5b2d2e7e4d513dd3b4f0c7a83dfea/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dfe3aa529c8f501babf6e502936b9e8d4698502b2cfab41e17a028d91b1ac7b", size = 2072875, upload-time = "2025-10-14T10:19:52.671Z" },
-    { url = "https://files.pythonhosted.org/packages/e3/37/a6a579f5fc2cd4d5521284a0ab6a426cc6463a7b3897aeb95b12f1ba607b/pydantic_core-2.41.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca2322da745bf2eeb581fc9ea3bbb31147702163ccbcbf12a3bb630e4bf05e1d", size = 2191329, upload-time = "2025-10-14T10:19:54.214Z" },
-    { url = "https://files.pythonhosted.org/packages/ae/03/505020dc5c54ec75ecba9f41119fd1e48f9e41e4629942494c4a8734ded1/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e8cd3577c796be7231dcf80badcf2e0835a46665eaafd8ace124d886bab4d700", size = 2151658, upload-time = "2025-10-14T10:19:55.843Z" },
-    { url = "https://files.pythonhosted.org/packages/cb/5d/2c0d09fb53aa03bbd2a214d89ebfa6304be7df9ed86ee3dc7770257f41ee/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:1cae8851e174c83633f0833e90636832857297900133705ee158cf79d40f03e6", size = 2316777, upload-time = "2025-10-14T10:19:57.607Z" },
-    { url = "https://files.pythonhosted.org/packages/ea/4b/c2c9c8f5e1f9c864b57d08539d9d3db160e00491c9f5ee90e1bfd905e644/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a26d950449aae348afe1ac8be5525a00ae4235309b729ad4d3399623125b43c9", size = 2320705, upload-time = "2025-10-14T10:19:59.016Z" },
-    { url = "https://files.pythonhosted.org/packages/28/c3/a74c1c37f49c0a02c89c7340fafc0ba816b29bd495d1a31ce1bdeacc6085/pydantic_core-2.41.4-cp310-cp310-win32.whl", hash = "sha256:0cf2a1f599efe57fa0051312774280ee0f650e11152325e41dfd3018ef2c1b57", size = 1975464, upload-time = "2025-10-14T10:20:00.581Z" },
-    { url = "https://files.pythonhosted.org/packages/d6/23/5dd5c1324ba80303368f7569e2e2e1a721c7d9eb16acb7eb7b7f85cb1be2/pydantic_core-2.41.4-cp310-cp310-win_amd64.whl", hash = "sha256:a8c2e340d7e454dc3340d3d2e8f23558ebe78c98aa8f68851b04dcb7bc37abdc", size = 2024497, upload-time = "2025-10-14T10:20:03.018Z" },
-    { url = "https://files.pythonhosted.org/packages/62/4c/f6cbfa1e8efacd00b846764e8484fe173d25b8dab881e277a619177f3384/pydantic_core-2.41.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:28ff11666443a1a8cf2a044d6a545ebffa8382b5f7973f22c36109205e65dc80", size = 2109062, upload-time = "2025-10-14T10:20:04.486Z" },
-    { url = "https://files.pythonhosted.org/packages/21/f8/40b72d3868896bfcd410e1bd7e516e762d326201c48e5b4a06446f6cf9e8/pydantic_core-2.41.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:61760c3925d4633290292bad462e0f737b840508b4f722247d8729684f6539ae", size = 1916301, upload-time = "2025-10-14T10:20:06.857Z" },
-    { url = "https://files.pythonhosted.org/packages/94/4d/d203dce8bee7faeca791671c88519969d98d3b4e8f225da5b96dad226fc8/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eae547b7315d055b0de2ec3965643b0ab82ad0106a7ffd29615ee9f266a02827", size = 1968728, upload-time = "2025-10-14T10:20:08.353Z" },
-    { url = "https://files.pythonhosted.org/packages/65/f5/6a66187775df87c24d526985b3a5d78d861580ca466fbd9d4d0e792fcf6c/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef9ee5471edd58d1fcce1c80ffc8783a650e3e3a193fe90d52e43bb4d87bff1f", size = 2050238, upload-time = "2025-10-14T10:20:09.766Z" },
-    { url = "https://files.pythonhosted.org/packages/5e/b9/78336345de97298cf53236b2f271912ce11f32c1e59de25a374ce12f9cce/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15dd504af121caaf2c95cb90c0ebf71603c53de98305621b94da0f967e572def", size = 2249424, upload-time = "2025-10-14T10:20:11.732Z" },
-    { url = "https://files.pythonhosted.org/packages/99/bb/a4584888b70ee594c3d374a71af5075a68654d6c780369df269118af7402/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a926768ea49a8af4d36abd6a8968b8790f7f76dd7cbd5a4c180db2b4ac9a3a2", size = 2366047, upload-time = "2025-10-14T10:20:13.647Z" },
-    { url = "https://files.pythonhosted.org/packages/5f/8d/17fc5de9d6418e4d2ae8c675f905cdafdc59d3bf3bf9c946b7ab796a992a/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6916b9b7d134bff5440098a4deb80e4cb623e68974a87883299de9124126c2a8", size = 2071163, upload-time = "2025-10-14T10:20:15.307Z" },
-    { url = "https://files.pythonhosted.org/packages/54/e7/03d2c5c0b8ed37a4617430db68ec5e7dbba66358b629cd69e11b4d564367/pydantic_core-2.41.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cf90535979089df02e6f17ffd076f07237efa55b7343d98760bde8743c4b265", size = 2190585, upload-time = "2025-10-14T10:20:17.3Z" },
-    { url = "https://files.pythonhosted.org/packages/be/fc/15d1c9fe5ad9266a5897d9b932b7f53d7e5cfc800573917a2c5d6eea56ec/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7533c76fa647fade2d7ec75ac5cc079ab3f34879626dae5689b27790a6cf5a5c", size = 2150109, upload-time = "2025-10-14T10:20:19.143Z" },
-    { url = "https://files.pythonhosted.org/packages/26/ef/e735dd008808226c83ba56972566138665b71477ad580fa5a21f0851df48/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:37e516bca9264cbf29612539801ca3cd5d1be465f940417b002905e6ed79d38a", size = 2315078, upload-time = "2025-10-14T10:20:20.742Z" },
-    { url = "https://files.pythonhosted.org/packages/90/00/806efdcf35ff2ac0f938362350cd9827b8afb116cc814b6b75cf23738c7c/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0c19cb355224037c83642429b8ce261ae108e1c5fbf5c028bac63c77b0f8646e", size = 2318737, upload-time = "2025-10-14T10:20:22.306Z" },
-    { url = "https://files.pythonhosted.org/packages/41/7e/6ac90673fe6cb36621a2283552897838c020db343fa86e513d3f563b196f/pydantic_core-2.41.4-cp311-cp311-win32.whl", hash = "sha256:09c2a60e55b357284b5f31f5ab275ba9f7f70b7525e18a132ec1f9160b4f1f03", size = 1974160, upload-time = "2025-10-14T10:20:23.817Z" },
-    { url = "https://files.pythonhosted.org/packages/e0/9d/7c5e24ee585c1f8b6356e1d11d40ab807ffde44d2db3b7dfd6d20b09720e/pydantic_core-2.41.4-cp311-cp311-win_amd64.whl", hash = "sha256:711156b6afb5cb1cb7c14a2cc2c4a8b4c717b69046f13c6b332d8a0a8f41ca3e", size = 2021883, upload-time = "2025-10-14T10:20:25.48Z" },
-    { url = "https://files.pythonhosted.org/packages/33/90/5c172357460fc28b2871eb4a0fb3843b136b429c6fa827e4b588877bf115/pydantic_core-2.41.4-cp311-cp311-win_arm64.whl", hash = "sha256:6cb9cf7e761f4f8a8589a45e49ed3c0d92d1d696a45a6feaee8c904b26efc2db", size = 1968026, upload-time = "2025-10-14T10:20:27.039Z" },
-    { url = "https://files.pythonhosted.org/packages/e9/81/d3b3e95929c4369d30b2a66a91db63c8ed0a98381ae55a45da2cd1cc1288/pydantic_core-2.41.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ab06d77e053d660a6faaf04894446df7b0a7e7aba70c2797465a0a1af00fc887", size = 2099043, upload-time = "2025-10-14T10:20:28.561Z" },
-    { url = "https://files.pythonhosted.org/packages/58/da/46fdac49e6717e3a94fc9201403e08d9d61aa7a770fab6190b8740749047/pydantic_core-2.41.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c53ff33e603a9c1179a9364b0a24694f183717b2e0da2b5ad43c316c956901b2", size = 1910699, upload-time = "2025-10-14T10:20:30.217Z" },
-    { url = "https://files.pythonhosted.org/packages/1e/63/4d948f1b9dd8e991a5a98b77dd66c74641f5f2e5225fee37994b2e07d391/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:304c54176af2c143bd181d82e77c15c41cbacea8872a2225dd37e6544dce9999", size = 1952121, upload-time = "2025-10-14T10:20:32.246Z" },
-    { url = "https://files.pythonhosted.org/packages/b2/a7/e5fc60a6f781fc634ecaa9ecc3c20171d238794cef69ae0af79ac11b89d7/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025ba34a4cf4fb32f917d5d188ab5e702223d3ba603be4d8aca2f82bede432a4", size = 2041590, upload-time = "2025-10-14T10:20:34.332Z" },
-    { url = "https://files.pythonhosted.org/packages/70/69/dce747b1d21d59e85af433428978a1893c6f8a7068fa2bb4a927fba7a5ff/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9f5f30c402ed58f90c70e12eff65547d3ab74685ffe8283c719e6bead8ef53f", size = 2219869, upload-time = "2025-10-14T10:20:35.965Z" },
-    { url = "https://files.pythonhosted.org/packages/83/6a/c070e30e295403bf29c4df1cb781317b6a9bac7cd07b8d3acc94d501a63c/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd96e5d15385d301733113bcaa324c8bcf111275b7675a9c6e88bfb19fc05e3b", size = 2345169, upload-time = "2025-10-14T10:20:37.627Z" },
-    { url = "https://files.pythonhosted.org/packages/f0/83/06d001f8043c336baea7fd202a9ac7ad71f87e1c55d8112c50b745c40324/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f348cbb44fae6e9653c1055db7e29de67ea6a9ca03a5fa2c2e11a47cff0e47", size = 2070165, upload-time = "2025-10-14T10:20:39.246Z" },
-    { url = "https://files.pythonhosted.org/packages/14/0a/e567c2883588dd12bcbc110232d892cf385356f7c8a9910311ac997ab715/pydantic_core-2.41.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec22626a2d14620a83ca583c6f5a4080fa3155282718b6055c2ea48d3ef35970", size = 2189067, upload-time = "2025-10-14T10:20:41.015Z" },
-    { url = "https://files.pythonhosted.org/packages/f4/1d/3d9fca34273ba03c9b1c5289f7618bc4bd09c3ad2289b5420481aa051a99/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a95d4590b1f1a43bf33ca6d647b990a88f4a3824a8c4572c708f0b45a5290ed", size = 2132997, upload-time = "2025-10-14T10:20:43.106Z" },
-    { url = "https://files.pythonhosted.org/packages/52/70/d702ef7a6cd41a8afc61f3554922b3ed8d19dd54c3bd4bdbfe332e610827/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:f9672ab4d398e1b602feadcffcdd3af44d5f5e6ddc15bc7d15d376d47e8e19f8", size = 2307187, upload-time = "2025-10-14T10:20:44.849Z" },
-    { url = "https://files.pythonhosted.org/packages/68/4c/c06be6e27545d08b802127914156f38d10ca287a9e8489342793de8aae3c/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:84d8854db5f55fead3b579f04bda9a36461dab0730c5d570e1526483e7bb8431", size = 2305204, upload-time = "2025-10-14T10:20:46.781Z" },
-    { url = "https://files.pythonhosted.org/packages/b0/e5/35ae4919bcd9f18603419e23c5eaf32750224a89d41a8df1a3704b69f77e/pydantic_core-2.41.4-cp312-cp312-win32.whl", hash = "sha256:9be1c01adb2ecc4e464392c36d17f97e9110fbbc906bcbe1c943b5b87a74aabd", size = 1972536, upload-time = "2025-10-14T10:20:48.39Z" },
-    { url = "https://files.pythonhosted.org/packages/1e/c2/49c5bb6d2a49eb2ee3647a93e3dae7080c6409a8a7558b075027644e879c/pydantic_core-2.41.4-cp312-cp312-win_amd64.whl", hash = "sha256:d682cf1d22bab22a5be08539dca3d1593488a99998f9f412137bc323179067ff", size = 2031132, upload-time = "2025-10-14T10:20:50.421Z" },
-    { url = "https://files.pythonhosted.org/packages/06/23/936343dbcba6eec93f73e95eb346810fc732f71ba27967b287b66f7b7097/pydantic_core-2.41.4-cp312-cp312-win_arm64.whl", hash = "sha256:833eebfd75a26d17470b58768c1834dfc90141b7afc6eb0429c21fc5a21dcfb8", size = 1969483, upload-time = "2025-10-14T10:20:52.35Z" },
-    { url = "https://files.pythonhosted.org/packages/13/d0/c20adabd181a029a970738dfe23710b52a31f1258f591874fcdec7359845/pydantic_core-2.41.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:85e050ad9e5f6fe1004eec65c914332e52f429bc0ae12d6fa2092407a462c746", size = 2105688, upload-time = "2025-10-14T10:20:54.448Z" },
-    { url = "https://files.pythonhosted.org/packages/00/b6/0ce5c03cec5ae94cca220dfecddc453c077d71363b98a4bbdb3c0b22c783/pydantic_core-2.41.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7393f1d64792763a48924ba31d1e44c2cfbc05e3b1c2c9abb4ceeadd912cced", size = 1910807, upload-time = "2025-10-14T10:20:56.115Z" },
-    { url = "https://files.pythonhosted.org/packages/68/3e/800d3d02c8beb0b5c069c870cbb83799d085debf43499c897bb4b4aaff0d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94dab0940b0d1fb28bcab847adf887c66a27a40291eedf0b473be58761c9799a", size = 1956669, upload-time = "2025-10-14T10:20:57.874Z" },
-    { url = "https://files.pythonhosted.org/packages/60/a4/24271cc71a17f64589be49ab8bd0751f6a0a03046c690df60989f2f95c2c/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de7c42f897e689ee6f9e93c4bec72b99ae3b32a2ade1c7e4798e690ff5246e02", size = 2051629, upload-time = "2025-10-14T10:21:00.006Z" },
-    { url = "https://files.pythonhosted.org/packages/68/de/45af3ca2f175d91b96bfb62e1f2d2f1f9f3b14a734afe0bfeff079f78181/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:664b3199193262277b8b3cd1e754fb07f2c6023289c815a1e1e8fb415cb247b1", size = 2224049, upload-time = "2025-10-14T10:21:01.801Z" },
-    { url = "https://files.pythonhosted.org/packages/af/8f/ae4e1ff84672bf869d0a77af24fd78387850e9497753c432875066b5d622/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95b253b88f7d308b1c0b417c4624f44553ba4762816f94e6986819b9c273fb2", size = 2342409, upload-time = "2025-10-14T10:21:03.556Z" },
-    { url = "https://files.pythonhosted.org/packages/18/62/273dd70b0026a085c7b74b000394e1ef95719ea579c76ea2f0cc8893736d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1351f5bbdbbabc689727cb91649a00cb9ee7203e0a6e54e9f5ba9e22e384b84", size = 2069635, upload-time = "2025-10-14T10:21:05.385Z" },
-    { url = "https://files.pythonhosted.org/packages/30/03/cf485fff699b4cdaea469bc481719d3e49f023241b4abb656f8d422189fc/pydantic_core-2.41.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1affa4798520b148d7182da0615d648e752de4ab1a9566b7471bc803d88a062d", size = 2194284, upload-time = "2025-10-14T10:21:07.122Z" },
-    { url = "https://files.pythonhosted.org/packages/f9/7e/c8e713db32405dfd97211f2fc0a15d6bf8adb7640f3d18544c1f39526619/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7b74e18052fea4aa8dea2fb7dbc23d15439695da6cbe6cfc1b694af1115df09d", size = 2137566, upload-time = "2025-10-14T10:21:08.981Z" },
-    { url = "https://files.pythonhosted.org/packages/04/f7/db71fd4cdccc8b75990f79ccafbbd66757e19f6d5ee724a6252414483fb4/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:285b643d75c0e30abda9dc1077395624f314a37e3c09ca402d4015ef5979f1a2", size = 2316809, upload-time = "2025-10-14T10:21:10.805Z" },
-    { url = "https://files.pythonhosted.org/packages/76/63/a54973ddb945f1bca56742b48b144d85c9fc22f819ddeb9f861c249d5464/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f52679ff4218d713b3b33f88c89ccbf3a5c2c12ba665fb80ccc4192b4608dbab", size = 2311119, upload-time = "2025-10-14T10:21:12.583Z" },
-    { url = "https://files.pythonhosted.org/packages/f8/03/5d12891e93c19218af74843a27e32b94922195ded2386f7b55382f904d2f/pydantic_core-2.41.4-cp313-cp313-win32.whl", hash = "sha256:ecde6dedd6fff127c273c76821bb754d793be1024bc33314a120f83a3c69460c", size = 1981398, upload-time = "2025-10-14T10:21:14.584Z" },
-    { url = "https://files.pythonhosted.org/packages/be/d8/fd0de71f39db91135b7a26996160de71c073d8635edfce8b3c3681be0d6d/pydantic_core-2.41.4-cp313-cp313-win_amd64.whl", hash = "sha256:d081a1f3800f05409ed868ebb2d74ac39dd0c1ff6c035b5162356d76030736d4", size = 2030735, upload-time = "2025-10-14T10:21:16.432Z" },
-    { url = "https://files.pythonhosted.org/packages/72/86/c99921c1cf6650023c08bfab6fe2d7057a5142628ef7ccfa9921f2dda1d5/pydantic_core-2.41.4-cp313-cp313-win_arm64.whl", hash = "sha256:f8e49c9c364a7edcbe2a310f12733aad95b022495ef2a8d653f645e5d20c1564", size = 1973209, upload-time = "2025-10-14T10:21:18.213Z" },
-    { url = "https://files.pythonhosted.org/packages/36/0d/b5706cacb70a8414396efdda3d72ae0542e050b591119e458e2490baf035/pydantic_core-2.41.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ed97fd56a561f5eb5706cebe94f1ad7c13b84d98312a05546f2ad036bafe87f4", size = 1877324, upload-time = "2025-10-14T10:21:20.363Z" },
-    { url = "https://files.pythonhosted.org/packages/de/2d/cba1fa02cfdea72dfb3a9babb067c83b9dff0bbcb198368e000a6b756ea7/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a870c307bf1ee91fc58a9a61338ff780d01bfae45922624816878dce784095d2", size = 1884515, upload-time = "2025-10-14T10:21:22.339Z" },
-    { url = "https://files.pythonhosted.org/packages/07/ea/3df927c4384ed9b503c9cc2d076cf983b4f2adb0c754578dfb1245c51e46/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25e97bc1f5f8f7985bdc2335ef9e73843bb561eb1fa6831fdfc295c1c2061cf", size = 2042819, upload-time = "2025-10-14T10:21:26.683Z" },
-    { url = "https://files.pythonhosted.org/packages/6a/ee/df8e871f07074250270a3b1b82aad4cd0026b588acd5d7d3eb2fcb1471a3/pydantic_core-2.41.4-cp313-cp313t-win_amd64.whl", hash = "sha256:d405d14bea042f166512add3091c1af40437c2e7f86988f3915fabd27b1e9cd2", size = 1995866, upload-time = "2025-10-14T10:21:28.951Z" },
-    { url = "https://files.pythonhosted.org/packages/fc/de/b20f4ab954d6d399499c33ec4fafc46d9551e11dc1858fb7f5dca0748ceb/pydantic_core-2.41.4-cp313-cp313t-win_arm64.whl", hash = "sha256:19f3684868309db5263a11bace3c45d93f6f24afa2ffe75a647583df22a2ff89", size = 1970034, upload-time = "2025-10-14T10:21:30.869Z" },
-    { url = "https://files.pythonhosted.org/packages/54/28/d3325da57d413b9819365546eb9a6e8b7cbd9373d9380efd5f74326143e6/pydantic_core-2.41.4-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:e9205d97ed08a82ebb9a307e92914bb30e18cdf6f6b12ca4bedadb1588a0bfe1", size = 2102022, upload-time = "2025-10-14T10:21:32.809Z" },
-    { url = "https://files.pythonhosted.org/packages/9e/24/b58a1bc0d834bf1acc4361e61233ee217169a42efbdc15a60296e13ce438/pydantic_core-2.41.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:82df1f432b37d832709fbcc0e24394bba04a01b6ecf1ee87578145c19cde12ac", size = 1905495, upload-time = "2025-10-14T10:21:34.812Z" },
-    { url = "https://files.pythonhosted.org/packages/fb/a4/71f759cc41b7043e8ecdaab81b985a9b6cad7cec077e0b92cff8b71ecf6b/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3b4cc4539e055cfa39a3763c939f9d409eb40e85813257dcd761985a108554", size = 1956131, upload-time = "2025-10-14T10:21:36.924Z" },
-    { url = "https://files.pythonhosted.org/packages/b0/64/1e79ac7aa51f1eec7c4cda8cbe456d5d09f05fdd68b32776d72168d54275/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1eb1754fce47c63d2ff57fdb88c351a6c0150995890088b33767a10218eaa4e", size = 2052236, upload-time = "2025-10-14T10:21:38.927Z" },
-    { url = "https://files.pythonhosted.org/packages/e9/e3/a3ffc363bd4287b80f1d43dc1c28ba64831f8dfc237d6fec8f2661138d48/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6ab5ab30ef325b443f379ddb575a34969c333004fca5a1daa0133a6ffaad616", size = 2223573, upload-time = "2025-10-14T10:21:41.574Z" },
-    { url = "https://files.pythonhosted.org/packages/28/27/78814089b4d2e684a9088ede3790763c64693c3d1408ddc0a248bc789126/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31a41030b1d9ca497634092b46481b937ff9397a86f9f51bd41c4767b6fc04af", size = 2342467, upload-time = "2025-10-14T10:21:44.018Z" },
-    { url = "https://files.pythonhosted.org/packages/92/97/4de0e2a1159cb85ad737e03306717637842c88c7fd6d97973172fb183149/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a44ac1738591472c3d020f61c6df1e4015180d6262ebd39bf2aeb52571b60f12", size = 2063754, upload-time = "2025-10-14T10:21:46.466Z" },
-    { url = "https://files.pythonhosted.org/packages/0f/50/8cb90ce4b9efcf7ae78130afeb99fd1c86125ccdf9906ef64b9d42f37c25/pydantic_core-2.41.4-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d72f2b5e6e82ab8f94ea7d0d42f83c487dc159c5240d8f83beae684472864e2d", size = 2196754, upload-time = "2025-10-14T10:21:48.486Z" },
-    { url = "https://files.pythonhosted.org/packages/34/3b/ccdc77af9cd5082723574a1cc1bcae7a6acacc829d7c0a06201f7886a109/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c4d1e854aaf044487d31143f541f7aafe7b482ae72a022c664b2de2e466ed0ad", size = 2137115, upload-time = "2025-10-14T10:21:50.63Z" },
-    { url = "https://files.pythonhosted.org/packages/ca/ba/e7c7a02651a8f7c52dc2cff2b64a30c313e3b57c7d93703cecea76c09b71/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:b568af94267729d76e6ee5ececda4e283d07bbb28e8148bb17adad93d025d25a", size = 2317400, upload-time = "2025-10-14T10:21:52.959Z" },
-    { url = "https://files.pythonhosted.org/packages/2c/ba/6c533a4ee8aec6b812c643c49bb3bd88d3f01e3cebe451bb85512d37f00f/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:6d55fb8b1e8929b341cc313a81a26e0d48aa3b519c1dbaadec3a6a2b4fcad025", size = 2312070, upload-time = "2025-10-14T10:21:55.419Z" },
-    { url = "https://files.pythonhosted.org/packages/22/ae/f10524fcc0ab8d7f96cf9a74c880243576fd3e72bd8ce4f81e43d22bcab7/pydantic_core-2.41.4-cp314-cp314-win32.whl", hash = "sha256:5b66584e549e2e32a1398df11da2e0a7eff45d5c2d9db9d5667c5e6ac764d77e", size = 1982277, upload-time = "2025-10-14T10:21:57.474Z" },
-    { url = "https://files.pythonhosted.org/packages/b4/dc/e5aa27aea1ad4638f0c3fb41132f7eb583bd7420ee63204e2d4333a3bbf9/pydantic_core-2.41.4-cp314-cp314-win_amd64.whl", hash = "sha256:557a0aab88664cc552285316809cab897716a372afaf8efdbef756f8b890e894", size = 2024608, upload-time = "2025-10-14T10:21:59.557Z" },
-    { url = "https://files.pythonhosted.org/packages/3e/61/51d89cc2612bd147198e120a13f150afbf0bcb4615cddb049ab10b81b79e/pydantic_core-2.41.4-cp314-cp314-win_arm64.whl", hash = "sha256:3f1ea6f48a045745d0d9f325989d8abd3f1eaf47dd00485912d1a3a63c623a8d", size = 1967614, upload-time = "2025-10-14T10:22:01.847Z" },
-    { url = "https://files.pythonhosted.org/packages/0d/c2/472f2e31b95eff099961fa050c376ab7156a81da194f9edb9f710f68787b/pydantic_core-2.41.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6c1fe4c5404c448b13188dd8bd2ebc2bdd7e6727fa61ff481bcc2cca894018da", size = 1876904, upload-time = "2025-10-14T10:22:04.062Z" },
-    { url = "https://files.pythonhosted.org/packages/4a/07/ea8eeb91173807ecdae4f4a5f4b150a520085b35454350fc219ba79e66a3/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:523e7da4d43b113bf8e7b49fa4ec0c35bf4fe66b2230bfc5c13cc498f12c6c3e", size = 1882538, upload-time = "2025-10-14T10:22:06.39Z" },
-    { url = "https://files.pythonhosted.org/packages/1e/29/b53a9ca6cd366bfc928823679c6a76c7a4c69f8201c0ba7903ad18ebae2f/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5729225de81fb65b70fdb1907fcf08c75d498f4a6f15af005aabb1fdadc19dfa", size = 2041183, upload-time = "2025-10-14T10:22:08.812Z" },
-    { url = "https://files.pythonhosted.org/packages/c7/3d/f8c1a371ceebcaf94d6dd2d77c6cf4b1c078e13a5837aee83f760b4f7cfd/pydantic_core-2.41.4-cp314-cp314t-win_amd64.whl", hash = "sha256:de2cfbb09e88f0f795fd90cf955858fc2c691df65b1f21f0aa00b99f3fbc661d", size = 1993542, upload-time = "2025-10-14T10:22:11.332Z" },
-    { url = "https://files.pythonhosted.org/packages/8a/ac/9fc61b4f9d079482a290afe8d206b8f490e9fd32d4fc03ed4fc698214e01/pydantic_core-2.41.4-cp314-cp314t-win_arm64.whl", hash = "sha256:d34f950ae05a83e0ede899c595f312ca976023ea1db100cd5aa188f7005e3ab0", size = 1973897, upload-time = "2025-10-14T10:22:13.444Z" },
-    { url = "https://files.pythonhosted.org/packages/b0/12/5ba58daa7f453454464f92b3ca7b9d7c657d8641c48e370c3ebc9a82dd78/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:a1b2cfec3879afb742a7b0bcfa53e4f22ba96571c9e54d6a3afe1052d17d843b", size = 2122139, upload-time = "2025-10-14T10:22:47.288Z" },
-    { url = "https://files.pythonhosted.org/packages/21/fb/6860126a77725c3108baecd10fd3d75fec25191d6381b6eb2ac660228eac/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:d175600d975b7c244af6eb9c9041f10059f20b8bbffec9e33fdd5ee3f67cdc42", size = 1936674, upload-time = "2025-10-14T10:22:49.555Z" },
-    { url = "https://files.pythonhosted.org/packages/de/be/57dcaa3ed595d81f8757e2b44a38240ac5d37628bce25fb20d02c7018776/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f184d657fa4947ae5ec9c47bd7e917730fa1cbb78195037e32dcbab50aca5ee", size = 1956398, upload-time = "2025-10-14T10:22:52.19Z" },
-    { url = "https://files.pythonhosted.org/packages/2f/1d/679a344fadb9695f1a6a294d739fbd21d71fa023286daeea8c0ed49e7c2b/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed810568aeffed3edc78910af32af911c835cc39ebbfacd1f0ab5dd53028e5c", size = 2138674, upload-time = "2025-10-14T10:22:54.499Z" },
-    { url = "https://files.pythonhosted.org/packages/c4/48/ae937e5a831b7c0dc646b2ef788c27cd003894882415300ed21927c21efa/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:4f5d640aeebb438517150fdeec097739614421900e4a08db4a3ef38898798537", size = 2112087, upload-time = "2025-10-14T10:22:56.818Z" },
-    { url = "https://files.pythonhosted.org/packages/5e/db/6db8073e3d32dae017da7e0d16a9ecb897d0a4d92e00634916e486097961/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:4a9ab037b71927babc6d9e7fc01aea9e66dc2a4a34dff06ef0724a4049629f94", size = 1920387, upload-time = "2025-10-14T10:22:59.342Z" },
-    { url = "https://files.pythonhosted.org/packages/0d/c1/dd3542d072fcc336030d66834872f0328727e3b8de289c662faa04aa270e/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4dab9484ec605c3016df9ad4fd4f9a390bc5d816a3b10c6550f8424bb80b18c", size = 1951495, upload-time = "2025-10-14T10:23:02.089Z" },
-    { url = "https://files.pythonhosted.org/packages/2b/c6/db8d13a1f8ab3f1eb08c88bd00fd62d44311e3456d1e85c0e59e0a0376e7/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8a5028425820731d8c6c098ab642d7b8b999758e24acae03ed38a66eca8335", size = 2139008, upload-time = "2025-10-14T10:23:04.539Z" },
-    { url = "https://files.pythonhosted.org/packages/5d/d4/912e976a2dd0b49f31c98a060ca90b353f3b73ee3ea2fd0030412f6ac5ec/pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e5ab4fc177dd41536b3c32b2ea11380dd3d4619a385860621478ac2d25ceb00", size = 2106739, upload-time = "2025-10-14T10:23:06.934Z" },
-    { url = "https://files.pythonhosted.org/packages/71/f0/66ec5a626c81eba326072d6ee2b127f8c139543f1bf609b4842978d37833/pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3d88d0054d3fa11ce936184896bed3c1c5441d6fa483b498fac6a5d0dd6f64a9", size = 1932549, upload-time = "2025-10-14T10:23:09.24Z" },
-    { url = "https://files.pythonhosted.org/packages/c4/af/625626278ca801ea0a658c2dcf290dc9f21bb383098e99e7c6a029fccfc0/pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b2a054a8725f05b4b6503357e0ac1c4e8234ad3b0c2ac130d6ffc66f0e170e2", size = 2135093, upload-time = "2025-10-14T10:23:11.626Z" },
-    { url = "https://files.pythonhosted.org/packages/20/f6/2fba049f54e0f4975fef66be654c597a1d005320fa141863699180c7697d/pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0d9db5a161c99375a0c68c058e227bee1d89303300802601d76a3d01f74e258", size = 2187971, upload-time = "2025-10-14T10:23:14.437Z" },
-    { url = "https://files.pythonhosted.org/packages/0e/80/65ab839a2dfcd3b949202f9d920c34f9de5a537c3646662bdf2f7d999680/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:6273ea2c8ffdac7b7fda2653c49682db815aebf4a89243a6feccf5e36c18c347", size = 2147939, upload-time = "2025-10-14T10:23:16.831Z" },
-    { url = "https://files.pythonhosted.org/packages/44/58/627565d3d182ce6dfda18b8e1c841eede3629d59c9d7cbc1e12a03aeb328/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:4c973add636efc61de22530b2ef83a65f39b6d6f656df97f678720e20de26caa", size = 2311400, upload-time = "2025-10-14T10:23:19.234Z" },
-    { url = "https://files.pythonhosted.org/packages/24/06/8a84711162ad5a5f19a88cead37cca81b4b1f294f46260ef7334ae4f24d3/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b69d1973354758007f46cf2d44a4f3d0933f10b6dc9bf15cf1356e037f6f731a", size = 2316840, upload-time = "2025-10-14T10:23:21.738Z" },
-    { url = "https://files.pythonhosted.org/packages/aa/8b/b7bb512a4682a2f7fbfae152a755d37351743900226d29bd953aaf870eaa/pydantic_core-2.41.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3619320641fd212aaf5997b6ca505e97540b7e16418f4a241f44cdf108ffb50d", size = 2149135, upload-time = "2025-10-14T10:23:24.379Z" },
-    { url = "https://files.pythonhosted.org/packages/7e/7d/138e902ed6399b866f7cfe4435d22445e16fff888a1c00560d9dc79a780f/pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:491535d45cd7ad7e4a2af4a5169b0d07bebf1adfd164b0368da8aa41e19907a5", size = 2104721, upload-time = "2025-10-14T10:23:26.906Z" },
-    { url = "https://files.pythonhosted.org/packages/47/13/0525623cf94627f7b53b4c2034c81edc8491cbfc7c28d5447fa318791479/pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:54d86c0cada6aba4ec4c047d0e348cbad7063b87ae0f005d9f8c9ad04d4a92a2", size = 1931608, upload-time = "2025-10-14T10:23:29.306Z" },
-    { url = "https://files.pythonhosted.org/packages/d6/f9/744bc98137d6ef0a233f808bfc9b18cf94624bf30836a18d3b05d08bf418/pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca1124aced216b2500dc2609eade086d718e8249cb9696660ab447d50a758bd", size = 2132986, upload-time = "2025-10-14T10:23:32.057Z" },
-    { url = "https://files.pythonhosted.org/packages/17/c8/629e88920171173f6049386cc71f893dff03209a9ef32b4d2f7e7c264bcf/pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c9024169becccf0cb470ada03ee578d7348c119a0d42af3dcf9eda96e3a247c", size = 2187516, upload-time = "2025-10-14T10:23:34.871Z" },
-    { url = "https://files.pythonhosted.org/packages/2e/0f/4f2734688d98488782218ca61bcc118329bf5de05bb7fe3adc7dd79b0b86/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:26895a4268ae5a2849269f4991cdc97236e4b9c010e51137becf25182daac405", size = 2146146, upload-time = "2025-10-14T10:23:37.342Z" },
-    { url = "https://files.pythonhosted.org/packages/ed/f2/ab385dbd94a052c62224b99cf99002eee99dbec40e10006c78575aead256/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:ca4df25762cf71308c446e33c9b1fdca2923a3f13de616e2a949f38bf21ff5a8", size = 2311296, upload-time = "2025-10-14T10:23:40.145Z" },
-    { url = "https://files.pythonhosted.org/packages/fc/8e/e4f12afe1beeb9823bba5375f8f258df0cc61b056b0195fb1cf9f62a1a58/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:5a28fcedd762349519276c36634e71853b4541079cab4acaaac60c4421827308", size = 2315386, upload-time = "2025-10-14T10:23:42.624Z" },
-    { url = "https://files.pythonhosted.org/packages/48/f7/925f65d930802e3ea2eb4d5afa4cb8730c8dc0d2cb89a59dc4ed2fcb2d74/pydantic_core-2.41.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c173ddcd86afd2535e2b695217e82191580663a1d1928239f877f5a1649ef39f", size = 2147775, upload-time = "2025-10-14T10:23:45.406Z" },
+sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/c6/90/32c9941e728d564b411d574d8ee0cf09b12ec978cb22b294995bae5549a5/pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146", size = 2107298, upload-time = "2025-11-04T13:39:04.116Z" },
+    { url = "https://files.pythonhosted.org/packages/fb/a8/61c96a77fe28993d9a6fb0f4127e05430a267b235a124545d79fea46dd65/pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2", size = 1901475, upload-time = "2025-11-04T13:39:06.055Z" },
+    { url = "https://files.pythonhosted.org/packages/5d/b6/338abf60225acc18cdc08b4faef592d0310923d19a87fba1faf05af5346e/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97", size = 1918815, upload-time = "2025-11-04T13:39:10.41Z" },
+    { url = "https://files.pythonhosted.org/packages/d1/1c/2ed0433e682983d8e8cba9c8d8ef274d4791ec6a6f24c58935b90e780e0a/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9", size = 2065567, upload-time = "2025-11-04T13:39:12.244Z" },
+    { url = "https://files.pythonhosted.org/packages/b3/24/cf84974ee7d6eae06b9e63289b7b8f6549d416b5c199ca2d7ce13bbcf619/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52", size = 2230442, upload-time = "2025-11-04T13:39:13.962Z" },
+    { url = "https://files.pythonhosted.org/packages/fd/21/4e287865504b3edc0136c89c9c09431be326168b1eb7841911cbc877a995/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941", size = 2350956, upload-time = "2025-11-04T13:39:15.889Z" },
+    { url = "https://files.pythonhosted.org/packages/a8/76/7727ef2ffa4b62fcab916686a68a0426b9b790139720e1934e8ba797e238/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a", size = 2068253, upload-time = "2025-11-04T13:39:17.403Z" },
+    { url = "https://files.pythonhosted.org/packages/d5/8c/a4abfc79604bcb4c748e18975c44f94f756f08fb04218d5cb87eb0d3a63e/pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c", size = 2177050, upload-time = "2025-11-04T13:39:19.351Z" },
+    { url = "https://files.pythonhosted.org/packages/67/b1/de2e9a9a79b480f9cb0b6e8b6ba4c50b18d4e89852426364c66aa82bb7b3/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2", size = 2147178, upload-time = "2025-11-04T13:39:21Z" },
+    { url = "https://files.pythonhosted.org/packages/16/c1/dfb33f837a47b20417500efaa0378adc6635b3c79e8369ff7a03c494b4ac/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556", size = 2341833, upload-time = "2025-11-04T13:39:22.606Z" },
+    { url = "https://files.pythonhosted.org/packages/47/36/00f398642a0f4b815a9a558c4f1dca1b4020a7d49562807d7bc9ff279a6c/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49", size = 2321156, upload-time = "2025-11-04T13:39:25.843Z" },
+    { url = "https://files.pythonhosted.org/packages/7e/70/cad3acd89fde2010807354d978725ae111ddf6d0ea46d1ea1775b5c1bd0c/pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba", size = 1989378, upload-time = "2025-11-04T13:39:27.92Z" },
+    { url = "https://files.pythonhosted.org/packages/76/92/d338652464c6c367e5608e4488201702cd1cbb0f33f7b6a85a60fe5f3720/pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9", size = 2013622, upload-time = "2025-11-04T13:39:29.848Z" },
+    { url = "https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" },
+    { url = "https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" },
+    { url = "https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869, upload-time = "2025-11-04T13:39:34.469Z" },
+    { url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" },
+    { url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740, upload-time = "2025-11-04T13:39:37.753Z" },
+    { url = "https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" },
+    { url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = "2025-11-04T13:39:42.523Z" },
+    { url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, upload-time = "2025-11-04T13:39:44.553Z" },
+    { url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = "2025-11-04T13:39:46.238Z" },
+    { url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" },
+    { url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" },
+    { url = "https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" },
+    { url = "https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" },
+    { url = "https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" },
+    { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" },
+    { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" },
+    { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" },
+    { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" },
+    { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" },
+    { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" },
+    { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" },
+    { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" },
+    { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" },
+    { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" },
+    { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" },
+    { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" },
+    { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" },
+    { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" },
+    { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" },
+    { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" },
+    { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" },
+    { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" },
+    { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" },
+    { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" },
+    { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" },
+    { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" },
+    { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" },
+    { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" },
+    { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" },
+    { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" },
+    { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" },
+    { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" },
+    { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" },
+    { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" },
+    { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" },
+    { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" },
+    { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" },
+    { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" },
+    { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" },
+    { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" },
+    { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" },
+    { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" },
+    { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" },
+    { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" },
+    { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" },
+    { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" },
+    { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" },
+    { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" },
+    { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" },
+    { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" },
+    { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" },
+    { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" },
+    { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" },
+    { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" },
+    { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" },
+    { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" },
+    { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" },
+    { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" },
+    { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" },
+    { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" },
+    { url = "https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" },
+    { url = "https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" },
+    { url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" },
+    { url = "https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = "2025-11-04T13:42:47.156Z" },
+    { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" },
+    { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" },
+    { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" },
+    { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" },
+    { url = "https://files.pythonhosted.org/packages/e6/b0/1a2aa41e3b5a4ba11420aba2d091b2d17959c8d1519ece3627c371951e73/pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8", size = 2103351, upload-time = "2025-11-04T13:43:02.058Z" },
+    { url = "https://files.pythonhosted.org/packages/a4/ee/31b1f0020baaf6d091c87900ae05c6aeae101fa4e188e1613c80e4f1ea31/pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a", size = 1925363, upload-time = "2025-11-04T13:43:05.159Z" },
+    { url = "https://files.pythonhosted.org/packages/e1/89/ab8e86208467e467a80deaca4e434adac37b10a9d134cd2f99b28a01e483/pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b", size = 2135615, upload-time = "2025-11-04T13:43:08.116Z" },
+    { url = "https://files.pythonhosted.org/packages/99/0a/99a53d06dd0348b2008f2f30884b34719c323f16c3be4e6cc1203b74a91d/pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2", size = 2175369, upload-time = "2025-11-04T13:43:12.49Z" },
+    { url = "https://files.pythonhosted.org/packages/6d/94/30ca3b73c6d485b9bb0bc66e611cff4a7138ff9736b7e66bcf0852151636/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093", size = 2144218, upload-time = "2025-11-04T13:43:15.431Z" },
+    { url = "https://files.pythonhosted.org/packages/87/57/31b4f8e12680b739a91f472b5671294236b82586889ef764b5fbc6669238/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a", size = 2329951, upload-time = "2025-11-04T13:43:18.062Z" },
+    { url = "https://files.pythonhosted.org/packages/7d/73/3c2c8edef77b8f7310e6fb012dbc4b8551386ed575b9eb6fb2506e28a7eb/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963", size = 2318428, upload-time = "2025-11-04T13:43:20.679Z" },
+    { url = "https://files.pythonhosted.org/packages/2f/02/8559b1f26ee0d502c74f9cca5c0d2fd97e967e083e006bbbb4e97f3a043a/pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a", size = 2147009, upload-time = "2025-11-04T13:43:23.286Z" },
+    { url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" },
+    { url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" },
+    { url = "https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" },
+    { url = "https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" },
+    { url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" },
+    { url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" },
+    { url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" },
+    { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" },
 ]
 
 [[package]]
@@ -1286,6 +1466,20 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/61/3c/34c82eece029502808401b3bfd0b906038df1a746c03bcb1af522c15b8f1/pydantic_serdes-1.0.3-py3-none-any.whl", hash = "sha256:c77f4fd59f11d2ec24cc3ab93d42bbfc247c12b51cc479dfe8dab13a21c6c4f0", size = 21151, upload-time = "2025-07-30T15:25:18.527Z" },
 ]
 
+[[package]]
+name = "pydantic-settings"
+version = "2.12.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "pydantic" },
+    { name = "python-dotenv" },
+    { name = "typing-inspection" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/43/4b/ac7e0aae12027748076d72a8764ff1c9d82ca75a7a52622e67ed3f765c54/pydantic_settings-2.12.0.tar.gz", hash = "sha256:005538ef951e3c2a68e1c08b292b5f2e71490def8589d4221b95dab00dafcfd0", size = 194184, upload-time = "2025-11-10T14:25:47.013Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/c1/60/5d4751ba3f4a40a6891f24eec885f51afd78d208498268c734e256fb13c4/pydantic_settings-2.12.0-py3-none-any.whl", hash = "sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809", size = 51880, upload-time = "2025-11-10T14:25:45.546Z" },
+]
+
 [[package]]
 name = "pyeapi"
 version = "1.0.4"
@@ -1306,48 +1500,62 @@ wheels = [
 
 [[package]]
 name = "pynacl"
-version = "1.6.0"
+version = "1.6.1"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/06/c6/a3124dee667a423f2c637cfd262a54d67d8ccf3e160f3c50f622a85b7723/pynacl-1.6.0.tar.gz", hash = "sha256:cb36deafe6e2bce3b286e5d1f3e1c246e0ccdb8808ddb4550bb2792f2df298f2", size = 3505641, upload-time = "2025-09-10T23:39:22.308Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/70/24/1b639176401255605ba7c2b93a7b1eb1e379e0710eca62613633eb204201/pynacl-1.6.0-cp314-cp314t-macosx_10_10_universal2.whl", hash = "sha256:f46386c24a65383a9081d68e9c2de909b1834ec74ff3013271f1bca9c2d233eb", size = 384141, upload-time = "2025-09-10T23:38:28.675Z" },
-    { url = "https://files.pythonhosted.org/packages/5e/7b/874efdf57d6bf172db0df111b479a553c3d9e8bb4f1f69eb3ffff772d6e8/pynacl-1.6.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:dea103a1afcbc333bc0e992e64233d360d393d1e63d0bc88554f572365664348", size = 808132, upload-time = "2025-09-10T23:38:38.995Z" },
-    { url = "https://files.pythonhosted.org/packages/f3/61/9b53f5913f3b75ac3d53170cdb897101b2b98afc76f4d9d3c8de5aa3ac05/pynacl-1.6.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:04f20784083014e265ad58c1b2dd562c3e35864b5394a14ab54f5d150ee9e53e", size = 1407253, upload-time = "2025-09-10T23:38:40.492Z" },
-    { url = "https://files.pythonhosted.org/packages/7c/0a/b138916b22bbf03a1bdbafecec37d714e7489dd7bcaf80cd17852f8b67be/pynacl-1.6.0-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bbcc4452a1eb10cd5217318c822fde4be279c9de8567f78bad24c773c21254f8", size = 843719, upload-time = "2025-09-10T23:38:30.87Z" },
-    { url = "https://files.pythonhosted.org/packages/01/3b/17c368197dfb2c817ce033f94605a47d0cc27901542109e640cef263f0af/pynacl-1.6.0-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51fed9fe1bec9e7ff9af31cd0abba179d0e984a2960c77e8e5292c7e9b7f7b5d", size = 1445441, upload-time = "2025-09-10T23:38:33.078Z" },
-    { url = "https://files.pythonhosted.org/packages/35/3c/f79b185365ab9be80cd3cd01dacf30bf5895f9b7b001e683b369e0bb6d3d/pynacl-1.6.0-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:10d755cf2a455d8c0f8c767a43d68f24d163b8fe93ccfaabfa7bafd26be58d73", size = 825691, upload-time = "2025-09-10T23:38:34.832Z" },
-    { url = "https://files.pythonhosted.org/packages/f7/1f/8b37d25e95b8f2a434a19499a601d4d272b9839ab8c32f6b0fc1e40c383f/pynacl-1.6.0-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:536703b8f90e911294831a7fbcd0c062b837f3ccaa923d92a6254e11178aaf42", size = 1410726, upload-time = "2025-09-10T23:38:36.893Z" },
-    { url = "https://files.pythonhosted.org/packages/bd/93/5a4a4cf9913014f83d615ad6a2df9187330f764f606246b3a744c0788c03/pynacl-1.6.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6b08eab48c9669d515a344fb0ef27e2cbde847721e34bba94a343baa0f33f1f4", size = 801035, upload-time = "2025-09-10T23:38:42.109Z" },
-    { url = "https://files.pythonhosted.org/packages/bf/60/40da6b0fe6a4d5fd88f608389eb1df06492ba2edca93fca0b3bebff9b948/pynacl-1.6.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5789f016e08e5606803161ba24de01b5a345d24590a80323379fc4408832d290", size = 1371854, upload-time = "2025-09-10T23:38:44.16Z" },
-    { url = "https://files.pythonhosted.org/packages/44/b2/37ac1d65008f824cba6b5bf68d18b76d97d0f62d7a032367ea69d4a187c8/pynacl-1.6.0-cp314-cp314t-win32.whl", hash = "sha256:4853c154dc16ea12f8f3ee4b7e763331876316cc3a9f06aeedf39bcdca8f9995", size = 230345, upload-time = "2025-09-10T23:38:48.276Z" },
-    { url = "https://files.pythonhosted.org/packages/f4/5a/9234b7b45af890d02ebee9aae41859b9b5f15fb4a5a56d88e3b4d1659834/pynacl-1.6.0-cp314-cp314t-win_amd64.whl", hash = "sha256:347dcddce0b4d83ed3f32fd00379c83c425abee5a9d2cd0a2c84871334eaff64", size = 243103, upload-time = "2025-09-10T23:38:45.503Z" },
-    { url = "https://files.pythonhosted.org/packages/c9/2c/c1a0f19d720ab0af3bc4241af2bdf4d813c3ecdcb96392b5e1ddf2d8f24f/pynacl-1.6.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2d6cd56ce4998cb66a6c112fda7b1fdce5266c9f05044fa72972613bef376d15", size = 187778, upload-time = "2025-09-10T23:38:46.731Z" },
-    { url = "https://files.pythonhosted.org/packages/63/37/87c72df19857c5b3b47ace6f211a26eb862ada495cc96daa372d96048fca/pynacl-1.6.0-cp38-abi3-macosx_10_10_universal2.whl", hash = "sha256:f4b3824920e206b4f52abd7de621ea7a44fd3cb5c8daceb7c3612345dfc54f2e", size = 382610, upload-time = "2025-09-10T23:38:49.459Z" },
-    { url = "https://files.pythonhosted.org/packages/0c/64/3ce958a5817fd3cc6df4ec14441c43fd9854405668d73babccf77f9597a3/pynacl-1.6.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:16dd347cdc8ae0b0f6187a2608c0af1c8b7ecbbe6b4a06bff8253c192f696990", size = 798744, upload-time = "2025-09-10T23:38:58.531Z" },
-    { url = "https://files.pythonhosted.org/packages/e4/8a/3f0dd297a0a33fa3739c255feebd0206bb1df0b44c52fbe2caf8e8bc4425/pynacl-1.6.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:16c60daceee88d04f8d41d0a4004a7ed8d9a5126b997efd2933e08e93a3bd850", size = 1397879, upload-time = "2025-09-10T23:39:00.44Z" },
-    { url = "https://files.pythonhosted.org/packages/41/94/028ff0434a69448f61348d50d2c147dda51aabdd4fbc93ec61343332174d/pynacl-1.6.0-cp38-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:25720bad35dfac34a2bcdd61d9e08d6bfc6041bebc7751d9c9f2446cf1e77d64", size = 833907, upload-time = "2025-09-10T23:38:50.936Z" },
-    { url = "https://files.pythonhosted.org/packages/52/bc/a5cff7f8c30d5f4c26a07dfb0bcda1176ab8b2de86dda3106c00a02ad787/pynacl-1.6.0-cp38-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8bfaa0a28a1ab718bad6239979a5a57a8d1506d0caf2fba17e524dbb409441cf", size = 1436649, upload-time = "2025-09-10T23:38:52.783Z" },
-    { url = "https://files.pythonhosted.org/packages/7a/20/c397be374fd5d84295046e398de4ba5f0722dc14450f65db76a43c121471/pynacl-1.6.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:ef214b90556bb46a485b7da8258e59204c244b1b5b576fb71848819b468c44a7", size = 817142, upload-time = "2025-09-10T23:38:54.4Z" },
-    { url = "https://files.pythonhosted.org/packages/12/30/5efcef3406940cda75296c6d884090b8a9aad2dcc0c304daebb5ae99fb4a/pynacl-1.6.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:49c336dd80ea54780bcff6a03ee1a476be1612423010472e60af83452aa0f442", size = 1401794, upload-time = "2025-09-10T23:38:56.614Z" },
-    { url = "https://files.pythonhosted.org/packages/be/e1/a8fe1248cc17ccb03b676d80fa90763760a6d1247da434844ea388d0816c/pynacl-1.6.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:f3482abf0f9815e7246d461fab597aa179b7524628a4bc36f86a7dc418d2608d", size = 772161, upload-time = "2025-09-10T23:39:01.93Z" },
-    { url = "https://files.pythonhosted.org/packages/a3/76/8a62702fb657d6d9104ce13449db221a345665d05e6a3fdefb5a7cafd2ad/pynacl-1.6.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:140373378e34a1f6977e573033d1dd1de88d2a5d90ec6958c9485b2fd9f3eb90", size = 1370720, upload-time = "2025-09-10T23:39:03.531Z" },
-    { url = "https://files.pythonhosted.org/packages/6d/38/9e9e9b777a1c4c8204053733e1a0269672c0bd40852908c9ad6b6eaba82c/pynacl-1.6.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6b393bc5e5a0eb86bb85b533deb2d2c815666665f840a09e0aa3362bb6088736", size = 791252, upload-time = "2025-09-10T23:39:05.058Z" },
-    { url = "https://files.pythonhosted.org/packages/63/ef/d972ce3d92ae05c9091363cf185e8646933f91c376e97b8be79ea6e96c22/pynacl-1.6.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4a25cfede801f01e54179b8ff9514bd7b5944da560b7040939732d1804d25419", size = 1362910, upload-time = "2025-09-10T23:39:06.924Z" },
-    { url = "https://files.pythonhosted.org/packages/35/2c/ee0b373a1861f66a7ca8bdb999331525615061320dd628527a50ba8e8a60/pynacl-1.6.0-cp38-abi3-win32.whl", hash = "sha256:dcdeb41c22ff3c66eef5e63049abf7639e0db4edee57ba70531fc1b6b133185d", size = 226461, upload-time = "2025-09-10T23:39:11.894Z" },
-    { url = "https://files.pythonhosted.org/packages/75/f7/41b6c0b9dd9970173b6acc026bab7b4c187e4e5beef2756d419ad65482da/pynacl-1.6.0-cp38-abi3-win_amd64.whl", hash = "sha256:cf831615cc16ba324240de79d925eacae8265b7691412ac6b24221db157f6bd1", size = 238802, upload-time = "2025-09-10T23:39:08.966Z" },
-    { url = "https://files.pythonhosted.org/packages/8e/0f/462326910c6172fa2c6ed07922b22ffc8e77432b3affffd9e18f444dbfbb/pynacl-1.6.0-cp38-abi3-win_arm64.whl", hash = "sha256:84709cea8f888e618c21ed9a0efdb1a59cc63141c403db8bf56c469b71ad56f2", size = 183846, upload-time = "2025-09-10T23:39:10.552Z" },
+sdist = { url = "https://files.pythonhosted.org/packages/b2/46/aeca065d227e2265125aea590c9c47fbf5786128c9400ee0eb7c88931f06/pynacl-1.6.1.tar.gz", hash = "sha256:8d361dac0309f2b6ad33b349a56cd163c98430d409fa503b10b70b3ad66eaa1d", size = 3506616, upload-time = "2025-11-10T16:02:13.195Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/75/d6/4b2dca33ed512de8f54e5c6074aa06eaeb225bfbcd9b16f33a414389d6bd/pynacl-1.6.1-cp314-cp314t-macosx_10_10_universal2.whl", hash = "sha256:7d7c09749450c385301a3c20dca967a525152ae4608c0a096fe8464bfc3df93d", size = 389109, upload-time = "2025-11-10T16:01:28.79Z" },
+    { url = "https://files.pythonhosted.org/packages/3c/30/e8dbb8ff4fa2559bbbb2187ba0d0d7faf728d17cb8396ecf4a898b22d3da/pynacl-1.6.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fc734c1696ffd49b40f7c1779c89ba908157c57345cf626be2e0719488a076d3", size = 808254, upload-time = "2025-11-10T16:01:37.839Z" },
+    { url = "https://files.pythonhosted.org/packages/44/f9/f5449c652f31da00249638dbab065ad4969c635119094b79b17c3a4da2ab/pynacl-1.6.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3cd787ec1f5c155dc8ecf39b1333cfef41415dc96d392f1ce288b4fe970df489", size = 1407365, upload-time = "2025-11-10T16:01:40.454Z" },
+    { url = "https://files.pythonhosted.org/packages/eb/2f/9aa5605f473b712065c0a193ebf4ad4725d7a245533f0cd7e5dcdbc78f35/pynacl-1.6.1-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b35d93ab2df03ecb3aa506be0d3c73609a51449ae0855c2e89c7ed44abde40b", size = 843842, upload-time = "2025-11-10T16:01:30.524Z" },
+    { url = "https://files.pythonhosted.org/packages/32/8d/748f0f6956e207453da8f5f21a70885fbbb2e060d5c9d78e0a4a06781451/pynacl-1.6.1-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dece79aecbb8f4640a1adbb81e4aa3bfb0e98e99834884a80eb3f33c7c30e708", size = 1445559, upload-time = "2025-11-10T16:01:33.663Z" },
+    { url = "https://files.pythonhosted.org/packages/78/d0/2387f0dcb0e9816f38373999e48db4728ed724d31accdd4e737473319d35/pynacl-1.6.1-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:c2228054f04bf32d558fb89bb99f163a8197d5a9bf4efa13069a7fa8d4b93fc3", size = 825791, upload-time = "2025-11-10T16:01:34.823Z" },
+    { url = "https://files.pythonhosted.org/packages/18/3d/ef6fb7eb072aaf15f280bc66f26ab97e7fc9efa50fb1927683013ef47473/pynacl-1.6.1-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:2b12f1b97346f177affcdfdc78875ff42637cb40dcf79484a97dae3448083a78", size = 1410843, upload-time = "2025-11-10T16:01:36.401Z" },
+    { url = "https://files.pythonhosted.org/packages/e3/fb/23824a017526850ee7d8a1cc4cd1e3e5082800522c10832edbbca8619537/pynacl-1.6.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e735c3a1bdfde3834503baf1a6d74d4a143920281cb724ba29fb84c9f49b9c48", size = 801140, upload-time = "2025-11-10T16:01:42.013Z" },
+    { url = "https://files.pythonhosted.org/packages/5d/d1/ebc6b182cb98603a35635b727d62f094bc201bf610f97a3bb6357fe688d2/pynacl-1.6.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3384a454adf5d716a9fadcb5eb2e3e72cd49302d1374a60edc531c9957a9b014", size = 1371966, upload-time = "2025-11-10T16:01:43.297Z" },
+    { url = "https://files.pythonhosted.org/packages/64/f4/c9d7b6f02924b1f31db546c7bd2a83a2421c6b4a8e6a2e53425c9f2802e0/pynacl-1.6.1-cp314-cp314t-win32.whl", hash = "sha256:d8615ee34d01c8e0ab3f302dcdd7b32e2bcf698ba5f4809e7cc407c8cdea7717", size = 230482, upload-time = "2025-11-10T16:01:47.688Z" },
+    { url = "https://files.pythonhosted.org/packages/c4/2c/942477957fba22da7bf99131850e5ebdff66623418ab48964e78a7a8293e/pynacl-1.6.1-cp314-cp314t-win_amd64.whl", hash = "sha256:5f5b35c1a266f8a9ad22525049280a600b19edd1f785bccd01ae838437dcf935", size = 243232, upload-time = "2025-11-10T16:01:45.208Z" },
+    { url = "https://files.pythonhosted.org/packages/7a/0c/bdbc0d04a53b96a765ab03aa2cf9a76ad8653d70bf1665459b9a0dedaa1c/pynacl-1.6.1-cp314-cp314t-win_arm64.whl", hash = "sha256:d984c91fe3494793b2a1fb1e91429539c6c28e9ec8209d26d25041ec599ccf63", size = 187907, upload-time = "2025-11-10T16:01:46.328Z" },
+    { url = "https://files.pythonhosted.org/packages/49/41/3cfb3b4f3519f6ff62bf71bf1722547644bcfb1b05b8fdbdc300249ba113/pynacl-1.6.1-cp38-abi3-macosx_10_10_universal2.whl", hash = "sha256:a6f9fd6d6639b1e81115c7f8ff16b8dedba1e8098d2756275d63d208b0e32021", size = 387591, upload-time = "2025-11-10T16:01:49.1Z" },
+    { url = "https://files.pythonhosted.org/packages/18/21/b8a6563637799f617a3960f659513eccb3fcc655d5fc2be6e9dc6416826f/pynacl-1.6.1-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e49a3f3d0da9f79c1bec2aa013261ab9fa651c7da045d376bd306cf7c1792993", size = 798866, upload-time = "2025-11-10T16:01:55.688Z" },
+    { url = "https://files.pythonhosted.org/packages/e8/6c/dc38033bc3ea461e05ae8f15a81e0e67ab9a01861d352ae971c99de23e7c/pynacl-1.6.1-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7713f8977b5d25f54a811ec9efa2738ac592e846dd6e8a4d3f7578346a841078", size = 1398001, upload-time = "2025-11-10T16:01:57.101Z" },
+    { url = "https://files.pythonhosted.org/packages/9f/05/3ec0796a9917100a62c5073b20c4bce7bf0fea49e99b7906d1699cc7b61b/pynacl-1.6.1-cp38-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5a3becafc1ee2e5ea7f9abc642f56b82dcf5be69b961e782a96ea52b55d8a9fc", size = 834024, upload-time = "2025-11-10T16:01:50.228Z" },
+    { url = "https://files.pythonhosted.org/packages/f0/b7/ae9982be0f344f58d9c64a1c25d1f0125c79201634efe3c87305ac7cb3e3/pynacl-1.6.1-cp38-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4ce50d19f1566c391fedc8dc2f2f5be265ae214112ebe55315e41d1f36a7f0a9", size = 1436766, upload-time = "2025-11-10T16:01:51.886Z" },
+    { url = "https://files.pythonhosted.org/packages/b4/51/b2ccbf89cf3025a02e044dd68a365cad593ebf70f532299f2c047d2b7714/pynacl-1.6.1-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:543f869140f67d42b9b8d47f922552d7a967e6c116aad028c9bfc5f3f3b3a7b7", size = 817275, upload-time = "2025-11-10T16:01:53.351Z" },
+    { url = "https://files.pythonhosted.org/packages/a8/6c/dd9ee8214edf63ac563b08a9b30f98d116942b621d39a751ac3256694536/pynacl-1.6.1-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a2bb472458c7ca959aeeff8401b8efef329b0fc44a89d3775cffe8fad3398ad8", size = 1401891, upload-time = "2025-11-10T16:01:54.587Z" },
+    { url = "https://files.pythonhosted.org/packages/0f/c1/97d3e1c83772d78ee1db3053fd674bc6c524afbace2bfe8d419fd55d7ed1/pynacl-1.6.1-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3206fa98737fdc66d59b8782cecc3d37d30aeec4593d1c8c145825a345bba0f0", size = 772291, upload-time = "2025-11-10T16:01:58.111Z" },
+    { url = "https://files.pythonhosted.org/packages/4d/ca/691ff2fe12f3bb3e43e8e8df4b806f6384593d427f635104d337b8e00291/pynacl-1.6.1-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:53543b4f3d8acb344f75fd4d49f75e6572fce139f4bfb4815a9282296ff9f4c0", size = 1370839, upload-time = "2025-11-10T16:01:59.252Z" },
+    { url = "https://files.pythonhosted.org/packages/30/27/06fe5389d30391fce006442246062cc35773c84fbcad0209fbbf5e173734/pynacl-1.6.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:319de653ef84c4f04e045eb250e6101d23132372b0a61a7acf91bac0fda8e58c", size = 791371, upload-time = "2025-11-10T16:02:01.075Z" },
+    { url = "https://files.pythonhosted.org/packages/2c/7a/e2bde8c9d39074a5aa046c7d7953401608d1f16f71e237f4bef3fb9d7e49/pynacl-1.6.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:262a8de6bba4aee8a66f5edf62c214b06647461c9b6b641f8cd0cb1e3b3196fe", size = 1363031, upload-time = "2025-11-10T16:02:02.656Z" },
+    { url = "https://files.pythonhosted.org/packages/dd/b6/63fd77264dae1087770a1bb414bc604470f58fbc21d83822fc9c76248076/pynacl-1.6.1-cp38-abi3-win32.whl", hash = "sha256:9fd1a4eb03caf8a2fe27b515a998d26923adb9ddb68db78e35ca2875a3830dde", size = 226585, upload-time = "2025-11-10T16:02:07.116Z" },
+    { url = "https://files.pythonhosted.org/packages/12/c8/b419180f3fdb72ab4d45e1d88580761c267c7ca6eda9a20dcbcba254efe6/pynacl-1.6.1-cp38-abi3-win_amd64.whl", hash = "sha256:a569a4069a7855f963940040f35e87d8bc084cb2d6347428d5ad20550a0a1a21", size = 238923, upload-time = "2025-11-10T16:02:04.401Z" },
+    { url = "https://files.pythonhosted.org/packages/35/76/c34426d532e4dce7ff36e4d92cb20f4cbbd94b619964b93d24e8f5b5510f/pynacl-1.6.1-cp38-abi3-win_arm64.whl", hash = "sha256:5953e8b8cfadb10889a6e7bd0f53041a745d1b3d30111386a1bb37af171e6daf", size = 183970, upload-time = "2025-11-10T16:02:05.786Z" },
+]
+
+[[package]]
+name = "pynautobot"
+version = "3.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "packaging" },
+    { name = "requests" },
+    { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b6/a8/2f9f9d652f35e8f0f09f35fb5d204659515fe2a4385f8c30eecebb5569dc/pynautobot-3.0.0.tar.gz", hash = "sha256:cc0b5ccfd9edc4f7fa9777845ce895bf0f6a6e41610a8bc05c2459463351b0b7", size = 33587, upload-time = "2025-11-14T16:55:18.862Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/5b/cd/8c1e495fbe21298dc484fc4a3c294388fda55d9467579885e170895411dc/pynautobot-3.0.0-py3-none-any.whl", hash = "sha256:b24c636517dfdbebdc258d283a99af079e0c278e69d705767db3e8455413590f", size = 41319, upload-time = "2025-11-14T16:55:17.874Z" },
 ]
 
 [[package]]
 name = "pyparsing"
-version = "3.2.5"
+version = "3.3.1"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/f2/a5/181488fc2b9d093e3972d2a472855aae8a03f000592dbfce716a512b3359/pyparsing-3.2.5.tar.gz", hash = "sha256:2df8d5b7b2802ef88e8d016a2eb9c7aeaa923529cd251ed0fe4608275d4105b6", size = 1099274, upload-time = "2025-09-21T04:11:06.277Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/33/c1/1d9de9aeaa1b89b0186e5fe23294ff6517fce1bc69149185577cd31016b2/pyparsing-3.3.1.tar.gz", hash = "sha256:47fad0f17ac1e2cad3de3b458570fbc9b03560aa029ed5e16ee5554da9a2251c", size = 1550512, upload-time = "2025-12-23T03:14:04.391Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/10/5e/1aa9a93198c6b64513c9d7752de7422c06402de6600a8767da1524f9570b/pyparsing-3.2.5-py3-none-any.whl", hash = "sha256:e38a4f02064cf41fe6593d328d0512495ad1f3d8a91c4f73fc401b3079a59a5e", size = 113890, upload-time = "2025-09-21T04:11:04.117Z" },
+    { url = "https://files.pythonhosted.org/packages/8b/40/2614036cdd416452f5bf98ec037f38a1afb17f327cb8e6b652d4729e0af8/pyparsing-3.3.1-py3-none-any.whl", hash = "sha256:023b5e7e5520ad96642e2c6db4cb683d3970bd640cdf7115049a6e9c3682df82", size = 121793, upload-time = "2025-12-23T03:14:02.103Z" },
 ]
 
 [[package]]
@@ -1361,7 +1569,7 @@ wheels = [
 
 [[package]]
 name = "pytest"
-version = "8.4.2"
+version = "9.0.2"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "colorama", marker = "sys_platform == 'win32'" },
@@ -1372,9 +1580,9 @@ dependencies = [
     { name = "pygments" },
     { name = "tomli", marker = "python_full_version < '3.11'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" },
+    { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" },
 ]
 
 [[package]]
@@ -1391,13 +1599,22 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" },
 ]
 
+[[package]]
+name = "python-dotenv"
+version = "1.2.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" },
+]
+
 [[package]]
 name = "pytokens"
-version = "0.2.0"
+version = "0.3.0"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/d4/c2/dbadcdddb412a267585459142bfd7cc241e6276db69339353ae6e241ab2b/pytokens-0.2.0.tar.gz", hash = "sha256:532d6421364e5869ea57a9523bf385f02586d4662acbcc0342afd69511b4dd43", size = 15368, upload-time = "2025-10-15T08:02:42.738Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/4e/8d/a762be14dae1c3bf280202ba3172020b2b0b4c537f94427435f19c413b72/pytokens-0.3.0.tar.gz", hash = "sha256:2f932b14ed08de5fcf0b391ace2642f858f1394c0857202959000b68ed7a458a", size = 17644, upload-time = "2025-11-05T13:36:35.34Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/89/5a/c269ea6b348b6f2c32686635df89f32dbe05df1088dd4579302a6f8f99af/pytokens-0.2.0-py3-none-any.whl", hash = "sha256:74d4b318c67f4295c13782ddd9abcb7e297ec5630ad060eb90abf7ebbefe59f8", size = 12038, upload-time = "2025-10-15T08:02:41.694Z" },
+    { url = "https://files.pythonhosted.org/packages/84/25/d9db8be44e205a124f6c98bc0324b2bb149b7431c53877fc6d1038dddaf5/pytokens-0.3.0-py3-none-any.whl", hash = "sha256:95b2b5eaf832e469d141a378872480ede3f251a5a5041b8ec6e581d3ac71bbf3", size = 12195, upload-time = "2025-11-05T13:36:33.183Z" },
 ]
 
 [[package]]
@@ -1494,94 +1711,46 @@ wheels = [
 
 [[package]]
 name = "ruamel-yaml"
-version = "0.18.15"
+version = "0.19.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
-    { name = "ruamel-yaml-clib", marker = "python_full_version < '3.14' and platform_python_implementation == 'CPython'" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/3e/db/f3950f5e5031b618aae9f423a39bf81a55c148aecd15a34527898e752cf4/ruamel.yaml-0.18.15.tar.gz", hash = "sha256:dbfca74b018c4c3fba0b9cc9ee33e53c371194a9000e694995e620490fd40700", size = 146865, upload-time = "2025-08-19T11:15:10.694Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/d1/e5/f2a0621f1781b76a38194acae72f01e37b1941470407345b6e8653ad7640/ruamel.yaml-0.18.15-py3-none-any.whl", hash = "sha256:148f6488d698b7a5eded5ea793a025308b25eca97208181b6a026037f391f701", size = 119702, upload-time = "2025-08-19T11:15:07.696Z" },
-]
-
-[[package]]
-name = "ruamel-yaml-clib"
-version = "0.2.14"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/d8/e9/39ec4d4b3f91188fad1842748f67d4e749c77c37e353c4e545052ee8e893/ruamel.yaml.clib-0.2.14.tar.gz", hash = "sha256:803f5044b13602d58ea378576dd75aa759f52116a0232608e8fdada4da33752e", size = 225394, upload-time = "2025-09-22T19:51:23.753Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/b4/56/35a0a752415ae01992c68f5a6513bdef0e1b6fbdb60d7619342ce12346a0/ruamel.yaml.clib-0.2.14-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f8b2acb0ffdd2ce8208accbec2dca4a06937d556fdcaefd6473ba1b5daa7e3c4", size = 269216, upload-time = "2025-09-23T14:24:09.742Z" },
-    { url = "https://files.pythonhosted.org/packages/98/6a/9a68184ab93619f4607ff1675e4ef01e8accfcbff0d482f4ca44c10d8eab/ruamel.yaml.clib-0.2.14-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:aef953f3b8bd0b50bd52a2e52fb54a6a2171a1889d8dea4a5959d46c6624c451", size = 137092, upload-time = "2025-09-22T19:50:26.906Z" },
-    { url = "https://files.pythonhosted.org/packages/2b/3f/cfed5f088628128a9ec66f46794fd4d165642155c7b78c26d83b16c6bf7b/ruamel.yaml.clib-0.2.14-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:a0ac90efbc7a77b0d796c03c8cc4e62fd710b3f1e4c32947713ef2ef52e09543", size = 633768, upload-time = "2025-09-22T19:50:31.228Z" },
-    { url = "https://files.pythonhosted.org/packages/3a/d5/5ce2cc156c1da48160171968d91f066d305840fbf930ee955a509d025a44/ruamel.yaml.clib-0.2.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bf6b699223afe6c7fe9f2ef76e0bfa6dd892c21e94ce8c957478987ade76cd8", size = 721253, upload-time = "2025-09-22T19:50:28.776Z" },
-    { url = "https://files.pythonhosted.org/packages/2b/71/d0b56bc902b38ebe4be8e270f730f929eec4edaf8a0fa7028f4ef64fa950/ruamel.yaml.clib-0.2.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d73a0187718f6eec5b2f729b0f98e4603f7bd9c48aa65d01227d1a5dcdfbe9e8", size = 683823, upload-time = "2025-09-22T19:50:29.993Z" },
-    { url = "https://files.pythonhosted.org/packages/4b/db/1f37449dd89c540218598316ccafc1a0aed60215e72efa315c5367cfd015/ruamel.yaml.clib-0.2.14-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:81f6d3b19bc703679a5705c6a16dabdc79823c71d791d73c65949be7f3012c02", size = 690370, upload-time = "2025-09-23T18:42:46.797Z" },
-    { url = "https://files.pythonhosted.org/packages/5d/53/c498b30f35efcd9f47cb084d7ad9374f2b907470f73913dec6396b81397d/ruamel.yaml.clib-0.2.14-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b28caeaf3e670c08cb7e8de221266df8494c169bd6ed8875493fab45be9607a4", size = 703578, upload-time = "2025-09-22T19:50:32.531Z" },
-    { url = "https://files.pythonhosted.org/packages/34/79/492cfad9baed68914840c39e5f3c1cc251f51a897ddb3f532601215cbb12/ruamel.yaml.clib-0.2.14-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:94f3efb718f8f49b031f2071ec7a27dd20cbfe511b4dfd54ecee54c956da2b31", size = 722544, upload-time = "2025-09-22T19:50:34.157Z" },
-    { url = "https://files.pythonhosted.org/packages/ca/f5/479ebfd5ba396e209ade90f7282d84b90c57b3e07be8dc6fcd02a6df7ffc/ruamel.yaml.clib-0.2.14-cp310-cp310-win32.whl", hash = "sha256:27c070cf3888e90d992be75dd47292ff9aa17dafd36492812a6a304a1aedc182", size = 100375, upload-time = "2025-09-22T19:50:36.832Z" },
-    { url = "https://files.pythonhosted.org/packages/57/31/a044520fdb3bd409889f67f1efebda0658033c7ab3f390cee37531cc9a9e/ruamel.yaml.clib-0.2.14-cp310-cp310-win_amd64.whl", hash = "sha256:4f4a150a737fccae13fb51234d41304ff2222e3b7d4c8e9428ed1a6ab48389b8", size = 118129, upload-time = "2025-09-22T19:50:35.545Z" },
-    { url = "https://files.pythonhosted.org/packages/b3/9f/3c51e9578b8c36fcc4bdd271a1a5bb65963a74a4b6ad1a989768a22f6c2a/ruamel.yaml.clib-0.2.14-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5bae1a073ca4244620425cd3d3aa9746bde590992b98ee8c7c8be8c597ca0d4e", size = 270207, upload-time = "2025-09-23T14:24:11.445Z" },
-    { url = "https://files.pythonhosted.org/packages/4a/16/cb02815bc2ae9c66760c0c061d23c7358f9ba51dae95ac85247662b7fbe2/ruamel.yaml.clib-0.2.14-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:0a54e5e40a7a691a426c2703b09b0d61a14294d25cfacc00631aa6f9c964df0d", size = 137780, upload-time = "2025-09-22T19:50:37.734Z" },
-    { url = "https://files.pythonhosted.org/packages/31/c6/fc687cd1b93bff8e40861eea46d6dc1a6a778d9a085684e4045ff26a8e40/ruamel.yaml.clib-0.2.14-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:10d9595b6a19778f3269399eff6bab642608e5966183abc2adbe558a42d4efc9", size = 641590, upload-time = "2025-09-22T19:50:41.978Z" },
-    { url = "https://files.pythonhosted.org/packages/45/5d/65a2bc08b709b08576b3f307bf63951ee68a8e047cbbda6f1c9864ecf9a7/ruamel.yaml.clib-0.2.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dba72975485f2b87b786075e18a6e5d07dc2b4d8973beb2732b9b2816f1bad70", size = 738090, upload-time = "2025-09-22T19:50:39.152Z" },
-    { url = "https://files.pythonhosted.org/packages/fb/d0/a70a03614d9a6788a3661ab1538879ed2aae4e84d861f101243116308a37/ruamel.yaml.clib-0.2.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29757bdb7c142f9595cc1b62ec49a3d1c83fab9cef92db52b0ccebaad4eafb98", size = 700744, upload-time = "2025-09-22T19:50:40.811Z" },
-    { url = "https://files.pythonhosted.org/packages/77/30/c93fa457611f79946d5cb6cc97493ca5425f3f21891d7b1f9b44eaa1b38e/ruamel.yaml.clib-0.2.14-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:557df28dbccf79b152fe2d1b935f6063d9cc431199ea2b0e84892f35c03bb0ee", size = 742321, upload-time = "2025-09-23T18:42:48.916Z" },
-    { url = "https://files.pythonhosted.org/packages/40/85/e2c54ad637117cd13244a4649946eaa00f32edcb882d1f92df90e079ab00/ruamel.yaml.clib-0.2.14-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:26a8de280ab0d22b6e3ec745b4a5a07151a0f74aad92dd76ab9c8d8d7087720d", size = 743805, upload-time = "2025-09-22T19:50:43.58Z" },
-    { url = "https://files.pythonhosted.org/packages/81/50/f899072c38877d8ef5382e0b3d47f8c4346226c1f52d6945d6f64fec6a2f/ruamel.yaml.clib-0.2.14-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e501c096aa3889133d674605ebd018471bc404a59cbc17da3c5924421c54d97c", size = 769529, upload-time = "2025-09-22T19:50:45.707Z" },
-    { url = "https://files.pythonhosted.org/packages/99/7c/96d4b5075e30c65ea2064e40c2d657c7c235d7b6ef18751cf89a935b9041/ruamel.yaml.clib-0.2.14-cp311-cp311-win32.whl", hash = "sha256:915748cfc25b8cfd81b14d00f4bfdb2ab227a30d6d43459034533f4d1c207a2a", size = 100256, upload-time = "2025-09-22T19:50:48.26Z" },
-    { url = "https://files.pythonhosted.org/packages/7d/8c/73ee2babd04e8bfcf1fd5c20aa553d18bf0ebc24b592b4f831d12ae46cc0/ruamel.yaml.clib-0.2.14-cp311-cp311-win_amd64.whl", hash = "sha256:4ccba93c1e5a40af45b2f08e4591969fa4697eae951c708f3f83dcbf9f6c6bb1", size = 118234, upload-time = "2025-09-22T19:50:47.019Z" },
-    { url = "https://files.pythonhosted.org/packages/b4/42/ccfb34a25289afbbc42017e4d3d4288e61d35b2e00cfc6b92974a6a1f94b/ruamel.yaml.clib-0.2.14-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:6aeadc170090ff1889f0d2c3057557f9cd71f975f17535c26a5d37af98f19c27", size = 271775, upload-time = "2025-09-23T14:24:12.771Z" },
-    { url = "https://files.pythonhosted.org/packages/82/73/e628a92e80197ff6a79ab81ec3fa00d4cc082d58ab78d3337b7ba7043301/ruamel.yaml.clib-0.2.14-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5e56ac47260c0eed992789fa0b8efe43404a9adb608608631a948cee4fc2b052", size = 138842, upload-time = "2025-09-22T19:50:49.156Z" },
-    { url = "https://files.pythonhosted.org/packages/2b/c5/346c7094344a60419764b4b1334d9e0285031c961176ff88ffb652405b0c/ruamel.yaml.clib-0.2.14-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:a911aa73588d9a8b08d662b9484bc0567949529824a55d3885b77e8dd62a127a", size = 647404, upload-time = "2025-09-22T19:50:52.921Z" },
-    { url = "https://files.pythonhosted.org/packages/df/99/65080c863eb06d4498de3d6c86f3e90595e02e159fd8529f1565f56cfe2c/ruamel.yaml.clib-0.2.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a05ba88adf3d7189a974b2de7a9d56731548d35dc0a822ec3dc669caa7019b29", size = 753141, upload-time = "2025-09-22T19:50:50.294Z" },
-    { url = "https://files.pythonhosted.org/packages/3d/e3/0de85f3e3333f8e29e4b10244374a202a87665d1131798946ee22cf05c7c/ruamel.yaml.clib-0.2.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb04c5650de6668b853623eceadcdb1a9f2fee381f5d7b6bc842ee7c239eeec4", size = 703477, upload-time = "2025-09-22T19:50:51.508Z" },
-    { url = "https://files.pythonhosted.org/packages/d9/25/0d2f09d8833c7fd77ab8efeff213093c16856479a9d293180a0d89f6bed9/ruamel.yaml.clib-0.2.14-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:df3ec9959241d07bc261f4983d25a1205ff37703faf42b474f15d54d88b4f8c9", size = 741157, upload-time = "2025-09-23T18:42:50.408Z" },
-    { url = "https://files.pythonhosted.org/packages/d3/8c/959f10c2e2153cbdab834c46e6954b6dd9e3b109c8f8c0a3cf1618310985/ruamel.yaml.clib-0.2.14-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fbc08c02e9b147a11dfcaa1ac8a83168b699863493e183f7c0c8b12850b7d259", size = 745859, upload-time = "2025-09-22T19:50:54.497Z" },
-    { url = "https://files.pythonhosted.org/packages/ed/6b/e580a7c18b485e1a5f30a32cda96b20364b0ba649d9d2baaf72f8bd21f83/ruamel.yaml.clib-0.2.14-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c099cafc1834d3c5dac305865d04235f7c21c167c8dd31ebc3d6bbc357e2f023", size = 770200, upload-time = "2025-09-22T19:50:55.718Z" },
-    { url = "https://files.pythonhosted.org/packages/ef/44/3455eebc761dc8e8fdced90f2b0a3fa61e32ba38b50de4130e2d57db0f21/ruamel.yaml.clib-0.2.14-cp312-cp312-win32.whl", hash = "sha256:b5b0f7e294700b615a3bcf6d28b26e6da94e8eba63b079f4ec92e9ba6c0d6b54", size = 98829, upload-time = "2025-09-22T19:50:58.895Z" },
-    { url = "https://files.pythonhosted.org/packages/76/ab/5121f7f3b651db93de546f8c982c241397aad0a4765d793aca1dac5eadee/ruamel.yaml.clib-0.2.14-cp312-cp312-win_amd64.whl", hash = "sha256:a37f40a859b503304dd740686359fcf541d6fb3ff7fc10f539af7f7150917c68", size = 115570, upload-time = "2025-09-22T19:50:57.981Z" },
-    { url = "https://files.pythonhosted.org/packages/d7/ae/e3811f05415594025e96000349d3400978adaed88d8f98d494352d9761ee/ruamel.yaml.clib-0.2.14-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7e4f9da7e7549946e02a6122dcad00b7c1168513acb1f8a726b1aaf504a99d32", size = 269205, upload-time = "2025-09-23T14:24:15.06Z" },
-    { url = "https://files.pythonhosted.org/packages/72/06/7d51f4688d6d72bb72fa74254e1593c4f5ebd0036be5b41fe39315b275e9/ruamel.yaml.clib-0.2.14-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:dd7546c851e59c06197a7c651335755e74aa383a835878ca86d2c650c07a2f85", size = 137417, upload-time = "2025-09-22T19:50:59.82Z" },
-    { url = "https://files.pythonhosted.org/packages/5a/08/b4499234a420ef42960eeb05585df5cc7eb25ccb8c980490b079e6367050/ruamel.yaml.clib-0.2.14-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:1c1acc3a0209ea9042cc3cfc0790edd2eddd431a2ec3f8283d081e4d5018571e", size = 642558, upload-time = "2025-09-22T19:51:03.388Z" },
-    { url = "https://files.pythonhosted.org/packages/b6/ba/1975a27dedf1c4c33306ee67c948121be8710b19387aada29e2f139c43ee/ruamel.yaml.clib-0.2.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2070bf0ad1540d5c77a664de07ebcc45eebd1ddcab71a7a06f26936920692beb", size = 744087, upload-time = "2025-09-22T19:51:00.897Z" },
-    { url = "https://files.pythonhosted.org/packages/20/15/8a19a13d27f3bd09fa18813add8380a29115a47b553845f08802959acbce/ruamel.yaml.clib-0.2.14-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd8fe07f49c170e09d76773fb86ad9135e0beee44f36e1576a201b0676d3d1d", size = 699709, upload-time = "2025-09-22T19:51:02.075Z" },
-    { url = "https://files.pythonhosted.org/packages/19/ee/8d6146a079ad21e534b5083c9ee4a4c8bec42f79cf87594b60978286b39a/ruamel.yaml.clib-0.2.14-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ff86876889ea478b1381089e55cf9e345707b312beda4986f823e1d95e8c0f59", size = 708926, upload-time = "2025-09-23T18:42:51.707Z" },
-    { url = "https://files.pythonhosted.org/packages/a9/f5/426b714abdc222392e68f3b8ad323930d05a214a27c7e7a0f06c69126401/ruamel.yaml.clib-0.2.14-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1f118b707eece8cf84ecbc3e3ec94d9db879d85ed608f95870d39b2d2efa5dca", size = 740202, upload-time = "2025-09-22T19:51:04.673Z" },
-    { url = "https://files.pythonhosted.org/packages/3d/ac/3c5c2b27a183f4fda8a57c82211721c016bcb689a4a175865f7646db9f94/ruamel.yaml.clib-0.2.14-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b30110b29484adc597df6bd92a37b90e63a8c152ca8136aad100a02f8ba6d1b6", size = 765196, upload-time = "2025-09-22T19:51:05.916Z" },
-    { url = "https://files.pythonhosted.org/packages/92/2e/06f56a71fd55021c993ed6e848c9b2e5e9cfce180a42179f0ddd28253f7c/ruamel.yaml.clib-0.2.14-cp313-cp313-win32.whl", hash = "sha256:f4e97a1cf0b7a30af9e1d9dad10a5671157b9acee790d9e26996391f49b965a2", size = 98635, upload-time = "2025-09-22T19:51:08.183Z" },
-    { url = "https://files.pythonhosted.org/packages/51/79/76aba16a1689b50528224b182f71097ece338e7a4ab55e84c2e73443b78a/ruamel.yaml.clib-0.2.14-cp313-cp313-win_amd64.whl", hash = "sha256:090782b5fb9d98df96509eecdbcaffd037d47389a89492320280d52f91330d78", size = 115238, upload-time = "2025-09-22T19:51:07.081Z" },
-    { url = "https://files.pythonhosted.org/packages/21/e2/a59ff65c26aaf21a24eb38df777cb9af5d87ba8fc8107c163c2da9d1e85e/ruamel.yaml.clib-0.2.14-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:7df6f6e9d0e33c7b1d435defb185095386c469109de723d514142632a7b9d07f", size = 271441, upload-time = "2025-09-23T14:24:16.498Z" },
-    { url = "https://files.pythonhosted.org/packages/6b/fa/3234f913fe9a6525a7b97c6dad1f51e72b917e6872e051a5e2ffd8b16fbb/ruamel.yaml.clib-0.2.14-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:70eda7703b8126f5e52fcf276e6c0f40b0d314674f896fc58c47b0aef2b9ae83", size = 137970, upload-time = "2025-09-22T19:51:09.472Z" },
-    { url = "https://files.pythonhosted.org/packages/ef/ec/4edbf17ac2c87fa0845dd366ef8d5852b96eb58fcd65fc1ecf5fe27b4641/ruamel.yaml.clib-0.2.14-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a0cb71ccc6ef9ce36eecb6272c81afdc2f565950cdcec33ae8e6cd8f7fc86f27", size = 739639, upload-time = "2025-09-22T19:51:10.566Z" },
-    { url = "https://files.pythonhosted.org/packages/15/18/b0e1fafe59051de9e79cdd431863b03593ecfa8341c110affad7c8121efc/ruamel.yaml.clib-0.2.14-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e7cb9ad1d525d40f7d87b6df7c0ff916a66bc52cb61b66ac1b2a16d0c1b07640", size = 764456, upload-time = "2025-09-22T19:51:11.736Z" },
-    { url = "https://files.pythonhosted.org/packages/e7/cd/150fdb96b8fab27fe08d8a59fe67554568727981806e6bc2677a16081ec7/ruamel_yaml_clib-0.2.14-cp314-cp314-win32.whl", hash = "sha256:9b4104bf43ca0cd4e6f738cb86326a3b2f6eef00f417bd1e7efb7bdffe74c539", size = 102394, upload-time = "2025-11-14T21:57:36.703Z" },
-    { url = "https://files.pythonhosted.org/packages/bd/e6/a3fa40084558c7e1dc9546385f22a93949c890a8b2e445b2ba43935f51da/ruamel_yaml_clib-0.2.14-cp314-cp314-win_amd64.whl", hash = "sha256:13997d7d354a9890ea1ec5937a219817464e5cc344805b37671562a401ca3008", size = 122673, upload-time = "2025-11-14T21:57:38.177Z" },
+    { name = "ruamel-yaml-clibz", marker = "platform_python_implementation == 'CPython'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/0c/5d/8a1de57b5a11245c61c906d422cd1e66b6778e134a1c68823a451be5759c/ruamel_yaml-0.19.0.tar.gz", hash = "sha256:ff19233e1eb3e9301e7a3d437847713e361a80faace167639327efbe8c0e5f95", size = 142095, upload-time = "2025-12-31T16:47:31.837Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/9b/3e/835d495068a4bb03419ce8c5464734ff6f3343df948e033cb5e5f81f7f08/ruamel_yaml-0.19.0-py3-none-any.whl", hash = "sha256:96ea8bafd9f3fdb0181ce3cc05e6ec02ce0a8788cbafa9b5a6e47c76fe26dfc6", size = 117777, upload-time = "2025-12-31T16:47:29.07Z" },
 ]
 
+[[package]]
+name = "ruamel-yaml-clibz"
+version = "0.3.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/8f/95/9bcc25e84703180c3941062796572e0fc73bd659086efdc4ef9b8af19e36/ruamel_yaml_clibz-0.3.4.tar.gz", hash = "sha256:e99077ac6aa4943af1000161a0cb793a379c5c8cd03ea8dd3803e0b58739b685", size = 231076, upload-time = "2025-12-31T17:11:09.341Z" }
+
 [[package]]
 name = "ruff"
-version = "0.14.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/41/b9/9bd84453ed6dd04688de9b3f3a4146a1698e8faae2ceeccce4e14c67ae17/ruff-0.14.0.tar.gz", hash = "sha256:62ec8969b7510f77945df916de15da55311fade8d6050995ff7f680afe582c57", size = 5452071, upload-time = "2025-10-07T18:21:55.763Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/3a/4e/79d463a5f80654e93fa653ebfb98e0becc3f0e7cf6219c9ddedf1e197072/ruff-0.14.0-py3-none-linux_armv6l.whl", hash = "sha256:58e15bffa7054299becf4bab8a1187062c6f8cafbe9f6e39e0d5aface455d6b3", size = 12494532, upload-time = "2025-10-07T18:21:00.373Z" },
-    { url = "https://files.pythonhosted.org/packages/ee/40/e2392f445ed8e02aa6105d49db4bfff01957379064c30f4811c3bf38aece/ruff-0.14.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:838d1b065f4df676b7c9957992f2304e41ead7a50a568185efd404297d5701e8", size = 13160768, upload-time = "2025-10-07T18:21:04.73Z" },
-    { url = "https://files.pythonhosted.org/packages/75/da/2a656ea7c6b9bd14c7209918268dd40e1e6cea65f4bb9880eaaa43b055cd/ruff-0.14.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:703799d059ba50f745605b04638fa7e9682cc3da084b2092feee63500ff3d9b8", size = 12363376, upload-time = "2025-10-07T18:21:07.833Z" },
-    { url = "https://files.pythonhosted.org/packages/42/e2/1ffef5a1875add82416ff388fcb7ea8b22a53be67a638487937aea81af27/ruff-0.14.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ba9a8925e90f861502f7d974cc60e18ca29c72bb0ee8bfeabb6ade35a3abde7", size = 12608055, upload-time = "2025-10-07T18:21:10.72Z" },
-    { url = "https://files.pythonhosted.org/packages/4a/32/986725199d7cee510d9f1dfdf95bf1efc5fa9dd714d0d85c1fb1f6be3bc3/ruff-0.14.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e41f785498bd200ffc276eb9e1570c019c1d907b07cfb081092c8ad51975bbe7", size = 12318544, upload-time = "2025-10-07T18:21:13.741Z" },
-    { url = "https://files.pythonhosted.org/packages/9a/ed/4969cefd53315164c94eaf4da7cfba1f267dc275b0abdd593d11c90829a3/ruff-0.14.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30a58c087aef4584c193aebf2700f0fbcfc1e77b89c7385e3139956fa90434e2", size = 14001280, upload-time = "2025-10-07T18:21:16.411Z" },
-    { url = "https://files.pythonhosted.org/packages/ab/ad/96c1fc9f8854c37681c9613d825925c7f24ca1acfc62a4eb3896b50bacd2/ruff-0.14.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f8d07350bc7af0a5ce8812b7d5c1a7293cf02476752f23fdfc500d24b79b783c", size = 15027286, upload-time = "2025-10-07T18:21:19.577Z" },
-    { url = "https://files.pythonhosted.org/packages/b3/00/1426978f97df4fe331074baf69615f579dc4e7c37bb4c6f57c2aad80c87f/ruff-0.14.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eec3bbbf3a7d5482b5c1f42d5fc972774d71d107d447919fca620b0be3e3b75e", size = 14451506, upload-time = "2025-10-07T18:21:22.779Z" },
-    { url = "https://files.pythonhosted.org/packages/58/d5/9c1cea6e493c0cf0647674cca26b579ea9d2a213b74b5c195fbeb9678e15/ruff-0.14.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16b68e183a0e28e5c176d51004aaa40559e8f90065a10a559176713fcf435206", size = 13437384, upload-time = "2025-10-07T18:21:25.758Z" },
-    { url = "https://files.pythonhosted.org/packages/29/b4/4cd6a4331e999fc05d9d77729c95503f99eae3ba1160469f2b64866964e3/ruff-0.14.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb732d17db2e945cfcbbc52af0143eda1da36ca8ae25083dd4f66f1542fdf82e", size = 13447976, upload-time = "2025-10-07T18:21:28.83Z" },
-    { url = "https://files.pythonhosted.org/packages/3b/c0/ac42f546d07e4f49f62332576cb845d45c67cf5610d1851254e341d563b6/ruff-0.14.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:c958f66ab884b7873e72df38dcabee03d556a8f2ee1b8538ee1c2bbd619883dd", size = 13682850, upload-time = "2025-10-07T18:21:31.842Z" },
-    { url = "https://files.pythonhosted.org/packages/5f/c4/4b0c9bcadd45b4c29fe1af9c5d1dc0ca87b4021665dfbe1c4688d407aa20/ruff-0.14.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7eb0499a2e01f6e0c285afc5bac43ab380cbfc17cd43a2e1dd10ec97d6f2c42d", size = 12449825, upload-time = "2025-10-07T18:21:35.074Z" },
-    { url = "https://files.pythonhosted.org/packages/4b/a8/e2e76288e6c16540fa820d148d83e55f15e994d852485f221b9524514730/ruff-0.14.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4c63b2d99fafa05efca0ab198fd48fa6030d57e4423df3f18e03aa62518c565f", size = 12272599, upload-time = "2025-10-07T18:21:38.08Z" },
-    { url = "https://files.pythonhosted.org/packages/18/14/e2815d8eff847391af632b22422b8207704222ff575dec8d044f9ab779b2/ruff-0.14.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:668fce701b7a222f3f5327f86909db2bbe99c30877c8001ff934c5413812ac02", size = 13193828, upload-time = "2025-10-07T18:21:41.216Z" },
-    { url = "https://files.pythonhosted.org/packages/44/c6/61ccc2987cf0aecc588ff8f3212dea64840770e60d78f5606cd7dc34de32/ruff-0.14.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a86bf575e05cb68dcb34e4c7dfe1064d44d3f0c04bbc0491949092192b515296", size = 13628617, upload-time = "2025-10-07T18:21:44.04Z" },
-    { url = "https://files.pythonhosted.org/packages/73/e6/03b882225a1b0627e75339b420883dc3c90707a8917d2284abef7a58d317/ruff-0.14.0-py3-none-win32.whl", hash = "sha256:7450a243d7125d1c032cb4b93d9625dea46c8c42b4f06c6b709baac168e10543", size = 12367872, upload-time = "2025-10-07T18:21:46.67Z" },
-    { url = "https://files.pythonhosted.org/packages/41/77/56cf9cf01ea0bfcc662de72540812e5ba8e9563f33ef3d37ab2174892c47/ruff-0.14.0-py3-none-win_amd64.whl", hash = "sha256:ea95da28cd874c4d9c922b39381cbd69cb7e7b49c21b8152b014bd4f52acddc2", size = 13464628, upload-time = "2025-10-07T18:21:50.318Z" },
-    { url = "https://files.pythonhosted.org/packages/c6/2a/65880dfd0e13f7f13a775998f34703674a4554906167dce02daf7865b954/ruff-0.14.0-py3-none-win_arm64.whl", hash = "sha256:f42c9495f5c13ff841b1da4cb3c2a42075409592825dada7c5885c2c844ac730", size = 12565142, upload-time = "2025-10-07T18:21:53.577Z" },
+version = "0.14.10"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/57/08/52232a877978dd8f9cf2aeddce3e611b40a63287dfca29b6b8da791f5e8d/ruff-0.14.10.tar.gz", hash = "sha256:9a2e830f075d1a42cd28420d7809ace390832a490ed0966fe373ba288e77aaf4", size = 5859763, upload-time = "2025-12-18T19:28:57.98Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/60/01/933704d69f3f05ee16ef11406b78881733c186fe14b6a46b05cfcaf6d3b2/ruff-0.14.10-py3-none-linux_armv6l.whl", hash = "sha256:7a3ce585f2ade3e1f29ec1b92df13e3da262178df8c8bdf876f48fa0e8316c49", size = 13527080, upload-time = "2025-12-18T19:29:25.642Z" },
+    { url = "https://files.pythonhosted.org/packages/df/58/a0349197a7dfa603ffb7f5b0470391efa79ddc327c1e29c4851e85b09cc5/ruff-0.14.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:674f9be9372907f7257c51f1d4fc902cb7cf014b9980152b802794317941f08f", size = 13797320, upload-time = "2025-12-18T19:29:02.571Z" },
+    { url = "https://files.pythonhosted.org/packages/7b/82/36be59f00a6082e38c23536df4e71cdbc6af8d7c707eade97fcad5c98235/ruff-0.14.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d85713d522348837ef9df8efca33ccb8bd6fcfc86a2cde3ccb4bc9d28a18003d", size = 12918434, upload-time = "2025-12-18T19:28:51.202Z" },
+    { url = "https://files.pythonhosted.org/packages/a6/00/45c62a7f7e34da92a25804f813ebe05c88aa9e0c25e5cb5a7d23dd7450e3/ruff-0.14.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6987ebe0501ae4f4308d7d24e2d0fe3d7a98430f5adfd0f1fead050a740a3a77", size = 13371961, upload-time = "2025-12-18T19:29:04.991Z" },
+    { url = "https://files.pythonhosted.org/packages/40/31/a5906d60f0405f7e57045a70f2d57084a93ca7425f22e1d66904769d1628/ruff-0.14.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16a01dfb7b9e4eee556fbfd5392806b1b8550c9b4a9f6acd3dbe6812b193c70a", size = 13275629, upload-time = "2025-12-18T19:29:21.381Z" },
+    { url = "https://files.pythonhosted.org/packages/3e/60/61c0087df21894cf9d928dc04bcd4fb10e8b2e8dca7b1a276ba2155b2002/ruff-0.14.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7165d31a925b7a294465fa81be8c12a0e9b60fb02bf177e79067c867e71f8b1f", size = 14029234, upload-time = "2025-12-18T19:29:00.132Z" },
+    { url = "https://files.pythonhosted.org/packages/44/84/77d911bee3b92348b6e5dab5a0c898d87084ea03ac5dc708f46d88407def/ruff-0.14.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c561695675b972effb0c0a45db233f2c816ff3da8dcfbe7dfc7eed625f218935", size = 15449890, upload-time = "2025-12-18T19:28:53.573Z" },
+    { url = "https://files.pythonhosted.org/packages/e9/36/480206eaefa24a7ec321582dda580443a8f0671fdbf6b1c80e9c3e93a16a/ruff-0.14.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4bb98fcbbc61725968893682fd4df8966a34611239c9fd07a1f6a07e7103d08e", size = 15123172, upload-time = "2025-12-18T19:29:23.453Z" },
+    { url = "https://files.pythonhosted.org/packages/5c/38/68e414156015ba80cef5473d57919d27dfb62ec804b96180bafdeaf0e090/ruff-0.14.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f24b47993a9d8cb858429e97bdf8544c78029f09b520af615c1d261bf827001d", size = 14460260, upload-time = "2025-12-18T19:29:27.808Z" },
+    { url = "https://files.pythonhosted.org/packages/b3/19/9e050c0dca8aba824d67cc0db69fb459c28d8cd3f6855b1405b3f29cc91d/ruff-0.14.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59aabd2e2c4fd614d2862e7939c34a532c04f1084476d6833dddef4afab87e9f", size = 14229978, upload-time = "2025-12-18T19:29:11.32Z" },
+    { url = "https://files.pythonhosted.org/packages/51/eb/e8dd1dd6e05b9e695aa9dd420f4577debdd0f87a5ff2fedda33c09e9be8c/ruff-0.14.10-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:213db2b2e44be8625002dbea33bb9c60c66ea2c07c084a00d55732689d697a7f", size = 14338036, upload-time = "2025-12-18T19:29:09.184Z" },
+    { url = "https://files.pythonhosted.org/packages/6a/12/f3e3a505db7c19303b70af370d137795fcfec136d670d5de5391e295c134/ruff-0.14.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b914c40ab64865a17a9a5b67911d14df72346a634527240039eb3bd650e5979d", size = 13264051, upload-time = "2025-12-18T19:29:13.431Z" },
+    { url = "https://files.pythonhosted.org/packages/08/64/8c3a47eaccfef8ac20e0484e68e0772013eb85802f8a9f7603ca751eb166/ruff-0.14.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1484983559f026788e3a5c07c81ef7d1e97c1c78ed03041a18f75df104c45405", size = 13283998, upload-time = "2025-12-18T19:29:06.994Z" },
+    { url = "https://files.pythonhosted.org/packages/12/84/534a5506f4074e5cc0529e5cd96cfc01bb480e460c7edf5af70d2bcae55e/ruff-0.14.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c70427132db492d25f982fffc8d6c7535cc2fd2c83fc8888f05caaa248521e60", size = 13601891, upload-time = "2025-12-18T19:28:55.811Z" },
+    { url = "https://files.pythonhosted.org/packages/0d/1e/14c916087d8598917dbad9b2921d340f7884824ad6e9c55de948a93b106d/ruff-0.14.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5bcf45b681e9f1ee6445d317ce1fa9d6cba9a6049542d1c3d5b5958986be8830", size = 14336660, upload-time = "2025-12-18T19:29:16.531Z" },
+    { url = "https://files.pythonhosted.org/packages/f2/1c/d7b67ab43f30013b47c12b42d1acd354c195351a3f7a1d67f59e54227ede/ruff-0.14.10-py3-none-win32.whl", hash = "sha256:104c49fc7ab73f3f3a758039adea978869a918f31b73280db175b43a2d9b51d6", size = 13196187, upload-time = "2025-12-18T19:29:19.006Z" },
+    { url = "https://files.pythonhosted.org/packages/fb/9c/896c862e13886fae2af961bef3e6312db9ebc6adc2b156fe95e615dee8c1/ruff-0.14.10-py3-none-win_amd64.whl", hash = "sha256:466297bd73638c6bdf06485683e812db1c00c7ac96d4ddd0294a338c62fdc154", size = 14661283, upload-time = "2025-12-18T19:29:30.16Z" },
+    { url = "https://files.pythonhosted.org/packages/74/31/b0e29d572670dca3674eeee78e418f20bdf97fa8aa9ea71380885e175ca0/ruff-0.14.10-py3-none-win_arm64.whl", hash = "sha256:e51d046cf6dda98a4633b8a8a771451107413b0f07183b2bef03f075599e44e6", size = 13729839, upload-time = "2025-12-18T19:28:48.636Z" },
 ]
 
 [[package]]
@@ -1596,6 +1765,52 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/79/b3/561cd6afa959e9dd522af12acc4f803e8bab1bd0e383bffc5211721c5fcb/scp-0.15.0-py2.py3-none-any.whl", hash = "sha256:9e7f721e5ac563c33eb0831d0f949c6342f1c28c3bdc3b02f39d77b5ea20df7e", size = 8753, upload-time = "2024-05-23T21:37:46.226Z" },
 ]
 
+[[package]]
+name = "scrapli"
+version = "2025.1.30"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/01/b0/72fd21e1abdfc0e36f07c56c7aad339bf554771784464980d70249fe0f64/scrapli-2025.1.30.tar.gz", hash = "sha256:3426a38b5dd6a4c67749c30f14102c04a4a43d3da17710b46fec7e53409b340e", size = 104403, upload-time = "2025-01-31T00:47:02.257Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/51/2d/b7dc63b1ff7d60908843fcee2a8897735a06f18d04654baf83f95a4b1a2f/scrapli-2025.1.30-py3-none-any.whl", hash = "sha256:f71ca4e96b56ad245f34269dc3eedf168aca54eb7b1ba96ad0c965c2b76f807e", size = 145741, upload-time = "2025-01-31T00:47:01.002Z" },
+]
+
+[[package]]
+name = "scrapli-cfg"
+version = "2025.1.30"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "scrapli" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e4/ba/4e475aee50378af7975cd940b21d727040e14175cb065e8b10b65b0b6240/scrapli_cfg-2025.1.30.tar.gz", hash = "sha256:7ed3b5743116d35d7d65f86d06dd7e74fcc509e1c8df6ddebffdeb7cebed13e8", size = 37327, upload-time = "2025-01-31T00:51:01.032Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/3f/fd/33837093aa3cbed2939ad662252f47c37c49787189806ea100c83bca6e36/scrapli_cfg-2025.1.30-py3-none-any.whl", hash = "sha256:aaf0c5f52ed06ba0ffefa860ab947c96b663c98a96f38b90eac567d46906fdf4", size = 61083, upload-time = "2025-01-31T00:50:58.475Z" },
+]
+
+[[package]]
+name = "scrapli-community"
+version = "2025.1.30"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "scrapli" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/92/29/b5df286b9cb143f1e95b7824a8ba1ebb0fc7606b96a0a50a62fc50cf3858/scrapli_community-2025.1.30.tar.gz", hash = "sha256:5357d1e471879203f648948adf114d8155a69ca1242060504d9b92f54f79d119", size = 33083, upload-time = "2025-01-31T00:50:37.445Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/23/67/c0c0d088a786fcaa3e0584a765f1da00b0560dadc7f0ea8c8755b7d175f3/scrapli_community-2025.1.30-py3-none-any.whl", hash = "sha256:dc4d02193ec3bb14cec9e04ab0d48fc59d575e1ccedac92fb4b8bbd6f3b6726f", size = 93093, upload-time = "2025-01-31T00:50:36.099Z" },
+]
+
+[[package]]
+name = "scrapli-netconf"
+version = "2025.1.30"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "lxml" },
+    { name = "scrapli" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/71/7a/11f5881ae1aa8fe3f102d3c6e6a16e4b3db7c2b9c9558422227f9a2131d4/scrapli_netconf-2025.1.30.tar.gz", hash = "sha256:06813455dda9bd27d99734a3f689b4259ccab625141fc41126495ccad705d248", size = 30362, upload-time = "2025-01-31T00:50:02.72Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/e7/a9/3d722619901cca8ba5cbe0a66ed024292d17e155da3b26260abe227fcbe3/scrapli_netconf-2025.1.30-py3-none-any.whl", hash = "sha256:009041515d9d75d6e3748730bc920b350eb6ab86c6890e44495ca00de3e86cd9", size = 37133, upload-time = "2025-01-31T00:50:01.042Z" },
+]
+
 [[package]]
 name = "setuptools"
 version = "80.9.0"
@@ -1623,6 +1838,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
 ]
 
+[[package]]
+name = "sniffio"
+version = "1.3.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
+]
+
 [[package]]
 name = "sortedcontainers"
 version = "2.4.0"
@@ -1643,11 +1867,11 @@ wheels = [
 
 [[package]]
 name = "termcolor"
-version = "3.1.0"
+version = "3.3.0"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/ca/6c/3d75c196ac07ac8749600b60b03f4f6094d54e132c4d94ebac6ee0e0add0/termcolor-3.1.0.tar.gz", hash = "sha256:6a6dd7fbee581909eeec6a756cff1d7f7c376063b14e4a298dc4980309e55970", size = 14324, upload-time = "2025-04-30T11:37:53.791Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/46/79/cf31d7a93a8fdc6aa0fbb665be84426a8c5a557d9240b6239e9e11e35fc5/termcolor-3.3.0.tar.gz", hash = "sha256:348871ca648ec6a9a983a13ab626c0acce02f515b9e1983332b17af7979521c5", size = 14434, upload-time = "2025-12-29T12:55:21.882Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/4f/bd/de8d508070629b6d84a30d01d57e4a65c69aa7f5abe7560b8fad3b50ea59/termcolor-3.1.0-py3-none-any.whl", hash = "sha256:591dd26b5c2ce03b9e43f391264626557873ce1d379019786f99b0c2bee140aa", size = 7684, upload-time = "2025-04-30T11:37:52.382Z" },
+    { url = "https://files.pythonhosted.org/packages/33/d1/8bb87d21e9aeb323cc03034f5eaf2c8f69841e40e4853c2627edf8111ed3/termcolor-3.3.0-py3-none-any.whl", hash = "sha256:cf642efadaf0a8ebbbf4bc7a31cec2f9b5f21a9f726f4ccbb08192c9c26f43a5", size = 7734, upload-time = "2025-12-29T12:55:20.718Z" },
 ]
 
 [[package]]
@@ -1731,11 +1955,11 @@ wheels = [
 
 [[package]]
 name = "ttp"
-version = "0.9.5"
+version = "0.10.0"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/6e/4f/b95e48d3b653e1b1494a3a5b15614db5a340826282c5389e38cb2248da39/ttp-0.9.5.tar.gz", hash = "sha256:234414f4d3039d2d1cde09993f89f8db1b34d447f76c6a402555cefac2e59c4e", size = 69141, upload-time = "2023-06-25T00:28:14.036Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/83/e6/9169d35574be82df2a0cdd2546f4f83d0d30964cf0043fc9784df855b024/ttp-0.10.0.tar.gz", hash = "sha256:40f1ca61ee1431f5b1ab5326fb55f852a04749e9574792d45455b62c5e7ac97b", size = 64665, upload-time = "2025-11-02T08:47:50.329Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/03/38/d475e7bde8d192ca5d64ccc0988f3d58f36211bd68c32b6c5883332a8abf/ttp-0.9.5-py2.py3-none-any.whl", hash = "sha256:2c9fcf560b3f696e9fdd3554dc8e4622cbb10cac1d4fca13a7cf608c4a7fd137", size = 85763, upload-time = "2023-06-25T00:28:11.949Z" },
+    { url = "https://files.pythonhosted.org/packages/b3/c3/60abb45bd8eb973997f133eb76949523478d35dfc551a0dbd8906b6a8075/ttp-0.10.0-py3-none-any.whl", hash = "sha256:9985e0ca414e85d41493a6291a924624b9a08c48c78d2d01477cc60ba2a347c1", size = 84287, upload-time = "2025-11-02T08:47:48.656Z" },
 ]
 
 [[package]]
@@ -1752,7 +1976,7 @@ wheels = [
 
 [[package]]
 name = "typer"
-version = "0.19.2"
+version = "0.21.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "click" },
@@ -1760,9 +1984,9 @@ dependencies = [
     { name = "shellingham" },
     { name = "typing-extensions" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/21/ca/950278884e2ca20547ff3eb109478c6baf6b8cf219318e6bc4f666fad8e8/typer-0.19.2.tar.gz", hash = "sha256:9ad824308ded0ad06cc716434705f691d4ee0bfd0fb081839d2e426860e7fdca", size = 104755, upload-time = "2025-09-23T09:47:48.256Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/85/30/ff9ede605e3bd086b4dd842499814e128500621f7951ca1e5ce84bbf61b1/typer-0.21.0.tar.gz", hash = "sha256:c87c0d2b6eee3b49c5c64649ec92425492c14488096dfbc8a0c2799b2f6f9c53", size = 106781, upload-time = "2025-12-25T09:54:53.651Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/00/22/35617eee79080a5d071d0f14ad698d325ee6b3bf824fc0467c03b30e7fa8/typer-0.19.2-py3-none-any.whl", hash = "sha256:755e7e19670ffad8283db353267cb81ef252f595aa6834a0d1ca9312d9326cb9", size = 46748, upload-time = "2025-09-23T09:47:46.777Z" },
+    { url = "https://files.pythonhosted.org/packages/e1/e4/5ebc1899d31d2b1601b32d21cfb4bba022ae6fce323d365f0448031b1660/typer-0.21.0-py3-none-any.whl", hash = "sha256:c79c01ca6b30af9fd48284058a7056ba0d3bf5cf10d0ff3d0c5b11b68c258ac6", size = 47109, upload-time = "2025-12-25T09:54:51.918Z" },
 ]
 
 [[package]]
@@ -1788,11 +2012,11 @@ wheels = [
 
 [[package]]
 name = "urllib3"
-version = "2.5.0"
+version = "2.6.2"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/1e/24/a2a2ed9addd907787d7aa0355ba36a6cadf1768b934c652ea78acbd59dcd/urllib3-2.6.2.tar.gz", hash = "sha256:016f9c98bb7e98085cb2b4b17b87d2c702975664e4f060c6532e64d1c1a5e797", size = 432930, upload-time = "2025-12-11T15:56:40.252Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" },
+    { url = "https://files.pythonhosted.org/packages/6d/b9/4095b668ea3678bf6a0af005527f39de12fb026516fb3df17495a733b7f8/urllib3-2.6.2-py3-none-any.whl", hash = "sha256:ec21cddfe7724fc7cb4ba4bea7aa8e2ef36f607a4bab81aa6ce42a13dc3f03dd", size = 131182, upload-time = "2025-12-11T15:56:38.584Z" },
 ]
 
 [[package]]
