============================================================
DRY RUN — No jobs will be submitted
============================================================

Generated sbatch script (per-trial lookup elided for brevity):
----------------------------------------
#!/bin/bash
#SBATCH --job-name=hyperherd_lr_sweep
#SBATCH --array=0-11
#SBATCH --partition=short
#SBATCH --time=01:00:00
#SBATCH --mem=2G
#SBATCH --cpus-per-task=1
#SBATCH --output=/path/to/lr_sweep/.hyperherd/logs/%a.out
#SBATCH --error=/path/to/lr_sweep/.hyperherd/logs/%a.err
#SBATCH --open-mode=append

# Fail fast: exit on command errors, unset variables, and pipeline failures.
# Placed after the #SBATCH block (directives must precede the first executable
# line). Slurm exports SLURM_JOB_ID / SLURM_ARRAY_TASK_ID inside array tasks,
# so 'set -u' is safe here, and a non-zero launcher exit now fails the task.
set -euo pipefail

# Run divider — visible in both stdout and stderr after append
_HH_DIVIDER="==== HyperHerd run: job ${SLURM_JOB_ID} array-task ${SLURM_ARRAY_TASK_ID} $(date -Iseconds) ===="
printf "\n%s\n\n" "$_HH_DIVIDER"
printf "\n%s\n\n" "$_HH_DIVIDER" >&2

# Export HyperHerd environment variables
export HYPERHERD_WORKSPACE=/path/to/lr_sweep
export HYPERHERD_SWEEP_NAME=lr_sweep
export HYPERHERD_TRIAL_ID="$SLURM_ARRAY_TASK_ID"

# Per-trial lookup baked at submission time (no Python required here).
# Each arm sets the trial/experiment names and the override string for one
# array index; the default arm guards against an index outside the sweep.
case "$SLURM_ARRAY_TASK_ID" in
  0)
    HYPERHERD_TRIAL_NAME=lr-0.0001_opt-adam
    HYPERHERD_EXPERIMENT_NAME=lr-0.0001_opt-adam
    OVERRIDES='experiment_name=lr-0.0001_opt-adam learning_rate=0.0001 optimizer=adam'
    ;;
  # ... [11 more trial arm(s) elided in dry-run; full script is submitted] ...
  *)
    echo "HyperHerd: no lookup entry for SLURM_ARRAY_TASK_ID=$SLURM_ARRAY_TASK_ID" >&2
    exit 1
    ;;
esac
export HYPERHERD_TRIAL_NAME HYPERHERD_EXPERIMENT_NAME

# Invoke the user's launcher script. OVERRIDES is quoted, so the launcher
# receives all overrides as ONE argument and is expected to split them itself.
# NOTE(review): confirm the launcher splits "$1" — if it expects one argument
# per override, this should be an array expansion instead.
bash /path/to/lr_sweep/launch.sh "$OVERRIDES"
----------------------------------------

Submission plan
  Pending: 12 of 12 trial(s)
  Indices: 0-11
  Use 'herd ls' to see every trial in the sweep.