# LLM Router Configuration (.env format: KEY=value lines, "#" comments, read by a dotenv-style loader)
# Generated by setup
#
# SECURITY: this file contains live API keys in plain text.
# Keep it out of version control (add to .gitignore) and restrict permissions (chmod 600).
# If any key below was ever committed or shared, rotate it at the provider immediately.

# OpenAI (https://platform.openai.com/api-keys)
OPENAI_API_KEY=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

# Google Gemini (https://aistudio.google.com/apikey)
GEMINI_API_KEY=AIzaSyAbCFzDDZ36E28tkX6Z-6H6GhRIq_1OrRE

# Perplexity (https://www.perplexity.ai/settings/api)
# Intentionally blank — presumably the router skips Perplexity when no key is set; verify against the loader.
PERPLEXITY_API_KEY=

# Routing profile; "balanced" is set here — see the router docs for the full list of accepted profiles.
LLM_ROUTER_PROFILE=balanced

# Claude Code subscription mode — never call Anthropic API.
# Claude models are accessed through Claude Code subscription, not API keys.
# simple tasks  → cheap external (Gemini Flash, GPT-4o-mini) saves tokens
# moderate tasks → passthrough (Sonnet handles natively, no routing)
# complex tasks  → /model claude-opus-4-6 hint injected by hook
LLM_ROUTER_CLAUDE_SUBSCRIPTION=true

# Ollama (local inference — free, private)
OLLAMA_BASE_URL=http://localhost:11434
# Comma-separated list of local models used for budget-tier routing.
# NOTE(review): "gemma4" and "qwen3.5" do not match current Ollama registry tags
# (the published families are gemma3 / qwen3) — verify with `ollama list` before relying on them.
OLLAMA_BUDGET_MODELS=qwen2.5:1.5b,gemma4:latest,qwen3.5:latest
