# Filoma Filaraki Configuration Template
# Copy this file to '.env' and fill in the values for ONE scenario only.
# Comment out all other scenarios to avoid conflicts.

# ==============================================================================
# SCENARIO A: Mistral AI (Cloud - Recommended Default)
# ==============================================================================
# Use this for a "plug and play" European cloud experience.
# Get a key at https://console.mistral.ai/
# MISTRAL_API_KEY=
# FILOMA_FILARAKI_MODEL=mistral:mistral-small-latest  # Optional override

# ==============================================================================
# SCENARIO B: Google Gemini (Cloud)
# ==============================================================================
# Use this for Google's Gemini models.
# Get a key at https://aistudio.google.com/
# GEMINI_API_KEY=""
# FILOMA_FILARAKI_MODEL=gemini-3-flash-preview

# ==============================================================================
# SCENARIO C: Ollama (Local - Privacy First)
# ==============================================================================
# Use this for zero-cost, 100% private analysis.
# Requires Ollama app running with `ollama serve`. A good "starter" model is `llama3.1:8b`
# Good performance has also been observed with `qwen3:30b` (tested on RX 7900 XT, Ryzen 5 7600X, 32GB RAM).
# FILOMA_FILARAKI_MODEL=llama3.1:8b
# FILOMA_FILARAKI_BASE_URL=http://localhost:11434/v1
