Coverage for session_buddy / llm / __init__.py: 100.00%
5 statements
« prev ^ index » next coverage.py v7.13.1, created at 2026-01-04 00:43 -0800
« prev ^ index » next coverage.py v7.13.1, created at 2026-01-04 00:43 -0800
1"""LLM provider system for session management.
3This package provides a unified interface for working with multiple LLM providers
4including OpenAI, Google Gemini, and Ollama. It includes:
5- Standardized data models for messages and responses
6- Abstract base class for provider implementations
7- Individual provider implementations
8- Security utilities for API key validation
9- Manager class for multi-provider orchestration
10"""
12from session_buddy.llm.base import LLMProvider
13from session_buddy.llm.models import (
14 LLMMessage,
15 LLMResponse,
16 StreamChunk,
17 StreamGenerationOptions,
18)
19from session_buddy.llm.providers import (
20 GeminiProvider,
21 OllamaProvider,
22 OpenAIProvider,
23)
24from session_buddy.llm.security import (
25 get_masked_api_key,
26 validate_llm_api_keys_at_startup,
27)
# Public re-export surface of the LLM package.
#
# Entries are grouped by category so each comment actually covers the names
# below it (the previous alphabetized ordering had scattered the category
# comments away from most of their members, which was misleading).
# `from session_buddy.llm import *` exposes exactly these ten names.
__all__ = [
    # Base classes
    "LLMProvider",
    # Data models
    "LLMMessage",
    "LLMResponse",
    "StreamChunk",
    "StreamGenerationOptions",
    # Provider implementations
    "GeminiProvider",
    "OllamaProvider",
    "OpenAIProvider",
    # Security utilities
    "get_masked_api_key",
    "validate_llm_api_keys_at_startup",
]