Coverage for session_buddy/llm/base.py: 100.00%
8 statements
coverage.py v7.13.1, created at 2026-01-04 00:43 -0800
1"""Abstract base class for LLM providers.
3This module provides the base interface that all LLM provider implementations
4must follow, ensuring consistent API across different providers.
5"""
7from __future__ import annotations
9import logging
10from abc import ABC, abstractmethod
11from typing import TYPE_CHECKING, Any
13if TYPE_CHECKING:
14 from collections.abc import AsyncGenerator
16 from session_buddy.llm.models import LLMMessage, LLMResponse
19class LLMProvider(ABC):
20 """Abstract base class for LLM providers."""
22 def __init__(self, config: dict[str, Any]) -> None:
23 self.config = config
24 self.name = self.__class__.__name__.replace("Provider", "").lower()
25 self.logger = logging.getLogger(f"llm_providers.{self.name}")
27 @abstractmethod
28 async def generate(
29 self,
30 messages: list[LLMMessage],
31 model: str | None = None,
32 temperature: float = 0.7,
33 max_tokens: int | None = None,
34 **kwargs: Any,
35 ) -> LLMResponse:
36 """Generate a response from the LLM."""
38 @abstractmethod
39 async def stream_generate( # type: ignore[override]
40 self,
41 messages: list[LLMMessage],
42 model: str | None = None,
43 temperature: float = 0.7,
44 max_tokens: int | None = None,
45 **kwargs: Any,
46 ) -> AsyncGenerator[str]:
47 """Generate a streaming response from the LLM."""
49 @abstractmethod
50 async def is_available(self) -> bool:
51 """Check if the provider is available and properly configured."""
53 @abstractmethod
54 def get_models(self) -> list[str]:
55 """Get list of available models for this provider."""