Coverage for session_buddy/llm/models.py: 84.62% (46 statements)
1"""Data models for LLM provider system.
3This module provides standardized data models for LLM interactions including
4messages, responses, streaming chunks, and generation options.
5"""
7from __future__ import annotations
9from dataclasses import dataclass, field
10from datetime import datetime
11from typing import Any


@dataclass(frozen=True)
class StreamGenerationOptions:
    """Immutable streaming generation options."""

    provider: str | None = None
    model: str | None = None
    use_fallback: bool = True
    temperature: float = 0.7
    max_tokens: int | None = None
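

# A minimal usage sketch (added for illustration, not part of the original
# module); the provider and model strings are hypothetical placeholders.
def _example_options() -> StreamGenerationOptions:
    opts = StreamGenerationOptions(provider="openai", model="gpt-4o", temperature=0.2)
    # Because the dataclass is frozen, `opts.temperature = 0.9` would raise
    # dataclasses.FrozenInstanceError, so options can be shared safely.
    return opts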


@dataclass(frozen=True)
class StreamChunk:
    """Immutable streaming response chunk."""

    content: str = field(default="")
    is_error: bool = field(default=False)
    provider: str = field(default="")
    metadata: dict[str, Any] = field(default_factory=dict)

    @classmethod
    def content_chunk(cls, content: str, provider: str = "") -> StreamChunk:
        """Create a content chunk."""
        return cls(content=content, provider=provider)

    @classmethod
    def error_chunk(cls, error: str) -> StreamChunk:
        """Create an error chunk."""
        return cls(content="", is_error=True, metadata={"error": error})
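

# Illustrative sketch (not part of the original module) of the two factory
# classmethods; the provider name is a hypothetical placeholder.
def _example_chunks() -> list[StreamChunk]:
    ok = StreamChunk.content_chunk("Hello", provider="openai")
    err = StreamChunk.error_chunk("rate limit exceeded")
    assert not ok.is_error and err.is_error
    assert err.metadata["error"] == "rate limit exceeded"
    return [ok, err]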


@dataclass
class LLMMessage:
    """Standardized message format across LLM providers."""

    role: str  # 'system', 'user', 'assistant'
    content: str
    timestamp: str | None = None
    metadata: dict[str, Any] | None = None

    def __post_init__(self) -> None:
        if self.timestamp is None:
            self.timestamp = datetime.now().isoformat()
        if self.metadata is None:
            self.metadata = {}
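

# Illustrative sketch (not part of the original module): __post_init__ fills
# in timestamp and metadata when they are omitted.
def _example_message() -> LLMMessage:
    msg = LLMMessage(role="user", content="Summarize the session.")
    assert msg.timestamp is not None  # set to datetime.now().isoformat()
    assert msg.metadata == {}  # defaulted to an empty dict
    return msg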


@dataclass
class LLMResponse:
    """Standardized response format from LLM providers."""

    content: str
    model: str
    provider: str
    usage: dict[str, Any]
    finish_reason: str
    timestamp: str
    metadata: dict[str, Any] | None = None

    def __post_init__(self) -> None:
        if self.metadata is None:
            self.metadata = {}
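

# Illustrative sketch (not part of the original module) tying the models
# together; all field values below are hypothetical placeholders.
if __name__ == "__main__":
    response = LLMResponse(
        content="Hello!",
        model="gpt-4o",
        provider="openai",
        usage={"prompt_tokens": 12, "completion_tokens": 3},
        finish_reason="stop",
        timestamp=datetime.now().isoformat(),
    )
    print(response.content, response.metadata)  # metadata defaults to {}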