| """ | |
| Domain Layer - LLM Interface | |
| Defines contract for LLM implementations. | |
| """ | |
| from abc import ABC, abstractmethod | |
| from dataclasses import dataclass | |
| from typing import AsyncIterator, List, Optional | |
@dataclass
class LLMMessage:
    """Chat message"""

    role: str  # system, user, assistant
    content: str

@dataclass
class LLMResponse:
    """LLM generation response"""

    content: str
    model: str
    tokens_used: int
    finish_reason: str

class ILLM(ABC):
    """Interface for LLM implementations"""

    @abstractmethod
    async def generate(
        self,
        messages: List[LLMMessage],
        temperature: float = 0.7,
        max_tokens: int = 2048,
        stream: bool = False,
    ) -> LLMResponse:
        """Generate response from LLM"""
        pass

    @abstractmethod
    async def generate_stream(
        self,
        messages: List[LLMMessage],
        temperature: float = 0.7,
        max_tokens: int = 2048,
    ) -> AsyncIterator[str]:
        """Generate streaming response from LLM"""
        pass

    @abstractmethod
    def get_model_name(self) -> str:
        """Get model name"""
        pass

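
# Illustrative sketch (not part of the original module): a minimal,
# self-contained ILLM implementation that simply echoes the last user
# message. The class name and behavior are assumptions, shown only to
# clarify how the contract above can be satisfied (e.g. as a test double).
class EchoLLM(ILLM):
    """Toy ILLM implementation that echoes the last user message."""

    def __init__(self, model_name: str = "echo-model") -> None:
        self._model_name = model_name

    async def generate(
        self,
        messages: List[LLMMessage],
        temperature: float = 0.7,
        max_tokens: int = 2048,
        stream: bool = False,
    ) -> LLMResponse:
        # Find the most recent user message; fall back to an empty string.
        last_user = next(
            (m.content for m in reversed(messages) if m.role == "user"), ""
        )
        return LLMResponse(
            content=last_user,
            model=self._model_name,
            tokens_used=len(last_user.split()),
            finish_reason="stop",
        )

    async def generate_stream(
        self,
        messages: List[LLMMessage],
        temperature: float = 0.7,
        max_tokens: int = 2048,
    ) -> AsyncIterator[str]:
        response = await self.generate(messages, temperature, max_tokens)
        # Yield word by word to mimic token streaming.
        for token in response.content.split():
            yield token + " "

    def get_model_name(self) -> str:
        return self._model_name
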
class IPromptBuilder(ABC):
    """Interface for prompt building"""

    @abstractmethod
    def build_rag_prompt(
        self, query: str, context: List[str], system_prompt: Optional[str] = None
    ) -> List[LLMMessage]:
        """Build RAG prompt with query and context"""
        pass

    @abstractmethod
    def build_query_expansion_prompt(self, query: str) -> List[LLMMessage]:
        """Build prompt for query expansion"""
        pass
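
# Illustrative sketch (not part of the original module): one possible
# IPromptBuilder implementation. The default system prompt and the exact
# prompt wording are assumptions, included only to show how the builder
# assembles LLMMessage lists for the ILLM contract above.
class SimplePromptBuilder(IPromptBuilder):
    """Builds plain-text RAG and query-expansion prompts."""

    DEFAULT_SYSTEM_PROMPT = (
        "Answer the question using only the provided context. "
        "If the context is insufficient, say so."
    )

    def build_rag_prompt(
        self, query: str, context: List[str], system_prompt: Optional[str] = None
    ) -> List[LLMMessage]:
        # Number each retrieved chunk so the model can cite it.
        context_block = "\n\n".join(
            f"[{i + 1}] {chunk}" for i, chunk in enumerate(context)
        )
        return [
            LLMMessage(
                role="system",
                content=system_prompt or self.DEFAULT_SYSTEM_PROMPT,
            ),
            LLMMessage(
                role="user",
                content=f"Context:\n{context_block}\n\nQuestion: {query}",
            ),
        ]

    def build_query_expansion_prompt(self, query: str) -> List[LLMMessage]:
        return [
            LLMMessage(
                role="system",
                content=(
                    "Rewrite the query into three alternative search queries, "
                    "one per line."
                ),
            ),
            LLMMessage(role="user", content=query),
        ]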