# flare/llm_openai.py
"""
OpenAI GPT Implementation
"""
from openai import AsyncOpenAI
from typing import Any, Dict, List, Optional
from llm_interface import LLMInterface
from utils import log


class OpenAILLM(LLMInterface):
"""OpenAI GPT integration (GPT-4o, GPT-4o-mini)"""

    def __init__(self, api_key: str, model: str, settings: Optional[Dict[str, Any]] = None):
super().__init__(settings)
self.client = AsyncOpenAI(api_key=api_key)
self.model = self._map_model_name(model)
self.temperature = settings.get("temperature", 0.7) if settings else 0.7
self.max_tokens = settings.get("max_tokens", 1000) if settings else 1000
log(f"πŸ€– Initialized OpenAI LLM with model: {self.model}")
def _map_model_name(self, model: str) -> str:
"""Map provider name to actual model name"""
mappings = {
"gpt4o": "gpt-4",
"gpt4o-mini": "gpt-4o-mini"
}
return mappings.get(model, model)
async def generate(self, system_prompt: str, user_input: str, context: List[Dict]) -> str:
"""Generate response from OpenAI"""
try:
# Build messages
messages = [{"role": "system", "content": system_prompt}]
# Add context
for msg in context:
messages.append({
"role": msg.get("role", "user"),
"content": msg.get("content", "")
})
# Add current user input
messages.append({"role": "user", "content": user_input})
# Call OpenAI
response = await self.client.chat.completions.create(
model=self.model,
messages=messages,
temperature=self.temperature,
max_tokens=self.max_tokens
)
            # message.content can be None in edge cases; guard before stripping
            return (response.choices[0].message.content or "").strip()
        except Exception as e:
            log(f"❌ OpenAI error: {e}")
            raise

    async def startup(self, project_config: Dict) -> bool:
        """OpenAI requires no startup work; always reports ready"""
        log("✅ GPT provider ready (no startup needed)")
        return True

    def get_provider_name(self) -> str:
        """Return the active model name as the provider identifier"""
        return self.model

    def get_model_info(self) -> Dict[str, Any]:
"""Get model information"""
return {
"provider": "openai",
"model": self.model,
"temperature": self.temperature,
"max_tokens": self.max_tokens
}
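

# --- Usage sketch (illustrative, not part of the original module) ---
# A minimal example of driving OpenAILLM end to end. Assumptions: the
# llm_interface and utils modules imported above are importable, and a real
# API key replaces the "sk-..." placeholder (left elided on purpose).
if __name__ == "__main__":
    import asyncio

    async def _demo() -> None:
        llm = OpenAILLM(
            api_key="sk-...",    # placeholder; supply a real key via env/config
            model="gpt4o-mini",  # alias resolved by _map_model_name
            settings={"temperature": 0.3, "max_tokens": 200},
        )
        # context carries prior turns as {"role": ..., "content": ...} dicts
        reply = await llm.generate(
            system_prompt="You are a helpful assistant.",
            user_input="Say hello in one sentence.",
            context=[],
        )
        log(reply)

    asyncio.run(_demo())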