import logging
import uuid
from datetime import datetime

from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
from langchain_openai import ChatOpenAI

from config.config import Config
from models.models import DetailedConversations
from service.PromptService import PromptService
from service.RagService import RagService

class FinalService:
    """最终服务 - orchestrates one user turn: RAG retrieval -> prompt build -> LLM call -> audit log.

    Pipeline per request:
      1. Ask RagService for context (currently an extension point; may return empty).
      2. Build the final prompt via PromptService (text form for the LLM, JSON form for the DB).
      3. Call the chat model directly via ``invoke`` — the old ``LLMChain``/``chain.run``
         APIs are deprecated in LangChain and the trivial ``"{prompt}"`` template added
         nothing on top of the already-rendered prompt string.
      4. Best-effort persist the turn to ``DetailedConversations``.
    """

    # Module-style logger; class attribute so methods can log without a global.
    _logger = logging.getLogger(__name__)

    def __init__(self):
        self.config = Config()
        self.prompt_service = PromptService()
        self.rag_service = RagService()
        # Chat model configured from project config; temperature/max_tokens are
        # service-wide defaults, not per-request tunables.
        self.llm = ChatOpenAI(
            openai_api_key=self.config.AI_API_KEY,
            openai_api_base=self.config.AI_BASE_URL,
            model_name=self.config.AI_MODEL,
            temperature=0.7,
            max_tokens=1000,
        )

    def process_user_input(self, user_input: str, session_id: str) -> dict:
        """Run the full pipeline for one user message.

        Args:
            user_input: Raw text from the user.
            session_id: Conversation/session identifier used for the audit record.

        Returns:
            ``{"success": True, "response": <str>}`` on success, otherwise
            ``{"success": False, "error": <str>}``. A failed DB write does not
            affect the returned result (best-effort logging only).
        """
        start_time = datetime.now()

        # 1. RAG 检索 — extension point; an empty/None result means "no context".
        rag_context = self.rag_service.retrieve_context(user_input)

        # 2. Build the prompt twice: plain text for the LLM call, JSON for the DB row.
        if rag_context:
            prompt = self.prompt_service.build_prompt_with_rag_context(user_input, rag_context)
            final_prompt_json = self.prompt_service.build_prompt_with_rag_context_json(user_input, rag_context)
            retrieved_documents = rag_context
        else:
            prompt = self.prompt_service.build_base_prompt(user_input)
            final_prompt_json = self.prompt_service.build_base_prompt_json(user_input)
            retrieved_documents = None

        # 3. Call the model directly. ChatOpenAI.invoke returns an AIMessage;
        #    .content is the same string LLMChain.run used to return.
        #    Broad except is deliberate: this is the service boundary and any
        #    provider/network error must surface as a structured failure.
        try:
            llm_response = self.llm.invoke(prompt).content
            success, error = True, None
        except Exception as e:
            self._logger.exception("LLM call failed (session_id=%s)", session_id)
            success, error, llm_response = False, str(e), ""

        processing_time = (datetime.now() - start_time).total_seconds()

        # 4. Audit record — failures are logged, never propagated.
        self._record_conversation(
            session_id=session_id,
            user_input=user_input,
            final_prompt_json=final_prompt_json,
            llm_response=llm_response,
            retrieved_documents=retrieved_documents,
            processing_time=processing_time,
            created_at=start_time,
        )

        if success:
            return {"success": True, "response": llm_response}
        return {"success": False, "error": error}

    def _record_conversation(self, *, session_id, user_input, final_prompt_json,
                             llm_response, retrieved_documents, processing_time,
                             created_at):
        """Best-effort write of one turn to DetailedConversations; never raises."""
        try:
            DetailedConversations.create(
                conversation_id=str(uuid.uuid4()),
                session_id=session_id,
                user_input=user_input,
                final_prompt=final_prompt_json,  # JSON form, not the plain-text prompt
                llm_response=llm_response,
                final_response=llm_response,
                retrieved_documents=retrieved_documents,
                # Kept as "{prompt}" for backward compatibility with rows written
                # by the old LLMChain-based implementation.
                prompt_template="{prompt}",
                model_name=self.config.AI_MODEL,
                processing_time=processing_time,
                # NOTE(review): naive local datetime, as before — confirm whether
                # the schema expects UTC before changing.
                created_at=created_at,
            )
        except Exception:
            self._logger.exception("数据库记录失败 (session_id=%s)", session_id)