from langchain_openai import ChatOpenAI
from langchain_core.messages import HumanMessage
import os

class GeneratorAgent:
    """Agent that generates a structured retrieval expression for a user query.

    Uses a ChatOpenAI-backed LLM when both an API key and a base URL are
    supplied; otherwise falls back to a deterministic string template so the
    pipeline remains usable offline.
    """

    def __init__(self, api_key=None, model="gpt-3.5-turbo", temperature=0.7, base_url=None):
        """Create the agent.

        Args:
            api_key: OpenAI-compatible API key. The LLM client is built only
                when both this and ``base_url`` are provided.
            model: Chat model name passed to ChatOpenAI.
            temperature: Sampling temperature for generation.
            base_url: OpenAI-compatible endpoint base URL.
        """
        self.api_key = api_key
        self.model = model
        self.temperature = temperature
        self.base_url = base_url
        # Stays None unless both credentials are present, which makes
        # generate() take the offline fallback path.
        self.llm = None
        if self.api_key and self.base_url:
            self.llm = ChatOpenAI(
                api_key=self.api_key,
                model=self.model,
                temperature=self.temperature,
                base_url=self.base_url,
            )

    def generate(self, user_query, last_feedback=None):
        """Return a structured retrieval expression for ``user_query``.

        Args:
            user_query: The user's search requirement (free text).
            last_feedback: Optional feedback from the previous round; when
                given it is appended to the prompt (or to the fallback
                template).

        Returns:
            The stripped LLM response text, an error-marker string when the
            LLM call raises, or a templated fallback string when no LLM is
            configured.
        """
        if self.llm is None:
            # Offline fallback: deterministic template, no network call.
            retrieval = f"{user_query} 检索式"
            if last_feedback:
                retrieval += f"（优化：{last_feedback}）"
            return retrieval

        prompt = f"请根据以下需求生成结构化检索式：{user_query}"
        if last_feedback:
            prompt += f"\n上轮反馈：{last_feedback}"
        try:
            # BUG FIX: calling the chat model instance directly
            # (self.llm([...])) relies on the __call__ API that was deprecated
            # in langchain-core 0.1 and removed in 0.3+; invoke() is the
            # supported Runnable interface.
            response = self.llm.invoke([HumanMessage(content=prompt)])
        except Exception as e:
            # Best-effort contract preserved: report the failure in the
            # return value rather than propagating to the caller.
            print(e)
            return f"[大模型调用失败] {str(e)}"
        return response.content.strip()