import os
from typing import Any, Dict, List, Optional

import requests
from dotenv import load_dotenv
from langchain.agents import AgentType, Tool, initialize_agent
from langchain.llms.base import LLM
from langchain.memory import ConversationBufferMemory

# Load environment variables from a local .env file
# (expects LOCAL_GENERATE_URL and WEATHER_API_KEY).
load_dotenv()

# 1. Custom LLM class (adapter for the /generate endpoint)
class LocalGenerateLLM(LLM):
    """LangChain LLM wrapper for a local HTTP ``/generate`` endpoint.

    Posts the prompt plus sampling parameters as JSON to ``generate_url``
    and reads the generated text from the ``"response"`` field of the
    JSON reply.
    """

    model_name: str = "local-generate-model"
    # Default to "" (not None) so the declared str type holds even when the
    # env var is unset; _call fails fast with a clear message in that case.
    generate_url: str = os.getenv("LOCAL_GENERATE_URL", "")
    # Sampling parameters (previously hard-coded inside _call); defaults are
    # unchanged, but they can now be overridden per instance.
    max_tokens: int = 512
    repetition_penalty: float = 1.03
    presence_penalty: float = 1.2
    frequency_penalty: float = 1.2
    temperature: float = 0.5
    top_k: int = 10
    top_p: float = 0.95

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[Any] = None,
    ) -> str:
        """Send *prompt* to the /generate endpoint and return the text.

        Args:
            prompt: Prompt text forwarded verbatim to the endpoint.
            stop: Optional stop sequences. The endpoint is not known to
                support them, so the output is truncated client-side at the
                first occurrence of any sequence (LangChain contract).
            run_manager: Unused; accepted for LangChain compatibility.

        Returns:
            The generated text, or an error-message string on failure
            (errors are returned, not raised, so the agent loop survives).
        """
        if not self.generate_url:
            return "模型调用失败：环境变量 LOCAL_GENERATE_URL 未配置"

        # Request payload matching the /generate interface parameters.
        request_data = {
            "prompt": prompt,
            "max_tokens": self.max_tokens,
            "repetition_penalty": self.repetition_penalty,
            "presence_penalty": self.presence_penalty,
            "frequency_penalty": self.frequency_penalty,
            "temperature": self.temperature,
            "top_k": self.top_k,
            "top_p": self.top_p,
            "stream": False,
        }

        # Call the endpoint; any transport/HTTP/JSON error becomes a message.
        try:
            response = requests.post(
                url=self.generate_url,
                json=request_data,
                timeout=30,
            )
            response.raise_for_status()
            result = response.json()
        except Exception as e:
            return f"模型调用失败：{str(e)}"

        # NOTE(review): assumes the generated text lives in the "response"
        # field — verify with a curl test against the actual endpoint (it
        # might be "output", "result", etc.).
        text = result.get("response", "未返回有效结果")

        # Honor LangChain's stop-sequence contract by truncating at the
        # first stop marker found in the output.
        if stop:
            for token in stop:
                idx = text.find(token)
                if idx != -1:
                    text = text[:idx]
        return text

    @property
    def _identifying_params(self) -> Dict[str, Any]:
        """Parameters that identify this LLM configuration to LangChain."""
        return {"model_name": self.model_name, "url": self.generate_url}

    @property
    def _llm_type(self) -> str:
        """LLM type tag used by LangChain serialization/telemetry."""
        return "local-generate"

# 2. Weather lookup tool (same as in plan one)
def get_weather(city: str) -> str:
    """Look up the current weather for *city* via OpenWeatherMap.

    Args:
        city: City name as understood by the OpenWeatherMap `q` parameter.

    Returns:
        A human-readable summary string (temperature in °C plus a
        description), or an error-message string on failure.
    """
    try:
        # Pass the query via `params` so the city name and API key are
        # URL-encoded — the previous f-string URL broke for city names
        # containing spaces or non-ASCII characters.
        response = requests.get(
            "http://api.openweathermap.org/data/2.5/weather",
            params={
                "q": city,
                "appid": os.getenv("WEATHER_API_KEY"),
                "units": "metric",
            },
            timeout=10,
        )
        data = response.json()
        # OpenWeatherMap signals success with cod == 200.
        if data.get("cod") != 200:
            return f"天气查询失败：{data.get('message')}"
        temp = data["main"]["temp"]
        desc = data["weather"][0]["description"]
        return f"{city} 当前气温：{temp}°C，天气：{desc}"
    except Exception as e:
        return f"查询出错：{str(e)}"

# 3. Tool list
# Tools exposed to the agent: a single weather-lookup entry whose
# description instructs the model to call it for any weather question.
weather_tool = Tool(
    name="WeatherQuery",
    func=get_weather,
    description="用于查询城市实时天气，用户问天气时必须调用，不能直接回答",
)

tools = [weather_tool]

# 4. Initialize the model and the agent
# Wire together the local LLM, conversation memory, and a conversational
# ReAct agent so the model can decide when to call the weather tool.
local_llm = LocalGenerateLLM()

memory = ConversationBufferMemory(
    memory_key="chat_history",
    return_messages=True,
)

agent = initialize_agent(
    tools=tools,
    llm=local_llm,
    agent=AgentType.CONVERSATIONAL_REACT_DESCRIPTION,
    memory=memory,
    verbose=True,
)

# Smoke-test the agent
if __name__ == "__main__":
    # Two-turn smoke test; the second question relies on conversation
    # memory carrying over the city from the first turn.
    for question in ("查询深圳今天的天气", "刚才查询的城市气温是多少？"):
        agent.run(question)
