# Standard library
import json
import os
from typing import Any, Optional, Type, Iterator

# Third-party
import requests
import zai

# 封装工具对象 (tool wrapping)
from langchain.llms.base import LLM
from langchain.tools import BaseTool
from pydantic import Field, BaseModel


# Argument schema for FessTool: a single search keyword extracted from the
# user question (the Fess index is queried with exactly one term).
# NOTE: kept docstring-free on purpose — pydantic folds a class docstring
# into the generated JSON schema, which would change the tool's advertised args.
class FessToolParam(BaseModel):
    query: str = Field(description="查询关键词")


class FessTool(BaseTool):
    """LangChain tool that queries a local Fess search server for knowledge.

    Sends a single keyword to the Fess REST API. For plain-text hits it
    returns the full file content; for other file types it returns the
    search digest. On no hits or any failure it returns a fallback message
    the agent can act on instead of crashing.
    """

    name: str = "知识搜索"
    description: str = "通过将问题提取为一个关键词搜索，注意：只可以是一个词，可能了解到问题答案的相关信息"
    args_schema: Type[BaseModel] = FessToolParam

    def _run(self, query: str) -> Any:
        """Search the local Fess index for `query` and return relevant text.

        :param query: a single search keyword.
        :returns: file content for txt hits, the digest for other hits, or a
                  fallback message when nothing is found or the request fails.
        """
        url = 'http://localhost:8080/api/v1/documents'
        params = {
            'q': query,
            'wt': 'json',
            'indent': 'true',
            'rows': 1,
        }
        try:
            # Timeout so a hung Fess server cannot stall the whole agent run.
            response = requests.get(url, params=params, timeout=10)
            response.raise_for_status()
            # Parse the body once instead of re-parsing JSON for every field.
            payload = response.json()
            if payload["record_count"] > 0:
                hit = payload["data"][0]
                filetype = hit["filetype"]
                url_link: str = hit["url_link"]
                digest = hit["digest"]
                if filetype == 'txt':
                    # Fess returns file:// URLs; read the underlying file directly.
                    with open(url_link.replace("file://", ""), encoding='utf-8') as f:
                        return f.read()
                return digest
            else:
                return "没有搜索到相关的知识，你可以根据你知道的回答。"
        except Exception:
            # Broad catch is deliberate: on network/HTTP/parse/file errors the
            # agent should fall back to other tools rather than abort.
            return "请求失败，尝试其他搜索工具"


from zai import ZhipuAiClient
from langchain_core.outputs import GenerationChunk
from langchain.llms.base import LLM


class GLM_LLMS(LLM):
    """LangChain LLM wrapper around the ZhipuAI (GLM) chat-completion API.

    `_call` does a plain one-shot completion; `_stream` first runs the tool
    agent and then streams a final answer that may use the agent's output.
    """

    # SECURITY NOTE(review): the API key is hard-coded in source; it should be
    # loaded from an environment variable or a secrets store and rotated.
    client: ZhipuAiClient = ZhipuAiClient(api_key="d0fc2026b50344b18e25187d9393ce3f.P2XsXy1lpeqc2Gl0")
    # NOTE: "loacl" is a typo for "local"; the name is kept so existing callers
    # that set this attribute by name keep working.
    loacl_model: str = "GLM-4-Flash-250414"

    # The original no-arg __init__ only called super().__init__() and thereby
    # blocked pydantic field kwargs; removing it is backward compatible.

    @property
    def _llm_type(self) -> str:
        """Identifier LangChain uses to tag this LLM implementation."""
        return "GLM_LLMS"

    def _call(
            self,
            prompt: str,
            stop: Optional[list[str]] = None,
            run_manager=None,
            **kwargs: Any,
    ) -> str:
        """Send `prompt` as a single user message and return the reply text.

        `stop`, `run_manager` and extra kwargs are accepted for the LangChain
        interface but are not forwarded to the API.
        """
        messages = [{"role": "user", "content": prompt}]
        response = self.client.chat.completions.create(
            model=self.loacl_model,
            messages=messages,
        )
        return response.choices[0].message.content

    def _stream(
            self,
            prompt: str,
            stop: Optional[list[str]] = None,
            run_manager=None,
            **kwargs: Any,
    ) -> Iterator[GenerationChunk]:
        """Stream the final answer, grounded on the tool agent's output.

        NOTE(review): this builds a fresh agent (which itself constructs a new
        GLM_LLMS) on every call — confirm this mutual dependency is intended.
        """
        agent_output = get_agent().invoke(prompt).get("output")
        messages = [
            {"role": "system", "content": f"你可以参考代理返回的数据进行最后的回答。 代理返回如下:{agent_output}"},
            {"role": "user", "content": prompt}
        ]
        # Final, streamed answer from the model.
        response = self.client.chat.completions.create(
            model=self.loacl_model,
            messages=messages,
            stream=True
        )
        for chunk in response:
            delta = chunk.choices[0].delta.content
            if delta:
                generation = GenerationChunk(text=delta)
                # Forward tokens to LangChain callbacks when a manager is given
                # (the original accumulated them into an unused string instead).
                if run_manager is not None:
                    run_manager.on_llm_new_token(delta, chunk=generation)
                yield generation


from langchain.agents import AgentType, initialize_agent, Tool
from langchain_community.utilities import SerpAPIWrapper

import os

# SECURITY NOTE(review): prefer supplying the key via the SERPAPI_API_KEY
# environment variable; the hard-coded value is kept only as a backward-
# compatible fallback and should be rotated out of source control.
serpAPI_key = os.environ.get(
    "SERPAPI_API_KEY",
    "86fe55cc06c9506b94c99c06febc1ae7662ebf5b6b42b0fa442643e2d44f575e",
)

# Restrict SerpAPI to Google results.
params = {
    "engine": "google",
}

serp = SerpAPIWrapper(serpapi_api_key=serpAPI_key, params=params)

# Real-time web search tool handed to the agent alongside FessTool.
serp_tool = Tool(
    name="SerpAPI",
    func=serp.run,
    description="提供实时信息的查询",
)


def get_agent():
    """Build a structured-chat ReAct agent backed by the GLM wrapper LLM and
    equipped with the knowledge-search and SerpAPI tools."""
    toolkit = [FessTool(), serp_tool]
    backing_llm = GLM_LLMS()
    agent = initialize_agent(
        tools=toolkit,
        llm=backing_llm,
        agent=AgentType.STRUCTURED_CHAT_ZERO_SHOT_REACT_DESCRIPTION,
        handle_parsing_errors=False,
        verbose=True,
        # Expose intermediate_steps so callers can inspect the reasoning trace.
        return_intermediate_steps=True,
    )
    return agent


if __name__ == '__main__':
    # Smoke-test the agent with a sample question and print the final answer.
    question = "中国昨天的阅兵怎么样，总结亮点出来，搞成列表"
    result = get_agent().invoke(question)
    print("******************")
    print(result["output"])
    print("******************")
