import asyncio
import json
import os
import time
import warnings
from typing import Optional, Any, Iterator

from zai import ZhipuAiClient
from langchain.llms.base import LLM
from langchain_core.outputs import GenerationChunk

from tools.weather import WeatherTool, get_weather
from tools.wanianliClass import waNianLi

from langchain.agents import Tool, AgentType, initialize_agent, create_tool_calling_agent, create_structured_chat_agent
from langchain.agents.agent import AgentExecutor

from langchain_huggingface import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS
from langchain_community.document_loaders import DirectoryLoader
from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder

from langchain_core.runnables import RunnablePassthrough, RunnableParallel

from pydantic import Field
from typing import List, Any

import yaml

# Old-style memory API (legacy — comment originally said "recommended", contradicting the note below)
from langchain.memory import ConversationBufferMemory
# New-style chat-history API (recommended)
from langchain_community.chat_message_histories import ChatMessageHistory

# Path to a locally downloaded BAAI bge-large-zh-v1.5 embedding model.
# NOTE(review): hardcoded Windows path — consider moving to config/env var.
embeddings_path = r'M:\moudels\BAAIbge-large-zh-v1.5'

# Build the RAG retrieval index once at import time (side effect: loads the
# embedding model and reads every document under the data directory).
with warnings.catch_warnings():
    warnings.simplefilter("ignore")  # silence noisy deprecation warnings from model loading
    embeddings = HuggingFaceEmbeddings(model_name=embeddings_path)
    load = DirectoryLoader(r'F:\A_wokecode\gradio_study\langchain_study\data')
    vs = FAISS.from_documents(load.load(), embeddings)
    retriever = vs.as_retriever()  # retriever over the FAISS store (original comment said "辨别器"/discriminator — it is a retriever)


def get_tools_yaml(tools_paths="./tools"):
    """Return the parsed contents of the first ``*.yaml`` file in *tools_paths*.

    Args:
        tools_paths: Directory to scan (non-recursively) for YAML tool specs.

    Returns:
        The deserialized YAML document of the first matching file, or ``None``
        when the directory contains no ``.yaml`` file.
    """
    for name in os.listdir(tools_paths):
        if name.endswith(".yaml"):
            # Original comment said "json" — the files are YAML. safe_load avoids
            # arbitrary Python object construction from the file contents.
            with open(os.path.join(tools_paths, name), 'r', encoding='utf-8') as f:
                return yaml.safe_load(f)
    return None


class GLM_4(LLM):
    """LangChain LLM wrapper around the ZhipuAI GLM-4 chat-completion API."""

    # SECURITY NOTE(review): an API key was hardcoded in source — rotate it and
    # set ZHIPU_API_KEY instead. The env var takes precedence; the literal is
    # kept only as a backward-compatible fallback.
    client: ZhipuAiClient = ZhipuAiClient(
        api_key=os.getenv("ZHIPU_API_KEY", "d0fc2026b50344b18e25187d9393ce3f.P2XsXy1lpeqc2Gl0")
    )
    # NOTE: "loacl" is a typo of "local"; name kept as-is because it is a
    # public attribute callers may reference.
    loacl_model: str = "GLM-4-Flash-250414"

    def __init__(self):
        super().__init__()

    # Required by the LangChain LLM interface.
    @property
    def _llm_type(self) -> str:
        """Identifier LangChain uses for serialization and telemetry."""
        return "GLM_4"

    def _call(
            self,
            prompt: str,
            stop: Optional[list[str]] = None,
            run_manager=None,
            **kwargs: Any,
    ) -> str:
        """Single-shot completion: send *prompt* and return the full reply text.

        NOTE(review): ``stop`` is accepted for interface compatibility but is
        not forwarded to the API.
        """
        messages = [{"role": "user", "content": prompt}]
        response = self.client.chat.completions.create(
            model=self.loacl_model,
            messages=messages,
        )
        return response.choices[0].message.content

    def _stream(
            self,
            prompt: str,
            stop: Optional[list[str]] = None,
            run_manager=None,
            **kwargs: Any,
    ) -> Iterator[GenerationChunk]:
        """Streamed completion: run the tool agent first, then stream the answer.

        Yields one ``GenerationChunk`` per incremental delta. (The previous
        implementation yielded the running concatenation of all deltas, so
        ``stream()`` consumers — which join chunk texts — saw quadratically
        duplicated output.)
        """
        # Let the agent (weather/lunar-calendar tools) produce reference data.
        agent_output = get_agent().invoke(prompt).get("output")
        messages = [
            {"role": "system", "content": f"你可以参考代理返回的数据进行最后的回答。 代理返回如下:{agent_output}"},
            {"role": "user", "content": prompt}
        ]
        # Ask the model for the final answer, streaming token deltas.
        response = self.client.chat.completions.create(
            model=self.loacl_model,
            messages=messages,
            stream=True
        )
        for chunk in response:
            delta = chunk.choices[0].delta.content
            if delta:
                gen_chunk = GenerationChunk(text=delta)
                # Notify LangChain callbacks (stdout streaming, tracing, ...).
                if run_manager is not None:
                    run_manager.on_llm_new_token(delta, chunk=gen_chunk)
                yield gen_chunk


def get_agent():
    """Build a structured-chat ReAct agent wired to the weather and lunar-calendar tools."""
    toolset = [WeatherTool(), waNianLi()]
    model = GLM_4()
    return initialize_agent(
        llm=model,
        tools=toolset,
        agent=AgentType.STRUCTURED_CHAT_ZERO_SHOT_REACT_DESCRIPTION,
        verbose=False,
        handle_parsing_errors=True,
        return_intermediate_steps=False,
    )


if __name__ == '__main__':
    # Smoke test: stream the model's answer to a lunar-calendar question
    # and print each chunk as it arrives.
    llm = GLM_4()
    question = "2025年9月1日 阴历是多少？"
    for piece in llm.stream(question):
        print(piece)
