from langchain.llms.base import LLM
from zhipuai import ZhipuAI
from langchain_core.messages.ai import AIMessage
from typing import List
import os
from zhipuai import ZhipuAI




class ChatLYY(LLM):
    """LangChain ``LLM`` wrapper around the ZhipuAI GLM-4-Flash chat API.

    Maintains an optional message history (list of ``{"role", "content"}``
    dicts in the ZhipuAI/OpenAI chat format) and supports both blocking
    (`invoke`) and streaming (`stream`) completion calls.
    """

    # Conversation history in ZhipuAI chat-message format.
    history: List[dict[str, str]] = []
    # ZhipuAI SDK client; created in __init__ from the ZHIPU_API_KEY env var.
    client: object = None

    def __init__(self):
        super().__init__()
        # API key comes from the environment; the ZhipuAI client will raise
        # on use if the key is missing/invalid.
        zhipuai_api_key = os.getenv('ZHIPU_API_KEY')
        self.client = ZhipuAI(api_key=zhipuai_api_key)

    @property
    def _llm_type(self):
        # Identifier LangChain uses to label this LLM implementation.
        # NOTE(review): "ChatGLN4" looks like a typo for "ChatGLM4" — kept
        # byte-identical because external config may match on this string.
        return "ChatGLN4"

    def invoke(self, prompt, history=None):
        """Send ``prompt`` (with optional prior ``history``) to GLM-4-Flash.

        Args:
            prompt: The user's message text.
            history: Optional list of prior chat messages. The new user turn
                is appended to it in place.

        Returns:
            AIMessage wrapping the model's reply text.
        """
        # BUG FIX: the original used a mutable default (history=[]), which is
        # created once and shared across calls, so turns from unrelated calls
        # silently accumulated. A None default restores the intended behavior
        # (the original already had the `if history is None` guard).
        if history is None:
            history = []
        history.append({"role": "user", "content": prompt})
        response = self.client.chat.completions.create(
            model="glm-4-flash",
            messages=history,
        )
        result = response.choices[0].message.content
        return AIMessage(content=result)

    def _call(self, prompt, history=None):
        """LangChain's required generation hook.

        BUG FIX: ``LLM._call`` must return a plain ``str`` (LangChain wraps
        it into a ``Generation``); the original returned an ``AIMessage``,
        breaking ``generate``/``__call__``. Unwrap ``.content`` instead.
        """
        return self.invoke(prompt, history).content

    def stream(self, prompt, history=None):
        """Stream the GLM-4-Flash reply incrementally.

        Args:
            prompt: The user's message text.
            history: Optional list of prior chat messages; the new user turn
                is appended to it in place.

        Yields:
            Content deltas from the streaming API, in arrival order.
        """
        # Same mutable-default fix as invoke().
        if history is None:
            history = []
        history.append({"role": "user", "content": prompt})
        response = self.client.chat.completions.create(
            model="glm-4-flash",
            messages=history,
            stream=True,
        )
        for chunk in response:
            # NOTE(review): some chat APIs send a final chunk whose
            # delta.content is None — confirm whether callers need a guard.
            yield chunk.choices[0].delta.content