import os

from zhipuai import ZhipuAI


class LLM:
    """Thin wrapper around the ZhipuAI chat-completions API with streamed output.

    Streams the assistant's reply to stdout as it arrives and returns the
    accumulated text together with any tool-call delta seen in the stream.
    """

    def __init__(self, tools=None, api_key=None, model="GLM-4-Flash"):
        """Create a client for the ZhipuAI chat API.

        Args:
            tools: Optional tool/function definitions forwarded to every
                `chat` call (SDK-specific schema).
            api_key: Explicit API key. SECURITY: the key was previously
                hard-coded in source — it must be treated as leaked and
                revoked. When omitted, the key is read from the
                ``ZHIPUAI_API_KEY`` environment variable.
            model: Model identifier used for every `chat` call.
        """
        key = api_key if api_key is not None else os.environ.get("ZHIPUAI_API_KEY")
        self.client = ZhipuAI(api_key=key)
        self.tools = tools
        self.model = model

    def chat(self, messages, temperature=0.6):
        """Send *messages* and stream the reply, echoing it to stdout.

        Args:
            messages: Chat history in OpenAI-style ``{"role", "content"}`` dicts.
            temperature: Sampling temperature forwarded to the API.

        Returns:
            Tuple ``(text, tool_calls)`` where ``text`` is the full streamed
            reply and ``tool_calls`` is the first tool-call delta observed
            in the stream, or ``None`` if the model made no tool call.
            NOTE(review): streaming tool-call arguments may arrive split
            across several chunks; only the first delta is kept here, which
            mirrors the original behavior — confirm against the SDK docs
            before relying on complete arguments.
        """
        response = self.client.chat.completions.create(
            model=self.model,
            messages=messages,
            tools=self.tools,
            tool_choice="auto",
            stream=True,
            temperature=temperature,
        )
        # Collect fragments in a list and join once — avoids repeated
        # string concatenation while streaming.
        parts = []
        tool_calls = None
        for chunk in response:
            delta = chunk.choices[0].delta
            if tool_calls is None and delta.tool_calls is not None:
                tool_calls = delta.tool_calls
            if delta.content:
                parts.append(delta.content)
                print(delta.content, end='')
        print()
        return ''.join(parts), tool_calls


if __name__ == '__main__':
    # Manual smoke test: stream one question through the model and print
    # the collected reply plus any tool-call delta.
    assistant = LLM()
    question = [{"role": "user", "content": "中国的首都在哪里"}]
    reply, calls = assistant.chat(messages=question)
    print(reply, calls)