from langchain_openai import ChatOpenAI
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
import os


class MyChatModel:
    """Streaming chat wrapper around a DeepSeek-compatible OpenAI endpoint.

    Connection settings are read from environment variables at class
    definition time: ``DEEPSEEK_API_KEY``, ``DEEPSEEK_BASE_URL``,
    ``DEEPSEEK_MODEL_NAME`` (any may be ``None`` if unset).
    """

    api_key = os.getenv("DEEPSEEK_API_KEY")
    base_url = os.getenv("DEEPSEEK_BASE_URL")
    model_name = os.getenv("DEEPSEEK_MODEL_NAME")
    output_parser = StrOutputParser()
    # NOTE(review): the model is instantiated at import time, so a bad/missing
    # env var surfaces when the module loads rather than on first use.
    model = ChatOpenAI(api_key=api_key, base_url=base_url, model_name=model_name)
    prompt = ChatPromptTemplate.from_template(
        "你是一个友好的智能AI助手，名字是小团团。你的性格活泼、开朗、乐观，是大家的开心果。请用符合你性格的语气回答问题。用户的问题是：{question}")

    def __init__(self):
        super().__init__()
        # Per-instance state: a class-level `history = []` would be a mutable
        # default shared (and mutated) across every instance.
        self.history = []
        # LCEL pipeline: prompt template -> chat model -> plain-string parser.
        self.client = self.prompt | self.model | self.output_parser

    def chat(self, prompt):
        """Yield response chunks for *prompt* as they stream from the model.

        Args:
            prompt: The user's question, substituted into the template's
                ``{question}`` variable.

        Yields:
            str: Successive text chunks of the model's streamed reply.
        """
        # BUG FIX: the original passed {"question", prompt} — a *set* literal —
        # so the template variable was never bound. Runnable.stream() requires
        # a dict of template-variable bindings.
        for chunk in self.client.stream({"question": prompt}):
            yield chunk


if __name__ == "__main__":
    # Guard the demo so importing this module does not trigger a network
    # call to the chat endpoint as a side effect.
    model = MyChatModel()
    # Print streamed chunks as they arrive, without buffering or newlines.
    for chunk in model.chat("你是谁？"):
        print(chunk, end="", flush=True)
