# encoding: utf-8
"""

    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    created by lei.chang on '11/04/2025'
    comment: 大语言模型(llm)
"""
import asyncio
from openai import AsyncOpenAI

from project.lib.common import deepseek_model, Model


class LlmModel:
    """Thin async wrapper around an OpenAI-compatible chat-completions API.

    One shared ``AsyncOpenAI`` client is created per instance and reused by
    every call; call :meth:`close` when the instance is no longer needed.
    """

    def __init__(self, model: Model):
        """
        :param model: connection settings — expected to carry ``api_key``,
            ``base_url`` and ``model_name`` (see usage below).
        """
        self.model = model
        # Single shared client, reused across calls for connection pooling.
        self.async_client = AsyncOpenAI(api_key=self.model.api_key, base_url=self.model.base_url)

    def _build_params(self, prompt,
                      stop: str | list[str] | None,
                      temperature: float | None) -> dict:
        """Assemble the request kwargs shared by streaming and non-streaming chat.

        Optional fields are included only when explicitly provided, so the
        API's own defaults apply otherwise.
        """
        params: dict = {
            'model': self.model.model_name,  # target model
            'messages': [{'role': 'user', 'content': prompt}],
        }
        # Consistent `is not None` check (the original streaming path used a
        # truthiness test, silently dropping e.g. stop=[""]-style values).
        if stop is not None:
            params['stop'] = stop
        if temperature is not None:
            params['temperature'] = temperature
        return params

    async def async_chat(self, prompt,
                         stop: str | list[str] | None = None,
                         temperature: float | None = None) -> str:
        """Single-shot chat completion.

        :param prompt: user message content.
        :param stop: optional stop sequence(s) forwarded to the API.
        :param temperature: optional sampling temperature.
        :return: the full completion text of the first choice.
        """
        params = self._build_params(prompt, stop, temperature)
        ret = await self.async_client.chat.completions.create(**params)
        return ret.choices[0].message.content

    async def async_chat_streaming(self, prompt,
                                   stop: str | list[str] | None = None,
                                   temperature: float | None = None):
        """Streaming chat completion: yields content deltas as they arrive.

        :param prompt: user message content.
        :param stop: optional stop sequence(s) forwarded to the API.
        :param temperature: optional sampling temperature.
        :return: async generator of text fragments (``None`` deltas skipped).
        """
        params = self._build_params(prompt, stop, temperature)
        params['stream'] = True

        # BUG FIX: the original closed self.async_client in a `finally` here,
        # which made this instance unusable after a single streaming call
        # (any later async_chat / async_chat_streaming would hit a closed
        # client). The shared client is now left open; use close() instead.
        stream = await self.async_client.chat.completions.create(**params)
        async for chunk in stream:
            content = chunk.choices[0].delta.content
            # Role/finish chunks carry no content; skip them.
            if content is not None:
                yield content

    async def close(self) -> None:
        """Release the underlying HTTP client. Call once, when done with the instance."""
        await self.async_client.close()


async def main():
    """Demo entry point: stream a completion from DeepSeek and print each fragment.

    :return: None
    """
    model = LlmModel(deepseek_model)
    stream = model.async_chat_streaming(prompt='给我介绍一下唐朝')
    async for piece in stream:
        print(piece)

if __name__ == "__main__":
    """
    """
    asyncio.run(main())
