"""
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    created by lei.chang on '07/07/2023'
    comment: chat gpt 相关
"""
import aiohttp
import asyncio
from sqlalchemy import func, BOOLEAN
from sqlalchemy.dialects.mysql import INTEGER, VARCHAR, DATETIME, TIMESTAMP, TEXT
from laner.pydantic import BaseModel
from pydantic import Field
from openai import AsyncOpenAI, OpenAI
from config import Config
from project.core.base import CustomModel, Column, CustomDict, CustomList
from project.core.exception import BizError
from project.lib.decorator import retry


class Agent(CustomModel):
    """Agent invocation log.

    One row per agent call: which device/bot/user triggered it, the model
    used, and the agent's raw input/output text.
    """
    __tablename__ = "agent"
    id = Column(INTEGER(unsigned=True), primary_key=True, autoincrement=True)
    dev_id = Column(VARCHAR(30), nullable=False, server_default='dev01', comment='设备id')  # device id
    bot_id = Column(VARCHAR(30), nullable=False, server_default='', comment='数字人id')  # digital-human (bot) id
    user_id = Column(VARCHAR(30), nullable=False, server_default='', comment='用户id')  # user id
    model = Column(VARCHAR(30), nullable=False, server_default='moonshot-v1-32k', comment='模型')  # LLM name used for this call
    business_name = Column(VARCHAR(30), nullable=False, server_default='', comment='业务名称')  # business scenario name
    user_message = Column(VARCHAR(1000), nullable=False, server_default='', comment='用户信息')  # raw user message
    input = Column(VARCHAR(1000), comment='agent输入')  # agent input (nullable)
    output = Column(VARCHAR(1000), comment='agent输出')  # agent output (nullable)
    create_time = Column(DATETIME, nullable=False, server_default=func.now(), is_displayed=False)
    update_time = Column(TIMESTAMP, nullable=False, server_default=func.now(), onupdate=func.now(), is_displayed=False)


class Chat(CustomModel):
    """User conversation table: one row per user/bot message exchange.

    Soft-deleted via ``is_deleted`` rather than removed.
    """
    __tablename__ = "chat"
    id = Column(INTEGER(unsigned=True), primary_key=True, autoincrement=True)
    dev_id = Column(VARCHAR(30), nullable=False, server_default='dev01', comment='设备id')  # device id
    bot_id = Column(VARCHAR(30), nullable=False, server_default='', comment='数字人id')  # digital-human (bot) id
    user_id = Column(VARCHAR(30), nullable=False, server_default='', comment='用户id')  # user id
    user_message = Column(VARCHAR(1000), nullable=False, server_default='', comment='用户信息')  # what the user said
    bot_message = Column(VARCHAR(200), nullable=False, server_default='', comment='数字人信息')  # what the bot replied
    detail = Column(CustomDict(1000), nullable=False, server_default='', comment='详情')  # extra detail payload (dict-serialized)
    is_deleted = Column(BOOLEAN, nullable=False, server_default='0', comment='是否已删除', is_displayed=False)  # soft-delete flag
    create_time = Column(DATETIME, nullable=False, server_default=func.now(), is_displayed=False)
    update_time = Column(TIMESTAMP, nullable=False, server_default=func.now(), onupdate=func.now(), is_displayed=False)


class Advice(CustomModel):
    """User-feedback table: suggestions users give about the system/business.

    Soft-deleted via ``is_deleted`` rather than removed.
    """
    __tablename__ = "advice"
    id = Column(INTEGER(unsigned=True), primary_key=True, autoincrement=True)
    dev_id = Column(VARCHAR(30), nullable=False, server_default='dev01', comment='设备id')  # device id
    bot_id = Column(VARCHAR(30), nullable=False, server_default='', comment='数字人id')  # digital-human (bot) id
    user_id = Column(VARCHAR(30), nullable=False, server_default='', comment='用户id')  # user id
    user_message = Column(VARCHAR(1000), nullable=False, server_default='', comment='用户说的话')  # verbatim user utterance
    advice_type = Column(VARCHAR(30), nullable=False, server_default='', comment='建议类型: 系统/业务')  # advice category: system/business
    content = Column(VARCHAR(1000), nullable=False, server_default='', comment='用户的建议内容')  # the suggestion text itself
    is_deleted = Column(BOOLEAN, nullable=False, server_default='0', comment='是否已删除', is_displayed=False)  # soft-delete flag
    create_time = Column(DATETIME, nullable=False, server_default=func.now(), is_displayed=False)
    update_time = Column(TIMESTAMP, nullable=False, server_default=func.now(), onupdate=func.now(), is_displayed=False)


class KnowledgeVector(CustomModel):
    """Knowledge-base vector table: a Q/A pair plus its embedding vector,
    scoped by organization.
    """
    __tablename__ = "knowledge_vector"
    id = Column(INTEGER(unsigned=True), primary_key=True, autoincrement=True)
    organization_code = Column(VARCHAR(30), nullable=False, server_default='', comment='机构编号')  # organization code
    # NOTE(review): server_default 'dev01' looks copy-pasted from a dev_id column — verify intended default
    question = Column(VARCHAR(200), nullable=False, server_default='dev01', comment='问题')
    # NOTE(review): the comment text ("what the user said") looks copy-pasted; this column stores the answer
    answer = Column(VARCHAR(1000), nullable=False, server_default='', comment='用户说的话')
    embedding = Column(CustomList, nullable=False, server_default='', comment='向量数据')  # embedding vector (list-serialized)
    create_time = Column(DATETIME, nullable=False, server_default=func.now(), is_displayed=False)
    update_time = Column(TIMESTAMP, nullable=False, server_default=func.now(), onupdate=func.now(), is_displayed=False)


class Knowledge(CustomModel):
    """Knowledge-base text table: a chunk of text plus its embedding,
    linked to a knowledge-detail record and scoped by organization.
    """
    __tablename__ = "knowledge"
    id = Column(INTEGER(unsigned=True), primary_key=True, autoincrement=True)
    knowledge_detail_code = Column(VARCHAR(100), comment='知识库详情编号')  # knowledge-detail record code (nullable)
    organization_code = Column(VARCHAR(30), nullable=False, server_default='', comment='机构编号')  # organization code
    text = Column(TEXT, comment='文本')  # raw text chunk (nullable)
    text_embedding = Column(CustomList, nullable=False, server_default='', comment='向量文本')  # embedding of ``text`` (list-serialized)
    create_time = Column(DATETIME, nullable=False, server_default=func.now(), is_displayed=False)
    update_time = Column(TIMESTAMP, nullable=False, server_default=func.now(), onupdate=func.now(), is_displayed=False)


class GptModel(BaseModel):
    """Connection settings for a single LLM endpoint.

    An empty ``api_key`` marks an internal (key-less) endpoint.
    """

    model: str = Field(default='', title='模型名称')
    api_key: str = Field(default='', title='模型APIkey')
    base_url: str = Field(default='', title='模型url')

def _load_models(raw_entries) -> list[GptModel]:
    """Build ``GptModel`` entries from a list of raw config dicts."""
    return [
        GptModel(model=entry['model'], api_key=entry['api_key'], base_url=entry['base_url'])
        for entry in raw_entries
    ]


# Chat-completion endpoints configured for this deployment.
chat_models: list[GptModel] = _load_models(Config.CHAT_MODELS)

# Embedding endpoints configured for this deployment.
emb_models: list[GptModel] = _load_models(Config.EMB_MODELS)

class ChatGpt:
    """Thin async client over chat-completion and embedding endpoints.

    Model entries come from the module-level ``chat_models`` / ``emb_models``
    registries. Entries with an ``api_key`` are called through the OpenAI SDK;
    key-less entries are assumed to be internal services reachable with a
    plain HTTP POST (OpenAI-compatible JSON schema).
    """

    def __init__(self, chat_model_name='moonshot-v1-32k', emb_model_name='text-embedding-ada-002'):
        """
        :param chat_model_name: chat model name; must exist in ``chat_models``
        :param emb_model_name: embedding model name; must exist in ``emb_models``
        """
        self.chat_model_name = chat_model_name
        self.emb_model_name = emb_model_name

    def chat_model(self) -> GptModel:
        """Return the configured chat-model entry.

        Preserves the original "last match wins" semantics when the registry
        contains duplicate model names.
        :raises BizError: when no entry matches ``chat_model_name``.
        """
        model = next((m for m in reversed(chat_models) if m.model == self.chat_model_name), None)
        if model is None:
            raise BizError('无指定chat模型信息')
        return model

    def emb_model(self) -> GptModel:
        """Return the configured embedding-model entry.

        Preserves "last match wins" on duplicate names, like :meth:`chat_model`.
        :raises BizError: when no entry matches ``emb_model_name``.
        """
        model = next((m for m in reversed(emb_models) if m.model == self.emb_model_name), None)
        if model is None:
            raise BizError('无指定embedding模型信息')
        return model

    @staticmethod
    def _build_chat_params(model_name, system_text, user_text, stop, temperature, max_tokens):
        """Assemble the chat-completion payload shared by all entry points.

        Optional fields are included only when provided, so server-side
        defaults apply otherwise.
        """
        messages = []
        if system_text:
            messages.append({'role': 'system', 'content': system_text})
        if user_text:
            messages.append({'role': 'user', 'content': user_text})

        params = {'model': model_name, 'messages': messages}
        if stop:
            params['stop'] = stop
        if temperature is not None:
            params['temperature'] = temperature
        if max_tokens is not None:
            params['max_tokens'] = max_tokens
        return params

    @retry()
    async def llm_async(self, system_text='', user_text='', stop=None, temperature=None, max_tokens=None):
        """Run one chat completion and return the reply text.

        Fix: ``max_tokens`` was silently ignored on the OpenAI-SDK path and
        ``stop`` on the raw-HTTP path; both are now applied on both paths.

        :param system_text: optional system prompt
        :param user_text: optional user prompt
        :param stop: optional stop sequence(s)
        :param temperature: optional sampling temperature
        :param max_tokens: optional completion length cap
        :return: assistant message content (str)
        :raises BizError: when the configured chat model is unknown
        :raises Exception: when the raw-HTTP endpoint responds non-200
        """
        chat_model = self.chat_model()
        # chat_model.model equals self.chat_model_name by construction of chat_model().
        params = self._build_chat_params(
            chat_model.model, system_text, user_text, stop, temperature, max_tokens
        )

        if chat_model.api_key:
            client_async = AsyncOpenAI(api_key=chat_model.api_key, base_url=chat_model.base_url)
            ret = await client_async.chat.completions.create(**params)
            return ret.choices[0].message.content

        # Key-less endpoint: internal service spoken to with a raw HTTP POST.
        headers = {
            'Content-Type': 'application/json'
        }
        async with aiohttp.ClientSession() as session:
            async with session.post(chat_model.base_url, headers=headers, json=params) as resp:
                if resp.status != 200:
                    raise Exception(f'研究院模型推理失败 status: {resp.status} url: {chat_model.base_url}')

                result = await resp.json()

        return result['choices'][0]['message']['content']

    async def llm_async_streaming(self, system_text='', user_text='', stop=None, temperature=None, max_tokens=None):
        """Stream a chat completion, yielding text chunks as they arrive.

        This is an async generator — consume it with ``async for``.
        Fix: ``max_tokens`` is now forwarded instead of being ignored.

        :param system_text: optional system prompt
        :param user_text: optional user prompt
        :param stop: optional stop sequence(s)
        :param temperature: optional sampling temperature
        :param max_tokens: optional completion length cap
        :return: async iterator of content chunks (str)
        """
        chat_model = self.chat_model()
        # Only the SDK path supports streaming here; key-less endpoints are not handled.
        client_async = AsyncOpenAI(api_key=chat_model.api_key, base_url=chat_model.base_url)
        params = self._build_chat_params(
            chat_model.model, system_text, user_text, stop, temperature, max_tokens
        )
        params['stream'] = True

        stream = await client_async.chat.completions.create(**params)
        async for chunk in stream:
            content = chunk.choices[0].delta.content
            if content:
                yield content

    @retry()
    async def get_embedding(self, text):
        """Return the embedding vector for *text*.

        Fix: the SDK path previously used the synchronous ``OpenAI`` client
        inside this coroutine, blocking the event loop; it now awaits
        ``AsyncOpenAI``.

        :param text: text to embed
        :return: list[float] embedding vector
        :raises BizError: when the configured embedding model is unknown
        :raises Exception: when the raw-HTTP endpoint responds non-200
        """
        emb_model = self.emb_model()
        if emb_model.api_key:
            client = AsyncOpenAI(api_key=emb_model.api_key, base_url=emb_model.base_url)
            ret = await client.embeddings.create(input=[text], model=emb_model.model)
            return ret.data[0].embedding

        # Key-less endpoint: internal service spoken to with a raw HTTP POST.
        headers = {
            'Content-Type': 'application/json'
        }
        params = {
            'model': emb_model.model,
            'input': text
        }
        async with aiohttp.ClientSession() as session:
            async with session.post(emb_model.base_url, headers=headers, json=params) as resp:
                if resp.status != 200:
                    raise Exception(f'研究院模型embedding失败 status: {resp.status} url: {emb_model.base_url}')

                result = await resp.json()

        return result['data'][0]['embedding']


async def main():
    """Demo entry point: stream a completion and print each chunk.

    Bug fix: ``llm_async_streaming`` is an async generator, not a coroutine —
    the previous ``await`` + plain ``for`` raised ``TypeError`` before any
    output. It must be consumed with ``async for``.

    :return: None
    """
    chat_gpt = ChatGpt(chat_model_name='Qwen2.5-14B-Instruct-GPTQ-Int8')
    async for chunk in chat_gpt.llm_async_streaming(user_text='给我简单介绍一下唐朝'):
        print(chunk)



if __name__ == '__main__':
    # main() returns nothing useful, so don't keep the (always-None) result.
    asyncio.run(main())



