# -*- coding:utf-8 -*-

# @Time    : 2023/3/9 14:33
# @Author  : zengwenjia
# @Email   : zengwenjia@lingxi.ai
# @File    : chat.py
# @Software: chatbot

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #

from bot import bot_factory
from common.log import logger
from config import conf
from bot.insurance_sales_gpt.demo.cache_dict import cache_dict


# redis_client = redisClient.RedisClient()


class Chat(object):
    """Routes chat traffic to a scene-specific sales bot built by ``bot_factory``.

    NOTE(review): ``self.bot`` is only assigned inside :meth:`init` and
    :meth:`get_user_info`; calling the other methods before one of those
    raises ``AttributeError``. Callers are expected to call ``init`` first.
    """

    def __init__(self, model_name=None):
        # ``model_name`` is currently unused; kept for backward compatibility
        # with existing callers.
        # Maps a frontend scene key to the bot type understood by
        # ``bot_factory.create_bot``.
        self.scene_dict = {
            'norm': conf().get("model_name", ""),
            'insurance': 'insuranceSaleGPT',
            'credit': 'creditSaleGPT',
            'plannerGPT': 'plannerGPT',
            'insurancePension': 'insurancePensionSaleGPT',
            "mf": 'insuranceSaleGPT_mf',
            "dx": 'insuranceSaleGPT_dx',
            'insurancePensionTest': 'insurancePensionSaleSelf',
            'insurancePensionSteam': 'insurancePensionSaleSteam',
            'financial': 'financialSales',
            'modelTes': 'modelTesSales',
            'axinsur': 'axinsur'
        }

    # Called on page initialisation: build the bot for the requested scene
    # and return its opening instruction.
    def init(self, init_scene, user_id):
        """Create the bot for *init_scene* and return its first reply.

        Raises ``KeyError`` if *init_scene* is not in ``scene_dict``.
        """
        logger.info('init_scene:{},user_id:{}'.format(self.scene_dict[init_scene], user_id))
        self.bot = bot_factory.create_bot(self.scene_dict[init_scene])
        # Temporary patch for multi-user testing: pin this user's bot in the
        # process-wide cache so later calls reuse the same instance.
        if init_scene in ['insurance', 'insurancePension', 'financial', 'mf', 'axinsur']:
            cache_dict[f'user_id_bot:{user_id}'] = self.bot
        instruction = self.bot.reply('', user_id)
        return instruction

    def get_user_info(self, session_id):
        """Fetch user info for *session_id* via the default insurance bot."""
        # Default bot.
        self.bot = bot_factory.create_bot('insuranceSaleGPT')
        res = self.bot.get_user_info(session_id)
        return res

    def get_reply(self, query, user_id):
        """Return the bot's reply to *query*, preferring the per-user cached bot."""
        if f'user_id_bot:{user_id}' in cache_dict:
            return cache_dict[f'user_id_bot:{user_id}'].reply(query, user_id)
        res = self.bot.reply(query, user_id)
        return res

    def get_reply_by_stream(self, query, user_id):
        """Return a streaming reply iterator, preferring the per-user cached bot."""
        # NOTE(review): ``reply_by_steam`` looks like a typo for
        # ``reply_by_stream``, but it is the bot-side method name — do not
        # rename here without changing the bot implementations.
        if f'user_id_bot:{user_id}' in cache_dict:
            stream_iter = cache_dict[f'user_id_bot:{user_id}'].reply_by_steam(query, user_id)
        else:
            stream_iter = self.bot.reply_by_steam(query, user_id)

        return stream_iter

    def flush_conversation(self, reply_content, user_id):
        """Persist *reply_content* into the conversation history for *user_id*."""
        if f'user_id_bot:{user_id}' in cache_dict:
            cache_dict[f'user_id_bot:{user_id}'].flush_conversation(reply_content, user_id)
        else:
            self.bot.flush_conversation(reply_content, user_id)

    async def get_async_reply(self, context, session_id):
        """Async variant of ``get_reply``; delegates to ``bot.async_reply``."""
        res = await self.bot.async_reply(context, session_id)
        return res

    def get_reply_with_prompt(self, prompt):
        """Send a raw *prompt* straight to the bot and return its reply."""
        # BUG FIX: was ``logging.info`` but this module imports ``logger``
        # (common.log), never ``logging`` — the old call raised NameError.
        logger.info('prompt:{}'.format(prompt))
        res = self.bot.reply_with_prompt(prompt)
        return res


class openaiChat(object):
    """Thin wrapper around the generic ``chatGPT`` bot from ``bot_factory``."""

    def __init__(self):
        self.bot = bot_factory.create_bot('chatGPT')

    def get_reply(self, query, user_id):
        """Forward *query* to the chatGPT bot on behalf of *user_id*."""
        context = {'from_user_id': user_id}
        return self.bot.reply('', query, context)


class documentChat(object):
    """Thin wrapper around the ``indexAssistence`` document-QA bot."""

    def __init__(self):
        self.bot = bot_factory.create_bot("indexAssistence")

    def get_reply(self, query, user_id):
        """Answer *query* via the document bot; *user_id* is accepted but unused."""
        return self.bot.reply(query)
