from fastapi import Body
from fastapi.responses import StreamingResponse
from configs.model_config import LLM_MODELS, TEMPERATURE, HISTORY_LEN, MAX_TOKENS
from server.redis.redis_utils import ChatHistory, RedisClient
from server.utils import BaseResponse
from startup import REDIS
from configs import logging_config
from loguru import logger
from instruction.XiaoKaAPP.XiaoKaAPP import intent_recognition
from instruction.utils import CHINESE_WEEKDAY
from server.utils import create_chat_response_event
import datetime
from server.chat.utils import generate_unique_id, log_content
from server.FAQ.utils import load_data
from server.FAQ.faq_module import faq
from configs.other_config import QA_FILE_PATH, KV_FILE_PATH, TRUCK2ID_FILE_PATH
from configs.model_config import FAQ_THRESHOLD
from sse_starlette.sse import EventSourceResponse

# Name of the FAQ matching model passed to faq().
FAQ_MODEL = "faq-question-answering_chinese-base"
# FAQ question->answer pairs loaded from disk (QA data).
QA_datas = load_data(QA_FILE_PATH)
# FAQ key->value data loaded from disk.
KV_datas = load_data(KV_FILE_PATH)

# Truck-name -> payload mapping used by intent handling.
# NOTE(review): values are used as the spoken `answer`, keys as `data_content` — confirm schema.
truck2id = load_data(TRUCK2ID_FILE_PATH)


async def _agent_fallback(uid: str, query: str, stream: bool, histories, unique_id: str):
    """Delegate *query* to the LLM agent and wrap the result for transport.

    Returns an EventSourceResponse (SSE) when ``stream`` is true, otherwise a
    StreamingResponse over the same event generator.
    """
    # Imported lazily (as in the original code) — presumably to avoid a
    # circular import at module load time; TODO confirm.
    from server.chat.agent_chat_ import agent_chat
    response = await agent_chat(uid=uid,
                                query=query,
                                stream=stream,
                                history=histories,
                                unique_id=unique_id,
                                model_name=LLM_MODELS[0],
                                temperature=TEMPERATURE,
                                max_tokens=MAX_TOKENS,
                                prompt_name="default")
    return EventSourceResponse(response) if stream else StreamingResponse(response)


def _event_response(stream: bool, *, response_text, instruction, unique_id, data_content):
    """Wrap one finished chat event in the transport matching *stream*."""
    event = create_chat_response_event(response_text=response_text,
                                       instruction=instruction,
                                       unique_id=unique_id,
                                       finish=True,
                                       data_content=data_content)
    return EventSourceResponse(event) if stream else StreamingResponse(event)


async def xiaokaapp_agent_chat(uid: str = Body(..., description="用户ID"),
                               query: str = Body(..., description="用户输入", examples=["恼羞成怒"]),
                               stream: bool = Body(False, description="流式输出")
                               ):
    """XiaoKa APP chat endpoint: FAQ match, then intent match, then LLM agent.

    Resolution order:
      1. FAQ — if the FAQ match probability exceeds FAQ_THRESHOLD, the FAQ
         answer is returned directly ("D_" id).
      2. Intent — a recognized intent is answered locally ("I_" id), except
         intents resolved to '-1', which fall through to the agent.
      3. Agent — everything else is delegated to the LLM agent ("D_" id).

    Returns an EventSourceResponse/StreamingResponse on success, or a
    BaseResponse with a 5xxxx code when Redis/history/intent setup fails.
    """
    # Create the Redis client used for per-user chat history.
    try:
        redis_client = RedisClient(
            host=REDIS["host"],
            port=REDIS["port"],
            db=REDIS["db"]["XiaoKa"],
            max_connections=REDIS["max_connections"]
        )
    except ConnectionError as e:
        # Could not reach the Redis server.
        logger.error(f"ConnectionError: {e}")
        return BaseResponse(code=50002, msg="Failed to connect to Redis server.")
    except Exception as e:
        # Any other failure while building the client.
        logger.error(f"Unexpected error while initializing Redis client: {e}")
        return BaseResponse(code=50000, msg="An unexpected error occurred while connecting to Redis.")

    # Chat-history accessor bound to this uid.
    try:
        chat_history = ChatHistory(uid, redis_client, HISTORY_LEN)
    except Exception as e:
        logger.error(f"Failed to initialize ChatHistory: {e}")
        return BaseResponse(code=50001, msg="Failed to initialize chat history.")

    # Fetch the uid's previous turns (fed to the agent fallback).
    try:
        histories = chat_history.get_history()
    except Exception as e:
        logger.error(f"Failed to get chat history: {e}")
        return BaseResponse(code=50003, msg="Failed to retrieve chat history.")

    # Run intent recognition on the raw query.
    try:
        results = await intent_recognition(query)
    except Exception as e:
        logger.error(f"Failed to recognize intent: {e}")
        return BaseResponse(code=50004, msg="Failed to recognize intent.")

    # Unique id for this exchange; prefixed "D_" (dialogue) or "I_" (instruction).
    unique_id_ = generate_unique_id()

    # FAQ first: a probability ABOVE FAQ_THRESHOLD means the FAQ model matched
    # the question, so its answer is returned directly.
    _, label, probability, answer = faq(query=query, QA_datas=QA_datas, KV_datas=KV_datas, FAQ_MODEL=FAQ_MODEL)
    if probability > FAQ_THRESHOLD:
        unique_id = "D_" + unique_id_
        responses = _event_response(stream, response_text=answer, instruction="-1",
                                    unique_id=unique_id, data_content="")
        log_content(uid, query, answer, instruction="-1")
        return responses

    # FAQ missed — see whether an intent was recognized.
    data_content = None
    if results is not None:
        unique_id = "I_" + unique_id_
        if isinstance(results, tuple) and results[1] in truck2id:
            # Tuple intents carry (instruction, truck_name); the truck payload
            # becomes the answer and the name is echoed as data_content.
            data_content = results[1]
            answer = truck2id[data_content]
            results = results[0]
        else:
            if results == '附近':
                answer = '小卡现在还没办法了解您身边的环境和您的位置，后续我会努力变得更好的！'
                results = '-1'
            elif results == 'date':
                date = datetime.datetime.now()
                week_what = CHINESE_WEEKDAY[date.weekday()]
                answer = f"今天是: {date.year}年 {date.month}月{date.day}日 {week_what}"
                results = '-1'
            else:
                answer = None
            # NOTE(review): for '附近'/'date' the canned `answer` above is
            # discarded, because results == '-1' routes to the agent below —
            # confirm whether these were meant to be returned directly.

        if results == '-1':
            # Intent resolved to "no local instruction" — hand off to the agent.
            return await _agent_fallback(uid, query, stream, histories, "D_" + unique_id_)

        # A concrete instruction: answer locally and log the exchange.
        responses = _event_response(stream, response_text=answer, instruction=results,
                                    unique_id=unique_id, data_content=data_content)
        log_content(uid, query, answer, instruction=results)
        return responses

    # No FAQ match and no intent — plain agent chat.
    return await _agent_fallback(uid, query, stream, histories, "D_" + unique_id_)
