from fastapi import Body, Request
from fastapi.responses import StreamingResponse
from configs.model_config import LLM_MODELS, TEMPERATURE, HISTORY_LEN, MAX_TOKENS
from configs.kb_config import OLYMPIC_SCORE_THRESHOLD, OLYMPIC_VECTOR_SEARCH_TOP_K, GO_SERVICE_SECRET_KEY
from configs.other_config import INSTRUCTIONS, DEFAULT_CHARACTER, DEFAULT_NICKNAME, \
    ALGORITHM
from server.utils import BaseResponse
from configs.logging_config import configure_logging
from loguru import logger
from server.utils import create_chat_response_event
from server.chat.utils import generate_request_id, log_content
from sse_starlette.sse import EventSourceResponse
from server.chat.utils import llm_chat
from server.db.repository.message_repository import get_message_by_user_id_with_pando_chat
from server.utils import decode_verify_token
from typing import Optional

# from server.chat.knowledge_base_chat_ import knowledge_base_chat
from server.chat.kb_chat_olympic import knowledge_base_chat
from server.chat.chat_fanyi import fanyi_chat
# Fixed knowledge-base and prompt identifiers used by the Olympic chat
# endpoint in this module.
knowledge_base_name = "Olympic"
prompt_name = "knowledge_Olympic"

# Install the project-wide loguru logging configuration at import time.
configure_logging()


async def _olympic_kb_response(uid: str,
                               query: str,
                               search_query: str,
                               stream: bool,
                               request_id: str,
                               request: Request,
                               nickname: Optional[str],
                               character: Optional[str]):
    """Answer *query* from the Olympic knowledge base for user *uid*.

    Fetches the user's recent chat history and delegates to
    ``knowledge_base_chat``, then wraps the resulting generator in an
    ``EventSourceResponse`` (stream=True) or ``StreamingResponse``
    (stream=False).

    Returns:
        The wrapped streaming response, or ``BaseResponse(code=50003)``
        when the chat-history lookup fails.
    """
    try:
        histories = get_message_by_user_id_with_pando_chat(user_id=uid, history_len=HISTORY_LEN)
    except Exception as e:
        logger.error(f"Failed to get chat history: {e}")
        return BaseResponse(code=50003, msg="Failed to retrieve chat history.")
    response = await knowledge_base_chat(uid=uid,
                                         query=query,
                                         search_query=search_query,
                                         stream=stream,
                                         history=histories,
                                         knowledge_base_name=knowledge_base_name,
                                         request_id=request_id,
                                         score_threshold=OLYMPIC_SCORE_THRESHOLD,
                                         top_k=OLYMPIC_VECTOR_SEARCH_TOP_K,
                                         model_name=LLM_MODELS[0],
                                         temperature=TEMPERATURE,
                                         max_tokens=MAX_TOKENS,
                                         request=request,
                                         prompt_name=prompt_name,
                                         nickname=nickname,
                                         character=character)
    if stream:
        return EventSourceResponse(response)
    return StreamingResponse(response)


async def mars_olympic_kb_chat_v2(uid: str = Body(..., description="用户ID"),
                                  lang: str = Body("zh", description="语种"),
                                  query: str = Body(..., description="用户输入", examples=["恼羞成怒"]),
                                  stream: bool = Body(False, description="流式输出"),
                                  nickname: Optional[str] = Body(DEFAULT_NICKNAME, description="用户昵称"),
                                  character: Optional[str] = Body(DEFAULT_CHARACTER, description="性格"),
                                  request: Request = None
                                  ):
    """Olympic knowledge-base chat endpoint (v2).

    Flow: verify the caller's token, optionally translate a non-Chinese
    query, run intent recognition, then either emit a canned intent event
    or fall back to a knowledge-base answer streamed via SSE.

    Returns:
        ``EventSourceResponse``/``StreamingResponse`` on success, or a
        ``BaseResponse`` with code 401 (bad token), 400 (empty query), or
        50003 (history lookup failure).
    """
    # Verify the caller's token; reject with 401 on any decoding failure.
    try:
        decode_verify_token(request, GO_SERVICE_SECRET_KEY, ALGORITHM)
    except Exception as e:
        # decode_verify_token presumably raises an HTTPException-like error
        # carrying a dict `detail` — fall back to str(e) so an unexpected
        # exception type cannot crash this handler itself.
        detail = getattr(e, "detail", None)
        msg = detail.get("msg") if isinstance(detail, dict) else str(e)
        return BaseResponse(code=401, msg="Token is invalid", data={"error": str(msg)})

    # The query must be non-empty after trimming whitespace.
    query = query.strip()
    if not query:
        return BaseResponse(code=400, msg="Query cannot be empty.")

    # Non-Chinese queries are translated so retrieval runs against Chinese text.
    if lang != "zh":
        response = await fanyi_chat(query=query, prompt_name="fanyi_chat")
        search_query = response.dict()["data"]["answer"]
        logger.debug(f"由于不是中文，所以翻译后的search_query: {search_query}")
    else:
        search_query = query

    # Unique ID for tracing this request; prefixes added below encode the path
    # taken ("I_" = intent hit, "D_" = knowledge-base answer).
    request_id = generate_request_id()

    # Intent recognition only runs for Chinese; "-1" means "no intent".
    data_content = None
    try:
        if lang == "zh":
            instruction = await llm_chat(query, "Intention_Recognition_Expert", 0.1)
        else:
            instruction = "-1"
    except Exception as e:
        logger.error(f"Failed to recognize the instruction: {e}")
        instruction = "-1"

    city = ""
    district = ""
    if instruction in INSTRUCTIONS:
        answer = None
        request_id = "I_" + request_id
        if instruction == "5":
            # Instruction 5 additionally needs a city/district pair extracted
            # from the query ("缺少信息" = extractor found no location).
            try:
                info = await llm_chat(query, "Extract", 0.1)
                if info != "缺少信息":
                    info = info.split(",")
                    if len(info) == 1:
                        city = info[0]
                        district = ""
                    if len(info) == 2:
                        city = info[0]
                        district = info[1]
                if district == "None":
                    district = ""
            except Exception as e:
                # Extraction failed: degrade to a plain knowledge-base answer.
                # NOTE: the id deliberately becomes "D_I_<id>" here (the "I_"
                # prefix was already applied), matching historical behavior;
                # this path also does not call log_content.
                logger.error(f"Failed to extract the information: {e}")
                request_id = "D_" + request_id
                return await _olympic_kb_response(uid, query, search_query, stream,
                                                  request_id, request, nickname, character)

        # Recognized intent: emit a single finished event (answer stays None;
        # the client acts on the instruction code and optional city/district).
        event = create_chat_response_event(response_text=answer, instruction=instruction,
                                           request_id=request_id, finish=True,
                                           data_content=data_content, city=city, district=district)
        responses = EventSourceResponse(event) if stream else StreamingResponse(event)
        log_content(uid, query, answer, instruction=instruction, request_id=request_id,
                    city=city, district=district, chat_type="mars_olympic_kb_chat")
        return responses

    # No recognized intent: answer from the knowledge base directly.
    request_id = "D_" + request_id
    return await _olympic_kb_response(uid, query, search_query, stream,
                                      request_id, request, nickname, character)
