# -*- coding: utf-8 -*-
import json
import time
import uuid
import traceback
from collections import Counter
from typing import List, Tuple, Dict, Optional

from loguru import logger
from langchain_core.documents import Document
from pydantic import BaseModel
from fastapi import Request
from sse_starlette import EventSourceResponse

from configs import KB_CHAT_TEMPLATE_BASE_FILENAMES, KB_CHAT_TEMPLATE, ES_KB_INDEX_NAME, ES_KB_EMERGENCY_NAME
from schemas.response_entity import JsonModel
from db.repository.knowledge import db_get_files_in_kb
from db.repository.conversation import db_add_conversation, db_get_conversations, db_save_dialog, db_search_dialog, \
    db_update_conversion_answer
from utils import hybird_tools
from utils.rewrite_query import rewrite_query
from service.es_service import ELASTICSEARCH, ELASTICSEARCH_EMERGENCY, ik_match_search, similarity_search_with_score, \
    kb_vector_store_emergency, kb_vector_store
from service.llm_service.deepseek_chat import deepseek_model

from .utils import response_source_text, replace_attach_id, prepare_prompt, invoke_glm


async def chat_iter(query: str,
                    references: List[Tuple[Document, float]],
                    histories: List[Dict],
                    origin_query: str,
                    token: str
                    ):
    """Stream an LLM answer built from reranked reference chunks.

    Filters out very short reference chunks, renders attachment
    placeholders, streams the model's answer token-by-token, and
    finally yields the full answer as a JSON envelope.

    Args:
        query: The (possibly rewritten) query sent to the model.
        references: ``(Document, score)`` pairs from hybrid rerank.
        histories: Prior conversation turns (currently unused by the
            prompt construction here — kept for interface stability).
        origin_query: The user's original, unrewritten question
            (used only for logging).
        token: Auth token forwarded to ``replace_attach_id`` so
            attachment placeholders can be resolved.

    Yields:
        str: answer fragments as they stream in, then one final JSON
        string ``{"response": <full answer>}``; on failure, a single
        JSON error envelope instead.
    """
    try:
        references_json_returned = []
        for reference, score in references:
            # Chunks shorter than 15 chars carry no useful context; skip them.
            if len(reference.page_content) < 15:
                continue
            # 将占位符替换为可渲染内容
            # (replace attachment placeholders with renderable content)
            processed_content = replace_attach_id(reference, token)

            references_json_returned.append({
                "file_id": reference.metadata.get("file_id"),
                "file_name": reference.metadata.get("file_name"),
                # score is stringified so the JSON payload is stable regardless
                # of the reranker's numeric type.
                "score": str(score),
                "content": processed_content
            })

        if len(references_json_returned) == 0:
            # No usable references survived filtering — answer with a fixed message.
            res = "无相关内容"
            yield res
        else:
            res = ""
            async for i in invoke_glm(query, references_json_returned):
                if i:
                    res += i
                    yield i

        # Final frame: the accumulated answer wrapped in a JSON envelope.
        final_response = json.dumps({"response": res}, ensure_ascii=False)
        yield final_response
        logger.info(f"question: {origin_query}, answer: {res}")

    except Exception as e:
        # Log through loguru (consistent with single_file_chat) instead of
        # printing to stderr; `traceback` is already imported at module level.
        logger.error(traceback.format_exc())
        error_response = json.dumps({
            "event": "error",
            "data": json.dumps({
                "code": 500,
                "data": None,
                "msg": "回答生成失败"
            }, ensure_ascii=False)
        }, ensure_ascii=False)
        yield error_response


class ChatEntity(BaseModel):
    """Request payload for the single-file chat endpoint."""
    # The user's question text.
    query: str
    # Auth token; forwarded to attachment-placeholder rendering downstream.
    token: str
    # Target file to chat against; endpoint returns 404-style JSON when omitted.
    file_id: Optional[str] = None


async def single_file_chat(request: Request, payload: ChatEntity):
    """SSE endpoint: answer a question grounded in a single knowledge-base file.

    Runs vector + IK (keyword) search over both the main and the emergency
    knowledge bases restricted to ``payload.file_id``, hybrid-reranks the
    combined candidates, deduplicates by chunk text, and streams the answer
    via ``chat_iter``.

    Args:
        request: FastAPI request; ``request.state.user`` is expected to be
            populated by upstream middleware.
        payload: Query, auth token, and the target file id.

    Returns:
        EventSourceResponse streaming the answer, or a JSON error response
        when the file id is missing or retrieval fails.
    """
    try:
        # Accessing request.state.user early — raises (→ 500 path) if the
        # auth middleware did not attach a user.  直接获取
        user = request.state.user
        headers = None

        if payload.file_id is None:
            return JsonModel(code=404, data=None, msg="请选择文件").to_response(headers=headers)

        file_ids = [payload.file_id]

        new_query = rewrite_query(payload.query)  # 实体名称替换 (entity-name rewriting)

        # Vector search over the main KB.
        reference_list_1: List[Document] = similarity_search_with_score(kb_vector_store, query=new_query,
                                                                        query_type="kb", k=20, file_ids=file_ids)
        logger.debug(f"vs_reference_list length: {len(reference_list_1)}")
        # IK (keyword) match search over the main KB.
        ik_reference_list_1: List[Document] = ik_match_search(ELASTICSEARCH, index_name=ES_KB_INDEX_NAME,
                                                              target=new_query, file_ids=file_ids, top_k=20)
        logger.debug(f"ik_reference_list length: {len(ik_reference_list_1)}")

        # Vector search over the emergency KB.
        reference_list_2: List[Document] = similarity_search_with_score(kb_vector_store_emergency, query=new_query,
                                                                        query_type="kb", k=20, file_ids=file_ids)
        logger.debug(f"vs_reference_list length: {len(reference_list_2)}")
        # IK (keyword) match search over the emergency KB.
        ik_reference_list_2: List[Document] = ik_match_search(ELASTICSEARCH_EMERGENCY, index_name=ES_KB_EMERGENCY_NAME,
                                                              target=new_query, file_ids=file_ids, top_k=20)
        logger.debug(f"ik_reference_list length: {len(ik_reference_list_2)}")

        # Merge candidates from both KBs (duplicate assignment block removed).
        ik_reference_list = ik_reference_list_1 + ik_reference_list_2
        reference_list = reference_list_1 + reference_list_2

        # Hybrid search and rerank.
        start = time.time()
        rerank_results: List[Tuple[Document, float]] = hybird_tools.model_rerank(
            new_query, ik_reference_list, reference_list, final_top_k=20, reranker_threshold=0.6
        )
        logger.debug(f"rerank_results: {rerank_results}")
        logger.info(f"rerank take time: {time.time() - start}s")

        # Deduplicate by chunk text, keeping the first (highest-priority) hit.
        # BUG FIX: the original never added to `seen_texts`, so duplicates
        # were not filtered at all.
        reference_rerank_list = []
        seen_texts = set()
        for doc, score in rerank_results:
            if doc.page_content not in seen_texts:
                seen_texts.add(doc.page_content)
                reference_rerank_list.append((doc, score))
        reference_rerank_list.sort(key=lambda x: x[1], reverse=True)  # True倒序，False正叙

        # Conversation history is intentionally empty for single-file chat.
        histories = []

    except Exception as e:
        logger.error(traceback.format_exc())
        return JsonModel(code=500, data=None, msg="回答生成失败").to_response()  # 将异常转换为 HTTP 异常

    return EventSourceResponse(
        chat_iter(query=new_query, histories=histories, references=reference_rerank_list, origin_query=payload.query, token=payload.token)
    )
