# -*- coding: utf-8 -*-
import hashlib
import json
import uuid
import time
import traceback
import asyncio
from typing import List, Tuple, Dict, Optional, AsyncGenerator

from loguru import logger
from langchain_core.documents import Document
from pydantic import BaseModel
from fastapi import Request
from sse_starlette import EventSourceResponse

from configs import ES_KB_INDEX_NAME, ES_KB_EMERGENCY_NAME
from schemas.response_entity import JsonModel
from db.repository.knowledge import db_get_files_in_kb
from db.repository.conversation import db_add_conversation, db_get_conversations, db_save_dialog, db_search_dialog, \
    db_update_conversion_answer
from utils import hybird_tools
from utils.rewrite_query import rewrite_query
from service.es_service import ELASTICSEARCH, kb_vector_store, \
    similarity_search_with_score, ik_match_search

from .utils import replace_attach_id, invoke_glm


def _document_identity(doc: Document) -> str:
    """Return a stable identity string for *doc*.

    Prefers the first non-empty id-like metadata key; otherwise falls back to
    an MD5 digest of the page content, prefixed with the file_id when present.
    """
    meta = doc.metadata or {}
    id_keys = ("chunk_id", "doc_id", "document_id", "id", "source_id", "source", "uuid", "unique_id")
    explicit = next((str(meta[key]) for key in id_keys if meta.get(key)), None)
    if explicit is not None:
        return explicit
    digest = hashlib.md5((doc.page_content or "").encode("utf-8")).hexdigest()
    file_id = meta.get("file_id")
    return f"{file_id}:{digest}" if file_id else digest


async def chat_iter(query: str,
                    dialog_id: str,
                    references: List[Tuple[Document, float]],
                    histories: List[Dict],
                    origin_query: str,
                    token: str
                    ):
    """Stream an SSE answer for *query* grounded in *references*.

    Yields, in order:
      1. a JSON payload with the filtered references, dialog id and conversation id;
      2. incremental answer chunks from the LLM;
      3. a final JSON payload containing the complete answer.

    On any failure a single SSE "error" event is yielded instead.
    NOTE(review): *histories* is currently unused — confirm whether it should
    be forwarded to invoke_glm.
    """
    try:
        # Drop near-empty chunks: they add noise without grounding value.
        references_json = []
        for reference, score in references:
            if len(reference.page_content) < 15:
                continue
            references_json.append({
                "file_id": reference.metadata.get("file_id"),
                "file_name": reference.metadata.get("file_name"),
                "score": str(score),
                "content": replace_attach_id(reference, token),
            })

        # Persist the question first so the client receives the conversation id up front.
        conversation = db_add_conversation(origin_query, dialog_id=dialog_id, references=references_json)
        yield json.dumps(
            {"reference": references_json, "dialog_id": str(dialog_id), "conversion": str(conversation.id)},
            ensure_ascii=False)

        # Stream the model output while accumulating the full answer for persistence.
        res = ''
        async for chunk in invoke_glm(query, references_json):
            if chunk:
                res += chunk
                yield chunk

        db_update_conversion_answer(conversation_id=conversation.id, answer=res)
        yield json.dumps({"response": res}, ensure_ascii=False)
        logger.info(f"dialog_id: {str(dialog_id)}, question: {origin_query}, answer: {res}")

    except Exception:
        # Use the module logger (consistent with knowledge_chat) instead of
        # printing to stderr; reuses the top-level `traceback` import — the
        # previous local re-import was redundant.
        logger.error(traceback.format_exc())
        yield {
            "event": "error",
            "data": json.dumps({
                "code": 500,
                "data": None,
                "msg": "回答生成失败"
            }, ensure_ascii=False)
        }


class ChatEntity(BaseModel):
    """Request payload for the knowledge-base chat endpoint."""
    query: str
    # Existing dialog to continue; None starts a new dialog.
    dialog_id: Optional[uuid.UUID | str] = None
    token: str
    # Knowledge bases to search; None is rejected by the endpoint with a 404.
    # Fixed: annotation was `List[uuid.UUID] = None`, which contradicts the
    # None default — now explicitly Optional (backward compatible).
    kb_ids: Optional[List[uuid.UUID]] = None


def _dedupe_by_content(scored_docs: List[Tuple[Document, float]]) -> List[Tuple[Document, float]]:
    """Drop duplicate page_content entries (first occurrence wins), then sort by score descending."""
    seen_texts = set()
    unique: List[Tuple[Document, float]] = []
    for doc, score in scored_docs:
        if doc.page_content in seen_texts:
            continue
        seen_texts.add(doc.page_content)
        unique.append((doc, score))
    unique.sort(key=lambda pair: pair[1], reverse=True)
    return unique


async def knowledge_chat(request: Request, payload: ChatEntity):
    """Answer *payload.query* against the selected knowledge bases via SSE.

    Pipeline: resolve or create the dialog, collect the file ids of the chosen
    knowledge bases, run vector + IK (keyword) retrieval, rerank and
    de-duplicate the hits, load the dialog history, then stream the answer
    through chat_iter.

    Returns an EventSourceResponse on success, or a JsonModel error response
    when validation fails or any preparation step raises.
    """
    try:
        user = request.state.user  # set by auth middleware upstream
        headers = None

        if payload.kb_ids is None:
            return JsonModel(code=404, data=None, msg="请选择知识库").to_response(headers=headers)

        user_id = user.id
        if payload.dialog_id:
            logger.debug(f"user_id: {user_id}")
            dialog = db_search_dialog(dialog_id=payload.dialog_id, user_id=user_id, dialog_type=1)
            if dialog is None:
                # Admins are told the dialog belongs to another account;
                # regular users just get "not found".
                if user.check_admin_role():
                    return JsonModel(code=401, data=None, msg="无权限在其他账户的对话中继续提问").to_response(headers=headers)
                else:
                    return JsonModel(code=404, data=None, msg="对话不存在,请更换对话").to_response(headers=headers)
        else:
            dialog = db_save_dialog(payload.query, user.id, dialog_type=1)

        # Collect every file id contained in the selected knowledge bases.
        file_ids = []
        for kb_id in payload.kb_ids:
            items, _ = db_get_files_in_kb(kb_id, all=True)  # default kb_type=1, returns (items, total)
            file_ids.extend(str(f.id) for f in items)

        # Entity-name rewriting before retrieval.
        new_query = rewrite_query(payload.query)

        # Vector retrieval.
        vs_references: List[Document] = similarity_search_with_score(
            kb_vector_store, query=new_query, query_type="kb", k=20, file_ids=file_ids)
        logger.debug(f"vs_reference_list length: {len(vs_references)}")

        # Keyword (IK analyzer) retrieval.
        ik_references: List[Document] = ik_match_search(
            ELASTICSEARCH, index_name=ES_KB_INDEX_NAME, target=new_query, file_ids=file_ids, top_k=20)
        logger.debug(f"ik_reference_list length: {len(ik_references)}")

        # Hybrid rerank of both result sets.
        start = time.time()
        rerank_results: List[Tuple[Document, float]] = hybird_tools.model_rerank(
            new_query, ik_references, vs_references, final_top_k=20, reranker_threshold=0.6
        )
        logger.debug(f"rerank_results: {rerank_results}")
        logger.info(f"rerank take time: {time.time() - start}s")

        start = time.time()
        reference_rerank_list = _dedupe_by_content(rerank_results)
        logger.info(f"sort take time: {time.time() - start}s")

        # Dialog history, oldest first.
        conversions = list(db_get_conversations(dialog.id, user.id))[::-1]
        histories = [message for conversion in conversions for message in conversion.to_messages()]

    except Exception:
        logger.error(str(traceback.format_exc()))
        return JsonModel(code=500, data=None, msg="回答生成失败").to_response()

    return EventSourceResponse(
        chat_iter(query=new_query, dialog_id=dialog.id, histories=histories, references=reference_rerank_list,
                  origin_query=payload.query, token=payload.token)
    )