"""
11111
科技成果检索算法接口
"""
import base64
import os
import time
from datetime import datetime

from fastapi import FastAPI
from loguru import logger
from pydantic import BaseModel

from fastapi import FastAPI, status
from fastapi.encoders import jsonable_encoder
from fastapi.exceptions import RequestValidationError
from fastapi.responses import JSONResponse
from requests import Request
from for_search_api_engine import ChromaDataloadEngineSearch

PROJECT_PATH = os.path.dirname(os.path.abspath(__file__))  # project root (directory containing this file)
BASE_LOG_PATH = os.path.join(PROJECT_PATH, "logs")  # root directory for all log output
INFO_LOG_PATH = os.path.join(BASE_LOG_PATH, "INFO")  # INFO-level log directory
ERROR_LOG_PATH = os.path.join(BASE_LOG_PATH, "ERROR")  # ERROR-level log directory

# INFO sink: only records whose level is exactly INFO, rotated daily at midnight.
logger.add(
    f"{INFO_LOG_PATH}/_{time.strftime('%Y-%m-%d')}.log",
    level="INFO",
    filter=lambda record: record["level"].name == "INFO",
    rotation="00:00",
    encoding="utf8",
    compression=None,  # rotated files are kept uncompressed
    retention="180 day",  # keep roughly the last 180 days of log files
    format="{time:YYYY-MM-DD HH:mm:ss.SSS} | {level} | (file:{file.name}:{line}) {message}",
    backtrace=True,  # record the full exception stack trace
    diagnose=True  # include extra diagnostic info (variable values) in tracebacks
)

# ERROR sink: only records whose level is exactly ERROR, rotated daily at midnight.
logger.add(
    f"{ERROR_LOG_PATH}/_{time.strftime('%Y-%m-%d')}.log",
    level="ERROR",
    filter=lambda record: record["level"].name == "ERROR",
    rotation="00:00",
    encoding="utf8",
    compression=None,  # rotated files are kept uncompressed
    format="{time:YYYY-MM-DD HH:mm:ss.SSS} | {level} | (file:{file.name}:{line}) {message}",
    retention="180 day",  # keep roughly the last 180 days of log files
    backtrace=True,  # record the full exception stack trace
    diagnose=True  # include extra diagnostic info (variable values) in tracebacks
)


def log_write(res, start_time, end_time, serve_name="", error_b=False):
    """Log one request lifecycle (start, result, end) for *serve_name*.

    Emits at ERROR level when ``error_b`` is true, otherwise at INFO level;
    the end-time line wording also differs between the two cases.
    """
    emit = logger.error if error_b else logger.info
    end_label = "服务请求失败,失败时间" if error_b else "当次请求截至时间"
    emit(f'--get {serve_name} the request--')
    emit(f"接收请求时间:{start_time}")
    emit(f"业务结果:{res}")
    emit(f"{end_label}:{end_time}")
    emit(f"--{serve_name} Finish--\n")


app_ppt = FastAPI()  # ASGI application, served by uvicorn in the __main__ guard below
search_engine = ChromaDataloadEngineSearch()  # project-local search engine, built once at import time


class ResearchRequest(BaseModel):
    """Request body for POST /api/ppt_search."""

    ppt_file: str  # base64-encoded content of the uploaded pptx file
    ppt_name: str  # file name used when persisting the upload to disk


# Raised by FastAPI when the request body does not match the Pydantic model.
@app_ppt.exception_handler(RequestValidationError)
async def validation_exception_handler(request: Request, exc: RequestValidationError):
    """Return the service's uniform error envelope for validation failures.

    NOTE(review): the ``Request`` annotation here resolves to the class
    imported from ``requests`` at the top of the file, not
    ``fastapi.Request`` — harmless as a bare annotation, but worth fixing.
    """
    payload = {
        "result": 1,
        "msg": str(exc.errors()),
        "result_data": [],
    }
    return JSONResponse(
        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
        content=jsonable_encoder(payload),
    )


@app_ppt.post(f"/api/ppt_search")
async def process_file(request: ResearchRequest):
    start_time = datetime.now().strftime("%Y%m%d_%H%M%S")
    try:
        pptx_base64 = request.ppt_file
        ppt_name = request.ppt_name

        pptx_data = base64.b64decode(pptx_base64.encode("utf-8"))

        tmp_save_path = os.path.join("received_files", ppt_name)
        os.makedirs("received_files", exist_ok=True)
        with open(tmp_save_path, "wb") as f:
            f.write(pptx_data)

        bool_has_table, table_str_list = search_engine.ppt_loader(tmp_save_path)
        if not bool_has_table:
            raise Exception("输入pptx文件中无待检索项")

        final_res = []

        max_same_table_sorce = 0
        best_top5_res = None
        for table_index, search_query in enumerate(table_str_list):
            top5_res = search_engine.history_similarity_search(search_query)
            total_score = sum(item[1] for item in top5_res)
            if total_score > max_same_table_sorce:
                best_top5_res = top5_res

        for res in best_top5_res:
            ppt_name =res[0].split(">>:")[0][2:]  #
            text=res[0].split(">>:")[1]
            score=res[1]
            final_res.append({
                "ppt_file_name":ppt_name,
                "text":text,
                "score":score
            })

        res = {
            "result": 0,
            "msg": "success",
            "result_data": final_res
        }
        log_write(res, start_time=start_time, end_time=datetime.now(), serve_name="ppt_search",
                  error_b=False)

        os.remove(tmp_save_path)
        return res

    except Exception as error:
        res = {
            "result": 1,
            "msg": str(error),
            "result_data": [],
        }
        log_write(res, start_time=start_time, end_time=datetime.now(), serve_name="ppt_search",
                  error_b=True)
        return res


if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app_ppt, host="0.0.0.0", port=8713)
