from flask import Flask, request, Response
from embedding_hyd.embedding_implement import Embedding
from faiss_database_hyd.faiss_database import FaissDataBase
from qianfan_big_model.qianfan_big_model import QianfanBigModel
from utils.tools import *
from flask_cors import CORS
from error_hyd.ai_error import AIError
from flask.logging import default_handler
from logging_class.logging_ import Loggers
import json


# Load all runtime settings from the project-level JSON config file.
with open("./config.json", "r", encoding="utf-8") as f:
    config = json.load(f)

# Baidu Qianfan API key.
api_token = config["api_token"]

# Baidu Qianfan secret key.
secret_token = config["secret_token"]

# System prompt template used when knowledge-base context is found ("trained" mode).
st_path = config["system_template_path"]

# System prompt template used when no knowledge-base context matches ("ordinary" mode).
st1_path = config["system_template1_path"]

# Question template; formatted below with {kb} (retrieved passages) and {question}.
qt_path = config["question_template_path"]

# Path to the stored embedding vectors backing the FAISS index.
vb_path = config["vector_base_path"]

# Dimensionality of the embedding vectors.
vk = config["vector_dimension"]

# Path to the raw knowledge-base corpus.
kb_path = config["knowledge_base_path"]

# File format of the knowledge-base corpus (passed through to FaissDataBase).
kb_filetype = config["knowledge_base_file_type"]

# NOTE(review): hard-coded API credentials were previously left here in
# comments; they have been removed. Rotate those keys if this file was
# ever shared or committed with them present.

with open(st_path, "r", encoding="utf-8") as f:
    system_template = f.read()

with open(st1_path, "r", encoding="utf-8") as f:
    system_template1 = f.read()

with open(qt_path, "r", encoding="utf-8") as f:
    question_template = f.read()


# Embedding encoder (Baidu embedding API client).
embedding_tool = Embedding(api_token, secret_token)

# FAISS vector database over the knowledge-base corpus.
# Example: FaissDataBase(384, "./embedding_data.npy", "./train_data_csv.csv", "csv")
faiss_database = FaissDataBase(vk, vb_path, kb_path, kb_filetype)

# Qianfan ernie-bot-turbo large-model client.
qianfan_big_model = QianfanBigModel(api_token, secret_token)

# Build an L2 (Euclidean-distance) index over the stored vectors.
euclidean_distance = faiss_database.create_index_l2()


app = Flask(__name__)

# Remove Flask's default log handler so only our configured handlers emit.
app.logger.removeHandler(default_handler)

# Flask application logger -> ./log/app/app.log
app_formatter = Loggers.generate_formatter()
app_handlers = Loggers.generate_handler("./log/app/app.log", app_formatter)
Loggers.configurate_loggers(logger_obj=app.logger, handler=app_handlers, is_propagate=False)

# Root logger -> ./log/root/root.log
root_formatter = Loggers.generate_formatter()
root_handlers = Loggers.generate_handler(log_store_path="./log/root/root.log", formatter=root_formatter)
root_logger = Loggers.generate_loggers(logger_name="", handler=root_handlers, is_propagate=False)

# Allow cross-origin requests (browser front-end support).
CORS(app)


# Non-streaming chat endpoint.
@app.route("/chat", methods=["post"])
def chat():
    """Answer a chat request in a single (non-streaming) response.

    Expects a JSON body with:
      - "history": list of alternating conversation turns; must have odd
        length (the last element is the pending user query).
      - "query": the raw text of the new question.

    When the FAISS search returns relevant passages, they are injected into
    the last history turn via ``question_template`` and the trained system
    template is used; otherwise the ordinary system template is used.

    Returns the model reply, or a JSON ``AIError`` payload with HTTP 400
    when the request body is malformed (codes 5001-5003).
    """
    data = request.json

    # --- request validation (build each error payload once, log + return it) ---
    if 'history' not in data:
        err = AIError("5001", 'Missing parameters "history" ').error_info()
        app.logger.error(err)
        return Response(json.dumps(err), status=400)

    if (len(data['history']) % 2) == 0:
        err = AIError(
            "5002",
            '"history" len is even number, no new query or the format is not right').error_info()
        app.logger.error(err)
        return Response(json.dumps(err), status=400)

    if 'query' not in data:
        err = AIError("5003", 'Missing parameters "query" ').error_info()
        app.logger.error(err)
        return Response(json.dumps(err), status=400)

    # Encode the query text into an embedding vector.
    query_embedding_data = embedding_tool.encode_str([data["query"]])

    # Retrieve up to 3 knowledge-base passages within the distance threshold.
    kb_data = faiss_database.search_vector_text(faiss_obj=euclidean_distance,
                                                top_k=3,
                                                embedding_data=query_embedding_data,
                                                threshold_value=0.5)

    if kb_data:
        # Collect every matched passage and track the best (smallest) distance
        # for logging. Each hit is a (text, distance) pair.
        passages = []
        relate_kb_point_max = 100
        for hits in kb_data:
            for hit in hits:
                passages.append(hit[0])
                if hit[1] < relate_kb_point_max:
                    relate_kb_point_max = hit[1]
        kb_data_text = "\n\n".join(passages)

        # Rewrite the last history turn to include the retrieved context.
        new_question = question_template.format(kb=kb_data_text, question=data["query"])
        data["history"][-1] = new_question
        model_message = max_len(to_message(data["history"]))

        app.logger.info(
            f"use the trained model, the maximum corpus relevance is: {relate_kb_point_max}")
        return qianfan_big_model.brnie_bot_turbo(model_message,
                                                 system_template=system_template)

    # No relevant knowledge found: answer with the ordinary template.
    model_message = max_len(to_message(data["history"]))
    app.logger.info("use the ordinary model")
    return qianfan_big_model.brnie_bot_turbo(model_message,
                                             system_template=system_template1)


# Streaming chat endpoint (Server-Sent Events style response).
@app.route("/chat_stream", methods=["post"])
def chat_stream():
    """Streaming counterpart of ``/chat``.

    Same request contract as ``/chat`` (JSON body with "history" of odd
    length and "query"), but the model reply is streamed back chunk by
    chunk, and up to 4 knowledge-base passages are retrieved (vs 3).

    Returns a streamed ``text/event-stream`` response on success, or a JSON
    ``AIError`` payload with HTTP 400 on malformed input (5001-5003) or any
    unexpected failure (5004).
    """
    try:
        data = request.json

        # --- request validation (build each error payload once) -------------
        if 'history' not in data:
            err = AIError("5001", 'Missing parameters "history" ').error_info()
            app.logger.error(err)
            return Response(json.dumps(err), status=400)

        if (len(data['history']) % 2) == 0:
            err = AIError(
                "5002",
                '"history" len is even number, no new query or the format is not right').error_info()
            app.logger.error(err)
            return Response(json.dumps(err), status=400)

        if 'query' not in data:
            err = AIError("5003", 'Missing parameters "query" ').error_info()
            app.logger.error(err)
            return Response(json.dumps(err), status=400)

        # Encode the query text into an embedding vector.
        query_embedding_data = embedding_tool.encode_str([data["query"]])

        # Retrieve up to 4 knowledge-base passages within the distance threshold.
        kb_data = faiss_database.search_vector_text(faiss_obj=euclidean_distance,
                                                    top_k=4,
                                                    embedding_data=query_embedding_data,
                                                    threshold_value=0.5)

        if kb_data:
            # Collect matched passages; track the best (smallest) distance
            # for logging. Each hit is a (text, distance) pair.
            passages = []
            relate_kb_point_max = 100
            for hits in kb_data:
                for hit in hits:
                    passages.append(hit[0])
                    if hit[1] < relate_kb_point_max:
                        relate_kb_point_max = hit[1]
            kb_data_text = "\n\n".join(passages)

            # Rewrite the last history turn to include the retrieved context.
            new_question = question_template.format(kb=kb_data_text, question=data["query"])
            data["history"][-1] = new_question
            model_message = max_len(to_message(data["history"]))

            app.logger.info(
                f"use the trained model, the maximum corpus relevance is: {relate_kb_point_max}")

            # BUGFIX: the SSE media type is "text/event-stream";
            # "text/event-html" is not a registered MIME type and breaks
            # browser EventSource clients.
            return Response(
                qianfan_big_model.brnie_bot_turbo_stream(model_message, system_template=system_template),
                mimetype="text/event-stream")

        # No relevant knowledge found: stream with the ordinary template.
        model_message = max_len(to_message(data["history"]))
        app.logger.info("use the ordinary model")
        return Response(
            qianfan_big_model.brnie_bot_turbo_stream(model_message, system_template=system_template1),
            mimetype="text/event-stream")

    except Exception as e:
        # Catch-all boundary: log the real error message (not the exception
        # object itself), return a generic payload to the client.
        app.logger.error(AIError("5004", str(e)).error_info())
        return Response(
            json.dumps(AIError("5004", "there are error occurred, please contact the administrator").error_info()),
            status=400)



@app.route("/hello", methods=["get", "post"])
def test_interface():
    """Health-check endpoint: confirms the service is up and reachable."""
    greeting = "Successful access to the project!!!"
    return greeting

# if __name__ == "__main__":
#     app.run(port=5000,
#             host="0.0.0.0",
#             debug=True)
