#include "HistoryManager.h"
#include "HistoryBased.h"


#include <condition_variable>
#include <filesystem>
#include <fstream>
#include <iostream>
#include <mutex>
#include <thread>
#include <utility>
#include <vector>
#include <string.h>

#include <nlohmann/json.hpp>

#ifdef __cplusplus
extern "C" {
#endif

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <gio/gio.h>
#include <gio/giotypes.h>
#include <genai/text/chat.h>

#ifdef __cplusplus
}
#endif


using namespace std;

using json = nlohmann::json;

// File-scope state shared between chat_with_model() and the C-style
// chat_callback(), which cannot carry C++ member state itself.
static communicate_data indata = {"", ""};      // current turn: user query + accumulated model reply
static string model_response = "";              // final reply of the last completed turn
static string end_flag = "false";               // "true" once the callback reports end-of-stream
static string record_file_path = "";            // history file path; the callback needs a static copy

static int chat_flag = 0;  // 0 until the first turn has been sent (first turn adds no prior reply)

/// Construct a session bound to a history file.
/// @param record_path path of the JSON history file to load and append to.
/// Starts (once per process) a GLib main loop on a detached background
/// thread, configures the on-device model, registers the async result
/// callback, and replays the persisted history into the chat context.
HistoryBased::HistoryBased(string record_path)
{
    // `loop` is static: only the first constructed instance creates and
    // runs the loop; it is needed to dispatch the async genai callbacks.
    // NOTE(review): the loop is never quit, so the detached thread (and
    // the unref after g_main_loop_run) lives for the whole process.
    static GMainLoop *loop = g_main_loop_new(nullptr, FALSE);
    std::thread eventThread([]()
                            {
        g_main_loop_run(loop);
        g_main_loop_unref(loop); });
    eventThread.detach();

    this->historyManager = new HistoryManager();
    this->record_path = record_path;
    record_file_path = record_path; // static copy: the C callback has no access to `this->record_path`

    config = chat_model_config_create();
    chat_model_config_set_name(config, "Qwen-2.5-3b_1.0");
    chat_model_config_set_top_k(config, 0.5);
    chat_model_config_set_deploy_type(config, ModelDeployType::OnDevice);

    session = genai_text_create_session();
    genai_text_set_model_config(session, config);
    genai_text_init_session(session);
    genai_text_set_chat_system_prompt(session, "你是一个聪明的AI助手。");
    genai_text_result_set_callback(session, chat_callback, this); // register async result callback

    chatMessage = chat_message_create();

    // Replay persisted history into the chat context.
    // The original guarded this with `if (loadtimes == 0)`, but loadtimes
    // had just been assigned 0, so the branch was always taken — the dead
    // guard is removed; the net effect (loadtimes == 1 afterwards) is kept.
    auto history = historyManager->loadHistory(record_path);
    for (const auto &[user, assistant] : history)
    {
        chat_message_add_user_message(chatMessage, user.c_str());
        chat_message_add_system_message(chatMessage, assistant.c_str());
    }
    loadtimes = 1; // history has been loaded exactly once
}

HistoryBased::HistoryBased(){} //想到了init一个，不过init之前需要先给路径赋值

/// Tear down the session: release the history manager, the model config,
/// the chat session (stopping any in-flight chat first), and the message
/// list, in that order. Destroy calls take the handle's address and are
/// expected to null it out.
HistoryBased::~HistoryBased()
{
    // delete on a null pointer is a no-op, so no guard is needed.
    delete historyManager;

    if (config != nullptr)
    {
        chat_model_config_destroy(&config);
    }

    if (session != nullptr)
    {
        genai_text_stop_chat(session);
        genai_text_destroy_session(&session);
    }

    if (chatMessage != nullptr)
    {
        chat_message_destroy(&chatMessage);
    }
}

// Two-phase initialization for the default-constructed object.
// Precondition: set_record_path() must have been called first — this
// method reads this->record_path for both the static callback path and
// the history load below.
// Mirrors the parameterized constructor, but with a different system
// prompt ("小麒麟" persona) and without touching `loadtimes`.
// NOTE(review): calling init() more than once leaks the previous
// HistoryManager and session handles — confirm single-call usage.
void HistoryBased::init() {
    // `loop` is static: created and run once per process on a detached
    // thread, so async genai callbacks can be dispatched.
    static GMainLoop *loop = g_main_loop_new(nullptr, FALSE);
    std::thread eventThread([]()
                            {
        g_main_loop_run(loop);
        g_main_loop_unref(loop); });
    eventThread.detach();

    this->historyManager = new HistoryManager();
    record_file_path = this->record_path; // static copy: the C callback cannot reach `this`

    config = chat_model_config_create();

    chat_model_config_set_name(config, "Qwen-2.5-3b_1.0");
    chat_model_config_set_top_k(config, 0.5);
    chat_model_config_set_deploy_type(config, ModelDeployType::OnDevice);

    session = genai_text_create_session();

    genai_text_set_model_config(session, config);
    genai_text_init_session(session);
    genai_text_set_chat_system_prompt(session, "你是一个聪明的AI助手，名字叫做小麒麟。");
    genai_text_result_set_callback(session, chat_callback, this); // register async result callback

    chatMessage = chat_message_create();

    // Replay persisted history into the chat context; loading once here
    // during initialization is sufficient.
    auto history = historyManager->loadHistory(this->record_path);

    for (const auto &[user, assistant] : history)
    {
        chat_message_add_user_message(chatMessage, user.c_str());
        chat_message_add_system_message(chatMessage, assistant.c_str());
    }
}

/// Set the history file path used by init() and the result callback.
/// @param path history file location (sink parameter).
void HistoryBased::set_record_path(string path) {
    // `path` is a by-value sink parameter — move instead of copying.
    this->record_path = std::move(path);
}

/// Send one user turn to the model and block until the async callback
/// signals that the full reply has arrived.
/// @param input_text the user's message for this turn.
/// @return the model's complete reply (read from the static
///         `model_response` that chat_callback fills in).
/// NOTE(review): the static globals make this safe for one outstanding
/// request at a time only — confirm callers never overlap turns.
string HistoryBased::chat_with_model(string input_text)
{
    // Reset the per-turn shared state before kicking off the request.
    indata.user_query = input_text;
    indata.model_reply = "";
    end_flag = "false";

    // cout << "user_query: " << indata.user_query << endl;

    {
        std::lock_guard<std::mutex> lock(sync_mutex);
        reply_ready = false;
    }
    if (chat_flag == 0) {
        chat_flag ++;
    } else {
        // Except for the very first turn, append the previous turn's
        // assistant reply to the history before sending the new query.
        chat_message_add_system_message(chatMessage, model_response.c_str());
    }
    chat_message_add_user_message(chatMessage, input_text.c_str());
    genai_text_chat_with_history_messages_async(session, chatMessage);

    // Block until chat_callback sets reply_ready under sync_mutex and
    // notifies; the predicate guards against spurious wakeups.
    std::unique_lock<std::mutex> lock(sync_mutex);
    cv.wait(lock, [this]()
            { return reply_ready; });

    return model_response;
}

/// Async result callback registered with the genai session. Invoked (on
/// the GLib loop thread) once per streamed chunk; accumulates chunks into
/// `indata.model_reply` and, on the final chunk, persists the turn and
/// wakes the thread blocked in chat_with_model().
/// @param result    chunk handle; queried for the message text and end flag.
/// @param user_data the HistoryBased* passed at registration time.
void HistoryBased::chat_callback(ChatResult *result, void *user_data)
{
    HistoryBased *self = static_cast<HistoryBased *>(user_data);

    const char *message = chat_result_get_assistant_message(result);
    end_flag = chat_result_get_is_end(result) ? "true" : "false";

    // Appending a null const char* to std::string is undefined behavior;
    // guard against the C API handing back nullptr. (The original appended
    // unconditionally in both branches — hoisted here behind the check.)
    if (message != nullptr)
    {
        indata.model_reply += message;
    }

    if (end_flag == "true")
    {
        model_response = indata.model_reply;

        // The stop sentinel is a control message — don't persist it.
        if (indata.model_reply != "<|stopChat_8167431|>")
        {
            HistoryManager interimHM;
            interimHM.saveTurn(indata.user_query, indata.model_reply, record_file_path);
        }

        indata.user_query = "";
        indata.model_reply = "";

        // Publish completion under the mutex, then notify the waiter.
        {
            std::lock_guard<std::mutex> lock(self->sync_mutex);
            self->reply_ready = true;
        }
        self->cv.notify_one();
    }
}
