#include "HistoryBased.h"
#include "HistoryManager.h"

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <string.h> // duplicate kept from original include list

#include <gio/gio.h>
#include <gio/giotypes.h>

#include <filesystem>
#include <fstream>
#include <iostream>
#include <memory>
#include <string>
#include <thread>
#include <vector>

#include <genai/text/chat.h>
#include <nlohmann/json.hpp>

using namespace std;

using json = nlohmann::json;


static mutex mtx; // NOTE(review): declared but never used anywhere in this file — candidate for removal (verify no other chunk of this file locks it)

// Private implementation of HistoryBased: owns the GLib main loop thread,
// the genai text session/config, and the running conversation. Streaming
// results arrive through the static callback(), which accumulates chunks
// and persists each finished turn via HistoryManager.
struct HistoryBased::Impl
{
    GMainLoop *loop = nullptr;
    GenAiTextSession *session = nullptr;
    ChatModelConfig *config = nullptr;
    ChatMessage *chatMessage = nullptr; // accumulated conversation sent to the model

    std::unique_ptr<HistoryManager> historyManager; // loads/saves the history file

    static std::string record_path; // path of the history record file (set by sendRecordFilePath)

    static int loadtimes; // how many times history has been loaded; used to load only once

    static std::string userQuery; // the pending user question, saved together with the
                                  // model answer (output_text) when the turn completes

    static std::string finish_reason;  // last finish reason reported by the model
    static std::string output_buffer;  // accumulates streamed chunks of the current answer
    static std::string output_text;    // the complete answer once streaming ends
    static std::string stop_chat_flag; // set to "end" to suppress the final print/save

    // Streaming result callback registered with the session. Static because the
    // C API takes a plain function pointer; it therefore works on the static
    // members above and creates a throwaway HistoryManager to persist the turn.
    static void callback(ChatResult *result, void *user_data)
    {
        const char *message = chat_result_get_assistant_message(result);
        const char *reason = chat_result_get_finish_reason_message(result);
        const bool is_end = chat_result_get_is_end(result);

        if (!is_end)
        {
            // Intermediate chunk: just accumulate. Guard against a NULL chunk —
            // appending a null const char* to std::string is undefined behavior.
            if (message)
                output_buffer += message;
            return;
        }

        if (stop_chat_flag == "end")
        {
            // User asked to stop: drop the final chunk silently, matching the
            // original behavior of not printing/saving after an exit request.
            return;
        }

        // Final chunk of a normal turn: finish the answer, print and persist it.
        if (message)
            output_buffer += message;
        output_text = output_buffer;
        std::cout << "模型回答: " << output_text << std::endl;

        // Save (question, answer) to the record file. A local manager is used
        // because the callback is static and has no access to the instance.
        HistoryManager interimHM;
        interimHM.saveTurn(userQuery, output_text, record_path);
        userQuery.clear(); // ready for the next user question

        output_buffer.clear();
        output_text.clear();

        finish_reason = reason ? reason : "";
    }

    // Builds the whole pipeline: background GLib loop, model config, session,
    // system prompt, result callback and the conversation container.
    Impl()
    {
        historyManager = std::make_unique<HistoryManager>();

        loop = g_main_loop_new(nullptr, FALSE);

        // Run the GLib main loop on a detached background thread; that thread
        // unrefs the loop after g_main_loop_run() returns (see ~Impl, which
        // quits the loop to let the thread finish).
        std::thread eventThread([this]()
                                {
            g_main_loop_run(loop);
            g_main_loop_unref(loop); });
        eventThread.detach();

        // 1. Create and fill the model configuration.
        config = chat_model_config_create();
        if (!config)
        {
            // Original code destroyed a still-null session here; just bail out.
            fprintf(stderr, "Failed to create model config\n");
            return;
        }

        chat_model_config_set_name(config, "Qwen-2.5-3b_1.0");
        chat_model_config_set_top_k(config, 0.5); // NOTE(review): top_k is usually an integer — confirm the API expects a ratio

        // On-device model (PublicCloud is the alternative deploy type).
        chat_model_config_set_deploy_type(config, ModelDeployType::OnDevice);

        // 2. Create the session.
        session = genai_text_create_session();
        if (!session)
        {
            fprintf(stderr, "Failed to create session\n");
            return; // don't touch a null session below
        }

        // Attach the configuration once the session exists.
        genai_text_set_model_config(session, config);

        // 3. Initialize the session.
        if (genai_text_init_session(session) != 0)
        {
            fprintf(stderr, "Failed to initialize session\n");
            genai_text_destroy_session(&session);
            return;
        }

        // 4. System prompt (a built-in prompt id could be used instead).
        genai_text_set_chat_system_prompt(session, "你是一个聪明的AI助手。");

        // 5. Register the streaming result callback.
        genai_text_result_set_callback(session, callback, NULL);

        // 6. Create the conversation container.
        chatMessage = chat_message_create();
    }

    ~Impl()
    {
        if (session)
        {
            genai_text_stop_chat(session);
            genai_text_destroy_session(&session);
        }

        if (loop)
        {
            // Ask the event thread to leave g_main_loop_run(); that thread
            // owns the final unref (fixes a corrupted line that never quit
            // the loop at all).
            g_main_loop_quit(loop);
            loop = nullptr;
        }

        if (config)
        {
            chat_model_config_destroy(&config);
        }

        if (chatMessage)
        {
            chat_message_destroy(&chatMessage);
        }
        // historyManager is released automatically by unique_ptr.
    }

    // Queues one user message and fires the asynchronous chat request.
    // On the very first send, previously saved history is replayed into
    // the conversation so the model has context.
    void send(const std::string &msg)
    {
        if (loadtimes == 0)
        {
            getHistory();
        }
        userQuery = msg; // remembered so callback() can save the finished turn

        chat_message_add_user_message(chatMessage, msg.c_str());
        genai_text_chat_with_history_messages_async(session, chatMessage);
    }

    // Loads the saved (user, assistant) turns from record_path into chatMessage.
    void getHistory()
    {
        auto history = historyManager->loadHistory(record_path);

        for (const auto &[user, assistant] : history)
        {
            chat_message_add_user_message(chatMessage, user.c_str());
            // NOTE(review): assistant replies are replayed as *system* messages —
            // confirm the API has no dedicated assistant-message setter.
            chat_message_add_system_message(chatMessage, assistant.c_str());
        }

        loadtimes++;
    }
};

// 在类外定义并初始化静态成员

// Out-of-class definitions for Impl's static data members.
int HistoryBased::Impl::loadtimes = 0;
std::string HistoryBased::Impl::record_path = "";
std::string HistoryBased::Impl::userQuery = "";

std::string HistoryBased::Impl::output_buffer = "";
std::string HistoryBased::Impl::output_text = "";
std::string HistoryBased::Impl::finish_reason = "";
std::string HistoryBased::Impl::stop_chat_flag = "";
// Removed: `std::string HistoryBased::Impl::end_flag = "";` — `end_flag` is not
// declared anywhere in Impl, so that definition failed to compile.

// Facade lifecycle: Impl's constructor spins up the GLib loop thread and the
// genai session; its destructor tears them down.
// NOTE(review): pImpl is a raw owning pointer (its declaration lives in the
// header) — declaring it as std::unique_ptr<Impl> there would remove this
// manual delete.
HistoryBased::HistoryBased() : pImpl(new Impl) {}
HistoryBased::~HistoryBased() { delete pImpl; }

// Marks the chat as being shut down (e.g. after the user types "exit") so the
// final streaming callback does not emit the trailing "模型回答: ..." block.
void HistoryBased::setChatFlag(std::string flag)
{
    pImpl->stop_chat_flag.assign(flag);
}

// Sets the path of the history record file that turns are saved to and
// loaded from.
void HistoryBased::sendRecordFilePath(std::string filePath)
{
    pImpl->record_path.assign(filePath);
}

// Forwards one user message to the implementation, which dispatches it to the
// model asynchronously; results arrive via Impl's registered callback.
void HistoryBased::chat_with_model(const std::string &message)
{
    pImpl->send(message);
}
