#include "cloud_model_image.h"

#include <algorithm>
#include <chrono>
#include <fstream>
#include <iostream>
#include <map>
#include <sstream>
#include <thread>

#include <curl/curl.h>
#include <nlohmann/json.hpp>

#include "common_config_manager.h"
#include "base_log.h"
#include "base_utils.h"

namespace El {
namespace Cloud {

CloudModelImage &CloudModelImage::GetInstance()
{
    // Meyers singleton: the function-local static is constructed exactly once,
    // and since C++11 that initialization is guaranteed thread-safe.
    static CloudModelImage singleton;
    return singleton;
}

CloudModelImage::CloudModelImage() {}

void CloudModelImage::Start()
{
    InitConfig();
    curl_global_init(CURL_GLOBAL_DEFAULT);
}

void CloudModelImage::Stop()
{
    // Release libcurl's process-wide resources. Must not run while any
    // transfer started by this module is still in flight.
    curl_global_cleanup();
}

// Load the "cloud_model" configuration section (API key, model name, context
// budget) and fix the endpoint URL.
// Returns false (after logging the specific missing key) when any required
// entry is absent; members already read keep their assigned values.
bool CloudModelImage::InitConfig()
{
    // Fetch the configuration section from the shared config store.
    nlohmann::json config = El::Common::ConfigManager::GetInstance().GetConfig("cloud_model");
    if (config.contains("api_key")) {
        api_key_ = config["api_key"].get<std::string>();
        // Security fix: never log the full credential — the previous code
        // wrote the whole API key to the log. Show only a short prefix.
        LOG_INFO("API key: {}****", api_key_.substr(0, std::min<size_t>(4, api_key_.size())));
    } else {
        LOG_ERROR("API key not set in configuration");
        return false;
    }

    if (config.contains("model")) {
        model_ = config["model"].get<std::string>();
        LOG_INFO("Model: {}", model_);
    } else {
        LOG_ERROR("Model not set in configuration");
        return false;
    }

    if (config.contains("max_context_length")) {
        max_context_length_ = config["max_context_length"].get<size_t>();
        LOG_INFO("Max context length: {} bytes", max_context_length_);
    } else {
        LOG_ERROR("Max context length not set in configuration");
        return false;
    }

    // DashScope's OpenAI-compatible chat-completions endpoint (fixed, not
    // configurable).
    api_url_ = "https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions";

    return true;
}

// Allocate a fresh conversation id and register an empty history for it.
// At most kMaxConversations histories are retained; when the cap is reached,
// the oldest conversation (lowest id — ids are assigned monotonically) is
// evicted before the new one is created.
// Returns the id of the newly created conversation.
int32_t CloudModelImage::NewConversation()
{
    // Named constant replaces the former magic number 5.
    static constexpr size_t kMaxConversations = 5;

    int32_t conversation_id = next_conversation_id_++;

    if (conversations_.size() >= kMaxConversations) {
        // Smallest key == oldest conversation, since ids only ever grow.
        auto oldest = std::min_element(conversations_.begin(), conversations_.end(),
                                       [](const auto &a, const auto &b) { return a.first < b.first; });

        if (oldest != conversations_.end()) {
            LOG_INFO("Deleting oldest conversation: {}", oldest->first);
            conversations_.erase(oldest);
        }
    }

    conversations_[conversation_id] = std::vector<std::pair<nlohmann::json, std::optional<ImageData>>>();
    LOG_INFO("Created new conversation: {}", conversation_id);
    return conversation_id;
}

// Remove the history for the given conversation id.
// Returns true when a conversation was found and erased, false (with a
// warning logged) when the id was unknown.
bool CloudModelImage::DeleteConversation(int32_t conversation_id)
{
    const auto entry = conversations_.find(conversation_id);
    if (entry == conversations_.end()) {
        LOG_WARN("Attempted to delete non-existent conversation: {}", conversation_id);
        return false;
    }

    conversations_.erase(entry);
    LOG_INFO("Deleted conversation: {}", conversation_id);
    return true;
}

std::string CloudModelImage::AnalyzeImage(int32_t conversation_id, const ImageData &image_data, const std::string &prompt)
{
    LOG_INFO("Analyzing image: Conversation ID {}, Image data size {}, Width {}, Height {}", conversation_id,
             image_data.size, image_data.width, image_data.height);

    if (image_data.size == 0 || image_data.data == nullptr) {
        LOG_ERROR("Invalid image data");
        return "";
    }

    std::vector<unsigned char> image_buffer(image_data.data, image_data.data + image_data.size);
    std::string base64_image = El::Base::Base64Encode(image_buffer);
    // clang-format off
    nlohmann::json message = {
        {"role", "user"},
        {"content", nlohmann::json::array({
            {
                {"type", "text"},
                {"text", prompt}
            },
            {
                {"type", "image_url"},
                {"image_url", {
                    {"url", "data:image/jpeg;base64," + base64_image}
                }}
            }
        })}
    };
    // clang-format on

    return ProcessMessage(conversation_id, message, image_data);
}

// Submit a plain text prompt as one user turn in the given conversation and
// return the model's reply ("" on failure).
std::string CloudModelImage::GenerateText(int32_t conversation_id, const std::string &prompt)
{
    LOG_INFO("Generating text: Conversation ID {}", conversation_id);

    // Text-only user message: content is a plain string, no multimodal parts.
    nlohmann::json message;
    message["role"] = "user";
    message["content"] = prompt;

    // No image accompanies this turn.
    return ProcessMessage(conversation_id, message, std::nullopt);
}

// Append `message` to the conversation's history, trim the history to the
// configured token budget, send the surviving history to the cloud model, and
// (on success) record the assistant's reply so later turns keep context.
// Returns the assistant's reply text, or "" on request/parse failure.
// Note: operator[] silently creates an empty history if `conversation_id`
// was never registered via NewConversation().
std::string CloudModelImage::ProcessMessage(int32_t conversation_id,
                                       const nlohmann::json &message,
                                       const std::optional<ImageData> &img_data)
{
    auto &conversation = conversations_[conversation_id];
    conversation.push_back({message, img_data});

    // Estimate the total token count of the current conversation.
    size_t total_tokens = 0;
    for (const auto &[msg, info] : conversation) {
        total_tokens += CalculateMessageTokens(msg, info);
    }

    // Drop the oldest messages until the estimate fits max_context_length_.
    // NOTE(review): if the just-added message alone exceeds the budget it is
    // dropped too and an empty request goes out — confirm that is intended.
    while (total_tokens > max_context_length_ && !conversation.empty()) {
        total_tokens -= CalculateMessageTokens(conversation.front().first, conversation.front().second);
        conversation.erase(conversation.begin());
    }

    // Build the request payload from the surviving history.
    nlohmann::json request_data = {{"model", model_}, {"messages", nlohmann::json::array()}};

    for (const auto &[msg, _] : conversation) {
        (void)_;
        request_data["messages"].push_back(msg);
    }

    // Log a deep copy of the request with base64 image payloads redacted,
    // so huge data: URLs never reach the log file.
    nlohmann::json log_data = request_data;
    if (log_data.contains("messages")) {
        for (auto &msg : log_data["messages"]) {
            if (msg.contains("content") && msg["content"].is_array()) {
                for (auto &content : msg["content"]) {
                    if (content.contains("image_url") && content["image_url"].contains("url")) {
                        content["image_url"]["url"] = "***omitted***";
                    }
                }
            }
        }
        LOG_INFO("Preparing to send request: tokens : {} messages : {}", total_tokens, log_data.dump(4));
    }

    std::string post_data = request_data.dump();
    std::string response = SendHttpRequest(post_data, api_url_);
    if (response.empty()) {
        LOG_ERROR("HTTP request failed");
        return "";
    }

    // Record the assistant's reply in the history only when parsing succeeded.
    std::string result = ParseResponse(response);
    if (!result.empty()) {
        nlohmann::json assistant_message = {{"role", "assistant"}, {"content", result}};
        conversation.push_back({assistant_message, std::nullopt});
    }

    LOG_INFO("Processing complete, result length: {} characters", result.length());
    return result;
}

// libcurl write callback: append the received chunk to the caller-supplied
// std::string. Returning anything other than size*nmemb tells libcurl to
// abort the transfer, which is what we do when the sink is null.
size_t CloudModelImage::WriteCallback(void *contents, size_t size, size_t nmemb, std::string *s)
{
    if (!s) {
        return 0;
    }
    const size_t chunk_bytes = size * nmemb;
    const char *begin = static_cast<const char *>(contents);
    s->append(begin, chunk_bytes);
    return chunk_bytes;
}

std::string CloudModelImage::SendHttpRequest(const std::string &post_data, const std::string &url)
{
    CURL *curl = curl_easy_init();
    std::string response_string;

    if (curl) {
        struct curl_slist *headers = nullptr;
        headers = curl_slist_append(headers, "Content-Type: application/json");
        headers = curl_slist_append(headers, ("Authorization: Bearer " + api_key_).c_str());

        curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
        curl_easy_setopt(curl, CURLOPT_POSTFIELDS, post_data.c_str());
        curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers);
        curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, WriteCallback);
        curl_easy_setopt(curl, CURLOPT_WRITEDATA, &response_string);

        // 设置超时
        curl_easy_setopt(curl, CURLOPT_TIMEOUT, 30L);
        curl_easy_setopt(curl, CURLOPT_CONNECTTIMEOUT, 10L);

        // SSL相关设置
        curl_easy_setopt(curl, CURLOPT_SSL_VERIFYPEER, 0L);
        curl_easy_setopt(curl, CURLOPT_SSL_VERIFYHOST, 0L);
        curl_easy_setopt(curl, CURLOPT_SSLVERSION, CURL_SSLVERSION_TLSv1_2);

        // 详细错误信息
        char errbuf[CURL_ERROR_SIZE];
        // curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, errbuf);
        // curl_easy_setopt(curl, CURLOPT_VERBOSE, 1L);

        // 添加重试机制
        int retries = 3;
        CURLcode res;
        while (retries > 0) {
            res = curl_easy_perform(curl);
            if (res == CURLE_OK) {
                break;
            }
            LOG_WARN("HTTP request failed, error code: {} ({}), remaining retries: {}", curl_easy_strerror(res), errbuf,
                     --retries);
            std::this_thread::sleep_for(std::chrono::seconds(1));
        }

        curl_slist_free_all(headers);
        curl_easy_cleanup(curl);

        if (res != CURLE_OK) {
            LOG_ERROR("HTTP request ultimately failed: {} ({})", curl_easy_strerror(res), errbuf);
            return "";
        }
    } else {
        LOG_ERROR("Failed to initialize CURL");
        return "";
    }

    return response_string;
}

std::string CloudModelImage::ParseResponse(const std::string &response)
{
    nlohmann::json root;
    auto result = nlohmann::json::parse(response, nullptr, false);
    if (result.is_discarded()) {
        LOG_ERROR("Failed to parse response: JSON format error");
        return "";
    }
    root = std::move(result);

    if (root.contains("choices") && !root["choices"].empty() && root["choices"][0].contains("message") &&
        root["choices"][0]["message"].contains("content")) {
        return root["choices"][0]["message"]["content"].get<std::string>();
    }

    LOG_ERROR("Failed to extract result from response: {}", response);
    return "";
}

// Extract choices[0].message.content from a text-generation response.
// NOTE: currently identical in structure to ParseResponse() except for the
// failure log message; kept separate so the two log lines stay distinct.
std::string CloudModelImage::ParseTextGenerationResponse(const std::string &response)
{
    // Non-throwing parse: malformed input yields a discarded value.
    auto doc = nlohmann::json::parse(response, nullptr, false);
    if (doc.is_discarded()) {
        LOG_ERROR("Failed to parse response: JSON format error");
        return "";
    }

    const bool has_content = doc.contains("choices") && !doc["choices"].empty() &&
                             doc["choices"][0].contains("message") &&
                             doc["choices"][0]["message"].contains("content");
    if (has_content) {
        return doc["choices"][0]["message"]["content"].get<std::string>();
    }

    LOG_ERROR("Failed to extract result from text generation response: {}", response);
    return "";
}

// 添加新的方法实现
// Estimate the token cost of an image: the image is treated as a grid of
// 28x28 patches (each dimension rounded up to a whole patch) with one token
// per patch, and a floor of 4 tokens is enforced.
size_t CloudModelImage::CalculateImageTokens(int width, int height)
{
    // Ceiling division by 28 gives the patch count per dimension directly,
    // which is equivalent to rounding up to a multiple of 28 and dividing.
    const int patches_w = (width + 27) / 28;
    const int patches_h = (height + 27) / 28;

    const size_t tokens = static_cast<size_t>(patches_w) * static_cast<size_t>(patches_h);

    // Enforce the 4-token minimum.
    return std::max<size_t>(4, tokens);
}

// Rough token estimate for a piece of text: one token per UTF-8 code point.
// Bug fix: the previous code used text.length(), which counts *bytes* — a
// Chinese character occupies 3 bytes in UTF-8, so the old estimate was ~3x
// too high for the CJK text this comment said it was meant to cover.
size_t CloudModelImage::CalculateTextTokens(const std::string &text)
{
    size_t tokens = 0;
    for (unsigned char c : text) {
        // Count only lead bytes; UTF-8 continuation bytes match 10xxxxxx.
        if ((c & 0xC0) != 0x80) {
            ++tokens;
        }
    }
    return tokens;
}

// Estimate the token cost of one chat message: sums text tokens for string
// content (or the "text" parts of multimodal content) plus image tokens for
// "image_url" parts when the original ImageData dimensions are available.
// Bug fix: the previous code used operator[] ("type"/"text") on a const
// nlohmann::json without checking the key exists — undefined behavior
// (assertion failure) for any content part missing those keys. Use value()/
// contains() for safe access instead.
size_t CloudModelImage::CalculateMessageTokens(const nlohmann::json &message, const std::optional<ImageData> &img_data)
{
    size_t total_tokens = 0;
    if (!message.contains("content")) {
        return total_tokens;
    }

    const auto &content_node = message["content"];
    if (content_node.is_array()) {
        for (const auto &part : content_node) {
            // value() returns "" when "type" is absent, so malformed parts
            // are skipped instead of triggering UB.
            const std::string part_type = part.value("type", "");
            if (part_type == "text") {
                if (part.contains("text") && part["text"].is_string()) {
                    total_tokens += CalculateTextTokens(part["text"].get<std::string>());
                }
            } else if (part_type == "image_url") {
                // Token cost needs the raw pixel dimensions, which only the
                // stored ImageData carries (the JSON holds a base64 URL).
                if (img_data) {
                    total_tokens += CalculateImageTokens(img_data->width, img_data->height);
                }
            }
        }
    } else if (content_node.is_string()) {
        total_tokens += CalculateTextTokens(content_node.get<std::string>());
    }
    return total_tokens;
}

} // namespace Cloud
} // namespace El