#include "ai_model_config.h"
#include <iostream>
#include <cstdlib>

AIModelConfig::AIModelConfig() 
    : m_currentType(AIModelType::DSAI)
    , m_currentVersion("DS-V3")
{
    SetupPresetModels();
}

// Register (or overwrite) a model version under the given type/version key.
void AIModelConfig::RegisterModel(AIModelType type, const std::string& version_id,
                                 const ModelVersion& version) {
    // operator[] creates the per-type bucket on first registration.
    auto& bucket = m_models[type];
    bucket[version_id] = version;
}

// Select the current model. If version_id is empty, the first registered
// version of the type is chosen. Returns false (selection unchanged) when
// the type is unknown, the version is unknown, or the type has no versions.
bool AIModelConfig::SetCurrentModel(AIModelType type, const std::string& version_id) {
    // Single lookup: reuse the iterator instead of repeated operator[]/find.
    const auto type_it = m_models.find(type);
    if (type_it == m_models.end()) {
        return false;
    }

    const auto& versions = type_it->second;
    if (!version_id.empty()) {
        if (versions.find(version_id) == versions.end()) {
            return false;
        }
        m_currentVersion = version_id;
    } else {
        // Use the first registered version of this type.
        if (versions.empty()) {
            // Guard: dereferencing begin() on an empty map was undefined behavior.
            return false;
        }
        m_currentVersion = versions.begin()->first;
    }

    m_currentType = type;
    return true;
}

// Return the currently selected model. If the (type, version) selection is
// not present in the registry, return a built-in DeepSeek V3 fallback.
ModelVersion AIModelConfig::GetCurrentModel() const {
    const auto type_it = m_models.find(m_currentType);
    if (type_it != m_models.end()) {
        const auto& versions = type_it->second;
        const auto version_it = versions.find(m_currentVersion);
        if (version_it != versions.end()) {
            return version_it->second;
        }
    }

    // Fallback defaults (DeepSeek V3) when the selection is unknown.
    ModelVersion fallback;
    fallback.version_id = "DS-V3";
    fallback.display_name = "DeepSeek V3";
    fallback.api_endpoint = "https://api.deepseek.com/v1/chat/completions";
    fallback.api_key_env = "DEEPSEEK_API_KEY";
    fallback.max_tokens = 4000;
    fallback.supports_streaming = true;
    return fallback;
}

// List the version IDs registered for a model type, in map iteration order.
// Returns an empty vector for an unknown type.
std::vector<std::string> AIModelConfig::GetAvailableVersions(AIModelType type) const {
    std::vector<std::string> versions;
    // Single find() instead of find() + at() (two lookups of the same key).
    const auto type_it = m_models.find(type);
    if (type_it != m_models.end()) {
        versions.reserve(type_it->second.size());  // one allocation, no reallocs
        for (const auto& entry : type_it->second) {
            versions.push_back(entry.first);
        }
    }
    return versions;
}

// Return the display name for (type, version_id); an empty version_id means
// the currently selected version. Returns "Unknown Model" when not found.
std::string AIModelConfig::GetModelDisplayName(AIModelType type, const std::string& version_id) const {
    // One find() per map level instead of four separate at()/find() lookups.
    const auto type_it = m_models.find(type);
    if (type_it != m_models.end()) {
        const std::string& actual_version = version_id.empty() ? m_currentVersion : version_id;
        const auto version_it = type_it->second.find(actual_version);
        if (version_it != type_it->second.end()) {
            return version_it->second.display_name;
        }
    }
    return "Unknown Model";
}

bool AIModelConfig::ValidateConfig() const {
    ModelVersion current = GetCurrentModel();
    if (current.api_key_env.empty()) {
        return true; // 某些模型可能不需要API密钥
    }
    
    const char* api_key = std::getenv(current.api_key_env.c_str());
    return api_key != nullptr && std::string(api_key) != "";
}

std::string AIModelConfig::GetApiKey() const {
    ModelVersion current = GetCurrentModel();
    if (current.api_key_env.empty()) {
        return "";
    }
    
    const char* api_key = std::getenv(current.api_key_env.c_str());
    return api_key ? std::string(api_key) : "";
}

// Populate the registry with the built-in preset models for each provider.
void AIModelConfig::SetupPresetModels() {
    // Single factory lambda: the three providers previously had three
    // copy-pasted builders differing only in endpoint, key env var, and
    // token limit — collapse them into one parameterized helper.
    auto make_model = [](const std::string& version_id,
                         const std::string& display_name,
                         const std::string& api_endpoint,
                         const std::string& api_key_env,
                         int max_tokens) {
        ModelVersion model;
        model.version_id = version_id;
        model.display_name = display_name;
        model.api_endpoint = api_endpoint;
        model.api_key_env = api_key_env;
        model.max_tokens = max_tokens;
        model.supports_streaming = true;  // all presets support streaming
        return model;
    };

    // Per-provider constants (endpoint, key env var, token limit).
    const std::string ds_endpoint = "https://api.deepseek.com/v1/chat/completions";
    const std::string ds_env = "DEEPSEEK_API_KEY";
    const std::string ue_endpoint = "https://ai.uesoft.com/v1/chat/completions";
    const std::string ue_env = "UESOFT_AI_KEY";
    const std::string op_endpoint = "https://api.openai.com/v1/chat/completions";
    const std::string op_env = "OPENAI_API_KEY";

    // DeepSeek AI models
    RegisterModel(AIModelType::DSAI, "DS-V3", make_model("DS-V3", "DeepSeek V3", ds_endpoint, ds_env, 4000));
    RegisterModel(AIModelType::DSAI, "DS-V3.2exp0922", make_model("DS-V3.2exp0922", "DeepSeek V3.2 Experimental 0922", ds_endpoint, ds_env, 4000));
    RegisterModel(AIModelType::DSAI, "DS-R1-0528", make_model("DS-R1-0528", "DeepSeek R1 0528", ds_endpoint, ds_env, 4000));
    RegisterModel(AIModelType::DSAI, "DS-Coder", make_model("DS-Coder", "DeepSeek Coder", ds_endpoint, ds_env, 4000));

    // UESOFT AI models
    RegisterModel(AIModelType::UEAI, "UE-Base", make_model("UE-Base", "UESOFT Base Model", ue_endpoint, ue_env, 3000));
    RegisterModel(AIModelType::UEAI, "UE-Dev", make_model("UE-Dev", "UESOFT Development", ue_endpoint, ue_env, 3000));
    RegisterModel(AIModelType::UEAI, "UE-CodeExpert", make_model("UE-CodeExpert", "UESOFT Code Expert", ue_endpoint, ue_env, 3000));

    // OpenAI models
    RegisterModel(AIModelType::OPAI, "GPT-4", make_model("GPT-4", "OpenAI GPT-4", op_endpoint, op_env, 2000));
    RegisterModel(AIModelType::OPAI, "GPT-3.5", make_model("GPT-3.5", "OpenAI GPT-3.5 Turbo", op_endpoint, op_env, 2000));
    RegisterModel(AIModelType::OPAI, "GPT-4o", make_model("GPT-4o", "OpenAI GPT-4 Omni", op_endpoint, op_env, 2000));
}
