#!/bin/bash
# Generator script: writes the multi-provider AI model selection framework
# (config classes + unified client) into src/ as four C++ source files.

echo "🚀 实现多AI模型选择系统"
echo "===================================="
# 1. 创建模型配置类
# Generates src/ai_model_config.h: provider enum, per-version model metadata,
# and the AIModelConfig registry.
# Fix: the Create*Model factory helpers are now declared `static` — they use
# no instance state, and the companion .cpp calls CreateDSModel() from the
# const member GetCurrentModel(); a non-static, non-const member call there
# fails to compile ("discards qualifiers"). Out-of-class definitions in the
# .cpp remain valid unchanged (`static` is not repeated at the definition).
cat > src/ai_model_config.h << 'MODELH'
#ifndef AI_MODEL_CONFIG_H
#define AI_MODEL_CONFIG_H

#include <string>
#include <map>
#include <vector>

enum class AIModelType {
    DSAI,    // DeepSeek AI
    UEAI,    // UESOFT AI  
    OPAI     // OpenAI
};

struct ModelVersion {
    std::string version_id;
    std::string display_name;
    std::string api_endpoint;
    std::string api_key_env;  // 环境变量名
    int max_tokens;
    bool supports_streaming;
};

class AIModelConfig {
public:
    AIModelConfig();
    
    // 模型管理
    void RegisterModel(AIModelType type, const std::string& version_id, 
                      const ModelVersion& version);
    bool SetCurrentModel(AIModelType type, const std::string& version_id = "");
    
    // 获取模型信息
    ModelVersion GetCurrentModel() const;
    std::vector<std::string> GetAvailableVersions(AIModelType type) const;
    std::string GetModelDisplayName(AIModelType type, const std::string& version_id = "") const;
    
    // 配置验证
    bool ValidateConfig() const;
    std::string GetApiKey() const;
    
    // 预设模型
    void SetupPresetModels();
    
private:
    std::map<AIModelType, std::map<std::string, ModelVersion>> m_models;
    AIModelType m_currentType;
    std::string m_currentVersion;
    
    static ModelVersion CreateDSModel(const std::string& version_id, const std::string& display_name);
    static ModelVersion CreateUEAIModel(const std::string& version_id, const std::string& display_name);
    static ModelVersion CreateOPAIModel(const std::string& version_id, const std::string& display_name);
};

#endif // AI_MODEL_CONFIG_H
MODELH

# 2. 创建模型配置实现
# Generates src/ai_model_config.cpp: registry bookkeeping, env-var API-key
# lookup, and the preset model catalogue.
# Fix: GetCurrentModel() is const but previously called the non-const member
# CreateDSModel() to build its fallback value — a compile error ("passing
# 'const AIModelConfig' as 'this' argument discards qualifiers"). The
# fallback ModelVersion is now constructed inline, which compiles against
# both the original and the fixed header.
cat > src/ai_model_config.cpp << 'MODELCPP'
#include "ai_model_config.h"
#include <iostream>
#include <cstdlib>

AIModelConfig::AIModelConfig() 
    : m_currentType(AIModelType::DSAI)
    , m_currentVersion("DS-V3")
{
    SetupPresetModels();
}

void AIModelConfig::RegisterModel(AIModelType type, const std::string& version_id, 
                                 const ModelVersion& version) {
    m_models[type][version_id] = version;
}

bool AIModelConfig::SetCurrentModel(AIModelType type, const std::string& version_id) {
    if (m_models.find(type) == m_models.end()) {
        return false;
    }
    
    if (!version_id.empty()) {
        if (m_models[type].find(version_id) == m_models[type].end()) {
            return false;
        }
        m_currentVersion = version_id;
    } else {
        // 使用该类型的第一个版本
        m_currentVersion = m_models[type].begin()->first;
    }
    
    m_currentType = type;
    return true;
}

ModelVersion AIModelConfig::GetCurrentModel() const {
    if (m_models.find(m_currentType) != m_models.end() &&
        m_models.at(m_currentType).find(m_currentVersion) != m_models.at(m_currentType).end()) {
        return m_models.at(m_currentType).at(m_currentVersion);
    }
    
    // 返回默认模型
    // Built inline: calling the non-const member CreateDSModel() from this
    // const method would not compile.
    ModelVersion fallback;
    fallback.version_id = "DS-V3";
    fallback.display_name = "DeepSeek V3";
    fallback.api_endpoint = "https://api.deepseek.com/v1/chat/completions";
    fallback.api_key_env = "DEEPSEEK_API_KEY";
    fallback.max_tokens = 4000;
    fallback.supports_streaming = true;
    return fallback;
}

std::vector<std::string> AIModelConfig::GetAvailableVersions(AIModelType type) const {
    std::vector<std::string> versions;
    if (m_models.find(type) != m_models.end()) {
        for (const auto& pair : m_models.at(type)) {
            versions.push_back(pair.first);
        }
    }
    return versions;
}

std::string AIModelConfig::GetModelDisplayName(AIModelType type, const std::string& version_id) const {
    if (m_models.find(type) != m_models.end()) {
        std::string actual_version = version_id.empty() ? m_currentVersion : version_id;
        if (m_models.at(type).find(actual_version) != m_models.at(type).end()) {
            return m_models.at(type).at(actual_version).display_name;
        }
    }
    return "Unknown Model";
}

bool AIModelConfig::ValidateConfig() const {
    ModelVersion current = GetCurrentModel();
    if (current.api_key_env.empty()) {
        return true; // 某些模型可能不需要API密钥
    }
    
    const char* api_key = std::getenv(current.api_key_env.c_str());
    return api_key != nullptr && std::string(api_key) != "";
}

std::string AIModelConfig::GetApiKey() const {
    ModelVersion current = GetCurrentModel();
    if (current.api_key_env.empty()) {
        return "";
    }
    
    const char* api_key = std::getenv(current.api_key_env.c_str());
    return api_key ? std::string(api_key) : "";
}

void AIModelConfig::SetupPresetModels() {
    // DeepSeek AI 模型
    RegisterModel(AIModelType::DSAI, "DS-V3", CreateDSModel("DS-V3", "DeepSeek V3"));
    RegisterModel(AIModelType::DSAI, "DS-V3.2exp0922", CreateDSModel("DS-V3.2exp0922", "DeepSeek V3.2 Experimental 0922"));
    RegisterModel(AIModelType::DSAI, "DS-R1-0528", CreateDSModel("DS-R1-0528", "DeepSeek R1 0528"));
    RegisterModel(AIModelType::DSAI, "DS-Coder", CreateDSModel("DS-Coder", "DeepSeek Coder"));
    
    // UESOFT AI 模型
    RegisterModel(AIModelType::UEAI, "UE-Base", CreateUEAIModel("UE-Base", "UESOFT Base Model"));
    RegisterModel(AIModelType::UEAI, "UE-Dev", CreateUEAIModel("UE-Dev", "UESOFT Development"));
    RegisterModel(AIModelType::UEAI, "UE-CodeExpert", CreateUEAIModel("UE-CodeExpert", "UESOFT Code Expert"));
    
    // OpenAI 模型
    RegisterModel(AIModelType::OPAI, "GPT-4", CreateOPAIModel("GPT-4", "OpenAI GPT-4"));
    RegisterModel(AIModelType::OPAI, "GPT-3.5", CreateOPAIModel("GPT-3.5", "OpenAI GPT-3.5 Turbo"));
    RegisterModel(AIModelType::OPAI, "GPT-4o", CreateOPAIModel("GPT-4o", "OpenAI GPT-4 Omni"));
}

ModelVersion AIModelConfig::CreateDSModel(const std::string& version_id, const std::string& display_name) {
    ModelVersion model;
    model.version_id = version_id;
    model.display_name = display_name;
    model.api_endpoint = "https://api.deepseek.com/v1/chat/completions";
    model.api_key_env = "DEEPSEEK_API_KEY";
    model.max_tokens = 4000;
    model.supports_streaming = true;
    return model;
}

ModelVersion AIModelConfig::CreateUEAIModel(const std::string& version_id, const std::string& display_name) {
    ModelVersion model;
    model.version_id = version_id;
    model.display_name = display_name;
    model.api_endpoint = "https://ai.uesoft.com/v1/chat/completions";
    model.api_key_env = "UESOFT_AI_KEY";
    model.max_tokens = 3000;
    model.supports_streaming = true;
    return model;
}

ModelVersion AIModelConfig::CreateOPAIModel(const std::string& version_id, const std::string& display_name) {
    ModelVersion model;
    model.version_id = version_id;
    model.display_name = display_name;
    model.api_endpoint = "https://api.openai.com/v1/chat/completions";
    model.api_key_env = "OPENAI_API_KEY";
    model.max_tokens = 2000;
    model.supports_streaming = true;
    return model;
}
MODELCPP

# 3. 创建统一的AI客户端
# Generates src/unified_ai_client.h: one client facade over all providers.
# It owns an AIModelConfig and exposes chat/code-analysis helpers plus a
# (simplified) streaming callback API. Heredoc is quoted ('UNIFIEDH'), so
# the content below is written to the file verbatim, with no shell expansion.
cat > src/unified_ai_client.h << 'UNIFIEDH'
#ifndef UNIFIED_AI_CLIENT_H
#define UNIFIED_AI_CLIENT_H

#include "ai_model_config.h"
#include <string>
#include <vector>
#include <functional>

class UnifiedAIClient {
public:
    UnifiedAIClient();
    
    // 模型配置
    bool SetModel(AIModelType type, const std::string& version_id = "");
    ModelVersion GetCurrentModel() const;
    std::string GetModelStatus() const;
    
    // AI 功能
    std::string ChatCompletion(const std::string& prompt);
    std::string AnalyzeCode(const std::string& code, const std::string& language = "cpp");
    std::string AnalyzeProjectStructure(const std::vector<std::string>& files);
    std::string OptimizeBuildSystem(const std::string& makefileContent);
    std::string GenerateDocumentation(const std::string& code);
    
    // 流式响应回调
    using StreamCallback = std::function<void(const std::string& chunk)>;
    void ChatCompletionStream(const std::string& prompt, StreamCallback callback);
    
private:
    AIModelConfig m_config;
    
    std::string MakeRequest(const std::string& prompt);
    std::string BuildRequestJson(const std::string& prompt);
    std::string ExtractContentFromResponse(const std::string& response, const std::string& model_type);
    
    // 各模型特定的响应解析
    std::string ExtractDeepSeekResponse(const std::string& response);
    std::string ExtractUESOFTResponse(const std::string& response);
    std::string ExtractOpenAIResponse(const std::string& response);
};

#endif // UNIFIED_AI_CLIENT_H
UNIFIEDH

# 4. 创建统一客户端实现
# Generates src/unified_ai_client.cpp: libcurl-based request path plus naive
# per-provider response extraction.
# Fixes to the generated code:
#  - BuildRequestJson previously spliced the raw prompt into the JSON body.
#    Every prompt built by AnalyzeCode/AnalyzeProjectStructure/etc. contains
#    literal newlines (and user code contains quotes), so the request body
#    was invalid JSON. A file-local JsonEscape() helper now escapes the
#    prompt (requires <cstdio> for snprintf).
#  - Removed the unused local `current_type` in MakeRequest (dead code that
#    only produced a -Wunused-variable warning).
cat > src/unified_ai_client.cpp << 'UNIFIEDCPP'
#include "unified_ai_client.h"
#include <iostream>
#include <sstream>
#include <cstdio>
#include <curl/curl.h>

// 写回调用函数
static size_t WriteCallback(void* contents, size_t size, size_t nmemb, std::string* response) {
    size_t totalSize = size * nmemb;
    response->append((char*)contents, totalSize);
    return totalSize;
}

// JSON 字符串转义: escape quotes, backslashes and control characters so the
// prompt cannot break the request body (RFC 8259 string rules).
static std::string JsonEscape(const std::string& input) {
    std::string out;
    out.reserve(input.size());
    for (char c : input) {
        switch (c) {
            case '"':  out += "\\\""; break;
            case '\\': out += "\\\\"; break;
            case '\n': out += "\\n";  break;
            case '\r': out += "\\r";  break;
            case '\t': out += "\\t";  break;
            default:
                if (static_cast<unsigned char>(c) < 0x20) {
                    char buf[8];
                    std::snprintf(buf, sizeof(buf), "\\u%04x",
                                  static_cast<unsigned>(static_cast<unsigned char>(c)));
                    out += buf;
                } else {
                    out += c;
                }
                break;
        }
    }
    return out;
}

UnifiedAIClient::UnifiedAIClient() {
    // 默认使用 DeepSeek
    SetModel(AIModelType::DSAI, "DS-V3");
}

bool UnifiedAIClient::SetModel(AIModelType type, const std::string& version_id) {
    return m_config.SetCurrentModel(type, version_id);
}

ModelVersion UnifiedAIClient::GetCurrentModel() const {
    return m_config.GetCurrentModel();
}

std::string UnifiedAIClient::GetModelStatus() const {
    ModelVersion model = m_config.GetCurrentModel();
    std::stringstream status;
    
    status << "当前模型: " << model.display_name << "\n";
    status << "模型ID: " << model.version_id << "\n";
    status << "API端点: " << model.api_endpoint << "\n";
    status << "最大Token: " << model.max_tokens << "\n";
    
    if (m_config.ValidateConfig()) {
        status << "🔑 API密钥: 已配置\n";
    } else {
        status << "❌ API密钥: 未配置 (需要设置 " << model.api_key_env << " 环境变量)\n";
    }
    
    return status.str();
}

std::string UnifiedAIClient::ChatCompletion(const std::string& prompt) {
    if (!m_config.ValidateConfig()) {
        return "错误: " + m_config.GetCurrentModel().api_key_env + " 环境变量未设置";
    }
    
    return MakeRequest(prompt);
}

std::string UnifiedAIClient::AnalyzeCode(const std::string& code, const std::string& language) {
    std::string prompt = "请分析以下 " + language + " 代码，提供改进建议、潜在问题和优化方案:\n\n" + code;
    return ChatCompletion(prompt);
}

std::string UnifiedAIClient::AnalyzeProjectStructure(const std::vector<std::string>& files) {
    std::string prompt = "分析以下C++项目文件结构，提供架构建议、依赖关系分析和构建优化:\n\n文件列表:\n";
    for (const auto& file : files) {
        prompt += "- " + file + "\n";
    }
    prompt += "\n请给出具体的构建系统优化建议。";
    return ChatCompletion(prompt);
}

std::string UnifiedAIClient::OptimizeBuildSystem(const std::string& makefileContent) {
    std::string prompt = "优化以下Makefile，提供性能改进、可读性提升和跨平台兼容性建议:\n\n" + makefileContent;
    return ChatCompletion(prompt);
}

std::string UnifiedAIClient::GenerateDocumentation(const std::string& code) {
    std::string prompt = "为以下C++代码生成详细的文档注释，包括函数说明、参数说明和返回值说明:\n\n" + code;
    return ChatCompletion(prompt);
}

std::string UnifiedAIClient::MakeRequest(const std::string& prompt) {
    CURL* curl;
    CURLcode res;
    std::string response;
    
    curl = curl_easy_init();
    if (!curl) {
        return "错误: 无法初始化 cURL";
    }
    
    ModelVersion model = m_config.GetCurrentModel();
    std::string api_key = m_config.GetApiKey();
    
    struct curl_slist* headers = NULL;
    headers = curl_slist_append(headers, "Content-Type: application/json");
    std::string auth_header = "Authorization: Bearer " + api_key;
    headers = curl_slist_append(headers, auth_header.c_str());
    
    std::string json_data = BuildRequestJson(prompt);
    
    curl_easy_setopt(curl, CURLOPT_URL, model.api_endpoint.c_str());
    curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers);
    curl_easy_setopt(curl, CURLOPT_POSTFIELDS, json_data.c_str());
    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, WriteCallback);
    curl_easy_setopt(curl, CURLOPT_WRITEDATA, &response);
    curl_easy_setopt(curl, CURLOPT_USERAGENT, "cbp2make-ai/1.0");
    
    res = curl_easy_perform(curl);
    
    if (res != CURLE_OK) {
        response = "网络请求失败: " + std::string(curl_easy_strerror(res));
    } else {
        // 根据模型类型解析响应 (dispatch on display name; the current
        // AIModelType is not exposed by AIModelConfig)
        if (model.display_name.find("DeepSeek") != std::string::npos) {
            response = ExtractDeepSeekResponse(response);
        } else if (model.display_name.find("UESOFT") != std::string::npos) {
            response = ExtractUESOFTResponse(response);
        } else {
            response = ExtractOpenAIResponse(response);
        }
    }
    
    curl_slist_free_all(headers);
    curl_easy_cleanup(curl);
    
    return response;
}

std::string UnifiedAIClient::BuildRequestJson(const std::string& prompt) {
    ModelVersion model = m_config.GetCurrentModel();
    
    std::stringstream json;
    json << "{";
    
    if (model.display_name.find("DeepSeek") != std::string::npos) {
        json << "\"model\": \"deepseek-chat\",";
    } else if (model.display_name.find("UESOFT") != std::string::npos) {
        json << "\"model\": \"uesoft-coder\",";
    } else {
        json << "\"model\": \"gpt-4\",";
    }
    
    // 转义提示词，避免引号/换行破坏 JSON 请求体
    json << "\"messages\": [{\"role\": \"user\", \"content\": \"" << JsonEscape(prompt) << "\"}],";
    json << "\"max_tokens\": " << model.max_tokens << ",";
    json << "\"temperature\": 0.7";
    json << "}";
    
    return json.str();
}

std::string UnifiedAIClient::ExtractDeepSeekResponse(const std::string& response) {
    size_t pos = response.find("\"content\":\"");
    if (pos != std::string::npos) {
        size_t start = pos + 11;
        size_t end = response.find("\"", start);
        if (end != std::string::npos) {
            return response.substr(start, end - start);
        }
    }
    return "DeepSeek: 无法解析响应: " + response;
}

std::string UnifiedAIClient::ExtractUESOFTResponse(const std::string& response) {
    size_t pos = response.find("\"text\":\"");
    if (pos != std::string::npos) {
        size_t start = pos + 8;
        size_t end = response.find("\"", start);
        if (end != std::string::npos) {
            return response.substr(start, end - start);
        }
    }
    return "UESOFT AI: 无法解析响应: " + response;
}

std::string UnifiedAIClient::ExtractOpenAIResponse(const std::string& response) {
    size_t pos = response.find("\"content\":\"");
    if (pos != std::string::npos) {
        size_t start = pos + 11;
        size_t end = response.find("\"", start);
        if (end != std::string::npos) {
            return response.substr(start, end - start);
        }
    }
    return "OpenAI: 无法解析响应: " + response;
}

void UnifiedAIClient::ChatCompletionStream(const std::string& prompt, StreamCallback callback) {
    // 流式响应实现（简化版）
    std::string response = ChatCompletion(prompt);
    // 模拟流式输出
    std::istringstream stream(response);
    std::string line;
    while (std::getline(stream, line)) {
        callback(line + "\n");
    }
}
UNIFIEDCPP

# Final summary: list generated model presets and the env vars each provider
# reads its API key from (must match api_key_env values in the generated code).
echo "✅ 多模型AI系统框架已创建"
echo ""
echo "📋 支持的模型:"
echo "  DeepSeek AI: DS-V3, DS-V3.2exp0922, DS-R1-0528, DS-Coder"
echo "  UESOFT AI: UE-Base, UE-Dev, UE-CodeExpert"  
echo "  OpenAI: GPT-4, GPT-3.5, GPT-4o"
echo ""
echo "🔧 环境变量配置:"
echo "  DeepSeek: DEEPSEEK_API_KEY"
echo "  UESOFT: UESOFT_AI_KEY"
echo "  OPENAI: OPENAI_API_KEY"