#!/bin/bash
#
# Repair script: regenerates src/ai_model_config.cpp and appends missing
# member-function definitions to src/aicoder_enhancements.cpp.
#
# Fail fast: without `set -e`/`pipefail` the script would fall through and
# print the final success message even when one of the file writes failed.
# `-u` catches typos in variable names (none are used today, but it is cheap).
set -euo pipefail

echo "🔧 修复编译错误"
echo "================================"

# 1. Fix the const-correctness problem in ai_model_config.cpp by rewriting
#    the whole file from the heredoc below.
#    `mkdir -p` first: `cat > src/...` cannot create the directory itself and
#    would otherwise fail when run from a fresh checkout or the wrong cwd.
mkdir -p src
cat > src/ai_model_config.cpp << 'FIXEDMODELCPP'
#include "ai_model_config.h"
#include <iostream>
#include <cstdlib>

AIModelConfig::AIModelConfig() 
    : m_currentType(AIModelType::DSAI)
    , m_currentVersion("DS-V3")
{
    SetupPresetModels();
}

void AIModelConfig::RegisterModel(AIModelType type, const std::string& version_id, 
                                 const ModelVersion& version) {
    m_models[type][version_id] = version;
}

bool AIModelConfig::SetCurrentModel(AIModelType type, const std::string& version_id) {
    if (m_models.find(type) == m_models.end()) {
        return false;
    }
    
    if (!version_id.empty()) {
        if (m_models[type].find(version_id) == m_models[type].end()) {
            return false;
        }
        m_currentVersion = version_id;
    } else {
        // 使用该类型的第一个版本
        m_currentVersion = m_models[type].begin()->first;
    }
    
    m_currentType = type;
    return true;
}

ModelVersion AIModelConfig::GetCurrentModel() const {
    auto type_it = m_models.find(m_currentType);
    if (type_it != m_models.end()) {
        auto version_it = type_it->second.find(m_currentVersion);
        if (version_it != type_it->second.end()) {
            return version_it->second;
        }
    }
    
    // 返回默认模型 - 使用静态函数避免 const 问题
    static ModelVersion default_model = [](){
        ModelVersion model;
        model.version_id = "DS-V3";
        model.display_name = "DeepSeek V3";
        model.api_endpoint = "https://api.deepseek.com/v1/chat/completions";
        model.api_key_env = "DEEPSEEK_API_KEY";
        model.max_tokens = 4000;
        model.supports_streaming = true;
        return model;
    }();
    
    return default_model;
}

std::vector<std::string> AIModelConfig::GetAvailableVersions(AIModelType type) const {
    std::vector<std::string> versions;
    if (m_models.find(type) != m_models.end()) {
        for (const auto& pair : m_models.at(type)) {
            versions.push_back(pair.first);
        }
    }
    return versions;
}

std::string AIModelConfig::GetModelDisplayName(AIModelType type, const std::string& version_id) const {
    if (m_models.find(type) != m_models.end()) {
        std::string actual_version = version_id.empty() ? m_currentVersion : version_id;
        if (m_models.at(type).find(actual_version) != m_models.at(type).end()) {
            return m_models.at(type).at(actual_version).display_name;
        }
    }
    return "Unknown Model";
}

bool AIModelConfig::ValidateConfig() const {
    ModelVersion current = GetCurrentModel();
    if (current.api_key_env.empty()) {
        return true; // 某些模型可能不需要API密钥
    }
    
    const char* api_key = std::getenv(current.api_key_env.c_str());
    return api_key != nullptr && std::string(api_key) != "";
}

std::string AIModelConfig::GetApiKey() const {
    ModelVersion current = GetCurrentModel();
    if (current.api_key_env.empty()) {
        return "";
    }
    
    const char* api_key = std::getenv(current.api_key_env.c_str());
    return api_key ? std::string(api_key) : "";
}

void AIModelConfig::SetupPresetModels() {
    // 创建模型的辅助函数
    auto create_ds_model = [](const std::string& version_id, const std::string& display_name) {
        ModelVersion model;
        model.version_id = version_id;
        model.display_name = display_name;
        model.api_endpoint = "https://api.deepseek.com/v1/chat/completions";
        model.api_key_env = "DEEPSEEK_API_KEY";
        model.max_tokens = 4000;
        model.supports_streaming = true;
        return model;
    };
    
    auto create_ueai_model = [](const std::string& version_id, const std::string& display_name) {
        ModelVersion model;
        model.version_id = version_id;
        model.display_name = display_name;
        model.api_endpoint = "https://ai.uesoft.com/v1/chat/completions";
        model.api_key_env = "UESOFT_AI_KEY";
        model.max_tokens = 3000;
        model.supports_streaming = true;
        return model;
    };
    
    auto create_opai_model = [](const std::string& version_id, const std::string& display_name) {
        ModelVersion model;
        model.version_id = version_id;
        model.display_name = display_name;
        model.api_endpoint = "https://api.openai.com/v1/chat/completions";
        model.api_key_env = "OPENAI_API_KEY";
        model.max_tokens = 2000;
        model.supports_streaming = true;
        return model;
    };

    // DeepSeek AI 模型
    RegisterModel(AIModelType::DSAI, "DS-V3", create_ds_model("DS-V3", "DeepSeek V3"));
    RegisterModel(AIModelType::DSAI, "DS-V3.2exp0922", create_ds_model("DS-V3.2exp0922", "DeepSeek V3.2 Experimental 0922"));
    RegisterModel(AIModelType::DSAI, "DS-R1-0528", create_ds_model("DS-R1-0528", "DeepSeek R1 0528"));
    RegisterModel(AIModelType::DSAI, "DS-Coder", create_ds_model("DS-Coder", "DeepSeek Coder"));
    
    // UESOFT AI 模型
    RegisterModel(AIModelType::UEAI, "UE-Base", create_ueai_model("UE-Base", "UESOFT Base Model"));
    RegisterModel(AIModelType::UEAI, "UE-Dev", create_ueai_model("UE-Dev", "UESOFT Development"));
    RegisterModel(AIModelType::UEAI, "UE-CodeExpert", create_ueai_model("UE-CodeExpert", "UESOFT Code Expert"));
    
    // OpenAI 模型
    RegisterModel(AIModelType::OPAI, "GPT-4", create_opai_model("GPT-4", "OpenAI GPT-4"));
    RegisterModel(AIModelType::OPAI, "GPT-3.5", create_opai_model("GPT-3.5", "OpenAI GPT-3.5 Turbo"));
    RegisterModel(AIModelType::OPAI, "GPT-4o", create_opai_model("GPT-4o", "OpenAI GPT-4 Omni"));
}
FIXEDMODELCPP

# 2. Append the missing member-function definitions to aicoder_enhancements.cpp.
#    Idempotence guard: `cat >>` appends unconditionally, so running this
#    repair script a second time would duplicate every definition below and
#    itself cause redefinition errors at compile time. Only append when the
#    sentinel function is not already present in the target file.
#    (grep stderr is silenced so a missing target file just means "append".)
if ! grep -q 'AICoderEnhancements::AnalyzeProjectStructure' src/aicoder_enhancements.cpp 2>/dev/null; then
cat >> src/aicoder_enhancements.cpp << 'MISSINGFUNCTIONS'

// 实现缺失的函数
std::string AICoderEnhancements::AnalyzeProjectStructure(const std::string& projectFile) {
    if (!m_aiEnabled) {
        return "AI 功能未启用";
    }
    
    std::vector<std::string> files = ParseSourceFiles(projectFile);
    return m_aiClient.AnalyzeProjectStructure(files);
}

std::string AICoderEnhancements::OptimizeMakefile(const std::string& makefilePath) {
    if (!m_aiEnabled) {
        return "AI 功能未启用";
    }
    
    std::string content = ReadFileContent(makefilePath);
    if (content.empty()) {
        return "错误: 无法读取 Makefile: " + makefilePath;
    }
    
    return m_aiClient.OptimizeBuildSystem(content);
}

void AICoderEnhancements::GenerateCodeDocumentation(const std::vector<std::string>& files) {
    if (!m_aiEnabled) {
        std::cout << "AI 功能未启用" << std::endl;
        return;
    }
    
    std::cout << "AI: 开始为 " << files.size() << " 个文件生成文档...\n";
    
    for (const auto& file : files) {
        std::cout << "📝 处理: " << file << "\n";
        std::string code = ReadFileContent(file);
        if (!code.empty()) {
            std::string documentation = m_aiClient.GenerateDocumentation(code);
            std::string outputFile = file + ".ai_docs.txt";
            std::ofstream out(outputFile);
            if (out.is_open()) {
                out << "// AI 生成的文档 - 文件: " << file << "\n";
                out << "// 生成时间: [当前时间]\n";
                out << documentation << "\n";
                out.close();
                std::cout << "  ✅ 文档已保存到: " << outputFile << "\n";
            }
        } else {
            std::cout << "  ❌ 无法读取文件: " << file << "\n";
        }
    }
    
    std::cout << "✅ 文档生成完成!\n";
}

std::string AICoderEnhancements::ChatWithAI(const std::string& message) {
    if (!m_aiEnabled) {
        return "AI 功能未启用";
    }
    
    return m_aiClient.ChatCompletion(message);
}

void AICoderEnhancements::StartInteractiveMode() {
    if (!m_aiEnabled) {
        std::cout << "AI 功能未启用" << std::endl;
        return;
    }
    
    std::cout << "🤖 启动 AI 交互模式 (当前模型: " << m_aiClient.GetModelStatus() << ")\n";
    std::cout << "===================\n";
    std::cout << "输入 'quit' 退出交互模式\n";
    std::cout << "输入 'help' 查看可用命令\n";
    std::cout << "输入 'model' 查看当前模型状态\n\n";
    
    std::string input;
    
    while (true) {
        std::cout << "AI> ";
        std::getline(std::cin, input);
        
        if (input == "quit" || input == "exit") {
            std::cout << "退出交互模式\n";
            break;
        }
        else if (input == "help") {
            std::cout << "可用命令:\n";
            std::cout << "  analyze <project>   分析项目\n";
            std::cout << "  optimize <file>     优化文件\n";
            std::cout << "  model               查看模型状态\n";
            std::cout << "  quit                退出\n";
        }
        else if (input == "model") {
            std::cout << m_aiClient.GetModelStatus() << "\n";
        }
        else if (input.find("analyze ") == 0) {
            std::string project = input.substr(8);
            std::cout << "分析项目: " << project << "\n";
            std::string result = AnalyzeProjectStructure(project);
            std::cout << "分析结果:\n" << result << "\n";
        }
        else if (input.find("optimize ") == 0) {
            std::string file = input.substr(9);
            std::cout << "优化文件: " << file << "\n";
            std::string result = OptimizeMakefile(file);
            std::cout << "优化建议:\n" << result << "\n";
        }
        else if (!input.empty()) {
            std::string response = ChatWithAI(input);
            std::cout << "🤖 " << response << "\n";
        }
    }
}

std::vector<std::string> AICoderEnhancements::ParseSourceFiles(const std::string& cbpFile) {
    std::vector<std::string> files;
    // 简化的解析 - 实际应该解析 .cbp 文件
    files.push_back("src/main.cpp");
    files.push_back("src/utils.cpp");
    files.push_back("include/utils.h");
    return files;
}

std::string AICoderEnhancements::ReadFileContent(const std::string& filePath) {
    std::ifstream file(filePath);
    if (!file.is_open()) {
        return "";
    }
    
    std::stringstream buffer;
    buffer << file.rdbuf();
    return buffer.str();
}
MISSINGFUNCTIONS
fi

# Final status line; printf with an explicit format produces the exact same
# output as the original `echo` (string plus trailing newline).
printf '%s\n' "✅ 编译错误已修复"
