#!/bin/bash
# Regenerates src/unified_ai_client.cpp so that builds using the -DNO_CURL
# flag do not require libcurl headers or linkage.

echo "🔧 修复 curl 依赖问题"
echo "================================"

# Recreate unified_ai_client.cpp, making sure the NO_CURL guard takes effect.
# The quoted delimiter ('FIXEDCURLCPP') disables all shell expansion inside
# the heredoc, so the C++ source below is written out verbatim.
cat > src/unified_ai_client.cpp << 'FIXEDCURLCPP'
#include "unified_ai_client.h"
#include <iostream>
#include <sstream>

// libcurl write callback: appends each received chunk to the std::string
// pointed to by `response` and reports the full chunk as consumed.
// Returning anything other than size * nmemb makes libcurl abort the
// transfer, so the stub that returned 0 would have failed every request.
//
// Defined unconditionally: the real-curl build installs it via
// CURLOPT_WRITEFUNCTION in MakeRequest (previously it was only defined
// under #ifdef NO_CURL — i.e. only in the build that never uses it — which
// left the real build without a definition). In NO_CURL builds it is
// simply unused.
static size_t WriteCallback(void* contents, size_t size, size_t nmemb, std::string* response) {
    const size_t total = size * nmemb;
    if (response != nullptr && total > 0) {
        response->append(static_cast<const char*>(contents), total);
    }
    return total;
}

// Constructs the client with DeepSeek ("DS-V3") preselected as the active
// model; callers may switch afterwards via SetModel().
UnifiedAIClient::UnifiedAIClient() {
    // Default to DeepSeek
    SetModel(AIModelType::DSAI, "DS-V3");
}

// Selects the active model by (type, version_id), delegating to the config
// object. Presumably returns false when the pair is unknown — confirm in
// the ModelConfig implementation.
bool UnifiedAIClient::SetModel(AIModelType type, const std::string& version_id) {
    return m_config.SetCurrentModel(type, version_id);
}

// Returns a copy of the currently selected model's metadata.
ModelVersion UnifiedAIClient::GetCurrentModel() const {
    return m_config.GetCurrentModel();
}

std::string UnifiedAIClient::GetModelStatus() const {
    ModelVersion model = m_config.GetCurrentModel();
    std::stringstream status;
    
    status << "当前模型: " << model.display_name << "\n";
    status << "模型ID: " << model.version_id << "\n";
    status << "API端点: " << model.api_endpoint << "\n";
    status << "最大Token: " << model.max_tokens << "\n";
    
    if (m_config.ValidateConfig()) {
        status << "🔑 API密钥: 已配置\n";
    } else {
        status << "❌ API密钥: 未配置 (需要设置 " << model.api_key_env << " 环境变量)\n";
    }
    
    status << "💡 模式: " << 
#ifdef NO_CURL
        "模拟模式 (编译时未链接 curl)"
#else
        "真实模式"
#endif
        << "\n";
    
    return status.str();
}

// Sends a chat prompt to the active model and returns the reply text.
// NO_CURL builds never touch the network: they return a canned message that
// echoes the prompt and explains how to enable real requests. Real builds
// validate the API key first, then delegate to MakeRequest.
std::string UnifiedAIClient::ChatCompletion(const std::string& prompt) {
#ifdef NO_CURL
    const ModelVersion active = m_config.GetCurrentModel();
    std::string reply;
    reply += "🤖 " + active.display_name + " 模拟响应:\n";
    reply += "提示: " + prompt + "\n\n";
    reply += "这是模拟响应。要使用真实AI功能，请:\n";
    reply += "1. 安装 curl: pacman -S curl\n";
    reply += "2. 设置API密钥: export " + active.api_key_env + "=您的密钥\n";
    reply += "3. 重新编译（去掉 -DNO_CURL 标志）\n";
    return reply;
#else
    // Fail early with a pointer to the missing environment variable.
    if (!m_config.ValidateConfig()) {
        return "错误: " + m_config.GetCurrentModel().api_key_env + " 环境变量未设置";
    }
    return MakeRequest(prompt);
#endif
}

// Asks the active model to review `code` (written in `language`) and
// suggest improvements, potential issues, and optimizations.
std::string UnifiedAIClient::AnalyzeCode(const std::string& code, const std::string& language) {
    std::string request = "请分析以下 " + language + " 代码，提供改进建议、潜在问题和优化方案:\n\n";
    request += code;
    return ChatCompletion(request);
}

// Asks the model for architecture / dependency / build advice given a flat
// list of project file paths, rendered as a bulleted list in the prompt.
std::string UnifiedAIClient::AnalyzeProjectStructure(const std::vector<std::string>& files) {
    std::string request =
        "分析以下C++项目文件结构，提供架构建议、依赖关系分析和构建优化:\n\n文件列表:\n";
    for (const std::string& path : files) {
        request.append("- ").append(path).append("\n");
    }
    request.append("\n请给出具体的构建系统优化建议。");
    return ChatCompletion(request);
}

// Asks the model to optimize the given Makefile text for performance,
// readability, and cross-platform compatibility.
std::string UnifiedAIClient::OptimizeBuildSystem(const std::string& makefileContent) {
    return ChatCompletion(
        "优化以下Makefile，提供性能改进、可读性提升和跨平台兼容性建议:\n\n" + makefileContent);
}

// Asks the model to produce documentation comments (function, parameter,
// and return-value descriptions) for the given C++ code.
std::string UnifiedAIClient::GenerateDocumentation(const std::string& code) {
    return ChatCompletion(
        "为以下C++代码生成详细的文档注释，包括函数说明、参数说明和返回值说明:\n\n" + code);
}

#ifndef NO_CURL
#include <curl/curl.h>

// Issues a blocking HTTPS POST of the serialized chat request to the active
// model's endpoint and returns either the extracted reply text or a
// human-readable error string.
//
// NOTE(review): requires a WriteCallback(void*, size_t, size_t, std::string*)
// definition to be visible above this point in non-NO_CURL builds — verify
// one is actually compiled in, since the guard at the top of this file only
// defines it under #ifdef NO_CURL.
// NOTE(review): no CURLOPT_TIMEOUT / CURLOPT_CONNECTTIMEOUT is set, so a
// stalled server can block this call indefinitely.
std::string UnifiedAIClient::MakeRequest(const std::string& prompt) {
    CURL* curl;
    CURLcode res;
    std::string response;
    
    curl = curl_easy_init();
    if (!curl) {
        return "错误: 无法初始化 cURL";
    }
    
    ModelVersion model = m_config.GetCurrentModel();
    std::string api_key = m_config.GetApiKey();
    
    // Standard bearer-token + JSON headers for the chat API.
    struct curl_slist* headers = NULL;
    headers = curl_slist_append(headers, "Content-Type: application/json");
    std::string auth_header = "Authorization: Bearer " + api_key;
    headers = curl_slist_append(headers, auth_header.c_str());
    
    std::string json_data = BuildRequestJson(prompt);
    
    curl_easy_setopt(curl, CURLOPT_URL, model.api_endpoint.c_str());
    curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers);
    curl_easy_setopt(curl, CURLOPT_POSTFIELDS, json_data.c_str());
    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, WriteCallback);
    curl_easy_setopt(curl, CURLOPT_WRITEDATA, &response);
    curl_easy_setopt(curl, CURLOPT_USERAGENT, "cbp2make-ai/1.0");
    
    res = curl_easy_perform(curl);
    
    if (res != CURLE_OK) {
        response = "网络请求失败: " + std::string(curl_easy_strerror(res));
    } else {
        // Pick the response parser by model family; the display name is
        // used as a heuristic discriminator.
        if (model.display_name.find("DeepSeek") != std::string::npos) {
            response = ExtractDeepSeekResponse(response);
        } else if (model.display_name.find("UESOFT") != std::string::npos) {
            response = ExtractUESOFTResponse(response);
        } else {
            response = ExtractOpenAIResponse(response);
        }
    }
    
    // Free the header list and handle on every path that reaches here; the
    // early returns above occur before either is allocated.
    curl_slist_free_all(headers);
    curl_easy_cleanup(curl);
    
    return response;
}
#else
// NO_CURL builds compile networking out entirely. ChatCompletion never
// calls this variant (it short-circuits first), but fail loudly if reached.
std::string UnifiedAIClient::MakeRequest(const std::string& prompt) {
    return "错误: 编译时使用了 NO_CURL 标志，无法进行网络请求";
}
#endif

// Escapes characters that may not appear raw inside a JSON string literal
// (RFC 8259): double quote, backslash, and control characters. Without
// this, any prompt containing a quote or newline produced malformed JSON.
static std::string EscapePromptForJson(const std::string& text) {
    static const char* kHex = "0123456789abcdef";
    std::string out;
    out.reserve(text.size());
    for (unsigned char c : text) {
        switch (c) {
            case '"':  out += "\\\""; break;
            case '\\': out += "\\\\"; break;
            case '\b': out += "\\b";  break;
            case '\f': out += "\\f";  break;
            case '\n': out += "\\n";  break;
            case '\r': out += "\\r";  break;
            case '\t': out += "\\t";  break;
            default:
                if (c < 0x20) {
                    // Remaining control characters -> \u00XX escapes.
                    out += "\\u00";
                    out += kHex[(c >> 4) & 0xF];
                    out += kHex[c & 0xF];
                } else {
                    out += static_cast<char>(c);
                }
                break;
        }
    }
    return out;
}

// Builds the JSON body for a chat-completions request. The wire model id is
// chosen from the display name (heuristic match); the prompt text is escaped
// so the resulting document is always valid JSON.
std::string UnifiedAIClient::BuildRequestJson(const std::string& prompt) {
    ModelVersion model = m_config.GetCurrentModel();
    
    std::stringstream json;
    json << "{";
    
    if (model.display_name.find("DeepSeek") != std::string::npos) {
        json << "\"model\": \"deepseek-chat\",";
    } else if (model.display_name.find("UESOFT") != std::string::npos) {
        json << "\"model\": \"uesoft-coder\",";
    } else {
        json << "\"model\": \"gpt-4\",";
    }
    
    json << "\"messages\": [{\"role\": \"user\", \"content\": \"" << EscapePromptForJson(prompt) << "\"}],";
    json << "\"max_tokens\": " << model.max_tokens << ",";
    json << "\"temperature\": 0.7";
    json << "}";
    
    return json.str();
}

// Pulls the assistant text out of a raw DeepSeek JSON reply by locating the
// first "content" field. The closing quote is found by scanning past
// backslash-escape pairs; the original find('"') stopped at the first \"
// inside the text and truncated the reply. Escape sequences are returned
// as-is (not unescaped), matching the original behavior for simple text.
std::string UnifiedAIClient::ExtractDeepSeekResponse(const std::string& response) {
    const std::string key = "\"content\":\"";
    size_t pos = response.find(key);
    if (pos != std::string::npos) {
        size_t start = pos + key.size();
        size_t end = start;
        while (end < response.size() && response[end] != '"') {
            // A backslash always escapes the next character inside a JSON
            // string, so skip both bytes together.
            end += (response[end] == '\\') ? 2 : 1;
        }
        if (end < response.size()) {
            return response.substr(start, end - start);
        }
    }
    return "DeepSeek: 无法解析响应: " + response;
}

// Pulls the reply text out of a raw UESOFT JSON response by locating the
// first "text" field. The closing quote is found by scanning past
// backslash-escape pairs; the original find('"') stopped at the first \"
// inside the text and truncated the reply.
std::string UnifiedAIClient::ExtractUESOFTResponse(const std::string& response) {
    const std::string key = "\"text\":\"";
    size_t pos = response.find(key);
    if (pos != std::string::npos) {
        size_t start = pos + key.size();
        size_t end = start;
        while (end < response.size() && response[end] != '"') {
            // Skip escaped character pairs (e.g. \" or \\) in one step.
            end += (response[end] == '\\') ? 2 : 1;
        }
        if (end < response.size()) {
            return response.substr(start, end - start);
        }
    }
    return "UESOFT AI: 无法解析响应: " + response;
}

// Pulls the assistant text out of a raw OpenAI-style JSON reply by locating
// the first "content" field. The closing quote is found by scanning past
// backslash-escape pairs; the original find('"') stopped at the first \"
// inside the text and truncated the reply.
std::string UnifiedAIClient::ExtractOpenAIResponse(const std::string& response) {
    const std::string key = "\"content\":\"";
    size_t pos = response.find(key);
    if (pos != std::string::npos) {
        size_t start = pos + key.size();
        size_t end = start;
        while (end < response.size() && response[end] != '"') {
            // Skip escaped character pairs (e.g. \" or \\) in one step.
            end += (response[end] == '\\') ? 2 : 1;
        }
        if (end < response.size()) {
            return response.substr(start, end - start);
        }
    }
    return "OpenAI: 无法解析响应: " + response;
}

// Streams a reply line-by-line. No true server-side streaming happens:
// the full completion is fetched first, then replayed through `callback`
// one line at a time with the trailing newline re-attached.
void UnifiedAIClient::ChatCompletionStream(const std::string& prompt, StreamCallback callback) {
    std::istringstream lines(ChatCompletion(prompt));
    for (std::string chunk; std::getline(lines, chunk); ) {
        callback(chunk + "\n");
    }
}
FIXEDCURLCPP
# The heredoc terminator above must remain unindented and unquoted.

echo "✅ 已修复 curl 依赖问题"