function quick_bayesian_test()
% Quick smoke test for Bayesian-optimized SVM training.
% Verifies that Bayesian hyperparameter optimization works in this
% environment, and compares it against a traditional grid search.

fprintf('===== 快速贝叶斯优化测试 =====\n');
fprintf('测试目标: 验证贝叶斯优化功能是否正常工作\n\n');

% Step 1: check MATLAB version, toolbox license, and required functions.
fprintf('【步骤1/4】检查依赖和环境...\n');
checkDependencies();

% Step 2: build a small synthetic multi-class dataset.
fprintf('\n【步骤2/4】生成测试数据...\n');
[X_test, Y_test] = generateTestData();
fprintf('✓ 测试数据生成完成: %d样本, %d特征, %d类别\n', ...
        size(X_test, 1), size(X_test, 2), length(unique(Y_test)));

% Step 3: run the Bayesian-optimization path; a thrown error marks failure.
fprintf('\n【步骤3/4】测试贝叶斯优化...\n');
okBayes = false;
try
    [~, bayesResults] = testBayesianOptimization(X_test, Y_test);
    fprintf('✓ 贝叶斯优化测试成功\n');
    fprintf('  最优KernelScale: %.4f\n', bayesResults.bestParams.KernelScale);
    fprintf('  最优BoxConstraint: %.4f\n', bayesResults.bestParams.BoxConstraint);
    fprintf('  评估次数: %d\n', bayesResults.numEvaluations);
    okBayes = true;
catch ME
    fprintf('❌ 贝叶斯优化测试失败: %s\n', ME.message);
end

% Step 4: run the traditional grid search for comparison.
fprintf('\n【步骤4/4】测试传统网格搜索对比...\n');
okGrid = false;
try
    [~, gridResults] = testGridSearch(X_test, Y_test);
    fprintf('✓ 网格搜索测试成功\n');
    fprintf('  最优KernelScale: %.4f\n', gridResults.bestParams.KernelScale);
    fprintf('  最优BoxConstraint: %.4f\n', gridResults.bestParams.BoxConstraint);
    fprintf('  评估次数: %d\n', gridResults.numEvaluations);
    okGrid = true;
catch ME
    fprintf('❌ 网格搜索测试失败: %s\n', ME.message);
end

% Summarize the outcome of both methods.
fprintf('\n===== 测试结果总结 =====\n');
if ~okBayes && ~okGrid
    fprintf('❌ 两种方法都测试失败\n');
    fprintf('建议: 检查MATLAB版本和Statistics and Machine Learning Toolbox\n');

elseif okBayes && ~okGrid
    fprintf('✓ 贝叶斯优化测试成功\n');
    fprintf('⚠ 网格搜索测试失败\n');

elseif okGrid && ~okBayes
    fprintf('⚠ 贝叶斯优化测试失败\n');
    fprintf('✓ 网格搜索测试成功\n');

else
    % Both succeeded: print a side-by-side parameter and cost comparison.
    fprintf('🎉 两种方法都测试成功！\n');
    fprintf('\n参数对比:\n');
    fprintf('%-15s | %-12s | %-12s\n', '方法', 'KernelScale', 'BoxConstraint');
    fprintf('%s\n', repmat('-', 1, 45));
    fprintf('%-15s | %-12.4f | %-12.4f\n', '贝叶斯优化', ...
            bayesResults.bestParams.KernelScale, bayesResults.bestParams.BoxConstraint);
    fprintf('%-15s | %-12.4f | %-12.4f\n', '网格搜索', ...
            gridResults.bestParams.KernelScale, gridResults.bestParams.BoxConstraint);

    fprintf('\n评估效率对比:\n');
    fprintf('- 贝叶斯优化评估次数: %d\n', bayesResults.numEvaluations);
    fprintf('- 网格搜索评估次数: %d\n', gridResults.numEvaluations);

    if bayesResults.numEvaluations < gridResults.numEvaluations
        fprintf('✓ 贝叶斯优化更高效 (减少%d次评估)\n', ...
                gridResults.numEvaluations - bayesResults.numEvaluations);
    end
end

% Recommendation based on whether Bayesian optimization worked here.
fprintf('\n📋 使用建议:\n');
if okBayes
    fprintf('✓ 您的环境支持贝叶斯优化，推荐使用 bayesian_svm_training.m\n');
    fprintf('✓ 可以在主程序中集成贝叶斯优化功能\n');
else
    fprintf('⚠ 贝叶斯优化不可用，建议使用传统网格搜索\n');
    fprintf('  可能原因: MATLAB版本过低或缺少相关工具箱\n');
end

fprintf('\n===== 快速测试完成 =====\n');

end

function checkDependencies()
% Check required dependencies and print a status line for each:
% MATLAB version, toolbox license, fitcsvm availability, and whether
% the MATLAB release supports Bayesian hyperparameter optimization.

fprintf('  检查MATLAB版本...\n');
version_info = version('-release');
fprintf('    MATLAB版本: %s\n', version_info);

% Check the Statistics and Machine Learning Toolbox license.
% NOTE: license('test', ...) returns 0/1 and does NOT throw on failure,
% so the return value must be checked explicitly — the previous
% try/catch version always reported success.
fprintf('  检查Statistics and Machine Learning Toolbox...\n');
if license('test', 'statistics_toolbox') == 1
    fprintf('    ✓ Statistics and Machine Learning Toolbox 可用\n');
else
    fprintf('    ❌ Statistics and Machine Learning Toolbox 不可用\n');
end

% Check that fitcsvm resolves (exist returns a nonzero code when found).
fprintf('  检查fitcsvm函数...\n');
if exist('fitcsvm', 'file') > 0
    fprintf('    ✓ fitcsvm函数可用\n');
else
    fprintf('    ❌ fitcsvm函数不可用\n');
end

% Bayesian optimization of fitcsvm hyperparameters requires R2016b (9.1)+.
fprintf('  检查贝叶斯优化支持...\n');
if verLessThan('matlab', '9.1')
    fprintf('    ⚠ MATLAB版本可能不支持完整的贝叶斯优化功能\n');
else
    fprintf('    ✓ MATLAB版本支持贝叶斯优化\n');
end

end

function [X_test, Y_test] = generateTestData()
% Generate a synthetic multi-class dataset for the quick test.
% Returns:
%   X_test - (numSamples x numFeatures) feature matrix: one Gaussian
%            cluster per class with a random mean, plus mild noise.
%   Y_test - (numSamples x 1) integer class labels 1..numClasses.

% Fixed seed so the test data is reproducible across runs.
rng(123, 'twister');

numSamples  = 150;
numFeatures = 10;
numClasses  = 3;

% Derive the per-class count from the totals instead of hard-coding 50
% (the original declared numSamples but never used it).
classSamples = numSamples / numClasses;

% Preallocate instead of growing arrays inside the loop.
X_test = zeros(numSamples, numFeatures);
Y_test = zeros(numSamples, 1);

for class = 1:numClasses
    rows = (class - 1) * classSamples + (1:classSamples);

    % Gaussian cluster centered at a random per-class mean.
    classMean = randn(1, numFeatures) * 2;
    X_test(rows, :) = randn(classSamples, numFeatures) + ...
                      repmat(classMean, classSamples, 1);
    Y_test(rows) = class;
end

% Add mild noise to make the classification problem slightly harder.
X_test = X_test + randn(size(X_test)) * 0.1;

end

function [svmModel, results] = testBayesianOptimization(X_train, Y_train)
% Run a small Bayesian hyperparameter optimization of an RBF SVM.
% Inputs:
%   X_train, Y_train - feature matrix and class labels.
% Outputs:
%   svmModel - trained ClassificationSVM with the optimized parameters.
%   results  - struct with fields bestParams (KernelScale, BoxConstraint),
%              bestObjective, and numEvaluations.

fprintf('  执行贝叶斯优化测试...\n');

% 3-fold cross-validation keeps the test quick.
cvPart = cvpartition(Y_train, 'KFold', 3);

% Optimization options tuned for speed: few evaluations, a time cap,
% and all plotting/verbosity disabled.
bayesOpts = struct( ...
    'AcquisitionFunctionName', 'expected-improvement-plus', ...
    'MaxObjectiveEvaluations', 15, ...
    'MaxTime', 60, ...
    'CVPartition', cvPart, ...
    'UseParallel', false, ...
    'Verbose', 0, ...
    'ShowPlots', false, ...
    'Repartition', false);

% Let fitcsvm optimize KernelScale and BoxConstraint via bayesopt.
[svmModel, ~, hpoResults] = fitcsvm(X_train, Y_train, ...
    'KernelFunction', 'rbf', ...
    'Standardize', true, ...
    'OptimizeHyperparameters', {'KernelScale', 'BoxConstraint'}, ...
    'HyperparameterOptimizationOptions', bayesOpts);

% Package the optimized parameters and optimization statistics.
results = struct();
results.bestParams = struct();
results.bestParams.KernelScale = svmModel.KernelParameters.Scale;
results.bestParams.BoxConstraint = svmModel.BoxConstraints(1);
results.bestObjective = hpoResults.MinObjective;
results.numEvaluations = hpoResults.NumObjectiveEvaluations;

end

function [svmModel, results] = testGridSearch(X_train, Y_train)
% Run a small exhaustive grid search over (KernelScale, BoxConstraint)
% for an RBF SVM, scoring each pair by k-fold cross-validated accuracy.
% Inputs:
%   X_train, Y_train - feature matrix and class labels.
% Outputs:
%   svmModel - final model retrained on all data with the best pair.
%   results  - struct with fields bestParams, bestObjective
%              (1 - accuracy fraction), and numEvaluations.

fprintf('  执行网格搜索测试...\n');

% Small grid keeps the test fast.
kernelScales = [0.5, 1.0, 2.0];
boxConstraints = [1, 10];

bestAccuracy = 0;
bestParams = struct('KernelScale', 1.0, 'BoxConstraint', 1.0);

% Set up cross-validation; derive the fold count from the partition
% instead of hard-coding 3 (the old inner loop silently broke if the
% KFold value above was ever changed).
cv = cvpartition(Y_train, 'KFold', 3);
numFolds = cv.NumTestSets;

totalCombinations = length(kernelScales) * length(boxConstraints);

for i = 1:length(kernelScales)
    for j = 1:length(boxConstraints)
        ks = kernelScales(i);
        bc = boxConstraints(j);

        % Cross-validated accuracy (%) for this parameter pair.
        accuracies = zeros(1, numFolds);
        for fold = 1:numFolds
            trainIdx = training(cv, fold);
            testIdx = test(cv, fold);

            tempModel = fitcsvm(X_train(trainIdx, :), Y_train(trainIdx), ...
                'KernelFunction', 'rbf', 'KernelScale', ks, ...
                'BoxConstraint', bc, 'Standardize', true);

            predictions = predict(tempModel, X_train(testIdx, :));
            accuracies(fold) = mean(predictions == Y_train(testIdx)) * 100;
        end

        avgAccuracy = mean(accuracies);
        if avgAccuracy > bestAccuracy
            bestAccuracy = avgAccuracy;
            bestParams.KernelScale = ks;
            bestParams.BoxConstraint = bc;
        end
    end
end

% Retrain on all data with the best parameters found.
svmModel = fitcsvm(X_train, Y_train, 'KernelFunction', 'rbf', ...
                  'KernelScale', bestParams.KernelScale, ...
                  'BoxConstraint', bestParams.BoxConstraint, 'Standardize', true);

results = struct();
results.bestParams = bestParams;
results.bestObjective = 1 - bestAccuracy/100;
results.numEvaluations = totalCombinations;

end