﻿using System;
using System.Collections.Generic;
using System.Text;

namespace MLForgeSharp.Models.SupervisedLearningModels.EnsembleLearning.Boosting
{
    /// <summary>
    /// AdaBoost binary classifier built from one-dimensional threshold
    /// ("decision stump") weak learners. Training labels must be -1 or +1.
    /// </summary>
    public class AdaBoostModel
    {
        /// <summary>A decision stump: thresholds a single feature.</summary>
        private class WeakClassifier
        {
            public int FeatureIndex { get; set; } // index of the feature being thresholded
            public double Threshold { get; set; } // split point on that feature
            public int Direction { get; set; }    // +1 or -1: label predicted when feature < threshold
            public double Alpha { get; set; }     // weight of this stump in the final vote
        }

        private readonly List<WeakClassifier> classifiers; // trained ensemble
        private readonly int numClassifiers;               // boosting rounds

        /// <summary>
        /// Creates an AdaBoost model.
        /// </summary>
        /// <param name="numClassifiers">Number of boosting rounds (weak classifiers). Must be positive.</param>
        /// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="numClassifiers"/> is not positive.</exception>
        public AdaBoostModel(int numClassifiers = 50)
        {
            if (numClassifiers <= 0)
            {
                throw new ArgumentOutOfRangeException(nameof(numClassifiers), "Number of weak classifiers must be positive.");
            }
            this.numClassifiers = numClassifiers;
            this.classifiers = new List<WeakClassifier>();
        }

        // Finds the decision stump (feature, threshold, direction) with the lowest
        // weighted classification error under the current sample weights.
        private WeakClassifier TrainWeakClassifier(double[][] data, int[] labels, double[] weights)
        {
            int numFeatures = data[0].Length;
            // Default Direction = 1 so the stump is well-formed even if no split is found.
            WeakClassifier bestClassifier = new WeakClassifier { Direction = 1 };
            double minError = double.MaxValue;

            for (int featureIndex = 0; featureIndex < numFeatures; featureIndex++)
            {
                // Candidate thresholds are midpoints between consecutive sorted feature values.
                double[] featureValues = new double[data.Length];
                for (int i = 0; i < data.Length; i++)
                {
                    featureValues[i] = data[i][featureIndex];
                }
                Array.Sort(featureValues);

                for (int i = 1; i < featureValues.Length; i++)
                {
                    // Equal neighbors yield the same split; skip the redundant evaluation.
                    if (featureValues[i] == featureValues[i - 1])
                    {
                        continue;
                    }
                    double threshold = (featureValues[i - 1] + featureValues[i]) / 2;
                    for (int direction = -1; direction <= 1; direction += 2)
                    {
                        // Weighted error of stump: predict 'direction' when feature < threshold.
                        double error = 0.0;
                        for (int j = 0; j < data.Length; j++)
                        {
                            int prediction = (data[j][featureIndex] < threshold) ? direction : -direction;
                            if (prediction != labels[j])
                            {
                                error += weights[j];
                            }
                        }

                        if (error < minError)
                        {
                            minError = error;
                            bestClassifier.FeatureIndex = featureIndex;
                            bestClassifier.Threshold = threshold;
                            bestClassifier.Direction = direction;
                        }
                    }
                }
            }

            // No candidate split at all (single sample or all-constant features):
            // use error 0.5 so Alpha becomes 0 and the stump carries no vote.
            if (minError == double.MaxValue)
            {
                minError = 0.5;
            }
            // Clamp away from 0 and 1 so the log stays finite (original guarded only
            // the denominator, so minError == 1 produced Alpha = -Infinity).
            minError = Math.Max(1e-10, Math.Min(1 - 1e-10, minError));
            bestClassifier.Alpha = 0.5 * Math.Log((1 - minError) / minError);
            return bestClassifier;
        }

        /// <summary>
        /// Trains the ensemble on the given samples. Any previously trained
        /// ensemble is discarded, so repeated calls retrain from scratch.
        /// </summary>
        /// <param name="data">Samples; each row is a feature vector. Must be non-empty.</param>
        /// <param name="labels">Per-sample labels, each -1 or +1; same length as <paramref name="data"/>.</param>
        /// <exception cref="ArgumentException">Thrown when the inputs are null, empty, or mismatched in length.</exception>
        public void Train(double[][] data, int[] labels)
        {
            if (data == null || data.Length == 0)
            {
                throw new ArgumentException("Training data must be non-empty.", nameof(data));
            }
            if (labels == null || labels.Length != data.Length)
            {
                throw new ArgumentException("Labels must match the number of samples.", nameof(labels));
            }

            // Bug fix: without this, a second Train call appended new stumps onto the
            // old ensemble, corrupting both the predictions and the weight updates.
            classifiers.Clear();

            int numSamples = data.Length;
            double[] weights = new double[numSamples];
            for (int i = 0; i < numSamples; i++)
            {
                weights[i] = 1.0 / numSamples; // start from a uniform distribution
            }

            for (int t = 0; t < numClassifiers; t++)
            {
                WeakClassifier classifier = TrainWeakClassifier(data, labels, weights);
                classifiers.Add(classifier);

                // Re-weight samples: misclassified ones gain weight, correct ones lose it.
                double sumWeights = 0.0;
                for (int i = 0; i < numSamples; i++)
                {
                    int prediction = (data[i][classifier.FeatureIndex] < classifier.Threshold) ? classifier.Direction : -classifier.Direction;
                    weights[i] *= Math.Exp(-classifier.Alpha * labels[i] * prediction);
                    sumWeights += weights[i];
                }

                // Normalize so the weights remain a probability distribution.
                for (int i = 0; i < numSamples; i++)
                {
                    weights[i] /= sumWeights;
                }
            }
        }

        /// <summary>
        /// Predicts the class of a feature vector by the alpha-weighted vote of all
        /// trained stumps. Returns +1 or -1; returns 0 only on an exact tie
        /// (e.g. when the model has not been trained).
        /// </summary>
        /// <param name="features">Feature vector with the same dimensionality as the training data.</param>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="features"/> is null.</exception>
        public int Predict(double[] features)
        {
            if (features == null)
            {
                throw new ArgumentNullException(nameof(features));
            }

            double sum = 0.0;
            foreach (WeakClassifier classifier in classifiers)
            {
                int prediction = (features[classifier.FeatureIndex] < classifier.Threshold) ? classifier.Direction : -classifier.Direction;
                sum += classifier.Alpha * prediction;
            }
            return Math.Sign(sum);
        }
    }

    // Demonstrates training the AdaBoost model on two well-separated clusters
    // and querying it with a sample from the first cluster.
    public class AdaBoostModelExample
    {
        public AdaBoostModelExample()
        {
            // Four points near (1, 2) labeled -1, two points near (10, 10) labeled +1.
            var data = new[]
            {
                new[] { 1.0, 2.0 },
                new[] { 1.1, 2.1 },
                new[] { 1.2, 2.2 },
                new[] { 1.3, 2.3 },
                new[] { 10.0, 10.0 },
                new[] { 10.1, 10.1 }
            };
            var labels = new[] { -1, -1, -1, -1, 1, 1 };

            // Build and train the ensemble.
            var model = new AdaBoostModel(numClassifiers: 50);
            model.Train(data, labels);

            // Classify a point from the low cluster and report the result.
            var testFeatures = new[] { 1.0, 2.0 };
            var predictedLabel = model.Predict(testFeatures);
            System.Console.WriteLine("预测标签: " + predictedLabel);
        }
    }
}
