﻿using System;
using System.Collections.Generic;
using System.Text;

namespace MLForgeSharp.Models.ProbabilisticModels
{
    /// <summary>
    /// Maximum entropy (multinomial logistic regression) classifier trained by
    /// full-batch gradient ascent on the conditional log-likelihood.
    /// A sample is represented as an array of active (indicator) feature indices;
    /// each index must lie in [0, numFeatures).
    /// </summary>
    public class MaxEntModel
    {
        private readonly int numFeatures;   // number of distinct feature indices
        private readonly int numLabels;     // number of class labels
        private readonly double[] weights;  // weights[y * numFeatures + f] = weight of feature f for label y
        private readonly double learningRate;

        /// <summary>
        /// Creates a model with weights randomly initialized in [-0.5, 0.5).
        /// </summary>
        /// <param name="numFeatures">Number of distinct feature indices; must be positive.</param>
        /// <param name="numLabels">Number of class labels; must be positive.</param>
        /// <param name="learningRate">Gradient-ascent step size; must be positive.</param>
        /// <param name="seed">Optional seed for reproducible weight initialization.</param>
        public MaxEntModel(int numFeatures, int numLabels, double learningRate = 0.01, int? seed = null)
        {
            if (numFeatures <= 0) throw new ArgumentOutOfRangeException(nameof(numFeatures));
            if (numLabels <= 0) throw new ArgumentOutOfRangeException(nameof(numLabels));
            if (learningRate <= 0) throw new ArgumentOutOfRangeException(nameof(learningRate));

            this.numFeatures = numFeatures;
            this.numLabels = numLabels;
            this.learningRate = learningRate;
            this.weights = new double[numFeatures * numLabels];

            // Small random initialization; a fixed seed makes runs reproducible.
            Random rand = seed.HasValue ? new Random(seed.Value) : new Random();
            for (int i = 0; i < weights.Length; i++)
            {
                weights[i] = rand.NextDouble() - 0.5;
            }
        }

        // Unnormalized log-score of a label: sum of weights of the active features.
        private double Score(int[] features, int label)
        {
            double score = 0.0;
            for (int i = 0; i < features.Length; i++)
            {
                score += weights[label * numFeatures + features[i]];
            }
            return score;
        }

        /// <summary>
        /// Computes the full conditional distribution P(y|x) in one pass
        /// (softmax over the label scores). The maximum score is subtracted
        /// before exponentiating so Math.Exp cannot overflow to infinity when
        /// weights grow large during training (log-sum-exp trick).
        /// </summary>
        private double[] ComputeDistribution(int[] features)
        {
            double[] probs = new double[numLabels];
            double maxScore = double.NegativeInfinity;
            for (int y = 0; y < numLabels; y++)
            {
                probs[y] = Score(features, y);
                if (probs[y] > maxScore) maxScore = probs[y];
            }

            double z = 0.0;
            for (int y = 0; y < numLabels; y++)
            {
                probs[y] = Math.Exp(probs[y] - maxScore);
                z += probs[y];
            }
            for (int y = 0; y < numLabels; y++)
            {
                probs[y] /= z;
            }
            return probs;
        }

        /// <summary>
        /// Trains the model with full-batch gradient ascent on the log-likelihood.
        /// The gradient for weight (y, f) is the sum over samples of
        /// [indicator(y == gold label) - P(y|x)] for each active feature f.
        /// </summary>
        /// <param name="featuresList">Per-sample arrays of active feature indices.</param>
        /// <param name="labels">Gold label per sample; must align with featuresList.</param>
        /// <param name="maxIterations">Number of full passes over the data.</param>
        /// <exception cref="ArgumentNullException">Either input is null.</exception>
        /// <exception cref="ArgumentException">Sample and label counts differ.</exception>
        public void Train(List<int[]> featuresList, int[] labels, int maxIterations = 1000)
        {
            if (featuresList == null) throw new ArgumentNullException(nameof(featuresList));
            if (labels == null) throw new ArgumentNullException(nameof(labels));
            if (featuresList.Count != labels.Length)
                throw new ArgumentException("featuresList and labels must have the same length.");

            for (int iter = 0; iter < maxIterations; iter++)
            {
                double[] gradient = new double[weights.Length];

                for (int n = 0; n < featuresList.Count; n++)
                {
                    int[] features = featuresList[n];
                    int label = labels[n];

                    // One softmax per sample; the original recomputed the full
                    // normalizer Z once per label (O(numLabels^2) per sample).
                    double[] probs = ComputeDistribution(features);
                    for (int y = 0; y < numLabels; y++)
                    {
                        double delta = (y == label ? 1.0 : 0.0) - probs[y];
                        for (int i = 0; i < features.Length; i++)
                        {
                            gradient[y * numFeatures + features[i]] += delta;
                        }
                    }
                }

                // Ascent step: move weights toward higher log-likelihood.
                for (int i = 0; i < weights.Length; i++)
                {
                    weights[i] += learningRate * gradient[i];
                }
            }
        }

        /// <summary>
        /// Returns the label with the highest conditional probability P(y|x).
        /// Ties resolve to the lowest label index.
        /// </summary>
        public int Predict(int[] features)
        {
            if (features == null) throw new ArgumentNullException(nameof(features));

            double[] probs = ComputeDistribution(features);
            int predictedLabel = 0;
            for (int y = 1; y < numLabels; y++)
            {
                if (probs[y] > probs[predictedLabel])
                {
                    predictedLabel = y;
                }
            }
            return predictedLabel;
        }
    }

    // Demo: trains a tiny maximum entropy model and prints one prediction.
    public class MaxEntModelExample
    {
        public MaxEntModelExample()
        {
            // Toy training set: each sample lists its active feature indices.
            var trainingFeatures = new List<int[]>
            {
                new int[] { 0, 1 }, // sample 1
                new int[] { 1, 0 }, // sample 2
                new int[] { 0, 0 }, // sample 3
                new int[] { 1, 1 }  // sample 4
            };
            int[] trainingLabels = { 0, 1, 0, 1 };

            // Build the model and fit it to the samples above.
            MaxEntModel model = new MaxEntModel(numFeatures: 2, numLabels: 2, learningRate: 0.01);
            model.Train(trainingFeatures, trainingLabels);

            // Classify one query sample and report the result.
            int[] query = { 0, 1 };
            Console.WriteLine("预测标签: " + model.Predict(query));
        }
    }
}
