﻿using System;
using System.Collections.Generic;
using System.Text;

namespace MLForgeSharp.Models.ProbabilisticModels.PGM
{
    using System;
    using System.Collections.Generic;

    /// <summary>
    /// A minimal linear-chain Conditional Random Field (CRF) over discrete
    /// feature indices. Each feature index doubles as a weight index; a state's
    /// emission score is the sum of the weights of the features equal to it.
    /// NOTE(review): this toy model has no transition features — the per-step
    /// score depends only on the observed features, not on the previous state.
    /// </summary>
    public class CRFModel
    {
        private int numStates;    // number of label states
        private int numFeatures;  // number of features (== length of the weight vector)
        private double[] weights; // one weight per feature index

        /// <summary>
        /// Creates a CRF with weights initialized uniformly in [-0.5, 0.5).
        /// </summary>
        /// <param name="numStates">Number of distinct label states.</param>
        /// <param name="numFeatures">Number of features / weights.</param>
        /// <param name="seed">
        /// Optional RNG seed for reproducible initialization. Defaults to null
        /// (time-seeded), preserving the original behavior for existing callers.
        /// </param>
        public CRFModel(int numStates, int numFeatures, int? seed = null)
        {
            this.numStates = numStates;
            this.numFeatures = numFeatures;
            this.weights = new double[numFeatures];
            Random rand = seed.HasValue ? new Random(seed.Value) : new Random();
            for (int i = 0; i < numFeatures; i++)
            {
                weights[i] = rand.NextDouble() - 0.5; // initialize weights
            }
        }

        // Emission score of `state` given the observed feature indices:
        // sum of weights[f] over every feature f whose value equals the state id.
        private double ComputeFeatureScore(int[] features, int state)
        {
            double score = 0.0;
            for (int i = 0; i < features.Length; i++)
            {
                score += weights[features[i]] * (features[i] == state ? 1 : 0);
            }
            return score;
        }

        // Numerically stable log(sum(exp(values))): shift by the max before
        // exponentiating so large scores cannot overflow to infinity.
        private static double LogSumExp(double[] values)
        {
            double max = double.NegativeInfinity;
            for (int i = 0; i < values.Length; i++)
            {
                if (values[i] > max)
                {
                    max = values[i];
                }
            }
            if (double.IsNegativeInfinity(max))
            {
                return max; // empty or all -inf: log(0)
            }
            double sum = 0.0;
            for (int i = 0; i < values.Length; i++)
            {
                sum += Math.Exp(values[i] - max);
            }
            return max + Math.Log(sum);
        }

        // Forward algorithm: returns log Z, the log partition function.
        // BUG FIX: the original recursion took a MAX over predecessor states,
        // which is the Viterbi (best-path) recursion, not the forward algorithm.
        // The partition function requires summing over all paths, i.e. a
        // log-sum-exp at every step. Also replaced the final
        // Math.Log(sum of Math.Exp(alpha[i])) with the stable LogSumExp.
        private double Forward(int[] sequence, int[] features)
        {
            double[] alpha = new double[numStates];
            for (int i = 0; i < numStates; i++)
            {
                alpha[i] = ComputeFeatureScore(features, i);
            }

            for (int t = 1; t < sequence.Length; t++)
            {
                double[] newAlpha = new double[numStates];
                // log sum_i exp(alpha[i] + score(j)); score(j) does not depend
                // on the predecessor i here, so it factors out of the sum.
                double logSumAlpha = LogSumExp(alpha);
                for (int j = 0; j < numStates; j++)
                {
                    newAlpha[j] = logSumAlpha + ComputeFeatureScore(features, j);
                }
                alpha = newAlpha;
            }

            return LogSumExp(alpha);
        }

        /// <summary>
        /// Viterbi decoding: returns the highest-scoring label sequence.
        /// BUG FIX: the original never assigned labels[0] (it stayed 0) and
        /// overwrote labels[t] once per candidate state j inside the inner
        /// loop without backtracking, so the result was not the arg-max path.
        /// This version records back-pointers and traces the best path back
        /// from the arg-max terminal state.
        /// </summary>
        /// <param name="sequence">Observation sequence; only its length is used.</param>
        /// <param name="features">Feature indices (each must be &lt; numFeatures).</param>
        /// <returns>Predicted label per position, each in [0, numStates).</returns>
        public int[] Predict(int[] sequence, int[] features)
        {
            int[] labels = new int[sequence.Length];
            if (sequence.Length == 0)
            {
                return labels;
            }

            double[] delta = new double[numStates];
            for (int i = 0; i < numStates; i++)
            {
                delta[i] = ComputeFeatureScore(features, i);
            }

            // backPointer[t][j] = best predecessor state of j at time t.
            int[][] backPointer = new int[sequence.Length][];
            for (int t = 1; t < sequence.Length; t++)
            {
                backPointer[t] = new int[numStates];
                double[] newDelta = new double[numStates];
                for (int j = 0; j < numStates; j++)
                {
                    double maxScore = double.MinValue;
                    int bestState = 0;
                    for (int i = 0; i < numStates; i++)
                    {
                        double score = delta[i] + ComputeFeatureScore(features, j);
                        if (score > maxScore)
                        {
                            maxScore = score;
                            bestState = i;
                        }
                    }
                    newDelta[j] = maxScore;
                    backPointer[t][j] = bestState;
                }
                delta = newDelta;
            }

            // Terminal state = arg-max of the final delta, then follow the
            // back-pointers to recover the full path.
            int best = 0;
            for (int i = 1; i < numStates; i++)
            {
                if (delta[i] > delta[best])
                {
                    best = i;
                }
            }
            labels[sequence.Length - 1] = best;
            for (int t = sequence.Length - 1; t > 0; t--)
            {
                labels[t - 1] = backPointer[t][labels[t]];
            }

            return labels;
        }

        /// <summary>
        /// Trains the weights by per-sequence gradient ascent on an
        /// approximate log-likelihood (observed feature counts minus
        /// model-expected counts).
        /// BUG FIX: the expected-count term was
        /// Math.Exp(score / Math.Exp(logZ)) — a parenthesization error; the
        /// model probability of a labeling is Math.Exp(score - logZ).
        /// </summary>
        /// <param name="sequences">Training observation sequences.</param>
        /// <param name="featuresList">Per-sequence feature indices.</param>
        /// <param name="labelsList">Per-sequence gold labels.</param>
        /// <param name="maxIterations">Number of passes over the data.</param>
        /// <param name="learningRate">Gradient-ascent step size.</param>
        public void Train(List<int[]> sequences, List<int[]> featuresList, List<int[]> labelsList, int maxIterations = 100, double learningRate = 0.01)
        {
            for (int iter = 0; iter < maxIterations; iter++)
            {
                for (int n = 0; n < sequences.Count; n++)
                {
                    int[] sequence = sequences[n];
                    int[] features = featuresList[n];
                    int[] labels = labelsList[n];

                    double logZ = Forward(sequence, features);

                    // Gradient = empirical count - expected count under the model.
                    double[] gradient = new double[numFeatures];
                    for (int t = 0; t < sequence.Length; t++)
                    {
                        for (int i = 0; i < numFeatures; i++)
                        {
                            gradient[i] += (features[t] == labels[t] ? 1 : 0) - Math.Exp(ComputeFeatureScore(features, labels[t]) - logZ);
                        }
                    }

                    // Gradient-ascent weight update.
                    for (int i = 0; i < numFeatures; i++)
                    {
                        weights[i] += learningRate * gradient[i];
                    }
                }
            }
        }
    }


    /// <summary>
    /// Usage example for <see cref="CRFModel"/>: builds a tiny training set,
    /// trains a 2-state / 2-feature model, and prints a prediction.
    /// </summary>
    public class CRFModelExample
    {
        public CRFModelExample()
        {
            // Example data
            List<int[]> sequences = new List<int[]>
        {
            new int[] { 0, 1, 0 }, // sequence 1
            new int[] { 1, 0, 1 }  // sequence 2
        };

            List<int[]> featuresList = new List<int[]>
        {
            new int[] { 0, 1, 0 }, // features of sequence 1
            new int[] { 1, 0, 1 }  // features of sequence 2
        };

            List<int[]> labelsList = new List<int[]>
        {
            new int[] { 0, 1, 0 }, // labels of sequence 1
            new int[] { 1, 0, 1 }  // labels of sequence 2
        };

            // Create and train the CRF model
            CRFModel CRFModel = new CRFModel(numStates: 2, numFeatures: 2);
            CRFModel.Train(sequences, featuresList, labelsList);

            // Predict
            int[] testSequence = { 0, 1, 0 };
            int[] testFeatures = { 0, 1, 0 };
            int[] predictedLabels = CRFModel.Predict(testSequence, testFeatures);

            Console.WriteLine("Predicted Label: ");
            foreach (int label in predictedLabels)
            {
                // BUG FIX: `label + ' '` is int + char arithmetic in C# — it
                // added 32 to the label and printed a single number. Use a
                // string so the label is printed followed by a space.
                Console.Write(label + " ");
            }
        }
    }
}
