using System;
using System.Linq;

namespace MT5Trade.Models
{
    /// <summary>
    /// BFGS (Broyden–Fletcher–Goldfarb–Shanno) quasi-Newton optimizer, used for
    /// maximum-likelihood estimation of GARCH model parameters.
    /// Maintains an approximation of the inverse Hessian that is refined each
    /// iteration from gradient differences; derivatives are obtained by central
    /// finite differences, so the objective only needs to be evaluable.
    /// </summary>
    public class BfgsOptimizer
    {
        #region Constants

        /// <summary>Line-search sufficient-decrease (Armijo) coefficient c1.</summary>
        private const double ARMIJO_C1 = 1e-4;

        /// <summary>
        /// Wolfe curvature coefficient c2. Currently unused: the backtracking line
        /// search enforces only the Armijo condition. Reserved for a future
        /// strong-Wolfe line search.
        /// </summary>
        private const double WOLFE_C2 = 0.9;

        /// <summary>Maximum number of backtracking steps per line search.</summary>
        private const int MAX_LINE_SEARCH_ITER = 20;

        /// <summary>Relative perturbation for numerical gradient computation.</summary>
        private const double GRADIENT_EPSILON = 1e-8;

        /// <summary>Double-precision machine epsilon (2^-52).</summary>
        private const double MACHINE_EPSILON = 2.2204460492503131e-16;

        #endregion

        #region Public methods

        /// <summary>
        /// Runs BFGS minimization of <paramref name="objectiveFunc"/>.
        /// Quasi-Newton iteration: p_k = -H_k ∇f(x_k), x_{k+1} = x_k + α_k p_k,
        /// where H_k approximates the inverse Hessian and α_k is chosen by a
        /// backtracking Armijo line search. Iterates are projected onto the
        /// optional box constraints after each step.
        /// Termination: ||∇f(x)|| &lt; tolerance, relative function-value change
        /// below tolerance, or <paramref name="maxIterations"/> iterations.
        /// </summary>
        /// <param name="objectiveFunc">Objective function to minimize.</param>
        /// <param name="initialParameters">Starting point (not modified; a copy is optimized).</param>
        /// <param name="lowerBounds">Optional per-parameter lower bounds.</param>
        /// <param name="upperBounds">Optional per-parameter upper bounds.</param>
        /// <param name="maxIterations">Maximum number of BFGS iterations.</param>
        /// <param name="tolerance">Convergence tolerance.</param>
        /// <returns>Optimization result: final point, value, and diagnostics.</returns>
        /// <exception cref="ArgumentNullException">
        /// Thrown when <paramref name="objectiveFunc"/> or
        /// <paramref name="initialParameters"/> is null.
        /// </exception>
        public OptimizationResult Minimize(
            Func<double[], double> objectiveFunc,
            double[] initialParameters,
            double[] lowerBounds = null,
            double[] upperBounds = null,
            int maxIterations = 100,
            double tolerance = 1e-8)
        {
            if (objectiveFunc == null)
                throw new ArgumentNullException(nameof(objectiveFunc));
            if (initialParameters == null)
                throw new ArgumentNullException(nameof(initialParameters));

            int n = initialParameters.Length;
            double[] x = (double[])initialParameters.Clone();
            double[] xOld = new double[n];
            double[] gradient = new double[n];
            double[] gradientOld = new double[n];
            double[] direction = new double[n];
            double[,] hessianInverse = InitializeHessianInverse(n);

            double fValue = objectiveFunc(x);
            ComputeGradient(objectiveFunc, x, gradient);

            int iteration = 0;
            bool converged = false;

            while (iteration < maxIterations && !converged)
            {
                // First-order optimality: stop when the gradient is (near) zero.
                double gradientNorm = ComputeNorm(gradient);
                if (gradientNorm < tolerance)
                {
                    converged = true;
                    break;
                }

                // Search direction p = -H * ∇f.
                ComputeSearchDirection(hessianInverse, gradient, direction);

                // Safeguard: H should be positive definite, but numerical error
                // (or skipped updates after bound projection) can still yield
                // g·p >= 0, i.e. a non-descent direction on which the Armijo
                // search cannot succeed. Reset to steepest descent with H = I.
                double gDotP = 0;
                for (int i = 0; i < n; i++)
                {
                    gDotP += gradient[i] * direction[i];
                }
                if (gDotP >= 0)
                {
                    hessianInverse = InitializeHessianInverse(n);
                    for (int i = 0; i < n; i++)
                    {
                        direction[i] = -gradient[i];
                    }
                }

                // Backtracking line search for the step length α.
                // fValue is passed in to avoid re-evaluating the objective at x.
                double stepSize = LineSearch(objectiveFunc, x, fValue, gradient, direction, 1.0);

                Array.Copy(x, xOld, n);
                Array.Copy(gradient, gradientOld, n);

                // x_{k+1} = x_k + α * p_k, projected onto the box constraints.
                for (int i = 0; i < n; i++)
                {
                    x[i] = xOld[i] + stepSize * direction[i];

                    if (lowerBounds != null && x[i] < lowerBounds[i])
                        x[i] = lowerBounds[i];
                    if (upperBounds != null && x[i] > upperBounds[i])
                        x[i] = upperBounds[i];
                }

                double fValueNew = objectiveFunc(x);
                ComputeGradient(objectiveFunc, x, gradient);

                // BFGS update of the inverse-Hessian approximation.
                UpdateHessianInverse(hessianInverse, x, xOld, gradient, gradientOld);

                // Secondary criterion: relative function-value stagnation.
                if (Math.Abs(fValueNew - fValue) < tolerance * Math.Max(1, Math.Abs(fValue)))
                {
                    converged = true;
                }

                fValue = fValueNew;
                iteration++;
            }

            return new OptimizationResult
            {
                Parameters = x,
                ObjectiveValue = fValue,
                GradientNorm = ComputeNorm(gradient),
                Iterations = iteration,
                Converged = converged
            };
        }

        #endregion

        #region Private methods

        /// <summary>
        /// Creates the initial inverse-Hessian approximation H_0 = I.
        /// </summary>
        private double[,] InitializeHessianInverse(int n)
        {
            double[,] H = new double[n, n];
            for (int i = 0; i < n; i++)
            {
                H[i, i] = 1.0;
            }
            return H;
        }

        /// <summary>
        /// Central-difference numerical gradient:
        /// ∇f_i ≈ (f(x + h e_i) - f(x - h e_i)) / (2h), with h scaled by |x_i|.
        /// The result is written into <paramref name="gradient"/>.
        /// </summary>
        private void ComputeGradient(Func<double[], double> func, double[] x, double[] gradient)
        {
            int n = x.Length;
            double[] xPlus = (double[])x.Clone();
            double[] xMinus = (double[])x.Clone();

            for (int i = 0; i < n; i++)
            {
                // Relative step reduces cancellation error for large |x_i|.
                double h = Math.Max(GRADIENT_EPSILON, Math.Abs(x[i]) * GRADIENT_EPSILON);

                xPlus[i] = x[i] + h;
                xMinus[i] = x[i] - h;

                double fPlus = func(xPlus);
                double fMinus = func(xMinus);

                gradient[i] = (fPlus - fMinus) / (2.0 * h);

                // Restore the perturbed coordinate before moving to the next one.
                xPlus[i] = x[i];
                xMinus[i] = x[i];
            }
        }

        /// <summary>
        /// Computes the quasi-Newton search direction p = -H * g.
        /// </summary>
        private void ComputeSearchDirection(double[,] hessianInverse, double[] gradient, double[] direction)
        {
            int n = gradient.Length;
            for (int i = 0; i < n; i++)
            {
                direction[i] = 0;
                for (int j = 0; j < n; j++)
                {
                    direction[i] -= hessianInverse[i, j] * gradient[j];
                }
            }
        }

        /// <summary>
        /// Backtracking line search enforcing the Armijo (sufficient decrease)
        /// condition f(x + αp) ≤ f(x) + c1 α gᵀp; halves α until the condition
        /// holds or the iteration budget is exhausted.
        /// </summary>
        /// <param name="func">Objective function.</param>
        /// <param name="x">Current point.</param>
        /// <param name="fCurrent">Objective value at <paramref name="x"/> (avoids a redundant evaluation).</param>
        /// <param name="gradient">Gradient at <paramref name="x"/>.</param>
        /// <param name="direction">Search direction p (expected to be a descent direction).</param>
        /// <param name="initialStepSize">First trial step (1.0 is standard for quasi-Newton).</param>
        /// <returns>The accepted step length α, or the last (tiny) trial value on failure.</returns>
        private double LineSearch(
            Func<double[], double> func,
            double[] x,
            double fCurrent,
            double[] gradient,
            double[] direction,
            double initialStepSize)
        {
            double alpha = initialStepSize;
            double rho = 0.5; // backtracking contraction factor

            double gDotP = 0;
            for (int i = 0; i < x.Length; i++)
            {
                gDotP += gradient[i] * direction[i];
            }

            double[] xNew = new double[x.Length];
            int iter = 0;

            while (iter < MAX_LINE_SEARCH_ITER)
            {
                // Trial point x + α * p.
                for (int i = 0; i < x.Length; i++)
                {
                    xNew[i] = x[i] + alpha * direction[i];
                }

                double fNew = func(xNew);

                // Armijo condition: decrease proportional to the directional slope.
                if (fNew <= fCurrent + ARMIJO_C1 * alpha * gDotP)
                {
                    return alpha;
                }

                alpha *= rho;
                iter++;
            }

            return alpha; // search failed; caller proceeds with the last tiny step
        }

        /// <summary>
        /// BFGS inverse-Hessian update:
        /// H ← (I − ρ s yᵀ) H (I − ρ y sᵀ) + ρ s sᵀ,
        /// with s = x_{k+1} − x_k, y = g_{k+1} − g_k, ρ = 1/(yᵀs).
        /// The update is skipped unless the curvature condition yᵀs &gt; 0 holds
        /// (with a scale-relative safeguard): a non-positive yᵀs — possible after
        /// bound projection or a failed line search — would make ρ negative and
        /// destroy the positive definiteness of H.
        /// </summary>
        private void UpdateHessianInverse(
            double[,] H,
            double[] x,
            double[] xOld,
            double[] gradient,
            double[] gradientOld)
        {
            int n = x.Length;
            double[] s = new double[n]; // step: s_k = x_{k+1} - x_k
            double[] y = new double[n]; // gradient change: y_k = g_{k+1} - g_k

            for (int i = 0; i < n; i++)
            {
                s[i] = x[i] - xOld[i];
                y[i] = gradient[i] - gradientOld[i];
            }

            double yDotS = 0;
            for (int i = 0; i < n; i++)
            {
                yDotS += y[i] * s[i];
            }

            // Curvature condition: yᵀs must be sufficiently POSITIVE, not merely
            // nonzero. (The previous |yᵀs| > ε test wrongly accepted negative
            // curvature.) The floor is relative to ||s||·||y|| so the test is
            // invariant to the problem's scaling.
            double curvatureFloor = Math.Sqrt(MACHINE_EPSILON) * ComputeNorm(s) * ComputeNorm(y);
            if (yDotS <= MACHINE_EPSILON || yDotS <= curvatureFloor)
            {
                return;
            }

            double rho = 1.0 / yDotS;

            // Hy = H * y (Hy is zero-initialized by the runtime).
            double[] Hy = new double[n];
            for (int i = 0; i < n; i++)
            {
                for (int j = 0; j < n; j++)
                {
                    Hy[i] += H[i, j] * y[j];
                }
            }

            // yᵀ H y
            double yHy = 0;
            for (int i = 0; i < n; i++)
            {
                yHy += y[i] * Hy[i];
            }

            // Expanded form of the BFGS update, applied in place.
            for (int i = 0; i < n; i++)
            {
                for (int j = 0; j < n; j++)
                {
                    H[i, j] += rho * (1 + rho * yHy) * s[i] * s[j]
                             - rho * (Hy[i] * s[j] + s[i] * Hy[j]);
                }
            }
        }

        /// <summary>
        /// Euclidean norm: ||v|| = sqrt(Σ v_i²).
        /// </summary>
        private double ComputeNorm(double[] vector)
        {
            return Math.Sqrt(vector.Sum(v => v * v));
        }

        /// <summary>
        /// Backward-compatible wrapper around <see cref="Minimize"/> without bounds.
        /// </summary>
        /// <param name="objectiveFunc">Objective function to minimize.</param>
        /// <param name="initialParameters">Starting point.</param>
        /// <param name="maxIterations">Maximum number of iterations.</param>
        /// <param name="tolerance">Convergence tolerance.</param>
        /// <returns>The optimization result.</returns>
        public OptimizationResult Optimize(
            Func<double[], double> objectiveFunc,
            double[] initialParameters,
            int maxIterations = 100,
            double tolerance = 1e-8)
        {
            return Minimize(objectiveFunc, initialParameters, null, null, maxIterations, tolerance);
        }

        #endregion
    }

    /// <summary>
    /// Result of a BFGS optimization run.
    /// </summary>
    public class OptimizationResult
    {
        /// <summary>Optimized parameter vector.</summary>
        public double[] Parameters { get; set; }

        /// <summary>Objective function value at <see cref="Parameters"/>.</summary>
        public double ObjectiveValue { get; set; }

        /// <summary>Euclidean norm of the gradient at the final point.</summary>
        public double GradientNorm { get; set; }

        /// <summary>Number of iterations performed.</summary>
        public int Iterations { get; set; }

        /// <summary>True when a convergence criterion was met before the iteration limit.</summary>
        public bool Converged { get; set; }
    }
}