﻿using Accord.MachineLearning.VectorMachines.Learning;
using Accord.Math.Optimization.Losses;
using Accord.Math.Optimization;
using Accord.Math;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Accord.IO;
using Accord.MachineLearning.VectorMachines;
using Accord.Statistics.Kernels;
using ICSharpCode.SharpZipLib.Zip;

namespace mlaccordtest
{
    internal class Program
    {
        /// <summary>
        /// Entry point. Demonstrates non-linear regression with kernel SVMs from
        /// the Accord.NET framework. The sample arrays below feed the alternative
        /// (currently commented-out) demos kernelSvm1/2/3 and optimization; the
        /// active path runs kernelSvmTest against a CSV data file.
        /// </summary>
        /// <param name="args">Optional: args[0] is the CSV path passed to
        /// kernelSvmTest; falls back to the original hard-coded location
        /// when no argument is supplied.</param>
        static void Main(string[] args)
        {
            // A very simple regression problem with only 2 input variables (x, y).
            double[][] inputs =
            {
                new[] { 0.301748252, 0.1 },
                new[] { 0.40034965, 0.1 },
                new[] { 0.5, 0.1 },
                new[] { 0.59965035, 0.1 },
                new[] { 0.70034965, 0.1 },
                new[] { 0.801048951, 0.1 },
                new[] { 0.89965035, 0.1 },
                new[] { 1.00034965, 0.1 },
                new[] { 1.08951049, 0.1 },
                new[] { 0.302797203, 0.15 },
                new[] { 0.40034965, 0.15 },
                new[] { 0.5, 0.15 },
                new[] { 0.600699301, 0.15 },
                new[] { 0.70034965, 0.15 },
                new[] { 0.8, 0.15 },
                new[] { 0.89965035, 0.15 },
                new[] { 0.999300699, 0.15 },
                new[] { 1.08951049, 0.15 },
                new[] { 0.303846154, 0.2 },
                new[] { 0.401398601, 0.2 },
                new[] { 0.5, 0.2 },
                new[] { 0.600699301, 0.2 },
                new[] { 0.70034965, 0.2 },
                new[] { 0.8, 0.2 },
                new[] { 0.89965035, 0.2 },
                new[] { 0.999300699, 0.2 },
                new[] { 1.08951049, 0.2 },
                new[] { 0.303846154, 0.25 },
                new[] { 0.40034965, 0.25 },
                new[] { 0.5, 0.25 },
                new[] { 0.600699301, 0.25 },
                new[] { 0.70034965, 0.25 },
                new[] { 0.8, 0.25 },
                new[] { 0.89965035, 0.25 },
                new[] { 0.999300699, 0.25 },
                new[] { 1.08951049, 0.25 },
                new[] { 0.303846154, 0.3 },
                new[] { 0.40034965, 0.3 },
                new[] { 0.501048951, 0.3 },
                new[] { 0.600699301, 0.3 },
                new[] { 0.70034965, 0.3 },
                new[] { 0.8, 0.3 },
                new[] { 0.89965035, 0.3 },
                new[] { 0.999300699, 0.3 },
                new[] { 1.08951049, 0.3 },
                new[] { 0.300699301, 0.4 },
                new[] { 0.40034965, 0.4 },
                new[] { 0.5, 0.4 },
                new[] { 0.598601399, 0.4 },
                new[] { 0.70034965, 0.4 },
                new[] { 0.8, 0.4 },
                new[] { 0.89965035, 0.4 },
                new[] { 0.999300699, 0.4 },
                new[] { 1.08951049, 0.4 },
                new[] { 0.300699301, 0.5 },
                new[] { 0.40034965, 0.5 },
                new[] { 0.5, 0.5 },
                new[] { 0.600699301, 0.5 },
                new[] { 0.70034965, 0.5 },
                new[] { 0.8, 0.5 },
                new[] { 0.89965035, 0.5 },
                new[] { 0.999300699, 0.5 },
                new[] { 1.08951049, 0.5 }
            };

            // The task is to output a non-linear combination
            // of those numbers: log(7.4x) / sqrt(1.1y + 42)
            double[] outputs =
            {
                0.72044489,
                0.6312145,
                0.58010857,
                0.57541147,
                0.60055152,
                0.64723853,
                0.70220705,
                0.76878356,
                0.84195804,
                0.68232624,
                0.56988854,
                0.4988931,
                0.47596685,
                0.4812145,
                0.50635166,
                0.54640594,
                0.6113221,
                0.68615694,
                0.67901422,
                0.54502956,
                0.4508268,
                0.39640884,
                0.38010953,
                0.39032956,
                0.43038384,
                0.49198509,
                0.5717923,
                0.67404184,
                0.53508191,
                0.42927984,
                0.35165746,
                0.31215373,
                0.29419696,
                0.31436174,
                0.36767569,
                0.44251053,
                0.66409709,
                0.51850732,
                0.40773577,
                0.32679558,
                0.27237472,
                0.23618591,
                0.24143357,
                0.28148785,
                0.34140556,
                0.65911602,
                0.51022003,
                0.39613066,
                0.3102152,
                0.24751285,
                0.21795387,
                0.20662694,
                0.21187459,
                0.23201329,
                0.64254144,
                0.48038577,
                0.36961133,
                0.28038674,
                0.22762334,
                0.19474945,
                0.1801076,
                0.1754105,
                0.17731716,
            };

            // Held-out test points and manually-predicted expected values,
            // used by the commented-out kernelSvm2/kernelSvm3 demos.
            double[][] testputs =
            {
                new[] { 0.45, 0.1 },
                new[] { 0.65, 0.1 },
                new[] { 0.45, 0.2 },
                new[] { 0.65, 0.2 },
                new[] { 0.45, 0.3 },
                new[] { 0.65, 0.3 },
            };
            double[] testmanpred =
            {
                0.6   ,
                0.587 ,
                0.495 ,
                0.388 ,
                0.46  ,
                0.3   ,
            };

            // Alternative demos (kept for reference; they consume the arrays above):
            // kernelSvm1(inputs, outputs);
            // kernelSvm2(inputs, outputs, testputs, testmanpred);
            // kernelSvm3(inputs, outputs, testputs, testmanpred);
            // optimization(inputs, outputs);

            // Fixed: the CSV path was hard-coded to one machine's drive layout.
            // It can now be supplied as the first command-line argument; the
            // original path remains the default for backward compatibility.
            string csvPath = args.Length > 0
                ? args[0]
                : "E:\\codic\\mltest\\YP1POLYtestdata.csv";

            kernelSvmTest(csvPath);

            // Keep the console window open until a key is pressed.
            Console.Read();
        }
        /// <summary>
        /// Trains a LibSVM-style (Fan-Chen-Lin) Gaussian-kernel support vector
        /// regression on the given samples and prints the in-sample error and
        /// per-sample predictions to the console.
        /// </summary>
        /// <param name="inputs">Training inputs, 2 features per sample.</param>
        /// <param name="outputs">Regression targets, parallel to <paramref name="inputs"/>.</param>
        private static void kernelSvm1(double[][] inputs, double[] outputs)
        {
            // Create a LibSVM-based support vector regression algorithm.
            // NOTE(review): UseKernelEstimation = true asks the teacher to
            // estimate the kernel from the data, which presumably supersedes
            // the explicit Gaussian(0.2) below — confirm which one wins.
            var teacher = new FanChenLinSupportVectorRegression<Gaussian>()
            {
                Tolerance = 1e-5,
                UseKernelEstimation = true,
                UseComplexityHeuristic = true,
                // Complexity = 10000,
                Kernel = new Gaussian(0.2)
            };

            // Use the algorithm to learn the machine.
            var svm = teacher.Learn(inputs, outputs);

            // Get the machine's predictions for the training inputs.
            double[] prediction = svm.Score(inputs);

            // In-sample squared loss (0.0 would mean a perfect fit).
            double error = new SquareLoss(outputs).Loss(prediction);

            // Fixed: the original used a pointless $-prefix on a string with no
            // interpolation holes and then concatenated; interpolate instead
            // (printed output is identical).
            Console.WriteLine($"LibSVM-based support vector regression algorithm gave error: {error}");

            // Dump x, y, expected, predicted for each training sample.
            for (int i = 0; i < inputs.Length; i++)
            {
                Console.WriteLine($"{inputs[i][0]}  {inputs[i][1]}   {outputs[i]}    {prediction[i]}");
            }
        }

        /// <summary>
        /// Random-search hyper-parameter sweep for a Gaussian-kernel SMO support
        /// vector regression. Reads a CSV file, standardizes all columns, splits
        /// rows 0..62 as training and rows 63..96 as validation, trains 500
        /// models with random Gaussian sigma, and prints them ranked by
        /// validation R² (best first).
        /// </summary>
        /// <param name="inputpath">Path to the CSV data file (first row is a header).</param>
        private static void kernelSvmTest(string inputpath)
        {
            double[][] input, testinput;
            double[] oridata, testdata;

            // Fixed: the reader was disposed manually and would leak if parsing
            // threw; a using block guarantees disposal on every path.
            using (var csv = new CsvReader(inputpath, true))
            {
                var db = csv.ToTable();
                var inputall = db.ToJagged();

                // Standardize every column (zero mean, unit variance).
                inputall = Accord.Statistics.Tools.Standardize(inputall);

                // Training split: rows 0..62; columns 1 and 0 are the features,
                // column 2 is the regression target.
                var inputallsub = inputall.Submatrix(0, 62, 0, 2);
                input = inputallsub.GetColumns(1, 0);
                oridata = inputallsub.GetColumn(2);

                // Validation split: rows 63..96, same column layout.
                var inputallsub2 = inputall.Submatrix(63, 96, 0, 2);
                testinput = inputallsub2.GetColumns(1, 0);
                testdata = inputallsub2.GetColumn(2);
            }

            var Comp = 5;

            // Fixed: the original also constructed Laplacian, Sigmoid and
            // Polynomial teachers that were never used — removed as dead code
            // (the commented-out experiments that used them were deleted too).
            var Gaussianteacher = new SequentialMinimalOptimizationRegression<Gaussian>()
            {
                UseComplexityHeuristic = true,
                Tolerance = 1e-6,
                Complexity = Comp,
                UseKernelEstimation = true // estimate the kernel from the data
            };

            // NOTE(review): unseeded Random makes every run's sweep different;
            // seed it if reproducible rankings are required.
            var rand = new Random();
            var lstSvrs = new List<ClsSVRres>();

            for (int i = 0; i < 500; i++)
            {
                var svr = new ClsSVRres();

                // Random Gaussian kernel width in (0, 1). (A second Random and a
                // random complexity value existed in the original but were never
                // used — removed.)
                var rd = rand.NextDouble();

                Gaussianteacher.Kernel = new Gaussian(rd);
                var GaussianSvm = Gaussianteacher.Learn(input, oridata);

                // In-sample fit.
                double[] answers1 = GaussianSvm.Score(input);
                double rSquared2 = new RSquaredLoss(GaussianSvm.NumberOfOutputs, oridata).Loss(answers1);
                double error = new SquareLoss(oridata).Loss(answers1);

                svr.Sigma = rd;
                svr.TRtype = SVRtype.Gaussian;
                svr.R2 = rSquared2;
                // NOTE(review): this field stores a squared loss, not a root
                // mean squared error, despite the "RSME" name.
                svr.RSME = error;

                // Out-of-sample (validation) fit.
                double[] answers2 = GaussianSvm.Score(testinput);
                var rSquared3 = new RSquaredLoss(GaussianSvm.NumberOfOutputs, testdata).Loss(answers2);
                var error2 = new SquareLoss(testdata).Loss(answers2);

                svr.R2_Test = rSquared3;
                svr.RSME_Test = error2;
                svr.Complex = Gaussianteacher.Complexity;
                lstSvrs.Add(svr);
            }

            // Rank the candidates by validation R², best first, and dump them.
            lstSvrs = lstSvrs.OrderByDescending(ss => ss.R2_Test).ToList();
            Console.WriteLine("type  sigma  R2 RSME R2_Test RSME_Test");
            int ii = 0;
            foreach (var svr in lstSvrs)
            {
                Console.WriteLine($"{ii} Gauss  {svr.Sigma}   {svr.Complex}   {svr.R2}   {svr.RSME}  {svr.R2_Test}  {svr.RSME_Test}");
                ii++;
            }
        }






        /// <summary>
        /// Trains an SMO Gaussian-kernel SVR on the training data, saves the
        /// model to disk, prints the in-sample error and predictions, then
        /// prints predictions for the test points next to the manually-expected
        /// values.
        /// </summary>
        /// <param name="inputs">Training inputs, 2 features per sample.</param>
        /// <param name="outputs">Regression targets, parallel to <paramref name="inputs"/>.</param>
        /// <param name="testputs">Held-out test inputs.</param>
        /// <param name="testmanpred">Manually-predicted expected values for <paramref name="testputs"/>.</param>
        private static void kernelSvm2(double[][] inputs, double[] outputs, double[][] testputs, double[] testmanpred)
        {
            // Create a new Sequential Minimal Optimization (SMO) learning
            // algorithm and estimate the complexity parameter C from data.
            var teacher = new SequentialMinimalOptimizationRegression<Gaussian>()
            {
                UseComplexityHeuristic = true,
                Tolerance = 1e-6,
                UseKernelEstimation = true // estimate the kernel from the data
            };

            // Teach the vector machine.
            var svm = teacher.Learn(inputs, outputs);

            // NOTE(review): hard-coded absolute path; kernelSvm3 loads the model
            // from this same literal, so keep the two in sync if it changes.
            svm.Save("d:\\acctest.txt");

            // Score the training samples with the learned model.
            double[] answers = svm.Score(inputs);

            // In-sample squared loss (0.0 would mean a perfect fit).
            double error = new SquareLoss(outputs).Loss(answers);

            // Fixed: pointless $-prefix plus string concatenation replaced by
            // interpolation (printed output is identical).
            Console.WriteLine($"Sequential Minimal Optimization (SMO) learning algorithm gave error: {error}");

            for (int i = 0; i < inputs.Length; i++)
            {
                Console.WriteLine($"{inputs[i][0]}  {inputs[i][1]}   {outputs[i]}    {answers[i]}");
            }

            Console.WriteLine();
            Console.WriteLine("(SMO) learning algorithm test: ");

            // Score the held-out points and print them next to the manual predictions.
            double[] answers2 = svm.Score(testputs);

            for (int i = 0; i < testputs.Length; i++)
            {
                Console.WriteLine($"{testputs[i][0]}  {testputs[i][1]}   {testmanpred[i]}    {answers2[i]}");
            }
        }


        /// <summary>
        /// Loads the SVR model previously saved by kernelSvm2 from disk and
        /// prints its predictions for the test points next to the
        /// manually-expected values. The <paramref name="inputs"/> and
        /// <paramref name="outputs"/> parameters are unused but kept so the
        /// signature matches the sibling demos.
        /// </summary>
        private static void kernelSvm3(double[][] inputs, double[] outputs, double[][] testputs, double[] testmanpred)
        {
            // Fixed: the original also constructed an unused
            // `new SupportVectorMachine(5)` — removed as dead code.

            // NOTE(review): hard-coded absolute path; must match the location
            // kernelSvm2 saves to.
            SupportVectorMachine<IKernel> svm3 =
                Accord.IO.Serializer.Load<SupportVectorMachine<IKernel>>("d:\\acctest.txt");

            Console.WriteLine($"reading model from txt:");
            double[] answers2 = svm3.Score(testputs);

            Console.WriteLine();
            Console.WriteLine($"(SMO) learning algorithm test: ");
            for (int i = 0; i < testputs.Length; i++)
            {
                Console.WriteLine($"{testputs[i][0]}  {testputs[i][1]}   {testmanpred[i]}    {answers2[i]}");
            }
        }


        /// <summary>
        /// Fits a user-defined non-linear model, log(w0*x) / sqrt(w1*y + w2),
        /// to the data by minimizing the sum of squared residuals with the
        /// gradient-free COBYLA optimizer, then prints the resulting squared
        /// loss on the training data.
        /// </summary>
        /// <param name="inputs">Training inputs, 2 features per sample.</param>
        /// <param name="outputs">Regression targets, parallel to <paramref name="inputs"/>.</param>
        private static void optimization(double[][] inputs, double[] outputs)
        {
            // The model shape is known, but the parameter vector w is not.
            Func<double[], double[], double> model =
                (x, w) => Math.Log(w[0] * x[0]) / Math.Sqrt(w[1] * x[1] + w[2]);

            // Objective: sum of squared residuals over the whole data set for a
            // candidate parameter vector w.
            Func<double[], double> objective = w =>
            {
                double total = 0.0;
                for (int i = 0; i < inputs.Length; i++)
                {
                    total += Math.Pow(outputs[i] - model(inputs[i], w), 2);
                }
                return total;
            };

            // Gradient-free minimization over the 3 parameters w0, w1, w2,
            // starting from an arbitrary initial guess.
            var solver = new Cobyla(numberOfVariables: 3)
            {
                Function = objective,
                MaxIterations = 100,
                Solution = new double[] { 1.0, 6.4, 100 }
            };

            solver.Minimize(); // returns true on success
            double[] fitted = solver.Solution;

            // Evaluate the fitted model on the inputs and report the squared
            // loss (0.0 would mean a perfect fit).
            double[] prediction = inputs.Apply(x => model(x, fitted));
            Console.WriteLine(new SquareLoss(outputs).Loss(prediction));
        }
    
}
}
