﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using ZGSharp;
using ZGSharp.DataSets;
using ZGSharp.Layers;

namespace ZGSharpTest
{
    internal class TestMnistLinear
    {
        // Training hyper-parameters (previously inline magic numbers).
        const int Epochs = 20;
        const int StepsPerEpoch = 64;
        const int BatchSize = 64;
        const int TestBatchSize = 100;
        const int ImagePixels = 28 * 28;
        const int NumClasses = 10;
        const double LearnRate = 0.01;
        // NOTE: filename spelling kept as-is so existing checkpoints still load.
        const string ModelFile = "TestLinearMinst.txt";

        /// <summary>
        /// Minimal MNIST classifier: a single fully-connected layer mapping the
        /// flattened 28x28 input pixels directly to the 10 class logits
        /// (no hidden layers, no activation).
        /// </summary>
        public class Net : Model
        {
            Linear layerLinear0 = new ZGSharp.Layers.Linear(ImagePixels, NumClasses);

            /// <summary>Forward pass: one linear projection of the flattened image.</summary>
            protected override Tensor forward(Tensor input)
            {
                return layerLinear0.LayerPred(input);
            }
        }

        /// <summary>
        /// Trains the linear MNIST model with Adam + cross-entropy, evaluating on a
        /// held-out batch and checkpointing the model after every epoch.
        /// </summary>
        /// <returns>0 on completion.</returns>
        public static int Test()
        {
            Mnist mnist = new Mnist();
            mnist.InitMinst(@"..\..\..\..\DataSets\Mnist");

            CommonDef.Opt = new ZGSharp.Optimeters.Adam();
            Net model = new Net();
            //model.ModelRestore(ModelFile);   // uncomment to resume from a checkpoint

            for (int epoch = 0; epoch < Epochs; epoch++)
            {
                for (int step = 0; step < StepsPerEpoch; step++)
                {
                    TrainStep(mnist, model, epoch, step);
                }
                Evaluate(mnist, model);
                model.SaveModel(ModelFile);
            }
            return 0;
        }

        // Runs one optimization step on a fresh training batch and logs the loss.
        static void TrainStep(Mnist mnist, Net model, int epoch, int step)
        {
            double[] inputData = new double[BatchSize * ImagePixels];
            int[] labelData = new int[BatchSize];
            mnist.GetBatchTrainData(BatchSize, inputData, labelData);

            Tensor data = Tensor.ToTensor(new int[] { BatchSize, ImagePixels }, inputData);
            Tensor labels = CommonDef.GetOneHot(labelData, NumClasses);

            Tensor pred = model.Run(data);
            Tensor loss = Losses.CrossEntropy(pred, labels);
            // Library convention: Backward takes the loss tensor plus the learning rate.
            loss.Backward(loss, LearnRate);
            Console.WriteLine($"epoch {epoch} step {step}, loss is {loss.Avarage()}");
        }

        // Measures classification accuracy on one batch of held-out test data.
        static void Evaluate(Mnist mnist, Net model)
        {
            double[] inputData = new double[TestBatchSize * ImagePixels];
            int[] labelData = new int[TestBatchSize];
            mnist.GetBatchTestData(TestBatchSize, inputData, labelData);

            Tensor data = Tensor.ToTensor(new int[] { TestBatchSize, ImagePixels }, inputData);
            Tensor pred = model.Run(data);
            double accuracy = CommonDef.GetAccuracy(pred, labelData);
            Console.WriteLine($"########################test accuracy {accuracy}\n");
        }
    }
}
