﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using ZGSharp;
using ZGSharp.DataSets;
using ZGSharp.Layers;

namespace ZGSharpTest
{
    internal class TestMnistCnn
    {
        public class CnnNet : Model
        {
            // Small MNIST CNN: two conv blocks followed by a two-layer classifier head.
            // NOTE(review): linear1 expects a 7*7*32 flattened input, which implies the
            // 3x3/stride-1 convolutions preserve spatial size (same-padding) and only the
            // 2x2 max-pools downsample: 28x28 -> 14x14 -> 7x7. Confirm against
            // ZGSharp.Layers.Conv2D's padding semantics.
            Conv2D Conv0 = new Conv2D(1, 32, new int[] { 3, 3 }, new int[] { 1, 1 });
            Conv2D Conv1 = new Conv2D(32, 32, new int[] { 3, 3 }, new int[] { 1, 1 });
            Linear linear1 = new Linear(49 * 32, 64);
            Linear linear2 = new Linear(64, 10);

            protected override Tensor forward(Tensor input)
            {
                // Feature extractor: (conv -> 2x2 max-pool -> ReLU) twice.
                Tensor features = Relu.relu(Pool2D.MaxPool(Conv0.LayerPred(input), new int[] { 2, 2 }));
                features = Relu.relu(Pool2D.MaxPool(Conv1.LayerPred(features), new int[] { 2, 2 }));
                // Classifier head: hidden ReLU layer, then 10-way class scores.
                Tensor hidden = Relu.relu(linear1.LayerPred(features));
                return linear2.LayerPred(hidden);
            }
        }

        public static int Test()
        {
            // Trains CnnNet on MNIST with Adam + cross-entropy for a fixed number of
            // epochs, evaluates on 1000 held-out test images after every epoch, and
            // checkpoints the model to "TestCnnMinst.txt". Returns 0 on completion.
            Mnist mnist = new Mnist();
            mnist.InitMinst(@"..\..\..\..\DataSets\Mnist");

            int epoch = 100;
            int steps = 20;
            int batch_size = 16;
            double learn_rat = 0.001;
            const int testCount = 1000;   // size of the per-epoch evaluation batch

            CommonDef.Opt = new ZGSharp.Optimeters.Adam();
            CnnNet model = new CnnNet();
            // Resume from a previous checkpoint if one exists on disk.
            model.ModelRestore("TestCnnMinst.txt");

            double maxacc = 0;
            for (int i = 0; i < epoch; i++)
            {
                double avg_loss = 0;
                {
                    double[] input_data = new double[batch_size * 28 * 28];
                    int[] label_data = new int[batch_size];
                    for (int j = 0; j < steps; j++)
                    {
                        mnist.GetBatchTrainData(batch_size, input_data, label_data);
                        // NHWC layout: { batch, height, width, channels }.
                        Tensor data = Tensor.ToTensor(new int[] { batch_size, 28, 28, 1 }, input_data);
                        Tensor label_t = CommonDef.GetOneHot(label_data, 10);
                        Tensor pred = model.Run(data);
                        Tensor loss = Losses.CrossEntropy(pred, label_t);
                        loss.Backward(loss, learn_rat);
                        // Hoisted: Avarage() reduces the loss tensor; compute it once
                        // instead of once for the running average and again for logging.
                        double step_loss = loss.Avarage();
                        avg_loss += step_loss / steps;
                        Console.WriteLine("step {0} loss is {1}", j, step_loss);
                    }
                }
                {
                    double[] input_data = new double[testCount * 28 * 28];
                    int[] label_data = new int[testCount];
                    mnist.GetBatchTestData(testCount, input_data, label_data);
                    Tensor data = Tensor.ToTensor(new int[] { testCount, 28, 28, 1 }, input_data);
                    Tensor pred = model.Run(data);
                    double accurrency = CommonDef.GetAccuracy(pred, label_data);
                    Console.WriteLine("epoch {0} loss is {1} test accurrency {2}", i, avg_loss, accurrency);
                    maxacc = Math.Max(maxacc, accurrency);
                }
                // Checkpoint every epoch (not only on improvement) so a later run can resume.
                model.SaveModel("TestCnnMinst.txt");
            }
            Console.WriteLine("########################max accurrency{0}\n", maxacc);
            return 0;
        }
    }
}
