﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using ZGSharp;
using ZGSharp.Layers;


namespace ZGSharpTest
{
    internal class TestSameOr
    {
        /// <summary>
        /// Small feed-forward network for the XNOR ("same or") task:
        /// 2 inputs -> 16 -> 16 -> 2 outputs, with ReLU activations
        /// between the linear layers.
        /// </summary>
        public class Net : Model
        {
            // Fully connected layers: 2 -> 16 -> 16 -> 2.
            Linear layerLinear0 = new ZGSharp.Layers.Linear(2, 16);
            Linear layerLinear1 = new ZGSharp.Layers.Linear(16, 16);
            Linear layerLinear2 = new ZGSharp.Layers.Linear(16, 2);

            /// <summary>
            /// Forward pass: linear -> ReLU -> linear -> ReLU -> linear.
            /// Returns the raw 2-element output of the last linear layer
            /// (presumably logits consumed by CrossEntropy — confirm with
            /// the Losses implementation).
            /// </summary>
            protected override Tensor forward(Tensor input)
            {
                // NOTE(review): the original selected between a ReLU and a
                // Sigmoid variant with `if (true)`, leaving the sigmoid
                // branch unreachable (CS0162). The dead branch was removed;
                // behavior is unchanged. If sigmoid activations are needed
                // again, swap Relu.relu for Sigmoid.sigmoid here.
                Tensor tensor = layerLinear0.LayerPred(input);
                tensor = Relu.relu(tensor);
                tensor = layerLinear1.LayerPred(tensor);
                tensor = Relu.relu(tensor);
                tensor = layerLinear2.LayerPred(tensor);
                return tensor;
            }
        }

        /// <summary>
        /// Trains <c>Net</c> on the XNOR ("same or") problem — label is 1
        /// when the two binary inputs are equal, 0 otherwise — then prints
        /// ten random predictions and saves the model to "TestSameOr.txt".
        /// </summary>
        /// <returns>Always 0 (used as a process-style exit code).</returns>
        public static int Test()
        {
            int step = 100;
            int batch_size = 32;
            double learn_rat = 0.1;
            CommonDef.Opt = new ZGSharp.Optimeters.Adam();
            Net model = new Net();
            //model.ModelRestore("TestSameOr.txt");
            Random rdm = new Random();
            for (int i = 0; i < step; i++)
            {
                // One training batch: each sample is a pair of bits laid out
                // flat as [a0, b0, a1, b1, ...], shaped to (batch_size, 2).
                double[] input = new double[2 * batch_size];
                int[] label = new int[batch_size];
                for (int j = 0; j < batch_size; j++)
                {
                    // Keep a and b as ints so the equality check below is an
                    // exact integer comparison, not `==` on doubles (the
                    // original declared them double and compared with ==).
                    int a = rdm.Next(2);
                    int b = rdm.Next(2);
                    if (a == b)
                    {
                        label[j] = 1;   // equal bits -> class 1; default 0 otherwise
                    }
                    input[j * 2] = a;
                    input[j * 2 + 1] = b;
                }
                Tensor data = Tensor.ToTensor(new int[] { batch_size, 2 }, input);
                Tensor label_t = CommonDef.GetOneHot(label, 2);
                Tensor pred = model.Run(data);
                //Tensor loss = Losses.MSE(pred, label_t);
                Tensor loss = Losses.CrossEntropy(pred, label_t);
                loss.Backward(loss, learn_rat);
                Console.WriteLine("Train Times {0}, loss is {1}", i, loss.Avarage());
            }
            // Spot-check: print both class scores for ten random input pairs.
            for (int i = 0; i < 10; i++)
            {
                double[] input = new double[] { rdm.Next(2), rdm.Next(2) };
                Tensor data = Tensor.ToTensor(new int[] { 1, 2 }, input);
                Tensor pred = model.Run(data);
                // Console.WriteLine formats directly; the original wrapped the
                // same format string in a redundant String.Format call.
                Console.WriteLine("TEST:{0} {1} {2} {3}", input[0], input[1], Math.Round(pred.outputs[0], 2), Math.Round(pred.outputs[1], 2));
            }
            model.SaveModel("TestSameOr.txt");
            return 0;
        }
    }
}
