﻿
#include "pch.h"
#include "nn/Module/Linear.hpp"
#include "nn/Module/module.hpp"
#include "nn/Module/loss.hpp"
#include "nn/_functional.hpp"
#include "Tensor.hpp"
#include <memory>
#include <stdio.h>

using namespace std;
using namespace DimN;

void testBackward();
void testCrossEntropy();
void testForward();


// Entry point: runs each smoke test in sequence. The individual tests
// catch their own exceptions where project calls may throw.
int main()
{
	std::cout << "start test cross entropy\n";  // fixed typo: was "corss"
	testCrossEntropy();
	std::cout << "start test backward\n";
	testBackward();
	std::cout << "start test forward\n";
	testForward();
	return 0;
}


// Verifies the autograd engine: computes Y = a*x^2 + b*x + c both by hand
// and through DimN::TensorD::backward(), printing the gradients of each
// input so they can be compared.
// Reference derivation (thanks epal): https://zhuanlan.zhihu.com/p/587114664
void testBackward()
{
	try {
		{
			// Compute the gradients by hand for comparison.
			float a = 3.1f;
			float b = 2.0f;
			float c = 99.0f;
			float x = 2.0f;
			float y = a * x * x + (b * x) + c;

			// Partial derivatives of y w.r.t. each input.
			float dY_dA = x * x;
			float dY_dX = (a * 2.0f) * x + b;  // float literals avoid double promotion
			float dY_dB = x;
			float dY_dC = 1.0f;

			std::cout << "calculate grad by hand Y:" << y << std::endl;
			std::cout << "dA:" << dY_dA << " dX: " << dY_dX << " dB:" << dY_dB << " dC:" << dY_dC << "\n";
		}

		// Same computation through the tensor autograd engine.
		DimN::TensorD::getTensorBuff().clear();
		DimN::TensorD a, b, c, x, Y;  // dropped unused locals (y, dy_da, dy_db, dy_dx, dy_dc)
		a = 3.1f;
		b = 2.0f;
		c = 99.0f;
		x = 2.0f;

		Y = a * x * x + (b * x) + c;
		Y.backward();

		auto dY_dA = a.gradient;
		auto dY_dX = x.gradient;
		auto dY_dB = b.gradient;
		auto dY_dC = c.gradient;

		Y.print();
		std::cout << "calculate grad by auto\n";
		std::cout << "dA:" << dY_dA << " dX:" << dY_dX << " dB:" << dY_dB << " dC:" << dY_dC << "\n";
	}
	catch (const std::exception& e)  // catch by const reference
	{
		std::cout << e.what();
	}
}


// Smoke-tests DimN::cross_entropy with a 3x3 logits tensor (one row per
// sample) and a 3x1 target tensor (one class index per sample), printing
// both the inputs and the result.
void testCrossEntropy()
{
	// 3x3 logits, indexed (row, col) with one sample per row.
	DimN::TensorF tensor_data(3, 3);
	tensor_data(0,0) = -0.1878f;
	tensor_data(0,1) = -0.3153f;
	tensor_data(0,2) = 0.3348f;
	tensor_data(1,0) = -1.0205f;
	tensor_data(1,1) = 1.4709f;
	tensor_data(1,2) = 0.4120f;
	tensor_data(2,0) = 2.2158f;
	tensor_data(2,1) = -0.1813f;
	tensor_data(2,2) = 1.5044f;

	tensor_data.print();

	// Target class index per sample. y_data is a 3x1 column vector, so it
	// must be indexed (row, 0) — the original code wrote (0,1) and (0,2),
	// addressing columns that a 1-column tensor does not have.
	DimN::TensorF y_data(3, 1);
	y_data(0,0) = 1.0f;
	y_data(1,0) = 2.0f;
	y_data(2,0) = 0.0f;
	auto result = DimN::cross_entropy(tensor_data, y_data);
	result.print();

}

// Smoke-tests the forward pass of a tiny two-layer MLP (5 -> 100 -> 3),
// printing the tensor before and after each layer.
void testForward()
{
	class Net : public Module<float>
	{
	public:
		shared_ptr<Linear<>> fc1;
		shared_ptr<Linear<>> fc2;

		// Build both layers up front and register them with the module tree.
		Net()
			: fc1(std::make_shared<Linear<>>(5, 100)),
			  fc2(std::make_shared<Linear<>>(100, 3))
		{
			add_module("fc1", fc1);
			add_module("fc2", fc2);
		}

		// Runs x through fc1 then fc2, printing at every stage for inspection.
		DimN::TensorF forward(DimN::TensorF x)
		{
			x.print();
			x = fc1->forward(x);
			x.print();
			x = fc2->forward(x);
			x.print();
			return x;
		}
	};

	// Column-vector input filled with 1, 2, 3, 4, 5.
	DimN::TensorF x(5, 1);
	float value = 1.0f;
	for (size_t row = 0; row < 5; ++row) {
		x(row, 0) = value;
		value += 1.0f;
	}

	try {
		Net net;
		net.forward(x);
	}
	catch (std::exception& e)
	{
		std::cout << e.what() << std::endl;
	}

}