#pragma once
#include <cmath>
#include <cstdlib>
#include <ctime>
#include <iostream>
#include <random>
#include <vector>
#include "readset.hpp"

#define input (neuron[0])              //输入
#define output (neuron[layer_num - 1]) //输出

using namespace std;

//These typedefs exist so code formatters don't merge the two '>' of nested templates into '>>'.
typedef vector<double> V_DOUBLE;
typedef vector<V_DOUBLE> V_V_DOUBLE;

// Logistic sigmoid activation: maps any real x into (0, 1).
// `inline` is required here: this function is *defined* in a header, so
// without it every translation unit that includes the header emits its own
// definition and the program violates the One Definition Rule at link time.
inline double sigmoid(double x)
{
    return (1 / (1 + exp(-x)));
}

template <int layer_num>
class network
{
  public:
    void random_init_weight(); // randomly initialize all weights
    void set_neuron(int *);    // set the neuron count of each layer (array of layer_num ints)

    void foward();                       // forward propagation (sic: "forward")
    double cost();                       // cross-entropy cost averaged over all m samples
    void train(double, double);          // train the network (learning rate, stop threshold)
    vector<double> test(vector<double>); // run one sample through the net, return the output layer

    int m = 100;               // number of training samples
    vector<V_DOUBLE> error;    // error terms; error[a][b]: b-th error of layer a+1 (no entry for the input layer)
    vector<V_DOUBLE> neuron;   // neurons; neuron[a][b]: b-th neuron of layer a (bias is last, e.g. neuron[0].back())
    vector<V_V_DOUBLE> weight; // weights; weight[a][b][c]: weight from neuron c of layer a to neuron b of layer a+1
};

template <int layer_num>
// Allocate neuron, weight and error storage for the given layer sizes.
// arr must point to layer_num ints: arr[i] is the neuron count of layer i.
void network<layer_num>::set_neuron(int *arr)
{
    // Size the neuron layers. Every layer except the output gets one extra
    // trailing "bias" neuron pinned to 1.
    neuron.resize(layer_num);
    for (int i = 0; i < layer_num; i++)
    {
        if (i != (layer_num - 1))
        {
            neuron[i].resize(arr[i] + 1);
            neuron[i].back() = 1; // bias neuron
        }
        else
        {
            neuron[i].resize(arr[i]); // output layer has no bias
        }
    }

    // Size the weights. foward() indexes weight[i][j][k] with j ranging over
    // layer i+1 and k over layer i, so weight[i] needs one row per neuron of
    // layer i+1 and each row one entry per neuron of layer i (bias included).
    // NOTE: the original code had these two dimensions swapped
    // (weight[i].resize(arr[i] + 1) / rows of arr[i + 1]), which only stayed
    // in bounds when adjacent layers happened to be the same size.
    weight.resize(layer_num - 1);
    for (int i = 0; i < (layer_num - 1); i++)
    {
        weight[i].resize(neuron[i + 1].size());
        for (size_t j = 0; j < weight[i].size(); j++)
        {
            weight[i][j].resize(neuron[i].size());
        }
    }

    // Size the error terms: one per neuron of every layer after the input.
    error.resize(layer_num - 1);
    for (int i = 0; i < (layer_num - 1); i++)
    {
        error[i].resize(neuron[i + 1].size());
    }
}

template <int layer_num>
// Fill every weight with a pseudo-random value in [0.1, 0.9).
//
// The original code called srandom(clock()) inside the *innermost* loop;
// clock() frequently returns the same tick across consecutive calls, so long
// runs of weights came out identical. srandom()/random() are also POSIX,
// not standard C++. Seed a standard <random> engine once instead; the
// distribution can never produce NaN/inf/0, so no fallback value is needed.
void network<layer_num>::random_init_weight()
{
    static mt19937 gen{random_device{}()};
    uniform_real_distribution<double> dist(0.1, 0.9);

    for (size_t i = 0; i < weight.size(); i++)
    {
        for (size_t j = 0; j < weight[i].size(); j++)
        {
            for (size_t k = 0; k < weight[i][j].size(); k++)
            {
                weight[i][j][k] = dist(gen);
            }
        }
    }
}

template <int layer_num>
// Forward propagation: each non-input neuron becomes
// sigmoid(weighted sum over the whole previous layer, bias included).
void network<layer_num>::foward()
{
    for (size_t i = 0; i < (layer_num - 1); i++)
    {
        // Hidden layers carry a trailing bias neuron that must stay pinned
        // to 1, so only compute activations for the real neurons. (The
        // original loop also overwrote the bias with sigmoid(tmp), and
        // printed a debug line for every single multiply-accumulate.)
        size_t real_neurons = (i == (size_t)(layer_num - 2))
                                  ? neuron[i + 1].size()      // output layer: no bias
                                  : neuron[i + 1].size() - 1; // hidden layer: skip bias
        for (size_t j = 0; j < real_neurons; j++)
        {
            double tmp = 0;
            for (size_t k = 0; k < neuron[i].size(); k++)
            {
                tmp += weight[i][j][k] * neuron[i][k];
            }
            neuron[i + 1][j] = sigmoid(tmp);
        }
    }
}

template <int layer_num>
// Run one input sample through the network and return the output layer.
// `sample` is expected to hold arr[0] values (one per real input neuron).
vector<double> network<layer_num>::test(vector<double> sample)
{
    // Copy element-by-element instead of assigning the whole vector:
    // the input layer owns one extra trailing bias neuron (pinned to 1)
    // that a plain `input = sample;` would destroy, silently shrinking
    // the layer and desynchronizing it from the weight matrix.
    for (size_t j = 0; j < sample.size() && j < input.size(); j++)
    {
        input[j] = sample[j];
    }
    foward();
    return output;
}

template <int layer_num>
// Cross-entropy cost averaged over all m samples:
//   -1/m * sum_i sum_j [ y_ij*log(o_ij) + (1 - y_ij)*log(1 - o_ij) ]
double network<layer_num>::cost()
{
    double ret = 0;

    for (int i = 0; i < m; i++) // int, not size_t: m is int (avoids signed/unsigned mix)
    {
        // One forward pass and one label read per sample. The original
        // called test(read_image(i)) — a full forward pass — and
        // read_label(i) twice per output neuron.
        auto out = test(read_image(i));
        auto label = read_label(i);
        for (size_t j = 0; j < out.size(); j++)
        {
            ret += label[j] * log(out[j]) + (1 - label[j]) * log(1 - out[j]);
        }
    }

    return (ret / -m);
}

template <int layer_num>
// Train the network with gradient descent until cost() falls below stop_door.
//
// NOTE(review): this function is visibly unfinished — the convergence loop,
// the hidden-layer error accumulation, and the weight update are all
// commented out. As written, one call performs m forward passes, fills the
// output-layer error, and changes no weights. Also note the inner
// `for (size_t i = ...)` loops shadow the sample index `i` of the enclosing
// loop, which must be untangled before the commented code is revived.
void network<layer_num>::train(double learn_rate, double stop_door)
{
    double tmp = 0;

    //while (cost() > stop_door)
    {
        //cout << cost() << endl;

        for (size_t i = 0; i < m; i++)
        {
            test(read_image(i));
            // Output-layer error: delta = prediction - label.
            for (size_t j = 0; j < neuron[layer_num - 1].size(); j++)
            {
                error[layer_num - 2][j] = output[j] - read_label(i)[j];
            }
            // Back-propagate the error through the hidden layers.
            // (this `i` shadows the outer sample index)
            for (size_t i = layer_num - 2; i > 0; i--)
            {
                for (size_t j = 0; j < neuron[i].size(); j++)
                {
                    tmp = 0;
                    for (size_t k = 0; k < neuron[i - 1].size(); k++)
                    {
                        //tmp += weight[i][j][k];// * error[i][j];
                        // cout << "weight[" << i << "][" << j << "][" << k  << "]:" << weight[i][j][k] << endl;
                    }
                    error[i - 1][j] = tmp;
                }
            }
            // Gradient-descent weight update (unimplemented).
            for (size_t i = 0; i < weight.size(); i++)
            {
                for (size_t j = 0; j < weight[i].size(); j++)
                {
                    for (size_t k = 0; k < weight[i][j].size(); k++)
                    {
                        //weight[i][j][k] += learn_rate * error[i];
                    }
                }
            }
        }
    }
}