#include "vector/fashion_mnist_classification.h"

#include <string>
#include <chrono>
#include "vector/linear.h"
#include "log.h"
#include "vector/tensor.h"
#include "vector/iterator.h"
#include "vector/relu.h"
#include "vector/softmax.h"
#include "vector/cross_entropy_loss.h"
#include "vector/fashion_mnist_load.h"

// Trains a single-linear-layer classifier (784 -> 10) on Fashion-MNIST using
// softmax + cross-entropy, running `epochs` passes over the data in mini-batches.
// The ReLU layer is constructed but currently bypassed in both passes.
// No parameters, no return value; progress is reported via LogInfo.
void FashionMnistClassification::train()
{
    // Load the full training set: normalized images and one-hot labels.
    const std::string resource_dir{"../../data/FashionMnist/"};
    FashionMnistLoad fashion_mnist_load(resource_dir);
    fashion_mnist_load.load();
    const auto& train_images = fashion_mnist_load.get_normalized_train_images();
    // Labels arrive as uint8_t one-hot vectors; cast to double for the loss math.
    const auto& train_labels = Tensor::cast<uint8_t, double>(fashion_mnist_load.get_one_hot_train_labels());

    int64_t d = 28 * 28;    // dimensionality of one flattened sample
    int64_t h = 10;         // number of output classes
    Linear linear(d, h);
    ReLU relu{};
    SoftMax softmax{};
    CrossEntropyLoss cross_entropy_loss{};

    int64_t batch_size = 128;   // samples per mini-batch
    int64_t epochs = 10;        // number of training epochs
    Iterator iterator(train_images.size());

    // Forward-pass activations, reused (overwritten) every iteration.
    std::vector<std::vector<double>> linear_output{};
    std::vector<std::vector<double>> relu_output{};
    std::vector<std::vector<double>> softmax_output{};
    double loss{};
    // Backward-pass gradients of the loss w.r.t. each layer's input.
    std::vector<std::vector<double>> cross_entropy_derivative{};
    std::vector<std::vector<double>> softmax_derivative{};
    std::vector<std::vector<double>> relu_derivative{};
    std::vector<std::vector<double>> linear_derivative{};

    std::vector<std::vector<double>> train_images_batch;
    std::vector<std::vector<double>> train_labels_batch;

    // Loop-invariant: the number of full batches per epoch never changes,
    // so compute it once instead of once per epoch. Cast makes the
    // signed/unsigned division explicit.
    const int64_t loop_per_epoch = static_cast<int64_t>(train_images.size()) / batch_size;
    for(int64_t epoch = 0; epoch < epochs; epoch++)
    {
        // steady_clock is guaranteed monotonic — the correct clock for
        // measuring elapsed time (high_resolution_clock may alias a
        // non-monotonic clock on some platforms).
        auto start = std::chrono::steady_clock::now();
        for(int64_t loop = 0; loop < loop_per_epoch; loop++)
        {
            // Fetch the next mini-batch of images and the matching labels.
            iterator.get_batch(train_images, train_labels, train_images_batch, train_labels_batch, batch_size);

            // Forward: fully-connected layer.
            linear.forward(train_images_batch, linear_output);

            // ReLU is intentionally bypassed for now (kept for experimentation).
            // relu.forward(linear_output, relu_output);

            // Forward: softmax over the linear activations.
            softmax.forward(linear_output, softmax_output);

            // Forward: cross-entropy loss between predictions and one-hot labels.
            cross_entropy_loss.forward(softmax_output, train_labels_batch, loss);

            // Backward: gradient of the loss w.r.t. the softmax output.
            cross_entropy_derivative = cross_entropy_loss.backward();

            // Backward: gradient of the loss w.r.t. the softmax input.
            softmax_derivative = softmax.backward(cross_entropy_derivative);

            // ReLU backward is bypassed to mirror the skipped forward pass.
            // relu_derivative = relu.backward(softmax_derivative);

            // Backward: gradients for the linear layer's weights, bias, and input.
            linear_derivative = linear.backward(softmax_derivative);
        }
        auto end = std::chrono::steady_clock::now();
        auto duration = std::chrono::duration_cast<std::chrono::milliseconds>(end - start);
        LogInfo() << "Train time: " << duration.count() << " ms, epoch: " << epoch << ", epochs: " << epochs;
        LogInfo() << "linear output.shape: " << Tensor::shape(linear_output);

        // LogInfo() << "ReLU output.shape: " << Tensor::shape(relu_output);

        LogInfo() << "SoftMax output.shape: " << Tensor::shape(softmax_output);

        // Report the per-sample average (assumes forward accumulates the
        // batch-sum into `loss` — TODO confirm against CrossEntropyLoss).
        LogInfo() << "CrossEntropyLoss loss: " << loss/batch_size;

        LogInfo() << "CrossEntropyLoss derivative.shape: " << Tensor::shape(cross_entropy_derivative);

        LogInfo() << "Softmax derivative.shape: " << Tensor::shape(softmax_derivative);

        // LogInfo() << "ReLU derivative.shape: " << Tensor::shape(relu_derivative);

        LogInfo() << "Linear derivative.shape: " << Tensor::shape(linear_derivative);

        std::cout << std::endl;
    }
}