#include "vector/softmax_test.h"

#include <cassert>
#include <chrono>
#include <cmath>
#include <execution>
#include <numeric>

#include "log.h"
#include "tools/measure_execution_time.h"
#include "vector/normal.h"
#include "vector/softmax.h"

// Fixture: 4 rows of 10 raw logits fed to SoftMax::forward in test().
// Also reused as the upstream gradient for SoftMax::backward.
static const std::vector<std::vector<double>> input{
{-1.74659, 0.123506, -1.9983, 1.35272, 1.99304, -3.17698, -0.777461, 1.25511, -0.340708, -2.10382}, 
{-1.06658, -0.414349, -1.22249, 1.34097, 2.61636, -1.44539, 0.0635651, -0.219036, 0.668611, -1.092}, 
{-0.808914, -0.211129, -0.153677, 1.76608, 0.91764, -3.05343, -1.37189, 0.704946, 2.16131, -1.02346}, 
{-0.958162, -0.926441, -2.14889, 1.19929, 1.63712, -1.99042, -0.625384, -1.09716, 0.355972, -1.54769}};

// Expected SoftMax::forward result for `input`: each row is a probability
// distribution (non-negative entries summing to 1), compared at 1e-5 tolerance.
static const std::vector<std::vector<double>> expect_output{
{0.00996962, 0.064692, 0.00775108, 0.221152, 0.419546, 0.00238489, 0.0262764, 0.200586, 0.0406673, 0.00697486}, 
{0.0148357, 0.0284819, 0.012694, 0.164776, 0.589914, 0.0101577, 0.0459329, 0.0346252, 0.0841187, 0.0144633}, 
{0.0204002, 0.0370894, 0.0392826, 0.26788, 0.114675, 0.00216199, 0.0116182, 0.0927034, 0.397728, 0.0164611}, 
{0.0319662, 0.0329964, 0.00971772, 0.276478, 0.428359, 0.0113864, 0.0445876, 0.0278179, 0.118963, 0.0177281}};

// Expected SoftMax::backward result when the upstream gradient equals `input`
// (test() passes `input` as output_gradient), compared at 1e-6 tolerance.
static const std::vector<std::vector<double>> expect_derivative{
{-0.0304296, -0.0764746, -0.0256091, 0.0104122, 0.288396, -0.0106905, -0.0547364, -0.0101353, -0.0669525, -0.0237805}, 
{-0.0416725, -0.0614268, -0.0376356, -0.0661375, 0.515592, -0.0323799, -0.0771114, -0.0679133, -0.0903213, -0.0409942}, 
{-0.0457465, -0.0609996, -0.0623499, 0.0890819, -0.0591602, -0.00970078, -0.0325939, -0.0675428, 0.289456, -0.0404448}, 
{-0.0589065, -0.0597583, -0.0294787, 0.0870014, 0.322343, -0.0327363, -0.0673272, -0.0551288, -0.0628887, -0.0431201}};

/// Correctness test: runs SoftMax forward/backward on the fixture data and
/// checks the results against precomputed expected values.
///
/// Fixes vs. the previous version: the row sum was computed (with a stray
/// double semicolon) but never used once the debug logging was commented out;
/// it is now asserted for every row, not just row 0.
void SoftMaxTest::test()
{
    SoftMax softmax{};
    std::vector<std::vector<double>> output{};
    softmax.forward(input, output);

    // Every softmax row must be a probability distribution: sum ~= 1.
    for (const auto& row : output)
    {
        const double sum = std::reduce(row.begin(), row.end(), 0.0);
        assert(std::abs(sum - 1.0) < 1e-9);
    }

    // backward() is driven with the raw inputs as the upstream gradient,
    // matching how expect_derivative was generated.
    std::vector<std::vector<double>> output_gradient = input;
    auto derivative = softmax.backward(output_gradient);

    assert(Tensor::equal(output, expect_output, 1e-5));
    assert(Tensor::equal(derivative, expect_derivative, 1e-6));
    LogInfo() << "SoftMaxTest test success.";
}

/// Benchmark: times SoftMax forward + backward over a randomly initialized
/// 128x10 batch for a fixed number of epochs via MEASURE_CPU_TIME.
void SoftMaxTest::performance_test()
{
    SoftMax softmax{};

    // Random batch drawn from N(1, 0.1): 128 rows of 10 values.
    const std::vector<std::vector<double>> batch = Normal::matrix<double>(128, 10, 1, 0.1);
    std::vector<std::vector<double>> activations{};
    std::vector<std::vector<double>> gradients = batch;

    constexpr int64_t kEpochs{10000};
    MEASURE_CPU_TIME("SoftMaxTest", kEpochs, {
        softmax.forward(batch, activations);
        softmax.backward(gradients);
    });
}