#include <stdio.h>
#include <stdlib.h>
#include <string.h>   /* memset, memcpy — standard home; <memory.h> kept for legacy builds */
#include <memory.h>
#include <math.h>
#include <assert.h>
#include "mlp_nn.h"
#include "multi-cls-data.h"

/* Rectified linear unit activation: returns max(0, z). */
double ReLU(double z)
{
    return (z > 0) ? z : 0;
}

/*
 * Derivative of ReLU: 1 for z > 0, otherwise 0.
 * (Uses the subgradient 0 at the non-differentiable point z == 0.)
 */
double DiffReLU(double z)
{
    return (z > 0) ? 1 : 0;
}


/* Identity activation (used on the output layer so raw logits are produced). */
double Linear(double z) { return z; }

/* Derivative of the identity activation: constant 1. */
double DiffLinear(double z)
{
    (void) z;
    return 1;
}


void Softmax(const double z[], double p[], int n)
{
    double d = 0;
    for (int i = 0; i < n; i++) {
        p[i] =  exp(z[i]);
        d += p[i];
    }

    for (int i = 0; i < n; i++) {
        p[i] /= d;
    }
}


/*
 * Softmax cross-entropy loss.
 *
 * y: target distribution (one-hot for hard labels), length n.
 * z: raw network outputs (logits), length n.
 * Returns -sum_i y[i] * log(softmax(z)[i]); the 1e-12 term guards
 * against log(0) when a probability underflows to exactly zero.
 *
 * Fix over the original: removed the unused local `d`.
 */
double CrossEntropy(const double y[], const double z[], int n)
{
    double *p = malloc(sizeof(double) * n);
    assert(p != NULL);

    Softmax(z, p, n);

    double loss = 0;
    for (int i = 0; i < n; i++) {
        loss += -y[i] * log(p[i] + 1e-12);
    }

    free(p);
    return loss;
}

/*
 * Gradient of softmax cross-entropy with respect to the logits z:
 * delta[i] = softmax(z)[i] - y[i].
 *
 * The loss value itself is not needed for this gradient; the `loss`
 * parameter is kept (and explicitly ignored) to match the trainer's
 * callback signature.
 *
 * Fix over the original: removed the unused local `d`.
 */
void DiffLoss(double loss, const double y[], const double z[], double delta[], int n)
{
    (void) loss;

    double *p = malloc(sizeof(double) * n);
    assert(p != NULL);

    Softmax(z, p, n);

    for (int i = 0; i < n; i++) {
        delta[i] = p[i] - y[i];
    }

    free(p);
}

/*
 * Train a 4-128-64-3 MLP on the bundled multi-class dataset, then print
 * each test sample's features, predicted class probabilities, and label.
 *
 * Fixes over the original: the outer malloc results are now checked,
 * the training buffers (onehot, x and their rows) are freed before
 * exit, and the comment is in English.
 */
int main(void)
{
    MLPNeuralNetworkPtr nn = InitNeuralNetwork();

    /* 4 input features -> 128 -> 64 -> 3 class logits. */
    AppendLayer(nn, 4, 128, ReLU, DiffReLU);
    AppendLayer(nn, 128, 64, ReLU, DiffReLU);
    AppendLayer(nn, 64, 3, Linear, DiffLinear);
    double out[3];   /* assumes n_classes == 3 to match the output layer — TODO confirm against the data header */

    /* One-hot encode the integer labels. */
    double **onehot = malloc(sizeof(double *) * n_train_samples);
    assert(onehot != NULL);
    for (int i = 0; i < n_train_samples; i++) {
        onehot[i] = malloc(sizeof(double) * n_classes);
        assert(onehot[i] != NULL);
        memset(onehot[i], 0, sizeof(double) * n_classes);
        onehot[i][y_train[i]] = 1.0;
    }

    /* Copy the training inputs into heap buffers for the trainer. */
    double **x = malloc(sizeof(double *) * n_train_samples);
    assert(x != NULL);
    for (int i = 0; i < n_train_samples; i++) {
        x[i] = malloc(sizeof(double) * n_features);
        assert(x[i] != NULL);
        memcpy(x[i], x_train[i], sizeof(double) * n_features);
    }

    TrainNeuralNetwork(nn, CrossEntropy, DiffLoss, x, onehot, n_train_samples, n_classes, 1000, 0.01);

    for (int i = 0; i < n_test_samples; i++) {
        Inference(nn, x_test[i], out, n_classes);
        Softmax(out, out, n_classes);
        /* Print: features | predicted probabilities | true label. */
        for (int j = 0; j < n_features; j++) {
            printf("%.4lf ", x_test[i][j]);
        }
        printf("|");
        for (int j = 0; j < n_classes; j++) {
            printf("%.4lf ", out[j]);
        }
        printf("|");
        printf("%d\n", y_test[i]);
    }

    /* Release the training buffers (leaked in the original). */
    for (int i = 0; i < n_train_samples; i++) {
        free(onehot[i]);
        free(x[i]);
    }
    free(onehot);
    free(x);

    DeinitNeuralNetwork(&nn);

    return 0;
}

