#include <assert.h>
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>  /* standard header for memcpy */
#include <memory.h>  /* non-standard; kept for compatibility, <string.h> above is the portable one */
#include "mlp_nn.h"

#define N_LABELS 1
#define N_FEATURES 3
#define N_SAMPLES 8

// Input features: all 8 combinations of three binary inputs —
// the complete truth table for the 3-input XOR (odd parity) problem.
double x_data[][3] = {
    {0, 0, 0},
    {0, 0, 1},
    {0, 1, 0},
    {0, 1, 1},
    {1, 0, 0},
    {1, 0, 1},
    {1, 1, 0},
    {1, 1, 1}
};

// Target labels: XOR (odd parity) of the three input bits —
// 1 exactly when an odd number of the inputs are 1.
double y_data[][1] = {
    {0},
    {1},
    {1},
    {0},
    {1},
    {0},
    {0},
    {1} 
};

// Rectified linear unit activation: max(0, z).
double ReLU(double z)
{
    return (z > 0) ? z : 0;
}

// Derivative of ReLU: 1 for z > 0, otherwise 0
// (the subgradient 0 is used at z == 0).
double DiffReLU(double z)
{
    return (z > 0) ? 1 : 0;
}


double Sigmoid(double z)
{
    return 1.0 / (1 + exp(-z));
}

double DiffSigmoid(double z) {
    double p = Sigmoid(z);
    return p * ( 1 - p);
}




double BinaryCrossEntropy(const double y[], const double p[], int k)
{
    assert(k == 1);
    double epsilon = 1e-9;
    double loss = -(y[0] * log(p[0] + epsilon) + (1 - y[0]) * log ( 1 - p[0] + epsilon));
    return loss;
}

// Gradient of binary cross-entropy w.r.t. the prediction p, written into
// delta[0]: dL/dp = (p - y) / (p * (1 - p)). Epsilon guards the division
// when p is exactly 0 or 1. The loss argument is unused — the gradient is
// computed directly from y and p.
void DiffLoss(double loss, const double y[], const double p[], double delta[], int k)
{
    assert(k == 1 && delta != NULL);
    (void) loss;
    const double eps = 1e-9;
    const double denom = p[0] * (1 - p[0]) + eps;
    delta[0] = (p[0] - y[0]) / denom;
}


// Trains a 3-16-1 MLP on the 3-input XOR (odd parity) truth table, prints
// the loss every 500 epochs, then prints the prediction for each sample.
// Fixes over the original: every malloc result is checked before use, and
// the loss array returned by TrainNeuralNetwork is checked for NULL before
// indexing.
int main()
{
    MLPNeuralNetworkPtr nn = InitNeuralNetwork();

    // Two layers: 3 inputs -> 16 hidden (ReLU) -> 1 output (sigmoid).
    AppendLayer(nn, N_FEATURES, 16, ReLU, DiffReLU);
    AppendLayer(nn, 16, N_LABELS, Sigmoid, DiffSigmoid);

    // Copy the static tables into heap-allocated double** buffers — the
    // layout the network's training API takes.
    double **labelArray = malloc(sizeof(double *) * N_SAMPLES);
    double **x = malloc(sizeof(double *) * N_SAMPLES);
    if (labelArray == NULL || x == NULL) {
        fprintf(stderr, "out of memory\n");
        return EXIT_FAILURE;
    }
    for (int i = 0; i < N_SAMPLES; i++) {
        labelArray[i] = malloc(sizeof(double) * N_LABELS);
        x[i] = malloc(sizeof(double) * N_FEATURES);
        if (labelArray[i] == NULL || x[i] == NULL) {
            fprintf(stderr, "out of memory\n");
            return EXIT_FAILURE;
        }
        memcpy(labelArray[i], y_data[i], sizeof(double) * N_LABELS);
        memcpy(x[i], x_data[i], sizeof(double) * N_FEATURES);
    }

    int epoches = 30000;
    // Presumably one loss value per epoch, owned by the caller — the
    // contract lives in mlp_nn.h; guarded against NULL here just in case.
    double *loss = TrainNeuralNetwork(nn, BinaryCrossEntropy, DiffLoss, x,
        labelArray, N_SAMPLES, N_LABELS, epoches, 0.001);
    if (loss != NULL) {
        for (int i = 0; i < epoches; i++) {
            if ((i + 1) % 500 == 0) {
                printf("epoch %d, loss = %.7f\n", i + 1, loss[i]);
            }
        }
        free(loss);
    }

    // Run inference on every training sample and compare against the label.
    double p[N_LABELS];
    for (int i = 0; i < N_SAMPLES; i++) {
        Inference(nn, x[i], p, N_LABELS);

        printf("[%.0f %.0f %.0f] ->预测值:[%.4lf], 真实标签[%.0f]\n", x[i][0], x[i][1], x[i][2],
            p[0], y_data[i][0]);
    }

    DeinitNeuralNetwork(&nn);

    for (int i = 0; i < N_SAMPLES; i++) {
        free(x[i]);
        free(labelArray[i]);
    }
    free(x);
    free(labelArray);

    return 0;
}

