#include <stdio.h>
#include <string.h>
#include <time.h>
#include <stdlib.h>
#include <assert.h>
#include "mlp_layer.h"
#include "mlp_layer_priv.h"


/*
 * Forward pass: caches x for backprop, computes z = W.x + b for each
 * output unit, and writes the activated result into y.
 */
static void forward_fn(const MLPLayerPtr layer_ptr, const double x[], double y[])
{
    /* Keep a copy of the input; backward_fn needs it for the weight gradients. */
    memcpy(layer_ptr->input, x, sizeof(double) * layer_ptr->input_dim);

    for (int m = 0; m < layer_ptr->output_dim; m++) {
        const double *row = layer_ptr->weights + m * layer_ptr->input_dim;
        double z = layer_ptr->bias[m];
        for (int n = 0; n < layer_ptr->input_dim; n++) {
            z += x[n] * row[n];
        }
        layer_ptr->z_value[m] = z;  /* pre-activation, reused by backward_fn */
        y[m] = layer_ptr->h_activate(z);
    }
}


/*
 * Backward pass: given dL/dy in delta_y, accumulates the parameter
 * gradients (delta_weights, delta_bias) and writes dL/dx into delta_x.
 * Assumes forward_fn ran first so input and z_value are populated.
 */
static void backward_fn(MLPLayerPtr layer_ptr, const double delta_y[], double delta_x[])
{
    const int in_dim = layer_ptr->input_dim;
    const int out_dim = layer_ptr->output_dim;

    memset(delta_x, 0, sizeof(double) * in_dim);
    memset(layer_ptr->delta_bias, 0, sizeof(double) * out_dim);
    memset(layer_ptr->delta_weights, 0, sizeof(double) * in_dim * out_dim);

    for (int m = 0; m < out_dim; m++) {
        /* dL/dz = h'(z_m) * dL/dy_m */
        double delta_z = layer_ptr->h_activate_diff(layer_ptr->z_value[m]) * delta_y[m];
        layer_ptr->delta_bias[m] += delta_z;

        double *row = layer_ptr->weights + m * in_dim;
        double *grad_row = layer_ptr->delta_weights + m * in_dim;
        for (int n = 0; n < in_dim; n++) {
            delta_x[n] += row[n] * delta_z;                /* propagate to previous layer */
            grad_row[n] += layer_ptr->input[n] * delta_z;  /* dL/dW = x * dL/dz */
        }
    }
}


/*
 * Plain SGD step: b -= lr * db, w -= lr * dw, using the gradients
 * accumulated by backward_fn.
 */
static void optimizer_step(MLPLayerPtr layer_ptr, double lr)
{
    for (int m = 0; m < layer_ptr->output_dim; m++) {
        /* BUG FIX: original read *(layer_ptr->delta_bias) with no offset,
         * so every bias was updated with the gradient of output unit 0. */
        layer_ptr->bias[m] -= lr * layer_ptr->delta_bias[m];
        double *w = layer_ptr->weights + layer_ptr->input_dim * m;
        double *dw = layer_ptr->delta_weights + layer_ptr->input_dim * m;
        for (int n = 0; n < layer_ptr->input_dim; n++) {
            w[n] -= lr * dw[n];
        }
    }
}

/*
 * Fill pparam[0..n-1] with uniform pseudo-random values in [-0.50, 0.49].
 *
 * BUG FIX: the original called srand(time(NULL)) on every invocation.
 * InitLayer calls this twice back-to-back (weights, then bias), and
 * time() has one-second resolution, so both calls were reseeded with
 * the same value and produced identical "random" sequences.  The PRNG
 * is now seeded exactly once per process.
 */
static void RandomInitParameters(double *pparam, int n)
{
    static int seeded = 0;
    if (!seeded) {
        srand((unsigned)time(NULL));
        seeded = 1;
    }
    for (int i = 0; i < n; i++) {
        pparam[i] = (-50 + rand() % 100) * 0.01;
    }
}

/*
 * Allocate and initialize a fully-connected layer.
 *
 * input_dim / output_dim : layer dimensions.
 * activate / activate_diff : activation function and its derivative.
 *
 * Weights and biases are randomly initialized in [-0.50, 0.49].
 * Ownership: the caller releases the returned layer with DeInitLayer().
 * NOTE(review): allocation failure is handled with assert(), which
 * compiles out under NDEBUG — acceptable for this codebase's style, but
 * not production-hardened.
 */
MLPLayerPtr InitLayer(int input_dim, int output_dim, ActivateType activate, DiffActivateType activate_diff)
{
    MLPLayerPtr layer = malloc(sizeof(MLPLayer));
    assert(layer != NULL);

    layer->input_dim = input_dim;
    layer->output_dim = output_dim;
    layer->h_activate = activate;
    layer->h_activate_diff = activate_diff;

    /* Cached copy of the last forward input (needed by backward_fn). */
    layer->input = malloc(sizeof(double) * input_dim);
    assert(layer->input != NULL);

    /* Row-major weights: row m holds the input weights of output unit m. */
    layer->weights = malloc(sizeof(double) * input_dim * output_dim);
    assert(layer->weights != NULL);
    RandomInitParameters(layer->weights, input_dim * output_dim);

    layer->bias = malloc(sizeof(double) * output_dim);
    assert(layer->bias != NULL);
    RandomInitParameters(layer->bias, output_dim);

    /* Pre-activation values, filled by forward_fn. */
    layer->z_value = malloc(sizeof(double) * output_dim);
    assert(layer->z_value != NULL);

    /* Gradient buffers, filled by backward_fn. */
    layer->delta_weights = malloc(sizeof(double) * input_dim * output_dim);
    assert(layer->delta_weights != NULL);

    layer->delta_bias = malloc(sizeof(double) * output_dim);
    assert(layer->delta_bias != NULL);

    layer->h_forward = forward_fn;
    layer->h_backward = backward_fn;
    layer->h_optim_step = optimizer_step;

    return layer;
}

/*
 * Free a layer and all of its buffers, and null the caller's pointer to
 * guard against use-after-free / double-free.
 * Safe to call when *p_layer_ptr is already NULL.
 */
void DeInitLayer(MLPLayerPtr *p_layer_ptr)
{
    MLPLayerPtr layer_ptr = *p_layer_ptr;
    *p_layer_ptr = NULL;
    if (!layer_ptr) { return; }
    free(layer_ptr->delta_bias);
    free(layer_ptr->delta_weights);
    free(layer_ptr->z_value);
    free(layer_ptr->input);
    /* free(NULL) is a no-op, so the original's `if (ptr) free(ptr)`
     * guards on bias/weights were redundant and are removed. */
    free(layer_ptr->bias);
    free(layer_ptr->weights);
    free(layer_ptr);
}

/*
 * Serialize the layer to outfile in native binary layout:
 *   input_dim, output_dim, weights[input_dim*output_dim], bias[output_dim].
 * outfile should be opened in "wb" mode (binary matters on Windows).
 * NOTE(review): fwrite return values are not checked, so a short write
 * is silently ignored — callers should at least check ferror(outfile).
 */
void SaveLayer(const MLPLayerPtr layer_ptr, FILE *outfile)
{
    fwrite(&layer_ptr->input_dim, sizeof(layer_ptr->input_dim), 1, outfile);
    /* Fixed: the original sized this write with sizeof(input_dim); it is
     * harmless while both fields share a type, but wrong if they diverge. */
    fwrite(&layer_ptr->output_dim, sizeof(layer_ptr->output_dim), 1, outfile);
    fwrite(
        layer_ptr->weights,
        sizeof(double) * layer_ptr->input_dim * layer_ptr->output_dim,
        1,
        outfile
    );
    fwrite(layer_ptr->bias, sizeof(double) * layer_ptr->output_dim, 1, outfile);
}


/*
 * Rebuild a layer from a stream written by SaveLayer, for inference only
 * (no gradient buffers).  infile must be open in "rb" mode.
 *
 * Fixes over the original:
 *  - layer_ptr->input was left uninitialized, yet forward_fn memcpy's
 *    every input vector into it — an uninitialized-pointer write.
 *  - h_forward was never assigned, so the forward callback was garbage.
 *  - the training-only fields are now explicitly NULL so DeInitLayer and
 *    callers can safely test them.
 * NOTE(review): fread return values are unchecked; a truncated file
 * yields a partially-initialized layer — consider validating.
 */
MLPLayerPtr LoadLayerForInference(FILE *infile, ActivateType activate)
{
    MLPLayerPtr layer_ptr = malloc(sizeof(MLPLayer));
    assert(layer_ptr != NULL);

    fread(&layer_ptr->input_dim, sizeof(layer_ptr->input_dim), 1, infile);
    fread(&layer_ptr->output_dim, sizeof(layer_ptr->output_dim), 1, infile);

    layer_ptr->weights = malloc(sizeof(double) * layer_ptr->input_dim * layer_ptr->output_dim);
    assert(layer_ptr->weights != NULL);
    fread(
        layer_ptr->weights,
        sizeof(double) * layer_ptr->input_dim * layer_ptr->output_dim,
        1,
        infile
    );

    layer_ptr->bias = malloc(sizeof(double) * layer_ptr->output_dim);
    assert(layer_ptr->bias != NULL);
    fread(layer_ptr->bias, sizeof(double) * layer_ptr->output_dim, 1, infile);

    layer_ptr->z_value = malloc(sizeof(double) * layer_ptr->output_dim);
    assert(layer_ptr->z_value != NULL);

    /* forward_fn always copies the input vector here, so the buffer must
     * exist even in inference mode. */
    layer_ptr->input = malloc(sizeof(double) * layer_ptr->input_dim);
    assert(layer_ptr->input != NULL);

    layer_ptr->h_activate = activate;
    layer_ptr->h_activate_diff = NULL;
    layer_ptr->delta_weights = NULL;
    layer_ptr->delta_bias = NULL;

    layer_ptr->h_forward = forward_fn;
    layer_ptr->h_backward = NULL;
    layer_ptr->h_optim_step = NULL;

    return layer_ptr;
}

/*
 * Rebuild a layer for continued training: performs the inference load,
 * then attaches the activation derivative, gradient buffers, and the
 * training callbacks.  infile must be open in "rb" mode.
 * Ownership: caller releases the layer with DeInitLayer().
 */
MLPLayerPtr LoadLayerForRetrain(
    FILE *infile, 
    ActivateType activate, 
    DiffActivateType activate_diff)
{
    MLPLayerPtr layer_ptr = LoadLayerForInference(infile, activate);
    assert(layer_ptr != NULL);
    layer_ptr->h_activate_diff = activate_diff;

    layer_ptr->delta_weights = malloc(sizeof(double) * layer_ptr->input_dim * layer_ptr->output_dim);
    assert(layer_ptr->delta_weights != NULL);

    layer_ptr->delta_bias = malloc(sizeof(double) * layer_ptr->output_dim);
    assert(layer_ptr->delta_bias != NULL);

    /* BUG FIX: the original set input = NULL, but forward_fn memcpy's
     * every input vector into this cache, so training crashed on the
     * first forward pass.  NOTE(review): if LoadLayerForInference is
     * changed to allocate this buffer itself, remove this allocation to
     * avoid leaking the earlier one. */
    layer_ptr->input = malloc(sizeof(double) * layer_ptr->input_dim);
    assert(layer_ptr->input != NULL);

    layer_ptr->h_forward = forward_fn;  /* original never set the forward hook */
    layer_ptr->h_backward = backward_fn;
    layer_ptr->h_optim_step = optimizer_step;

    return layer_ptr;
}