#include <assert.h>
#include <memory.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "mlp_layer.h"
#include "mlp_layer_priv.h"
#include "mlp_nn.h"


/*
 * One node of the network: the layers form a circular doubly-linked list.
 * The node returned by InitNeuralNetwork() doubles as the list head; its
 * `layer` stays NULL until the first AppendLayer() call, which also closes
 * the prev/next circle (see AppendLayer).
 */
typedef struct MLPNeuralNetwork
{
    MLPLayerPtr layer;                      /* layer stored in this node */
    struct MLPNeuralNetwork *prev, *next;   /* circular links; NULL before the first append */

} MLPNeuralNetwork, MLPNeuralNetworkNode;

static void NetworkForward(MLPNeuralNetworkPtr nn_ptr, const double *x, double *y, int n_classes);
static void NetworkBackward(MLPNeuralNetworkPtr nn_ptr, const double *delta_y, int n_classes);
static void UpdateWeights(MLPNeuralNetworkPtr nn_ptr, double lr);


/**
 * @brief Append a fully-connected layer to the end of the network.
 *
 * The first appended layer is stored directly in the head node and closes
 * the circular list on itself; subsequent layers get freshly allocated
 * nodes linked in just before the head.
 *
 * @param p_nn: network handle created by InitNeuralNetwork()
 * @param input_dim: layer input dimension
 * @param output_dim: layer output dimension
 * @param activate: activation function
 * @param activate_diff: derivative of the activation function
 */
void AppendLayer(MLPNeuralNetworkPtr p_nn, int input_dim, int output_dim, 
    ActivateType activate, DiffActivateType activate_diff)
{
    MLPLayerPtr layer_ptr = InitLayer(input_dim, output_dim, activate, activate_diff);
    if (!p_nn->layer) {
        /* First layer: reuse the head node and close the circle on itself. */
        p_nn->layer = layer_ptr;
        p_nn->prev = p_nn->next = p_nn;
        return;
    }

    MLPNeuralNetworkNode *last_node = p_nn->prev;
    MLPNeuralNetworkNode *new_node = malloc(sizeof(MLPNeuralNetworkNode));
    assert(new_node != NULL);   /* match the allocation policy used elsewhere in this file */
    new_node->layer = layer_ptr;
    /* Splice the new node in between the old tail and the head. */
    new_node->prev = last_node;
    last_node->next = new_node;
    new_node->next = p_nn;
    p_nn->prev = new_node;
}

/**
 * @brief Allocate an empty network head node.
 *
 * @return: the network handle; release it with DeinitNeuralNetwork().
 */
MLPNeuralNetworkPtr InitNeuralNetwork(void)
{
    MLPNeuralNetworkPtr ptr = malloc(sizeof(MLPNeuralNetwork));
    assert(ptr != NULL);
    ptr->layer = NULL;
    /* NULL links mean "no layer appended yet"; AppendLayer closes the circle. */
    ptr->prev = ptr->next = NULL;
    return ptr;
}

/**
 * @brief Destroy the network and every layer it owns.
 *
 * @param pptr: address of the handle; set to NULL before any freeing so the
 *              caller never sees a dangling pointer.
 */
void DeinitNeuralNetwork(MLPNeuralNetworkPtr *pptr)
{
    MLPNeuralNetworkPtr head = *pptr;
    *pptr = NULL;
    assert(head != NULL);

    if (head->prev == NULL) {
        /* No layer was ever appended: head is a bare sentinel node. */
        if (head->layer) {
            DeInitLayer(&head->layer);
        }
        free(head);
        return;
    }

    /* Break the circle at the tail so the walk below ends at NULL. */
    head->prev->next = NULL;

    MLPNeuralNetworkNode *node = head;
    while (node != NULL) {
        MLPNeuralNetworkNode *next = node->next;
        DeInitLayer(&node->layer);
        free(node);
        node = next;
    }
}

/**
 * @brief Train the MLP with plain SGD (one weight update per sample).
 * 
 * @param nn_ptr: network instance
 * @param loss_fn: loss function
 * @param loss_diff_fn: derivative of the loss function
 * @param x: sample features
 * @param y: sample targets
 * @param n_samples: number of samples
 * @param n_classes: number of prediction classes
 * @param epochs: number of training epochs
 * @param lr: learning rate
 * 
 * @return: per-epoch average loss history (heap-allocated; caller frees)
 */
double *TrainNeuralNetwork(MLPNeuralNetworkPtr const nn_ptr, 
    LossFunctionType loss_fn,
    LossFnDiffType loss_diff_fn,
    double **x, double **y, int n_samples, int n_classes,
    int epochs, double lr)
{
    double *loss_history = malloc(sizeof(double) * epochs);
    assert(loss_history != NULL);

    /* Scratch buffers reused across every sample and epoch. */
    double *y_pred = malloc(sizeof(double) * n_classes);
    assert(y_pred != NULL);

    double *delta_y = malloc(sizeof(double) * n_classes);
    assert(delta_y != NULL);

    for (int epoch = 0; epoch < epochs; epoch++) {

        double total_loss = 0;

        /* SGD: forward, loss, gradient, backward, step — per sample. */
        for (int sample = 0; sample < n_samples; sample++) {
            NetworkForward(nn_ptr, x[sample], y_pred, n_classes);

            const double sample_loss = loss_fn(y[sample], y_pred, n_classes);
            total_loss += sample_loss;

            loss_diff_fn(sample_loss, y[sample], y_pred, delta_y, n_classes);

            NetworkBackward(nn_ptr, delta_y, n_classes);
            UpdateWeights(nn_ptr, lr);
            // PrintParameters(nn_ptr);
        }

        loss_history[epoch] = total_loss / n_samples;
        // printf("epoch %d, loss = %.5f\n", epoch, loss_history[epoch]);
    }

    free(delta_y);
    free(y_pred);

    return loss_history;
}



/**
 * @brief Run a forward pass for a single sample.
 *
 * @param nn_ptr: trained network
 * @param x: input features (length = first layer's input_dim)
 * @param p: output buffer receiving n_classes predictions
 * @param n_classes: must equal the last layer's output dimension
 */
void Inference(MLPNeuralNetworkPtr nn_ptr, double x[], double p[], int n_classes)
{
    /* The tail of the circular list is the output layer. */
    MLPLayerPtr output_layer = nn_ptr->prev->layer;
    assert(n_classes == output_layer->output_dim);

    NetworkForward(nn_ptr, x, p, n_classes);
}


/**
 * @brief Print every layer's weight matrix and bias vector to stdout,
 *        in forward (input-to-output) order. Debugging aid.
 */
void PrintParameters(MLPNeuralNetworkPtr nn_ptr)
{
    int layer_idx = 0;
    for (MLPNeuralNetworkNode *p = nn_ptr; ;) {
        /* Fixed: the inner loops used `int i`, shadowing the layer counter. */
        MLPLayerPtr layer = p->layer;

        printf("=================== Layer %d ==================\n", ++layer_idx);
        printf("weights: \n");
        /* weights is a row-major input_dim x output_dim matrix. */
        for (int row = 0; row < layer->input_dim; row++) {
            for (int col = 0; col < layer->output_dim; col++) {
                printf("%lf ", *(layer->weights + row * layer->output_dim + col));
            }
            printf("\n");
        }

        printf("bias:\n");
        for (int col = 0; col < layer->output_dim; col++) {
            printf("%lf ", *(layer->bias + col));
        }
        printf("\n");

        p = p->next;
        if (p == nn_ptr) {
            break;
        }
    }
}
/**
 * @brief Forward pass: feed x through every layer in order and copy the
 *        final activations into y_pred.
 *
 * Uses two ping-pong buffers; each layer's output buffer becomes the next
 * layer's input by a pointer swap, instead of the previous
 * realloc-plus-memcpy per layer (same values, one copy less per layer).
 */
static void NetworkForward(MLPNeuralNetworkPtr nn_ptr, const double *x, double *y_pred, int n_classes)
{
    /* Working input buffer, seeded with the sample's features. */
    double *layer_in = malloc(sizeof(double) * nn_ptr->layer->input_dim);
    assert(layer_in != NULL);
    memcpy(layer_in, x, sizeof(double) * nn_ptr->layer->input_dim);

    double *layer_out = NULL;
    for (MLPNeuralNetworkNode *layer_node = nn_ptr; ; ) {

        layer_out = realloc(layer_out, sizeof(double) * layer_node->layer->output_dim);
        assert(layer_out != NULL);
        layer_node->layer->h_forward(layer_node->layer, layer_in, layer_out);

        /* Swap buffers: the fresh activations become the next layer's input
         * without an extra allocation and copy. */
        double *tmp = layer_in;
        layer_in = layer_out;
        layer_out = tmp;

        layer_node = layer_node->next;
        if (layer_node == nn_ptr) {
            break;
        }
    }
    /* After the final swap the network output lives in layer_in. */
    memcpy(y_pred, layer_in, sizeof(double) * n_classes);
    free(layer_out);
    free(layer_in);
}

/**
 * @brief Backward pass: propagate the loss gradient `err` from the output
 *        layer back to the first layer, letting each layer accumulate its
 *        parameter gradients via h_backward.
 *
 * Mirrors NetworkForward's ping-pong buffers: the input-side gradient of
 * one layer becomes the output-side gradient of the previous layer by a
 * pointer swap instead of a realloc-plus-memcpy per layer.
 */
static void NetworkBackward(MLPNeuralNetworkPtr nn_ptr, const double *err, int n_classes)
{
    double *delta_x = NULL;
    double *delta_y = malloc(sizeof(double) * n_classes);
    assert(delta_y != NULL);
    memcpy(delta_y, err, sizeof(double) * n_classes);

    /* Walk backwards from the output layer; `last` is loop-invariant. */
    MLPNeuralNetworkNode *last = nn_ptr->prev;
    for (MLPNeuralNetworkNode *layer_node = last; ; ) {

        delta_x = realloc(delta_x, sizeof(double) * layer_node->layer->input_dim);
        assert(delta_x != NULL);
        layer_node->layer->h_backward(layer_node->layer, delta_y, delta_x);

        /* Swap: this layer's input gradient feeds the previous layer. */
        double *tmp = delta_y;
        delta_y = delta_x;
        delta_x = tmp;

        layer_node = layer_node->prev;
        if (layer_node == last) {
            break;
        }
    }
    free(delta_x);
    free(delta_y);
}

/**
 * @brief Apply one optimizer step (learning rate lr) to every layer.
 */
static void UpdateWeights(MLPNeuralNetworkPtr nn_ptr, double lr)
{
    MLPNeuralNetworkNode *node = nn_ptr;
    do {
        node->layer->h_optim_step(node->layer, lr);
        node = node->next;
    } while (node != nn_ptr);
}