#include "connected_layer.h"

#include <stdio.h>
#include <stdlib.h>

#include "activations.h"
#include "blas.h"
#include "gemm.h"

/*
 * Forward pass for a fully connected layer.
 *
 * Computes output = activation(input * weights^T + biases) for the
 * whole batch:
 *   - l.output is zeroed, then gemm accumulates the matrix product
 *     (M = batch rows, N = outputs cols, K = inputs; the weight matrix
 *     is consumed transposed via the TB=1 flag).
 *   - add_bias adds l.biases to every output row.
 *   - activate_array applies l.activation elementwise in place.
 *
 * NOTE(review): a batch-normalization branch was previously commented
 * out here; l.batch_normalize is currently ignored and the bias is
 * always added. Restore forward_batchnorm_layer() if/when batchnorm
 * support lands.
 */
void forward_connected_layer(layer l, network net)
{
    fill_cpu(l.outputs * l.batch, 0, l.output, 1);

    int m = l.batch;   /* rows of A/C: samples in the batch   */
    int k = l.inputs;  /* shared dim: activations per sample  */
    int n = l.outputs; /* cols of C: neurons in this layer    */
    float *a = net.input;
    float *b = l.weights;
    float *c = l.output;

    /* C += A * B^T : (m x k) * (n x k)^T -> (m x n) */
    gemm(0, 1, m, n, k, 1, a, k, b, k, 1, c, n);

    add_bias(l.output, l.biases, l.batch, l.outputs, 1);
    activate_array(l.output, l.outputs * l.batch, l.activation);
}

/*
 * Backward pass for a fully connected layer.
 *
 * Folds the derivative of the activation function into l.delta:
 * for each of the l.outputs * l.batch values, l.delta is scaled by
 * the activation gradient evaluated at the corresponding l.output.
 *
 * NOTE(review): only the activation gradient is applied here; the
 * weight/bias gradient accumulation and propagation into net.delta
 * that a complete backward pass requires are not visible in this
 * chunk — confirm whether they are handled elsewhere or still TODO.
 */
void backward_connected_layer(layer l, network net)
{
    gradient_array(l.output, l.outputs*l.batch, l.activation, l.delta);
}

/*
 * Allocate and initialize a fully connected (dense) layer.
 *
 * batch           - samples processed per forward/backward pass
 * inputs          - input activations per sample
 * outputs         - neurons (output activations per sample)
 * activation      - elementwise nonlinearity applied to the output
 * batch_normalize - stored on the layer; the forward pass currently
 *                   ignores it (see forward_connected_layer)
 * adam            - accepted for interface compatibility; no Adam
 *                   optimizer state is allocated here — TODO confirm
 *                   whether that is intentional
 *
 * Weights and biases are zero-initialized by calloc; callers that want
 * random weight initialization must fill l.weights themselves.
 * Aborts the process if any allocation fails, rather than returning a
 * layer whose buffers would segfault later inside gemm.
 */
layer make_connected_layer(int batch, int inputs, int outputs, ACTIVATION activation, int batch_normalize, int adam)
{
    layer l = {0};
    l.type = CONNECTED;
    l.inputs = inputs;
    l.outputs = outputs;
    l.batch = batch;
    l.batch_normalize = batch_normalize;

    /* A connected layer is modeled as a 1x1 "image" with c channels. */
    l.h = 1;
    l.w = 1;
    l.c = inputs;
    l.out_h = 1;
    l.out_w = 1;
    l.out_c = outputs;

    l.output = calloc(batch * outputs, sizeof(float));
    l.delta = calloc(batch * outputs, sizeof(float));
    l.weights = calloc(outputs * inputs, sizeof(float));
    l.biases = calloc(outputs, sizeof(float));
    if (!l.output || !l.delta || !l.weights || !l.biases) {
        fprintf(stderr, "make_connected_layer: out of memory\n");
        exit(EXIT_FAILURE);
    }

    l.activation = activation;
    l.forward = forward_connected_layer;
    l.backward = backward_connected_layer;

    return l;
}