
#include "ednn.h"
#include "ednn_dense.h"
#include "ednn_backends.h"


/* Build step for a dense layer: resolve the weights' input dimension from
 * the preceding layer, allocate the output tensor, and record the
 * parameter count. Returns ednn_ok. */
static ednn_ret dense_build(struct ednn_layer *layer)
{
    struct ednn_layer_dense *fc = (struct ednn_layer_dense *)layer;
    struct ednn_layer *prev_layer =
        ednn_list_entry(layer->list.prev, struct ednn_layer, list);

    /* Adopt the previous layer's output width as the weights' input
     * dimension; the input layer's width is left as configured. */
    if (prev_layer->type != EDNN_LAYER_INPUT) {
        fc->weights->dim[0] = prev_layer->output->dim[0];
    }

    /* The output tensor has the same shape as the bias (one value per unit). */
    layer->output = ednn_tensor_create(fc->bias->dim, fc->bias->ndim);

    /* Trainable parameters = weight matrix elements + bias vector length. */
    layer->stat.params =
        fc->weights->dim[0] * fc->weights->dim[1] + fc->bias->dim[0];

    return ednn_ok;
}

/* Forward pass for a dense layer: runs the q7 fully-connected kernel on the
 * previous layer's output, writing the result into this layer's output
 * tensor. Returns ednn_ok.
 *
 * BUG FIX: the original had a stray `return ednn_ok;` before the kernel
 * call, which made the entire forward pass dead code (a no-op). */
static ednn_ret dense_forward(struct ednn_layer *layer)
{
    struct ednn_layer_dense *dense = (struct ednn_layer_dense *)layer;
    struct ednn_layer *prev = ednn_list_entry(layer->list.prev,
        struct ednn_layer, list);

    /* NOTE(review): argument roles below follow CMSIS-NN-style q7 FC
     * kernels (bias left-shift, output right-shift) — confirm against
     * local_fully_connected_q7's declaration. */
    local_fully_connected_q7(
        prev->output->pd,                /* input activation vector */
        dense->weights->pd,              /* weight matrix */
        ednn_tensor_size(prev->output),  /* input vector length */
        layer->output->dim[0],           /* number of output units */
        dense->bias_shift[0],
        dense->output_shift[0],
        dense->bias->pd,                 /* bias vector */
        layer->output->pd                /* output buffer */
    );

    return ednn_ok;
}

/* Create a dense (fully-connected) layer with `out_features` output units.
 * The weights' input dimension is left at 0 here and is filled in later by
 * dense_build() from the preceding layer's output shape. Allocation failure
 * trips ednn_assert. Returns the new layer as a generic ednn_layer pointer. */
struct ednn_layer *ednn_dense_layer(ednn_size_t out_features)
{
    struct ednn_layer_dense *self = ednn_mem_zalloc(sizeof(*self));
    ednn_assert(self != EDNN_NULL);

    self->super.type = EDNN_LAYER_DENSE;
    self->super.build = dense_build;
    self->super.forward = dense_forward;

    /* weights: [in_features (resolved at build time), out_features] */
    ednn_shape_t weight_shape[2] = {0, out_features};
    self->weights = ednn_tensor_create(weight_shape, 2);

    /* bias: [out_features] */
    ednn_shape_t bias_shape[1] = {out_features};
    self->bias = ednn_tensor_create(bias_shape, 1);

    return (struct ednn_layer *)self;
}