#ifndef DS_LAYER_H_
#define DS_LAYER_H_
#include <stdlib.h>
#include "activations.h"

/*
 * Identifies the concrete kind of a layer.
 * Values are written out explicitly so the numbering (which matches the
 * original declaration order) cannot drift if entries are later inserted.
 */
typedef enum {
    CONVOLUTIONAL   = 0,
    DECONVOLUTIONAL = 1,
    CONNECTED       = 2,
    MAXPOOL         = 3,
    SOFTMAX         = 4,
    DETECTION       = 5,
    DROPOUT         = 6,
    CROP            = 7,
    ROUTE           = 8,
    COST            = 9,
    NORMALIZATION   = 10,
    AVGPOOL         = 11,
    LOCAL           = 12,
    SHORTCUT        = 13,
    ACTIVE          = 14,
    RNN             = 15,
    GRU             = 16,
    LSTM            = 17,
    CRNN            = 18,
    BATCHNORM       = 19,
    NETWORK         = 20,
    XNOR            = 21,
    REGION          = 22,
    YOLO            = 23,
    ISEG            = 24,
    REORG           = 25,
    UPSAMPLE        = 26,
    LOGXENT         = 27,
    L2NORM          = 28,
    BLANK           = 29   /* sentinel / unset */
} LAYER_TYPE;

/* Loss-function selector used by COST layers (see layer.cost_type). */
typedef enum {
    SSE,     /* sum of squared errors */
    MASKED,  /* presumably SSE with masked elements ignored — confirm in impl */
    L1,      /* absolute-error loss */
    SEG,
    SMOOTH,  /* presumably smooth-L1 — confirm in impl */
    WGAN
} COST_TYPE;

/*
 * Forward declarations: layer and network reference each other (the
 * forward/backward function pointers in struct layer take both), so the
 * typedef names are introduced here before either full definition.
 * struct layer is defined below; struct network is defined elsewhere.
 */
struct network;
typedef struct network network;

struct layer;
typedef struct layer layer;

/*
 * Hyper-parameters handed to a layer's weight-update step.
 * Field order is part of the ABI; do not reorder.
 */
typedef struct{
    int batch;           /* mini-batch size */
    float learning_rate;
    float momentum;      /* SGD momentum coefficient */
    float decay;         /* weight-decay (regularization) factor */
    int adam;            /* nonzero => use Adam-style update (B1/B2/eps/t below) */
    float B1;            /* Adam beta1 */
    float B2;            /* Adam beta2 */
    float eps;           /* Adam epsilon */
    int t;               /* Adam timestep counter */
} update_args;

/*
 * Generic layer descriptor: a single struct serves every LAYER_TYPE, so
 * most fields are meaningful only for particular layer kinds.
 * Field order is part of the ABI; do not reorder.
 */
struct layer
{
    LAYER_TYPE type;        /* which kind of layer this is */
    ACTIVATION activation;  /* nonlinearity applied to the output */
    COST_TYPE cost_type;    /* loss selector — relevant to COST layers */
    int h,w,c;              /* input height, width, channel count */
    int n;                  /* number of filters/units — TODO confirm per layer kind */
    int groups;
    int size;               /* filter / pooling window size */
    int side;
    int stride;
    int pad;                /* spatial zero-padding */
    int binary;             /* nonzero => binarized weights */
    int batch_normalize;    /* nonzero => apply batch normalization */
    int batch;              /* mini-batch size this layer was built for */
    int xnor;               /* nonzero => XNOR (binary) arithmetic */
    int stopbackward;       /* nonzero => presumably halts gradient flow here — confirm */
    int noloss;
    int truth;
    float temperature;      /* softmax temperature */

    float *weights;         /* learned parameters; nweights elements */
    float *weights_updates; /* accumulated gradient w.r.t. weights */
    int nweights;

    float *biases;          /* learned biases; nbiases elements */
    float *biases_updates;  /* accumulated gradient w.r.t. biases */
    int nbiases;

    int out_h;              /* output height */
    int out_w;              /* output width */
    int out_c;              /* output channel count */
    int outputs;            /* output elements per batch item — presumably out_h*out_w*out_c */

    int inputs;             /* input elements per batch item */

    float *output;          /* forward-pass activations */
    float *delta;           /* gradient w.r.t. this layer's output */
    float *loss;            /* per-element loss buffer (loss-producing layers) */
    float *indexes;         /* NOTE(review): presumably argmax positions for
                               max-pool backprop; declaring indices as float*
                               looks suspicious — verify against the users of
                               this field (int* is the more natural type) */
    float *cost;            /* scalar cost accumulator */

    /* Per-type forward/backward implementations, dispatched through these
       pointers. Note both structs are passed BY VALUE. */
    void (*forward)   (struct layer, struct network);
    void (*backward)  (struct layer, struct network);
    size_t workspace_size;  /* scratch bytes this layer requires from the network */
};

/**
 * Add biases to a layer's output buffer.
 * Presumably output is laid out as [batch][n][size] and biases[i] is added to
 * every element of slice i — TODO confirm against the implementation.
 *
 * @param output  activation buffer to modify in place
 * @param biases  n bias values
 * @param batch   mini-batch size
 * @param n       number of bias channels
 * @param size    elements per channel (spatial extent)
 */
void add_bias(float *output, float *biases, int batch, int n, int size);

/**
 * Release the heap buffers owned by a layer (weights, outputs, deltas, ...).
 * The layer is passed by value; only its pointed-to allocations are freed.
 */
void free_layer(layer l);

#endif  // DS_LAYER_H_