#pragma once
#include "myactivation.h"

//shape of a matrix: number of rows and number of columns
struct msize {
	int row;
	int col;
};

//fully-connected layer: holds a weight matrix and an activation name
class layer {
protected:
	mat weight;
	std::string activation;
private:
	int units;
public:
	//params: number of units, activation function name,
	//input_shape (0 by default — presumably inferred elsewhere; TODO confirm)
	layer(int i, std::string temp = "none", int input_shape = 0);
	layer(const layer& temp);
	layer() {}

	//virtual destructor: this class is a polymorphic base (cnn_derived
	//inherits from it), so deleting through a layer* must be safe
	virtual ~layer() = default;

	//print activation of layer
	void showAct() const;

	//return number of cols of weight
	virtual int weight_column() const;

	//return number of rows of weight
	int weight_row() const;

	//return size of weight
	msize shape() const;

	//weight initialization
	//elements of weight are in range of (-r,r)
	virtual void random(int rows, double r);

	//print params of the layer
	void describe() const;

	//execute forward propagation with different activation function
	//the params are input and output of one layer respectively
	virtual void forward_prop(const mat& input, mat& result);

	//execute backward propagation with different activation function
	//the params are input of the layer_i, gradient of layer_i+1, gradient of layer_i
	virtual void backward_prop(const mat& input, const mat& gradient1, mat& gradient0);

	//update weight with calculated delta
	void update(const mat& delta);

	//serialize layer i to storage (exact format defined in the .cpp)
	void layer_export(int i) const;

	//load layer parameters from the given buffer/path
	void layer_input(const char* ptr);
};


//standalone convolutional layer: a bank of kernels scanned over a picture
class cnn_layer {
private:
	//zero-initialized so a default-constructed layer has a
	//well-defined state instead of indeterminate ints
	int kernel_rows = 0, kernel_cols = 0;
	int num_kernels = 0, stride = 0, conv_size = 0;
	int pic_rows = 0, pic_cols = 0;
	mat kernels;
	mat flattend_input;
	std::string activation;
public:
	/*the params are number of rows of kernels, number of cols of kernels,
	number of kernels, stride of scanning, original number of rows of pictures,
	original number of cols of pictures, activation function*/
	cnn_layer(int kernelrow, int kernelcol, int kernelnum, int step, int picrows, int piccols, std::string temp = "none");
	cnn_layer(const cnn_layer& temp);
	cnn_layer() = default;

	//return the number of cols of output of layer
	int weight_column() const;

	//kernels initialization
	//elements of kernels are in range of (-r,r)
	void random(int rows, double r);

	//print params of the layer
	void describe() const;

	//execute forward propagation with different activation function
	//the params are input and output of one layer respectively
	void forward_prop(const mat& input, mat& result);

	//execute backward propagation with different activation function
	//the params are input of the layer_i, gradient of layer_i+1, gradient of layer_i
	void backward_prop(const mat& input, const mat& gradient1, mat& gradient0);

	//update weight with calculated delta
	void update(int rows, double learning_rate, const mat& delta);

	//serialize layer i to storage (exact format defined in the .cpp)
	void cnnlayer_export(int i) const;

	//load layer parameters from the given buffer/path
	void layer_input(const char* ptr);
};



//derived cnn, the firts idea is kind of strange, which I give up
class cnn_derived :public layer {
private:
	int kernel_rows, kernel_cols;
	int num_kernels, stride, conv_size;
	int pic_rows, pic_cols;
	mat flattend_input;
public:

	cnn_derived(int kernalrow, int kernalcow, int kernalnum, int step, int picrows, int piccols, std::string temp = "none");
	
	//return the number cols of output of layer 
	virtual int weight_column() const;

	//kernels initialization
	//elements of kernels are in range of (-r,r)
	virtual void random(int rows, double pram);

	//print params of the layer
	virtual void describe() const;

	//execute forward propagation with different activation function
	//the params are input and output of one layer respectively
	virtual void forward_prop(const mat& input, mat& result);

	//execute backward propagation with different activation function
	//the params are input of the layer_i, gradient of layer_i+1, gradient of layer_i
	virtual void backward_prop(const mat& input, const mat& gradient1, mat& gradient0);

	//update weight with caculated delta
	void update(int rows, double learning_rate, const mat& delta);

};