#ifndef _DFA_H_
#define _DFA_H_

#include <cstdio>
#include <cstring>
#include <string>
#include <sstream>
#include <vector>
#include <map>
#include <utility>
#include <algorithm>
#include <boost/checked_delete.hpp>
#include <boost/utility.hpp>

using namespace boost;
using namespace std;

#define  _CRT_SECURE_NO_WARNINGS 1

// Node of the DFA graph.  Currently stateless; it only scopes the status
// codes that DFA file operations report.
class DFANode{
public:
	DFANode(){}
	~DFANode(){}
	// Result codes for DFA load/store operations.
	enum ERROR_TYPE{
		CORRECT_STATE = 0,  // operation succeeded
		ERROR_READ_FILE,    // failed reading an input file
		ERROR_WRITEN_FILE,  // failed writing an output file
		ERROR_OTHERS        // any other failure
	};
};


// Weighted link (transition) in the DFA graph.  Carries a current score
// plus a running accumulated score.
//
// Fix: score_ and acc_score_ were previously left uninitialized, so calling
// get_score(), get_acc_score() or add_score() before set_score() read
// indeterminate values (undefined behavior).  Both members are now
// zero-initialized by the constructor; the public interface is unchanged.
class DFALink
{
public:
	DFALink() : score_(0.0f), acc_score_(0.0f) {}
	~DFALink(){}
	// Overwrite the current score.
	inline void set_score(float new_score){
		score_ = new_score;
	}
	// Fold new_score into the running accumulator.
	inline void add_score(float new_score){
		acc_score_ += new_score;
	}
	// Sum of every value passed to add_score() so far (0 initially).
	inline float get_acc_score() const{
		return acc_score_;
	}
	// Value most recently given to set_score() (0 initially).
	inline float get_score() const{
		return score_;
	}
protected:
private:
	float score_;      // current (last set) score
	float acc_score_;  // accumulated score

};

// ---------------------------------------------------------------------------
// Deep-learning layer definitions
// ---------------------------------------------------------------------------
// Fully-connected (dense) layer of the network.  Declaration only: all
// member functions are defined elsewhere (not visible in this header).
// NOTE(review): the raw owning-looking pointers below suggest this class
// heap-allocates its buffers, but the ctor/dtor bodies are not visible
// here — confirm who allocates and frees them.
template <typename dataType>
class ConnectLayer
{
public:
	// nodeNum: number of nodes in the layer.
	// is_init: presumably triggers Init() during construction — TODO confirm.
	ConnectLayer(int nodeNum, bool is_init=true);
	~ConnectLayer();
	void Init();                   // initialize layer state (defined elsewhere)
	void UpdateWeight();           // apply a weight-update step
	void UpdateBias();             // apply a bias-update step
	void MakeSparse();             // sparsify the layer (semantics defined elsewhere)
	void ComputeActSigmoid();      // compute activations — sigmoid variant
	void ComputeActTanh();         // compute activations — tanh variant
	void ComputeActReLu();         // compute activations — ReLU variant
	dataType ComputeActAverage();  // returns an average activation value

private:
	std::vector<dataType>* layer_data_;                   // node activations
	std::vector<std::vector<dataType> > * layer_weight_;  // weight matrix
	std::vector<dataType>* layer_bias_;                   // per-node biases
	bool layer_inited_;                                   // set once initialized


};

// Convolutional layer of the network.  Declaration only: all member
// functions are defined elsewhere (not visible in this header).
// NOTE(review): raw pointers below imply heap-owned buffers, but the
// ctor/dtor bodies are not visible here — confirm ownership.
template <typename dataType>
class ConvolutionLayer
{
public:
	// nodeNum: number of nodes; convolutionType: kernel/mode selector
	// (valid values defined elsewhere); accSize: accumulation/kernel size —
	// TODO confirm; outMapSize: size of each output feature map;
	// isInit: presumably triggers Init() during construction — TODO confirm.
	ConvolutionLayer(int nodeNum, const string& convolutionType, int accSize, int outMapSize, bool isInit = true);
	~ConvolutionLayer();
	void Init();                   // initialize layer state (defined elsewhere)
	void UpdateWeight();           // apply a weight-update step
	void UpdateBias();             // apply a bias-update step
	void MakeSparse();             // sparsify the layer (semantics defined elsewhere)
	void ComputeActSigmoid();      // compute activations — sigmoid variant
	void ComputeActTanh();         // compute activations — tanh variant
	void ComputeActReLu();         // compute activations — ReLU variant
	dataType ComputeActAverage();  // returns an average activation value

private:
	std::vector<dataType>* layer_data_;                   // node activations
	std::vector<std::vector<dataType> > * layer_weight_;  // weight matrix
	std::vector<dataType>* layer_bias_;                   // per-node biases
	bool layer_inited_;                                   // set once initialized
	int layer_outmapsize_;                                // output feature-map size
	int layer_inmapsize_;                                 // input feature-map size
	int acc_size_;                                        // see ctor accSize
	std::string convolution_type_;                        // see ctor convolutionType


};


// Sub-sampling (pooling) layer of the network.  Member functions other
// than the destructor are defined elsewhere (not visible in this header).
template <typename dataType>
class SubSampleLayer
{
public:
	// nodeNum: number of nodes; accSize: accumulation/pool size — TODO
	// confirm; is_init: presumably triggers Init() during construction.
	SubSampleLayer(int nodeNum, int accSize, bool is_init = true);
	// Releases the heap buffers this layer owns (assumes they were
	// allocated with plain new by the constructor — confirm against the
	// out-of-line ctor definition).
	//
	// Fix: the previous destructor (a) did not compile on a conforming
	// compiler (`layer_weight_->begin` without parentheses, and a dependent
	// iterator type missing `typename`), (b) used checked_array_delete on
	// single-object pointers and on an iterator, (c) tried to delete the
	// *elements* of *layer_weight_ even though they are value-type vectors
	// destroyed by the container itself, and (d) leaked layer_weight_.
	// Deleting the three owning pointers is sufficient.
	virtual ~SubSampleLayer(){
		delete layer_data_;
		delete layer_bias_;
		delete layer_weight_;  // was leaked before
	}
	void Init();                   // initialize layer state (defined elsewhere)
	void UpdateWeight();           // apply a weight-update step
	void UpdateBias();             // apply a bias-update step
	void MakeSparse();             // sparsify the layer (semantics defined elsewhere)
	void ComputeActSigmoid();      // compute activations — sigmoid variant
	void ComputeActTanh();         // compute activations — tanh variant
	void ComputeActReLu();         // compute activations — ReLU variant
	dataType ComputeActAverage();  // returns an average activation value

private:
	std::vector<dataType>* layer_data_;                   // node activations
	std::vector<std::vector<dataType> > * layer_weight_;  // weight matrix
	std::vector<dataType>* layer_bias_;                   // per-node biases
	bool layer_inited_;                                   // set once initialized
	int acc_size_;                                        // see ctor accSize
	int layer_outmap_size_;                               // output feature-map size
};


template <typename dataType> class ConvolutionLayer;  // fwd decl (full definition appears earlier in this header)

// Restricted Boltzmann machine: observation/hidden unit types, training
// and recognition entry points.  Most member functions are defined
// elsewhere (not visible in this header).
class Rbm
{
public:
	// Distribution type of a unit (visible or hidden).
	// Fix: was written `typedef enum NODETYPE { ... };` — a typedef with no
	// declarator, which conforming compilers reject (MSVC-only extension).
	// A plain enum keeps the Rbm::NODETYPE / Rbm::GAUSS spellings intact.
	enum NODETYPE
	{
		GAUSS = 0,
		BERNOULLI
	};

	// Fix: the initializer list is now in member-declaration order (members
	// are initialized in declaration order regardless of list order, so the
	// old ordering was misleading / -Wreorder), and the two owning pointers
	// are null-initialized instead of being left indeterminate.
	Rbm() : obs_node_type_(GAUSS), hidden_node_type_(GAUSS),
		hidden_num_(0), obvs_dim_(0), class_dim_(0),
		net_(NULL), curr_error_derivative_(NULL)
	{
	}

	~Rbm()
	{
	}
	void Init();                      // initialize model state (defined elsewhere)
	void ReadObvs(int batch_size);    // load a batch of observations
	void Solver();                    // run the optimization step(s)

	float ComputeLoss();              // current loss value

	// the biggest function [9/1/2015 Administrator]
	void Train();
	void Recognize();

	// Distribution type of the observation (visible) units.
	inline void set_InputNodeType(NODETYPE node_type){
		obs_node_type_ = node_type;
	}
	inline NODETYPE get_InputNodeType() const{
		return obs_node_type_;
	}
	// Distribution type of the hidden units.
	inline void set_HiddenNodeType(NODETYPE node_type){
		hidden_node_type_ = node_type;
	}
	inline NODETYPE get_HiddenNodeType() const{
		return hidden_node_type_;
	}
	// Number of hidden units.
	inline int get_HiddenNum() const{
		return hidden_num_;
	}
	inline void set_HiddenNum(int hidden_num){
		hidden_num_ = hidden_num;
	}
protected:
	void ShuffleBatchSamples();       // randomize sample order within a batch
	void ComputeDerivative();         // gradient computation (defined elsewhere)
private:

	NODETYPE obs_node_type_ ;         // visible-unit distribution
	NODETYPE hidden_node_type_ ;      // hidden-unit distribution
	int hidden_num_;                  // number of hidden units
	int obvs_dim_;                    // observation dimensionality
	int class_dim_;                   // number of classes
	std::vector<ConvolutionLayer<float> >* net_;   // owned? — ctor/dtor defined elsewhere, confirm
	std::vector<float>* curr_error_derivative_;    // owned? — confirm

};






#endif
