import tensorflow as tf
import math


class fullConnectedNet:
	"""A fully connected classifier for MNIST, built in TF1 graph mode.

	Stacks dense layers over 784-d inputs, applies softmax cross-entropy
	against 10-class one-hot labels, optionally adds an L2 weight penalty,
	and exposes accuracy/loss ops plus a simple mini-batch training loop.
	"""

	# data: dataset object exposing .train/.test with .images/.labels and
	#       .next_batch (the classic TF1 MNIST reader) — assumed, confirm at call site
	# shape: list of neuron counts, one entry per layer (last should be 10)
	# active_func: activation name, see active_neuron()
	# solver: optimizer name, see getSolver()
	# param: dict with "learningRate" and optionally
	#        "learningRateDecay"/"decayStep"/"momentum"
	# regulization: L2 weight-decay coefficient; None disables the penalty
	def __init__(self, data, shape, active_func, solver, param, regulization=None):
		# input data
		self.mnist = data
		self.param = param

		# graph inputs: flattened 28x28 images and one-hot labels
		self.x_ = tf.placeholder(tf.float32, [None, 784])
		self.y_ = tf.placeholder(tf.float32, [None, 10])

		x = self.x_
		logits = None
		l2_loss = 0
		for i, n in enumerate(shape):
			x, logits, weights = self.addLayer(
				"layer{}".format(i), x, n,
				"truncated_normal", "truncated_normal", active_func)
			if regulization is not None:
				# BUGFIX: the coefficient was previously ignored (penalty was
				# always added with weight 1.0); `regulization` now scales it.
				l2_loss = l2_loss + regulization * tf.nn.l2_loss(weights)

		# The last layer's activated output is deliberately discarded: the
		# raw logits feed softmax_cross_entropy_with_logits, which applies
		# the softmax itself.
		self.y_out = logits
		cross_entropy = tf.reduce_mean(
			tf.nn.softmax_cross_entropy_with_logits(labels=self.y_, logits=logits))
		self.loss = cross_entropy + l2_loss

		# model evaluation
		correct_prediction = tf.equal(tf.argmax(self.y_out, 1), tf.argmax(self.y_, 1))
		self.accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
		self.te_acc = []   # test-accuracy history, sampled during run()
		self.tr_loss = []  # training-loss history
		self.tr_acc = []   # training-accuracy history

		# learning rate is a placeholder so it can be decayed per step
		self.lr = tf.placeholder(tf.float32)
		self.solver_ = self.getSolver(solver, param)

	def run(self, steps=3000):
		"""Train for `steps` mini-batch iterations of 100 samples.

		Applies step-wise learning-rate decay when "learningRateDecay" is
		present in self.param (requires "decayStep"). Metrics are recorded
		every 30 steps.

		Returns:
			(train_loss_history, train_acc_history, test_acc_history)
		Raises:
			Exception: if decay is requested but "decayStep" is missing.
		"""
		train_step = self.solver_.minimize(self.loss)
		learning_rate = self.param["learningRate"]

		with tf.Session() as sess:
			sess.run(tf.global_variables_initializer())

			for step in range(steps):
				if "learningRateDecay" in self.param:
					if "decayStep" not in self.param:
						# BUGFIX: typo "Learing" corrected in the message
						raise Exception("Learning rate decay step undefined!")
					# decay once every `decayStep` steps
					if step % self.param["decayStep"] == self.param["decayStep"] - 1:
						learning_rate = learning_rate * self.param["learningRateDecay"]

				batch_xs, batch_ys = self.mnist.train.next_batch(100)
				sess.run(train_step,
						 feed_dict={self.x_: batch_xs, self.y_: batch_ys, self.lr: learning_rate})

				# sample metrics every 30 steps
				if step % 30 == 29:
					acc, loss = self.test(self.mnist.train.images, self.mnist.train.labels, sess)
					te_acc, te_loss = self.test(self.mnist.test.images, self.mnist.test.labels, sess)
					self.te_acc.append(te_acc)
					self.tr_acc.append(acc)
					self.tr_loss.append(loss)
					print("step:{},loss:{},train accuracy:{},test_accuracy:{}".format(
						step + 1, loss, acc, te_acc))

		return self.tr_loss, self.tr_acc, self.te_acc

	def test(self, X_, label_, sess_):
		"""Return (accuracy, loss) for inputs X_ and labels label_ in sess_."""
		return sess_.run([self.accuracy, self.loss], feed_dict={self.x_: X_, self.y_: label_})

	def addLayer(self, layer_name, bottom, neuron_num, w_init, b_init, active_func):
		"""Append one dense layer to the graph.

		bottom: input tensor of shape [batch, in_num]
		neuron_num: number of neurons in this layer
		w_init / b_init: initializer names, see getInitialWeight()
		active_func: activation name, see active_neuron()

		Returns (activated_output, pre-activation_logits, weight_variable).
		"""
		in_num = bottom.get_shape().as_list()[1]

		with tf.name_scope(layer_name):
			weights = self.getVariable([in_num, neuron_num], w_init, name='weights')
			biases = self.getVariable([neuron_num], b_init, name='biases')
			logits = tf.matmul(bottom, weights) + biases
			out = self.active_neuron(logits, active_func)
		return out, logits, weights

	def getVariable(self, shape, init_type, name=None):
		"""Create a tf.Variable of `shape`, initialized per `init_type`."""
		# BUGFIX: `name` was previously ignored and every variable was
		# hard-coded to name='weights'; the argument is now forwarded.
		return tf.Variable(self.getInitialWeight(shape, init_type), name=name)

	def getInitialWeight(self, shape, init_type, param=None):
		"""Return an initial-value tensor for a variable of `shape`.

		init_type: "truncated_normal" | "normal" (param = stddev, default
		1/sqrt(fan_in)) | "uniform" (param = (minval, maxval)) |
		"constant" (param = fill value); anything else yields zeros.
		"""
		if init_type == "truncated_normal":
			if param is None:
				# default stddev 1/sqrt(fan_in)
				return tf.truncated_normal(shape, stddev=1.0 / math.sqrt(float(shape[0])))
			else:
				return tf.truncated_normal(shape, stddev=param)
		elif init_type == "normal":
			if param is None:
				return tf.random_normal(shape, stddev=1.0 / math.sqrt(float(shape[0])))
			else:
				return tf.random_normal(shape, stddev=param)
		elif init_type == "uniform":
			if param is None:
				return tf.random_uniform(shape)
			else:
				return tf.random_uniform(shape, minval=param[0], maxval=param[1])
		elif init_type == "constant":
			return tf.ones(shape) * param
		else:
			return tf.zeros(shape)

	def active_neuron(self, x, active_func):
		"""Apply the activation named `active_func` to tensor x.

		Accepted names: relu, relu6, softplus, sigmod/sigmoid, tanh, dropout.
		Raises Exception for any other name.
		"""
		if active_func == "relu":
			return tf.nn.relu(x)
		elif active_func == "relu6":
			return tf.nn.relu6(x)
		elif active_func == "softplus":
			return tf.nn.softplus(x)
		elif active_func == "sigmod" or active_func == "sigmoid":
			# "sigmod" kept for backward compatibility; "sigmoid" added
			return tf.sigmoid(x)
		elif active_func == "tanh":
			return tf.tanh(x)
		elif active_func == "dropout":
			# BUGFIX: tf.dropout does not exist; tf.nn.dropout also needs a
			# keep probability (fixed at 0.5 here — note dropout is applied
			# at evaluation time too, since there is no training flag).
			return tf.nn.dropout(x, 0.5)
		else:
			raise Exception("Activation function {} undefined!".format(active_func))

	def getSolver(self, solver="GD", param=None):
		"""Return the tf.train optimizer named by `solver`.

		"Moment" requires param["momentum"]; every optimizer reads the
		learning rate from the self.lr placeholder.
		Raises Exception for an unknown name.
		"""
		if solver == "GD":
			return tf.train.GradientDescentOptimizer(self.lr)
		elif solver == "Moment":
			# BUGFIX: the "Moment" and "Adagrad" branches previously
			# returned each other's optimizer (and "Adagrad" crashed on
			# param["momentum"]); they are now matched to their names.
			return tf.train.MomentumOptimizer(self.lr, param["momentum"])
		elif solver == "Adagrad":
			return tf.train.AdagradOptimizer(self.lr)
		elif solver == "adam":
			return tf.train.AdamOptimizer(self.lr)
		elif solver == "RMSProp":
			return tf.train.RMSPropOptimizer(self.lr)
		else:
			raise Exception("Solver undefined!")


