# 制作自己的神经网络
# 《Python神经网络编程》

import numpy
# scipy.special for the sigmod function expit()
import scipy.special

# neural network class definition
class neuralNetWork:
	# initialise the neural network
	def __inite__(self,inputnodes,hiddennodes,outputnodes,learningrate):
		self.inodes = inputnodes
		self.hnodes = hiddennodes
		self.onodes = outputnodes
		# learning rate
		self.lr = learningrate
		# link weight matrices,wih and who
		# weight inside the arrays are w_i_j,where link is from node i to node j in the next layer
		# w11 w21
		# w12 w22 etc
		self.wih = (numpy.random.normal(0.0,pow(self.hnodes,-0.5),(self.onodes,self.hnodes)))
		self.who = (numpy.random.normal(0.0,pow(self.onodes,-0.5),(self.onodes,self.hnodes)))
		# activation function is the sigmod function
		self.activation_function = lambda x: scipy.special.expit(x)
		pass
	
	# train the neural network
	def train(self,inputs_list,targets_list):
		#convert inputs list to 2d array
		inputs = numpy.array(inputs_list,ndmin=2).T

		# calculate signals into hidden layer
		hidden_inputs = numpy.dot(self.wih,inputs)
		# calculate the signals emerging from hidden layer
		hidden_outputs = self.activation_function(hidden_inputs)
		
		# calculate signals into final output layer
		final_inputs = numpy.dot(self.who,hidden_outputs)
		# calculate the signals emerging from final output layer
		final_outputs = self.activation_function(final_inputs)

		targets = numpy.array(targets_list,ndmin=2).T

		# error is the (target - actual)
		output_errors = targets - final_outputs
		# hidden layer error is the output_errors,split by weights,recombined at hidden nodes
		hidden_errors = numpy.dot(self.who.T,output_errors)
		# update the weights for the links between the hidden and output layers
		self.who += self.lr * numpy.dot((output_errors * final_outputs * (1.0 - final_outputs)),numpy.transpose(hidden_outputs))
		# update the weights for the links between the input and hidden layers
		self.who += self.lr * numpy.dot((hidden_errors * hidden_outputs * (1.0 - hidden_outputs)),numpy.transpose(inputs))
		pass

	# query the neural network
	def query(self,inputs_list):
		#convert inputs list to 2d array
		inputs = numpy.array(inputs_list,ndmin=2).T

		# calculate signals into hidden layer
		hidden_inputs = numpy.dot(self.wih,inputs)
		# calculate the signals emerging from hidden layer
		hidden_outputs = self.activation_function(hidden_inputs)
		
		# calculate signals into final output layer
		final_inputs = numpy.dot(self.who,hidden_outputs)
		# calculate the signals emerging from final output layer
		final_outputs = self.activation_function(final_inputs)
		return final_outputs

# program entry point: build a small demo network
if __name__ == '__main__':
	# layer sizes for the demo network
	input_nodes = 3
	hidden_nodes = 3
	output_nodes = 3
	# learning rate
	learning_rate = 0.5
	# create an instance of the neural network
	network = neuralNetWork(input_nodes, hidden_nodes, output_nodes, learning_rate)
