'''
vec版
'''

from functools import reduce
import numpy as np
from datetime import datetime
import minst

# 节点
class activator(object):
    def sigmod(self, val):
        return 1.0/(1 + np.exp(-val))

    def forward(self, input):
        return self.sigmod(input)
    
    def backward(self, input):
        return input*(1-input)

class FullConnectLayer(object):
    def __init__(self, layer_index, inputSize, outputSize, activator):
        self.layer_index = layer_index
        self.inputSize = inputSize
        self.outputSize = outputSize
        self.activator = activator
        self.w = np.random.uniform(-0.1, 0.1, (outputSize, inputSize))
        self.b = np.zeros((outputSize, 1))
        self.outputArray = np.zeros((outputSize, 1))
        self.input = np.zeros((inputSize, 1))
        self.nextConn = None
        self.delta = np.zeros((outputSize, 1))
    
    def set_next_conn(self, conn):
        self.nextConn = conn 
    
    def forward(self, inputArray):
        self.input = inputArray
        self.outputArray = self.activator.forward(np.dot(self.w, inputArray)+self.b)
        return self.outputArray
    
    def backward(self, label):
        # out put layer
        if not self.nextConn: 
            self.delta = self.activator.backward(self.outputArray)*(label-self.outputArray) 
        # hidden layer
        else:
            self.delta = self.activator.backward(self.outputArray)*np.dot(self.nextConn.w.T, self.nextConn.delta) 
        # gradient
        self.grad = np.dot(self.delta, self.input.T)
        self.b_grad = self.delta
    
    def update(self, rate):
        self.w += self.grad*rate
        self.b += self.b_grad*rate
       



class Network(object):
    # layers包含每个层的节点个数
    def __init__(self, layers):
        self.connections = []
        for i in range(len(layers)-1):
            conn = FullConnectLayer(i+1, layers[i], layers[i+1], activator())
            self.connections.append(conn)
            if i > 0: 
                self.connections[i-1].set_next_conn(conn)

    def train(self, samples, labels, rate, iteration, index):
        for j in range(iteration):
            print("train itreation(%s) index(%s)" %(j, index))
            for i, sample in enumerate(samples):
                print("train sample %s itreation(%s) index(%s)" %(i, j, index))
                self._train_one_sample(sample, labels[i], rate)
    
    def _train_one_sample(self, sample, label, rate):
        self.predict(sample)
        self._calc_gradient(label)
        self._update_weight(rate)
        #self.gradient_check(sample, label)
    
    def predict(self, sample):
        # 设置输入层
        # /256是对输入做归一化，否则识别率惨不忍睹
        inputArray = sample.reshape(sample.size, 1)/256
        for conn in self.connections:
            inputArray = conn.forward(inputArray)
        return self.connections[-1].outputArray

    
    def _update_weight(self, rate):
        for conn in self.connections:
            conn.update(rate)

    
    def _calc_gradient(self, label):
        labelArray = label.reshape(label.size, 1)
        for conn in self.connections[::-1]:
            conn.backward(labelArray)


    def get_gradient(self, input, label):
        self.predict(input)
        self._calc_gradient(label)
    
    def network_delta(self, outputs, labels):
        return 0.5*reduce(lambda val, vec: val + (vec[0]-vec[1])**2, zip(outputs.T[0], labels), 0)
    
    def gradient_check(self, input, label):
        self.get_gradient(input, label)
        for conn in self.connections:
            row, col = conn.w.shape
            for i in range(row):
                for j in range(col):
                    actualGradient = conn.grad[i][j]
                    # another way to compute gradient,then compare with actual
                    weightDelta = 0.0004
                    #print("old: conn.w[%s][%s] = %s" %(i, j, conn.w[i][j]))
                    conn.w[i][j] += weightDelta
                    #print("delta1: conn.w[%s][%s] = %s" %(i, j, conn.w[i][j]))
                    delta1 = self.network_delta(self.predict(input), label)
                    conn.w[i][j] -= 2*weightDelta
                    #print("delta2: conn.w[%s][%s] = %s" %(i, j, conn.w[i][j]))
                    delta2 = self.network_delta(self.predict(input), label)
                    #print("delta1 = %s, delta2 = %s" %(delta1, delta2))
                    expectGradient = (delta2 - delta1) / (2*weightDelta)
                    conn.w[i][j] += weightDelta
                    if expectGradient != actualGradient:
                        print("---{conn(%s) w(%s, %s) actual gradient \t%s\nexpect gradient \t%s}---" %(conn.layer_index, i, j, actualGradient, expectGradient))
            

    
def get_result(output):
    max = 0
    for i in range(1, len(output)):
        if output[max] < output[i]:
            max = i 
    return max

def evaluate(network, testSamples, testLabels):
    total = len(testSamples)
    error = 0
    error_sample = []
    error_label = []
    for i in range(total):
        sample = testSamples[i]
        label = testLabels[i]
        output = network.predict(sample)
        val = get_result(list(output.flat))
        label_val = get_result(list(label.flat))
        if val != label_val:
            error += 1
    errorRate = float(error)/total
    print("total %d, error %d, erroRate %f" %(total, error, errorRate))

    return errorRate 




def main():
    TRAIN_DATA_PATH = "D:/code/python/train_data"
    mntImg = minst.MinstImage()
    trainMntImgPath = "%s/train-images-idx3-ubyte/train-images.idx3-ubyte" %(TRAIN_DATA_PATH)
    mntImg.parse(trainMntImgPath)
    train_images = mntImg.get_images()

    mntLable = minst.MinstLabel()
    trainMntLablePath = "%s/train-labels-idx1-ubyte/train-labels.idx1-ubyte" %(TRAIN_DATA_PATH)
    mntLable.parse(trainMntLablePath)
    train_labels = mntLable.get_labels()

    testMntImgPath = "%s/t10k-images-idx3-ubyte/t10k-images.idx3-ubyte" %(TRAIN_DATA_PATH)
    mntImg.parse(testMntImgPath)
    test_images = mntImg.get_images()


    testMntLablePath = "%s/t10k-labels-idx1-ubyte/t10k-labels.idx1-ubyte" %(TRAIN_DATA_PATH)
    mntLable.parse(testMntLablePath)
    test_labels = mntLable.get_labels()

    index = 0
    lastErrorRadio = 1.0
    learning_rate = 0.4
    hidden_layer_node = 30
    network = Network([mntImg.get_image_len(), hidden_layer_node, mntLable.get_label_len()])
    while True:
        network.train(train_images, train_labels, learning_rate, 1, index)
        index += 1
        errorRadio = evaluate(network, test_images, test_labels)
        print("%s after %s train errorRate %f" %(datetime.now(), index*10, errorRadio))
        if index%10 == 0:
            if errorRadio > lastErrorRadio:
                break 
            else:
                lastErrorRadio = errorRadio



        

if __name__ == "__main__":
    main()



