# -*- coding:utf-8 -*-
import numpy as np
import os
import time
from matplotlib import pyplot as plt

from preprocess_data import get_data

class NetWork(object):
    """Two-layer fully connected classifier: softmax(relu(x @ W1) @ W2).

    Weights are updated with a hand-rolled Adam optimizer; ``forward``
    caches the intermediate activations that ``backward`` needs.
    """

    def __init__(self, input_length=440, hidden_length=200, output_length=200):
        """Create the network and the Adam moment accumulators.

        Args:
            input_length: feature dimension of the input.
            hidden_length: width of the hidden layer.
            output_length: number of output classes.
        """
        super(NetWork, self).__init__()

        self.input_length = input_length
        self.hidden_length = hidden_length
        self.output_length = output_length

        # Adam optimizer hyper-parameters.
        self.lr = 5e-4
        self.beta1 = 0.9
        self.beta2 = 0.999
        self.eps = 1e-8

        # Weight initialisation: small uniform values in [0, 0.1).
        self.weight1 = np.random.rand(self.input_length, self.hidden_length) / 10
        self.weight2 = np.random.rand(self.hidden_length, self.output_length) / 10

        # Adam first (m) and second (v) moment estimates, one per weight matrix.
        self.weight1_m = np.zeros((self.input_length, self.hidden_length))
        self.weight2_m = np.zeros((self.hidden_length, self.output_length))
        self.weight1_v = np.zeros((self.input_length, self.hidden_length))
        self.weight2_v = np.zeros((self.hidden_length, self.output_length))

    # Forward pass
    def forward(self, x):
        """Run a forward pass and cache activations for ``backward``.

            y_hat = softmax(relu(input[b*f]*weight1[f*h])*weight2[h*c])
            hidden[b*h] : input[b*f]*weight1[f*h]
            hidden2[b*h]: relu(hidden[b*h])
            output[b*c] : hidden2[b*h]*weight2[h*c]
            y_hat[b*c]  : softmax(output[b*c])

        Args:
            x: (batch_size, feature_size) input batch.
        Returns:
            (batch_size, output_length) class probabilities (rows sum to 1).
        """
        self.input = x
        self.hidden = np.matmul(x, self.weight1)
        self.hidden2 = self.relu(self.hidden)
        self.output = np.matmul(self.hidden2, self.weight2)
        self.y_hat = self.softmax(self.output)
        return self.y_hat

    # Backward pass + Adam update
    def backward(self, y_hat, y, t):
        """Backpropagate one batch and apply an Adam update.

        Args:
            y_hat: (batch, classes) probabilities from ``forward``.
            y: (batch, classes) one-hot targets.
            t: 1-based Adam step counter, used for bias correction.
        """
        # Gradient of softmax + cross-entropy w.r.t. the logits.
        d_output = y_hat - y
        d_hidden2 = np.matmul(d_output, self.weight2.T)
        d_weight2 = np.matmul(self.hidden2.T, d_output)
        d_hidden = d_hidden2.copy()
        d_hidden[self.hidden < 0] = 0  # ReLU gate: no gradient where pre-activation < 0
        d_weight1 = np.matmul(self.input.T, d_hidden)

        # Adam update with bias-corrected moment estimates.
        self.weight1_m = self.beta1 * self.weight1_m + (1 - self.beta1) * d_weight1
        self.weight1_v = self.beta2 * self.weight1_v + (1 - self.beta2) * d_weight1 * d_weight1
        self.weight1_m_hat = self.weight1_m / (1 - np.power(self.beta1, t))
        self.weight1_v_hat = self.weight1_v / (1 - np.power(self.beta2, t))
        self.weight1 = self.weight1 - self.lr * self.weight1_m_hat / (np.sqrt(self.weight1_v_hat) + self.eps)

        self.weight2_m = self.beta1 * self.weight2_m + (1 - self.beta1) * d_weight2
        self.weight2_v = self.beta2 * self.weight2_v + (1 - self.beta2) * d_weight2 * d_weight2
        self.weight2_m_hat = self.weight2_m / (1 - np.power(self.beta1, t))
        self.weight2_v_hat = self.weight2_v / (1 - np.power(self.beta2, t))
        self.weight2 = self.weight2 - self.lr * self.weight2_m_hat / (np.sqrt(self.weight2_v_hat) + self.eps)

    def relu(self, x):
        """Element-wise ReLU: max(x, 0), without mutating the input."""
        x_relu = x.copy()
        x_relu[x < 0] = 0
        return x_relu

    def softmax(self, x):
        """Row-wise softmax over axis 1.

        BUGFIX: subtract the per-row maximum before exponentiating so large
        logits no longer overflow to inf/nan; the shift cancels in the ratio,
        so the result is mathematically unchanged.
        """
        shifted = x - np.max(x, axis=1, keepdims=True)
        x_softmax = np.exp(shifted)
        x_softmax /= x_softmax.sum(axis=1, keepdims=True)
        return x_softmax


class Model(object):
    """Training / evaluation harness around ``NetWork``.

    Data comes from ``preprocess_data.get_data``; the best checkpoint (by
    validation accuracy) is saved to ``saved_models/NetWork.npz`` together
    with the normalisation statistics needed at test time.
    """

    def __init__(self, hidden_length=200):
        super(Model, self).__init__()
        self.model = NetWork(hidden_length=hidden_length)
        self.save_dir = 'saved_models'
        self.hidden_length = hidden_length

    def train(self):
        """Train with mini-batch Adam, validating every 50 steps.

        Keeps the best checkpoint by validation accuracy and stops early
        once accuracy has not improved for ``early_stop`` evaluations.
        """
        eps = 1e-8
        train_features_all, train_labels_all = get_data('训练集')
        test_features, test_labels = get_data('测试集')

        # Hold out the last 10% of the training data as a validation set.
        split = train_features_all.shape[0] * 9 // 10
        train_features = train_features_all[:split]
        valid_features = train_features_all[split:]
        train_labels = train_labels_all[:split]
        valid_labels = train_labels_all[split:]

        # Standardise every split with statistics of the training split only.
        std = np.std(train_features, axis=0, keepdims=True)
        mean = np.mean(train_features, axis=0, keepdims=True)

        train_features = (train_features - mean) / (std + eps)
        valid_features = (valid_features - mean) / (std + eps)
        test_features = (test_features - mean) / (std + eps)

        # Labels are 1-based class ids. BUGFIX: size the one-hot matrix from
        # ALL training labels, not just the 90% split, so a class that only
        # appears in the validation split still gets a column.
        num_class = np.max(train_labels_all)
        train_labels_onehot = np.zeros((train_labels.shape[0], num_class))
        train_labels_onehot[np.arange(train_labels.shape[0]), train_labels - 1] = 1

        batch_size = 16
        epochs = 50
        data_size = train_features.shape[0]
        t = 1  # 1-based Adam step counter (used for bias correction)
        best_acc = 0
        early_stop = 100  # evaluations without improvement before stopping
        print('training...')
        improve = 0
        begin_time = time.time()
        iters, accs = [], []
        stop = False  # raised by early stopping inside the batch loop
        for epoch in range(epochs):
            # Step 1: shuffle once per epoch. Fancy indexing with a random
            # permutation replaces the old per-element list rebuilds.
            perm = np.random.permutation(data_size)
            features = train_features[perm]
            labels_onehot = train_labels_onehot[perm]

            for i in range(data_size // batch_size):
                batch = slice(i * batch_size, (i + 1) * batch_size)
                y_hat = self.model.forward(features[batch])
                self.model.backward(y_hat, labels_onehot[batch], t)
                t += 1
                if t % 50 == 0:
                    y_hat = self.model.forward(valid_features)
                    acc = np.sum((np.argmax(y_hat, axis=1) + 1) == valid_labels) / valid_features.shape[0]
                    iters.append(t)
                    accs.append(acc)
                    if best_acc < acc:
                        improve = 0
                        best_acc = acc
                        # Checkpoint weights plus the normalisation statistics.
                        # Use self.save_dir so the path matches test()'s default,
                        # and make sure the directory exists first.
                        os.makedirs(self.save_dir, exist_ok=True)
                        np.savez(os.path.join(self.save_dir, 'NetWork.npz'),
                                 weight1=self.model.weight1, weight2=self.model.weight2,
                                 std=std, mean=mean)
                        print('iter:{},valid acc={},best'.format(t, acc))
                    else:
                        improve += 1
                        print('iter:{},valid acc={}'.format(t, acc))

                if improve > early_stop:
                    # BUGFIX: the old `break` only left the batch loop, so
                    # early stopping never actually stopped training.
                    stop = True
                    break
            if stop:
                break
        end_time = time.time()
        print('train time:%.2fs' % (end_time - begin_time))
        print('valid acc:%.2f%%' % (best_acc * 100))
        fig, ax = plt.subplots()
        ax.plot(iters, accs)
        ax.set(xlabel='iter', ylabel='accuracy',
               title='The accuracy varies with the number of iterations')
        ax.grid()

        # Make sure the plot directory exists before saving the figure.
        os.makedirs('acc', exist_ok=True)
        fig.savefig("acc/acc-%d.png" % (self.hidden_length))
        plt.show()

        print('done.')

    def test(self, model_path=None):
        """Load a checkpoint and report accuracy on the train and test sets.

        Args:
            model_path: path to a .npz checkpoint; defaults to the file
                written by train() under ``self.save_dir``.
        """
        if model_path is None:  # idiomatic identity check (was `== None`)
            model_path = os.path.join(self.save_dir, 'NetWork.npz')
        assert os.path.exists(model_path), 'no find model_path:%s' % (model_path)
        print('testing...\nload model from %s' % model_path)
        test_features, test_labels = get_data('测试集')
        train_features, train_labels = get_data('训练集')
        eps = 1e-8
        model_parameters = np.load(model_path)
        # Re-apply the exact normalisation used during training.
        mean = model_parameters['mean']
        std = model_parameters['std']
        test_features = (test_features - mean) / (std + eps)
        train_features = (train_features - mean) / (std + eps)
        self.model.weight1 = model_parameters['weight1']
        self.model.weight2 = model_parameters['weight2']

        begin_time = time.time()
        y_hat = self.model.forward(train_features)
        acc = np.sum((np.argmax(y_hat, axis=1) + 1) == train_labels) / train_features.shape[0]
        print('train acc = %2.2f%%' % (acc * 100))
        print('[train] time:%.2fs' % (time.time() - begin_time))

        begin_time = time.time()
        y_hat = self.model.forward(test_features)
        acc = np.sum((np.argmax(y_hat, axis=1) + 1) == test_labels) / test_features.shape[0]
        print('test acc = %2.2f%%' % (acc * 100))
        print('[test] time:%.2fs' % (time.time() - begin_time))

        print('done.')

