import yaml
import numpy as np
from layer import LayerFactory
import dataset
from collections import OrderedDict

from optimizer import AdaGrad
from optimizer import SGD
from analyser import Analyser
import pickle
from drawer import Drawer


class NeuralNetwork(object):
    """
        Top-level model-training class.

        Builds the layer stack from a YAML config file, then trains,
        evaluates, and saves/restores the network parameters.
        @params
            params_path: path to the YAML parameter/config file
    """

    def __init__(self, params_path) -> None:
        self.getParams(params_path)
        self.getData()
        self.getLayer()

        # Explicit name -> class mapping instead of eval() on config text:
        # eval would execute arbitrary code taken from the config file.
        optimizers = {'AdaGrad': AdaGrad, 'SGD': SGD}
        # NOTE: the config key is spelled 'learing_rate' (sic) in existing
        # params files; keep reading that exact key for compatibility.
        self.gradMethod = optimizers[self.params['learning_method']](
            self.params['learing_rate'])
        self.analyser = Analyser()
        self.drawer = Drawer()

    def test(self):
        """Evaluate the network on the held-out test set."""
        x_test = self.x_test.reshape(-1, 1, 8, 8)
        # Dropout must be inactive at inference time.
        x_predict = self.predict(x_test, train=False)
        predict_y = np.argmax(x_predict, axis=1)
        if self.params['analyse']:
            self.analyser.analyse(predict_y, self.y_test)
        if self.params['draw']:
            self.drawer.plot()

    def loadAndTest(self, params_path):
        """Rebuild the layer stack from parameters pickled by saveParams.

        @params
            params_path: path of the pickle file written by saveParams
        """
        # NOTE(review): pickle.load can execute arbitrary code; only load
        # parameter files produced by this program itself.
        with open(params_path, 'rb') as f:
            layer_info = pickle.load(f)
        for layer_dict in layer_info:
            wb_dict = {}
            key = list(layer_dict.keys())
            layer_params = layer_dict[key[0]]
            # Trained weights/biases are stored next to the layer config;
            # pop them out so they can be re-loaded after construction.
            if 'w' in layer_params and 'b' in layer_params:
                wb_dict['w'] = layer_params.pop('w')
                wb_dict['b'] = layer_params.pop('b')
            layer_str = self._uniqueLayerKey(key[0])
            # The loss layer is marked 'is_last_layer' and ends the stack.
            if layer_dict[key[0]] is not None and 'is_last_layer' \
                    in layer_dict[key[0]]:
                self.last_layer = self.layer_factory.produce(key[0], {})
                break
            self.order_layer[layer_str] = self.layer_factory.produce(
                key[0], layer_dict[key[0]])
            self.order_layer[layer_str].loadParams(wb_dict)

    def _uniqueLayerKey(self, layer_name):
        """Return '<layer_name><i>' for the smallest index i not yet
        present in self.order_layer."""
        layer_ip = 0
        layer_str = layer_name + str(layer_ip)
        while layer_str in self.order_layer:
            layer_ip += 1
            layer_str = layer_name + str(layer_ip)
        return layer_str

    def runNetWork(self):
        """Main entry point: train the network (or only evaluate it when
        the config enables test_mode)."""
        if self.params['test_mode']:
            self.test()
            return
        iterative_num = self.params['iterative_num']
        for i in range(iterative_num):
            x_sample, y_sample = self.getTrainData()

            # forward pass
            y_predict = self.predict(x_sample)

            # backward pass; update parameters
            self.update(y_predict, y_sample)
        self.test()
        self.saveParams(self.params['save_params_path'])

    def predict(self, data, train=True):
        """Forward pass through every layer.

        @params
            data:  input batch
            train: True during training; pass False at inference so
                   dropout layers are disabled.
        """
        for name, layer in self.order_layer.items():
            # Keys look like 'DropOutLayer0'; strip the numeric suffix
            # (which may have several digits) to recover the layer type.
            # The original sliced off exactly one character — wrong for
            # indices >= 10 — and ran the dropout forward pass twice,
            # discarding the inference-mode result.
            if name.rstrip('0123456789') == 'DropOutLayer':
                data = layer.forward(data, train)
            else:
                data = layer.forward(data)
        return data

    def update(self, x, target):
        """Backward pass: propagate gradients and apply the optimizer.

        @params
            x:      network output for the current batch
            target: ground-truth labels for the batch
        """
        loss = self.last_layer(x, target)

        dout = 1
        dout = self.last_layer.backward(dout)

        layers = list(self.order_layer.values())
        layers.reverse()
        for layer in layers:
            dout = layer.backward(dout)

        # gradient-descent step on every layer's parameters
        for layer in layers:
            layer.update(self.gradMethod)

        print(loss)
        self.drawer.record(loss)

    # Read configuration from the params file

    def getParams(self, params_path):
        """Load the YAML config file into self.params."""
        with open(params_path) as params_file:
            # safe_load: the config needs no arbitrary Python objects, and
            # yaml.load without an explicit Loader is deprecated/unsafe.
            self.params = yaml.safe_load(params_file)

    def saveParams(self, params_name: str):
        """Pickle every layer's parameters to params_name.

        No-op unless 'is_save_params' is set in the config.
        """
        if not self.params['is_save_params']:
            return
        params_list = [layer.saveParams()
                       for layer in self.order_layer.values()]
        # Honor the explicit argument (the original ignored it and
        # re-read self.params['save_params_path']; callers pass the same
        # value, so behavior is unchanged).
        with open(params_name, 'wb') as f:
            pickle.dump(params_list, f)

        # Fetch the training data

    def getData(self):
        """Fetch the train/test split from the dataset module."""
        data_preparer = dataset.DataPreparer(self.params['train_size'])
        self.x_train, self.x_test, self.y_train, self.y_test\
            = data_preparer.get()

    def getLayer(self):
        """Build the layer stack described by the config, or restore one
        from disk when test_mode is enabled."""
        self.order_layer = OrderedDict()
        self.layer_factory = LayerFactory()

        if self.params['test_mode']:
            self.loadAndTest(self.params['test_params_path'])
            return

        for layer in self.params['layers']:
            index = list(layer.keys())
            layer_str = self._uniqueLayerKey(index[0])
            # The loss layer is marked 'is_last_layer' and ends the stack.
            if layer[index[0]] is not None and 'is_last_layer' in layer[index[0]]:
                self.last_layer = self.layer_factory.produce(index[0], {})
                break
            self.order_layer[layer_str] = self.layer_factory.produce(
                index[0], layer[index[0]])

    def getTrainData(self):
        """Sample (with replacement) one training mini-batch, reshaped to
        (batch, 1, 8, 8) images."""
        batch_num = self.params['batch_size']
        mask = np.random.randint(0, self.x_train.shape[0], batch_num)
        x_sample = self.x_train[mask].reshape(-1, 1, 8, 8)
        y_sample = self.y_train[mask]
        return x_sample, y_sample


if __name__ == '__main__':
    # Build the network from the default config and run train/test.
    nn = NeuralNetwork('./params.yaml')
    nn.runNetWork()
