import numpy as np
import pandas as pd
np.random.seed(1337)
import keras
from keras.models import Sequential
from keras.layers import Dense, Activation, Convolution1D, Flatten, MaxPooling1D, Dropout
from keras.optimizers import SGD

import matplotlib.pyplot as plt

import datetime as d

############# Callback Class ###############

class LossHistory(keras.callbacks.Callback):
    """Keras callback that records loss/accuracy at batch and epoch
    granularity and can plot the resulting curves.

    Fixes vs. the original:
      * ``logs={}`` mutable default arguments replaced with ``None``
        (a shared default dict persists across calls).
      * metric keys fall back from the legacy ``acc``/``val_acc`` to the
        newer ``accuracy``/``val_accuracy`` names, so the recorded lists
        are not all-``None`` on recent Keras releases.
    """

    def on_train_begin(self, logs=None):
        # One list per granularity. Note: batch-level val_* entries are
        # None-filled, because Keras only computes validation metrics at
        # epoch end.
        self.losses = {'batch': [], 'epoch': []}
        self.accuracy = {'batch': [], 'epoch': []}
        self.val_loss = {'batch': [], 'epoch': []}
        self.val_acc = {'batch': [], 'epoch': []}

    def on_batch_end(self, batch, logs=None):
        logs = logs or {}
        self.losses['batch'].append(logs.get('loss'))
        # 'acc' on older Keras, 'accuracy' on newer ones.
        self.accuracy['batch'].append(logs.get('acc', logs.get('accuracy')))
        self.val_loss['batch'].append(logs.get('val_loss'))
        self.val_acc['batch'].append(logs.get('val_acc', logs.get('val_accuracy')))

    def on_epoch_end(self, epoch, logs=None):
        logs = logs or {}
        self.losses['epoch'].append(logs.get('loss'))
        self.accuracy['epoch'].append(logs.get('acc', logs.get('accuracy')))
        self.val_loss['epoch'].append(logs.get('val_loss'))
        self.val_acc['epoch'].append(logs.get('val_acc', logs.get('val_accuracy')))

    def loss_plot(self, loss_type):
        """Plot accuracy and loss curves.

        loss_type: 'batch' or 'epoch'. Validation curves are only drawn
        for 'epoch', since they do not exist per batch.
        """
        iters = range(len(self.losses[loss_type]))
        plt.figure()
        # training accuracy
        plt.plot(iters, self.accuracy[loss_type], 'r', label='train acc')
        # training loss
        plt.plot(iters, self.losses[loss_type], 'g', label='train loss')
        if loss_type == 'epoch':
            # validation accuracy
            plt.plot(iters, self.val_acc[loss_type], 'b', label='val acc')
            # validation loss
            plt.plot(iters, self.val_loss[loss_type], 'k', label='val loss')
        plt.grid(True)
        plt.xlabel(loss_type)
        plt.ylabel('acc-loss')
        plt.legend(loc="upper right")
        plt.show()

############# Callback Class ################

# Callback instance that accumulates per-batch/per-epoch metrics during fit().
history = LossHistory()


# Training hyperparameters.
batch_size = 120          # samples per gradient update
epochs = 100              # epochs per chunk of training data (see fit loop below)
pool_size = 2             # MaxPooling1D window
filters = 32              # number of filters in each Conv1D layer
kernel_size = 3           # Conv1D kernel width
# Each sample has 7800 features, reshaped to 78 timesteps x 100 channels
# before being fed to the network (see the reshape in the training loop).
input_shape = (78, 100)

# 1D CNN for binary classification: two Conv1D blocks -> pooling -> dense head.
model = Sequential()
model.add(Convolution1D(
    filters=filters,
    kernel_size=kernel_size,
    input_shape=input_shape,
    padding='same'
))
model.add(Activation('relu'))
model.add(Convolution1D(filters=filters, kernel_size=kernel_size))  # conv layer 2
model.add(Activation('relu'))  # activation layer
model.add(MaxPooling1D(pool_size=pool_size))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(1))
# Single sigmoid unit -> probability for the positive class.
model.add(Activation('sigmoid'))
# SGD with Nesterov momentum; legacy `lr`/`decay` argument names match the
# old Keras API used throughout this file.
sgd = SGD(lr=0.001, momentum=0.9, decay=1e-06, nesterov=True)

model.compile(loss="binary_crossentropy", optimizer=sgd, metrics=['accuracy'])


############### Read train_data.csv in chunks ###############

training_start_time = d.datetime.now().strftime("%Y.%m.%d-%H:%M:%S")

print("###############################################")
print("training start time: ", training_start_time)
print("###############################################")

# Stream the CSV in chunks of 12000 rows so the whole file never has to fit
# in memory at once; each chunk is shuffled and used for one fit() call.
train_data = pd.read_table('/home/orient/PycharmProjects/OtSpeacherRecognization/train_data.csv', sep=',', chunksize=12000)
for data in train_data:
    data = data.sample(frac=1)  # shuffle rows within the chunk
    # All columns except the last hold the 7800 features; reshape to
    # (n_samples, 78, 100) to match the Conv1D input_shape. The last
    # column is the binary label. Using the chunk arrays directly avoids
    # the original pattern of row_stacking onto an empty (0, 7800) array
    # and resetting it every iteration (one full extra copy per chunk).
    X_train = np.asarray(data.iloc[:, 0:-1]).reshape(-1, 78, 100)
    y_train = np.asarray(data.iloc[:, -1])
    print("-------------------Training-------------------")
    model.fit(X_train, y_train, batch_size=batch_size, epochs=epochs, validation_split=0.2, callbacks=[history])

model.save('/home/orient/PycharmProjects/OtSpeacherRecognization/model/test.h5')

history.loss_plot('epoch')

training_stop_time = d.datetime.now().strftime("%Y.%m.%d-%H:%M:%S")


print("###############################################")
print("training start time: ", training_start_time)
print("training stop time: ", training_stop_time)
print("###############################################")

############### Read train_data.csv in chunks ###############
