import numpy as np
from utils.Fea_Tra_Models import MLP_keras
from keras.optimizers import SGD, Adam, RMSprop
from keras.utils.np_utils import to_categorical
from DataUtils.Load_util import load_local_train_val_DEBUG, select_drivers, load_test, load_local_train_val, \
    load_local_train_val_multiscale


def train():
    X = np.load('/media/dell/cb552bf1-c649-4cca-8aca-3c24afca817b/dell/wxm/Data/KaggleDDD/npy/VGG_FC7_X.npy')
    y = np.load('/media/dell/cb552bf1-c649-4cca-8aca-3c24afca817b/dell/wxm/Data/KaggleDDD/npy/VGG_FC7_y.npy')
    unique_drivers = np.load(
        '/media/dell/cb552bf1-c649-4cca-8aca-3c24afca817b/dell/wxm/Data/KaggleDDD/npy/VGG_FC7_ud.npy')
    drivers_id = np.load('/media/dell/cb552bf1-c649-4cca-8aca-3c24afca817b/dell/wxm/Data/KaggleDDD/npy/VGG_FC7_did.npy')
    # print unique_drivers
    Y = to_categorical(y, nb_classes=10)
    unique_list_train = unique_drivers[0:-1]
    unique_list_val = unique_drivers[-1]
    num_samples = len(y)
    print '{0} train_val samples'.format(num_samples)
    print '{0} drivers'.format(len(unique_drivers))
    train_X, train_Y, index = select_drivers(X, Y, drivers_id, unique_list_train)
    val_X, val_Y, index = select_drivers(X, Y, drivers_id, unique_list_val)
    print '{0} train samples'.format(len(train_X))
    print '{0} val samples'.format(len(val_X))

    model = MLP_keras(input_dim=4096)
    model.compile(loss='categorical_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])
    history = model.fit(train_X, train_Y,
                        batch_size=16, nb_epoch=100,
                        verbose=1, validation_data=(val_X, val_Y))


if __name__ == '__main__':
    # Script entry point: run feature-classifier training when executed directly.
    train()
