#!/usr/bin/python
# -*- coding: utf-8 -*-

import mne
import matplotlib.pyplot as plt
from sklearn import preprocessing
import numpy as np
from keras.utils.np_utils import to_categorical
from sklearn.model_selection import train_test_split

"""
    EEG datasets for motor imagery brain computer interface  http://gigadb.org/dataset/view/id/100542  
    62 channels EEG，1000hz freq, 4 seconds MI-task,Total 7 seconds
    100 runs for each of the 54 subjects
    EEG Dataset and OpenBMI Toolbox for Three BCI Paradigms: An Investigation into BCI Illiteracy
"""

import os
import numpy as np
import matplotlib.pyplot as plt
from scipy.fftpack import fft,fftfreq
from scipy.io import loadmat
from scipy import interpolate
from scipy import signal
# Directory holding the raw GigaDB .mat files, and the output directory for
# the preprocessed 2-second / 250 Hz segments.
data_path = "/media/brainseek/dataset/54subjects/rawdata/"
save_path = "/media/brainseek/dataset/54subjects/LB_2s_250Hz/"

# Map the dot-padded channel labels found in the recordings to clean 10-10
# system names (presumably for use with mne's rename_channels — the mapping
# is not used in this file; confirm against the caller).
# NOTE(review): the key 'Ff8..' looks like it may be a dataset-side typo for
# 'Ft8..' — verify against the actual channel labels before changing it.
rename_mapping = {'Fp1.': 'Fp1', 'AF7.': 'AF7', 'AF3.': 'AF3', 'F1.': 'F1', 'F3.': 'F3', 'F5.': 'F5',
                  'F7.': 'F7', 'FT7..': 'FT7', 'Fc5..': 'Fc5', 'Fc3..': 'Fc3',
                  # BUG FIX: 'Fc1..' previously mapped to 'Fc3' (copy-paste
                  # slip), colliding with 'Fc3..' -> 'Fc3' and mislabeling Fc1.
                  'Fc1..': 'Fc1',
                  'C1..': 'C1', 'C3..': 'C3',
                  'C5..': 'C5', 'T7.': 'T7', 'Tp7.': 'Tp7', 'Cp5.': 'Cp5', 'Cp3.': 'Cp3', 'Cp1.': 'Cp1',
                  'P1.': 'P1', 'P3.': 'P3', 'P5.': 'P5', 'P7.': 'P7', 'P9.': 'P9', 'Po7.': 'Po7',
                  'Po3.': 'Po3', 'O1.': 'O1', 'Lz.': 'Lz', 'Oz.': 'Oz', 'Poz..': 'Poz', 'Pz..': 'Pz', 'Cpz..': 'Cpz',
                  'Fpz..': 'Fpz', 'Fp2..': 'Fp2', 'Af8..': 'Af8', 'Af4..': 'Af4', 'Afz..': 'Afz', 'Fz..': 'Fz', 'F2.': 'F2',
                  'F4.': 'F4', 'F6..': 'F6', 'F8..': 'F8', 'Ff8..': 'Ft8', 'Fc6.': 'Fc6', 'Fc4.': 'Fc4', 'Fc2.': 'Fc2',
                  'Fcz..': 'Fcz', 'Cz..': 'Cz', 'C2..': 'C2', 'C4..': 'C4', 'C6..': 'C6', 'T8..': 'T8', 'Tp8..': 'Tp8',
                  'Cp6..': 'Cp6', 'Cp4..': 'Cp4', 'Cp2.': 'Cp2', 'P2.': 'P2', 'P4.': 'P4', 'P6.': 'P6', 'P8.': 'P8',
                  'P10..': 'P10', 'Po8..': 'Po8', 'Po4..': 'Po4', 'O2..': 'O2'}


def get_physionet(num, subject: int):
    """Load and preprocess one session of motor-imagery data for one subject.

    Keeps 12 channels, cuts the first 2 s (2000 samples) of every left-hand
    imagery trial plus 30 two-second resting segments, standardizes each
    segment, splits each segment's 12 channels into 6 two-channel samples,
    and downsamples 2000 -> 500 samples along the time axis.

    :param num: session number (1 or 2), part of the .mat file name
    :param subject: SN of subject : [1,54]
    :return: (train_data, test_data, train_label, test_label) with data
             shaped (-1, 500, 2) and one-hot labels shaped (-1, 2),
             or 0 when the .mat file does not exist
    """
    # loading from file
    file_name = os.path.join(data_path, 'sess%02d' % num) + '_subj%02d' % subject + '_EEG_MI.mat'
    if not os.path.exists(file_name):
        return 0  # sentinel checked by the caller with `res == 0`
    raw_new = loadmat(file_name)
    print(raw_new.keys())
    print(raw_new['EEG_MI_train'][0, 0]['smt'].shape)

    # the 12 channel indices kept from the 62-channel montage — presumably
    # motor-cortex electrodes; TODO confirm against the recording montage
    channels = [32, 8, 9, 33, 12, 35, 36, 14, 38, 18, 19, 40]
    imagery_raw = raw_new['EEG_MI_train'][0, 0]['smt'][:, :, channels]
    imagery_label = raw_new['EEG_MI_train'][0, 0]['y_dec']

    # keep only the trials whose decimal label equals 2 (left-hand imagery)
    imagery_left_label = np.where(imagery_label == 2)
    imagery_left_raw = imagery_raw[:, imagery_left_label[1], :]
    # first 2000 time points of each trial, then move trials to axis 0:
    # (trials, 2000, 12)
    imagery_left_2s = imagery_left_raw[0:2000, :, :].transpose(1, 0, 2)

    # reshape the resting data into 30 two-second segments, channel last
    rest_raw = raw_new['EEG_MI_train'][0, 0]['pre_rest'][:, channels]
    rest_2s = rest_raw.reshape(30, 2000, 12)

    # per-segment standardization (zero mean / unit variance per channel)
    scaler = preprocessing.StandardScaler()
    for i in range(len(imagery_left_2s)):
        imagery_left_2s[i] = scaler.fit_transform(imagery_left_2s[i])
    scaler_rest = preprocessing.StandardScaler()
    for i in range(len(rest_2s)):
        rest_2s[i] = scaler_rest.fit_transform(rest_2s[i])

    # labels: 0 = rest (base), 1 = left-hand imagery
    labels_left = np.ones([1, len(imagery_left_2s)])
    labels_rest = np.zeros([1, len(rest_2s)])

    # stack imagery and rest segments together, then one-hot the labels
    data = np.vstack((imagery_left_2s, rest_2s))
    labels_left_rest = np.hstack((labels_left, labels_rest))
    labels = to_categorical(labels_left_rest[0])  # one-hot

    # split, then turn every 12-channel sample into 6 independent
    # 2-channel samples (labels repeated accordingly)
    train_data_ori, test_data_ori, train_label_ori, test_label_ori = train_test_split(
        data, labels, test_size=0.2, random_state=42)
    pairs = range(0, 12, 2)
    train_data = np.concatenate([train_data_ori[:, :, i:i + 2] for i in pairs])
    test_data = np.concatenate([test_data_ori[:, :, i:i + 2] for i in pairs])
    train_label = np.concatenate([train_label_ori] * 6)
    test_label = np.concatenate([test_label_ori] * 6)
    print('data loaded' + str(subject))

    # anti-aliased polyphase downsampling 2000 -> 500 samples (4x decimation)
    train_data = signal.resample_poly(train_data, 500, 2000, axis=1)
    test_data = signal.resample_poly(test_data, 500, 2000, axis=1)

    return train_data, test_data, train_label, test_label


if __name__ == '__main__':
    # Accumulators for the concatenated splits of all subjects/sessions.
    # Data samples are (500 timepoints, 2 channels); labels are one-hot (2,).
    train_data_total = np.empty((0, 500, 2))
    test_data_total = np.empty((0, 500, 2))
    train_label_total = np.empty((0, 2))
    test_label_total = np.empty((0, 2))
    subjects_good = np.arange(1, 55)  # subject SNs 1..54
    # makedirs with exist_ok also creates missing parent directories and
    # does not crash when the directory already exists (os.mkdir did both).
    os.makedirs(save_path, exist_ok=True)
    for num in (1, 2):  # the dataset has two sessions per subject
        for subs in subjects_good:
            res = get_physionet(num, subs)
            if res == 0:  # .mat file missing for this session/subject
                continue
            train_data, test_data, train_label, test_label = res
            # BUG FIX: the per-subject file names now include the session
            # number — with the old "train_data<subs>" names the num=2 pass
            # silently overwrote every file saved during the num=1 pass.
            tag = '%d_sess%d' % (subs, num)
            np.save(os.path.join(save_path, "train_data" + tag), train_data, allow_pickle=True)
            np.save(os.path.join(save_path, "test_data" + tag), test_data, allow_pickle=True)
            np.save(os.path.join(save_path, "train_label" + tag), train_label, allow_pickle=True)
            np.save(os.path.join(save_path, "test_label" + tag), test_label, allow_pickle=True)
            train_data_total = np.concatenate((train_data_total, train_data), axis=0)
            test_data_total = np.concatenate((test_data_total, test_data), axis=0)
            train_label_total = np.concatenate((train_label_total, train_label), axis=0)
            test_label_total = np.concatenate((test_label_total, test_label), axis=0)
    np.save(os.path.join(save_path, "train_data_total"), train_data_total, allow_pickle=True)
    np.save(os.path.join(save_path, "test_data_total"), test_data_total, allow_pickle=True)
    np.save(os.path.join(save_path, "train_label_total"), train_label_total, allow_pickle=True)
    np.save(os.path.join(save_path, "test_label_total"), test_label_total, allow_pickle=True)
    print(train_data_total.shape)
    print(test_data_total.shape)
    print(train_label_total.shape)
    print(test_label_total.shape)