#!/usr/bin/python
# -*- coding: utf-8 -*-

import mne
import matplotlib.pyplot as plt
from sklearn import preprocessing
import numpy as np
from keras.utils.np_utils import to_categorical
from sklearn.model_selection import train_test_split

"""
    EEG datasets for motor imagery brain computer interface  http://gigadb.org/dataset/100295
    64 channels EEG，512hz freq, 3 seconds MI-task,Total 7 seconds
    100 runs for each of the 52 subjects
    

"""
import os
import numpy as np
import matplotlib.pyplot as plt
from scipy.fftpack import fft,fftfreq
from scipy.io import loadmat
from scipy import interpolate
from scipy import signal
# Input directory with the raw s01.mat ... s52.mat files, and output directory
# for the preprocessed 2 s / 250 Hz epochs.
data_path = "/media/brainseek/dataset/52subjects/rawdata/"
save_path = "/media/brainseek/dataset/52subjects/LB_2s_250Hz/"

# Map the trailing-dot channel labels used in the raw recordings to clean
# montage names (the label is simply the name with the padding dots stripped;
# 'Ff8..' is a typo in the raw data for the Ft8 channel).
# NOTE: fixed 'Fc1..' which previously mapped to 'Fc3', duplicating the value
# of 'Fc3..' — a 64-channel montage needs 64 distinct names.
rename_mapping = {'Fp1.': 'Fp1', 'AF7.': 'AF7', 'AF3.': 'AF3', 'F1.': 'F1', 'F3.': 'F3', 'F5.': 'F5',
                  'F7.': 'F7', 'FT7..': 'FT7', 'Fc5..': 'Fc5', 'Fc3..': 'Fc3', 'Fc1..': 'Fc1', 'C1..': 'C1', 'C3..': 'C3',
                  'C5..': 'C5', 'T7.': 'T7', 'Tp7.': 'Tp7', 'Cp5.': 'Cp5', 'Cp3.': 'Cp3', 'Cp1.': 'Cp1',
                  'P1.': 'P1', 'P3.': 'P3', 'P5.': 'P5', 'P7.': 'P7', 'P9.': 'P9', 'Po7.': 'Po7',
                  'Po3.': 'Po3', 'O1.': 'O1', 'Lz.': 'Lz', 'Oz.': 'Oz', 'Poz..': 'Poz', 'Pz..': 'Pz', 'Cpz..': 'Cpz',
                  'Fpz..': 'Fpz', 'Fp2..': 'Fp2', 'Af8..': 'Af8', 'Af4..': 'Af4', 'Afz..': 'Afz', 'Fz..': 'Fz', 'F2.': 'F2',
                  'F4.': 'F4', 'F6..': 'F6', 'F8..': 'F8', 'Ff8..': 'Ft8', 'Fc6.': 'Fc6', 'Fc4.': 'Fc4', 'Fc2.': 'Fc2',
                  'Fcz..': 'Fcz', 'Cz..': 'Cz', 'C2..': 'C2', 'C4..': 'C4', 'C6..': 'C6', 'T8..': 'T8', 'Tp8..': 'Tp8',
                  'Cp6..': 'Cp6', 'Cp4..': 'Cp4', 'Cp2.': 'Cp2', 'P2.': 'P2', 'P4.': 'P4', 'P6.': 'P6', 'P8.': 'P8',
                  'P10..': 'P10', 'Po8..': 'Po8', 'Po4..': 'Po4', 'O2..': 'O2'}


def get_physionet(subject: int):
    """
    Load one subject's left-hand motor-imagery and rest EEG, standardize each
    epoch, split into train/test, and cut the 12 picked channels into six
    2-channel samples.

    :param subject: subject number, in [1, 52]
    :return: (train_data, test_data, train_label, test_label); the data arrays
             have shape (-1, 500, 2) — 2 s epochs resampled 1024 -> 500
             samples (512 Hz -> 250 Hz), channel pairs last — and the labels
             are one-hot with 2 classes (index 0: rest, index 1: left MI).
    """
    # Load the subject's .mat file (e.g. s01.mat).
    raw_new = loadmat(os.path.join(data_path, 's%02d' % subject))
    # Pick FC3 FC4 FC1 FC2 C3 C4 C1 C2 CP3 CP4 CP1 CP2, which correspond to
    # rows 9 44 10 45 12 49 11 48 17 54 18 55 in the .mat file.
    picks = [9, 44, 10, 45, 12, 49, 11, 48, 17, 54, 18, 55]
    imagery_left = raw_new['eeg'][0, 0]['imagery_left'][picks, :]
    rest_raw = raw_new['eeg'][0, 0]['rest'][picks, :]

    # Subjects have either 100 or 120 trials (358400 or 430080 samples:
    # trials * 7 s * 512 Hz = trials * 3584 samples per channel).
    n_trials = 120 if imagery_left.shape[1] == 430080 else 100
    imagery_left_raw = imagery_left.reshape(12, n_trials, 3584).transpose(1, 2, 0)
    # Keep a 2 s window per trial (samples 1024..2047, i.e. seconds 2-4).
    imagery_left_2s = imagery_left_raw[:, 1024:2048, :]
    # Cut the rest recording into 32 non-overlapping 2 s epochs, channels last.
    rest_raw_cut = rest_raw[:, 0:32768].transpose(1, 0)
    rest_2s = rest_raw_cut.reshape(32, 1024, 12)

    # Standardize every epoch independently: per-channel zero mean / unit
    # variance over the 1024 time samples of that epoch.
    scaler = preprocessing.StandardScaler()
    for i in range(len(imagery_left_2s)):
        imagery_left_2s[i] = scaler.fit_transform(imagery_left_2s[i])
    scaler_rest = preprocessing.StandardScaler()
    for i in range(len(rest_2s)):
        rest_2s[i] = scaler_rest.fit_transform(rest_2s[i])

    # Labels: 0 = rest (baseline), 1 = left-hand motor imagery.
    labels_left = np.ones([1, len(imagery_left_2s)])
    labels_rest = np.zeros([1, len(rest_2s)])

    # Stack the epochs of both classes and one-hot encode the labels.
    data = np.vstack((imagery_left_2s, rest_2s))
    labels_left_rest = np.hstack((labels_left, labels_rest))
    labels = to_categorical(labels_left_rest[0])  # one-hot

    # Split whole trials first, so that all six channel-pairs of one trial end
    # up on the same side of the train/test boundary.
    train_data_ori, test_data_ori, train_label_ori, test_label_ori = train_test_split(
        data, labels, test_size=0.2, random_state=42)
    # Slice the 12 channels into six 2-channel samples, replicating the labels.
    train_data = np.empty((0, train_data_ori.shape[1], 2))
    train_label = np.empty((0, 2))
    test_data = np.empty((0, test_data_ori.shape[1], 2))
    test_label = np.empty((0, 2))
    for i in range(0, 12, 2):
        train_data = np.concatenate((train_data, train_data_ori[:, :, i:i + 2]))
        test_data = np.concatenate((test_data, test_data_ori[:, :, i:i + 2]))
        train_label = np.concatenate((train_label, train_label_ori))
        test_label = np.concatenate((test_label, test_label_ori))
    print('data loaded' + str(subject))
    # Resample 1024 -> 500 samples along the time axis (512 Hz -> 250 Hz).
    train_data = signal.resample_poly(train_data, 500, 1024, axis=1)
    test_data = signal.resample_poly(test_data, 500, 1024, axis=1)

    return train_data, test_data, train_label, test_label


if __name__ == '__main__':
    # Preprocess all 52 subjects, saving per-subject arrays plus one combined
    # set. Parts are collected in lists and concatenated once at the end —
    # repeated np.concatenate inside the loop would be quadratic in total size.
    train_data_parts = []
    test_data_parts = []
    train_label_parts = []
    test_label_parts = []
    subjects_good = np.arange(1, 53)  # all subjects, 1..52
    # makedirs with exist_ok also creates missing parents and is re-run safe.
    os.makedirs(save_path, exist_ok=True)
    for subs in subjects_good:
        train_data, test_data, train_label, test_label = get_physionet(subs)
        # Per-subject files.
        np.save(os.path.join(save_path, "train_data" + str(subs)), train_data, allow_pickle=True)
        np.save(os.path.join(save_path, "test_data" + str(subs)), test_data, allow_pickle=True)
        np.save(os.path.join(save_path, "train_label" + str(subs)), train_label, allow_pickle=True)
        np.save(os.path.join(save_path, "test_label" + str(subs)), test_label, allow_pickle=True)
        train_data_parts.append(train_data)
        test_data_parts.append(test_data)
        train_label_parts.append(train_label)
        test_label_parts.append(test_label)
    train_data_total = np.concatenate(train_data_parts, axis=0)
    test_data_total = np.concatenate(test_data_parts, axis=0)
    train_label_total = np.concatenate(train_label_parts, axis=0)
    test_label_total = np.concatenate(test_label_parts, axis=0)
    # Combined files over all subjects.
    np.save(os.path.join(save_path, "train_data_total"), train_data_total, allow_pickle=True)
    np.save(os.path.join(save_path, "test_data_total"), test_data_total, allow_pickle=True)
    np.save(os.path.join(save_path, "train_label_total"), train_label_total, allow_pickle=True)
    np.save(os.path.join(save_path, "test_label_total"), test_label_total, allow_pickle=True)
    print(train_data_total.shape)
    print(test_data_total.shape)
    print(train_label_total.shape)
    print(test_label_total.shape)