import numpy as np
import os
import glob as glob
import pandas as pd
from torch.utils.data import Dataset, IterableDataset
import torch

def load_bearingdata_test(root_path,
                     data_path='BearingsDetection',
                     data_name='bearingdetection'):
    """Load the test split of the bearing-detection dataset.

    Each file under ``root_path/data_path/data_name`` is treated as one
    class (its label is the sorted file index). The second half of every
    file is the test split, from which ``testsize`` random windows of
    length ``sample_len`` are drawn. Sampling is seeded, so the result
    is reproducible across calls.

    Args:
        root_path: dataset root directory.
        data_path: sub-directory under ``root_path``.
        data_name: dataset folder name under ``data_path``.

    Returns:
        list of ``(window, label)`` tuples, where ``window`` is a
        ``(1024, 2)`` ndarray of the ['FE', 'DE'] columns and ``label``
        is the integer file index.
    """
    np.random.seed(0)  # fixed seed -> deterministic sampling
    datadir = os.path.join(root_path, data_path, data_name)
    filelist = sorted(glob.glob(os.path.join(datadir, '*')))
    test = []
    testsize = 400     # windows drawn per class
    sample_len = 1024  # window length (rows per sample)
    for label, f in enumerate(filelist):
        data = pd.read_csv(f, names=['FE', 'DE'])
        # Second half of each file is reserved for testing.
        # `np.int` was removed in NumPy 1.24 -> use integer floor division.
        splitnum = len(data) // 2
        testdata = data.iloc[splitnum:, :]
        # Random window start positions within the test split.
        test_sp = np.random.randint(0, len(testdata) - sample_len - 1, testsize)
        for sp in test_sp:
            x_ = testdata.iloc[sp:sp + sample_len, :].values
            test.append((x_, label))
    return test

def load_bearingdata_train(root_path,
                     data_path='BearingsDetection',
                     data_name='bearingdetection'):
    """Load the train split of the bearing-detection dataset.

    Each file under ``root_path/data_path/data_name`` is treated as one
    class (its label is the sorted file index). The first half of every
    file is the train split, from which ``trainsize`` random windows of
    length ``sample_len`` are drawn. Sampling is seeded, so the result
    is reproducible across calls.

    Args:
        root_path: dataset root directory.
        data_path: sub-directory under ``root_path``.
        data_name: dataset folder name under ``data_path``.

    Returns:
        list of ``(window, label)`` tuples, where ``window`` is a
        ``(1024, 2)`` ndarray of the ['FE', 'DE'] columns and ``label``
        is the integer file index.
    """
    np.random.seed(0)  # fixed seed -> deterministic sampling
    datadir = os.path.join(root_path, data_path, data_name)
    filelist = sorted(glob.glob(os.path.join(datadir, '*')))
    train = []
    trainsize = 50     # windows drawn per class
    sample_len = 1024  # window length (rows per sample)
    for label, f in enumerate(filelist):
        data = pd.read_csv(f, names=['FE', 'DE'])
        # First half of each file is reserved for training.
        # `np.int` was removed in NumPy 1.24 -> use integer floor division.
        splitnum = len(data) // 2
        traindata = data.iloc[:splitnum, :]
        # Random window start positions within the train split.
        train_sp = np.random.randint(0, len(traindata) - sample_len - 1, trainsize)
        for sp in train_sp:
            x_ = traindata.iloc[sp:sp + sample_len, :].values
            train.append((x_, label))
    return train

class Dataset_bearing(Dataset):
    """Map-style torch Dataset of bearing vibration windows.

    Wraps ``load_bearingdata_train`` / ``load_bearingdata_test`` (selected
    by ``flag``) and serves each sample as ``(window, label, index)``.
    """

    def __init__(self,
                 root_path,
                 data_path,
                 data_name,
                 flag='TRAIN',
                 config=None
                 ):
        """Load the requested split into memory.

        Args:
            root_path: dataset root directory.
            data_path: sub-directory under ``root_path``.
            data_name: dataset folder name under ``data_path``.
            flag: which split to load — 'TRAIN' or 'TEST'.
            config: optional experiment configuration, stored as-is.

        Raises:
            ValueError: if ``flag`` is neither 'TRAIN' nor 'TEST'.
        """
        self.root_path = root_path
        self.data_path = data_path
        self.data_name = data_name
        self.flag = flag
        self.config = config

        # NOTE(review): recorded but never applied in this class — confirm
        # whether downstream code consumes it. Options: 'standard',
        # 'maxmin', 'norm'.
        self.preprocessing = 'standard'
        self.data = self.__read_data()

        self.ts_num = len(self.data)                       # number of samples
        self.ts_len, self.ts_dim = self.data[0][0].shape   # window length / channels
        # NOTE(review): hard-coded class count — presumably matches the
        # number of files on disk; verify against the dataset folder.
        self.num_classes = 10

    def __read_data(self):
        """Return the sample list for the split selected by ``self.flag``."""
        if self.flag == 'TRAIN':
            return load_bearingdata_train(root_path=self.root_path,
                                          data_path=self.data_path,
                                          data_name=self.data_name)
        if self.flag == 'TEST':
            return load_bearingdata_test(root_path=self.root_path,
                                         data_path=self.data_path,
                                         data_name=self.data_name)
        # Previously an unknown flag fell through and raised a confusing
        # NameError; fail explicitly instead.
        raise ValueError(f"flag must be 'TRAIN' or 'TEST', got {self.flag!r}")

    def __getitem__(self, idx):
        """Return ``(window_tensor, label_tensor, idx)`` for sample ``idx``."""
        x, y = self.data[idx]
        return torch.from_numpy(x), torch.from_numpy(np.array(y)), idx

    def __len__(self):
        """Number of samples in the loaded split."""
        return self.ts_num




if __name__ == '__main__':
    import options

    args = options.Options().parse()

    # Smoke test: build the training split and inspect a single sample.
    dataset = Dataset_bearing(root_path=args.root_path,
                              data_path=args.data_path,
                              data_name=args.data_name,
                              flag='TRAIN',
                              config=args)
    print(dataset)
    print(len(dataset))
    sample_x, sample_y, sample_idx = dataset[3333]
    print(sample_x.shape, sample_y, sample_idx)