from joblib.externals.loky.backend import get_context
from torch.utils.data import DataLoader

from data_provider.data_loader import Dataset_ETT_hour, Dataset_ETT_minute, Dataset_Custom

# Registry: value of the `--data` argument -> dataset class that loads it.
# The hourly and 15-minute ETT benchmarks each share a single loader class;
# anything else is expected to go through the generic CSV loader.
data_dict = dict(
    ETTh1=Dataset_ETT_hour,
    ETTh2=Dataset_ETT_hour,
    ETTm1=Dataset_ETT_minute,
    ETTm2=Dataset_ETT_minute,
    custom=Dataset_Custom,
)


def data_provider(args, flag):
    """Build the dataset and DataLoader for one experiment split.

    Args:
        args: experiment configuration namespace. Reads: data, embed,
            batch_size, freq, root_path, data_path, seq_len, label_len,
            pred_len, features, target, seasonal_patterns, num_workers,
            prefetch_factor, use_gpu, gpu.
        flag: split name (e.g. 'train', 'val', 'test'); shuffling is
            disabled only for 'test'.

    Returns:
        A ``(data_set, data_loader)`` tuple.

    Raises:
        KeyError: if ``args.data`` is not a key of ``data_dict``.
    """
    Data = data_dict[args.data]
    # The 'timeF' embedding requires time-feature encoding (timeenc=1);
    # every other embedding style uses the raw time stamps (timeenc=0).
    timeenc = 1 if args.embed == 'timeF' else 0

    shuffle_flag = flag != 'test'
    drop_last = True
    batch_size = args.batch_size
    freq = args.freq

    data_set = Data(
        args=args,
        root_path=args.root_path,
        data_path=args.data_path,
        flag=flag,
        size=[args.seq_len, args.label_len, args.pred_len],
        features=args.features,
        target=args.target,
        timeenc=timeenc,
        freq=freq,
        seasonal_patterns=args.seasonal_patterns
    )
    print(flag, len(data_set))

    # All multiprocessing-only DataLoader options are gated on one condition.
    use_workers = args.num_workers > 0
    data_loader = DataLoader(
        data_set,
        batch_size=batch_size,
        shuffle=shuffle_flag,
        num_workers=args.num_workers,
        drop_last=drop_last,
        # BUGFIX: this previously tested `num_workers > 1`, which silently
        # discarded the user's prefetch_factor when exactly one worker was
        # used. DataLoader requires prefetch_factor=None iff num_workers == 0.
        prefetch_factor=args.prefetch_factor if use_workers else None,
        pin_memory=True,
        # persistent_workers expects a bool; pass False (not None) when the
        # loader runs in the main process.
        persistent_workers=use_workers,
        pin_memory_device=f'cuda:{args.gpu}' if use_workers and args.use_gpu else '',
        # loky contexts survive fork-unsafe environments better than the
        # default; only meaningful when worker processes exist.
        multiprocessing_context=get_context('loky') if use_workers else None
    )
    return data_set, data_loader
