import torch
import numpy as np
from torch.utils.data import Dataset, DataLoader
import os.path as osp
import logging

# Module logger: INFO and above, echoed to the console as
# "[module] LEVEL message".
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)

_console_handler = logging.StreamHandler()
_console_handler.setFormatter(
    logging.Formatter("[%(name)s] %(levelname)s %(message)s"))
logger.addHandler(_console_handler)

# Default sequence-length constants, kept consistent with prior works:
# obs_len = pred_len = 10, so a full window is [10, 10] = 20 frames.

class ScanPosVelDataset(Dataset):
    """Sliding-window dataset over per-frame laser scans, positions and velocities.

    Expects the following layout under ``dset_path``::

        scans/<split>.txt        one .npy file name per line
        positions/<split>.txt
        velocities/<split>.txt

    The three list files must be frame-aligned (same order, same length —
    TODO confirm against the dataset generator).  Each item is a window of
    ``obs_len + pred_len`` consecutive frames.
    """

    def __init__(self, 
                 dset_path, 
                 split, 
                 num_points=1080, 
                 image_size=64, 
                 obs_len=10,
                 pred_len=10):
        """
        Args:
            dset_path: root directory of the dataset.
            split: split name, e.g. 'train' or 'dev' (selects <split>.txt).
            num_points: number of range readings per scan frame.
            image_size: kept for interface compatibility; unused here.
            obs_len: number of observed frames per window.
            pred_len: number of predicted frames per window.
        """
        self.num_points = num_points
        self.image_size = image_size
        self.obs_len = obs_len
        self.pred_len = pred_len

        root = osp.abspath(dset_path)
        # One absolute .npy path list per modality.
        self.scan_splits = self._read_split_paths(root, 'scans', split)
        self.pos_splits = self._read_split_paths(root, 'positions', split)
        self.vel_splits = self._read_split_paths(root, 'velocities', split)
        self.length = len(self.scan_splits)

        logging.getLogger(__name__).info("dataset length: %d", self.length)

    @staticmethod
    def _read_split_paths(root, subdir, split):
        """Return the absolute .npy paths listed in <root>/<subdir>/<split>.txt."""
        list_path = osp.join(root, subdir, split + '.txt')
        paths = []
        with open(list_path, 'r') as f:
            for line in f.read().split('\n'):
                # Skip blank lines / non-data lines.
                if '.npy' in line:
                    paths.append(osp.join(root, subdir, line))
        return paths

    def __len__(self):
        # NOTE(review): this is the number of frames, not the number of
        # complete windows; __getitem__ shifts end-of-split indices back so
        # every idx still yields a full window.
        return self.length

    def __getitem__(self, idx):
        """Return a dict of float32 tensors for one window of frames.

        Keys: 'scan' (seq_len, num_points), 'position' (seq_len, 3),
        'velocity' (seq_len, 2), where seq_len = obs_len + pred_len.
        """
        seq_len = self.obs_len + self.pred_len
        scans = np.zeros((seq_len, self.num_points))
        positions = np.zeros((seq_len, 3))
        vels = np.zeros((seq_len, 2))

        # Pick the window start so the full window stays inside the split.
        if idx + seq_len < self.length:
            idx_s = idx
        else:
            # Shift back one window length; clamp at 0 so a short dataset
            # cannot produce a negative start (a negative index would
            # silently wrap around and load frames from the end of the list).
            idx_s = max(0, idx - seq_len)

        for i in range(seq_len):
            scans[i] = np.load(self.scan_splits[idx_s + i])
            positions[i] = np.load(self.pos_splits[idx_s + i])
            vels[i] = np.load(self.vel_splits[idx_s + i])

        # Sanitize: replace NaN/inf readings; scan values of exactly 30
        # (presumably a max-range sentinel — confirm with sensor spec)
        # are mapped to 20 for consistency with prior works.
        scans[np.isnan(scans)] = 20.
        scans[np.isinf(scans)] = 20.
        scans[scans == 30] = 20.

        positions[np.isnan(positions)] = 0.
        positions[np.isinf(positions)] = 0.

        vels[np.isnan(vels)] = 0.
        vels[np.isinf(vels)] = 0.

        # Convert to float32 tensors for the model.
        return {
            'scan': torch.from_numpy(scans).float(),
            'position': torch.from_numpy(positions).float(),
            'velocity': torch.from_numpy(vels).float(),
        }



def get_dataloader(path, split, batch_size):
    """Build a ScanPosVelDataset for the given split and wrap it in a
    shuffling DataLoader.

    Returns:
        (dataset, dataloader) tuple.
    """
    dataset = ScanPosVelDataset(path, split)
    loader = DataLoader(
        dataset,
        batch_size,
        shuffle=True,
        drop_last=True,
        pin_memory=True,
        num_workers=4,
    )
    return dataset, loader

if __name__ == "__main__":
    # Smoke test: build the loader, pull one batch, print tensor shapes.
    d_set, d_loader = get_dataloader('/home/yflei/data/OGM-datasets/OGM-Turtlebot2/train', 'train', 
                                     batch_size=128)

    batch = next(iter(d_loader))
    for key in ('scan', 'position', 'velocity'):
        print(batch[key].shape)
