import torch
from torch.utils.data import DataLoader
import numpy as np

# Number of range readings in a single lidar scan (1080-beam scanner is
# assumed here -- TODO confirm against the recorded .npy data).
POINTS = 1080
# Occupancy-grid image side length; not referenced in this file's visible
# code -- presumably consumed by downstream model code, verify before removing.
IMG_SIZE = 64
# Line separator used when parsing the index .txt files.
NEW_LINE = "\n"

class VaeTestDataset(torch.utils.data.Dataset):
    """Sequence dataset of lidar scans, robot positions, and velocities.

    Expected directory layout under ``img_path``::

        scans/<file_name>.txt         one .npy file name per line
        positions/<file_name>.txt
        velocities/<file_name>.txt

    plus the listed .npy files inside the same sub-directories.

    Each item is a dict of float32 tensors spanning ``seq_len * 2``
    consecutive frames:
        'scan'     -> (seq_len*2, n_points)  # n_points per scan file (1080 here)
        'position' -> (seq_len*2, 3)
        'velocity' -> (seq_len*2, 2)
    """

    def __init__(self, img_path, file_name, seq_len):
        """
        Args:
            img_path: root directory of the dataset split.
            file_name: stem of the index files, e.g. 'train' or 'dev'.
            seq_len: half-window size; each item spans seq_len*2 frames.
        """
        self.seq_len = seq_len
        # Read the three index files.  `with` (inside _read_index) guarantees
        # the handles are closed even if a read raises -- the original code
        # leaked them on error.
        self.scan_file_names = self._read_index(img_path, 'scans', file_name)
        self.pos_file_names = self._read_index(img_path, 'positions', file_name)
        self.vel_file_names = self._read_index(img_path, 'velocities', file_name)
        self.length = len(self.scan_file_names)

        print("dataset length: ", self.length)

    @staticmethod
    def _read_index(img_path, subdir, file_name):
        """Return full paths of the .npy files listed in <subdir>/<file_name>.txt."""
        index_path = img_path + '/' + subdir + '/' + file_name + '.txt'
        # splitlines() also strips a stray '\r' on CRLF files, which the
        # original split('\n') left embedded in the path.
        with open(index_path, 'r') as fp:
            return [img_path + '/' + subdir + '/' + line
                    for line in fp.read().splitlines()
                    if '.npy' in line]

    def __len__(self):
        return self.length

    def __getitem__(self, idx):
        """Return seq_len*2 consecutive frames starting at (or clamped near) idx.

        Returns:
            dict with float32 tensors under keys 'scan', 'position', 'velocity'.
        """
        window = self.seq_len * 2
        # Clamp the window start so [idx_s, idx_s + window) stays inside the
        # dataset.  The original fallback `idx - window` could go negative
        # near the start of a short dataset and silently wrap around via
        # negative indexing, yielding an out-of-order frame sequence.
        idx_s = max(0, min(idx, self.length - window))

        # np.stack generalizes the original fixed-size POINTS preallocation:
        # identical output for 1080-point scans, but any per-scan size works.
        scans = np.stack([np.load(self.scan_file_names[idx_s + i])
                          for i in range(window)]).astype(np.float64)
        positions = np.stack([np.load(self.pos_file_names[idx_s + i])
                              for i in range(window)]).astype(np.float64)
        vels = np.stack([np.load(self.vel_file_names[idx_s + i])
                         for i in range(window)]).astype(np.float64)

        # Sanitize: NaN/inf and exact 30 readings are mapped to 20
        # (presumably a max-range / no-return marker -- confirm with sensor spec).
        scans[np.isnan(scans)] = 20.
        scans[np.isinf(scans)] = 20.
        scans[scans == 30] = 20.

        positions[np.isnan(positions)] = 0.
        positions[np.isinf(positions)] = 0.

        vels[np.isnan(vels)] = 0.
        vels[np.isinf(vels)] = 0.

        # Convert to float32 tensors for the model:
        return {
            'scan': torch.FloatTensor(scans),
            'position': torch.FloatTensor(positions),
            'velocity': torch.FloatTensor(vels),
        }



if __name__ == "__main__":

    dset_path = "/data/OGM-datasets/OGM-Turtlebot2/train"

    # BUG FIX: VaeTestDataset.__init__ requires (img_path, file_name, seq_len);
    # the original call passed only two arguments and raised TypeError before
    # any data was loaded.  seq_len=10 is an arbitrary smoke-test window.
    dset = VaeTestDataset(dset_path, "train", seq_len=10)
    dloader = DataLoader(dset, batch_size=1, num_workers=2,
                         shuffle=False, drop_last=True, pin_memory=True)
    # Smoke test: iterate the loader and print the keys of each batch dict
    # (the values were never used, so iterate keys directly).
    for batch in dloader:
        for key in batch:
            print(key)


