import torch
import numpy as np
from torch.utils.data import Dataset
import os
import imageio as iio
from typing import List, Union, Dict


def get_instance_path(path: str) -> List[str]:
    """Return the sorted full paths of every entry directly inside *path*."""
    entries = os.listdir(path)
    full_paths = (os.path.join(path, name) for name in entries)
    return sorted(full_paths)

def load_from_npy(files: List[str]):
    return np.stack([np.load(i) for i in sorted(files)], axis=0)

def normalize_feature(feature: np.ndarray) -> np.ndarray:
    """Standardize *feature* column-wise to zero mean and unit std.

    Args:
        feature: array whose statistics are taken along axis 0
            (rows = samples, columns = feature dims).

    Returns:
        ``(feature - mean) / std`` with per-column mean and standard
        deviation.

    Raises:
        ValueError: if any column has zero variance (division would blow up).
    """
    mean = np.mean(feature, axis=0)
    # BUG FIX: the mean of squared deviations is the *variance*; take the
    # square root to get the standard deviation (the original divided by
    # the variance, over/under-scaling every column).
    std = np.sqrt(np.mean(np.square(feature - mean), axis=0))
    if not np.all(std > 0):
        # Explicit raise instead of `assert`: asserts vanish under `python -O`.
        raise ValueError("normalize_feature: zero-variance feature column(s)")
    return (feature - mean) / std

class OGMNavigationDataset(Dataset):
    """Occupancy-grid-map navigation dataset.

    Pairs per-timestep OGM images found under *map_base_path* with
    position / velocity / scan sequences listed in per-split ``.txt`` meta
    files under *other_path*.
    """

    def __init__(self, 
                 map_base_path: str, 
                 other_path: str,
                 tag: str,
                 hist_seq_length: int,
                 pred_seq_length: int,
                 device: Union[torch.device, str] = 'cpu',
                 split: str = 'train') -> None:
        # NOTE(review): attribute name looks like a typo for `map_base_path`;
        # kept unchanged for backward compatibility with external readers.
        self.map_other_path = map_base_path
        self.other_path = other_path
        self.tag = tag
        self.split = split
        self.hist_seq_length = hist_seq_length
        self.pred_seq_length = pred_seq_length
        # Each sample needs a full history+prediction window.
        self.seq_len = hist_seq_length + pred_seq_length
        self.map_files = get_instance_path(map_base_path)
        self.device = device if isinstance(device, torch.device) \
                      else torch.device(device)

        # Each modality ships a `<split>.txt` meta file listing its .npy files.
        # (The original sorted only positions/velocities; scans are now sorted
        # too so all three modalities stay index-aligned.)
        self.scan_files = self._read_meta_file('scans')
        self.pos_files = self._read_meta_file('positions')
        self.vel_files = self._read_meta_file('velocities')
        self._check_consistency()
        self.pos = load_from_npy(self.pos_files)
        self.vel = load_from_npy(self.vel_files)
        # Last usable window start index is len(map_files) - seq_len.
        self.length = len(self.map_files) - self.seq_len

    def _read_meta_file(self, kind: str) -> List[str]:
        """Read ``<other_path>/<split>/<kind>/<split>.txt`` and return the
        sorted full paths of every ``.npy`` entry it lists."""
        meta_file = os.path.join(self.other_path, self.split, kind,
                                 self.split + '.txt')
        files = []
        with open(meta_file, 'r') as f:
            for line in f.read().split('\n'):
                if '.npy' in line:
                    files.append(os.path.join(self.other_path, self.split,
                                              kind, line))
        return sorted(files)

    def __len__(self) -> int:
        return self.length
    
    def __getitem__(self, idx: int) -> Dict[str, torch.Tensor]:
        """Dispatch on `tag`; supported values: 'local', 'motion_compensated'."""
        tag = self.tag.lower()
        if tag == 'local':
            return self.get_local_data(idx)
        elif tag == 'motion_compensated':
            # BUG FIX: the original compared the bound method `self.tag.lower`
            # (no call parentheses) to a string, so this branch was
            # unreachable and unknown tags silently yielded None.
            return self.get_motion_compensation_data(idx)
        raise ValueError(f"OGMNavigationDataset: unknown tag {self.tag!r}")
    
    def get_local_data(self, idx: int) -> Dict[str, torch.Tensor]:
        """Return the idx-th map image (as a 1xHxW float tensor in [0, 1])
        together with the full normalized velocity array."""
        image = iio.imread(self.map_files[idx]).astype(np.float32) / 255.
        image_tensor = torch.from_numpy(image).unsqueeze(0)
        # NOTE(review): normalizes over the whole velocity array every call;
        # consider caching if this shows up in profiling.
        vel_normalized = torch.from_numpy(normalize_feature(self.vel))
        data = {
            'maps': image_tensor,
            'velocities': vel_normalized
        }
        return data

    def get_motion_compensation_data(self, idx: int) -> Dict[str, torch.Tensor]:
        raise NotImplementedError
         
    def _check_consistency(self):
        """Verify map/position/velocity file lists are the same length and
        aligned by basename (e.g. `0001.png` <-> `0001.npy`)."""
        def check_order(idx: int) -> int:
            # Compare the extension-less basenames of the idx-th entries.
            map_order = self.map_files[idx].split('/')[-1].split('.')[0]
            pos_order = self.pos_files[idx].split('/')[-1].split('.')[0]
            vel_order = self.vel_files[idx].split('/')[-1].split('.')[0]
            if map_order == pos_order and pos_order == vel_order:
                return 1
            else:
                return 0
        order_matching = [check_order(i) for i in range(len(self.map_files))]
        length_match = len(self.map_files) == len(self.pos_files) and \
            len(self.pos_files) == len(self.vel_files) and len(self.map_files) == sum(order_matching)
        if not length_match:
            # Explicit raise instead of `assert` (asserts vanish under -O).
            raise ValueError("dataset file lists are inconsistent or misaligned")
        
        
        
if __name__ == "__main__":
    # Smoke-test: build the dataset against local copies of the OGM data.
    map_base_path = "/home/lisa/Codes/datasets/OGM-dataset-maps/OGM-Turtlebot2-maps/train"
    other_path = "/home/lisa/Codes/datasets/OGM-datasets/OGM-Turtlebot2"
    dataset_kwargs = dict(
        map_base_path=map_base_path,
        other_path=other_path,
        hist_seq_length=10,
        pred_seq_length=10,
        tag=None,
    )
    dset = OGMNavigationDataset(**dataset_kwargs)
    