import torch.utils.data as data
import numpy as np
from torch.utils.data import DataLoader
from utils.tools import get_rad
from utils import metrics
from utils.config import *
from os.path import basename, isfile
import numba
from numba import jit
# from datetime import datetime, timedelta
# import cv2

def make_dataset(mode):
    """Collect the (apcp, radar, label) file-path triples for one split.

    Reads ``{txt_path}/tp_{mode}.txt`` (``txt_path`` comes from utils.config),
    where each line holds three space-separated paths, and keeps only the
    triples whose three files all exist on disk.

    :param mode: split name, e.g. 'train' or 'val' (lower-cased for the filename)
    :type mode: str
    :return: list of (acpc_path, radar_path, label_path) tuples
    :rtype: list[tuple[str, str, str]]
    """
    # NOTE: the original @numba.jit decorator was removed — numba cannot
    # compile file I/O / f-strings / string splitting, so it either errored
    # (nopython default) or fell back to object mode with no speedup.
    tensors = []
    txtpath = f"{txt_path}/tp_{mode.lower()}.txt"
    # Context manager so the index file is always closed.
    with open(txtpath, 'r') as f:
        txtList = f.readlines()

    for data_path in txtList:
        acpc_path, radar_path, label_path = data_path.strip().split(' ')

        # `and` instead of multiplying booleans: same result, clearer intent,
        # and short-circuits the remaining isfile() stat calls.
        if isfile(acpc_path) and isfile(radar_path) and isfile(label_path):
            tensors.append((acpc_path, radar_path, label_path))

    return tensors

def data_loader(acpc_file, radar_file, label_file):
    """Load one training sample as a pair of numpy arrays.

    Builds the model input from the APCP text file and the target as the
    element-wise maximum of station observations and radar-derived values.
    Returns numpy arrays (NOT torch tensors — conversion happens in the
    DataLoader collate step).

    :param acpc_file: path to the APCP precipitation text grid
    :param radar_file: path to the radar file read by utils.tools.get_rad
    :param label_file: path to the station-observation text grid
    :return: (input_data, target_data), each with a leading channel axis of 1,
             or None when the files are malformed
    """
    # NOTE: the original @numba.jit decorator was removed — this function is
    # built around np.loadtxt, a project helper (get_rad) and broad
    # try/except, none of which numba can compile.
    try:
        # Crop to the region of interest; the slice bounds assume a fixed
        # source grid layout — TODO confirm against the data files.
        apcp_data = np.loadtxt(acpc_file).copy()[120:-200, 200:-160]

        input_data = np.expand_dims(apcp_data, axis=0)  # add channel axis
        input_data[input_data < 0] = 0  # clamp negative precipitation to zero

        # get_rad also returns reference lat/lon; only the grid is used here.
        raddata, lat0, lon0 = get_rad(radar_file)
        raddata = raddata[140:-200, 220:-180]

        # Station grid; NaNs (missing stations) become 0.
        station_data = np.nan_to_num(np.loadtxt(label_file))

        # Target = element-wise max of station and radar estimates.
        target_data = np.where(station_data > raddata, station_data, raddata)
        target_data[target_data < 0] = 0
        target_data = np.expand_dims(target_data, axis=0)  # add channel axis

        return (input_data, target_data)

    except ValueError:
        # Malformed/ragged text grid from np.loadtxt.
        print("DATA ERROR :", basename(acpc_file))
        return None
    except AttributeError:
        print("AttributeError:", basename(acpc_file))
        return None


class SegmentedData(data.Dataset):
    """Dataset of (apcp, radar, label) samples for one train/val split.

    Each item is whatever the configured loader returns for the triple of
    file paths at that index (a numpy (input, target) pair, or None when a
    file is malformed).
    """

    def __init__(self, mode, loader=data_loader):
        """
        Load data kept in folders and their corresponding segmented data

        :param mode: train/val mode
        :type mode: str
        :param loader: type of data loader
        :type loader: function
        """
        self.tensors = make_dataset(mode)
        self.loader = loader

    def __getitem__(self, index):
        """Load and return the sample stored at *index*."""
        apcp_path, radar_path, label_path = self.tensors[index]
        return self.loader(apcp_path, radar_path, label_path)

    def __len__(self):
        """Number of path triples found for this split."""
        return len(self.tensors)


if __name__ == '__main__':
    # Quick sanity check: iterate the validation split and report the MAE
    # between model input and target for each (filtered) batch.

    data_obj_val = SegmentedData(mode='val')
    val_generator = DataLoader(data_obj_val, batch_size=5, shuffle=False, num_workers=6, pin_memory=True)
    data_len_val = len(data_obj_val)
    print("数据样本数：", data_len_val)

    maexList = []
    # Renamed loop variable: `data` shadowed the `torch.utils.data as data`
    # module alias imported at the top of the file.
    for batch in val_generator:
        # NOTE(review): the default collate_fn raises on None samples rather
        # than yielding None batches — this guard likely never fires; a custom
        # collate_fn would be needed for it to matter.
        if batch is None:
            continue
        x, y = batch
        # Skip batches with implausibly large inputs — threshold is a
        # heuristic; TODO confirm the units/cutoff.
        if x.max() > 20:
            continue

        maex = metrics.mae(x.numpy().squeeze(), y.numpy().squeeze())
        # Bug fix: maexList was created but never populated.
        maexList.append(maex)
        print(maex)

    if maexList:
        print("mean MAE:", sum(maexList) / len(maexList))
