from torch.utils.data import Dataset, DataLoader
import torchvision.transforms as Transforms
import numpy as np
import torch
import cv2
import random
from tqdm import tqdm
import logging
import os
import skimage.io as io

# Configure the root logger so the INFO-level progress messages emitted by
# getDataloader() below are actually printed.
logging.basicConfig(level=logging.INFO)


def Trans():
    """Build the image preprocessing pipeline.

    Returns a torchvision transform that converts an input to a tensor and
    then normalizes it with the ImageNet per-channel mean/std statistics.
    """
    normalize = Transforms.Normalize(
        mean=[0.485, 0.456, 0.406],
        std=[0.229, 0.224, 0.225],
    )
    return Transforms.Compose([Transforms.ToTensor(), normalize])


class MyDataset(Dataset):
    def __init__(self, img, mask, transforms=None):
        super(MyDataset, self).__init__()
        self.img = img
        self.mask = mask
        self.transforms = transforms

    def __getitem__(self, index):
        if self.transforms is not None:
            img = self.transforms(self.img[index])
        else:
            img = self.img[index]
        mask = torch.tensor(self.mask[index]).long()
        return img, mask

    def __len__(self):
        return len(self.img)


def getDataloader(root=None, txtfile_path="../data/train.txt", shuffle=True, BatchSize=16, num_workers=2):
    """Load a <root>/<sample>/{img.png, mask.png} dataset and wrap it in a DataLoader.

    Args:
        root: Directory containing one sub-directory per sample. ``None`` is
            treated as the current directory (the original crashed with a
            TypeError inside ``os.path.join`` when left at its default).
        txtfile_path: Text file listing one sample directory name per line.
            Its base name (train/val/test) is used only for log messages.
        shuffle: Passed through to the DataLoader.
        BatchSize: Batch size; the last incomplete batch is dropped.
        num_workers: Number of DataLoader worker processes.

    Returns:
        A ``torch.utils.data.DataLoader`` yielding (img, mask) batches.

    Raises:
        ValueError: If the list file is empty.
        FileNotFoundError: If the first image cannot be read.
    """
    if root is None:
        root = ""  # os.path.join(None, ...) raises TypeError
    # Dataset name: train, val or test (taken from the txt file's base name).
    set_name = os.path.splitext(os.path.basename(txtfile_path))[0]
    logging.info(f"{set_name}数据集加载中......")
    # Read the sample list (one directory name per line).
    with open(txtfile_path, "r") as f:
        file_list = f.readlines()
    nums = len(file_list)
    if nums == 0:
        raise ValueError(f"empty dataset list file: {txtfile_path}")
    # Probe the first image for its shape so the full set can be pre-allocated.
    first_path = os.path.join(root, file_list[0].strip(), "img.png")
    first_img = cv2.imread(first_path)
    if first_img is None:
        # cv2.imread silently returns None on a missing/unreadable file,
        # which previously surfaced later as a confusing AttributeError.
        raise FileNotFoundError(f"cannot read image: {first_path}")
    h, w, c = first_img.shape
    img = np.zeros((nums, h, w, c), np.float32)
    mask = np.zeros((nums, h, w), np.float32)
    with tqdm(total=nums) as pbar:
        for i, file_name in enumerate(file_list):
            sample_dir = os.path.join(root, file_name.strip())
            # NOTE(review): cv2.imread yields BGR, while Trans() normalizes
            # with the RGB ImageNet statistics — confirm this is intended.
            img[i] = cv2.imread(os.path.join(sample_dir, "img.png"))
            mask[i] = io.imread(os.path.join(sample_dir, "mask.png"))
            pbar.update()
    logging.info(f"{set_name}数据读取完毕，开始进行预处理......")
    # Shuffle samples once up front (the DataLoader may shuffle again per epoch).
    idx = list(range(nums))
    random.shuffle(idx)
    img = img[idx]
    mask = mask[idx]
    # Wrap in the preprocessing dataset and build the loader.
    mydata = MyDataset(img, mask, Trans())
    mydataloader = DataLoader(mydata, shuffle=shuffle,
                              batch_size=BatchSize, drop_last=True, num_workers=num_workers)
    logging.info(f"{set_name}数据预处理完毕! 共{len(mydataloader) * BatchSize}份")
    return mydataloader


if __name__ == '__main__':
    path = "../data/train.txt"
    # BUG fix: `path` was previously passed positionally, landing in the
    # `root` parameter instead of `txtfile_path`, so every sample path was
    # joined onto the txt file itself. Pass both by keyword.
    # NOTE(review): root is presumed to be "../data" from the list-file
    # location — confirm against the actual data layout.
    train_loader = getDataloader(root="../data", txtfile_path=path)
