from torch.utils.data import Dataset, DataLoader
import os
import torchvision.transforms as transforms
from PIL import Image
import cv2

# Training-time pipeline: random geometric augmentation (affine + perspective),
# then resize/center-crop to the 224x224 input size and normalize with the
# standard ImageNet channel statistics (matching pretrained backbones).
train_transform = transforms.Compose(
    [transforms.RandomAffine(degrees=30, translate=(0.1, 0.1), scale=(0.9, 1.1), shear=10),
     # interpolation=3 selects bicubic resampling.  NOTE(review): newer
     # torchvision deprecates raw ints here in favor of
     # transforms.InterpolationMode.BICUBIC — confirm the installed version.
     transforms.RandomPerspective(distortion_scale=0.5, p=0.5, interpolation=3),
     transforms.Resize((256, 256)),
     transforms.CenterCrop((224, 224)),
     transforms.ToTensor(),
     transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])]
)
# Evaluation pipeline: deterministic resize straight to 224x224, same
# normalization as training, no augmentation.
test_transform = transforms.Compose(
    [
     transforms.Resize((224, 224)),
     transforms.ToTensor(),
     transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])]
)
class MyDataset(Dataset):
    """Image-classification dataset backed by an index text file.

    Each non-blank line of ``datatxt`` is ``<filename> <label>`` with a
    1-indexed label; labels are converted to 0-indexed here.  The class name
    is derived from the filename by stripping trailing digits from its stem
    (e.g. ``cardboard12.jpg`` -> ``cardboard``), and the image is expected at
    ``<root>/<class name>/<filename>``.
    """

    def __init__(self, datatxt, root, transform=None):
        """Parse the index file and build the (path, label) sample list.

        Args:
            datatxt: path to the index text file.
            root: dataset root directory.
            transform: callable applied to each PIL image in ``__getitem__``;
                when omitted (or ``None``) the module-level ``train_transform``
                is used, looked up lazily at call time.
        """
        with open(datatxt, 'r') as f:
            lines = f.readlines()
        # Late-bind the default so it resolves when the dataset is built,
        # not when the class statement executes.
        self.transform = train_transform if transform is None else transform
        self.datas = []       # list of (image path, 0-indexed label)
        self.int2char = {}    # 0-indexed label -> class-name string
        for line in lines:
            line = line.strip()
            if not line:
                # Tolerate blank/trailing lines in the index file.
                continue
            # split() (rather than split(' ')) tolerates repeated whitespace.
            name, label = line.split()
            # 'cardboard12.jpg' -> 'cardboard'; doubles as the subdirectory.
            class_name = name.split('.')[0].strip('0123456789')
            path = os.path.join(root, class_name, name)
            idx = int(label) - 1  # labels in the file are 1-indexed
            self.int2char[idx] = class_name
            self.datas.append((path, idx))

    def __getitem__(self, item):
        """Load the image at index ``item``, apply the transform, return (img, label)."""
        path, label = self.datas[item]
        img = Image.open(path).convert('RGB')
        return self.transform(img), label

    def __len__(self):
        """Number of samples listed in the index file."""
        return len(self.datas)


def get_dataloader(batchsize):
    """Build the train/val/test DataLoaders for the garbage dataset.

    Args:
        batchsize: batch size shared by all three loaders.

    Returns:
        (trainloader, valloader, testloader, int2char) where ``int2char``
        maps each 0-indexed label to its class-name string.
    """
    root = '../datas/Garbage classification'
    # BUG FIX: the training set previously used test_transform, which left
    # the augmentation pipeline (train_transform) entirely unused.
    trainset = MyDataset(datatxt=os.path.join(root, 'one-indexed-files-notrash_train.txt'),
                         root=root, transform=train_transform)
    valset = MyDataset(datatxt=os.path.join(root, 'one-indexed-files-notrash_val.txt'),
                       root=root, transform=test_transform)
    testset = MyDataset(datatxt=os.path.join(root, 'one-indexed-files-notrash_test.txt'),
                        root=root, transform=test_transform)
    # Only the training loader is shuffled; evaluation order stays fixed.
    trainloader = DataLoader(dataset=trainset, batch_size=batchsize, shuffle=True)
    valloader = DataLoader(dataset=valset, batch_size=batchsize, shuffle=False)
    testloader = DataLoader(dataset=testset, batch_size=batchsize, shuffle=False)
    return trainloader, valloader, testloader, trainset.int2char
