import os
import random
from PIL import Image
from torch.utils.data import Dataset, DataLoader
from torchvision import transforms
import torch
from torchvision.transforms import functional as F
from io import BytesIO

class SRDatasetPT(Dataset):
    """Paired HR/LR super-resolution dataset backed by pre-saved ``.pt`` tensors.

    HR and LR tensors are expected to live in two directories with identical
    file names (the pairing is by name). The file list is split by index into
    a train range and a validation range.
    """

    def __init__(self, hr_dir, lr_dir, train=True,
                 train_range=(0, 800), val_range=(800, 900)):
        """
        Args:
            hr_dir: directory containing high-resolution ``.pt`` tensor files.
            lr_dir: directory containing low-resolution ``.pt`` tensor files
                with the same file names as in ``hr_dir``.
            train: select the training slice if True, else the validation slice.
            train_range: (start, stop) index slice of the sorted file list used
                for training. Defaults preserve the original hard-coded split.
            val_range: (start, stop) index slice used for validation.
        """
        self.hr_pt_dir = hr_dir
        self.lr_pt_dir = lr_dir
        self.train = train

        self.hr_files = sorted(f for f in os.listdir(hr_dir) if f.endswith('.pt'))
        # File names correspond one-to-one between HR and LR directories.
        self.lr_files = list(self.hr_files)

        start, stop = train_range if self.train else val_range
        self.hr_files = self.hr_files[start:stop]
        self.lr_files = self.lr_files[start:stop]

    def __len__(self):
        return len(self.hr_files)

    def __getitem__(self, idx):
        """Return a ``{'lr': Tensor, 'hr': Tensor}`` pair for sample ``idx``.

        ``map_location='cpu'`` ensures tensors deserialize onto the CPU even if
        they were saved from a GPU — required for ``pin_memory`` in the loader
        and safe inside DataLoader worker processes.
        """
        hr_tensor = torch.load(os.path.join(self.hr_pt_dir, self.hr_files[idx]),
                               map_location='cpu')
        lr_tensor = torch.load(os.path.join(self.lr_pt_dir, self.lr_files[idx]),
                               map_location='cpu')
        return {'lr': lr_tensor, 'hr': hr_tensor}


def get_dataloaders(batch_size, num_workers=8, preload=True,
                    hr_dir='SR_Datasets/pt/HR', lr_dir='SR_Datasets/pt/LR'):
    """Build train and validation DataLoaders over the pre-saved ``.pt`` pairs.

    Args:
        batch_size: batch size for both loaders.
        num_workers: worker processes per loader. With 0, loading happens in
            the main process and persistent workers are disabled (required:
            DataLoader raises if ``persistent_workers=True`` and
            ``num_workers=0``).
        preload: NOTE(review) — currently unused; kept so existing callers
            passing it keep working. Confirm intent or wire it up.
        hr_dir: directory of high-resolution ``.pt`` files.
        lr_dir: directory of low-resolution ``.pt`` files.

    Returns:
        ``(train_loader, val_loader)`` tuple.
    """
    train_set = SRDatasetPT(hr_dir=hr_dir, lr_dir=lr_dir, train=True)
    val_set = SRDatasetPT(hr_dir=hr_dir, lr_dir=lr_dir, train=False)

    # persistent_workers is only legal (and only useful) with real workers.
    common = dict(num_workers=num_workers, pin_memory=True,
                  persistent_workers=num_workers > 0)

    train_loader = DataLoader(train_set, batch_size=batch_size, shuffle=True,
                              **common)
    val_loader = DataLoader(val_set, batch_size=batch_size, shuffle=False,
                            **common)

    return train_loader, val_loader
