from torchvision.transforms import Normalize, Compose, Resize, ToTensor


def get_transform(image_size=384):
    """Build the image preprocessing pipeline: RGB conversion, resize,
    tensor conversion, and ImageNet-style normalization."""
    return Compose([
        lambda image: image.convert("RGB"),      # ensure 3-channel RGB input (handles grayscale/RGBA)
        Resize((image_size, image_size)),        # resize to a square image_size x image_size
        ToTensor(),                              # PIL image -> float tensor in [0, 1], shape (C, H, W)
        Normalize(mean=[0.485, 0.456, 0.406],    # normalize with the standard ImageNet mean/std
                  std=[0.229, 0.224, 0.225])
    ])
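

# Illustrative usage sketch, not part of the original file: assumes Pillow is
# installed and that an "example.jpg" file exists (hypothetical path). The
# callable returned by get_transform() maps a PIL image to a normalized
# float tensor of shape (3, image_size, image_size).
if __name__ == "__main__":
    from PIL import Image

    transform = get_transform(image_size=384)
    image = Image.open("example.jpg")
    tensor = transform(image)
    print(tensor.shape, tensor.dtype)  # expected: torch.Size([3, 384, 384]) torch.float32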