# Prepare the datasets and data-augmentation pipelines
import paddlex as pdx
from paddlex import transforms as T


# Per-channel RGB statistics computed for this dataset (NOT the ImageNet
# defaults mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]).
_MEAN = [0.46157165, 0.46698335, 0.46580717]
_STD = [0.28069802, 0.28096266, 0.28323689]

# Training augmentation pipeline for COCO-format detection.
train_transforms = T.Compose([
    # mixup_epoch=-1 keeps mixup enabled for the entire training run.
    T.MixupImage(mixup_epoch=-1),
    T.RandomDistort(),
    # Pad expanded borders with this dataset's mean pixel value (mean * 255)
    # so padded regions normalize to ~0. The previous hard-coded value
    # [123.675, 116.28, 103.53] was the ImageNet mean copied from the
    # official PaddleX example and did not match the custom Normalize
    # statistics used below.
    T.RandomExpand(im_padding_value=[m * 255 for m in _MEAN]),
    T.RandomCrop(),
    T.RandomHorizontalFlip(),
    # Multi-scale training: each batch is resized to a randomly chosen
    # size between 320 and 608 px, with a random interpolation method.
    T.BatchRandomResize(
        target_sizes=[320, 352, 384, 416, 448, 480, 512, 544, 576, 608],
        interp="RANDOM"),
    T.Normalize(mean=_MEAN, std=_STD),
])

# Per-channel RGB statistics computed for this dataset (not ImageNet defaults).
_EVAL_MEAN = [0.46157165, 0.46698335, 0.46580717]
_EVAL_STD = [0.28069802, 0.28096266, 0.28323689]

# Evaluation pipeline: deterministic resize to the largest training scale
# (608 px) with bicubic interpolation, then channel-wise normalization.
eval_transforms = T.Compose([
    T.Resize(target_size=608, interp="CUBIC"),
    T.Normalize(mean=_EVAL_MEAN, std=_EVAL_STD),
])

# Root directory holding both the images and the COCO annotation files.
_DATA_ROOT = "train/200"

# Training split; shuffle=True so each epoch sees samples in a new order.
train_dataset = pdx.datasets.CocoDetection(
    data_dir=_DATA_ROOT,
    ann_file=f"{_DATA_ROOT}/train.json",
    transforms=train_transforms,
    shuffle=True)

# Validation split; deterministic order (no shuffle) for stable evaluation.
eval_dataset = pdx.datasets.CocoDetection(
    data_dir=_DATA_ROOT,
    ann_file=f"{_DATA_ROOT}/val.json",
    transforms=eval_transforms)