import os

import datasets as ds
import pytest

from MSCOCO import CATEGORIES, SUPER_CATEGORIES


@pytest.fixture
def dataset_path() -> str:
    # Path to the dataset loading script under test.
    return "MSCOCO.py"


@pytest.mark.skipif(
    condition=bool(os.environ.get("CI", False)),
    reason=(
        "Because this loading script downloads a large dataset, "
        "we will skip running it on CI."
    ),
)
@pytest.mark.parametrize(
    argnames="decode_rle",
    argvalues=(
        True,
        False,
    ),
)
@pytest.mark.parametrize(
    argnames=(
        "dataset_year",
        "coco_task",
        "expected_num_train",
        "expected_num_validation",
    ),
    # (year, task, #train examples, #validation examples)
    argvalues=(
        (2014, "captions", 82783, 40504),
        (2017, "captions", 118287, 5000),
        (2014, "instances", 82081, 40137),
        (2017, "instances", 117266, 4952),
        (2014, "person_keypoints", 45174, 21634),
        (2017, "person_keypoints", 64115, 2693),
    ),
)
def test_load_dataset(
    dataset_path: str,
    dataset_year: int,
    coco_task: str,
    decode_rle: bool,
    expected_num_train: int,
    expected_num_validation: int,
):
    dataset = ds.load_dataset(
        path=dataset_path,
        year=dataset_year,
        coco_task=coco_task,
        decode_rle=decode_rle,
    )
    assert dataset["train"].num_rows == expected_num_train
    assert dataset["validation"].num_rows == expected_num_validation


def test_consts():
    # MS-COCO defines 80 object categories grouped into 12 super categories.
    assert len(CATEGORIES) == 80
    assert len(SUPER_CATEGORIES) == 12