from nnunetv2.paths import nnUNet_raw
from pathlib import Path
from batchgenerators.utilities.file_and_folder_operations import *
import multiprocessing
import shutil
from tqdm import tqdm
from nnunetv2.dataset_conversion.generate_dataset_json import generate_dataset_json

# Segmentation label mapping (class name -> integer id) handed to
# generate_dataset_json; PARSE2022 is a binary artery-segmentation task.
labels = {'background': 0, 'artery': 1}

def load_and_convert_case(input_image: "str | Path", input_seg: "str | Path",
                          output_image: "str | Path", output_seg: "str | Path") -> None:
    """Copy one case's image and segmentation into the nnU-Net raw layout.

    No actual conversion is performed: the source files are already
    compressed NIfTI (.nii.gz), which nnU-Net consumes directly, so a
    plain file copy is sufficient.

    Annotations are string-form unions because callers pass pathlib.Path
    objects (shutil.copy accepts both str and os.PathLike).

    :param input_image: source CT image file
    :param input_seg: source segmentation file
    :param output_image: destination path inside imagesTr
    :param output_seg: destination path inside labelsTr
    """
    shutil.copy(input_image, output_image)
    shutil.copy(input_seg, output_seg)


if __name__ == "__main__":
    # Destination: the nnU-Net raw-data tree; source: the original PARSE2022 download.
    root = Path(nnUNet_raw)
    source = Path('/home/yjiang/workspace/local/dataset/parse2022')

    # nnU-Net dataset ids are three-digit, zero-padded strings.
    dataset_num = str(997).zfill(3)
    assert len(dataset_num) == 3
    dataset_name = f'Dataset{dataset_num}_PARSE2022'

    # Standard nnU-Net folder layout (test folders are created but this
    # script only populates the training split).
    imagestr = root / dataset_name / 'imagesTr'
    imagests = root / dataset_name / 'imagesTs'
    labelstr = root / dataset_name / 'labelsTr'
    labelsts = root / dataset_name / 'labelsTs'
    for folder in (imagestr, imagests, labelstr, labelsts):
        maybe_mkdir_p(folder)

    # Each case directory <id> under raw_data/train holds image/<id>.nii.gz
    # and label/<id>.nii.gz; pair them up per case.
    train_datas = [
        (case / 'image' / case.with_suffix('.nii.gz').name,
         case / 'label' / case.with_suffix('.nii.gz').name)
        for case in (source / 'raw_data' / 'train').glob('*')
    ]

    # Fan the copies out over a spawn-context pool; collect the async
    # handles first, then block on each so tqdm shows completion progress.
    with multiprocessing.get_context("spawn").Pool(multiprocessing.cpu_count()) as pool:
        pending = [
            pool.starmap_async(
                load_and_convert_case,
                ((
                    img,
                    anno,
                    # nnU-Net expects a _0000 channel suffix on images.
                    imagestr / '{}_0000.nii.gz'.format(img.name.split('.')[0]),
                    labelstr / anno.name
                ),)
            )
            for img, anno in train_datas
        ]
        for job in tqdm(pending):
            job.get()

    # Write dataset.json: single CT channel, binary labels, .nii.gz files.
    generate_dataset_json(root / dataset_name, {0: 'CT'}, labels,
                          len(train_datas), '.nii.gz', dataset_name=dataset_name)
    
