from yolov8_dataset import YOLOv8ImageDataset
import fiftyone as fo
import os
from fiftyone import ViewField as F

def find_dataset_dir(dataset_path):
    """Recursively collect all dataset directories under *dataset_path*.

    A directory counts as a dataset root as soon as it contains an
    ``Images`` entry; recursion stops there and that directory is
    returned. Otherwise each *sub-directory* is searched in turn.
    Plain files are now skipped, so a stray file in the tree no longer
    crashes the walk with ``NotADirectoryError``.

    Args:
        dataset_path: directory to search.

    Returns:
        list[str]: paths of every dataset directory found, in sorted
        traversal order (``os.listdir`` order is otherwise arbitrary).
    """
    if 'Images' in os.listdir(dataset_path):
        return [dataset_path]
    dataset_dirs = []
    for entry in sorted(os.listdir(dataset_path)):
        sub_path = os.path.join(dataset_path, entry)
        # Only recurse into directories; os.listdir() on a file raises.
        if os.path.isdir(sub_path):
            dataset_dirs.extend(find_dataset_dir(sub_path))
    return dataset_dirs
            
# NOTE(review): `dataset_name` is never read at module level — the functions
# below shadow it with a parameter — looks like dead code; confirm and remove.
dataset_name = None
# Roots of the train / val / test splits.
# ("柔性包装" = flexible packaging, "训练集" = training set.)
training_dataset_root = "/home/ma-user/work/柔性包装_v1_dev/训练集"
val_dataset_root = "/home/ma-user/work/柔性包装_v1_dev/val"
test_dataset_root = "/home/ma-user/work/柔性包装_v1_dev/test"

def convert_to_coco(dataset_root, dataset_name, save=False, exclude=None, val=False):
    """Load every YOLOv8 dataset under *dataset_root*, merge them into one
    FiftyOne dataset, and export it in COCO detection format.

    Args:
        dataset_root: directory tree scanned with find_dataset_dir().
        dataset_name: name for the merged FiftyOne dataset; the COCO
            export is written to "<dataset_name>_coco".
        save: when True, create the merged dataset as persistent and
            save it.
        exclude: optional collection of batch names; NG batches whose
            name is listed here are skipped.
        val: when True, load the 'test' sub-directory of each dataset
            instead (used to build validation splits).

    Returns:
        The merged fo.Dataset.
    """
    datasets = []
    for dataset_dir in find_dataset_dir(dataset_root):
        # The last three path components encode the sample metadata:
        # .../<color>/<batch>/<have_defects>  (use os.sep for portability).
        color, batch, have_defects = os.path.normpath(dataset_dir).split(os.sep)[-3:]
        print(color, batch, have_defects)
        if exclude and batch in exclude and have_defects == 'NG':
            print("exclude batch:", color, batch)
            continue
        dataset = fo.Dataset.from_dir(
            name=None,
            dataset_type=YOLOv8ImageDataset,
            dataset_dir=os.path.join(dataset_dir, 'test') if val else dataset_dir,
            shuffle=False,
            tags=[color, batch, have_defects],
        )
        datasets.append(dataset)
    # Persist only when the caller asked for it: previously persistent=True
    # was hard-coded here, which made the `save` flag's persistence toggle a
    # no-op and persisted the dataset even with save=False.
    merge_dataset = fo.Dataset(name=dataset_name, overwrite=True, persistent=save)
    for dataset in datasets:
        merge_dataset.merge_samples(dataset)
    if save:
        merge_dataset.save()
    print(merge_dataset)
    merge_dataset.export(
        export_dir=dataset_name + '_coco',
        dataset_type=fo.types.COCODetectionDataset,
        label_field="gt",
        abs_paths=True,
        export_media=False,
    )
    return merge_dataset


def convert_to_coco_single(dataset_root, dataset_name, save=False, exclude=None, val=False):
    """Load a single YOLOv8 dataset directory and export it as COCO detections.

    Args:
        dataset_root: directory holding "images"/"labels" (or a 'test'
            sub-directory thereof when *val* is True).
        dataset_name: the COCO export is written to "<dataset_name>_coco".
        save: when True, mark the dataset persistent and save it.
        exclude: unused; kept for signature parity with convert_to_coco().
        val: when True, load the 'test' sub-directory instead.

    Returns:
        The loaded fo.Dataset.
    """
    source_dir = os.path.join(dataset_root, 'test') if val else dataset_root
    dataset = fo.Dataset.from_dir(
        name=None,
        dataset_type=YOLOv8ImageDataset,
        dataset_dir=source_dir,
        shuffle=False,
        image_folder_name="images",
        label_file_name="labels",
    )
    if save:
        dataset.persistent = True
        dataset.save()
    print(dataset)
    dataset.export(
        export_dir=dataset_name + '_coco',
        dataset_type=fo.types.COCODetectionDataset,
        label_field="gt",
        abs_paths=True,
        export_media=False,
    )
    return dataset

# View summary info about the dataset
# print(merge_dataset)

# Print the first few samples in the dataset
# print(merge_dataset.head())

### without pink 
# exclude = ['5','6','11']
# training_dataset = convert_to_coco(training_dataset_root, "wo_pink_trainingset", save=True, exclude=exclude)
# print("positive images num: ",training_dataset.match(F("gt.detections").length()>0).count())
# print("positive bbox num: ",training_dataset.count("gt.detections"))

# val_dataset = convert_to_coco(training_dataset_root, "wo_pink_validset", save=True, exclude=exclude, val=True)
# print("positive num: ",val_dataset.match(F("gt.detections").length()>0).count())
# print("positive bbox num: ",val_dataset.count("gt.detections"))

# training_dataset = convert_to_coco(training_dataset_root, "raw_trainingset", save=True)
# print("positive images num: ",training_dataset.match(F("gt.detections").length()>0).count())
# print("positive bbox num: ",training_dataset.count("gt.detections"))

### without pink class 5
# training_dataset = convert_to_coco_single('/dataset/zhu_workspace/柔性包装_v2_clean2_crop_coco_combine/train', "wo_pink_v2_trainingset", save=True,)
# print("positive images num: ",training_dataset.match(F("gt.detections").length()>0).count())
# print("positive bbox num: ",training_dataset.count("gt.detections"))

# val_dataset = convert_to_coco_single('/dataset/zhu_workspace/柔性包装_v2_clean2_crop_coco_combine/val', "wo_pink_v2_validset", save=True)
# print("positive num: ",val_dataset.match(F("gt.detections").length()>0).count())
# print("positive bbox num: ",val_dataset.count("gt.detections"))

### full dataset
# training_dataset = convert_to_coco_single('/dataset/zhu_workspace/柔性包装_v2_coco_combine/train', "full_trainingset", save=True,)
# print("positive images num: ",training_dataset.match(F("gt.detections").length()>0).count())
# print("positive bbox num: ",training_dataset.count("gt.detections"))

# val_dataset = convert_to_coco_single('/dataset/zhu_workspace/柔性包装_v2_coco_combine/val', "full_validset", save=True)
# print("positive num: ",val_dataset.match(F("gt.detections").length()>0).count())
# print("positive bbox num: ",val_dataset.count("gt.detections"))

### original dataset: build and export the unfiltered train / val / test
### splits, printing positive-sample and bounding-box counts for each.
training_dataset = convert_to_coco(training_dataset_root, "raw_trainingset", save=True)
print("positive images num: ",training_dataset.match(F("gt.detections").length()>0).count())
print("positive bbox num: ",training_dataset.count("gt.detections"))

val_dataset = convert_to_coco(val_dataset_root, "raw_validset", save=True)
print("positive num: ",val_dataset.match(F("gt.detections").length()>0).count())
print("positive bbox num: ",val_dataset.count("gt.detections"))

# Test split: only the dataset summary is printed, no positive counts.
test_dataset = convert_to_coco(test_dataset_root, "raw_testset", save=True)
print(test_dataset)
