import shutil

import os
import random


def train_val_split(root_dir, val_percent=0.1, seed=None):
    """Randomly split the images under ``root_dir/images`` into train/val lists.

    Writes two text files into ``root_dir``:
      - ``train.txt``: one absolute-ish image path per line (root_dir/images/<name>)
      - ``val.txt``:   same format, for the held-out validation images

    Args:
        root_dir: Dataset root containing an ``images`` subdirectory.
        val_percent: Fraction of images assigned to the validation split.
        seed: Optional seed for the shuffle, for reproducible splits.
              Defaults to None (non-deterministic, matching prior behavior).
    """
    images_path = os.path.join(root_dir, "images")
    images_list = os.listdir(images_path)
    rng = random.Random(seed)
    rng.shuffle(images_list)

    # Everything before the cut index is train; the remainder is val.
    train_images_count = int((1 - val_percent) * len(images_list))
    train_images = images_list[:train_images_count]
    val_images = images_list[train_images_count:]

    with open(os.path.join(root_dir, "train.txt"), "w") as train_txt:
        train_txt.writelines(
            os.path.join(images_path, name) + "\n" for name in train_images
        )
    print("train_count: " + str(len(train_images)))

    with open(os.path.join(root_dir, "val.txt"), "w") as val_txt:
        val_txt.writelines(
            os.path.join(images_path, name) + "\n" for name in val_images
        )
    print("val_count: " + str(len(val_images)))


if __name__ == "__main__":
    # Entry point: split the safeguard dataset with the default 10% val ratio.
    dataset_root = "/home/kaijia/algo-env/datasets/hq_safeguard_dataset/yolov5"
    train_val_split(dataset_root)
