import os

from PIL import Image  # noqa: F401 -- kept from original; not referenced directly
from datasets import Dataset
from huggingface_hub import HfApi

# --- Configuration -----------------------------------------------------------

# Path to the CoLeaf dataset directory (one subdirectory per label).
COLEAF_DIR = "/home/goya/CV_Plant_Disease/Datasets/CoLeaf_dataset"
# Target repo on the Hugging Face Hub and its human-readable description.
DATASET_NAME = "bhugxer/CoLeafLabels"
DATASET_DESCRIPTION = "CoLeaf dataset for fine-tuning Stable Diffusion"


def collect_image_paths(root_dir):
    """Collect (image_path, label) pairs from a label-per-subdirectory tree.

    Walks one level of subdirectories under *root_dir*; each subdirectory name
    is used as the label for every file it contains. Entries are sorted so the
    resulting dataset is deterministic across runs (os.listdir order is
    filesystem-dependent).

    Returns:
        (image_paths, labels): two parallel lists of equal length.
    """
    image_paths = []
    labels = []
    for label in sorted(os.listdir(root_dir)):
        label_dir = os.path.join(root_dir, label)
        # Skip stray files at the top level; only directories are labels.
        if not os.path.isdir(label_dir):
            continue
        for image_file in sorted(os.listdir(label_dir)):
            image_paths.append(os.path.join(label_dir, image_file))
            labels.append(label)
    return image_paths, labels


def load_image(example):
    """datasets.map callback: read raw image bytes for one example.

    Reads the file named by example["image_path"] and returns it as a new
    "image" column containing the raw bytes (no decoding is performed).
    """
    with open(example["image_path"], "rb") as f:
        return {"image": f.read()}


def build_dataset_card(num_examples):
    """Return the README.md dataset-card text for the Hub repo."""
    return f"""
# {DATASET_NAME}

{DATASET_DESCRIPTION}

## Dataset Structure

- `image`: The image data.
- `label`: The label or text description of the image.

## Dataset Info

- Number of examples: {num_examples}
- Image format: Various (PNG, JPEG, etc.)

## License

[Insert license information here]

## Citation

[Insert citation information here]
"""


def main():
    """Build the CoLeaf dataset and push it (plus a dataset card) to the Hub."""
    api_token = os.environ.get("HUGGINGFACE_API_TOKEN")

    # Gather image paths and labels, then materialize the dataset.
    image_paths, labels = collect_image_paths(COLEAF_DIR)
    dataset = Dataset.from_dict({"image_path": image_paths, "label": labels})

    # Attach raw image bytes, then drop the now-redundant path column.
    dataset = dataset.map(load_image, batched=False, num_proc=4)
    dataset = dataset.remove_columns("image_path")

    # Push the processed dataset itself. (The original script uploaded the
    # current working directory with upload_folder(folder_path="."), which
    # never published the Dataset object at all.)
    dataset.push_to_hub(DATASET_NAME, token=api_token)

    # Write the dataset card locally and upload it to the same repo.
    # Note: HfApi methods take `token=`; the former `use_auth_token` keyword
    # is not accepted here, and the token was already given to HfApi anyway.
    with open("README.md", "w") as f:
        f.write(build_dataset_card(len(dataset)))

    api = HfApi(token=api_token)
    api.upload_file(
        path_or_fileobj="README.md",
        path_in_repo="README.md",
        repo_id=DATASET_NAME,
        repo_type="dataset",
    )


if __name__ == "__main__":
    main()