fb-housing-posts / main.py
from datasets import load_dataset, DatasetDict
# Load the dataset from a local JSONL file; this yields a DatasetDict
# with a single "train" split
dataset = load_dataset("json", data_files="listings.jsonl")
dataset = dataset.shuffle(seed=42)
# Split the dataset into train (70%) and a temporary 30% holdout,
# seeding the split so it is reproducible
split_dataset = dataset["train"].train_test_split(test_size=0.3, seed=42)
train_dataset = split_dataset["train"]
temp_dataset = split_dataset["test"]
# Split the temporary holdout evenly into validation (15%) and test (15%)
split_temp_dataset = temp_dataset.train_test_split(test_size=0.5, seed=42)
validation_dataset = split_temp_dataset["train"]
test_dataset = split_temp_dataset["test"]
# Combine all splits into a DatasetDict
final_dataset = DatasetDict(
{"train": train_dataset, "validation": validation_dataset, "test": test_dataset}
)
# Push the split dataset to the Hugging Face Hub (requires authentication,
# e.g. via `huggingface-cli login` or an HF_TOKEN with write access)
final_dataset.push_to_hub("fb-housing-posts")
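# Optional sanity check (a sketch, not part of the original script): reload the
# pushed dataset from the Hub and print the size of each split. The repo id below
# is an assumption; push_to_hub("fb-housing-posts") creates the repo under the
# authenticated user's namespace, so replace <username> with your Hub username.
# reloaded = load_dataset("<username>/fb-housing-posts")
# print({name: split.num_rows for name, split in reloaded.items()})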