---
dataset_info:
features:
- name: image
dtype: image
- name: en_text
dtype: string
splits:
- name: train
num_bytes: 9768225.0
num_examples: 170
download_size: 4653899
dataset_size: 9768225.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "Pak_Law_QA"
```python
from huggingface_hub import login
from datasets import load_dataset
import os

# Authenticate with the Hugging Face Hub (interactive token prompt).
login()

# Directory of images to load as an "imagefolder" dataset.
data_dir = "/home/musab/Downloads/ROBIN/Dataset_3rooms/"
dataset = load_dataset("imagefolder", data_dir=data_dir, split="train")


def add_new_column(example):
    """Attach the English caption column to one dataset row.

    Parameters:
        example (dict): a single dataset row; mutated in place.

    Returns:
        dict: the same row with an added ``en_text`` key.
    """
    example["en_text"] = "Create 3 room 2d floor plan"
    return example


# Apply the captioning function to every row of the dataset.
dataset = dataset.map(add_new_column)

# Verify that the new column has been added.
print(dataset.column_names)

# NOTE(review): repo id says "Pak_Law_QA" but the data is floor-plan
# images — confirm the target repository is intentional.
dataset.push_to_hub("DisgustingOzil/Pak_Law_QA", token=os.environ.get('HF_TOKEN'))
```
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)