Datasets:
Update README.md
Browse files
README.md
CHANGED
@@ -129,4 +129,89 @@ ds.map(
|
|
129 |
},
|
130 |
remove_columns=["images"],
|
131 |
).rename_column("images_t", "images")
|
132 |
-
```
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
129 |
},
|
130 |
remove_columns=["images"],
|
131 |
).rename_column("images_t", "images")
|
132 |
+
```
|
133 |
+
|
134 |
+
<details>
|
135 |
+
<summary>Show the code used to generate this dataset.</summary>
|
136 |
+
This assumes that the directory `m3exam/multimodal-question/` exists and is an exact copy from the original GitHub repository.
|
137 |
+
```python
|
138 |
+
import pandas as pd
|
139 |
+
from pathlib import Path
|
140 |
+
from datasets import Image, DatasetDict, Dataset, Value, Sequence
|
141 |
+
from PIL import Image as PILImage
|
142 |
+
from tqdm.auto import tqdm
|
143 |
+
from copy import deepcopy
|
144 |
+
from functools import partial
|
145 |
+
import re
|
146 |
+
|
147 |
+
tqdm.pandas()
|
148 |
+
|
149 |
+
def get_img_ids(row, img_base_p):
    """Extract and validate the image file names referenced by a question row.

    Scans the question text, every answer option, and every background
    description for ``(image)[image-<n>.<ext>]`` markers, repairs common
    typos in the referenced file names, and checks that each file exists.

    Parameters
    ----------
    row : Mapping
        A question record with ``question_text`` (str), ``options``
        (iterable of str) and ``background_description`` (iterable of str).
    img_base_p : pathlib.Path
        Directory expected to contain the referenced image files.

    Returns
    -------
    list[str] | None
        The cleaned image file names, or ``None`` when at least one
        referenced image file is missing on disk (row should be dropped).
    """
    # Non-greedy quantifiers so that several markers on one line yield
    # separate matches; a greedy ".*" swallows everything up to the LAST
    # "]" and silently loses every marker after the first.
    p = r"\(image\)\[image-.*?\..*?\]"
    imgs = re.findall(p, row["question_text"])
    for option in row["options"]:
        imgs.extend(re.findall(p, option))
    for bgdesc in row["background_description"]:
        imgs.extend(re.findall(p, bgdesc))

    # "(image)[image-1.png]" -> "image-1.png"
    img_ids = [img.split("[")[1].split("]")[0] for img in imgs]
    # remove the last character if it is a period (eg. image-1.png. -> image-1.png)
    img_ids = [img_id[:-1] if img_id[-1] == "." else img_id for img_id in img_ids]
    # remove characters after the last digit (eg. image-13c.png -> image-13.png)
    img_ids = [re.sub(r"\D*\.", ".", img_id) for img_id in img_ids]
    # remove characters between dots (eg. image-13.c.png -> image-13.png)
    img_ids = [re.sub(r"\.\D*\.", ".", img_id) for img_id in img_ids]

    for img_id in img_ids:
        if not (img_base_p / img_id).exists():
            # Any missing file invalidates the whole row.
            return None
    return img_ids
|
170 |
+
|
171 |
+
def load_images(img_ids, img_base_p):
    """Load the referenced image files as HF ``datasets``-encoded examples.

    Parameters
    ----------
    img_ids : list[str] | None
        Image file names relative to *img_base_p*, or ``None`` for rows
        whose images are missing (the ``None`` is propagated).
    img_base_p : pathlib.Path
        Directory containing the image files.

    Returns
    -------
    list | None
        One encoded image example per id, or ``None`` if *img_ids* is ``None``.
    """
    if img_ids is None:
        return None
    encoder = Image()
    encoded = []
    for img_id in img_ids:
        pil_img = PILImage.open(img_base_p / img_id).convert("RGB")
        encoded.append(encoder.encode_example(deepcopy(pil_img)))
    return encoded
|
179 |
+
|
180 |
+
if __name__ == "__main__":
    # Build one dataset split per language and push the result to the Hub.
    dsd = DatasetDict()
    img_base_p = "m3exam/multimodal-question/images-"
    # Question files are named "<lang>-questions-image.json"; images live
    # in a sibling "images-<lang>" directory.
    for p in (
        pbar := tqdm(
            list(Path("m3exam/multimodal-question").glob("*-questions-image.json"))
        )
    ):
        lang = p.stem.split("-")[0]
        pbar.set_description(lang)

        df = pd.read_json(p)
        # Resolve image references per question; rows with any missing
        # image file get image_ids == None and are filtered out below.
        df["image_ids"] = df.apply(
            partial(get_img_ids, img_base_p=Path(img_base_p + lang)), axis=1
        )
        # load_images propagates None for rows with missing images.
        df["images"] = df["image_ids"].progress_apply(
            partial(load_images, img_base_p=Path(img_base_p + lang))
        )
        df = df[~df.image_ids.isna()]
        # Normalize the text columns to stripped strings.
        df["year"] = df["year"].astype(str).str.strip()
        df["answer_text"] = df["answer_text"].astype(str).str.strip()
        df["question_text"] = df["question_text"].astype(str).str.strip()
        ds = Dataset.from_pandas(df.reset_index(drop=True))
        # for javanese there are no background descs thus it is interpreted as dtype null. We need to change it to string
        features = ds.features.copy()
        features["background_description"] = Sequence(
            feature=Value(dtype="string", id=None), length=-1, id=None
        )
        ds = ds.cast(features)

        dsd[lang] = ds

    # Auth token is redacted in the published README.
    dsd.push_to_hub(
        "floschne/multimodal-m3exam", token=<OMITTED>
    )
|
215 |
+
|
216 |
+
```
|
217 |
+
</details>
|