|
from io import BytesIO
|
|
|
|
import ujson
|
|
import webdataset as wds
|
|
from PIL import Image
|
|
from tqdm import tqdm
|
|
|
|
|
|
def load_text(txt: bytes):
    """Decode a raw text payload from a webdataset sample (UTF-8)."""
    return str(txt, 'utf-8')
|
|
|
|
|
|
def load_image(jpg):
    """Parse raw JPEG bytes into an RGB PIL image."""
    buffer = BytesIO(jpg)
    return Image.open(buffer).convert('RGB')
|
|
|
|
|
|
# Per-field decoders applied by .map_dict(): raw bytes -> PIL image / str.
load_mapping = {
    'input.jpg': load_image,
    'output.txt': load_text,
}
|
|
|
|
|
|
def valid_image(img):
    """Keep only images whose shorter side is at least 64 pixels."""
    shorter_side = min(img.size)
    return shorter_side >= 64
|
|
|
|
|
|
def resize_img(img, max_size=512):
    """Downscale *img* so its longer side is at most ``max_size`` pixels.

    Aspect ratio is preserved. Images already within the limit are
    returned unchanged (same object).

    Args:
        img: a PIL image.
        max_size: maximum allowed length (px) of the longer side.

    Returns:
        A resized PIL image, or *img* itself when no resize is needed.
    """
    width, height = img.size

    # Using <= (original used <) skips a pointless same-size resize when
    # one side equals max_size exactly.
    if width <= max_size and height <= max_size:
        return img

    if width > height:
        new_width = max_size
        new_height = int(new_width * height / width)
    elif height > width:
        new_height = max_size
        new_width = int(new_height * width / height)
    else:
        new_height = new_width = max_size

    # Image.ANTIALIAS was deprecated in Pillow 9.1 and removed in 10.0;
    # LANCZOS is the same filter under its current name.
    img = img.resize((new_width, new_height), Image.LANCZOS)
    return img
|
|
|
|
|
|
def img_to_meta(img):
    """Build a JSON-serializable metadata dict (width/height) for *img*."""
    w, h = img.size
    return dict(width=w, height=h)
|
|
|
|
|
|
def get_image(img):
    """Filter, resize and re-encode a decoded PIL image.

    Args:
        img: a PIL image produced by ``load_image``.

    Returns:
        ``(jpeg_bytes, meta_json)`` for images that pass ``valid_image``,
        or ``(None, None)`` when the image is too small to keep.
    """
    if not valid_image(img):
        return None, None

    # BUG FIX: resize_img returns the resized image; the original code
    # discarded the return value, so oversized images were written at
    # full resolution.
    img = resize_img(img)

    img_stream = BytesIO()
    img.save(img_stream, format='jpeg')

    img_stream.seek(0)
    return img_stream.read(), ujson.dumps(img_to_meta(img))
|
|
|
|
# Second .map_dict() pass: get_image both filters small images and
# re-encodes survivors, yielding a (jpeg_bytes, meta_json) tuple.
change_mapping = {'input.jpg': get_image}
|
|
|
|
def func(wds_dataset_str, **kwargs):
    """Re-shard a webdataset: decode, filter/resize images, write new shards.

    Samples whose image is rejected (too small) are dropped. Output shards
    are written to the current directory as ``00000.tar``, ``00001.tar``, …
    with at most 10000 samples each.

    Args:
        wds_dataset_str: brace-expandable shard spec for ``wds.WebDataset``.
        **kwargs: extra keyword arguments forwarded to ``wds.WebLoader``.
    """
    ds = (wds.WebDataset(wds_dataset_str, shardshuffle=False)
          .map_dict(**load_mapping)
          .map_dict(**change_mapping)
          .to_tuple('input.jpg', 'output.txt'))
    dl = wds.WebLoader(ds, batch_size=None, num_workers=48, prefetch_factor=16, **kwargs)

    # BUG FIX: the original never closed the ShardWriter, so the final
    # (partial) output shard could be left unflushed on disk. ShardWriter
    # is a context manager; 'with' guarantees close() runs.
    with wds.ShardWriter('%05d.tar', 10000) as writer:
        for img, txt in tqdm(dl):
            img_str, meta = img
            if img_str is None:
                # Image was filtered out by get_image; skip the sample.
                continue
            writer.write({
                '__key__': f'{writer.count:08}',
                'jpg': img_str,
                'txt': txt,
                'json': meta,
            })
|
|
|
|
if __name__ == '__main__':
    # Source: 128 directories of webdataset tar shards.
    # NOTE(review): '{00000..4}' brace-expands to only 5 shards per
    # directory (00000..00004) — confirm that is the intended count.
    func('../conceptual-captions-12m-webdataset/{0..127}/{00000..4}.tar')
|
|
|