# The LAION captions are numerous and stored in shards: 12M captions total, split into 12 parts,
# i.e. 12 txt files with 1M captions each. To sample at random: first draw a random part number,
# then a random line index in 1..1M. The sampled lines are collected per part and then merged,
# so we never need to hold all 12 large files open at the same time.

import random
from collections import defaultdict
import os
from tqdm import tqdm

# Module-level configuration, kept for backward compatibility with the
# original flat-script layout (these are also the function defaults below).
seed = 3475693756
num_samples = 100000
caption_save_path = 'data/reflow/laion6+_random10W.txt'
laion_txt_path = 'data/reflow/laion6+_txt'  # folder containing all the part*.txt files

PARTS = 12
SAMPLES_PER_PARTS = int(1e6)


def sample_captions(laion_txt_path=laion_txt_path,
                    caption_save_path=caption_save_path,
                    num_samples=num_samples,
                    seed=seed,
                    parts=PARTS,
                    samples_per_part=SAMPLES_PER_PARTS):
    """Randomly sample ``num_samples`` captions from sharded LAION text files.

    The captions are stored in ``parts`` shard files named ``part1.txt`` ...
    ``part{parts}.txt`` under ``laion_txt_path``, each holding
    ``samples_per_part`` lines. To avoid opening all shards at once, the
    (part, line-index) pairs are drawn up front and grouped by part, so each
    shard is read exactly once. Sampling is with replacement, so the output
    may contain duplicate captions.

    The collected captions are sorted and written to ``caption_save_path``,
    one caption per line.
    """
    try:
        from tqdm import tqdm
    except ImportError:  # progress bar is optional — degrade gracefully
        def tqdm(iterable, **kwargs):
            return iterable

    # Local RNG: same randint stream as random.seed(seed) on the global
    # instance, but without clobbering global random state for other code.
    rng = random.Random(seed)

    # Draw all (part, line-index) pairs first, grouped by part.
    part_indices = defaultdict(list)
    for _ in range(num_samples):
        part = rng.randint(1, parts)
        part_indices[part].append(rng.randint(0, samples_per_part - 1))

    all_caps = []
    for part, indices in tqdm(part_indices.items()):
        shard = os.path.join(laion_txt_path, f'part{part}.txt')
        # Context manager guarantees the handle is closed (the original code
        # leaked it); explicit encoding avoids locale-dependent decoding.
        with open(shard, 'r', encoding='utf-8') as f:
            part_caps = f.read().splitlines()
        all_caps.extend(part_caps[i] for i in indices)

    all_caps.sort()
    with open(caption_save_path, 'w', encoding='utf-8') as f:
        f.write('\n'.join(all_caps))
        f.write('\n')  # end the file with a newline (POSIX convention)


if __name__ == '__main__':
    sample_captions()