import pyarrow.parquet as pq
import os
from hashlib import md5
import hashlib


def createMD5tofile(file):
  """Return the hex MD5 digest of *file*.

  Args:
    file: either a filesystem path (str), whose contents are hashed,
      or a bytes-like object that is hashed directly.

  Returns:
    The 32-character hexadecimal MD5 digest string.
  """
  m = md5()
  if isinstance(file, str):
    # `with` guarantees the handle is closed even if read() raises.
    # (The original called a_file.close() unconditionally after the
    # if/else, which raised NameError whenever `file` was bytes.)
    with open(file, 'rb') as a_file:
      m.update(a_file.read())
  else:
    m.update(file)
  return m.hexdigest()

def write_text(text, path):
    """Write *text* to *path* as UTF-8, replacing any existing file."""
    with open(path, 'w', encoding='utf-8') as out:
        out.write(text)

def write_image(image_byte, path):
    """Write raw image bytes to *path*, replacing any existing file."""
    with open(path, 'wb') as out:
        out.write(image_byte)

def handle_file(file_path, save_dir, cts=0):
    """Extract (image, caption) rows from one Parquet file onto disk.

    For every row, the image bytes go to ``<save_dir>/images/<md5>.jpg``
    and the caption to ``<save_dir>/text/<md5>.txt``, where ``<md5>`` is
    the digest of the image bytes — identical images collapse to one file.

    Args:
        file_path: path of the Parquet shard to read.
        save_dir: output root; the 'text' and 'images' subdirectories
            must already exist.
        cts: running row counter carried across files.

    Returns:
        The updated row counter after processing this file.
    """
    parquet_file = pq.ParquetFile(file_path)

    # Stream one row group at a time instead of loading the whole file.
    # (The original also did a full parquet_file.read() whose result was
    # never used — dropped to avoid doubling peak memory.)
    for group_idx in range(parquet_file.num_row_groups):
        row_group = parquet_file.read_row_group(group_idx)
        # Column 0: struct holding the image bytes; column 1: caption text.
        bytes_col = row_group[0]
        text_col = row_group[1]
        for j in range(len(text_col)):
            text = text_col[j].as_py()
            # First field of the struct is the raw JPEG payload; as_buffer()
            # yields a buffer usable by both hashlib and binary writes.
            img_byte = bytes_col[j][0].as_buffer()

            # Content-addressed key: deduplicates identical images.
            key = hashlib.md5(img_byte).hexdigest()
            write_text(text, os.path.join(save_dir, 'text', f'{key}.txt'))
            write_image(img_byte, os.path.join(save_dir, 'images', f'{key}.jpg'))

            cts += 1
            print(cts)  # progress indicator

    return cts

def main():
    """Dump every Parquet shard under *data_dir* into text/image files."""
    # NOTE(review): both paths are hard-coded for one specific machine
    # and dataset snapshot — parameterize before reuse.
    data_dir = '/home/centos/.cache/huggingface/hub/datasets--bhargavsdesai--laion_improved_aesthetics_6.5plus_with_images/snapshots/3abfff061fa56cb5812ced3452cd290a65ca2d97/data'
    save_dir = './laion_improved_aesthetics_6.5plus'

    # Create the output subdirectories handle_file() writes into.
    for sub in ['text', 'images']:
        os.makedirs(os.path.join(save_dir, sub), exist_ok=True)

    # (Removed a redundant local `import pyarrow.parquet as pq` that
    # shadowed the module-level import and was never used here.)
    cts = 0
    # sorted() makes the shard processing order deterministic across runs.
    for filename in sorted(os.listdir(data_dir)):
        file_path = os.path.join(data_dir, filename)
        cts = handle_file(file_path, save_dir, cts)
 
    



# Script entry point: run the extraction only when executed directly,
# not when this module is imported.
if __name__ == '__main__':
    main()