import json
import mmap
from concurrent.futures import ProcessPoolExecutor
from tqdm import tqdm

def process_line(line):
    """Parse one JSONL record and re-serialize its 'key' field.

    Example handler: extracts the 'key' field; adapt the logic to your
    own schema as needed.

    Returns a newline-terminated JSON string, or None when the input
    line is not valid JSON (callers are expected to skip None results).
    """
    try:
        record = json.loads(line)
    except json.JSONDecodeError:
        # Unparseable line: signal the caller to drop it (or log it).
        return None
    return json.dumps({"processed": record.get("key")}) + "\n"

def process_file(input_file, output_file, process_func=None, workers=4, chunk_size=1000):
    """
    Read a large JSONL file through mmap, process every line in a
    process pool, and write the non-None results to ``output_file``.

    Parameters:
      input_file   : path to the input JSONL file
      output_file  : path of the file the results are written to
      process_func : callable applied to each decoded line; must be a
                     picklable top-level function because workers are
                     separate processes. Defaults to ``process_line``.
      workers      : number of worker processes
      chunk_size   : lines submitted to the pool per batch; tune for
                     memory use and per-line processing cost
    """
    if process_func is None:
        # Default handler; resolved lazily so callers may override.
        process_func = process_line

    def _flush(executor, fout, batch, progress_bar):
        """Run one batch through the pool, write results, advance the bar."""
        for result in executor.map(process_func, batch):
            if result is not None:
                fout.write(result)
        progress_bar.update(len(batch))

    # Binary mode so the descriptor can be memory-mapped.
    with open(input_file, "rb") as fin, \
         open(output_file, "w", buffering=1024 * 1024) as fout:
        fin.seek(0, 2)          # jump to EOF to learn the file size
        if fin.tell() == 0:
            return              # mmap cannot map an empty file
        fin.seek(0)
        # Map the whole file read-only; the context manager guarantees the
        # mapping is released even if processing raises (the original
        # bare close() was skipped on exceptions).
        with mmap.mmap(fin.fileno(), 0, access=mmap.ACCESS_READ) as mm:
            # First pass: count lines so tqdm can show a real total.
            total_lines = 0
            while mm.readline():
                total_lines += 1
            mm.seek(0)  # rewind the mapping to the start of the file

            lines_chunk = []
            with ProcessPoolExecutor(max_workers=workers) as executor, \
                 tqdm(total=total_lines, desc="Processing Lines", unit="lines") as progress_bar:
                # iter(mm.readline, b"") yields one line per call until EOF.
                for raw_line in iter(mm.readline, b""):
                    lines_chunk.append(raw_line.decode('utf-8'))
                    if len(lines_chunk) >= chunk_size:
                        _flush(executor, fout, lines_chunk, progress_bar)
                        lines_chunk = []
                # Flush the final partial batch.
                if lines_chunk:
                    _flush(executor, fout, lines_chunk, progress_bar)

if __name__ == "__main__":
    input_path = "input.jsonl"
    output_path = "output.jsonl"
    # BUG FIX: the original call omitted the required third positional
    # argument `process_func`, which raised a TypeError at startup.
    process_file(input_path, output_path, process_line,
                 workers=8, chunk_size=5000)
