
import multiprocessing
import json

from collections import defaultdict
from tqdm import tqdm

# Log file: ~10 million lines of access-log data
def read_large_file(file_name):
    """Lazily yield raw lines (newline included) from *file_name*.

    Streaming keeps memory flat even for a 10M-line log file.
    """
    with open(file_name, 'r') as handle:
        yield from handle

def read_file_and_process(file_name, num_lines):
    """Scan an access log and group unique client IPs by HTTP status code.

    Each line is whitespace-split; field 0 is taken as the client IP and
    field 8 as the status code (combined log format — confirm against the
    actual log layout).

    Args:
        file_name: path of the log file to read.
        num_lines: expected total line count; used only to size the
            tqdm progress bar.

    Returns:
        dict mapping int status code -> list of unique IP strings,
        sorted in descending lexicographic order (JSON-serializable).
    """
    ip_sets = defaultdict(set)
    with tqdm(total=num_lines, desc='Processing ', unit='lines') as pbar:
        for line in read_large_file(file_name):
            parts = line.split()
            try:
                ip_address = parts[0]
                status_code = int(parts[8])
            except (IndexError, ValueError):
                # Blank or malformed lines used to raise and abort the
                # whole 10M-line run; skip them instead.
                pbar.update(1)
                continue
            ip_sets[status_code].add(ip_address)
            pbar.update(1)
    # Sets are not JSON-serializable; build a plain dict of sorted lists
    # rather than mutating the mapping while iterating it.
    return {code: sorted(ips, reverse=True) for code, ips in ip_sets.items()}


def main():
    """Process the 10M-line access log in a worker process and write the
    status-code -> IP grouping to result.json."""
    # NOTE(review): the original shadowed 'access_logs_10000.txt' with this
    # value on the very next line; the dead assignment is removed.
    file_name = 'access_log_1kw.txt'
    num_lines = 10000000
    # Exactly one task, so a single worker suffices and apply_async is the
    # direct fit (the original used a default-sized Pool and a one-element
    # starmap_async list, then unwrapped with [0]).
    with multiprocessing.Pool(processes=1) as pool:
        async_result = pool.apply_async(
            read_file_and_process, (file_name, num_lines)
        )
        pool.close()
        pool.join()
    with open('result.json', 'w') as f:
        # json.dump converts the int status-code keys to strings.
        json.dump(async_result.get(), f, indent=4)


if __name__ == '__main__':
    main()