import csv
import json
import os
import threading
from concurrent.futures import ThreadPoolExecutor, as_completed

from tqdm import tqdm

def fix_json_file(input_file, output_file):
    """Repair a file of back-to-back JSON objects into one valid JSON array.

    Crawl output is often written by appending one JSON object after another,
    which is not itself valid JSON.  The naive ``split('}{')`` approach fails
    whenever objects are separated by whitespace/newlines (``}\\n{`` never
    matches) and corrupts data when a string value contains ``}{`` — so the
    stream is parsed object-by-object with ``json.JSONDecoder.raw_decode``.

    Args:
        input_file: Path to the file containing concatenated JSON objects.
        output_file: Path where the repaired JSON array is written.

    Raises:
        json.JSONDecodeError: If the content is not a sequence of JSON values.
    """
    with open(input_file, 'r', encoding='utf-8') as f:
        content = f.read()

    decoder = json.JSONDecoder()
    objects = []
    idx = 0
    length = len(content)
    while idx < length:
        # Skip any whitespace separating consecutive objects.
        while idx < length and content[idx].isspace():
            idx += 1
        if idx >= length:
            break
        obj, idx = decoder.raw_decode(content, idx)
        objects.append(obj)

    # Write the collected objects as a single JSON array.
    with open(output_file, 'w', encoding='utf-8') as f:
        json.dump(objects, f, ensure_ascii=False)

    print(f"Fixed JSON has been saved to {output_file}")

def process_json_file(json_path, writer):
    """Extract review-comment fields from one JSON file and append them to CSV.

    The file is expected to hold a JSON array of review-comment objects.
    A file that fails to parse, or an individual comment missing an expected
    key, is reported and skipped instead of aborting the whole run with an
    unhandled ``KeyError`` after some rows were already written.

    Args:
        json_path: Path to the JSON file to read.
        writer: A ``csv.DictWriter`` (or any object with a compatible
            ``writerow``) whose fieldnames match the extracted keys.
    """
    with open(json_path, 'r', encoding='utf-8') as f:
        try:
            comments = json.load(f)
        except json.JSONDecodeError as e:
            print(f"Error decoding JSON from file {json_path}: {e}")
            return

    for comment in comments:
        try:
            row = {
                'user_id': comment['user']['id'],
                'user_name': comment['user']['name'],
                'created_at': comment['created_at'],
                'updated_at': comment['updated_at'],
                'body': comment['body'],
                'new_line': comment['new_line'],
                'comment_url': comment['_links']['html']['href'],
            }
        except (KeyError, TypeError) as e:
            # Skip one malformed comment rather than losing the whole file.
            print(f"Skipping malformed comment in {json_path}: {e}")
            continue
        writer.writerow(row)

# Root directory containing one sub-directory per repository.
root_dir = 'openharmony'

# Output CSV collecting every extracted review comment.
output_csv = 'code_review_comments.csv'

# Column order for the CSV header; must match the keys produced by
# process_json_file.
csv_columns = ['user_id', 'user_name', 'created_at', 'updated_at', 'body', 'new_line', 'comment_url']


class _LockedWriter:
    """Minimal thread-safe wrapper around a csv.DictWriter.

    The csv module makes no thread-safety guarantee, yet worker threads all
    write to one shared writer; funneling every writerow through a lock
    prevents concurrent calls from interleaving/corrupting rows.
    """

    def __init__(self, writer):
        self._writer = writer
        self._lock = threading.Lock()

    def writerow(self, row):
        with self._lock:
            self._writer.writerow(row)


# Open the CSV once and stream rows into it as files are processed.
with open(output_csv, mode='w', newline='', encoding='utf-8') as csv_file:
    writer = csv.DictWriter(csv_file, fieldnames=csv_columns)
    writer.writeheader()
    safe_writer = _LockedWriter(writer)

    # Only immediate sub-directories of root_dir are treated as repositories.
    repo_names = [name for name in os.listdir(root_dir)
                  if os.path.isdir(os.path.join(root_dir, name))]

    for repo_name in tqdm(repo_names, desc="Processing repositories"):
        repo_dir = os.path.join(root_dir, repo_name)
        json_files = [f for f in os.listdir(repo_dir) if f.endswith('.json')]

        # The work is I/O-bound (many small file reads), so threads overlap
        # the waits; all CSV writes go through the locked wrapper above.
        with ThreadPoolExecutor(max_workers=8) as executor:
            futures = [
                executor.submit(process_json_file,
                                os.path.join(repo_dir, json_file), safe_writer)
                for json_file in json_files
            ]

            # Drive the futures to completion with a per-repo progress bar.
            for future in tqdm(as_completed(futures), total=len(futures),
                               desc=f"Processing files in {repo_name}", leave=False):
                future.result()  # Re-raise any exception from a worker.

print(f"Data has been successfully written to {output_csv}")