import pymysql
import argparse
import csv

def merge_data_to_result_file(db_config, batch_size=100, csv_path='result_file_data.csv'):
    """Join source_file rows with matching find_file rows into result_file, then export to CSV.

    For every row in source_file, find_file rows with the same file extension
    and file size are looked up; duplicate matches (same md5) are collapsed to
    the first one seen.  Source rows with no match at all get one placeholder
    row whose find_file_name is 'not_found'.  Finally export_to_csv() dumps a
    filtered view of result_file to *csv_path*.

    Args:
        db_config: keyword arguments forwarded to pymysql.connect()
            (host, user, password, database, ...).
        batch_size: number of source_file rows fetched — and inserted —
            per round trip.
        csv_path: destination path for the exported CSV file.
    """
    conn = pymysql.connect(**db_config, charset='utf8mb4')
    try:
        cursor = conn.cursor()

        # Create the result_file table if it does not exist yet.
        cursor.execute('''
            CREATE TABLE IF NOT EXISTS result_file (
                id INT AUTO_INCREMENT PRIMARY KEY,
                source_id VARCHAR(255),
                source_absolutepath VARCHAR(255),
                source_sysfilename VARCHAR(255),
                source_operation_code VARCHAR(255),
                source_operation_id VARCHAR(255),
                source_phyfilename VARCHAR(255),
                source_filetype VARCHAR(255),
                source_filesize INT,
                source_personid VARCHAR(255),
                source_personname VARCHAR(255),
                source_operationname VARCHAR(255),
                source_file_not_found_path VARCHAR(255),
                source_processed TINYINT(1),
                find_id INT,
                find_file_name VARCHAR(255),
                find_absolute_path TEXT,
                find_file_extension VARCHAR(50),
                find_file_size BIGINT,
                find_md5 VARCHAR(32)
            ) ENGINE=InnoDB CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci
        ''')

        # Composite index used by downstream lookups.
        create_index(cursor)

        insert_sql = '''
            INSERT INTO result_file (
                source_id, source_absolutepath,
                source_sysfilename, source_operation_code,
                source_operation_id, source_phyfilename,
                source_filetype, source_filesize,
                source_personid, source_personname,
                source_operationname, source_file_not_found_path,
                source_processed, find_id,
                find_file_name, find_absolute_path,
                find_file_extension, find_file_size,
                find_md5
            ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
        '''

        # Page through source_file in fixed-size batches.
        offset = 0
        while True:
            # Parameterized LIMIT/OFFSET instead of f-string interpolation.
            cursor.execute('''
                SELECT id, ABSOLUTEPATH, SYSFILENAME, OPERATIONCODE, OPERATIONID,
                       PHYFILENAME, FILETYPE, FILESIZE, PERSONID, PERSONNAME,
                       OPERATIONNAME, file_not_found_path, processed
                FROM source_file
                LIMIT %s OFFSET %s
            ''', (batch_size, offset))
            source_rows = cursor.fetchall()

            if not source_rows:
                break

            insert_batch = []
            for source_row in source_rows:
                insert_batch.extend(_result_rows_for_source(cursor, source_row))

            # BUG FIX: the original flushed (and cleared) insert_batch inside
            # the per-row loop, so rows were inserted one source row at a time
            # and the "final batch" flush after the loop was dead code.
            # Flush once per fetched batch instead.
            if insert_batch:
                print(f"Processing batch starting at offset {offset} with {len(insert_batch)} records.")
                cursor.executemany(insert_sql, insert_batch)
                conn.commit()

            offset += batch_size

        # Export the merged data to CSV.
        export_to_csv(cursor, csv_path)
        cursor.close()
    finally:
        # Always release the connection, even if a query raised mid-run.
        conn.close()

    print("数据已成功合并到result_file表中。")


def _result_rows_for_source(cursor, source_row):
    """Build the result_file tuples for one source_file row.

    Matches find_file rows on (file_extension, file_size), keeping only the
    first row seen per md5 to de-duplicate.  When nothing matches, returns a
    single placeholder row with find_file_name='not_found' so the source row
    is still represented in result_file.
    """
    (source_id, source_absolutepath, source_sysfilename, source_operation_code,
     source_operation_id, source_phyfilename, source_filetype, source_filesize,
     source_personid, source_personname, source_operationname,
     source_file_not_found_path, source_processed) = source_row

    cursor.execute('''
        SELECT id, file_name, absolute_path, file_extension, file_size, md5
        FROM find_file
        WHERE file_extension = %s AND file_size = %s
    ''', (source_filetype, source_filesize))

    # De-duplicate by md5, keeping the first occurrence.
    md5_map = {}
    for (find_id, find_file_name, find_absolute_path,
         find_file_extension, find_file_size, find_md5) in cursor.fetchall():
        md5_map.setdefault(find_md5, (find_id, find_file_name, find_absolute_path,
                                      find_file_extension, find_file_size))

    source_fields = (source_id, source_absolutepath, source_sysfilename,
                     source_operation_code, source_operation_id, source_phyfilename,
                     source_filetype, source_filesize, source_personid,
                     source_personname, source_operationname,
                     source_file_not_found_path, source_processed)

    rows = [source_fields + find_data + (find_md5,)
            for find_md5, find_data in md5_map.items()]
    if not rows:
        # No match: insert one placeholder row for this source row.
        rows.append(source_fields + (None, "not_found", None, None, None, None))
    return rows

def create_index(cursor):
    """Create the composite index on result_file used for operation lookups.

    MySQL has no CREATE INDEX IF NOT EXISTS, so error 1061 (duplicate key
    name) is treated as "index already exists" and ignored; any other
    operational error propagates to the caller.
    """
    statement = (
        "CREATE INDEX idx_op_code_id_v2 ON result_file "
        "(source_operation_code, source_operation_id)"
    )
    try:
        cursor.execute(statement)
    except pymysql.err.OperationalError as err:
        if err.args[0] != 1061:
            raise
        print("索引已存在，跳过创建。")

def export_to_csv(cursor, csv_path):
    """Export a filtered slice of result_file to a UTF-8 CSV file.

    Only rows whose source_sysfilename has fewer than 50 successful matches
    (find_file_name <> 'not_found') are exported, which drops filenames that
    matched suspiciously many files.
    """
    query = '''
        SELECT source_operation_code, source_operation_id, source_sysfilename, 
               source_phyfilename, source_file_not_found_path, find_absolute_path
        FROM result_file
        WHERE source_sysfilename IN (
            SELECT source_sysfilename 
            FROM (
                SELECT source_sysfilename, COUNT(source_sysfilename) AS num
                FROM (
                    SELECT source_sysfilename  
                    FROM result_file  
                    WHERE find_file_name <> 'not_found'
                ) a 
                GROUP BY source_sysfilename 
                ORDER BY num DESC
            ) b 
            WHERE num < 50
        )
    '''

    cursor.execute(query)

    header = ['source_operation_code', 'source_operation_id', 'source_sysfilename',
              'source_phyfilename', 'source_file_not_found_path', 'find_absolute_path']

    # Header first, then every fetched row.
    with open(csv_path, 'w', newline='', encoding='utf-8') as outfile:
        writer = csv.writer(outfile)
        writer.writerow(header)
        writer.writerows(cursor.fetchall())

    print(f"数据已成功导出到 {csv_path}。")

if __name__ == "__main__":
    # CLI entry point: parse MySQL connection options, then run the merge/export.
    parser = argparse.ArgumentParser(description='将source_file和find_file表的数据合并到result_file表中')
    for flag, help_text in (('--host', 'MySQL主机地址'),
                            ('--user', 'MySQL用户名'),
                            ('--password', 'MySQL密码')):
        parser.add_argument(flag, type=str, help=help_text)
    parser.add_argument('--csv_path', type=str, default='result_file_data.csv', help='CSV文件路径')

    args = parser.parse_args()

    # Connection settings; the database name is fixed to 'blg'.
    db_config = dict(host=args.host,
                     user=args.user,
                     password=args.password,
                     database='blg')

    merge_data_to_result_file(db_config, csv_path=args.csv_path)
