import pandas as pd
import clevercsv
from datetime import datetime
import os
import argparse
import numpy as np

def merge_data_by_closest_time(target_csv_path, raw_csv_path, fields_to_merge):
    """
    Merge selected fields from a raw CSV into a target CSV by matching each
    target row with the raw row whose timestamp is closest in time.

    The merged result is written next to the target file as
    '<target>_merged<ext>'. On any failure an error message is printed and
    the function returns early without writing output.

    Args:
        target_csv_path (str): Path to the target CSV file (must contain a
            'time' column formatted as '%Y.%m.%d_%H.%M.%S').
        raw_csv_path (str): Path to the raw CSV file (same 'time' format).
        fields_to_merge (list): Column names to copy from raw.csv into each
            matched target row; columns already present are overwritten.

    Returns:
        None.
    """
    if not os.path.exists(target_csv_path):
        print(f"错误：Target CSV 文件不存在于 '{target_csv_path}'")
        return
    if not os.path.exists(raw_csv_path):
        print(f"错误：Raw CSV 文件不存在于 '{raw_csv_path}'")
        return

    # Read both CSV files. quoting=1 is csv.QUOTE_ALL; malformed lines are skipped.
    try:
        target_df = pd.read_csv(target_csv_path, on_bad_lines='skip', quotechar='"', quoting=1, escapechar='\\')
    except Exception as e:
        print(f"错误：无法读取 {target_csv_path} Target CSV 文件：{e}")
        return
    try:
        raw_df = pd.read_csv(raw_csv_path, on_bad_lines='skip', quotechar='"', quoting=1, escapechar='\\')
    except Exception as e:
        print(f"错误：无法读取 {raw_csv_path} Raw CSV 文件：{e}")
        # BUG FIX: the original fell through here, so raw_df was undefined
        # below and the function crashed with NameError instead of exiting.
        return

    # Both files must expose a 'time' column to match on.
    if 'time' not in target_df.columns:
        print("错误：target.csv 中缺少 'time' 字段。")
        return
    if 'time' not in raw_df.columns:
        print("错误：raw.csv 中缺少 'time' 字段。")
        return

    # Parse timestamps for comparison; errors='coerce' turns unparseable
    # values into NaT, and those rows are dropped right after.
    target_df['time_dt'] = pd.to_datetime(target_df['time'], format='%Y.%m.%d_%H.%M.%S', errors='coerce')
    raw_df['time_dt'] = pd.to_datetime(raw_df['time'], format='%Y.%m.%d_%H.%M.%S', errors='coerce')
    target_df.dropna(subset=['time_dt'], inplace=True)
    raw_df.dropna(subset=['time_dt'], inplace=True)

    if target_df.empty or raw_df.empty:
        print("错误：时间字段转换后，其中一个或两个 DataFrame 为空。请检查时间格式。")
        return

    # Index raw rows by timestamp (sorted) so nearest-time lookups are cheap.
    raw_df_indexed = raw_df.set_index('time_dt').sort_index()
    raw_times = raw_df_indexed.index

    # Warn once per missing field instead of once per target row.
    present_fields = []
    for field in fields_to_merge:
        if field in raw_df_indexed.columns:
            present_fields.append(field)
        else:
            print(f"警告：raw.csv 中未找到合并字段 '{field}'，跳过。")

    merged_data_rows = []
    for _, target_row in target_df.iterrows():
        # Positional argmin + iloc is robust even when raw timestamps repeat;
        # the original label-based .loc returned a DataFrame (not a Series)
        # for duplicated index labels and broke the per-row field access.
        pos = np.abs(raw_times - target_row['time_dt']).argmin()
        closest_raw_row = raw_df_indexed.iloc[pos]

        # Start from the target row's original data (without the helper column).
        current_row_data = target_row.drop('time_dt').to_dict()
        # Record which raw timestamp was matched.
        current_row_data['raw_time'] = closest_raw_row['time']
        # Copy the requested fields, overwriting duplicates.
        for field in present_fields:
            current_row_data[field] = closest_raw_row[field]
        merged_data_rows.append(current_row_data)

    merged_df = pd.DataFrame(merged_data_rows)

    # Column order: 'time' first, then the remaining target columns,
    # then 'raw_time', then the merged fields, then anything left over.
    original_target_columns = [col for col in target_df.columns if col != 'time_dt']
    new_columns_order = []
    if 'time' in original_target_columns:
        new_columns_order.append('time')
    new_columns_order.extend(col for col in original_target_columns if col != 'time')

    if 'raw_time' not in new_columns_order:
        new_columns_order.append('raw_time')
    for field in fields_to_merge:
        if field not in new_columns_order:
            new_columns_order.append(field)

    # Keep only columns that actually exist, then append any stragglers.
    final_columns_order = [col for col in new_columns_order if col in merged_df.columns]
    final_columns_order += [col for col in merged_df.columns if col not in final_columns_order]
    merged_df = merged_df[final_columns_order]

    # Write the result next to the target file as '<target>_merged<ext>'.
    base_name, ext = os.path.splitext(target_csv_path)
    output_csv_path = f"{base_name}_merged{ext}"
    try:
        merged_df.to_csv(output_csv_path, index=False)
        print(f"数据合并成功！结果已保存到 '{output_csv_path}'")
    except Exception as e:
        print(f"保存 CSV 文件时发生错误：{e}")

# --- 主程序入口 ---
if __name__ == "__main__":
    # Command-line entry point: merge raw-data fields into a target CSV file.
    default_fields = ['vchg', 'vbat', 'frontshell_therm', 'board_up_therm', 'board_down_therm',
                      'air_inlet_therm', 'backshell_therm', 'sxr_therm', 'wlan_pa_therm', 'usb_therm',
                      'bat_therm', 'cpu-0-0-0', 'gpuss-0', 'nspss-0', 'Fan1_PWM', 'Fan1_state',
                      'Fan2_PWM', 'Fan2_state', 'VST1_T_sensor', 'VST2_T_sensor']

    cli = argparse.ArgumentParser(description='添加原始时间字段和指定字段到目标文件')
    cli.add_argument('target_path', help='目标CSV文件路径')
    cli.add_argument('raw_path', help='原始数据CSV文件路径')
    cli.add_argument('--fields', nargs="+", default=default_fields,
                     help='要合并的字段名列表。例如：--fields vchg vbat')
    opts = cli.parse_args()

    # Run the merge with the parsed paths and field list.
    merge_data_by_closest_time(opts.target_path, opts.raw_path, opts.fields)

    # Example data files (created only if they don't exist, for easy testing)
    # target.csv
    # time,sceneState,status
    # 2025.07.23_14.04.02,34,Charging
    # 2025.07.23_14.04.05,35,Discharging
    # 2025.07.23_14.04.10,36,Idle

    # raw.csv (contains additional fields)
    # time,vchg,ibus,vbat,ibat,capacity,mAh,frontshell_therm,board_up_therm,board_down_therm,air_inlet_therm,backshell_therm,sxr_therm,wlan_pa_therm,usb_therm,bat_therm,cpu-0-0-0,cpu-0-0-1,cpu-0-1-1,cpu-0-1-0,cpu-1-0-0,cpu-1-0-1,cpu-1-1-0,cpu-1-1-1,cpu-1-2-0,cpu-1-2-1,cpu-1-3-0,cpu-1-3-1,gpuss-0,gpuss-1,gpuss-2,gpuss-3,gpuss-4,gpuss-5,gpuss-6,gpuss-7,nspss-0,nspss-1,nspss-2,Fan1_PWM,Fan1_state,Fan2_PWM,Fan2_state,VST1_status,VST1_T_sensor,VST1_fps,VST2_status,VST2_T_sensor,VST2_fps,SLAM1_status,SLAM1_fps,SLAM2_status,SLAM2_fps,SLAM3_status,SLAM3_fps,SLAM4_status,SLAM4_fps,SLAM5_status,SLAM5_fps,SLAM6_status,SLAM6_fps
    # 2025.07.23_14.04.01,5000000,100,3800,1000,98%,9800,25000,30000,31000,24000,25000,30000,28000,26000,24000,40000,41000,42000,43000,44000,45000,46000,47000,48000,49000,50000,51000,35000,35500,36000,36500,37000,37500,38000,38500,39000,39500,40000,10%,1,10%,1,Open,25,78,Open,25,78,Open,59,Open,59,Open,60,Open,59,Open,59,Open,59
    # 2025.07.23_14.04.03,5100000,120,3850,1100,99%,9900,25500,30500,31500,24500,25500,30500,28500,26500,24500,40500,41500,42500,43500,44500,45500,46500,47500,48500,49500,50500,51500,35500,36000,36500,37000,37500,38000,38500,39000,39500,40000,40500,12%,1,12%,1,Open,26,79,Open,26,79,Open,60,Open,60,Open,61,Open,60,Open,60,Open,60
    # 2025.07.23_14.04.06,5200000,150,3900,1200,100%,10000,26000,31000,32000,25000,26000,31000,29000,27000,25000,41000,42000,43000,44000,45000,46000,47000,48000,49000,50000,51000,52000,36000,36500,37000,37500,38000,38500,39000,39500,40000,40500,41000,15%,1,15%,1,Open,27,80,Open,27,80,Open,61,Open,61,Open,62,Open,61,Open,61,Open,61
    # 2025.07.23_14.04.08,5300000,130,3880,1150,99%,9950,25800,30800,31800,24800,25800,30800,28800,26800,24800,40800,41800,42800,43800,44800,45800,46800,47800,48800,49800,50800,51800,35800,36300,36800,37300,37800,38300,38800,39300,39800,40300,40800,13%,1,13%,1,Open,26,79,Open,26,79,Open,60,Open,60,Open,61,Open,60,Open,60,Open,60
    # 2025.07.23_14.04.11,5400000,160,3920,1250,100%,10000,26200,31200,32200,25200,26200,31200,29200,27200,25200,41200,42200,43200,44200,45200,46200,47200,48200,49200,50200,51200,52200,36200,36700,37200,37700,38200,38700,39200,39700,40200,40700,41200,16%,1,16%,1,Open,28,81,Open,28,81,Open,62,Open,62,Open,63,Open,62,Open,62,Open,62

    # To auto-generate the test files, uncomment the code below
    # if not os.path.exists("target.csv"):
    #     with open("target.csv", 'w') as f:
    #         f.write("time,sceneState,status\n")
    #         f.write("2025.07.23_14.04.02,34,Charging\n")
    #         f.write("2025.07.23_14.04.05,35,Discharging\n")
    #         f.write("2025.07.23_14.04.10,36,Idle\n")
    #
    # if not os.path.exists("raw.csv"):
    #     with open("raw.csv", 'w') as f:
    #         f.write("time,vchg,ibus,vbat,ibat,capacity,mAh,frontshell_therm,board_up_therm,board_down_therm,air_inlet_therm,backshell_therm,sxr_therm,wlan_pa_therm,usb_therm,bat_therm,cpu-0-0-0,cpu-0-0-1,cpu-0-1-1,cpu-0-1-0,cpu-1-0-0,cpu-1-0-1,cpu-1-1-0,cpu-1-1-1,cpu-1-2-0,cpu-1-2-1,cpu-1-3-0,cpu-1-3-1,gpuss-0,gpuss-1,gpuss-2,gpuss-3,gpuss-4,gpuss-5,gpuss-6,gpuss-7,nspss-0,nspss-1,nspss-2,Fan1_PWM,Fan1_state,Fan2_PWM,Fan2_state,VST1_status,VST1_T_sensor,VST1_fps,VST2_status,VST2_T_sensor,VST2_fps,SLAM1_status,SLAM1_fps,SLAM2_status,SLAM2_fps,SLAM3_status,SLAM3_fps,SLAM4_status,SLAM4_fps,SLAM5_status,SLAM5_fps,SLAM6_status,SLAM6_fps\n")
    #         f.write("2025.07.23_14.04.01,5000000,100,3800,1000,98%,9800,25000,30000,31000,24000,25000,30000,28000,26000,24000,40000,41000,42000,43000,44000,45000,46000,47000,48000,49000,50000,51000,35000,35500,36000,36500,37000,37500,38000,38500,39000,39500,40000,10%,1,10%,1,Open,25,78,Open,25,78,Open,59,Open,59,Open,60,Open,59,Open,59,Open,59\n")
    #         f.write("2025.07.23_14.04.03,5100000,120,3850,1100,99%,9900,25500,30500,31500,24500,25500,30500,28500,26500,24500,40500,41500,42500,43500,44500,45500,46500,47500,48500,49500,50500,51500,35500,36000,36500,37000,37500,38000,38500,39000,39500,40000,40500,12%,1,12%,1,Open,26,79,Open,26,79,Open,60,Open,60,Open,61,Open,60,Open,60,Open,60\n")
    #         f.write("2025.07.23_14.04.06,5200000,150,3900,1200,100%,10000,26000,31000,32000,25000,26000,31000,29000,27000,25000,41000,42000,43000,44000,45000,46000,47000,48000,49000,50000,51000,52000,36000,36500,37000,37500,38000,38500,39000,39500,40000,40500,41000,15%,1,15%,1,Open,27,80,Open,27,80,Open,61,Open,61,Open,62,Open,61,Open,61,Open,61\n")
    #         f.write("2025.07.23_14.04.08,5300000,130,3880,1150,99%,9950,25800,30800,31800,24800,25800,30800,28800,26800,24800,40800,41800,42800,43800,44800,45800,46800,47800,48800,49800,50800,51800,35800,36300,36800,37300,37800,38300,38800,39300,39800,40300,40800,13%,1,13%,1,Open,26,79,Open,26,79,Open,60,Open,60,Open,61,Open,60,Open,60,Open,60\n")
    #         f.write("2025.07.23_14.04.11,5400000,160,3920,1250,100%,10000,26200,31200,32200,25200,26200,31200,29200,27200,25200,41200,42200,43200,44200,45200,46200,47200,48200,49200,50200,51200,52200,36200,36700,37200,37700,38200,38700,39200,39700,40200,40700,41200,16%,1,16%,1,Open,28,81,Open,28,81,Open,62,Open,62,Open,63,Open,62,Open,62,Open,62\n")