import os
import csv
import math
import shutil
from multiprocessing import Pool
from parse_pin import parse_pins, parse_sdf_net_section

def build_statistical_features_single_design(args):
    """
    Multiprocessing worker: extract per-(driver, sink) statistical features
    for one design and write them to a standalone CSV.

    Parameters
    ----------
    args : tuple
        (name, def_file_path, sdf_file_path, pre_route_sdf_path,
         lib_dir, log_dir, lef_dir, output_dir) — packed into a single
        tuple so the function is usable with ``Pool.map``.

    Side effects
    ------------
    Writes ``<output_dir>/<name>.csv``. On a parse failure it prints an
    error and returns without writing anything.
    """
    (name, def_file_path, sdf_file_path, pre_route_sdf_path,
     lib_dir, log_dir, lef_dir, output_dir) = args
    output_csv_path = os.path.join(output_dir, f"{name}.csv")

    try:
        pin_info = parse_pins(def_file_path, lib_dir, log_dir, lef_dir, sdf_file_path, pre_route_sdf_path)
        _, net_connections = parse_sdf_net_section(sdf_file_path)
    except Exception as e:
        # Best-effort per design: a broken design must not kill the pool.
        print(f"[X] Failed to parse {name}: {e}")
        return

    # Pre-aggregate total sink capacitance per net ONCE. The original code
    # re-scanned every pin for each (driver, sink) pair, which is
    # O(pairs * pins); this is a single O(pins) pass with O(1) lookups.
    net_sink_cap = {}
    for info in pin_info.values():
        if info["is_fanin_or_fanout"] == 0:
            net = info["net_name"]
            net_sink_cap[net] = net_sink_cap.get(net, 0.0) + info["ff_rise"]

    data = []
    for (driver, sink) in net_connections:
        if driver not in pin_info or sink not in pin_info:
            continue

        driver_info = pin_info[driver]
        sink_info = pin_info[sink]

        # Keep only genuine driver -> sink pairs:
        # driver must be a fanout pin (1), sink a fanin pin (0).
        if driver_info["is_fanin_or_fanout"] != 1 or sink_info["is_fanin_or_fanout"] != 0:
            continue

        # Both endpoints must belong to the same net.
        net_name = driver_info["net_name"]
        if net_name != sink_info["net_name"]:
            continue

        # 1. Feature extraction
        driver_output_cap = driver_info["ff_rise"]
        total_sink_cap = net_sink_cap.get(net_name, 0.0)

        # Manhattan components of the driver-to-sink distance.
        dx = abs(driver_info["left_distance"] - sink_info["left_distance"])
        dy = abs(driver_info["bottom_distance"] - sink_info["bottom_distance"])

        # Sink pin coordinates; fall back to the edge distances when no
        # explicit position is recorded.
        sink_x = sink_info.get("x_pos", sink_info["left_distance"])
        sink_y = sink_info.get("y_pos", sink_info["bottom_distance"])

        # Worst (largest) pre-route input slew over the four corner/edge
        # combinations (FF/SS x rise/fall); missing corners count as 0.0.
        slew_vals = [
            driver_info.get("FF_rise_slew_pre_route", 0.0),
            driver_info.get("SS_rise_slew_pre_route", 0.0),
            driver_info.get("FF_fall_slew_pre_route", 0.0),
            driver_info.get("SS_fall_slew_pre_route", 0.0),
        ]
        max_driver_input_slew = max(slew_vals)

        # 2. Ground truth target_delay: mean of the four SDF net delays.
        ff_rise = sink_info.get("FF_rise_net_delay_to_root_pin", 0.0)
        ss_rise = sink_info.get("SS_rise_net_delay_to_root_pin", 0.0)
        ff_fall = sink_info.get("FF_fall_net_delay_to_root_pin", 0.0)
        ss_fall = sink_info.get("SS_fall_net_delay_to_root_pin", 0.0)
        target_delay = (ff_rise + ss_rise + ff_fall + ss_fall) / 4.0

        data.append({
            "design_name": name,
            "net_name": net_name,
            "driver_pin": driver,
            "sink_pin": sink,
            "driver_output_cap": driver_output_cap,
            "total_sink_cap": total_sink_cap,
            "driver_to_sink_dx": dx,
            "driver_to_sink_dy": dy,
            "sink_x": sink_x,
            "sink_y": sink_y,
            "max_driver_input_slew": max_driver_input_slew,
            "target_delay": target_delay
        })

    fieldnames = [
        "design_name", "net_name", "driver_pin", "sink_pin",
        "driver_output_cap", "total_sink_cap",
        "driver_to_sink_dx", "driver_to_sink_dy",
        "sink_x", "sink_y", "max_driver_input_slew", "target_delay"
    ]

    with open(output_csv_path, "w", newline="") as f:
        writer = csv.DictWriter(f, fieldnames=fieldnames)
        writer.writeheader()
        writer.writerows(data)

    print(f"[✓] Done: {name} → {output_csv_path}")


def merge_csv_files(csv_dir, final_csv_path):
    """
    Merge every per-design CSV in *csv_dir* into one final CSV, then remove
    the temporary directory.

    Parameters
    ----------
    csv_dir : str
        Directory holding the per-design CSV files (each with a header row
        matching ``fieldnames`` below).
    final_csv_path : str
        Destination path of the merged CSV.
    """
    fieldnames = [
        "design_name", "net_name", "driver_pin", "sink_pin",
        "driver_output_cap", "total_sink_cap",
        "driver_to_sink_dx", "driver_to_sink_dy",
        "sink_x", "sink_y", "max_driver_input_slew", "target_delay"
    ]

    with open(final_csv_path, "w", newline="") as fout:
        writer = csv.DictWriter(fout, fieldnames=fieldnames)
        writer.writeheader()

        # sorted(): os.listdir order is arbitrary, which made the merged
        # file non-deterministic between runs.
        for fname in sorted(os.listdir(csv_dir)):
            if not fname.endswith(".csv"):
                continue
            # newline="" is required by the csv module for correct
            # universal-newline handling on read as well as write.
            with open(os.path.join(csv_dir, fname), "r", newline="") as fin:
                writer.writerows(csv.DictReader(fin))

    print(f"[✓] Final merged CSV written to: {final_csv_path}")
    # Best-effort cleanup of the temp dir; failure here is non-fatal.
    try:
        shutil.rmtree(csv_dir)
    except OSError as e:
        print(f"[!] Failed to remove {csv_dir}: {e}")

def main():
    """
    Collect per-design feature-extraction tasks, run them across all CPU
    cores, and merge the per-design CSVs into one final feature file.
    """
    lib_dir = "/home/hujiahui/case_work/prase_platfrom/asap7/lib"
    lef_dir = "/home/hujiahui/case_work/platform/asap7/lef"
    dataset_dir = "/home/hujiahui/test/dataset"
    pdk_name = 'asap7'
    output_dir = "./tmp_feature_csv"  # one temporary CSV per design
    final_output_csv = "../net_delay_predict_work/rf_feature.csv"

    # Start from a clean temp dir: merge_csv_files reads every *.csv in it,
    # so stale files from a previous (possibly failed) run would otherwise
    # leak into the merged output.
    shutil.rmtree(output_dir, ignore_errors=True)
    os.makedirs(output_dir, exist_ok=True)

    # Ensure the destination directory of the final CSV exists, otherwise
    # the merge step would fail at open().
    final_parent = os.path.dirname(final_output_csv)
    if final_parent:
        os.makedirs(final_parent, exist_ok=True)

    task_args = []
    for name in os.listdir(dataset_dir):
        log_dir = f"{dataset_dir}/{name}/{pdk_name}/innovus/log"
        def_file_path = f"{log_dir}/cts.def"
        sdf_file_path = f"{log_dir}/post_route.sdf"
        pre_route_sdf_path = f"{log_dir}/opensta_pre_route.sdf"

        # Skip designs that are missing any required input file.
        required = (def_file_path, sdf_file_path, pre_route_sdf_path)
        if not all(os.path.exists(p) for p in required):
            print(f"[!] Skipped {name}: Missing DEF or SDF")
            continue

        task_args.append((name, def_file_path, sdf_file_path, pre_route_sdf_path,
                          lib_dir, log_dir, lef_dir, output_dir))

    # One worker per CPU core; each task writes its own CSV, so no
    # inter-process coordination is needed.
    with Pool(processes=os.cpu_count()) as pool:
        pool.map(build_statistical_features_single_design, task_args)

    merge_csv_files(output_dir, final_output_csv)


if __name__ == "__main__":
    main()
