import paramiko
import subprocess
import time
import os
import csv

# ==== Configuration ====
remote_host = 'nuc.10042008.xyz'   # SSH host that runs the server binary
remote_port = 17003                # SSH port on the remote host
remote_user = 'ubuntu'             # SSH login user

# Command templates; '{batch_size}' is filled in per run.
# The server is launched remotely over SSH, the client locally via subprocess.
remote_server_cmd_template = '/home/ubuntu/xeg/BenchBF3/bin_dpu/test_cpu_server --batch_size={batch_size}'
client_cmd_template = ['/root/xeg/BenchBF3/bin_host/test_cpu', '--batch_size={batch_size}', '--if_pipeline=0']
# batch_sizes = [8192]  # single-size run, kept for quick debugging
batch_sizes = [2**i for i in range(1, 18)]  # [2, 4, ..., 131072]

results_dir = '../results_raw'     # per-batch logs and the CSV go here
output_csv = os.path.join(results_dir, 'pipeline_cross_tuopu.csv')

# ==== Create the results directory (no-op if it already exists) ====
os.makedirs(results_dir, exist_ok=True)

# ==== Time-unit conversion helper ====
def convert_to_us(value, unit):
    """Convert a time value to microseconds.

    Args:
        value: numeric time value.
        unit: unit string, one of 's', 'ms', 'us', 'ns'.

    Returns:
        The value expressed in microseconds.

    Raises:
        ValueError: if *unit* is not one of the supported strings.
    """
    # Dispatch table instead of an if/elif chain; 'ns' keeps true
    # division so results match exactly.
    converters = {
        's': lambda v: v * 1_000_000,
        'ms': lambda v: v * 1_000,
        'us': lambda v: v,
        'ns': lambda v: v / 1000,
    }
    if unit not in converters:
        raise ValueError(f"Unknown unit: {unit}")
    return converters[unit](value)

# ==== SSH client ====
ssh = paramiko.SSHClient()
# NOTE(review): AutoAddPolicy blindly trusts unknown host keys — acceptable
# for a benchmark script on a private network, not for untrusted environments.
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())

print(f"连接远程服务器 {remote_host}:{remote_port} ...")
# No password/key argument: relies on the local SSH agent or default key files.
ssh.connect(remote_host, port=remote_port, username=remote_user)

# ==== Persistent interactive shell used to keep the remote server running ====
# A PTY is requested so that Ctrl+C ('\x03') sent later actually interrupts
# the foreground server process between runs.
transport = ssh.get_transport()
channel = transport.open_session()
channel.get_pty()
channel.invoke_shell()

# ==== Open the CSV output file ====
# For each batch size: start the remote server, run the local client,
# stop the server, then parse the client's 'total_time' line into one CSV row.
with open(output_csv, 'w', newline='') as csvfile:
    fieldnames = [
        'batch_size',
        'p50_time_us',
        'p99_time_us',
        'data_throughput_MB/s_p50',
        'data_throughput_MB/s_p99',
        'request_throughput_rps_p50',
        'request_throughput_rps_p99'
    ]
    writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
    writer.writeheader()

    for batch_size in batch_sizes:
        print(f"\n===== 测试 batch_size={batch_size} 开始 =====")

        # 1. Launch the server in the remote interactive shell
        remote_server_cmd = remote_server_cmd_template.format(batch_size=batch_size)
        print(f"远程启动 server: {remote_server_cmd}")
        channel.send(remote_server_cmd + '\n')

        # 2. Wait for the server to come up.
        # NOTE(review): a fixed 3 s sleep is a race — polling the channel
        # output for a readiness message would be more robust.
        print("等待 server 启动...")
        time.sleep(3)

        # 3. Run the local client, capturing stdout and stderr together.
        client_cmd = [arg.format(batch_size=batch_size) for arg in client_cmd_template]
        print(f"启动本地 client: {' '.join(client_cmd)}")
        client_proc = subprocess.run(
            client_cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            text=True,
            encoding='utf-8',
            errors='ignore'
        )

        # Save the raw client output for later inspection
        output_file = os.path.join(results_dir, f'batch_{batch_size}.log')
        with open(output_file, 'w') as f:
            f.write(client_proc.stdout)
        print(f"client 执行完成，输出保存在 {output_file}")

        # 4. Stop the remote server by sending Ctrl+C (ASCII ETX) to the PTY
        print("发送 Ctrl+C 停止远程 server...")
        channel.send('\x03')
        time.sleep(2)

        # Drain any buffered server output so it cannot leak into the next run
        while channel.recv_ready():
            _ = channel.recv(1024).decode('utf-8', errors='ignore')

        # 5. Find the 'total_time' line in the client output.
        # Expected pipe-separated format with p50 in field 2 and p99 in
        # field 3, each as '<value> <unit>' — TODO confirm against test_cpu.
        time_line = None
        for line in client_proc.stdout.splitlines():
            if "total_time" in line:
                time_line = line
                break

        if not time_line:
            print(f"Warning: No 'total_time' found for batch_size={batch_size}")
            continue

        parts = time_line.split('|')
        if len(parts) < 4:
            print(f"Warning: Unexpected format for batch_size={batch_size}")
            continue

        try:
            p50_str = parts[2].strip()
            p99_str = parts[3].strip()

            # Each field is '<number> <unit>'; normalize both to microseconds
            p50_value, p50_unit = p50_str.split()
            p99_value, p99_unit = p99_str.split()

            p50_us = convert_to_us(float(p50_value), p50_unit)
            p99_us = convert_to_us(float(p99_value), p99_unit)

            # Data throughput in MB/s.
            # assumes each request carries 40 * 8 bytes of payload —
            # TODO confirm this matches the client's record layout.
            data_bytes = batch_size * 40 * 8
            data_throughput_p50 = data_bytes / (p50_us / 1_000_000) / (1024 * 1024)
            data_throughput_p99 = data_bytes / (p99_us / 1_000_000) / (1024 * 1024)

            # Request throughput in requests/second
            request_throughput_p50 = batch_size / (p50_us / 1_000_000)
            request_throughput_p99 = batch_size / (p99_us / 1_000_000)

            writer.writerow({
                'batch_size': batch_size,
                'p50_time_us': p50_us,
                'p99_time_us': p99_us,
                'data_throughput_MB/s_p50': data_throughput_p50,
                'data_throughput_MB/s_p99': data_throughput_p99,
                'request_throughput_rps_p50': request_throughput_p50,
                'request_throughput_rps_p99': request_throughput_p99
            })

        except Exception as e:
            # Best-effort: log the parse failure and move on to the next size
            print(f"Error parsing total_time for batch_size={batch_size}: {e}")
            continue

        print(f"===== 测试 batch_size={batch_size} 结束 =====\n")

# ==== Cleanup ====
# Close the interactive channel first, then the SSH connection itself.
print("关闭 SSH 连接")
for conn in (channel, ssh):
    conn.close()

print(f"测试完成! 结果已保存到 {output_csv}")
