import json
import csv
from collections import defaultdict

def analyze_and_export(json_file: str, output_prefix: str) -> None:
    """Match UDS request/response pairs in a Wireshark DoIP JSON export and
    write per-service-ID CSV reports.

    Args:
        json_file: Path to the JSON file exported by Wireshark/tshark.
        output_prefix: Prefix used for all generated CSV report files.
    """
    # Load the exported packet list.
    with open(json_file, 'r', encoding='utf-8') as f:
        packets = json.load(f)

    # Per-service-ID buckets: in-flight requests keyed by
    # (source, target, sid, subfunction), plus matched pairs and latencies.
    sid_data = defaultdict(lambda: {
        'pending_requests': {},
        'matched_pairs': [],
        'response_times': []
    })

    for entry in packets:
        layers = entry.get('_source', {}).get('layers', {})
        frame_num = layers.get('frame', {}).get('frame.number')
        doip = layers.get('doip', {})
        uds = layers.get('uds', {})

        # Packets without a UDS service ID are irrelevant here.
        service_id = uds.get('uds.sid')
        if not service_id:
            continue

        source = _parse_hex(doip.get('doip.source_address'))
        target = _parse_hex(doip.get('doip.target_address'))
        subfunction = uds.get('uds.subfunction')
        is_request = uds.get('uds.reply') == '0x00'
        is_response = uds.get('uds.reply') == '0x01'
        frame_time = _get_frame_time(layers)

        # Explicit None checks: a parsed address of 0x0000 is falsy but
        # still valid, so `all([source, target, subfunction])` would
        # wrongly discard it.
        fields_present = (source is not None
                          and target is not None
                          and subfunction is not None)

        # Bucket for the current SID (created on first access).
        sid_group = sid_data[service_id]

        # Record a request; a later response with swapped source/target
        # addresses closes it out.
        if is_request and fields_present:
            key = (source, target, service_id, subfunction)
            sid_group['pending_requests'][key] = {
                'frame_num': frame_num,
                'time': frame_time
            }

        # Match a response against its pending request.
        elif is_response and fields_present:
            reverse_key = (target, source, service_id, subfunction)
            req = sid_group['pending_requests'].pop(reverse_key, None)
            # Both timestamps must be present to compute a latency;
            # checking req['time'] avoids a TypeError when the request
            # frame carried no epoch time.
            if req and frame_time is not None and req['time'] is not None:
                time_diff_ms = (frame_time - req['time']) * 1000
                sid_group['matched_pairs'].append({
                    'req_frame': req['frame_num'],
                    'res_frame': frame_num,
                    'source': f"0x{source:04x}",
                    'target': f"0x{target:04x}",
                    'service_id': service_id,
                    'subfunction': subfunction,
                    'response_time': round(time_diff_ms, 2)
                })
                sid_group['response_times'].append(time_diff_ms)

    # Emit one report set per service ID. Use a distinct loop variable so
    # the loaded JSON is not shadowed.
    for sid, group in sid_data.items():
        sanitized_sid = sid.replace('0x', '').replace(':', '')
        generate_reports(group, output_prefix, sanitized_sid)

def generate_reports(data: dict, prefix: str, sid: str) -> None:
    """Write the detailed pair list and the statistics summary for one SID.

    Args:
        data: Dict with 'matched_pairs' (list of CSV row dicts) and
            'response_times' (list of latencies in milliseconds).
        prefix: Output file name prefix.
        sid: Sanitized service ID (hex digits, no '0x' prefix).
    """
    # Detailed request/response pair list (only when there is data).
    if data['matched_pairs']:
        detailed_file = f"{prefix}_detailed_{sid}.csv"
        # Explicit utf-8: matches the input side and keeps the Chinese
        # headers readable regardless of the platform's locale encoding.
        with open(detailed_file, 'w', newline='', encoding='utf-8') as f:
            writer = csv.DictWriter(f, fieldnames=data['matched_pairs'][0].keys())
            writer.writeheader()
            writer.writerows(data['matched_pairs'])

    # Statistics summary.
    stats_file = f"{prefix}_stats_{sid}.csv"
    response_times = data['response_times']
    with open(stats_file, 'w', newline='', encoding='utf-8') as f:
        writer = csv.writer(f)
        writer.writerow(['统计项', '值'])
        writer.writerow(['服务ID', f"0x{sid}"])
        writer.writerow(['有效请求-响应对总数', len(response_times)])
        if response_times:
            writer.writerow(['平均响应时间(ms)', round(sum(response_times)/len(response_times), 2)])
            writer.writerow(['最大响应时间(ms)', round(max(response_times), 2)])
            writer.writerow(['最小响应时间(ms)', round(min(response_times), 2)])
        writer.writerow([])
        writer.writerow(['时间分布(ms)', '频次'])
        # Histogram with 10 ms buckets, keyed by the numeric bucket start so
        # rows sort numerically (a string sort would put "100-110" before
        # "20-30").
        time_bins = defaultdict(int)
        for t in response_times:
            time_bins[int(t // 10) * 10] += 1
        for bin_start in sorted(time_bins):
            writer.writerow([f"{bin_start}-{bin_start + 10}", time_bins[bin_start]])

def _parse_hex(hex_str: str | None) -> int | None:
    """解析十六进制字符串"""
    if hex_str and hex_str.startswith('0x'):
        try:
            return int(hex_str, 16)
        except ValueError:
            return None
    return None

def _get_frame_time(layers: dict) -> float | None:
    """获取时间戳"""
    time_str = layers.get('frame', {}).get('frame.time_epoch')
    try:
        return float(time_str) if time_str else None
    except ValueError:
        return None

# Example invocation (output file prefix "report"). Guarded so importing this
# module does not trigger file I/O as a side effect.
if __name__ == "__main__":
    analyze_and_export(
        json_file='doip_input.json',
        output_prefix='report'
    )